From 5763fb393a4bbc5b2de7b633cb1b04bf2cb0d659 Mon Sep 17 00:00:00 2001
From: TO21Consortium <51391510+TO21Consortium@users.noreply.github.com>
Date: Tue, 11 Jun 2019 18:50:50 +0900
Subject: [PATCH] import exynos 7570 bsp

* source taken from https://github.com/TO21Consortium/SGSWPlatform

Change-Id: I8d4f20b9f4646bc3b4b7332fd89708186bdbc672
---
 Android.mk | 136 +
 BoardConfigCFlags.mk | 175 +
 NOTICE | 190 +
 exyrngd/Android.mk | 15 +
 exyrngd/LICENSE | 30 +
 exyrngd/NOTICE | 190 +
 exyrngd/README.txt | 40 +
 exyrngd/exyrngd.c | 395 +
 gralloc/Android.mk | 54 +
 gralloc/format_chooser.cpp | 99 +
 gralloc/format_chooser.h | 40 +
 gralloc/framebuffer.cpp | 282 +
 gralloc/gr.h | 43 +
 gralloc/gralloc.cpp | 591 ++
 gralloc/mapper.cpp | 495 +
 include/ExynosBuffer.h | 176 +
 include/ExynosExif.h | 276 +
 include/ExynosJpegApi.h | 185 +
 include/ExynosJpegEncoderForCamera.h | 128 +
 include/FimgApi.h | 199 +
 include/csc.h | 562 ++
 include/decon-fb.h | 240 +
 include/decon_8890.h | 243 +
 include/exynos-hwjpeg.h | 616 ++
 include/exynos_blender.h | 495 +
 include/exynos_blender_obj.h | 211 +
 include/exynos_format.h | 224 +
 include/exynos_gscaler.h | 518 ++
 include/exynos_ion.h | 54 +
 include/exynos_scaler.h | 455 +
 include/exynos_v4l2.h | 208 +
 include/gralloc_priv.h | 220 +
 include/hwjpeglib-exynos.h | 177 +
 include/ion.h | 188 +
 include/media.h | 125 +
 include/s3c-fb.h | 126 +
 include/s5p_fimc_v4l2.h | 160 +
 include/s5p_tvout_v4l2.h | 218 +
 include/sec_g2d_4x.h | 479 +
 include/sec_g2d_5x.h | 409 +
 include/swconverter.h | 405 +
 include/v4l2-mediabus.h | 100 +
 include/v4l2-subdev.h | 94 +
 kernel-3.10-headers/linux/v4l2-common.h | 45 +
 kernel-3.10-headers/linux/v4l2-controls.h | 901 ++
 kernel-3.10-headers/v4l2-dv-timings.h | 181 +
 kernel-3.10-headers/videodev2.h | 1506 +++
 kernel-3.10-headers/videodev2_exynos_media.h | 371 +
 kernel-3.18-headers/linux/v4l2-common.h | 55 +
 kernel-3.18-headers/linux/v4l2-controls.h | 994 ++
 kernel-3.18-headers/videodev2.h | 1571 ++++
 kernel-3.18-headers/videodev2_exynos_media.h | 53 +
 kernel-3.4-headers/videodev2.h | 2098 +++++
 kernel-3.4-headers/videodev2_exynos_media.h | 355 +
 .../videodev2_exynos_media_ext.h | 73 +
 libaudio/Android.mk | 28 +
 libaudio/effecthal/Android.mk | 28 +
 libaudio/effecthal/postprocessing/Android.mk | 28 +
 .../postprocessing/aosp-effect/Android.mk | 53 +
 .../postprocessing/aosp-effect/Bundle/LVM.h | 628 ++
 .../aosp-effect/Bundle/LVM_Types.h | 188 +
 .../Bundle/exynos_effectbundle.cpp | 3162 +++++++
 .../aosp-effect/Bundle/exynos_effectbundle.h | 227 +
 .../aosp-effect/MODULE_LICENSE_APACHE2 | 0
 .../postprocessing/aosp-effect/NOTICE | 190 +
 .../postprocessing/aosp-effect/Reverb/LVREV.h | 312 +
 .../Reverb/exynos_effectReverb.cpp | 2238 +++++
 .../aosp-effect/Reverb/exynos_effectReverb.h | 47 +
 libaudio/effecthal/visualizer/Android.mk | 27 +
 .../visualizer/MODULE_LICENSE_APACHE2 | 0
 libaudio/effecthal/visualizer/NOTICE | 190 +
 .../effecthal/visualizer/exynos_visualizer.c | 1225 +++
 .../effecthal/visualizer/exynos_visualizer.h | 132 +
 libaudio/hal/Android.mk | 72 +
 libaudio/hal/NOTICE | 190 +
 libaudio/hal/audio_hw.c | 3526 +++++++
 libaudio/hal/audio_hw.h | 378 +
 libaudio/hal/audio_hw_def.h | 170 +
 libaudio/hal/sec/voice_manager.c | 312 +
 libaudio/hal/sec/voice_manager.h | 166 +
 libaudio/hal/ww/voice_manager.c | 337 +
 libaudio/hal/ww/voice_manager.h | 187 +
 .../34xx/ExynosCameraActivityControl.cpp | 851 ++
 libcamera/34xx/ExynosCameraActivityControl.h | 124 +
 libcamera/34xx/ExynosCameraDefine.h | 130 +
 libcamera/34xx/ExynosCameraMetadata.h | 40 +
libcamera/34xx/ExynosCameraParameters.h | 460 + libcamera/34xx/ExynosCameraScalableSensor.cpp | 66 + libcamera/34xx/ExynosCameraScalableSensor.h | 63 + libcamera/34xx/ExynosCameraUtilsModule.cpp | 628 ++ libcamera/34xx/ExynosCameraUtilsModule.h | 153 + libcamera/34xx/fimc-is-metadata.h | 1965 ++++ libcamera/34xx/fimc-is-metadata_for_hal3.3.h | 1903 ++++ libcamera/34xx/hal1/ExynosCamera.cpp | 4661 ++++++++++ libcamera/34xx/hal1/ExynosCamera.h | 597 ++ .../34xx/hal1/ExynosCamera1Parameters.cpp | 6360 +++++++++++++ libcamera/34xx/hal1/ExynosCamera1Parameters.h | 1017 ++ .../34xx/hal1/ExynosCameraFrameFactory.cpp | 943 ++ .../34xx/hal1/ExynosCameraFrameFactory.h | 218 + .../ExynosCameraFrameFactory3aaIspM2M.cpp | 784 ++ .../hal1/ExynosCameraFrameFactory3aaIspM2M.h | 67 + .../ExynosCameraFrameFactory3aaIspM2MTpu.cpp | 840 ++ .../ExynosCameraFrameFactory3aaIspM2MTpu.h | 68 + .../ExynosCameraFrameFactory3aaIspOtf.cpp | 716 ++ .../hal1/ExynosCameraFrameFactory3aaIspOtf.h | 68 + .../ExynosCameraFrameFactory3aaIspOtfTpu.cpp | 674 ++ .../ExynosCameraFrameFactory3aaIspOtfTpu.h | 68 + .../hal1/ExynosCameraFrameFactoryFront.cpp | 1059 +++ .../34xx/hal1/ExynosCameraFrameFactoryFront.h | 70 + .../hal1/ExynosCameraFrameFactoryPreview.cpp | 853 ++ .../hal1/ExynosCameraFrameFactoryPreview.h | 78 + .../hal1/ExynosCameraFrameFactoryVision.cpp | 317 + .../hal1/ExynosCameraFrameFactoryVision.h | 67 + .../ExynosCameraFrameReprocessingFactory.cpp | 1214 +++ .../ExynosCameraFrameReprocessingFactory.h | 69 + ...ynosCameraFrameReprocessingFactoryNV21.cpp | 409 + ...ExynosCameraFrameReprocessingFactoryNV21.h | 52 + .../34xx/hal1/ExynosCameraSizeControl.cpp | 96 + libcamera/34xx/hal1/ExynosCameraSizeControl.h | 65 + .../Ged/ExynosCamera1ParametersVendor.cpp | 4212 +++++++++ .../34xx/hal1/Ged/ExynosCameraVendor.cpp | 7870 ++++++++++++++++ libcamera/34xx/hal3/ExynosCamera3.cpp | 8212 +++++++++++++++++ libcamera/34xx/hal3/ExynosCamera3.h | 407 + .../34xx/hal3/ExynosCamera3FrameFactory.cpp | 963 ++ .../34xx/hal3/ExynosCamera3FrameFactory.h | 236 + .../hal3/ExynosCamera3FrameFactoryPreview.cpp | 1562 ++++ .../hal3/ExynosCamera3FrameFactoryPreview.h | 78 + .../hal3/ExynosCamera3FrameFactoryVision.cpp | 365 + .../hal3/ExynosCamera3FrameFactoryVision.h | 69 + .../ExynosCamera3FrameReprocessingFactory.cpp | 1214 +++ .../ExynosCamera3FrameReprocessingFactory.h | 69 + .../34xx/hal3/ExynosCamera3Parameters.cpp | 8138 ++++++++++++++++ libcamera/34xx/hal3/ExynosCamera3Parameters.h | 965 ++ .../34xx/hal3/ExynosCameraSizeControl.cpp | 96 + libcamera/34xx/hal3/ExynosCameraSizeControl.h | 65 + libcamera/34xx/videodev2_exynos_camera.h | 1306 +++ .../ExynosCameraActivityAutofocus.cpp | 385 + .../ExynosCameraActivityAutofocus.h | 267 + .../Activities/ExynosCameraActivityBase.cpp | 84 + .../Activities/ExynosCameraActivityBase.h | 114 + .../Activities/ExynosCameraActivityFlash.cpp | 1154 +++ .../Activities/ExynosCameraActivityFlash.h | 253 + .../ExynosCameraActivitySpecialCapture.cpp | 496 + .../ExynosCameraActivitySpecialCapture.h | 169 + .../Activities/ExynosCameraActivityUCTL.cpp | 148 + .../Activities/ExynosCameraActivityUCTL.h | 93 + .../common_v2/Buffers/ExynosCameraBuffer.h | 215 + .../Buffers/ExynosCameraBufferLocker.cpp | 226 + .../Buffers/ExynosCameraBufferLocker.h | 94 + .../Buffers/ExynosCameraBufferManager.cpp | 3453 +++++++ .../Buffers/ExynosCameraBufferManager.h | 509 + .../ExynosCamera1MetadataConverter.cpp | 66 + .../ExynosCamera1MetadataConverter.h | 54 + .../common_v2/ExynosCamera3Interface.cpp | 1177 +++ 
libcamera/common_v2/ExynosCamera3Interface.h | 493 + libcamera/common_v2/ExynosCameraAutoTimer.h | 136 + .../common_v2/ExynosCameraCommonDefine.h | 97 + libcamera/common_v2/ExynosCameraCommonEnum.h | 77 + libcamera/common_v2/ExynosCameraCounter.h | 81 + .../ExynosCameraDualFrameSelector.cpp | 1299 +++ .../common_v2/ExynosCameraDualFrameSelector.h | 403 + libcamera/common_v2/ExynosCameraFrame.cpp | 2024 ++++ libcamera/common_v2/ExynosCameraFrame.h | 528 ++ .../common_v2/ExynosCameraFrameManager.cpp | 1118 +++ .../common_v2/ExynosCameraFrameManager.h | 321 + .../common_v2/ExynosCameraFrameSelector.cpp | 853 ++ .../common_v2/ExynosCameraFrameSelector.h | 116 + libcamera/common_v2/ExynosCameraInterface.cpp | 802 ++ libcamera/common_v2/ExynosCameraInterface.h | 400 + .../common_v2/ExynosCameraInterfaceState.h | 130 + libcamera/common_v2/ExynosCameraList.h | 241 + libcamera/common_v2/ExynosCameraMemory.cpp | 1026 ++ libcamera/common_v2/ExynosCameraMemory.h | 234 + .../ExynosCameraMetadataConverter.cpp | 4313 +++++++++ .../common_v2/ExynosCameraMetadataConverter.h | 210 + libcamera/common_v2/ExynosCameraNode.cpp | 1775 ++++ libcamera/common_v2/ExynosCameraNode.h | 420 + .../common_v2/ExynosCameraNodeJpegHAL.cpp | 740 ++ libcamera/common_v2/ExynosCameraNodeJpegHAL.h | 173 + .../common_v2/ExynosCameraRequestManager.cpp | 2814 ++++++ .../common_v2/ExynosCameraRequestManager.h | 536 ++ libcamera/common_v2/ExynosCameraSingleton.h | 102 + libcamera/common_v2/ExynosCameraSizeTable.h | 131 + .../common_v2/ExynosCameraSizeTable2P2_12M.h | 465 + .../common_v2/ExynosCameraSizeTable2P2_FHD.h | 712 ++ .../common_v2/ExynosCameraSizeTable2P2_WQHD.h | 637 ++ .../common_v2/ExynosCameraSizeTable2P3.h | 425 + .../common_v2/ExynosCameraSizeTable2P8_WQHD.h | 1593 ++++ .../common_v2/ExynosCameraSizeTable2T2_WQHD.h | 811 ++ .../common_v2/ExynosCameraSizeTable3H5.h | 152 + .../common_v2/ExynosCameraSizeTable3H7.h | 263 + .../common_v2/ExynosCameraSizeTable3L2.h | 523 ++ .../ExynosCameraSizeTable3L2_FULL_OTF.h | 564 ++ .../common_v2/ExynosCameraSizeTable3L8.h | 949 ++ .../common_v2/ExynosCameraSizeTable3M2.h | 558 ++ .../common_v2/ExynosCameraSizeTable3M3.h | 949 ++ .../common_v2/ExynosCameraSizeTable3P3.h | 655 ++ .../ExynosCameraSizeTable3P3_FULL_OTF.h | 921 ++ .../common_v2/ExynosCameraSizeTable4E6.h | 752 ++ .../common_v2/ExynosCameraSizeTable4H5.h | 840 ++ .../common_v2/ExynosCameraSizeTable5E2.h | 723 ++ .../common_v2/ExynosCameraSizeTable5E3.h | 543 ++ .../ExynosCameraSizeTable5E3_FULL_OTF.h | 518 ++ .../ExynosCameraSizeTable5E3_VFLIP.h | 361 + .../common_v2/ExynosCameraSizeTable5E8.h | 318 + .../common_v2/ExynosCameraSizeTable6A3.h | 145 + .../common_v2/ExynosCameraSizeTable6B2.h | 396 + .../common_v2/ExynosCameraSizeTable6D1.h | 564 ++ .../common_v2/ExynosCameraSizeTable8B1.h | 155 + .../common_v2/ExynosCameraSizeTableIMX134.h | 388 + .../common_v2/ExynosCameraSizeTableIMX135.h | 152 + .../common_v2/ExynosCameraSizeTableIMX175.h | 347 + .../common_v2/ExynosCameraSizeTableIMX219.h | 343 + .../ExynosCameraSizeTableIMX228_WQHD.h | 811 ++ .../ExynosCameraSizeTableIMX240_2P2_FHD.h | 853 ++ .../ExynosCameraSizeTableIMX240_2P2_WQHD.h | 970 ++ .../ExynosCameraSizeTableIMX240_FHD.h | 620 ++ .../ExynosCameraSizeTableIMX240_WQHD.h | 638 ++ .../ExynosCameraSizeTableIMX260_2L1_WQHD.h | 827 ++ .../common_v2/ExynosCameraSizeTableOV5670.h | 256 + .../common_v2/ExynosCameraSizeTableSR259.h | 171 + .../common_v2/ExynosCameraSizeTableSR261.h | 155 + .../common_v2/ExynosCameraSizeTableSR544.h | 386 + 
.../common_v2/ExynosCameraStreamManager.cpp | 739 ++ .../common_v2/ExynosCameraStreamManager.h | 182 + libcamera/common_v2/ExynosCameraStreamMutex.h | 69 + libcamera/common_v2/ExynosCameraThread.h | 101 + .../common_v2/ExynosCameraThreadFactory.h | 33 + libcamera/common_v2/ExynosCameraUtils.cpp | 2260 +++++ libcamera/common_v2/ExynosCameraUtils.h | 268 + .../Fusion/DofLut/ExynosCameraDofLut.h | 69 + .../Fusion/DofLut/ExynosCameraDofLut3L8.h | 75 + .../Fusion/DofLut/ExynosCameraDofLut3M3.h | 75 + .../Fusion/DofLut/ExynosCameraDofLut4H8.h | 75 + .../Fusion/DofLut/ExynosCameraDofLutBase.h | 75 + .../Fusion/ExynosCameraFusionInclude.h | 41 + .../ExynosCameraFusionMetaDataConverter.cpp | 456 + .../ExynosCameraFusionMetaDataConverter.h | 93 + .../Fusion/ExynosCameraFusionWrapper.cpp | 354 + .../Fusion/ExynosCameraFusionWrapper.h | 111 + .../Fusion/ExynosCameraPipeFusion.cpp | 661 ++ .../common_v2/Fusion/ExynosCameraPipeFusion.h | 84 + .../ExynosCameraActivityAutofocusVendor.cpp | 584 ++ .../Ged/ExynosCameraActivityFlashVendor.cpp | 742 ++ .../Ged/ExynosCameraFrameSelectorVendor.cpp | 359 + .../common_v2/MCPipes/ExynosCameraMCPipe.cpp | 3655 ++++++++ .../common_v2/MCPipes/ExynosCameraMCPipe.h | 330 + .../common_v2/Pipes2/ExynosCameraPipe.cpp | 2091 +++++ libcamera/common_v2/Pipes2/ExynosCameraPipe.h | 426 + .../common_v2/Pipes2/ExynosCameraPipe3AA.cpp | 786 ++ .../common_v2/Pipes2/ExynosCameraPipe3AA.h | 72 + .../Pipes2/ExynosCameraPipe3AA_ISP.cpp | 1781 ++++ .../Pipes2/ExynosCameraPipe3AA_ISP.h | 119 + .../common_v2/Pipes2/ExynosCameraPipe3AC.cpp | 366 + .../common_v2/Pipes2/ExynosCameraPipe3AC.h | 65 + .../common_v2/Pipes2/ExynosCameraPipeDIS.cpp | 761 ++ .../common_v2/Pipes2/ExynosCameraPipeDIS.h | 69 + .../Pipes2/ExynosCameraPipeFlite.cpp | 530 ++ .../common_v2/Pipes2/ExynosCameraPipeFlite.h | 186 + .../common_v2/Pipes2/ExynosCameraPipeGSC.cpp | 261 + .../common_v2/Pipes2/ExynosCameraPipeGSC.h | 71 + .../common_v2/Pipes2/ExynosCameraPipeISP.cpp | 883 ++ .../common_v2/Pipes2/ExynosCameraPipeISP.h | 71 + .../common_v2/Pipes2/ExynosCameraPipeISPC.cpp | 635 ++ .../common_v2/Pipes2/ExynosCameraPipeISPC.h | 71 + .../common_v2/Pipes2/ExynosCameraPipeJpeg.cpp | 318 + .../common_v2/Pipes2/ExynosCameraPipeJpeg.h | 73 + .../common_v2/Pipes2/ExynosCameraPipeSCC.cpp | 640 ++ .../common_v2/Pipes2/ExynosCameraPipeSCC.h | 71 + .../common_v2/Pipes2/ExynosCameraPipeSCP.cpp | 551 ++ .../common_v2/Pipes2/ExynosCameraPipeSCP.h | 68 + .../Pipes2/ExynosCameraPipeSTK_PICTURE.cpp | 394 + .../Pipes2/ExynosCameraPipeSTK_PICTURE.h | 149 + .../Pipes2/ExynosCameraPipeSTK_PREVIEW.cpp | 358 + .../Pipes2/ExynosCameraPipeSTK_PREVIEW.h | 96 + .../common_v2/Pipes2/ExynosCameraPipeUVS.cpp | 316 + .../common_v2/Pipes2/ExynosCameraPipeUVS.h | 131 + .../common_v2/Pipes2/ExynosCameraPipeVRA.cpp | 468 + .../common_v2/Pipes2/ExynosCameraPipeVRA.h | 73 + .../ExynosCamera3SensorInfoBase.cpp | 6559 +++++++++++++ .../SensorInfos/ExynosCamera3SensorInfoBase.h | 240 + .../SensorInfos/ExynosCameraSensorInfo2P8.cpp | 437 + .../ExynosCameraSensorInfoBase.cpp | 6989 ++++++++++++++ .../SensorInfos/ExynosCameraSensorInfoBase.h | 946 ++ libcamera_external/Exif.cpp | 429 + libcamera_external/Exif.h | 318 + libcamera_external/ISecCameraHardware.cpp | 4149 +++++++++ libcamera_external/ISecCameraHardware.h | 888 ++ libcamera_external/NOTICE | 190 + libcamera_external/SecCameraCommonConfig.h | 299 + libcamera_external/SecCameraHardware.cpp | 5127 ++++++++++ libcamera_external/SecCameraHardware.h | 389 + .../SecCameraHardware1MetadataConverter.cpp | 57 
+ .../SecCameraHardware1MetadataConverter.h | 53 + libcamera_external/SecCameraInterface.cpp | 766 ++ libcamera_external/SecCameraInterface.h | 411 + libcamera_external/SecCameraInterfaceState.h | 135 + libcamera_external/SecCameraParameters.cpp | 135 + libcamera_external/SecCameraParameters.h | 183 + libcec/Android.mk | 25 + libcec/NOTICE | 190 + libcec/cec.h | 26 + libcec/libcec.c | 384 + libcec/libcec.h | 173 + libcsc/Android.mk | 47 + libcsc/NOTICE | 190 + libcsc/csc.c | 1312 +++ libdisplay/Android.mk | 57 + libdisplay/ExynosDisplay.cpp | 202 + libdisplay/ExynosDisplay.h | 106 + libdisplay/ExynosOverlayDisplay.cpp | 2005 ++++ libdisplay/ExynosOverlayDisplay.h | 140 + libdisplay/NOTICE | 190 + libexynosutils/Android.mk | 38 + libexynosutils/ExynosMutex.cpp | 273 + libexynosutils/ExynosMutex.h | 139 + libexynosutils/Exynos_log.c | 50 + libexynosutils/Exynos_log.h | 48 + libexynosutils/NOTICE | 190 + libexynosutils/exynos_format_v4l2.c | 438 + libfimg4x/Android.mk | 41 + libfimg4x/FimgApi.cpp | 494 + libfimg4x/FimgExynos5.cpp | 331 + libfimg4x/FimgExynos5.h | 165 + libfimg4x/NOTICE | 190 + libfimg5x/Android.mk | 42 + libfimg5x/FimgApi.cpp | 793 ++ libfimg5x/FimgExynos5.cpp | 378 + libfimg5x/FimgExynos5.h | 170 + libfimg5x/NOTICE | 189 + libg2d/Android.mk | 32 + libg2d/NOTICE | 190 + libg2d/exynos_blender.cpp | 289 + libg2d/exynos_blender_obj.cpp | 184 + libg2d/libg2d.cpp | 627 ++ libg2d/libg2d_obj.h | 65 + libgscaler/Android.mk | 39 + libgscaler/NOTICE | 190 + libgscaler/libgscaler.cpp | 646 ++ libgscaler/libgscaler_obj.cpp | 2073 +++++ libgscaler/libgscaler_obj.h | 270 + libhdmi/Android.mk | 53 + libhdmi/ExynosExternalDisplay.cpp | 1341 +++ libhdmi/ExynosExternalDisplay.h | 143 + libhdmi/NOTICE | 190 + libhdmi/decon_tv.h | 24 + libhdmi/dv_timings.c | 35 + libhdmi_dummy/Android.mk | 51 + libhdmi_dummy/ExynosExternalDisplay.cpp | 121 + libhdmi_dummy/ExynosExternalDisplay.h | 51 + libhdmi_dummy/NOTICE | 190 + libhdmi_legacy/Android.mk | 58 + libhdmi_legacy/ExynosExternalDisplay.cpp | 1337 +++ libhdmi_legacy/ExynosExternalDisplay.h | 109 + libhdmi_legacy/NOTICE | 190 + libhdmi_legacy/dv_timings.c | 30 + libhwc/Android.mk | 123 + libhwc/ExynosHWC.cpp | 1587 ++++ libhwc/ExynosHWC.h | 452 + libhwc/ExynosHWCDebug.c | 59 + libhwc/ExynosHWCDebug.h | 49 + libhwc/NOTICE | 190 + libhwcService/Android.mk | 91 + libhwcService/ExynosHWCService.cpp | 516 ++ libhwcService/ExynosHWCService.h | 129 + libhwcService/IExynosHWC.cpp | 670 ++ libhwcService/IExynosHWC.h | 109 + libhwcService/NOTICE | 190 + libhwc_tiny/Android.mk | 46 + libhwc_tiny/ExynosHWC.cpp | 475 + libhwc_tiny/ExynosHWC.h | 201 + libhwc_tiny/NOTICE | 190 + libhwcutils/Android.mk | 106 + libhwcutils/ExynosG2DWrapper.cpp | 843 ++ libhwcutils/ExynosG2DWrapper.h | 65 + libhwcutils/ExynosHWCUtils.cpp | 492 + libhwcutils/ExynosHWCUtils.h | 143 + libhwcutils/ExynosMPP.cpp | 1486 +++ libhwcutils/ExynosMPP.h | 165 + libhwcutils/ExynosMPPv2.cpp | 2001 ++++ libhwcutils/ExynosMPPv2.h | 241 + libhwcutils/NOTICE | 190 + libhwjpeg/Android.mk | 34 + libhwjpeg/AppMarkerWriter.cpp | 629 ++ libhwjpeg/AppMarkerWriter.h | 137 + libhwjpeg/ExynosJpegEncoder.cpp | 251 + libhwjpeg/ExynosJpegEncoderForCamera.cpp | 878 ++ libhwjpeg/IFDWriter.h | 304 + libhwjpeg/hwjpeg-base.cpp | 152 + libhwjpeg/hwjpeg-internal.h | 86 + libhwjpeg/hwjpeg-libcsc.h | 54 + libhwjpeg/hwjpeg-v4l2.cpp | 975 ++ libhwjpeg/libcsc.cpp | 174 + libhwjpeg/libhwjpeg-exynos.cpp | 522 ++ libion_exynos/Android.mk | 28 + libion_exynos/NOTICE | 190 + libion_exynos/libion.cpp | 210 + libmemtrack/Android.mk 
| 26 + libmemtrack/ion.c | 97 + libmemtrack/mali.c | 227 + libmemtrack/memtrack_exynos.c | 61 + libmemtrack/memtrack_exynos.h | 27 + libmpp/Android.mk | 40 + libmpp/LibMpp.h | 49 + libmpp/MppFactory.cpp | 25 + libmpp/MppFactory.h | 17 + libmpp/NOTICE | 190 + libscaler/Android.mk | 41 + libscaler/NOTICE | 190 + libscaler/libscaler-common.h | 97 + libscaler/libscaler-m2m1shot.cpp | 339 + libscaler/libscaler-m2m1shot.h | 115 + libscaler/libscaler-swscaler.cpp | 106 + libscaler/libscaler-swscaler.h | 61 + libscaler/libscaler-v4l2.cpp | 625 ++ libscaler/libscaler-v4l2.h | 305 + libscaler/libscaler.cpp | 636 ++ libscaler/libscalerblend-v4l2.cpp | 147 + libscaler/libscalerblend-v4l2.h | 64 + libseiren/Android.mk | 25 + libseiren/NOTICE | 190 + libseiren/dec/seiren_hw.c | 309 + libseiren/include/seiren_error.h | 39 + libseiren/include/seiren_hw.h | 141 + libseiren/include/seiren_ioctl.h | 30 + libsrp/Android.mk | 7 + libsrp/NOTICE | 190 + libsrp/alp/Android.mk | 20 + libsrp/alp/NOTICE | 190 + libsrp/alp/dec/srp_api.c | 293 + libsrp/alp/include/srp_api.h | 78 + libsrp/alp/include/srp_error.h | 48 + libsrp/alp/include/srp_ioctl.h | 49 + libstagefrighthw/Android.mk | 39 + libstagefrighthw/Exynos_OMX_Plugin.cpp | 147 + libstagefrighthw/Exynos_OMX_Plugin.h | 76 + libstagefrighthw/NOTICE | 190 + libswconverter/Android.mk | 49 + libswconverter/NOTICE | 190 + .../csc_BGRA8888_to_RGBA8888_NEON.s | 112 + .../csc_BGRA8888_to_YUV420SP_NEON.s | 365 + .../csc_RGBA8888_to_YUV420SP_NEON.s | 388 + libswconverter/csc_interleave_memcpy_neon.s | 120 + .../csc_linear_to_tiled_crop_neon.s | 492 + ...csc_linear_to_tiled_interleave_crop_neon.s | 563 ++ .../csc_tiled_to_linear_crop_neon.s | 701 ++ ...c_tiled_to_linear_deinterleave_crop_neon.s | 786 ++ ...csc_tiled_to_linear_uv_deinterleave_neon.s | 250 + libswconverter/csc_tiled_to_linear_uv_neon.s | 217 + libswconverter/csc_tiled_to_linear_y_neon.s | 232 + libswconverter/swconverter_dummy.c | 81 + libswconverter/swconvertor.c | 2214 +++++ libv4l2/Android.mk | 38 + libv4l2/NOTICE | 190 + libv4l2/exynos_mc.c | 776 ++ libv4l2/exynos_subdev.c | 454 + libv4l2/exynos_v4l2.c | 889 ++ libvideocodec/Android.mk | 76 + libvideocodec/ExynosVideoInterface.c | 156 + libvideocodec/NOTICE | 190 + libvideocodec/dec/ExynosVideoDecoder.c | 3107 +++++++ libvideocodec/enc/ExynosVideoEncoder.c | 4054 ++++++++ libvideocodec/include/ExynosVideoApi.h | 555 ++ libvideocodec/include/ExynosVideoDec.h | 70 + libvideocodec/include/ExynosVideoEnc.h | 74 + libvideocodec/mfc_headers/exynos_mfc_media.h | 283 + libvirtualdisplay/Android.mk | 52 + libvirtualdisplay/ExynosVirtualDisplay.cpp | 608 ++ libvirtualdisplay/ExynosVirtualDisplay.h | 79 + libvirtualdisplay/NOTICE | 190 + libvppdisplay/Android.mk | 74 + libvppdisplay/ExynosDisplay.cpp | 3404 +++++++ libvppdisplay/ExynosDisplay.h | 264 + .../ExynosDisplayResourceManager.cpp | 558 ++ libvppdisplay/ExynosDisplayResourceManager.h | 45 + libvppdisplay/ExynosOverlayDisplay.cpp | 101 + libvppdisplay/ExynosOverlayDisplay.h | 26 + libvppdisplay/ExynosSecondaryDisplay.cpp | 100 + libvppdisplay/ExynosSecondaryDisplay.h | 25 + libvppdisplay/NOTICE | 190 + libvppdisplay_tiny/Android.mk | 45 + libvppdisplay_tiny/ExynosDisplay.cpp | 491 + libvppdisplay_tiny/ExynosDisplay.h | 114 + libvppdisplay_tiny/ExynosOverlayDisplay.cpp | 12 + libvppdisplay_tiny/ExynosOverlayDisplay.h | 14 + libvppdisplay_tiny/ExynosPrimaryDisplay.cpp | 10 + libvppdisplay_tiny/ExynosPrimaryDisplay.h | 13 + libvppdisplay_tiny/NOTICE | 190 + libvpphdmi/Android.mk | 58 + 
libvpphdmi/ExynosExternalDisplay.cpp | 1151 +++ libvpphdmi/ExynosExternalDisplay.h | 147 + libvpphdmi/NOTICE | 190 + libvpphdmi/decon_tv.h | 24 + libvpphdmi/dv_timings.c | 35 + libvpphdmi/videodev2_exynos_hdmi.h | 86 + libvppvirtualdisplay/Android.mk | 52 + libvppvirtualdisplay/ExynosVirtualDisplay.cpp | 1107 +++ libvppvirtualdisplay/ExynosVirtualDisplay.h | 123 + libvppvirtualdisplay/NOTICE | 190 + rpmbd/Android.mk | 37 + rpmbd/rpmbd.c | 517 ++ rpmbd/rpmbd.h | 172 + 502 files changed, 268939 insertions(+) create mode 100644 Android.mk create mode 100644 BoardConfigCFlags.mk create mode 100644 NOTICE create mode 100644 exyrngd/Android.mk create mode 100644 exyrngd/LICENSE create mode 100644 exyrngd/NOTICE create mode 100644 exyrngd/README.txt create mode 100644 exyrngd/exyrngd.c create mode 100644 gralloc/Android.mk create mode 100644 gralloc/format_chooser.cpp create mode 100644 gralloc/format_chooser.h create mode 100644 gralloc/framebuffer.cpp create mode 100644 gralloc/gr.h create mode 100644 gralloc/gralloc.cpp create mode 100644 gralloc/mapper.cpp create mode 100644 include/ExynosBuffer.h create mode 100644 include/ExynosExif.h create mode 100644 include/ExynosJpegApi.h create mode 100644 include/ExynosJpegEncoderForCamera.h create mode 100644 include/FimgApi.h create mode 100644 include/csc.h create mode 100644 include/decon-fb.h create mode 100644 include/decon_8890.h create mode 100644 include/exynos-hwjpeg.h create mode 100644 include/exynos_blender.h create mode 100644 include/exynos_blender_obj.h create mode 100644 include/exynos_format.h create mode 100644 include/exynos_gscaler.h create mode 100644 include/exynos_ion.h create mode 100644 include/exynos_scaler.h create mode 100644 include/exynos_v4l2.h create mode 100644 include/gralloc_priv.h create mode 100644 include/hwjpeglib-exynos.h create mode 100644 include/ion.h create mode 100644 include/media.h create mode 100644 include/s3c-fb.h create mode 100644 include/s5p_fimc_v4l2.h create mode 100644 include/s5p_tvout_v4l2.h create mode 100644 include/sec_g2d_4x.h create mode 100644 include/sec_g2d_5x.h create mode 100644 include/swconverter.h create mode 100644 include/v4l2-mediabus.h create mode 100644 include/v4l2-subdev.h create mode 100644 kernel-3.10-headers/linux/v4l2-common.h create mode 100644 kernel-3.10-headers/linux/v4l2-controls.h create mode 100644 kernel-3.10-headers/v4l2-dv-timings.h create mode 100644 kernel-3.10-headers/videodev2.h create mode 100644 kernel-3.10-headers/videodev2_exynos_media.h create mode 100644 kernel-3.18-headers/linux/v4l2-common.h create mode 100644 kernel-3.18-headers/linux/v4l2-controls.h create mode 100644 kernel-3.18-headers/videodev2.h create mode 100644 kernel-3.18-headers/videodev2_exynos_media.h create mode 100644 kernel-3.4-headers/videodev2.h create mode 100644 kernel-3.4-headers/videodev2_exynos_media.h create mode 100644 kernel-3.4-headers/videodev2_exynos_media_ext.h create mode 100644 libaudio/Android.mk create mode 100644 libaudio/effecthal/Android.mk create mode 100644 libaudio/effecthal/postprocessing/Android.mk create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Android.mk create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM.h create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM_Types.h create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.cpp create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.h create mode 100644 
libaudio/effecthal/postprocessing/aosp-effect/MODULE_LICENSE_APACHE2 create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/NOTICE create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Reverb/LVREV.h create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.cpp create mode 100644 libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.h create mode 100644 libaudio/effecthal/visualizer/Android.mk create mode 100644 libaudio/effecthal/visualizer/MODULE_LICENSE_APACHE2 create mode 100644 libaudio/effecthal/visualizer/NOTICE create mode 100644 libaudio/effecthal/visualizer/exynos_visualizer.c create mode 100644 libaudio/effecthal/visualizer/exynos_visualizer.h create mode 100644 libaudio/hal/Android.mk create mode 100644 libaudio/hal/NOTICE create mode 100644 libaudio/hal/audio_hw.c create mode 100644 libaudio/hal/audio_hw.h create mode 100644 libaudio/hal/audio_hw_def.h create mode 100644 libaudio/hal/sec/voice_manager.c create mode 100644 libaudio/hal/sec/voice_manager.h create mode 100644 libaudio/hal/ww/voice_manager.c create mode 100644 libaudio/hal/ww/voice_manager.h create mode 100644 libcamera/34xx/ExynosCameraActivityControl.cpp create mode 100644 libcamera/34xx/ExynosCameraActivityControl.h create mode 100644 libcamera/34xx/ExynosCameraDefine.h create mode 100644 libcamera/34xx/ExynosCameraMetadata.h create mode 100644 libcamera/34xx/ExynosCameraParameters.h create mode 100644 libcamera/34xx/ExynosCameraScalableSensor.cpp create mode 100644 libcamera/34xx/ExynosCameraScalableSensor.h create mode 100644 libcamera/34xx/ExynosCameraUtilsModule.cpp create mode 100644 libcamera/34xx/ExynosCameraUtilsModule.h create mode 100644 libcamera/34xx/fimc-is-metadata.h create mode 100644 libcamera/34xx/fimc-is-metadata_for_hal3.3.h create mode 100644 libcamera/34xx/hal1/ExynosCamera.cpp create mode 100644 libcamera/34xx/hal1/ExynosCamera.h create mode 100644 libcamera/34xx/hal1/ExynosCamera1Parameters.cpp create mode 100644 libcamera/34xx/hal1/ExynosCamera1Parameters.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.h create mode 100644 libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.cpp create mode 100644 
libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.h create mode 100644 libcamera/34xx/hal1/ExynosCameraSizeControl.cpp create mode 100644 libcamera/34xx/hal1/ExynosCameraSizeControl.h create mode 100644 libcamera/34xx/hal1/Ged/ExynosCamera1ParametersVendor.cpp create mode 100644 libcamera/34xx/hal1/Ged/ExynosCameraVendor.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3.h create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameFactory.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameFactory.h create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.h create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.h create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.h create mode 100644 libcamera/34xx/hal3/ExynosCamera3Parameters.cpp create mode 100644 libcamera/34xx/hal3/ExynosCamera3Parameters.h create mode 100644 libcamera/34xx/hal3/ExynosCameraSizeControl.cpp create mode 100644 libcamera/34xx/hal3/ExynosCameraSizeControl.h create mode 100644 libcamera/34xx/videodev2_exynos_camera.h create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.cpp create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.h create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityBase.cpp create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityBase.h create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityFlash.cpp create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityFlash.h create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.cpp create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.h create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityUCTL.cpp create mode 100644 libcamera/common_v2/Activities/ExynosCameraActivityUCTL.h create mode 100644 libcamera/common_v2/Buffers/ExynosCameraBuffer.h create mode 100644 libcamera/common_v2/Buffers/ExynosCameraBufferLocker.cpp create mode 100644 libcamera/common_v2/Buffers/ExynosCameraBufferLocker.h create mode 100644 libcamera/common_v2/Buffers/ExynosCameraBufferManager.cpp create mode 100644 libcamera/common_v2/Buffers/ExynosCameraBufferManager.h create mode 100644 libcamera/common_v2/ExynosCamera1MetadataConverter.cpp create mode 100644 libcamera/common_v2/ExynosCamera1MetadataConverter.h create mode 100644 libcamera/common_v2/ExynosCamera3Interface.cpp create mode 100644 libcamera/common_v2/ExynosCamera3Interface.h create mode 100644 libcamera/common_v2/ExynosCameraAutoTimer.h create mode 100644 libcamera/common_v2/ExynosCameraCommonDefine.h create mode 100644 libcamera/common_v2/ExynosCameraCommonEnum.h create mode 100644 libcamera/common_v2/ExynosCameraCounter.h create mode 100644 libcamera/common_v2/ExynosCameraDualFrameSelector.cpp create mode 100644 libcamera/common_v2/ExynosCameraDualFrameSelector.h create mode 100644 libcamera/common_v2/ExynosCameraFrame.cpp create mode 100644 libcamera/common_v2/ExynosCameraFrame.h create mode 100644 libcamera/common_v2/ExynosCameraFrameManager.cpp create mode 100644 libcamera/common_v2/ExynosCameraFrameManager.h create mode 100644 libcamera/common_v2/ExynosCameraFrameSelector.cpp create 
mode 100644 libcamera/common_v2/ExynosCameraFrameSelector.h create mode 100644 libcamera/common_v2/ExynosCameraInterface.cpp create mode 100644 libcamera/common_v2/ExynosCameraInterface.h create mode 100644 libcamera/common_v2/ExynosCameraInterfaceState.h create mode 100644 libcamera/common_v2/ExynosCameraList.h create mode 100644 libcamera/common_v2/ExynosCameraMemory.cpp create mode 100644 libcamera/common_v2/ExynosCameraMemory.h create mode 100644 libcamera/common_v2/ExynosCameraMetadataConverter.cpp create mode 100644 libcamera/common_v2/ExynosCameraMetadataConverter.h create mode 100644 libcamera/common_v2/ExynosCameraNode.cpp create mode 100644 libcamera/common_v2/ExynosCameraNode.h create mode 100644 libcamera/common_v2/ExynosCameraNodeJpegHAL.cpp create mode 100644 libcamera/common_v2/ExynosCameraNodeJpegHAL.h create mode 100644 libcamera/common_v2/ExynosCameraRequestManager.cpp create mode 100644 libcamera/common_v2/ExynosCameraRequestManager.h create mode 100644 libcamera/common_v2/ExynosCameraSingleton.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable2P2_12M.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable2P2_FHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable2P2_WQHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable2P3.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable2P8_WQHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable2T2_WQHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3H5.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3H7.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3L2.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3L2_FULL_OTF.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3L8.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3M2.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3M3.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3P3.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable3P3_FULL_OTF.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable4E6.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable4H5.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable5E2.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable5E3.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable5E3_FULL_OTF.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable5E3_VFLIP.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable5E8.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable6A3.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable6B2.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable6D1.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTable8B1.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX134.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX135.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX175.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX219.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX228_WQHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_FHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_WQHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX240_FHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableIMX240_WQHD.h create mode 100644 
libcamera/common_v2/ExynosCameraSizeTableIMX260_2L1_WQHD.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableOV5670.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableSR259.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableSR261.h create mode 100644 libcamera/common_v2/ExynosCameraSizeTableSR544.h create mode 100644 libcamera/common_v2/ExynosCameraStreamManager.cpp create mode 100644 libcamera/common_v2/ExynosCameraStreamManager.h create mode 100644 libcamera/common_v2/ExynosCameraStreamMutex.h create mode 100644 libcamera/common_v2/ExynosCameraThread.h create mode 100644 libcamera/common_v2/ExynosCameraThreadFactory.h create mode 100644 libcamera/common_v2/ExynosCameraUtils.cpp create mode 100644 libcamera/common_v2/ExynosCameraUtils.h create mode 100644 libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut.h create mode 100644 libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3L8.h create mode 100644 libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3M3.h create mode 100644 libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut4H8.h create mode 100644 libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLutBase.h create mode 100644 libcamera/common_v2/Fusion/ExynosCameraFusionInclude.h create mode 100644 libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.cpp create mode 100644 libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.h create mode 100644 libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.cpp create mode 100644 libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.h create mode 100644 libcamera/common_v2/Fusion/ExynosCameraPipeFusion.cpp create mode 100644 libcamera/common_v2/Fusion/ExynosCameraPipeFusion.h create mode 100644 libcamera/common_v2/Ged/ExynosCameraActivityAutofocusVendor.cpp create mode 100644 libcamera/common_v2/Ged/ExynosCameraActivityFlashVendor.cpp create mode 100644 libcamera/common_v2/Ged/ExynosCameraFrameSelectorVendor.cpp create mode 100644 libcamera/common_v2/MCPipes/ExynosCameraMCPipe.cpp create mode 100644 libcamera/common_v2/MCPipes/ExynosCameraMCPipe.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeISP.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeISP.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.cpp create mode 
100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.h create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.cpp create mode 100644 libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.h create mode 100644 libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.cpp create mode 100644 libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.h create mode 100644 libcamera/common_v2/SensorInfos/ExynosCameraSensorInfo2P8.cpp create mode 100644 libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.cpp create mode 100644 libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.h create mode 100644 libcamera_external/Exif.cpp create mode 100644 libcamera_external/Exif.h create mode 100644 libcamera_external/ISecCameraHardware.cpp create mode 100644 libcamera_external/ISecCameraHardware.h create mode 100644 libcamera_external/NOTICE create mode 100644 libcamera_external/SecCameraCommonConfig.h create mode 100644 libcamera_external/SecCameraHardware.cpp create mode 100644 libcamera_external/SecCameraHardware.h create mode 100644 libcamera_external/SecCameraHardware1MetadataConverter.cpp create mode 100644 libcamera_external/SecCameraHardware1MetadataConverter.h create mode 100644 libcamera_external/SecCameraInterface.cpp create mode 100644 libcamera_external/SecCameraInterface.h create mode 100644 libcamera_external/SecCameraInterfaceState.h create mode 100644 libcamera_external/SecCameraParameters.cpp create mode 100644 libcamera_external/SecCameraParameters.h create mode 100644 libcec/Android.mk create mode 100644 libcec/NOTICE create mode 100644 libcec/cec.h create mode 100644 libcec/libcec.c create mode 100644 libcec/libcec.h create mode 100644 libcsc/Android.mk create mode 100644 libcsc/NOTICE create mode 100644 libcsc/csc.c create mode 100644 libdisplay/Android.mk create mode 100644 libdisplay/ExynosDisplay.cpp create mode 100644 libdisplay/ExynosDisplay.h create mode 100644 libdisplay/ExynosOverlayDisplay.cpp create mode 100644 libdisplay/ExynosOverlayDisplay.h create mode 100644 libdisplay/NOTICE create mode 100644 libexynosutils/Android.mk create mode 100644 libexynosutils/ExynosMutex.cpp create mode 100644 libexynosutils/ExynosMutex.h create mode 100644 libexynosutils/Exynos_log.c create mode 100644 libexynosutils/Exynos_log.h create mode 100644 libexynosutils/NOTICE create mode 100644 libexynosutils/exynos_format_v4l2.c create mode 100644 libfimg4x/Android.mk create mode 100644 libfimg4x/FimgApi.cpp create mode 100644 libfimg4x/FimgExynos5.cpp create mode 100644 libfimg4x/FimgExynos5.h create mode 100644 libfimg4x/NOTICE create mode 100644 libfimg5x/Android.mk create mode 100644 libfimg5x/FimgApi.cpp create mode 100644 libfimg5x/FimgExynos5.cpp create mode 100644 libfimg5x/FimgExynos5.h create mode 100644 libfimg5x/NOTICE create mode 100644 libg2d/Android.mk create mode 100644 libg2d/NOTICE create mode 100644 libg2d/exynos_blender.cpp create mode 100644 libg2d/exynos_blender_obj.cpp create mode 
100644 libg2d/libg2d.cpp create mode 100644 libg2d/libg2d_obj.h create mode 100644 libgscaler/Android.mk create mode 100644 libgscaler/NOTICE create mode 100644 libgscaler/libgscaler.cpp create mode 100644 libgscaler/libgscaler_obj.cpp create mode 100644 libgscaler/libgscaler_obj.h create mode 100644 libhdmi/Android.mk create mode 100644 libhdmi/ExynosExternalDisplay.cpp create mode 100644 libhdmi/ExynosExternalDisplay.h create mode 100644 libhdmi/NOTICE create mode 100644 libhdmi/decon_tv.h create mode 100644 libhdmi/dv_timings.c create mode 100644 libhdmi_dummy/Android.mk create mode 100644 libhdmi_dummy/ExynosExternalDisplay.cpp create mode 100644 libhdmi_dummy/ExynosExternalDisplay.h create mode 100644 libhdmi_dummy/NOTICE create mode 100644 libhdmi_legacy/Android.mk create mode 100644 libhdmi_legacy/ExynosExternalDisplay.cpp create mode 100644 libhdmi_legacy/ExynosExternalDisplay.h create mode 100644 libhdmi_legacy/NOTICE create mode 100644 libhdmi_legacy/dv_timings.c create mode 100644 libhwc/Android.mk create mode 100644 libhwc/ExynosHWC.cpp create mode 100644 libhwc/ExynosHWC.h create mode 100644 libhwc/ExynosHWCDebug.c create mode 100644 libhwc/ExynosHWCDebug.h create mode 100644 libhwc/NOTICE create mode 100644 libhwcService/Android.mk create mode 100644 libhwcService/ExynosHWCService.cpp create mode 100644 libhwcService/ExynosHWCService.h create mode 100644 libhwcService/IExynosHWC.cpp create mode 100644 libhwcService/IExynosHWC.h create mode 100644 libhwcService/NOTICE create mode 100644 libhwc_tiny/Android.mk create mode 100644 libhwc_tiny/ExynosHWC.cpp create mode 100644 libhwc_tiny/ExynosHWC.h create mode 100644 libhwc_tiny/NOTICE create mode 100644 libhwcutils/Android.mk create mode 100644 libhwcutils/ExynosG2DWrapper.cpp create mode 100644 libhwcutils/ExynosG2DWrapper.h create mode 100644 libhwcutils/ExynosHWCUtils.cpp create mode 100644 libhwcutils/ExynosHWCUtils.h create mode 100644 libhwcutils/ExynosMPP.cpp create mode 100644 libhwcutils/ExynosMPP.h create mode 100644 libhwcutils/ExynosMPPv2.cpp create mode 100644 libhwcutils/ExynosMPPv2.h create mode 100644 libhwcutils/NOTICE create mode 100644 libhwjpeg/Android.mk create mode 100644 libhwjpeg/AppMarkerWriter.cpp create mode 100644 libhwjpeg/AppMarkerWriter.h create mode 100644 libhwjpeg/ExynosJpegEncoder.cpp create mode 100644 libhwjpeg/ExynosJpegEncoderForCamera.cpp create mode 100644 libhwjpeg/IFDWriter.h create mode 100644 libhwjpeg/hwjpeg-base.cpp create mode 100644 libhwjpeg/hwjpeg-internal.h create mode 100644 libhwjpeg/hwjpeg-libcsc.h create mode 100644 libhwjpeg/hwjpeg-v4l2.cpp create mode 100644 libhwjpeg/libcsc.cpp create mode 100644 libhwjpeg/libhwjpeg-exynos.cpp create mode 100644 libion_exynos/Android.mk create mode 100644 libion_exynos/NOTICE create mode 100644 libion_exynos/libion.cpp create mode 100644 libmemtrack/Android.mk create mode 100644 libmemtrack/ion.c create mode 100644 libmemtrack/mali.c create mode 100644 libmemtrack/memtrack_exynos.c create mode 100644 libmemtrack/memtrack_exynos.h create mode 100644 libmpp/Android.mk create mode 100644 libmpp/LibMpp.h create mode 100644 libmpp/MppFactory.cpp create mode 100644 libmpp/MppFactory.h create mode 100644 libmpp/NOTICE create mode 100644 libscaler/Android.mk create mode 100644 libscaler/NOTICE create mode 100644 libscaler/libscaler-common.h create mode 100644 libscaler/libscaler-m2m1shot.cpp create mode 100644 libscaler/libscaler-m2m1shot.h create mode 100644 libscaler/libscaler-swscaler.cpp create mode 100644 libscaler/libscaler-swscaler.h 
create mode 100644 libscaler/libscaler-v4l2.cpp create mode 100644 libscaler/libscaler-v4l2.h create mode 100644 libscaler/libscaler.cpp create mode 100644 libscaler/libscalerblend-v4l2.cpp create mode 100644 libscaler/libscalerblend-v4l2.h create mode 100644 libseiren/Android.mk create mode 100644 libseiren/NOTICE create mode 100644 libseiren/dec/seiren_hw.c create mode 100644 libseiren/include/seiren_error.h create mode 100644 libseiren/include/seiren_hw.h create mode 100644 libseiren/include/seiren_ioctl.h create mode 100644 libsrp/Android.mk create mode 100644 libsrp/NOTICE create mode 100644 libsrp/alp/Android.mk create mode 100644 libsrp/alp/NOTICE create mode 100644 libsrp/alp/dec/srp_api.c create mode 100644 libsrp/alp/include/srp_api.h create mode 100644 libsrp/alp/include/srp_error.h create mode 100644 libsrp/alp/include/srp_ioctl.h create mode 100644 libstagefrighthw/Android.mk create mode 100644 libstagefrighthw/Exynos_OMX_Plugin.cpp create mode 100644 libstagefrighthw/Exynos_OMX_Plugin.h create mode 100644 libstagefrighthw/NOTICE create mode 100644 libswconverter/Android.mk create mode 100644 libswconverter/NOTICE create mode 100644 libswconverter/csc_BGRA8888_to_RGBA8888_NEON.s create mode 100644 libswconverter/csc_BGRA8888_to_YUV420SP_NEON.s create mode 100644 libswconverter/csc_RGBA8888_to_YUV420SP_NEON.s create mode 100644 libswconverter/csc_interleave_memcpy_neon.s create mode 100644 libswconverter/csc_linear_to_tiled_crop_neon.s create mode 100644 libswconverter/csc_linear_to_tiled_interleave_crop_neon.s create mode 100644 libswconverter/csc_tiled_to_linear_crop_neon.s create mode 100644 libswconverter/csc_tiled_to_linear_deinterleave_crop_neon.s create mode 100644 libswconverter/csc_tiled_to_linear_uv_deinterleave_neon.s create mode 100644 libswconverter/csc_tiled_to_linear_uv_neon.s create mode 100644 libswconverter/csc_tiled_to_linear_y_neon.s create mode 100644 libswconverter/swconverter_dummy.c create mode 100644 libswconverter/swconvertor.c create mode 100644 libv4l2/Android.mk create mode 100644 libv4l2/NOTICE create mode 100644 libv4l2/exynos_mc.c create mode 100644 libv4l2/exynos_subdev.c create mode 100644 libv4l2/exynos_v4l2.c create mode 100644 libvideocodec/Android.mk create mode 100644 libvideocodec/ExynosVideoInterface.c create mode 100644 libvideocodec/NOTICE create mode 100644 libvideocodec/dec/ExynosVideoDecoder.c create mode 100644 libvideocodec/enc/ExynosVideoEncoder.c create mode 100644 libvideocodec/include/ExynosVideoApi.h create mode 100644 libvideocodec/include/ExynosVideoDec.h create mode 100644 libvideocodec/include/ExynosVideoEnc.h create mode 100644 libvideocodec/mfc_headers/exynos_mfc_media.h create mode 100644 libvirtualdisplay/Android.mk create mode 100644 libvirtualdisplay/ExynosVirtualDisplay.cpp create mode 100644 libvirtualdisplay/ExynosVirtualDisplay.h create mode 100644 libvirtualdisplay/NOTICE create mode 100644 libvppdisplay/Android.mk create mode 100644 libvppdisplay/ExynosDisplay.cpp create mode 100644 libvppdisplay/ExynosDisplay.h create mode 100644 libvppdisplay/ExynosDisplayResourceManager.cpp create mode 100644 libvppdisplay/ExynosDisplayResourceManager.h create mode 100644 libvppdisplay/ExynosOverlayDisplay.cpp create mode 100644 libvppdisplay/ExynosOverlayDisplay.h create mode 100644 libvppdisplay/ExynosSecondaryDisplay.cpp create mode 100644 libvppdisplay/ExynosSecondaryDisplay.h create mode 100644 libvppdisplay/NOTICE create mode 100644 libvppdisplay_tiny/Android.mk create mode 100644 libvppdisplay_tiny/ExynosDisplay.cpp 
 create mode 100644 libvppdisplay_tiny/ExynosDisplay.h
 create mode 100644 libvppdisplay_tiny/ExynosOverlayDisplay.cpp
 create mode 100644 libvppdisplay_tiny/ExynosOverlayDisplay.h
 create mode 100644 libvppdisplay_tiny/ExynosPrimaryDisplay.cpp
 create mode 100644 libvppdisplay_tiny/ExynosPrimaryDisplay.h
 create mode 100644 libvppdisplay_tiny/NOTICE
 create mode 100644 libvpphdmi/Android.mk
 create mode 100644 libvpphdmi/ExynosExternalDisplay.cpp
 create mode 100644 libvpphdmi/ExynosExternalDisplay.h
 create mode 100644 libvpphdmi/NOTICE
 create mode 100644 libvpphdmi/decon_tv.h
 create mode 100644 libvpphdmi/dv_timings.c
 create mode 100644 libvpphdmi/videodev2_exynos_hdmi.h
 create mode 100644 libvppvirtualdisplay/Android.mk
 create mode 100644 libvppvirtualdisplay/ExynosVirtualDisplay.cpp
 create mode 100644 libvppvirtualdisplay/ExynosVirtualDisplay.h
 create mode 100644 libvppvirtualdisplay/NOTICE
 create mode 100644 rpmbd/Android.mk
 create mode 100644 rpmbd/rpmbd.c
 create mode 100644 rpmbd/rpmbd.h

diff --git a/Android.mk b/Android.mk
new file mode 100644
index 0000000..38d1631
--- /dev/null
+++ b/Android.mk
@@ -0,0 +1,136 @@
+#
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+common_exynos_dirs := \
+ libion_exynos \
+ libexynosutils \
+ libcec \
+ libcsc \
+ libmpp \
+ libhwcutils \
+ libhwcService \
+ libv4l2 \
+ libscaler \
+ libswconverter \
+ libvideocodec \
+ libstagefrighthw \
+ libmemtrack \
+ exyrngd \
+ rpmbd
+
+ifeq ($(TARGET_USES_UNIVERSAL_LIBHWJPEG), true)
+common_exynos_dirs += libhwjpeg
+endif
+
+ifneq ($(BOARD_USES_FIMC), true)
+common_exynos_dirs += \
+ libgscaler
+endif
+
+ifeq ($(BOARD_USES_FIMGAPI_V5X), true)
+common_exynos_dirs += \
+ libfimg5x
+else
+common_exynos_dirs += \
+ libfimg4x
+endif
+
+
+ifeq ($(BOARD_USES_EXYNOS5_COMMON_GRALLOC), true)
+common_exynos_dirs += \
+ gralloc
+endif
+
+ifeq ($(BOARD_USES_HWC_TINY), true)
+common_exynos_dirs += \
+ libvppdisplay_tiny
+else
+ifeq ($(BOARD_USES_VPP), true)
+common_exynos_dirs += libvppdisplay
+else
+common_exynos_dirs += libdisplay
+endif
+endif
+
+ifeq ($(BOARD_USES_HWC_TINY), true)
+common_exynos_dirs += \
+ libhwc_tiny
+else
+ifneq ($(BOARD_TV_PRIMARY), true)
+common_exynos_dirs += \
+ libhwc
+endif
+endif
+
+ifneq ($(BOARD_USES_HWC_TINY), true)
+ifeq ($(BOARD_USES_VIRTUAL_DISPLAY), true)
+ifeq ($(BOARD_USES_VPP), true)
+common_exynos_dirs += \
+ libvppvirtualdisplay
+else
+common_exynos_dirs += \
+ libvirtualdisplay
+endif
+endif
+endif
+
+ifeq ($(BOARD_USE_ALP_AUDIO), true)
+ifeq ($(BOARD_USE_SEIREN_AUDIO), true)
+common_exynos_dirs += \
+ libseiren
+else
+common_exynos_dirs += \
+ libsrp
+endif
+endif
+
+ifeq ($(BOARD_USE_COMMON_AUDIOHAL), true)
+common_exynos_dirs += \
+ libaudio
+endif
+
+ifneq ($(BOARD_USES_HWC_TINY), true)
+ifeq ($(BOARD_HDMI_INCAPABLE), true)
+common_exynos_dirs += libhdmi_dummy
+else
+ifeq ($(BOARD_USES_VPP), true)
+common_exynos_dirs += libvpphdmi
+else
+ifeq ($(BOARD_USES_NEW_HDMI), true)
+common_exynos_dirs += libhdmi
+else
+common_exynos_dirs += libhdmi_legacy
+endif
+endif
+endif
+endif
+
+ifeq ($(BOARD_USES_FIMGAPI_V4L2), true)
+common_exynos_dirs += \
+ libg2d
+endif
+
+ifeq ($(BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA), true)
+common_exynos_dirs += \
+ libcamera_external
+else
+ifeq ($(BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA), true)
+common_exynos_dirs += \
+ libcamera_external
+endif
+endif
+
+include $(call all-named-subdir-makefiles,$(common_exynos_dirs))
diff --git a/BoardConfigCFlags.mk b/BoardConfigCFlags.mk
new file mode 100644
index 0000000..bb489b3
--- /dev/null
+++ b/BoardConfigCFlags.mk
@@ -0,0 +1,175 @@
+ifeq ($(BOARD_USES_FIMGAPI_V5X), true)
+ LOCAL_CFLAGS += -DFIMGAPI_V5X
+ifeq ($(filter 3.18, $(TARGET_LINUX_KERNEL_VERSION)), 3.18)
+ LOCAL_CFLAGS += -DFIMG2D_USE_M2M1SHOT2
+endif
+endif
+
+ifeq ($(BOARD_G2D_NEAREST_UNSUPPORT), true)
+ LOCAL_CFLAGS += -DFIMGAPI_G2D_NEAREST_UNSUPPORT
+endif
+
+ifeq ($(BOARD_DYNAMIC_RECOMPOSITION_DISABLED), true)
+ LOCAL_CFLAGS += -DDYNAMIC_RECOMPOSITION_DISABLED
+endif
+
+ifeq ($(TARGET_USES_UNIVERSAL_LIBHWJPEG), true)
+ LOCAL_CFLAGS += -DUSES_UNIVERSAL_LIBHWJPEG
+endif
+
+ifeq ($(BOARD_USES_FIMC), true)
+ LOCAL_CFLAGS += -DUSES_FIMC
+else
+ LOCAL_CFLAGS += -DUSES_GSCALER
+endif
+
+ifeq ($(BOARD_USES_IP_SERVICE), true)
+ LOCAL_CFLAGS += -DIP_SERVICE
+endif
+
+ifeq ($(BOARD_USES_HWC_SERVICES),true)
+ LOCAL_CFLAGS += -DHWC_SERVICES
+ifeq ($(BOARD_USE_S3D_SUPPORT),true)
+ LOCAL_CFLAGS += -DS3D_SUPPORT
+endif
+endif
+
+ifeq ($(BOARD_USES_WFD),true)
+ LOCAL_CFLAGS += -DUSES_WFD
+endif
+
+ifeq ($(BOARD_USES_VIRTUAL_DISPLAY), true)
+ LOCAL_CFLAGS += -DUSES_VIRTUAL_DISPLAY
+endif
+
+ifeq ($(BOARD_USES_DISABLE_COMPOSITIONTYPE_GLES), true)
+ LOCAL_CFLAGS += -DUSES_DISABLE_COMPOSITIONTYPE_GLES
+endif
+
+ifeq ($(BOARD_USES_VDS_YUV420SPM), true)
+ LOCAL_CFLAGS += -DUSES_VDS_YUV420SPM
+endif
+
+ifeq ($(BOARD_USES_VIRTUAL_DISPLAY_DECON_EXT_WB), true)
+ LOCAL_CFLAGS += -DUSES_VIRTUAL_DISPLAY_DECON_EXT_WB
+endif
+
+ifeq ($(BOARD_USE_VIDEO_EXT_FOR_WFD_DRM), true)
+ LOCAL_CFLAGS += -DUSE_VIDEO_EXT_FOR_WFD_DRM
+endif
+
+ifeq ($(BOARD_USES_OVERLAY_FOR_WFD_UI_MIRROR), true)
+ LOCAL_CFLAGS += -DUSES_OVERLAY_FOR_WFD_UI_MIRROR
+endif
+
+ifeq ($(BOARD_USES_3MSC_FOR_WFD), true)
+ LOCAL_CFLAGS += -DUSES_3MSC_FOR_WFD
+endif
+
+ifeq ($(BOARD_USES_2MSC_FOR_WFD), true)
+ LOCAL_CFLAGS += -DUSES_2MSC_FOR_WFD
+endif
+
+ifeq ($(BOARD_USES_VDS_BGRA8888), true)
+ LOCAL_CFLAGS += -DUSES_VDS_BGRA8888
+endif
+
+ifeq ($(BOARD_VIRTUAL_DISPLAY_VIDEO_IS_OVERLAY), true)
+ LOCAL_CFLAGS += -DVIRTUAL_DISPLAY_VIDEO_IS_OVERLAY
+endif
+
+ifeq ($(BOARD_USES_VDS_OTHERFORMAT), true)
+ LOCAL_CFLAGS += -DUSES_VDS_OTHERFORMAT
+endif
+
+ifeq ($(BOARD_USES_FB_PHY_LINEAR),true)
+ LOCAL_CFLAGS += -DUSE_FB_PHY_LINEAR
+endif
+
+ifeq ($(BOARD_TV_PRIMARY),true)
+ LOCAL_CFLAGS += -DTV_PRIMARY
+endif
+ifeq ($(BOARD_HDMI_INCAPABLE), true)
+ LOCAL_CFLAGS += -DHDMI_INCAPABLE
+else
+ifeq ($(BOARD_USES_NEW_HDMI), true)
+ LOCAL_CFLAGS += -DUSES_NEW_HDMI
+endif
+ifeq ($(BOARD_USES_GSC_VIDEO), true)
+ LOCAL_CFLAGS += -DGSC_VIDEO
+endif
+ifeq ($(BOARD_USES_VP_VIDEO),true)
+ LOCAL_CFLAGS += -DVP_VIDEO
+endif
+ifeq ($(BOARD_USES_CEC),true)
+ LOCAL_CFLAGS += -DUSES_CEC
+endif
+endif
+
+ifeq ($(BOARD_SUPPORT_DQ_Q_SEQUENCE), true)
+ LOCAL_CFLAGS += -DSUPPORT_DQ_Q_SEQUENCE
+endif
+
+ifeq ($(BOARD_USES_ONLY_GSC0_GSC1),true)
+ LOCAL_CFLAGS += -DUSES_ONLY_GSC0_GSC1
+endif
+
+ifeq ($(BOARD_USES_DT), true)
+ LOCAL_CFLAGS += -DUSES_DT
+endif
+
+ifeq ($(BOARD_USES_U4A),true)
+ LOCAL_CFLAGS += -DUSES_U4A
+endif
+
+ifeq ($(BOARD_USES_WINDOW_UPDATE), true)
+ LOCAL_CFLAGS += -DUSES_WINDOW_UPDATE
+endif
+
+ifeq ($(BOARD_USES_VPP), true)
+ LOCAL_CFLAGS += -DUSES_VPP
+endif
+
+ifeq ($(BOARD_USES_VPP_V2), true)
+ LOCAL_CFLAGS += -DUSES_VPP_V2
+endif
+
+ifeq ($(TARGET_SOC), exynos8890)
+ LOCAL_CFLAGS += -DUSES_DECON_AFBC_DECODER
+endif
+
+ifeq ($(TARGET_SOC), exynos5433)
+ LOCAL_CFLAGS += -DTARGET_SOC_EXYNOS5433
+endif
+
+ifeq ($(TARGET_SOC), exynos5433)
+ LOCAL_CFLAGS += -DUSES_DRM_SETTING_BY_DECON
+endif
+
+ifeq ($(TARGET_SOC), exynos5430)
+ LOCAL_CFLAGS += -DUSES_DRM_SETTING_BY_DECON
+endif
+
+ifeq ($(TARGET_SOC), exynos5420)
+ LOCAL_CFLAGS += -DUSES_DRM_SETTING_BY_DECON
+endif
+
+ifeq ($(BOARD_USE_GRALLOC_FLAG_FOR_HDMI), true)
+ LOCAL_CFLAGS += -DUSE_GRALLOC_FLAG_FOR_HDMI
+endif
+
+ifeq ($(BOARD_USE_HDMI_ON_IN_SUSPEND), true)
+ LOCAL_CFLAGS += -DHDMI_ON_IN_SUSPEND
+endif
+
+ifeq ($(BOARD_USES_DUAL_DISPLAY), true)
+ LOCAL_CFLAGS += -DUSES_DUAL_DISPLAY
+endif
+
+ifeq ($(TARGET_ARCH), arm64)
+ LOCAL_CFLAGS += -DUSES_ARCH_ARM64
+endif
+
+ifeq ($(BOARD_USES_DECON_64BIT_ADDRESS), true)
+ LOCAL_CFLAGS += -DUSES_DECON_64BIT_ADDRESS
+endif
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..316b4eb
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2014, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/exyrngd/Android.mk b/exyrngd/Android.mk new file mode 100644 index 0000000..36630d9 --- /dev/null +++ b/exyrngd/Android.mk @@ -0,0 +1,15 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE := exyrngd +ifeq ($(BOARD_USES_FIPS_COMPLIANCE_RNG_DRV),true) + LOCAL_CFLAGS += -DUSES_FIPS_COMPLIANCE_RNG_DRV +endif +LOCAL_SRC_FILES := \ + exyrngd.c +LOCAL_SHARED_LIBRARIES := libc libcutils +#LOCAL_CFLAGS := -DANDROID_CHANGES +LOCAL_MODULE_TAGS := optional +include $(BUILD_EXECUTABLE) + diff --git a/exyrngd/LICENSE b/exyrngd/LICENSE new file mode 100644 index 0000000..0575fcf --- /dev/null +++ b/exyrngd/LICENSE @@ -0,0 +1,30 @@ + + Copyright (c) 2011, Code Aurora Forum. All rights reserved. + Copyright (C) 2013 Samsung Electronics Co., LTD + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of Code Aurora Forum, Inc. nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED + WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT + ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS + BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + diff --git a/exyrngd/NOTICE b/exyrngd/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/exyrngd/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/exyrngd/README.txt b/exyrngd/README.txt new file mode 100644 index 0000000..0f2dabc --- /dev/null +++ b/exyrngd/README.txt @@ -0,0 +1,40 @@ +Exyrng Daemon +======================================================= + +Description: +The exyrng daemon is used to check the entropy level and refill the kernel entropy pool. +By default it opens the "/dev/random" device and the "/dev/hw_random" device. +It requires the H/W random driver (/dev/hw_random) for random data. +It requires the random driver (/dev/random) for feeding entropy. + +Parameters: +It accepts the following optional arguments: + -b background - become a daemon (default) + -f foreground - do not fork and become a daemon + -r hardware random input device (default: /dev/hw_random) + -o system random output device (default: /dev/random) + -h help + +Return: +It returns 0 if all cases succeed; otherwise it returns -1. + +Usage: +The exyrng daemon is started automatically at boot time by init.rc. +The exyrng daemon runs with the default root user and group permissions; +root permission is necessary to access the random drivers and check entropy. + +Details: +The main loop checks the entropy level, reads random data, and feeds the entropy pool. +The 2048-byte daemon buffer is refilled from the H/W random driver whenever it is empty. +The exyrng daemon adds 128 bytes of entropy at a time when the entropy count is insufficient. + +Files: + README this file + LICENSE terms of distribution and reuse (BSD) + + Android.mk build script for the native executable + exyrngd.c exyrng daemon + +Targets: + Exynos diff --git a/exyrngd/exyrngd.c b/exyrngd/exyrngd.c new file mode 100644 index 0000000..85e174b --- /dev/null +++ b/exyrngd/exyrngd.c @@ -0,0 +1,395 @@ +/* + * Copyright (c) 2011, Code Aurora Forum. All rights reserved. + * Copyright (C) 2013 Samsung Electronics Co., LTD + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following + * disclaimer in the documentation and/or other materials provided + * with the distribution. + * * Neither the name of Code Aurora Forum, Inc. nor the names of its + * contributors may be used to endorse or promote products derived + * from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF + * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS + * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN + * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef ANDROID_CHANGES +#include +#endif + +#ifndef min + #define min(a,b) (((a)>(b))?(b):(a)) +#endif + +typedef unsigned char bool; + +#define TRUE 1 +#define FALSE 0 + +#define RANDOM_DEVICE "/dev/random" +#define RANDOM_DEVICE_HW "/dev/hw_random" + +#define MAX_ENT_POOL_WRITES 128 /* write pool with smaller chunks */ +#define RANDOM_NUMBER_BYTES 256 /* random data byte to check randomness */ +#ifdef USES_FIPS_COMPLIANCE_RNG_DRV +#define MAX_BUFFER 256 /* do not change this value */ +#else +/* Buffer to hold hardware entropy bytes (this must be 2KB for FIPS testing */ +#define MAX_BUFFER 2048 /* do not change this value */ +#endif +static unsigned char databuf[MAX_BUFFER]; /* create buffer for FIPS testing */ +static unsigned long buffsize; /* size of data in buffer */ +static unsigned long curridx; /* position of current index */ + +/* User parameters */ +struct user_options { + char input_device_name[128]; + char output_device_name[128]; + bool run_as_daemon; +}; + +/* Version number of this source */ +#define APP_NAME "exyrngd" + +const char *program_version = +APP_NAME "\n" +"Copyright (c) 2011, Code Aurora Forum. All rights reserved.\n" +"This is free software; see the source for copying conditions. There is NO\n" +"warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n\n"; + +const char *program_usage = +"Usage: " APP_NAME " [OPTION...]\n" +" -b background - become a daemon (default)\n" +" -f foreground - do not fork and become a daemon\n" +" -r hardware random input device (default: /dev/hw_random)\n" +" -o system random output device (default: /dev/random)\n" +" -h help (this page)\n"; + +/* Logging information */ +enum log_level { + DEBUG = 0, + INFO = 1, + WARNING = 2, + ERROR = 3, + FATAL = 4, + LOG_MAX = 4, +}; + +/* Logging function for outputing to stderr or log */ +void log_print(int level, char *format, ...) 
+{ + if (level >= 0 && level <= LOG_MAX) { +#ifdef ANDROID_CHANGES + static int levels[5] = { + ANDROID_LOG_DEBUG, ANDROID_LOG_INFO, ANDROID_LOG_WARN, + ANDROID_LOG_ERROR, ANDROID_LOG_FATAL + }; + va_list ap; + va_start(ap, format); + __android_log_vprint(levels[level], APP_NAME, format, ap); + va_end(ap); +#else + static char *levels = "DIWEF"; + va_list ap; + fprintf(stderr, "%c: ", levels[level]); + va_start(ap, format); + vfprintf(stderr, format, ap); + va_end(ap); + fputc('\n', stderr); +#endif + } +} + +static void title(void) +{ + printf("%s", program_version); +} + +static void usage(void) +{ + printf("%s", program_usage); +} + +/* Parse command line parameters */ +static int get_user_options(struct user_options *user_ops, int argc, char **argv) +{ + int max_params = argc; + int itr = 1; /* skip program name */ + while (itr < max_params) { + if (argv[itr][0] != '-') + return -1; + + switch (argv[itr++][1]) { + case 'b': + user_ops->run_as_daemon = TRUE; + break; + + case 'f': + user_ops->run_as_daemon = FALSE; + break; + + case 'r': + if (itr < max_params) { + if (strlen(argv[itr]) < sizeof(user_ops->input_device_name)) { + strncpy(user_ops->input_device_name, argv[itr], strlen(argv[itr]) + 1); + itr++; + } + else + return -1; + break; + } + else + return -1; + + case 'o': + if (itr < max_params) { + if (strlen(argv[itr]) < sizeof(user_ops->output_device_name)) { + strncpy(user_ops->output_device_name, argv[itr], strlen(argv[itr]) + 1); + itr++; + } + else + return -1; + break; + } + else + return -1; + + case 'h': + return -1; + + + default: + fprintf(stderr, "ERROR: Bad option: '%s'\n", argv[itr-1]); + return -1; + } + } + return 0; +} + +/* Only check FIPS 140-2 (Continuous Random Number Generator Test) */ +static int fips_test(const unsigned char *buf, size_t size) +{ + unsigned long *buff_ul = (unsigned long *) buf; + size_t size_ul = size >> 2; /* convert byte to word size */ + unsigned long last_value; + unsigned int rnd_ctr[256]; + unsigned int i; + + /* Continuous Random Number Generator Test */ + last_value = *(buff_ul++); + size_ul--; + + while (size_ul > 0) { + if (*buff_ul == last_value) { + log_print(ERROR, "ERROR: Bad word value from hardware."); + return -1; + } else + last_value = *buff_ul; + buff_ul++; + size_ul--; + } + + memset(rnd_ctr, 0, sizeof(rnd_ctr)); + /* count each random number */ + for (i = 0; i < size; ++i) { + rnd_ctr[buf[i]]++; + } + + /* check random numbers to make sure they are not bogus */ + for (i = 0; i < RANDOM_NUMBER_BYTES; ++i) { + if (rnd_ctr[i] == 0) { + log_print(ERROR, "ERROR: Bad spectral random number sample."); + return -1; + } + } + + return 0; +} + +/* Read data from the hardware RNG source */ +static int read_src(int fd, void *buf, size_t size) +{ + size_t offset = 0; + char *chr = (char *) buf; + ssize_t ret; + + if (!size) + return -1; + do { + ret = read(fd, chr + offset, size); + /* any read failure is bad */ + if (ret == -1) + return -1; + size -= ret; + offset += ret; + } while (size > 0); + + return 0; +} + +/* The beginning of everything */ +int main(int argc, char **argv) +{ + struct user_options user_ops; /* holds user configuration data */ + struct rand_pool_info *rand = NULL; /* structure to pass entropy (IOCTL) */ + int random_fd = 0; /* output file descriptor */ + int random_hw_fd = 0; /* input file descriptor */ + int write_size; /* max entropy data to pass */ + struct pollfd fds[1]; /* used for polling file descriptor */ + int ret; + int exitval = 0; + + /* set default parameters */ + user_ops.run_as_daemon = TRUE; 
+ strncpy(user_ops.input_device_name, RANDOM_DEVICE_HW, strlen(RANDOM_DEVICE_HW) + 1); + strncpy(user_ops.output_device_name, RANDOM_DEVICE, strlen(RANDOM_DEVICE) + 1); + + /* display application header */ + title(); + + /* get user preferences */ + ret = get_user_options(&user_ops, argc, argv); + if (ret < 0) { + usage(); + exitval = 1; + goto exit; + } + + /* open hardware random device */ + random_hw_fd = open(user_ops.input_device_name, O_RDONLY); + if (random_hw_fd < 0) { + fprintf(stderr, "Can't open hardware random device file %s\n", user_ops.input_device_name); + exitval = 1; + goto exit; + } + + /* open random device */ + random_fd = open(user_ops.output_device_name, O_RDWR); + if (random_fd < 0) { + fprintf(stderr, "Can't open random device file %s\n", user_ops.output_device_name); + exitval = 1; + goto exit; + } + + /* allocate memory for ioctl data struct and buffer */ + rand = malloc(sizeof(struct rand_pool_info) + MAX_ENT_POOL_WRITES); + if (!rand) { + fprintf(stderr, "Can't allocate memory\n"); + exitval = 1; + goto exit; + } + + /* setup poll() data */ + memset(fds, 0, sizeof(fds)); + fds[0].fd = random_fd; + fds[0].events = POLLOUT; + + /* run as daemon if requested to do so */ + if (user_ops.run_as_daemon) { + fprintf(stderr, "Starting daemon.\n"); + if (daemon(0, 0) < 0) { + fprintf(stderr, "can't daemonize: %s\n", strerror(errno)); + exitval = 1; + goto exit; + } +#ifndef ANDROID_CHANGES + openlog(APP_NAME, 0, LOG_DAEMON); +#endif + } + + /* log message */ + log_print(INFO, APP_NAME " has started:\n" "Reading device:'%s' updating entropy for device:'%s'", + user_ops.input_device_name, + user_ops.output_device_name); + + /* main loop to get data from hardware and feed RNG entropy pool */ + while (1) { + /* Check for empty buffer and fill with hardware random generated numbers */ + if (buffsize == 0) { + /* fill buffer with random data from hardware */ + ret = read_src(random_hw_fd, databuf, MAX_BUFFER); + if (ret < 0) { + log_print(ERROR, "ERROR: Can't read from hardware source."); + continue; + } + + /* reset buffer variables to indicate full buffer */ + buffsize = MAX_BUFFER; + curridx = 0; + +#ifndef USES_FIPS_COMPLIANCE_RNG_DRV + /* run FIPS test on buffer, if buffer fails then ditch it and get new data */ + ret = fips_test(databuf, MAX_BUFFER); + if (ret < 0) { + buffsize = 0; + log_print(INFO, "ERROR: Failed FIPS test."); + } +#endif + } + /* We should have data here, if not then something bad happened above and we should wait and try again */ + if (buffsize == 0) { + log_print(ERROR, "ERROR: Timeout getting valid random data from hardware."); + continue; + } + + /* fill entropy pool */ + write_size = min(buffsize, MAX_ENT_POOL_WRITES); + + /* Write some data to the device */ + rand->entropy_count = write_size * 8; + rand->buf_size = write_size; + memcpy(rand->buf, &databuf[curridx], write_size); + curridx += write_size; + buffsize -= write_size; + + /* Issue the ioctl to increase the entropy count */ + if (ioctl(random_fd, RNDADDENTROPY, rand) < 0) { + log_print(ERROR,"ERROR: RNDADDENTROPY ioctl() failed."); + exitval = 1; + goto exit; + } + + /* Wait if entropy pool is full */ + ret = poll(fds, 1, -1); + if (ret < 0) { + log_print(ERROR,"ERROR: poll call failed."); + exitval = 1; + goto exit; + } + } + +exit: + /* free other resources */ + if (rand) + free(rand); + if (random_fd >= 0) + close(random_fd); + if (random_hw_fd >= 0) + close(random_hw_fd); + return exitval; +} diff --git a/gralloc/Android.mk b/gralloc/Android.mk new file mode 100644 index 
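As the exyrngd README above describes, the daemon's main loop credits the kernel pool in 128-byte chunks (MAX_ENT_POOL_WRITES) through the RNDADDENTROPY ioctl after the optional FIPS continuous test. A minimal standalone sketch of just that step, using the same device paths but with the retry loop and FIPS checking trimmed; like the daemon, it needs root because RNDADDENTROPY is privileged:

/* Sketch only: push one 128-byte chunk of hardware randomness into the
 * kernel entropy pool, as exyrngd's main loop does with RNDADDENTROPY. */
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/random.h>   /* struct rand_pool_info, RNDADDENTROPY */

#define CHUNK 128           /* matches MAX_ENT_POOL_WRITES */

int main(void)
{
    unsigned char data[CHUNK];
    struct rand_pool_info *rand_info;
    int hw_fd = open("/dev/hw_random", O_RDONLY);
    int rnd_fd = open("/dev/random", O_RDWR);

    if (hw_fd < 0 || rnd_fd < 0)
        return 1;
    if (read(hw_fd, data, CHUNK) != CHUNK)   /* the real daemon loops until the buffer is full */
        return 1;

    rand_info = malloc(sizeof(*rand_info) + CHUNK);
    if (!rand_info)
        return 1;
    rand_info->entropy_count = CHUNK * 8;    /* credited entropy, in bits */
    rand_info->buf_size = CHUNK;             /* payload size, in bytes */
    memcpy(rand_info->buf, data, CHUNK);

    if (ioctl(rnd_fd, RNDADDENTROPY, rand_info) < 0)
        perror("RNDADDENTROPY");

    free(rand_info);
    close(hw_fd);
    close(rnd_fd);
    return 0;
}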
0000000..a9788e5 --- /dev/null +++ b/gralloc/Android.mk @@ -0,0 +1,54 @@ +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +LOCAL_PATH := $(call my-dir) + +# HAL module implemenation stored in +# hw/..so +include $(CLEAR_VARS) + +LOCAL_MODULE_RELATIVE_PATH:= hw +#LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_SHARED_LIBRARIES)/hw +LOCAL_SHARED_LIBRARIES := liblog libcutils libion libutils + +ifneq ($(TARGET_SOC), exynos5420) +LOCAL_CFLAGS := -DUSES_EXYNOS_COMMON_GRALLOC +endif + +MALI_AFBC_GRALLOC := 1 + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/hardware/samsung_slsi/exynos5/include + +LOCAL_SRC_FILES := \ + format_chooser.cpp \ + gralloc.cpp \ + framebuffer.cpp \ + mapper.cpp + +LOCAL_MODULE := gralloc.$(TARGET_BOARD_PLATFORM) +LOCAL_CFLAGS += -DLOG_TAG=\"gralloc\" -Wno-missing-field-initializers -DMALI_AFBC_GRALLOC=$(MALI_AFBC_GRALLOC) + +ifeq ($(BOARD_USES_EXYNOS5_GRALLOC_RANGE_FLUSH), true) +LOCAL_CFLAGS += -DGRALLOC_RANGE_FLUSH +endif + +ifeq ($(BOARD_USES_EXYNOS5_CRC_BUFFER_ALLOC), true) +LOCAL_CFLAGS += -DUSES_EXYNOS_CRC_BUFFER_ALLOC +endif + +include $(BUILD_SHARED_LIBRARY) diff --git a/gralloc/format_chooser.cpp b/gralloc/format_chooser.cpp new file mode 100644 index 0000000..f973bc2 --- /dev/null +++ b/gralloc/format_chooser.cpp @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2014 ARM Limited. All rights reserved. + * + * Copyright (C) 2008 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include +#include +#include +#include +#include +#include "format_chooser.h" + +#define FBT (GRALLOC_USAGE_HW_FB | GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_COMPOSER) +#define GENERAL_UI (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER) + +/* It's for compression check format, width, usage*/ +int check_for_compression(int w, int h, int format, int usage) +{ + char value[256]; + int afbc_prop; + + property_get("ddk.set.afbc", value, "0"); + afbc_prop = atoi(value); + + switch(format) + { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_BGRA_8888: + case HAL_PIXEL_FORMAT_RGB_888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_YV12: + { + if(afbc_prop == 0) + return 0; + if (w % 16 != 0) /* width isn't 16 pixel alignment */ + return 0; + if ((w <= 144) || (h <= 144)) /* min restriction for performance */ + return 0; + if ((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_OFTEN) + return 0; + if ((usage & FBT) || (usage & GENERAL_UI)) /*only support FBT and General UI */ + return 1; + else + return 0; + + break; + } + default: + return 0; + } + +} + +uint64_t gralloc_select_format(int req_format, int usage, int is_compressible) +{ + uint64_t new_format = req_format; + + if( req_format == 0 ) + { + return 0; + } + + if( (usage & (GRALLOC_USAGE_SW_READ_MASK | GRALLOC_USAGE_SW_WRITE_MASK)) != 0 || + usage == 0 ) + { + return new_format; + } + + if( is_compressible == 0) + { + return new_format; + } +#if 0 + /* This is currently a limitation with the display and will be removed eventually + * We can't allocate fbdev framebuffer buffers in AFBC format */ + if( usage & GRALLOC_USAGE_HW_FB ) + { + return new_format; + } +#endif + new_format |= GRALLOC_ARM_INTFMT_AFBC; + + ALOGD("Returned iterated format: 0x%llX", new_format); + + return new_format; +} diff --git a/gralloc/format_chooser.h b/gralloc/format_chooser.h new file mode 100644 index 0000000..0f5778a --- /dev/null +++ b/gralloc/format_chooser.h @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2014 ARM Limited. All rights reserved. + * + * Copyright (C) 2008 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
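check_for_compression() and gralloc_select_format() above decide AFBC eligibility (the ddk.set.afbc property must be non-zero, the width 16-pixel aligned, both dimensions above 144, no SW_READ_OFTEN usage, and only FB-target or general UI buffers qualify) and record the result by setting bit 32 of a 64-bit internal format whose low 32 bits keep the HAL pixel format. A small self-contained sketch of that packing, with the constants restated from format_chooser.h and an arbitrary example format value:

/* Sketch of the 64-bit internal-format packing used by the format chooser.
 * Constants mirror format_chooser.h; the HAL format value is only an example. */
#include <stdint.h>
#include <stdio.h>

#define GRALLOC_ARM_INTFMT_EXTENSION_BIT_START 32
#define GRALLOC_ARM_INTFMT_AFBC      (1ULL << (GRALLOC_ARM_INTFMT_EXTENSION_BIT_START + 0))
#define GRALLOC_ARM_INTFMT_FMT_MASK  0x00000000ffffffffULL
#define GRALLOC_ARM_INTFMT_EXT_MASK  0xffffffff00000000ULL

int main(void)
{
    uint32_t hal_format = 1;             /* e.g. HAL_PIXEL_FORMAT_RGBA_8888 */
    uint64_t internal = hal_format;

    internal |= GRALLOC_ARM_INTFMT_AFBC; /* what gralloc_select_format() adds for compressible buffers */

    printf("base format: 0x%x\n",
           (uint32_t)(internal & GRALLOC_ARM_INTFMT_FMT_MASK));
    printf("AFBC:        %s\n",
           (internal & GRALLOC_ARM_INTFMT_AFBC) ? "yes" : "no");
    printf("ext bits:    0x%llx\n",
           (unsigned long long)(internal & GRALLOC_ARM_INTFMT_EXT_MASK));
    return 0;
}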
+ */ + +#ifndef FORMAT_CHOOSER_H_ +#define FORMAT_CHOOSER_H_ + +#include + + +#define GRALLOC_ARM_INTFMT_EXTENSION_BIT_START 32 + +/* This format will be use AFBC */ +#define GRALLOC_ARM_INTFMT_AFBC (1ULL << (GRALLOC_ARM_INTFMT_EXTENSION_BIT_START+0)) + +/* This format uses AFBC split block mode */ +#define GRALLOC_ARM_INTFMT_AFBC_SPLITBLK (1ULL << (GRALLOC_ARM_INTFMT_EXTENSION_BIT_START+1)) + +/* Internal format masks */ +#define GRALLOC_ARM_INTFMT_FMT_MASK 0x00000000ffffffffULL +#define GRALLOC_ARM_INTFMT_EXT_MASK 0xffffffff00000000ULL + +int check_for_compression(int w, int h, int format, int usage); +uint64_t gralloc_select_format(int req_format, int usage, int is_compressible); + +#endif /* FORMAT_CHOOSER_H_ */ diff --git a/gralloc/framebuffer.cpp b/gralloc/framebuffer.cpp new file mode 100644 index 0000000..2ba4b68 --- /dev/null +++ b/gralloc/framebuffer.cpp @@ -0,0 +1,282 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include +#include + +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include +#include + +#if HAVE_ANDROID_OS +#include +#endif + +#include "gralloc_priv.h" +#include "gr.h" + +#include "format_chooser.h" + +/*****************************************************************************/ + +// numbers of buffers for page flipping +#define NUM_BUFFERS 2 +#define HWC_EXIST 0 + +struct hwc_callback_entry +{ + void (*callback)(void *, private_handle_t *); + void *data; +}; + +#if HWC_EXIST +typedef android::Vector hwc_callback_queue_t; +#endif + +struct fb_context_t { + framebuffer_device_t device; +}; + +/*****************************************************************************/ + +static int fb_setSwapInterval(struct framebuffer_device_t* dev, + int interval) +{ + fb_context_t* ctx = (fb_context_t*)dev; + if (interval < dev->minSwapInterval || interval > dev->maxSwapInterval) + return -EINVAL; + // FIXME: implement fb_setSwapInterval + return 0; +} + +static int fb_post(struct framebuffer_device_t* dev, buffer_handle_t buffer) +{ + if (private_handle_t::validate(buffer) < 0) + return -EINVAL; + + private_handle_t const* hnd = reinterpret_cast(buffer); + private_module_t* m = reinterpret_cast(dev->common.module); +#if HWC_EXIST + hwc_callback_queue_t *queue = reinterpret_cast(m->queue); + pthread_mutex_lock(&m->queue_lock); + if(queue->isEmpty()) + pthread_mutex_unlock(&m->queue_lock); + else { + private_handle_t *hnd = private_handle_t::dynamicCast(buffer); + struct hwc_callback_entry entry = queue->top(); + queue->pop(); + pthread_mutex_unlock(&m->queue_lock); + entry.callback(entry.data, hnd); + } +#else + // If we can't do the page_flip, just copy the buffer to the front + // FIXME: use copybit HAL instead of memcpy + void* fb_vaddr; + void* buffer_vaddr; + + m->base.lock(&m->base, m->framebuffer, + GRALLOC_USAGE_SW_WRITE_RARELY, + 0, 0, m->info.xres, m->info.yres, + &fb_vaddr); + + m->base.lock(&m->base, buffer, + 
GRALLOC_USAGE_SW_READ_RARELY, + 0, 0, m->info.xres, m->info.yres, + &buffer_vaddr); + + memcpy(fb_vaddr, buffer_vaddr, m->finfo.line_length * m->info.yres); + + m->base.unlock(&m->base, buffer); + m->base.unlock(&m->base, m->framebuffer); +#endif + return 0; +} + +/*****************************************************************************/ + +static int fb_close(struct hw_device_t *dev) +{ + fb_context_t* ctx = (fb_context_t*)dev; + if (ctx) { + free(ctx); + } + return 0; +} + +int init_fb(struct private_module_t* module) +{ + char const * const device_template[] = { + "/dev/graphics/fb%u", + "/dev/fb%u", + NULL + }; + + int fd = -1; + int i = 0; + + fd = open("/dev/graphics/fb0", O_RDWR); + if (fd < 0) { + ALOGE("/dev/graphics/fb0 Open fail"); + return -errno; + } + + struct fb_fix_screeninfo finfo; + if (ioctl(fd, FBIOGET_FSCREENINFO, &finfo) == -1) { + ALOGE("Fail to get FB Screen Info"); + close(fd); + return -errno; + } + + struct fb_var_screeninfo info; + if (ioctl(fd, FBIOGET_VSCREENINFO, &info) == -1) { + ALOGE("First, Fail to get FB VScreen Info"); + close(fd); + return -errno; + } + + int refreshRate = 1000000000000000LLU / + ( + uint64_t( info.upper_margin + info.lower_margin + info.yres ) + * ( info.left_margin + info.right_margin + info.xres ) + * info.pixclock + ); + + if (refreshRate == 0) + refreshRate = 60*1000; /* 60 Hz */ + + float xdpi = (info.xres * 25.4f) / info.width; + float ydpi = (info.yres * 25.4f) / info.height; + float fps = refreshRate / 1000.0f; + + ALOGI("using (id=%s)\n" + "xres = %d px\n" + "yres = %d px\n" + "width = %d mm (%f dpi)\n" + "height = %d mm (%f dpi)\n" + "refresh rate = %.2f Hz\n", + finfo.id, info.xres, info.yres, info.width, xdpi, info.height, ydpi, + fps); + + module->xres = info.xres; + module->yres = info.yres; + module->line_length = info.xres; + module->xdpi = xdpi; + module->ydpi = ydpi; + module->fps = fps; + module->info = info; + module->finfo = finfo; + +#if !HWC_EXIST + size_t fbSize = roundUpToPageSize(finfo.line_length * info.yres_virtual); + module->framebuffer = new private_handle_t(dup(fd), fbSize, 0); + + void* vaddr = mmap(0, fbSize, PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0); + if (vaddr == MAP_FAILED) { + ALOGE("Error mapping the framebuffer (%s)", strerror(errno)); + close(fd); + return -errno; + } + module->framebuffer->base = (size_t)vaddr; + memset(vaddr, 0, fbSize); +#endif + + close(fd); + + return 0; +} + +int fb_device_open(hw_module_t const* module, const char* name, + hw_device_t** device) +{ + int status = -EINVAL; +#ifdef GRALLOC_16_BITS + int bits_per_pixel = 16; + int format = HAL_PIXEL_FORMAT_RGB_565; +#else + int bits_per_pixel = 32; + int format = HAL_PIXEL_FORMAT_RGBA_8888; +#endif + + alloc_device_t* gralloc_device; + status = gralloc_open(module, &gralloc_device); + if (status < 0) { + ALOGE("Fail to Open gralloc device"); + return status; + } + + framebuffer_device_t *dev = (framebuffer_device_t *)malloc(sizeof(framebuffer_device_t)); + if (dev == NULL) { + ALOGE("Failed to allocate memory for dev"); + gralloc_close(gralloc_device); + return status; + } + + private_module_t* m = (private_module_t*)module; + status = init_fb(m); + if (status < 0) { + ALOGE("Fail to init framebuffer"); + free(dev); + gralloc_close(gralloc_device); + return status; + } + + /* initialize our state here */ + memset(dev, 0, sizeof(*dev)); + + /* initialize the procs */ + dev->common.tag = HARDWARE_DEVICE_TAG; + dev->common.version = 0; + dev->common.module = const_cast(module); + dev->common.close = fb_close; + 
dev->setSwapInterval = 0; + dev->post = fb_post; + dev->setUpdateRect = 0; + dev->compositionComplete = 0; +#if HWC_EXIST + m->queue = new hwc_callback_queue_t; +#endif + pthread_mutex_init(&m->queue_lock, NULL); + + int stride = m->line_length / (bits_per_pixel >> 3); + const_cast(dev->flags) = 0; + const_cast(dev->width) = m->xres; + const_cast(dev->height) = m->yres; + const_cast(dev->stride) = stride; + const_cast(dev->format) = format; + const_cast(dev->xdpi) = m->xdpi; + const_cast(dev->ydpi) = m->ydpi; + const_cast(dev->fps) = m->fps; + const_cast(dev->minSwapInterval) = 1; + const_cast(dev->maxSwapInterval) = 1; + *device = &dev->common; + status = 0; + + return status; +} diff --git a/gralloc/gr.h b/gralloc/gr.h new file mode 100644 index 0000000..6fe26b6 --- /dev/null +++ b/gralloc/gr.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GR_H_ +#define GR_H_ + +#include +#include +#include +#include +#include +#include + +#include + +/*****************************************************************************/ + +struct private_module_t; +struct private_handle_t; + +#ifndef SUPPORT_DIRECT_FB +inline size_t roundUpToPageSize(size_t x) { + return (x + (PAGE_SIZE-1)) & ~(PAGE_SIZE-1); +} +#endif + +int grallocMap(gralloc_module_t const* module, private_handle_t *hnd); +int grallocUnmap(gralloc_module_t const* module, private_handle_t *hnd); + +#endif /* GR_H_ */ diff --git a/gralloc/gralloc.cpp b/gralloc/gralloc.cpp new file mode 100644 index 0000000..d028d15 --- /dev/null +++ b/gralloc/gralloc.cpp @@ -0,0 +1,591 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
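init_fb() above derives the display's refresh rate and DPI from fb_var_screeninfo alone: pixclock is picoseconds per pixel, so one frame takes pixclock times the margin-inclusive frame area, and dividing 10^15 by that product yields milli-hertz, which is why the fallback is 60*1000 and fps is refreshRate/1000. A compact sketch of the same arithmetic with hypothetical panel timings (the timing and size values below are made up for illustration):

/* Sketch of the refresh-rate and DPI math in init_fb(), using made-up
 * 1080p-style timings; the formulas are the ones from framebuffer.cpp. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* hypothetical fb_var_screeninfo fields */
    uint64_t xres = 1080, yres = 1920;
    uint64_t left_margin = 32, right_margin = 32;   /* horizontal porches */
    uint64_t upper_margin = 8,  lower_margin = 8;   /* vertical porches   */
    uint64_t pixclock = 7200;                       /* picoseconds per pixel */
    uint64_t width_mm = 62, height_mm = 110;        /* physical panel size */

    /* refresh in milli-Hz: 10^15 = 10^12 ps/s scaled by 1000 */
    uint64_t refresh_mhz = 1000000000000000ULL /
        ((upper_margin + lower_margin + yres) *
         (left_margin + right_margin + xres) * pixclock);
    if (refresh_mhz == 0)
        refresh_mhz = 60 * 1000;                    /* same fallback as init_fb() */

    float xdpi = (xres * 25.4f) / width_mm;         /* 25.4 mm per inch */
    float ydpi = (yres * 25.4f) / height_mm;
    float fps  = refresh_mhz / 1000.0f;

    printf("fps=%.2f xdpi=%.1f ydpi=%.1f\n", fps, xdpi, ydpi);
    return 0;
}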
+ */ + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include + +#include "gralloc_priv.h" +#include "exynos_format.h" +#include "gr.h" + +#define MB_1 (1024*1024) +#define PRIV_SIZE 64 + +#define HEAD_SIZE (16*16) + +#if MALI_AFBC_GRALLOC == 1 /* It's for AFBC support on GPU DDK*/ +//#include "gralloc_buffer_priv.h" +#include "format_chooser.h" +#define GRALLOC_ARM_INTFMT_EXTENSION_BIT_START 32 +/* This format will be use AFBC */ +#define GRALLOC_ARM_INTFMT_AFBC (1ULL << (GRALLOC_ARM_INTFMT_EXTENSION_BIT_START+0)) + +#define AFBC_PIXELS_PER_BLOCK 16 +#define AFBC_BODY_BUFFER_BYTE_ALIGNMENT 1024 +#define AFBC_HEADER_BUFFER_BYTES_PER_BLOCKENTRY 16 +#endif + +/*****************************************************************************/ + +struct gralloc_context_t { + alloc_device_t device; + /* our private data here */ +}; + +static int gralloc_alloc_buffer(alloc_device_t* dev, + size_t size, int usage, buffer_handle_t* pHandle); + +/*****************************************************************************/ + +int fb_device_open(const hw_module_t* module, const char* name, + hw_device_t** device); + +static int gralloc_device_open(const hw_module_t* module, const char* name, + hw_device_t** device); + +extern int gralloc_lock(gralloc_module_t const* module, + buffer_handle_t handle, int usage, + int l, int t, int w, int h, + void** vaddr); + +extern int gralloc_unlock(gralloc_module_t const* module, + buffer_handle_t handle); + +extern int gralloc_lock_ycbcr(gralloc_module_t const* module, + buffer_handle_t handle, int usage, + int l, int t, int w, int h, + android_ycbcr *ycbcr); + +extern int gralloc_register_buffer(gralloc_module_t const* module, + buffer_handle_t handle); + +extern int gralloc_unregister_buffer(gralloc_module_t const* module, + buffer_handle_t handle); + +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC +extern int gralloc_get_tile_num(unsigned int value); + +extern bool gralloc_crc_allocation_check(int format, int width, int height, int flags); +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + +/*****************************************************************************/ + +static struct hw_module_methods_t gralloc_module_methods = { +open: gralloc_device_open +}; + +/* version_major is for module_api_verison + * lock_ycbcr is for MODULE_API_VERSION_0_2 + */ +struct private_module_t HAL_MODULE_INFO_SYM = { +base: { + common: { + tag: HARDWARE_MODULE_TAG, + module_api_version: GRALLOC_MODULE_API_VERSION_0_2, + hal_api_version: 0, + id: GRALLOC_HARDWARE_MODULE_ID, + name: "Graphics Memory Allocator Module", + author: "The Android Open Source Project", + methods: &gralloc_module_methods + }, + registerBuffer: gralloc_register_buffer, + unregisterBuffer: gralloc_unregister_buffer, + lock: gralloc_lock, + unlock: gralloc_unlock, + perform: NULL, + lock_ycbcr: gralloc_lock_ycbcr, +}, +framebuffer: 0, +flags: 0, +numBuffers: 0, +bufferMask: 0, +lock: PTHREAD_MUTEX_INITIALIZER, +currentBuffer: 0, +ionfd: -1, +}; + +/*****************************************************************************/ + +static unsigned int _select_heap(int usage) +{ + unsigned int heap_mask; +#ifdef USES_EXYNOS_COMMON_GRALLOC + if (usage & GRALLOC_USAGE_PROTECTED) { + if (usage & GRALLOC_USAGE_PRIVATE_NONSECURE && !(usage & GRALLOC_USAGE_PHYSICALLY_LINEAR)) + heap_mask = ION_HEAP_SYSTEM_MASK; + else + heap_mask = ION_HEAP_EXYNOS_CONTIG_MASK; + } else if (usage & 
GRALLOC_USAGE_CAMERA_RESERVED) { + heap_mask = ION_HEAP_EXYNOS_CONTIG_MASK; + } else { + heap_mask = ION_HEAP_SYSTEM_MASK; + } +#else + if (usage & GRALLOC_USAGE_PROTECTED) + heap_mask = ION_HEAP_EXYNOS_CONTIG_MASK; + else + heap_mask = ION_HEAP_SYSTEM_MASK; +#endif + + return heap_mask; +} + +/* + * Define GRALLOC_ARM_FORMAT_SELECTION_DISABLE to disable the format selection completely + */ +static int gralloc_alloc_rgb(int ionfd, int w, int h, int format, int usage, + unsigned int ion_flags, private_handle_t **hnd, int *stride) +{ + size_t size, bpr, alignment = 0, ext_size=256; + int bpp = 0, vstride, fd, err; + uint64_t internal_format; + + unsigned int heap_mask = _select_heap(usage); + int frameworkFormat = format; + int is_compressible = check_for_compression(w, h, format, usage); + internal_format = gralloc_select_format(format, usage, is_compressible); + + switch (format) { + case HAL_PIXEL_FORMAT_EXYNOS_ARGB_8888: + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_BGRA_8888: + bpp = 4; + break; + case HAL_PIXEL_FORMAT_RGB_888: + bpp = 3; + break; + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_RAW16: + case HAL_PIXEL_FORMAT_RAW_OPAQUE: + bpp = 2; + break; + case HAL_PIXEL_FORMAT_BLOB: + *stride = w; + vstride = h; + size = w * h; + break; + default: + return -EINVAL; + } + + if (format != HAL_PIXEL_FORMAT_BLOB) { + bpr = ALIGN(w, 16)* bpp; + vstride = ALIGN(h, 16); + + if (vstride < h + 2) + size = bpr * (h + 2); + else + size = bpr * vstride; + + *stride = bpr / bpp; + size = size + ext_size; + + if (is_compressible) + { + /* if is_compressible = 1, width is alread 16 align so we can use width instead of w_aligned*/ + int h_aligned = ALIGN( h, AFBC_PIXELS_PER_BLOCK ); + int nblocks = w / AFBC_PIXELS_PER_BLOCK * h_aligned / AFBC_PIXELS_PER_BLOCK; + + if ( size != NULL ) + { + size = w * h_aligned * bpp + + ALIGN( nblocks * AFBC_HEADER_BUFFER_BYTES_PER_BLOCKENTRY, AFBC_BODY_BUFFER_BYTE_ALIGNMENT ); + } + } + } + + if (usage & GRALLOC_USAGE_PROTECTED) { + if ((usage & GRALLOC_USAGE_PRIVATE_NONSECURE) && (usage & GRALLOC_USAGE_PHYSICALLY_LINEAR)) + ion_flags |= ION_EXYNOS_G2D_WFD_MASK; + else if (usage & GRALLOC_USAGE_VIDEO_EXT) + ion_flags |= (ION_EXYNOS_VIDEO_EXT_MASK | ION_FLAG_PROTECTED); + else if ((usage & GRALLOC_USAGE_HW_COMPOSER) && + !(usage & GRALLOC_USAGE_HW_TEXTURE) && !(usage & GRALLOC_USAGE_HW_RENDER)) { + // For DRM Playback + ion_flags |= (ION_EXYNOS_FIMD_VIDEO_MASK | ION_FLAG_PROTECTED); + } + else + ion_flags |= (ION_EXYNOS_MFC_OUTPUT_MASK | ION_FLAG_PROTECTED); + } + + err = ion_alloc_fd(ionfd, size, alignment, heap_mask, ion_flags, + &fd); + if (err) { + return err; + } + +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC + if (gralloc_crc_allocation_check(format, w, h, usage)) { + int fd1 = -1, num_tiles_x, num_tiles_y, crc_size; + num_tiles_x = gralloc_get_tile_num(w); + num_tiles_y = gralloc_get_tile_num(h); + crc_size = num_tiles_x * num_tiles_y * sizeof(long long unsigned int); + err = ion_alloc_fd(ionfd, crc_size + sizeof(struct gralloc_crc_header), alignment, heap_mask, ion_flags, + &fd1); + *hnd = new private_handle_t(fd, fd1, size, usage, w, h, format, internal_format, frameworkFormat, *stride, + vstride, is_compressible); + } + else +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + { + *hnd = new private_handle_t(fd, size, usage, w, h, format, internal_format, frameworkFormat, *stride, + vstride, is_compressible); + } + + return err; +} + +static int gralloc_alloc_framework_yuv(int ionfd, int w, int h, int format, int 
frameworkFormat, + int usage, unsigned int ion_flags, + private_handle_t **hnd, int *stride) +{ + size_t size=0, ext_size=256; + int err, fd; + unsigned int heap_mask = _select_heap(usage); + int is_compressible = 0; + uint64_t internal_format = 0; + + switch (format) { + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + *stride = ALIGN(w, 16); + size = (*stride * h) + (ALIGN(*stride / 2, 16) * h) + ext_size; + break; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + *stride = w; + size = *stride * h * 3 / 2 + ext_size; + break; + default: + ALOGE("invalid yuv format %d\n", format); + return -EINVAL; + } + + if (frameworkFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) + *stride = 0; + + err = ion_alloc_fd(ionfd, size, 0, heap_mask, ion_flags, &fd); + if (err) + return err; + + *hnd = new private_handle_t(fd, size, usage, w, h, format, internal_format, frameworkFormat, *stride, h, is_compressible); + return err; +} + +static int gralloc_alloc_yuv(int ionfd, int w, int h, int format, + int usage, unsigned int ion_flags, + private_handle_t **hnd, int *stride) +{ + size_t luma_size=0, chroma_size=0, ext_size=256; + int err, planes, fd = -1, fd1 = -1, fd2 = -1; + size_t luma_vstride; + unsigned int heap_mask = _select_heap(usage); + // Keep around original requested format for later validation + int frameworkFormat = format; + int is_compressible = 0; + uint64_t internal_format = 0; + + *stride = ALIGN(w, 16); + + if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) { + ALOGV("HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED : usage(%x), flags(%x)\n", usage, ion_flags); + if ((usage & GRALLOC_USAGE_HW_CAMERA_ZSL) == GRALLOC_USAGE_HW_CAMERA_ZSL) { + format = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV + } else if (usage & GRALLOC_USAGE_HW_TEXTURE) { + format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; //NV21M + } else if (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) { + format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; //NV21M + } + } + else if (format == HAL_PIXEL_FORMAT_YCbCr_420_888) { + // Flexible framework-accessible YUV format; map to NV21 for now + format = HAL_PIXEL_FORMAT_YCrCb_420_SP; + } + + if (usage & GRALLOC_USAGE_PROTECTED) { + ion_flags |= ION_FLAG_PROTECTED; + if (usage & GRALLOC_USAGE_VIDEO_EXT) + ion_flags |= ION_EXYNOS_VIDEO_EXT_MASK; + else if (usage & GRALLOC_USAGE_PROTECTED_DPB) + ion_flags |= ION_EXYNOS_VIDEO_EXT2_MASK; + else if ((usage & GRALLOC_USAGE_HW_COMPOSER) && + !(usage & GRALLOC_USAGE_HW_TEXTURE) && !(usage & GRALLOC_USAGE_HW_RENDER)) { + // For DRM Playback + ion_flags |= ION_EXYNOS_FIMD_VIDEO_MASK; + } + else + ion_flags |= ION_EXYNOS_MFC_OUTPUT_MASK; + } else if (usage & GRALLOC_USAGE_CAMERA_RESERVED) + ion_flags |= ION_EXYNOS_MFC_OUTPUT_MASK; + + switch (format) { + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + { + *stride = ALIGN(w, 32); + luma_vstride = ALIGN(h, 16); + luma_size = luma_vstride * *stride + ext_size; + chroma_size = (luma_vstride / 2) * ALIGN(*stride / 2, 16) + ext_size; + planes = 3; + break; + } + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + { + size_t chroma_vstride = ALIGN(h / 2, 32); + luma_vstride = ALIGN(h, 32); + luma_size = luma_vstride * *stride + ext_size; + chroma_size = chroma_vstride * *stride + ext_size; + planes = 2; + break; + } + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + return gralloc_alloc_framework_yuv(ionfd, w, h, format, frameworkFormat, usage, + ion_flags, hnd, stride); + case 
HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + { + luma_vstride = ALIGN(h, 16); + luma_size = *stride * luma_vstride+ext_size; + chroma_size = *stride * ALIGN(luma_vstride / 2, 8)+ext_size; + planes = 2; + break; + } + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + { + luma_vstride = ALIGN(h, 16); + luma_size = *stride * luma_vstride+ext_size; + chroma_size = *stride * ALIGN(luma_vstride / 2, 8)+ext_size; + planes = 3; + break; + } + case HAL_PIXEL_FORMAT_YCbCr_422_I: + { + luma_vstride = h; + luma_size = luma_vstride * *stride * 2+ext_size; + chroma_size = 0; + planes = 1; + break; + } + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + { + luma_vstride = ALIGN(h, 16); + chroma_size = ALIGN((*stride * luma_vstride / 2) + ext_size, 16); + luma_size = (*stride * luma_vstride) + ext_size + chroma_size; + planes = 1; + break; + } + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + { + luma_vstride = ALIGN(h, 16); + luma_size = (*stride * luma_vstride + 64) + ((ALIGN(w / 4, 16) * h) + 64) + ext_size; + chroma_size = (*stride * ALIGN(luma_vstride / 2, 8) + 64) + ((ALIGN(w / 4, 16) * (h / 2)) + 64) + ext_size; + planes = 2; + break; + } + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B: + { + luma_vstride = ALIGN(h, 16); + chroma_size = ALIGN((*stride * luma_vstride / 2) + ext_size, 16) + (ALIGN(w / 4, 16) * (luma_vstride / 2)) + 64; + luma_size = (*stride * luma_vstride) + ext_size + (ALIGN(w / 4, 16) * luma_vstride) + 64 + chroma_size; + planes = 1; + break; + } + default: + ALOGE("invalid yuv format %d\n", format); + return -EINVAL; + } + + err = ion_alloc_fd(ionfd, luma_size, 0, heap_mask, ion_flags, &fd); + if (err) { + if (usage & GRALLOC_USAGE_PROTECTED_DPB) { + ion_flags &= ~ION_EXYNOS_VIDEO_EXT2_MASK; + ion_flags |= ION_EXYNOS_MFC_OUTPUT_MASK; + err = ion_alloc_fd(ionfd, luma_size, 0, heap_mask, ion_flags, &fd); + if (err) + return err; + } else { + return err; + } + } + if (planes == 1) { + *hnd = new private_handle_t(fd, luma_size, usage, w, h, + format, internal_format, frameworkFormat, *stride, luma_vstride, is_compressible); + } else { + err = ion_alloc_fd(ionfd, chroma_size, 0, heap_mask, ion_flags, &fd1); + if (err) + goto err1; + if (planes == 3) { + if (format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV) + err = ion_alloc_fd(ionfd, PRIV_SIZE, 0, ION_HEAP_SYSTEM_MASK, 0, &fd2); + else + err = ion_alloc_fd(ionfd, chroma_size, 0, heap_mask, ion_flags, &fd2); + if (err) + goto err2; + + *hnd = new private_handle_t(fd, fd1, fd2, luma_size, usage, w, h, + format, internal_format, frameworkFormat, *stride, luma_vstride, is_compressible); + } else { + *hnd = new private_handle_t(fd, fd1, luma_size, usage, w, h, format, internal_format, frameworkFormat, + *stride, luma_vstride, is_compressible); + } + } + return err; + +err2: + close(fd1); +err1: + close(fd); + return err; +} + +static int gralloc_alloc(alloc_device_t* dev, + int w, int h, int format, int usage, + buffer_handle_t* pHandle, int* pStride) +{ + int stride; + int err; + unsigned int ion_flags = 0; + private_handle_t *hnd = NULL; + + if (!pHandle || !pStride || w <= 0 || h <= 0) + return -EINVAL; + + if ((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_OFTEN) { + ion_flags = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC; + if (usage & GRALLOC_USAGE_HW_RENDER) + ion_flags |= ION_FLAG_SYNC_FORCE; + } + + if (usage & GRALLOC_USAGE_NOZEROED) + ion_flags |= ION_FLAG_NOZEROED; + + private_module_t* m = reinterpret_cast + 
(dev->common.module); + gralloc_module_t* module = reinterpret_cast<gralloc_module_t *> + (dev->common.module); + + err = gralloc_alloc_rgb(m->ionfd, w, h, format, usage, ion_flags, &hnd, + &stride); + if (err) + err = gralloc_alloc_yuv(m->ionfd, w, h, format, usage, ion_flags, + &hnd, &stride); + if (err) + goto err; + + *pHandle = hnd; + *pStride = stride; + return 0; +err: + if (!hnd) + return err; + close(hnd->fd); + if (hnd->fd1 >= 0) + close(hnd->fd1); + if (hnd->fd2 >= 0) + close(hnd->fd2); + delete hnd; + return err; +} + +static int gralloc_free(alloc_device_t* dev, + buffer_handle_t handle) +{ + if (private_handle_t::validate(handle) < 0) + return -EINVAL; + + private_handle_t const* hnd = reinterpret_cast<private_handle_t const *>(handle); + gralloc_module_t* module = reinterpret_cast<gralloc_module_t *>( + dev->common.module); + if (hnd->base) + grallocUnmap(module, const_cast<private_handle_t *>(hnd)); + + close(hnd->fd); + if (hnd->fd1 >= 0) + close(hnd->fd1); + if (hnd->fd2 >= 0) + close(hnd->fd2); + + delete hnd; + return 0; +} + +/*****************************************************************************/ + +static int gralloc_close(struct hw_device_t *dev) +{ + gralloc_context_t* ctx = reinterpret_cast<gralloc_context_t *>(dev); + if (ctx) { + /* TODO: keep a list of all buffer_handle_t created, and free them + * all here. + */ + free(ctx); + } + return 0; +} + +int gralloc_device_open(const hw_module_t* module, const char* name, + hw_device_t** device) +{ + int status = -EINVAL; + if (!strcmp(name, GRALLOC_HARDWARE_GPU0)) { + gralloc_context_t *dev; + dev = (gralloc_context_t*)malloc(sizeof(*dev)); + + /* initialize our state here */ + memset(dev, 0, sizeof(*dev)); + + /* initialize the procs */ + dev->device.common.tag = HARDWARE_DEVICE_TAG; + dev->device.common.version = 0; + dev->device.common.module = const_cast<hw_module_t *>(module); + dev->device.common.close = gralloc_close; + + dev->device.alloc = gralloc_alloc; + dev->device.free = gralloc_free; + + private_module_t *p = reinterpret_cast<private_module_t *>(dev->device.common.module); + if (p->ionfd == -1) + p->ionfd = ion_open(); + + *device = &dev->device.common; + status = 0; + } else { + status = fb_device_open(module, name, device); + } + return status; +} diff --git a/gralloc/mapper.cpp b/gralloc/mapper.cpp new file mode 100644 index 0000000..7bc7004 --- /dev/null +++ b/gralloc/mapper.cpp @@ -0,0 +1,495 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include + +#include +#include + +#include +#include +#include "gralloc_priv.h" +#include "exynos_format.h" + +#define INT_TO_PTR(var) ((void *)(unsigned long)var) +#define PRIV_SIZE 64 + +#if MALI_AFBC_GRALLOC == 1 +//#include "gralloc_buffer_priv.h" +#endif + +#include "format_chooser.h" + + +/*****************************************************************************/ +int getIonFd(gralloc_module_t const *module) +{ + private_module_t* m = const_cast(reinterpret_cast(module)); + if (m->ionfd == -1) + m->ionfd = ion_open(); + return m->ionfd; +} + +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC +int gralloc_get_tile_num(unsigned int value) +{ + int tile_num; + tile_num = ((value + CRC_TILE_SIZE - 1) & ~(CRC_TILE_SIZE - 1)) / CRC_TILE_SIZE; + return tile_num; +} + +bool gralloc_crc_allocation_check(int format, int width, int height, int flags) +{ + bool supported = false; + switch (format) { + case HAL_PIXEL_FORMAT_EXYNOS_ARGB_8888: + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_BGRA_8888: + case HAL_PIXEL_FORMAT_RGB_888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_RAW_SENSOR: + case HAL_PIXEL_FORMAT_RAW_OPAQUE: + case HAL_PIXEL_FORMAT_BLOB: + if (!(flags & GRALLOC_USAGE_PROTECTED) && + (width >= CRC_LIMIT_WIDTH) && (height >= CRC_LIMIT_HEIGHT)) + supported = true; + break; + default: + break; + } + + return supported; +} +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + +static int gralloc_map(gralloc_module_t const* module, buffer_handle_t handle) +{ + size_t chroma_vstride = 0; + size_t chroma_size = 0; + size_t ext_size = 256; + void *privAddress; + + private_handle_t *hnd = (private_handle_t*)handle; + + if (hnd->flags & GRALLOC_USAGE_PROTECTED || hnd->flags & GRALLOC_USAGE_NOZEROED) { + hnd->base = hnd->base1 = hnd->base2 = 0; + } + + switch (hnd->format) { + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + chroma_vstride = ALIGN(hnd->height / 2, 32); + chroma_size = chroma_vstride * hnd->stride + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + chroma_size = hnd->stride * ALIGN(hnd->vstride / 2, 8) + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + chroma_size = (hnd->vstride / 2) * ALIGN(hnd->stride / 2, 16) + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + chroma_size = (hnd->stride * ALIGN(hnd->vstride / 2, 8) + 64) + ((ALIGN(hnd->width / 4, 16) * (hnd->height / 2)) + 64) + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + chroma_size = hnd->stride * ALIGN(hnd->vstride / 2, 8) + ext_size; + privAddress = mmap(0, PRIV_SIZE, PROT_READ|PROT_WRITE, MAP_SHARED, hnd->fd2, 0); + if (privAddress == MAP_FAILED) { + ALOGE("%s: could not mmap %s", __func__, strerror(errno)); + } else { + hnd->base2 = (uint64_t)privAddress; + ion_sync_fd(getIonFd(module), hnd->fd2); + } + break; + default: +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC + if (gralloc_crc_allocation_check(hnd->format, hnd->width, hnd->height, hnd->flags)) { + int num_tiles_x, num_tiles_y; + num_tiles_x = gralloc_get_tile_num(hnd->width); + num_tiles_y = gralloc_get_tile_num(hnd->height); + chroma_size = num_tiles_x * num_tiles_y * sizeof(long long unsigned int) + + sizeof(struct gralloc_crc_header); + } +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + break; + } + + if 
((hnd->flags & GRALLOC_USAGE_PROTECTED) && + !(hnd->flags & GRALLOC_USAGE_PRIVATE_NONSECURE)) { + return 0; + } + + if (!(hnd->flags & GRALLOC_USAGE_PROTECTED) && !(hnd->flags & GRALLOC_USAGE_NOZEROED)) { + void* mappedAddress = mmap(0, hnd->size, PROT_READ|PROT_WRITE, MAP_SHARED, + hnd->fd, 0); + if (mappedAddress == MAP_FAILED) { + ALOGE("%s: could not mmap %s", __func__, strerror(errno)); + return -errno; + } + ALOGV("%s: base %p %d %d %d %d\n", __func__, mappedAddress, hnd->size, + hnd->width, hnd->height, hnd->stride); + hnd->base = (uint64_t)mappedAddress; + ion_sync_fd(getIonFd(module), hnd->fd); + + if (hnd->fd1 >= 0) { + void *mappedAddress1 = (void*)mmap(0, chroma_size, PROT_READ|PROT_WRITE, + MAP_SHARED, hnd->fd1, 0); + hnd->base1 = (uint64_t)mappedAddress1; + ion_sync_fd(getIonFd(module), hnd->fd1); + } + if (hnd->fd2 >= 0) { + if (hnd->format != HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV) { + void *mappedAddress2 = (void*)mmap(0, chroma_size, PROT_READ|PROT_WRITE, MAP_SHARED, hnd->fd2, 0); + hnd->base2 = (uint64_t)mappedAddress2; + ion_sync_fd(getIonFd(module), hnd->fd2); + } + } + } + + return 0; +} + +static int gralloc_unmap(gralloc_module_t const* module, buffer_handle_t handle) +{ + private_handle_t* hnd = (private_handle_t*)handle; + size_t chroma_vstride = 0; + size_t chroma_size = 0; + size_t ext_size = 256; + + switch (hnd->format) { + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + chroma_vstride = ALIGN(hnd->height / 2, 32); + chroma_size = chroma_vstride * hnd->stride + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + chroma_size = hnd->stride * ALIGN(hnd->vstride / 2, 8) + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + chroma_size = (hnd->vstride / 2) * ALIGN(hnd->stride / 2, 16) + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + chroma_size = (hnd->stride * ALIGN(hnd->vstride / 2, 8) + 64) + ((ALIGN(hnd->width / 4, 16) * (hnd->height / 2)) + 64) + ext_size; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + chroma_size = hnd->stride * ALIGN(hnd->vstride / 2, 8) + ext_size; + if (munmap(INT_TO_PTR(hnd->base2), PRIV_SIZE) < 0) { + ALOGE("%s :could not unmap %s %llx %d", __func__, strerror(errno), hnd->base2, chroma_size); + } + hnd->base2 = 0; + break; + default: +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC + if (gralloc_crc_allocation_check(hnd->format, hnd->width, hnd->height, hnd->flags)) { + int num_tiles_x, num_tiles_y; + num_tiles_x = gralloc_get_tile_num(hnd->width); + num_tiles_y = gralloc_get_tile_num(hnd->height); + chroma_size = num_tiles_x * num_tiles_y * sizeof(long long unsigned int) + + sizeof(struct gralloc_crc_header); + } +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + break; + } + + if (!hnd->base) + return 0; + + if (munmap(INT_TO_PTR(hnd->base), hnd->size) < 0) { + ALOGE("%s :could not unmap %s %llx %d", __func__, strerror(errno), + hnd->base, hnd->size); + } + ALOGV("%s: base %llx %d %d %d %d\n", __func__, hnd->base, hnd->size, + hnd->width, hnd->height, hnd->stride); + hnd->base = 0; + if (hnd->fd1 >= 0) { + if (!hnd->base1) + return 0; + if (munmap(INT_TO_PTR(hnd->base1), chroma_size) < 0) { + ALOGE("%s :could not unmap %s %llx %d", __func__, strerror(errno), + hnd->base1, chroma_size); + } + hnd->base1 = 0; + } + if (hnd->fd2 >= 0) { + if (!hnd->base2) + return 0; + if (munmap(INT_TO_PTR(hnd->base2), chroma_size) < 0) { + ALOGE("%s 
:could not unmap %s %llx %d", __func__, strerror(errno), + hnd->base2, chroma_size); + } + hnd->base2 = 0; + } + return 0; +} + +/*****************************************************************************/ + +int grallocMap(gralloc_module_t const* module, private_handle_t *hnd) +{ + return gralloc_map(module, hnd); +} + +int grallocUnmap(gralloc_module_t const* module, private_handle_t *hnd) +{ + return gralloc_unmap(module, hnd); +} + +static pthread_mutex_t sMapLock = PTHREAD_MUTEX_INITIALIZER; + +/*****************************************************************************/ + +int gralloc_register_buffer(gralloc_module_t const* module, + buffer_handle_t handle) +{ + int err; + if (private_handle_t::validate(handle) < 0) + return -EINVAL; + + err = gralloc_map(module, handle); + + private_handle_t* hnd = (private_handle_t*)handle; + ALOGV("%s: base %llx %d %d %d %d\n", __func__, hnd->base, hnd->size, + hnd->width, hnd->height, hnd->stride); + + int ret; + ret = ion_import(getIonFd(module), hnd->fd, &hnd->handle); + if (ret) + ALOGE("error importing handle %d %x\n", hnd->fd, hnd->format); + if (hnd->fd1 >= 0) { + ret = ion_import(getIonFd(module), hnd->fd1, &hnd->handle1); + if (ret) + ALOGE("error importing handle1 %d %x\n", hnd->fd1, hnd->format); + } + if (hnd->fd2 >= 0) { + ret = ion_import(getIonFd(module), hnd->fd2, &hnd->handle2); + if (ret) + ALOGE("error importing handle2 %d %x\n", hnd->fd2, hnd->format); + } + + return err; +} + +int gralloc_unregister_buffer(gralloc_module_t const* module, + buffer_handle_t handle) +{ + if (private_handle_t::validate(handle) < 0) + return -EINVAL; + + private_handle_t* hnd = (private_handle_t*)handle; + ALOGV("%s: base %llx %d %d %d %d\n", __func__, hnd->base, hnd->size, + hnd->width, hnd->height, hnd->stride); + + gralloc_unmap(module, handle); + + if (hnd->handle) + ion_free(getIonFd(module), hnd->handle); + if (hnd->handle1) + ion_free(getIonFd(module), hnd->handle1); + if (hnd->handle2) + ion_free(getIonFd(module), hnd->handle2); + + return 0; +} + +int gralloc_lock(gralloc_module_t const* module, + buffer_handle_t handle, int usage, + int l, int t, int w, int h, + void** vaddr) +{ + // this is called when a buffer is being locked for software + // access. in this implementation we have nothing to do since + // no synchronization with the h/w is needed. + // typically this is used to wait for the h/w to finish with + // this buffer if relevant. the data cache may need to be + // flushed or invalidated depending on the usage bits and the + // hardware. 
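+    //
+    // (Illustrative sketch, not part of the original HAL code.)  A client is
+    // expected to reach this entry point through the standard gralloc module
+    // interface; the buffer handle, geometry and usage values below are
+    // hypothetical:
+    //
+    //   hw_module_t const *mod;
+    //   hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &mod);
+    //   gralloc_module_t const *gr = (gralloc_module_t const *)mod;
+    //   void *vaddrs[3] = {NULL, NULL, NULL};
+    //   int sw_usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN;
+    //   if (gr->lock(gr, buf, sw_usage, 0, 0, width, height, vaddrs) == 0) {
+    //       /* CPU access through vaddrs[0] (and vaddrs[1]/[2] for planar YUV) */
+    //       gr->unlock(gr, buf);
+    //   }
+    //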
+ + int ext_size = 256; + + if (private_handle_t::validate(handle) < 0) + return -EINVAL; + + private_handle_t* hnd = (private_handle_t*)handle; + + if (hnd->frameworkFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) { + ALOGE("gralloc_lock can't be used with YCbCr_420_888 format"); + return -EINVAL; + } + +#ifdef GRALLOC_RANGE_FLUSH + if(usage & GRALLOC_USAGE_SW_WRITE_MASK) + { + hnd->lock_usage = GRALLOC_USAGE_SW_WRITE_RARELY; + hnd->lock_offset = t * hnd->stride; + hnd->lock_len = h * hnd->stride; + } + else + { + hnd->lock_usage = 0; + hnd->lock_offset = 0; + hnd->lock_len = 0; + } +#endif + + if (!hnd->base) + gralloc_map(module, hnd); + *vaddr = INT_TO_PTR(hnd->base); + + if (hnd->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN) + vaddr[1] = vaddr[0] + (hnd->stride * hnd->vstride) + ext_size; + else if (hnd->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B) + vaddr[1] = vaddr[0] + (hnd->stride * hnd->vstride) + ext_size + (ALIGN(hnd->width / 4, 16) * hnd->vstride) + 64; + +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC + if (!gralloc_crc_allocation_check(hnd->format, hnd->width, hnd->height, hnd->flags)) +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + { + if (hnd->fd1 >= 0) + vaddr[1] = INT_TO_PTR(hnd->base1); + if (hnd->fd2 >= 0) + vaddr[2] = INT_TO_PTR(hnd->base2); + } + + return 0; +} + +int gralloc_unlock(gralloc_module_t const* module, + buffer_handle_t handle) +{ + // we're done with a software buffer. nothing to do in this + // implementation. typically this is used to flush the data cache. + if (private_handle_t::validate(handle) < 0) + return -EINVAL; + + private_handle_t* hnd = (private_handle_t*)handle; + + if (!((hnd->flags & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_OFTEN)) + return 0; + +#ifdef GRALLOC_RANGE_FLUSH + if(hnd->lock_usage & GRALLOC_USAGE_SW_WRITE_MASK) + { + if(((hnd->format == HAL_PIXEL_FORMAT_RGBA_8888) + || (hnd->format == HAL_PIXEL_FORMAT_RGBX_8888)) && (hnd->lock_offset != 0)) + ion_sync_fd_partial(getIonFd(module), hnd->fd, hnd->lock_offset * 4, hnd->lock_len * 4); + else + ion_sync_fd(getIonFd(module), hnd->fd); + +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC + if (!gralloc_crc_allocation_check(hnd->format, hnd->width, hnd->height, hnd->flags)) +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + { + if (hnd->fd1 >= 0) + ion_sync_fd(getIonFd(module), hnd->fd1); + if (hnd->fd2 >= 0) + ion_sync_fd(getIonFd(module), hnd->fd2); + } + + hnd->lock_usage = 0; + } +#else + ion_sync_fd(getIonFd(module), hnd->fd); +#ifdef USES_EXYNOS_CRC_BUFFER_ALLOC + if (!gralloc_crc_allocation_check(hnd->format, hnd->width, hnd->height, hnd->flags)) +#endif /* USES_EXYNOS_CRC_BUFFER_ALLOC */ + { + if (hnd->fd1 >= 0) + ion_sync_fd(getIonFd(module), hnd->fd1); + if (hnd->fd2 >= 0) + ion_sync_fd(getIonFd(module), hnd->fd2); + } +#endif + + return 0; +} + +int gralloc_lock_ycbcr(gralloc_module_t const* module, + buffer_handle_t handle, int usage, + int l, int t, int w, int h, + android_ycbcr *ycbcr) +{ + if (private_handle_t::validate(handle) < 0) + return -EINVAL; + + if (!ycbcr) { + ALOGE("gralloc_lock_ycbcr got NULL ycbcr struct"); + return -EINVAL; + } + + private_handle_t* hnd = (private_handle_t*)handle; + + // Calculate offsets to underlying YUV data + size_t yStride; + size_t cStride; + size_t yOffset; + size_t uOffset; + size_t vOffset; + size_t cStep; + switch (hnd->format) { + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + yStride = cStride = hnd->width; + yOffset = 0; + vOffset = yStride * hnd->height; + uOffset = vOffset + 1; + cStep = 2; + ycbcr->y = (void *)(((unsigned long)hnd->base) + 
yOffset); + ycbcr->cb = (void *)(((unsigned long)hnd->base) + uOffset); + ycbcr->cr = (void *)(((unsigned long)hnd->base) + vOffset); + break; + case HAL_PIXEL_FORMAT_YV12: + yStride = ALIGN(hnd->width, 16); + cStride = ALIGN(yStride/2, 16); + ycbcr->y = (void*)((unsigned long)hnd->base); + ycbcr->cr = (void*)(((unsigned long)hnd->base) + yStride * hnd->height); + ycbcr->cb = (void*)(((unsigned long)hnd->base) + yStride * hnd->height + + cStride * hnd->height/2); + cStep = 1; + break; + default: + ALOGE("gralloc_lock_ycbcr unexpected internal format %x", + hnd->format); + return -EINVAL; + } + + ycbcr->ystride = yStride; + ycbcr->cstride = cStride; + ycbcr->chroma_step = cStep; + + // Zero out reserved fields + memset(ycbcr->reserved, 0, sizeof(ycbcr->reserved)); + + ALOGD("gralloc_lock_ycbcr success. format : %x, usage: %x, ycbcr.y: %p, .cb: %p, .cr: %p, " + ".ystride: %d , .cstride: %d, .chroma_step: %d", hnd->format, usage, + ycbcr->y, ycbcr->cb, ycbcr->cr, ycbcr->ystride, ycbcr->cstride, + ycbcr->chroma_step); + + return 0; +} diff --git a/include/ExynosBuffer.h b/include/ExynosBuffer.h new file mode 100644 index 0000000..e4c03dc --- /dev/null +++ b/include/ExynosBuffer.h @@ -0,0 +1,176 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosBuffer.h + * \brief header file for ExynosBuffer + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2011/06/02 + * + * Revision History: + * - 2010/06/03 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + * - 2012/03/14 : sangwoo.park(sw5771.park@samsung.com) \n + * Change file, struct name to ExynosXXX. + * + * - 2012/10/08 : sangwoo.park(sw5771.park@samsung.com) \n + * Add BUFFER_PLANE_NUM_DEFAULT, and, Increase Buffer as 4. + * + */ + +#ifndef EXYNOS_BUFFER_H_ +#define EXYNOS_BUFFER_H_ + +#include + +//! Buffer information +/*! + * \ingroup Exynos + */ +struct ExynosBuffer +{ +public: + //! Buffer type + enum BUFFER_TYPE + { + BUFFER_TYPE_BASE = 0, + BUFFER_TYPE_VIRT = 1 << 0, //!< virtual address + BUFFER_TYPE_PHYS = 1 << 1, //!< physical address + BUFFER_TYPE_FD = 1 << 2, //!< fd address + BUFFER_TYPE_RESERVED = 1 << 3, //!< reserved type + BUFFER_TYPE_MAX, + }; + + //! Buffer plane number + enum BUFFER_PLANE_NUM + { + BUFFER_PLANE_NUM_DEFAULT = 4, + }; + + //! Buffer virtual address + union { + char *p; //! single address. + char *extP[BUFFER_PLANE_NUM_DEFAULT]; //! Y Cb Cr. + } virt; + + //! Buffer physical address + union { + unsigned int p; //! single address. + unsigned int extP[BUFFER_PLANE_NUM_DEFAULT]; //! Y Cb Cr. + } phys; + + //! Buffer file descriptors + union { + int fd; //! single address. + int extFd[BUFFER_PLANE_NUM_DEFAULT]; //! Y Cb Cr. + } fd; + + //! Buffer reserved id + union { + int p; //! \n + int extP[BUFFER_PLANE_NUM_DEFAULT]; //! \n + } reserved; + + //! Buffer size + union { + unsigned int s; + unsigned int extS[BUFFER_PLANE_NUM_DEFAULT]; + } size; + +#ifdef __cplusplus + //! 
Constructor + ExynosBuffer() + { + for (int i = 0; i < BUFFER_PLANE_NUM_DEFAULT; i++) { + virt. extP [i] = NULL; + phys. extP [i] = 0; + fd. extFd[i] = -1; + reserved.extP [i] = 0; + size. extS [i] = 0; + } + } + + //! Constructor + ExynosBuffer(const ExynosBuffer *other) + { + for (int i = 0; i < BUFFER_PLANE_NUM_DEFAULT; i++) { + virt. extP [i] = other->virt.extP[i]; + phys. extP [i] = other->phys.extP[i]; + fd. extFd[i] = other->fd.extFd[i]; + reserved.extP [i] = other->reserved.extP[i]; + size. extS [i] = other->size.extS[i]; + } + } + + //! Operator(=) override + ExynosBuffer& operator =(const ExynosBuffer &other) + { + for (int i = 0; i < BUFFER_PLANE_NUM_DEFAULT; i++) { + virt. extP [i] = other.virt.extP[i]; + phys. extP [i] = other.phys.extP[i]; + fd. extFd[i] = other.fd.extFd[i]; + reserved.extP [i] = other.reserved.extP[i]; + size. extS [i] = other.size.extS[i]; + } + return *this; + } + + //! Operator(==) override + bool operator ==(const ExynosBuffer &other) const + { + bool ret = true; + + for (int i = 0; i < BUFFER_PLANE_NUM_DEFAULT; i++) { + if ( virt. extP [i] != other.virt. extP[i] + || phys. extP [i] != other.phys. extP[i] + || fd. extFd[i] != other.fd. extFd[i] + || reserved.extP [i] != other.reserved.extP[i] + || size. extS [i] != other.size. extS[i]) { + ret = false; + break; + } + } + + return ret; + } + + //! Operator(!=) override + bool operator !=(const ExynosBuffer &other) const + { + // use operator(==) + return !(*this == other); + } + + //! Get Buffer type + static int BUFFER_TYPE(ExynosBuffer *buf) + { + int type = BUFFER_TYPE_BASE; + if (buf->virt.p) + type |= BUFFER_TYPE_VIRT; + if (buf->phys.p) + type |= BUFFER_TYPE_PHYS; + if (buf->fd.fd >= 0) + type |= BUFFER_TYPE_FD; + if (buf->reserved.p) + type |= BUFFER_TYPE_RESERVED; + + return type; + } +#endif +}; + +#endif //EXYNOS_BUFFER_H_ diff --git a/include/ExynosExif.h b/include/ExynosExif.h new file mode 100644 index 0000000..b4b5647 --- /dev/null +++ b/include/ExynosExif.h @@ -0,0 +1,276 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// To prevent build conflict with the previous libhwjpeg +#ifdef USES_UNIVERSAL_LIBHWJPEG + +#ifndef __HARDWARE_EXYNOS_EXYNOS_EXIF_H__ +#define __HARDWARE_EXYNOS_EXYNOS_EXIF_H__ + +#include + +#include + +#define EXIF_LOG2(x) (log((double)(x)) / log(2.0)) +#define APEX_FNUM_TO_APERTURE(x) ((EXIF_LOG2((double)(x))) * 2.0) +#define APEX_EXPOSURE_TO_SHUTTER(x) ((x) >= 1 ? 
\ + (int)(-(EXIF_LOG2((double)(x)) + 0.5)) : \ + (int)(-(EXIF_LOG2((double)(x)) - 0.5))) +#define APEX_ISO_TO_FILMSENSITIVITY(x) ((int)(EXIF_LOG2((x) / 3.125) + 0.5)) + +#define NUM_SIZE 2 +#define IFD_SIZE 12 +#define OFFSET_SIZE 4 +#define EXIF_INFO_LIMIT_SIZE 12288 + +#define NUM_0TH_IFD_TIFF 10 +#define NUM_0TH_IFD_EXIF 29 +#define NUM_0TH_IFD_INTEROPERABILITY (2) +#define NUM_0TH_IFD_GPS 10 +#define NUM_1TH_IFD_TIFF 9 + +/* Type */ +#define EXIF_TYPE_BYTE 1 +#define EXIF_TYPE_ASCII 2 +#define EXIF_TYPE_SHORT 3 +#define EXIF_TYPE_LONG 4 +#define EXIF_TYPE_RATIONAL 5 +#define EXIF_TYPE_UNDEFINED 7 +#define EXIF_TYPE_SLONG 9 +#define EXIF_TYPE_SRATIONAL 10 + +#define EXIF_FILE_SIZE 28800 + +/* 0th IFD TIFF Tags */ +#define EXIF_TAG_IMAGE_WIDTH 0x0100 +#define EXIF_TAG_IMAGE_HEIGHT 0x0101 +#define EXIF_TAG_MAKE 0x010f +#define EXIF_TAG_MODEL 0x0110 +#define EXIF_TAG_ORIENTATION 0x0112 +#define EXIF_TAG_SOFTWARE 0x0131 +#define EXIF_TAG_DATE_TIME 0x0132 +#define EXIF_TAG_YCBCR_POSITIONING 0x0213 +#define EXIF_TAG_EXIF_IFD_POINTER 0x8769 +#define EXIF_TAG_GPS_IFD_POINTER 0x8825 + +/* 0th IFD Exif Private Tags */ +#define EXIF_TAG_EXPOSURE_TIME 0x829A +#define EXIF_TAG_FNUMBER 0x829D +#define EXIF_TAG_EXPOSURE_PROGRAM 0x8822 +#define EXIF_TAG_ISO_SPEED_RATING 0x8827 +#define EXIF_TAG_EXIF_VERSION 0x9000 +#define EXIF_TAG_DATE_TIME_ORG 0x9003 +#define EXIF_TAG_DATE_TIME_DIGITIZE 0x9004 +#define EXIF_TAG_SHUTTER_SPEED 0x9201 +#define EXIF_TAG_APERTURE 0x9202 +#define EXIF_TAG_BRIGHTNESS 0x9203 +#define EXIF_TAG_EXPOSURE_BIAS 0x9204 +#define EXIF_TAG_MAX_APERTURE 0x9205 +#define EXIF_TAG_METERING_MODE 0x9207 +#define EXIF_TAG_FLASH 0x9209 +#define EXIF_TAG_FOCAL_LENGTH 0x920A +#define EXIF_TAG_MAKER_NOTE 0x927C +#define EXIF_TAG_USER_COMMENT 0x9286 +#define EXIF_TAG_SUBSEC_TIME 0x9290 +#define EXIF_TAG_SUBSEC_TIME_ORIG 0x9291 +#define EXIF_TAG_SUBSEC_TIME_DIG 0x9292 +#define EXIF_TAG_COLOR_SPACE 0xA001 +#define EXIF_TAG_PIXEL_X_DIMENSION 0xA002 +#define EXIF_TAG_PIXEL_Y_DIMENSION 0xA003 +#define EXIF_TAG_RELATED_SOUND_FILE 0xA004 +#define EXIF_TAG_INTEROPERABILITY 0xA005 +#define EXIF_TAG_EXPOSURE_MODE 0xA402 +#define EXIF_TAG_WHITE_BALANCE 0xA403 +#define EXIF_TAG_FOCA_LENGTH_IN_35MM_FILM 0xA405 +#define EXIF_TAG_SCENCE_CAPTURE_TYPE 0xA406 +#define EXIF_TAG_IMAGE_UNIQUE_ID 0xA420 + +/* 0th IFD Interoperability Info Tags */ +#define EXIF_TAG_INTEROPERABILITY_INDEX 0x0001 +#define EXIF_TAG_INTEROPERABILITY_VERSION 0x0002 + +/* 0th IFD GPS Info Tags */ +#define EXIF_TAG_GPS_VERSION_ID 0x0000 +#define EXIF_TAG_GPS_LATITUDE_REF 0x0001 +#define EXIF_TAG_GPS_LATITUDE 0x0002 +#define EXIF_TAG_GPS_LONGITUDE_REF 0x0003 +#define EXIF_TAG_GPS_LONGITUDE 0x0004 +#define EXIF_TAG_GPS_ALTITUDE_REF 0x0005 +#define EXIF_TAG_GPS_ALTITUDE 0x0006 +#define EXIF_TAG_GPS_TIMESTAMP 0x0007 +#define EXIF_TAG_GPS_PROCESSING_METHOD 0x001B +#define EXIF_TAG_GPS_DATESTAMP 0x001D + +/* 1th IFD TIFF Tags */ +#define EXIF_TAG_COMPRESSION_SCHEME 0x0103 +#define EXIF_TAG_X_RESOLUTION 0x011A +#define EXIF_TAG_Y_RESOLUTION 0x011B +#define EXIF_TAG_RESOLUTION_UNIT 0x0128 +#define EXIF_TAG_JPEG_INTERCHANGE_FORMAT 0x0201 +#define EXIF_TAG_JPEG_INTERCHANGE_FORMAT_LEN 0x0202 + +typedef enum { + EXIF_ORIENTATION_UP = 1, + EXIF_ORIENTATION_90 = 6, + EXIF_ORIENTATION_180 = 3, + EXIF_ORIENTATION_270 = 8, +} ExifOrientationType; + +typedef enum { + EXIF_SCENE_STANDARD, + EXIF_SCENE_LANDSCAPE, + EXIF_SCENE_PORTRAIT, + EXIF_SCENE_NIGHT, +} CamExifSceneCaptureType; + +typedef enum { + EXIF_METERING_UNKNOWN, + EXIF_METERING_AVERAGE, + 
EXIF_METERING_CENTER, + EXIF_METERING_SPOT, + EXIF_METERING_MULTISPOT, + EXIF_METERING_PATTERN, + EXIF_METERING_PARTIAL, + EXIF_METERING_OTHER = 255, +} CamExifMeteringModeType; + +typedef enum { + EXIF_EXPOSURE_AUTO, + EXIF_EXPOSURE_MANUAL, + EXIF_EXPOSURE_AUTO_BRACKET, +} CamExifExposureModeType; + +typedef enum { + EXIF_WB_AUTO, + EXIF_WB_MANUAL, + EXIF_WB_INCANDESCENT, + EXIF_WB_FLUORESCENT, + EXIF_WB_DAYLIGHT, + EXIF_WB_CLOUDY, +} CamExifWhiteBalanceType; + +/* Values */ +#define EXIF_DEF_MAKER "Samsung Electronics Co., Ltd." /* testJpegExif on the CTS test. This should match Build.MANUFACTURER */ +#define EXIF_DEF_MODEL "SAMSUNG" +#define EXIF_DEF_SOFTWARE "SAMSUNG" +#define EXIF_DEF_EXIF_VERSION "0220" +#define EXIF_DEF_USERCOMMENTS "User comments" + +#define EXIF_DEF_YCBCR_POSITIONING 1 /* centered */ +#define EXIF_DEF_EXPOSURE_MANUAL 1 /* manual program */ +#define EXIF_DEF_EXPOSURE_PROGRAM 2 /* normal program */ +#define EXIF_DEF_FLASH 0 /* O: off, 1: on*/ +#define EXIF_DEF_COLOR_SPACE 1 +#define EXIF_DEF_INTEROPERABILITY (0) +#define EXIF_DEF_EXPOSURE_MODE EXIF_EXPOSURE_AUTO +#define EXIF_DEF_APEX_DEN (100) + +#define EXIF_DEF_COMPRESSION 6 +#define EXIF_DEF_RESOLUTION_NUM 72 +#define EXIF_DEF_RESOLUTION_DEN 1 +#define EXIF_DEF_RESOLUTION_UNIT 2 /* inches */ + +#define APP_MARKER_4 4 +#define APP_MARKER_5 5 + +typedef struct { + uint32_t num; + uint32_t den; +} rational_t; + +typedef struct { + int32_t num; + int32_t den; +} srational_t; + +typedef struct { + bool enableGps; + bool enableThumb; + + char maker[32]; + char model[32]; + char software[32]; + char exif_version[4]; + char date_time[20]; + char sec_time[5]; + unsigned int maker_note_size; + unsigned char *maker_note; + unsigned int user_comment_size; + unsigned char *user_comment; + + uint32_t width; + uint32_t height; + uint32_t widthThumb; + uint32_t heightThumb; + + uint16_t orientation; + uint16_t ycbcr_positioning; + uint16_t exposure_program; + uint16_t iso_speed_rating; + uint16_t metering_mode; + uint16_t flash; + uint16_t color_space; + uint16_t interoperability_index; + + uint16_t exposure_mode; + uint16_t white_balance; + uint16_t focal_length_in_35mm_length; + uint16_t scene_capture_type; + char unique_id[30]; + + rational_t exposure_time; + rational_t fnumber; + rational_t aperture; + rational_t max_aperture; + rational_t focal_length; + + srational_t shutter_speed; + srational_t brightness; + srational_t exposure_bias; + + char gps_latitude_ref[2]; + char gps_longitude_ref[2]; + + uint8_t gps_version_id[4]; + uint8_t gps_altitude_ref; + + rational_t gps_latitude[3]; + rational_t gps_longitude[3]; + rational_t gps_altitude; + rational_t gps_timestamp[3]; + char gps_datestamp[11]; + char gps_processing_method[100]; + + rational_t x_resolution; + rational_t y_resolution; + uint16_t resolution_unit; + uint16_t compression_scheme; +} exif_attribute_t; + +typedef struct { + int num_of_appmarker; /* number of app marker */ + int idx[15][1]; /* idx[number_of_appmarker][appmarker_number] */ + char *debugData[15]; /* 0-base */ + unsigned int debugSize[15]; +} debug_attribute_t; + +bool UpdateDebugData(char *jpeg, size_t jpeglen, debug_attribute_t *debug); +bool UpdateExif(char *jpeg, size_t jpeglen, exif_attribute_t *exif); + +#endif /* __HARDWARE_EXYNOS_EXYNOS_EXIF_H__ */ + +#endif //USES_UNIVERSAL_LIBHWJPEG diff --git a/include/ExynosJpegApi.h b/include/ExynosJpegApi.h new file mode 100644 index 0000000..b0b9a59 --- /dev/null +++ b/include/ExynosJpegApi.h @@ -0,0 +1,185 @@ +/* + * Copyright Samsung Electronics 
Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// To prevent build conflict with the previous libhwjpeg +#ifdef USES_UNIVERSAL_LIBHWJPEG + +#ifndef __HARDWARE_EXYNOS_EXYNOS_JPEG_API_H__ +#define __HARDWARE_EXYNOS_EXYNOS_JPEG_API_H__ + +// Exynos HAL defines another version of videodev2 apart from the original one. +// This prevents the Exynos HAL copy from conflicting with the original videodev2.h. +/* +#ifndef v4l2_fourcc +#include +#endif +*/ + +#include + +#ifndef JPEG_CACHE_ON +#define JPEG_CACHE_ON 1 +#endif + +#define JPEG_BUF_TYPE_USER_PTR 1 +#define JPEG_BUF_TYPE_DMA_BUF 2 + +// CUSTOM V4L2 4CC FORMATS FOR LEGACY JPEG LIBRARY AND DRIVERS +#ifndef V4L2_PIX_FMT_JPEG_444 +#define V4L2_PIX_FMT_JPEG_444 v4l2_fourcc('J', 'P', 'G', '4') +#endif +#ifndef V4L2_PIX_FMT_JPEG_422 +#define V4L2_PIX_FMT_JPEG_422 v4l2_fourcc('J', 'P', 'G', '2') +#endif +#ifndef V4L2_PIX_FMT_JPEG_420 +#define V4L2_PIX_FMT_JPEG_420 v4l2_fourcc('J', 'P', 'G', '0') +#endif +#ifndef V4L2_PIX_FMT_JPEG_GRAY +#define V4L2_PIX_FMT_JPEG_GRAY v4l2_fourcc('J', 'P', 'G', 'G') +#endif +#ifndef V4L2_PIX_FMT_JPEG_422V +#define V4L2_PIX_FMT_JPEG_422V v4l2_fourcc('J', 'P', 'G', '5') +#endif +#ifndef V4L2_PIX_FMT_JPEG_411 +#define V4L2_PIX_FMT_JPEG_411 v4l2_fourcc('J', 'P', 'G', '1') +#endif + +class ExynosJpegEncoder { + /* + * ExynosJpegEncoder is conceptually a specialization of + * CHWJpegV4L2Compressor, but it is implemented by composition instead of + * inheritance because CHWJpegV4L2Compressor has a lot of virtual functions + * which require extra memory for vtables. Moreover, ExynosJpegEncoder + * overrides no virtual function + * of CHWJpegV4L2Compressor. 
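+     *
+     * For reference, the typical blocking encode sequence exposed by this
+     * header looks roughly like the sketch below (illustrative only; the
+     * buffer variables, sizes and the chosen pixel formats are hypothetical,
+     * and error handling is omitted):
+     *
+     *   ExynosJpegEncoder enc;
+     *   enc.create();
+     *   enc.setSize(width, height);
+     *   enc.setColorFormat(V4L2_PIX_FMT_NV12);     // input pixel format
+     *   enc.setJpegFormat(V4L2_PIX_FMT_JPEG_420);  // chroma subsampling of the stream
+     *   enc.setQuality(90);
+     *   enc.setInBuf(srcFds, srcPlaneSizes);       // or the char** overload for user pointers
+     *   enc.setOutBuf(dstFd, dstBufSize);
+     *   if (enc.encode() == 0)
+     *       jpegLen = enc.getJpegSize();
+     *   enc.destroy();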
+ */ + CHWJpegV4L2Compressor m_hwjpeg; + + char m_iInBufType; + char m_iOutBufType; + + unsigned int m_uiState; + + int m_nQFactor; + int m_nWidth; + int m_nHeight; + int m_v4l2Format; + int m_jpegFormat; + int m_nStreamSize; + + bool __EnsureFormatIsApplied(); +protected: + enum { + STATE_SIZE_CHANGED = 1 << 0, + STATE_PIXFMT_CHANGED = 1 << 1, + STATE_BASE_MAX = 1 << 16, + }; + + unsigned int GetDeviceCapabilities() { return m_hwjpeg.GetDeviceCapabilities(); } + CHWJpegCompressor &GetCompressor() { return m_hwjpeg; } + unsigned int GetHWDelay() { return m_hwjpeg.GetHWDelay(); } + + void SetState(unsigned int state) { m_uiState |= state; } + void ClearState(unsigned int state) { m_uiState &= ~state; } + bool TestState(unsigned int state) { return (m_uiState & state) == state; } + bool TestStateEither(unsigned int state) { return (m_uiState & state) != 0; } + + virtual bool EnsureFormatIsApplied() { return __EnsureFormatIsApplied(); } +public: + ExynosJpegEncoder(): m_hwjpeg(), + m_iInBufType(JPEG_BUF_TYPE_USER_PTR), m_iOutBufType(JPEG_BUF_TYPE_USER_PTR), m_uiState(0), + m_nQFactor(0), m_nWidth(0), m_nHeight(0), m_v4l2Format(0), m_jpegFormat(0), m_nStreamSize(0) + { + /* To detect setInBuf() call without format setting */ + SetState(STATE_SIZE_CHANGED | STATE_PIXFMT_CHANGED); + } + virtual ~ExynosJpegEncoder() { destroy(); } + + // Return 0 on success, -1 on error + int flagCreate() { return m_hwjpeg.Okay() ? 0 : -1; } + virtual int create(void) { return flagCreate(); } + virtual int destroy(void) { return 0; } + int updateConfig(void) { return 0; } + int setCache(int val) { val = val; return 0; } + + void *getJpegConfig() { return reinterpret_cast(this); } + int setJpegConfig(void* pConfig); + + int checkInBufType(void) { return m_iInBufType; } + int checkOutBufType(void) { return m_iOutBufType; } + + int getInBuf(int *piBuf, int *piInputSize, int iSize); + int getOutBuf(int *piBuf, int *piOutputSize); + int getInBuf(char **pcBuf, int *piInputSize, int iSize); + int getOutBuf(char **pcBuf, int *piOutputSize); + + int setInBuf(int *piBuf, int *iSize); + int setOutBuf(int iBuf, int iSize); + int setInBuf(char **pcBuf, int *iSize); + int setOutBuf(char *pcBuf, int iSize); + + int getSize(int *piWidth, int *piHeight) { + *piWidth = m_nWidth; + *piHeight = m_nHeight; + return 0; + } + + int setSize(int iW, int iH) { + if ((m_nWidth != iW) || (m_nHeight != iH)) { + m_nWidth = iW; + m_nHeight = iH; + SetState(STATE_SIZE_CHANGED); + } + return 0; + } + + int setJpegFormat(int iV4l2JpegFormat); + int getColorFormat(void) { return m_v4l2Format; } + int setColorFormat(int iV4l2ColorFormat) { + if (iV4l2ColorFormat != m_v4l2Format) { + m_v4l2Format = iV4l2ColorFormat; + SetState(STATE_PIXFMT_CHANGED); + } + return 0; + } + + int setQuality(int iQuality) { + if (m_nQFactor != iQuality) { + if (!m_hwjpeg.SetQuality(static_cast(iQuality))) + return -1; + m_nQFactor = iQuality; + } + return 0; + } + + int setQuality(const unsigned char q_table[]); + + int setColorBufSize(int *piBufSize, int iSize); + int getJpegSize(void) { return m_nStreamSize; } + + int encode(void) { + if (!__EnsureFormatIsApplied()) + return false; + + m_nStreamSize = static_cast(m_hwjpeg.Compress()); + return (m_nStreamSize < 0) ? 
-1 : 0; + } + +}; + +#endif //__HARDWARE_EXYNOS_EXYNOS_JPEG_API_H__ + +#endif //USES_UNIVERSAL_LIBHWJPEG diff --git a/include/ExynosJpegEncoderForCamera.h b/include/ExynosJpegEncoderForCamera.h new file mode 100644 index 0000000..61cd9c0 --- /dev/null +++ b/include/ExynosJpegEncoderForCamera.h @@ -0,0 +1,128 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// To prevent build conflict with the previous libhwjpeg +#ifdef USES_UNIVERSAL_LIBHWJPEG + +#ifndef __HARDWARE_EXYNOS_JPEG_ENCODER_FOR_CAMERA_H__ +#define __HARDWARE_EXYNOS_JPEG_ENCODER_FOR_CAMERA_H__ + +#include + +#include "ExynosExif.h" +#include "ExynosJpegApi.h" + +class CLibCSC; // defined in libhwjpeg/hwjpeg-libcsc.h +class CAppMarkerWriter; // defined in libhwjpeg/AppMarkerWriter.h + +class ExynosJpegEncoderForCamera: public ExynosJpegEncoder { + enum { + STATE_THUMBSIZE_CHANGED = STATE_BASE_MAX << 0, + STATE_HWFC_ENABLED = STATE_BASE_MAX << 1, + STATE_NO_CREATE_THUMBIMAGE = STATE_BASE_MAX << 2, + STATE_NO_BTBCOMP = STATE_BASE_MAX << 3, + }; + + CHWJpegCompressor *m_phwjpeg4thumb; + CLibCSC *m_pLibCSC; + int m_fdIONClient; + int m_fdIONThumbImgBuffer; + char *m_pIONThumbImgBuffer; + size_t m_szIONThumbImgBuffer; + char *m_pIONThumbJpegBuffer; + size_t m_szIONThumbJpegBuffer; + + int m_nThumbWidth; + int m_nThumbHeight; + int m_nMainQuality; + int m_nThumbQuality; + + int m_iHWScalerID; + + /* + * The following four placeholders and size variables are used + * by asynchronous (non-blocking) compression + */ + char *m_pStreamBase; + size_t m_nStreamSize; + + char m_fThumbBufferType; + + union { + char *m_pThumbnailImageBuffer[3]; // checkInBufType() == JPEG_BUF_TYPE_USER_PTR + int m_fdThumbnailImageBuffer[3]; // checkInBufType() == JPEG_BUF_TYPE_DMA_BUF + }; + size_t m_szThumbnailImageLen[3]; + + CAppMarkerWriter *m_pAppWriter; + + pthread_t m_threadWorker; + + bool AllocThumbBuffer(int v4l2Format); /* For single compression */ + bool AllocThumbJpegBuffer(); /* For BTB compression */ + bool GenerateThumbnailImage(); + size_t CompressThumbnail(); + size_t CompressThumbnailOnly(size_t limit, int quality, unsigned int v4l2Format, int src_buftype); + size_t RemoveTrailingDummies(char *base, size_t len); + ssize_t FinishCompression(size_t mainlen, size_t thumblen); + bool ProcessExif(char *base, size_t limit, exif_attribute_t *exifInfo, debug_attribute_t *debuginfo); + static void *tCompressThumbnail(void *p); + bool PrepareCompression(bool thumbnail); + + // IsThumbGenerationNeeded - true if the thumbnail image needs to be generated from the main image + // It also implies that a worker thread is spawned to generate the thumbnail concurrently. 
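+    //
+    // For context, the camera HAL is expected to drive this class roughly as
+    // follows (an illustrative sketch only; the buffer variables, sizes and
+    // quality values are hypothetical, and error handling is omitted):
+    //
+    //   ExynosJpegEncoderForCamera jpegEnc;
+    //   jpegEnc.create();
+    //   jpegEnc.setSize(w, h);
+    //   jpegEnc.setColorFormat(V4L2_PIX_FMT_NV21);
+    //   jpegEnc.setQuality(95);
+    //   jpegEnc.setThumbnailSize(320, 240);
+    //   jpegEnc.setThumbnailQuality(50);
+    //   jpegEnc.setInBuf(srcFds, srcPlaneSizes);
+    //   jpegEnc.setOutBuf(dstFd, dstBufSize);
+    //   jpegEnc.encode(&jpegLen, &exifInfo, &jpegBuf, &debugInfo);
+    //   ssize_t streamLen = jpegEnc.WaitForCompression(); // for the non-blocking path
+    //
+    // The inline helpers below query and toggle that thumbnail-generation state: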
+ inline bool IsThumbGenerationNeeded() { return !TestState(STATE_NO_CREATE_THUMBIMAGE); } + inline void NoThumbGenerationNeeded() { SetState(STATE_NO_CREATE_THUMBIMAGE); } + inline void ThumbGenerationNeeded() { ClearState(STATE_NO_CREATE_THUMBIMAGE); } + + inline bool IsBTBCompressionSupported() { + return !!(GetDeviceCapabilities() & V4L2_CAP_EXYNOS_JPEG_B2B_COMPRESSION) && + !TestState(STATE_NO_BTBCOMP); + } +protected: + virtual bool EnsureFormatIsApplied(); +public: + ExynosJpegEncoderForCamera(bool bBTBComp = true); + virtual ~ExynosJpegEncoderForCamera(); + + int encode(int *size, exif_attribute_t *exifInfo, char** pcJpegBuffer, debug_attribute_t *debugInfo = 0); + int setInBuf2(int *piBuf, int *iSize); + int setInBuf2(char **pcBuf, int *iSize); + int setThumbnailSize(int w, int h); + int setThumbnailQuality(int quality); + + void setExtScalerNum(int csc_hwscaler_id) { m_iHWScalerID = csc_hwscaler_id; } + + void EnableHWFC() { + SetState(STATE_HWFC_ENABLED); + GetCompressor().SetAuxFlags(EXYNOS_HWJPEG_AUXOPT_ENABLE_HWFC); + } + void DisableHWFC() { + GetCompressor().ClearAuxFlags(EXYNOS_HWJPEG_AUXOPT_ENABLE_HWFC); + ClearState(STATE_HWFC_ENABLED); + } + + ssize_t WaitForCompression(); + + size_t GetThumbnailImage(char *buffer, size_t buflen); + + virtual int destroy(void); +}; + +#endif //__HARDWARE_EXYNOS_JPEG_ENCODER_FOR_CAMERA_H__ + +#endif //USES_UNIVERSAL_LIBHWJPEG diff --git a/include/FimgApi.h b/include/FimgApi.h new file mode 100644 index 0000000..a8e3a80 --- /dev/null +++ b/include/FimgApi.h @@ -0,0 +1,199 @@ +/* +** +** Copyright 2009 Samsung Electronics Co, Ltd. +** Copyright 2008, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+** +** +*/ + +#ifndef FIMG_API_H +#define FIMG_API_H + +#include +#include +#include + +#if defined(FIMGAPI_V5X) +#include "sec_g2d_5x.h" +#else +#include "sec_g2d_4x.h" +#endif + +#undef FIMGAPI_HAL_DEBUG +#undef REAL_DEBUG +#undef ANDROID_LOG + +#if defined(REAL_DEBUG) +#ifdef ANDROID_LOG +#define PRINT SLOGE +#define PRINTD SLOGD +#else +#define PRINT printf +#define PRINTD printf +#endif +#else +void VOID_FUNC(const char *format, ...); + +#define PRINT VOID_FUNC +#define PRINTD VOID_FUNC +#endif + +struct Fimg { + int srcX; + int srcY; + int srcW; + int srcH; + int srcFWStride; // this is not w, just stride (w * bpp) + int srcFH; + int srcBPP; + int srcColorFormat; + int srcAlphaType; + unsigned char* srcAddr; + + int dstX; + int dstY; + int dstW; + int dstH; + int dstFWStride; // this is not w, just stride (w * bpp) + int dstFH; + int dstBPP; + int dstColorFormat; + int dstAlphaType; + unsigned char* dstAddr; + + int clipT; + int clipB; + int clipL; + int clipR; + int level; + + unsigned int fillcolor; + int tileModeX; + int tileModeY; + int rotate; + unsigned int alpha; + int xfermode; + int isDither; + int isFilter; + int matrixType; + float matrixSw; + float matrixSh; + + int called; /* 0 : drawRect 1 : drawBitmap */ +}; + +#ifdef __cplusplus + +struct blitinfo_table { + int op; + const char *str; +}; + +struct compromise_param { + int clipW; + int clipH; + int src_fmt; + int dst_fmt; + int isScaling; + int isFilter; + int isSrcOver; +}; + +extern struct blitinfo_table optbl[]; + +class FimgApi +{ +public: +#endif + +#ifdef __cplusplus +private : + bool m_flagCreate; + +protected : + FimgApi(); + FimgApi(const FimgApi& rhs) {} + virtual ~FimgApi(); + +public: + bool Create(void); + bool Destroy(void); + inline bool FlagCreate(void) { return m_flagCreate; } + bool Stretch(struct fimg2d_blit *cmd); +#ifdef FIMG2D_USE_M2M1SHOT2 + bool Stretch_v5(struct m2m1shot2 *cmd); +#endif + bool Sync(void); + +protected: + virtual bool t_Create(void); + virtual bool t_Destroy(void); + virtual bool t_Stretch(struct fimg2d_blit *cmd); +#ifdef FIMG2D_USE_M2M1SHOT2 + virtual bool t_Stretch_v5(struct m2m1shot2 *cmd); +#endif + virtual bool t_Sync(void); + virtual bool t_Lock(void); + virtual bool t_UnLock(void); + +}; +#endif + +#ifdef __cplusplus +extern "C" +#endif +struct FimgApi *createFimgApi(); + +#ifdef __cplusplus +extern "C" +#endif +void destroyFimgApi(FimgApi *ptrFimgApi); + +#ifdef __cplusplus +extern "C" +#endif +int stretchFimgApi(struct fimg2d_blit *cmd); + +#ifdef FIMG2D_USE_M2M1SHOT2 +#ifdef __cplusplus +extern "C" +#endif +int stretchFimgApi_v5(struct m2m1shot2 *cmd); +#endif + +#ifdef __cplusplus +extern "C" +#endif +int stretchFimgApi_fast(struct fimg2d_blit *cmd, unsigned long temp_addr, int temp_size); + +#ifdef __cplusplus +extern "C" +#endif +bool checkScaleFimgApi(Fimg *fimg); + + +#ifdef __cplusplus +extern "C" +#endif +int SyncFimgApi(void); + +int requestFimgApi_v5(struct Fimg *fimg); +void printDataBlit(char *title, const char *called, struct fimg2d_blit *cmd); +void printDataBlitRotate(int rotate); +void printDataBlitImage(const char *title, struct fimg2d_image *image); +void printDataBlitRect(const char *title, struct fimg2d_rect *rect); +void printDataBlitRect(const char *title, struct fimg2d_clip *clipping); +void printDataBlitScale(struct fimg2d_scale *scaling); +#endif //FIMG_API_H diff --git a/include/csc.h b/include/csc.h new file mode 100644 index 0000000..7f277d6 --- /dev/null +++ b/include/csc.h @@ -0,0 +1,562 @@ +/* + * Copyright (C) 2012 The Android Open 
Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc.h + * + * @brief color space convertion abstract header + * + * @author Pyoungjae Jung (pjet.jung@samsung.com) + * + * @version 1.0 + * + * @history + * 2011.12.27 : Create + */ + +#ifndef CSC_H +#define CSC_H + +#ifdef __cplusplus +extern "C" { +#endif + +#define CSC_MAX_PLANES 3 + +typedef enum _CSC_ERRORCODE { + CSC_ErrorNone = 0, + CSC_Error, + CSC_ErrorNotInit, + CSC_ErrorInvalidAddress, + CSC_ErrorUnsupportFormat, + CSC_ErrorNotImplemented +} CSC_ERRORCODE; + +typedef enum _CSC_METHOD { + CSC_METHOD_SW = 0, + CSC_METHOD_HW +} CSC_METHOD; + +typedef enum _CSC_HW_PROPERTY_TYPE { + CSC_HW_PROPERTY_FIXED_NODE = 0, + CSC_HW_PROPERTY_MODE_DRM, +} CSC_HW_PROPERTY_TYPE; + +typedef enum _CSC_MEMTYPE { + CSC_MEMORY_MMAP = 1, + CSC_MEMORY_USERPTR, + CSC_MEMORY_OVERLAY, + CSC_MEMORY_DMABUF, + CSC_MEMORY_MFC, +} CSC_MEMTYPE; + +typedef enum _CSC_HW_ID { + CSC_HW_GSC0 = 0, + CSC_HW_GSC1, + CSC_HW_GSC2, + CSC_HW_GSC3, + CSC_HW_SC0, + CSC_HW_SC1, + CSC_HW_SC2, + CSC_HW_MAX, +} CSC_HW_ID; + +typedef enum _CSC_PLANE { + CSC_Y_PLANE = 0, + CSC_RGB_PLANE = 0, + CSC_U_PLANE = 1, + CSC_UV_PLANE = 1, + CSC_V_PLANE = 2 +} CSC_PLANE; + +typedef enum _CSC_HW_TYPE { + CSC_HW_TYPE_FIMC = 0, + CSC_HW_TYPE_GSCALER +} CSC_HW_TYPE; + +typedef enum _CSC_EQ_MODE { + CSC_EQ_MODE_USER = 0, + CSC_EQ_MODE_AUTO +} CSC_EQ_MODE; + +typedef enum _CSC_EQ_COLORSPACE { + CSC_EQ_COLORSPACE_SMPTE170M = 1, + CSC_EQ_COLORSPACE_SMPTE240M, + CSC_EQ_COLORSPACE_REC709, + CSC_EQ_COLORSPACE_BT878, + CSC_EQ_COLORSPACE_470_SYSTEM_M, + CSC_EQ_COLORSPACE_470_SYSTEM_BG +} CSC_EQ_COLORSPACE; + +typedef enum _CSC_EQ_RANGE { + CSC_EQ_RANGE_NARROW = 0, + CSC_EQ_RANGE_FULL +} CSC_EQ_RANGE; + +typedef enum _CSC_HW_FILTER { + CSC_FT_NONE = 0, + CSC_FT_BLUR, + CSC_FT_240, + CSC_FT_480, + CSC_FT_720, + CSC_FT_1080, + CSC_FT_MAX +} CSC_HW_FILTER; + +typedef struct _CSC_FORMAT { + unsigned int width; + unsigned int height; + unsigned int crop_left; + unsigned int crop_top; + unsigned int crop_width; + unsigned int crop_height; + unsigned int color_format; + unsigned int cacheable; + unsigned int mode_drm; +} CSC_FORMAT; + +typedef struct _CSC_BUFFER { + void *planes[CSC_MAX_PLANES]; + int mem_type; +} CSC_BUFFER; + +typedef struct _CSC_HW_PROPERTY { + int fixed_node; + int mode_drm; +} CSC_HW_PROPERTY; + +typedef struct _CSC_HANDLE { + CSC_FORMAT dst_format; + CSC_FORMAT src_format; + CSC_BUFFER dst_buffer; + CSC_BUFFER src_buffer; + CSC_METHOD csc_method; + CSC_HW_TYPE csc_hw_type; + void *csc_hw_handle; + CSC_HW_PROPERTY hw_property; + + /* CSC Equation */ + CSC_EQ_MODE csc_mode; + CSC_EQ_RANGE csc_range; + CSC_EQ_COLORSPACE colorspace; + + /* Denoising filter */ + CSC_HW_FILTER filter; +} CSC_HANDLE; + +/* + * change hal pixel format to omx pixel format + * + * @param hal_format + * hal pixel format[in] + * + * @return + * omx pixel format + */ +unsigned int hal_2_omx_pixel_format( + unsigned int hal_format); + +/* + * change omx pixel format to hal pixel 
format + * + * @param hal_format + * omx pixel format[in] + * + * @return + * hal pixel format + */ +unsigned int omx_2_hal_pixel_format( + unsigned int omx_format); + +/* + * Init CSC handle + * + * @return + * csc handle + */ +void *csc_init( + CSC_METHOD method); + +/* + * Deinit CSC handle + * + * @param handle + * CSC handle[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_deinit( + void *handle); + +/* + * get color space converter method + * + * @param handle + * CSC handle[in] + * + * @param method + * CSC method[out] + * + * @return + * error code + */ +CSC_ERRORCODE csc_get_method( + void *handle, + CSC_METHOD *method); + +/* + * set color space converter method + * + * @param handle + * CSC handle[in] + * + * @param method + * CSC method[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_method( + void *handle, + CSC_METHOD method); + +/* + * Set hw property + * + * @param handle + * CSC handle[in] + * + * @param property + * csc hw property[in] + * + * @param value + * csc hw property value[in] + * + * @return + * csc handle + */ +CSC_ERRORCODE csc_set_hw_property( + void *handle, + CSC_HW_PROPERTY_TYPE property, + int value); + +/* + * Get csc equation property. + * + * @param handle + * CSC handle[in] + * + * @param mode + * csc equation mode[out] + * + * @param colorspace + * csc color space[out] + * + * @param range + * csc equation range[out] + * + * @return + * error code + */ +CSC_ERRORCODE csc_get_eq_property( + void *handle, + CSC_EQ_MODE *csc_mode, + CSC_EQ_RANGE *csc_range, + CSC_EQ_COLORSPACE *colorspace); + +/* + * Set csc equation property. + * + * @param handle + * CSC handle[in] + * + * @param mode + * csc equation mode[in] + * + * @param colorspace + * csc color space[in] + * + * @param range + * csc equation range[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_eq_property( + void *handle, + CSC_EQ_MODE csc_mode, + CSC_EQ_RANGE csc_range, + CSC_EQ_COLORSPACE colorspace); + +/* + * Set csc filter property. + * + * @param handle + * CSC handle[in] + * + * @param filter + * csc filter info[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_filter_property( + void *handle, + CSC_HW_FILTER filter); + +/* + * Get source format. + * + * @param handle + * CSC handle[in] + * + * @param width + * address of image width[out] + * + * @param height + * address of image height[out] + * + * @param crop_left + * address of image left crop size[out] + * + * @param crop_top + * address of image top crop size[out] + * + * @param crop_width + * address of cropped image width[out] + * + * @param crop_height + * address of cropped image height[out] + * + * @param color_format + * address of source color format(HAL format)[out] + * + * @return + * error code + */ +CSC_ERRORCODE csc_get_src_format( + void *handle, + unsigned int *width, + unsigned int *height, + unsigned int *crop_left, + unsigned int *crop_top, + unsigned int *crop_width, + unsigned int *crop_height, + unsigned int *color_format, + unsigned int *cacheable); + +/* + * Set source format. + * Don't call each converting time. + * Pls call this function as below. + * 1. first converting time + * 2. 
format is changed + * + * @param handle + * CSC handle[in] + * + * @param width + * image width[in] + * + * @param height + * image height[in] + * + * @param crop_left + * image left crop size[in] + * + * @param crop_top + * image top crop size[in] + * + * @param crop_width + * cropped image width[in] + * + * @param crop_height + * cropped image height[in] + * + * @param color_format + * source color format(HAL format)[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_src_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format, + unsigned int cacheable); + +/* + * Get destination format. + * + * @param handle + * CSC handle[in] + * + * @param width + * address of image width[out] + * + * @param height + * address of image height[out] + * + * @param crop_left + * address of image left crop size[out] + * + * @param crop_top + * address of image top crop size[out] + * + * @param crop_width + * address of cropped image width[out] + * + * @param crop_height + * address of cropped image height[out] + * + * @param color_format + * address of color format(HAL format)[out] + * + * @return + * error code + */ +CSC_ERRORCODE csc_get_dst_format( + void *handle, + unsigned int *width, + unsigned int *height, + unsigned int *crop_left, + unsigned int *crop_top, + unsigned int *crop_width, + unsigned int *crop_height, + unsigned int *color_format, + unsigned int *cacheable); + +/* + * Set destination format + * Don't call each converting time. + * Pls call this function as below. + * 1. first converting time + * 2. format is changed + * + * @param handle + * CSC handle[in] + * + * @param width + * image width[in] + * + * @param height + * image height[in] + * + * @param crop_left + * image left crop size[in] + * + * @param crop_top + * image top crop size[in] + * + * @param crop_width + * cropped image width[in] + * + * @param crop_height + * cropped image height[in] + * + * @param color_format + * destination color format(HAL format)[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_dst_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format, + unsigned int cacheable); + +/* + * Setup source buffer + * set_format func should be called before this this func. 
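+ *
+ * For reference, a full conversion with this API typically looks like the
+ * sketch below (illustrative only; the plane-pointer arrays, formats and
+ * cacheable flags are hypothetical, and error checking is omitted):
+ *
+ *   void *csc = csc_init(CSC_METHOD_HW);
+ *   csc_set_src_format(csc, w, h, 0, 0, w, h, src_hal_format, 1);
+ *   csc_set_dst_format(csc, w, h, 0, 0, w, h, dst_hal_format, 1);
+ *   csc_set_src_buffer(csc, src_planes, CSC_MEMORY_DMABUF);
+ *   csc_set_dst_buffer(csc, dst_planes, CSC_MEMORY_DMABUF);
+ *   csc_convert(csc);
+ *   csc_deinit(csc);
+ *
+ * (src_planes and dst_planes are void *[CSC_MAX_PLANES] arrays of plane
+ * addresses or dma-buf fds, depending on the memory type.)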
+ * + * @param handle + * CSC handle[in] + * + * @param addr + * per-plane source buffer addresses (interpretation depends on mem_type)[in] + * + * @param mem_type + * memory type of the source buffers[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_src_buffer( + void *handle, + void *addr[CSC_MAX_PLANES], + int mem_type); + +/* + * Setup destination buffer. + * + * @param handle + * CSC handle[in] + * + * @param addr + * per-plane destination buffer addresses (interpretation depends on mem_type)[in] + * + * @param mem_type + * memory type of the destination buffers[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_set_dst_buffer( + void *handle, + void *addr[CSC_MAX_PLANES], + int mem_type); + +/* + * Convert color space with the pre-configured formats and buffers + * + * @param handle + * CSC handle[in] + * + * @return + * error code + */ +CSC_ERRORCODE csc_convert( + void *handle); + +CSC_ERRORCODE csc_convert_with_rotation( + void *handle, int rotation, int flip_horizontal, int flip_vertical); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/decon-fb.h b/include/decon-fb.h new file mode 100644 index 0000000..66fb784 --- /dev/null +++ b/include/decon-fb.h @@ -0,0 +1,240 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned!
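For orientation, a minimal caller-side sketch of the csc.h API above. This is not part of the patch: the CSC_METHOD_HW, CSC_MEMORY_DMABUF and CSC_ErrorNone names are assumed from the rest of libcsc's csc.h (not shown in this hunk), the HAL format constants come from exynos_format.h and Android's graphics.h, and the dmabuf fds are placeholders.

/* Hypothetical usage: convert one NV12 frame to RGB565 on the HW path. */
#include "csc.h"
#include "exynos_format.h"

static int convert_nv12_to_rgb565(int src_y_fd, int src_cbcr_fd, int dst_fd)
{
    void *csc = csc_init(CSC_METHOD_HW);          /* CSC_METHOD_HW: assumed enum value */
    if (csc == NULL)
        return -1;

    /* Formats are configured once (or when they change), not on every conversion. */
    csc_set_src_format(csc, 1920, 1080, 0, 0, 1920, 1080,
                       HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M, 1 /* cacheable */);
    csc_set_dst_format(csc, 1920, 1080, 0, 0, 1920, 1080,
                       HAL_PIXEL_FORMAT_RGB_565, 1);

    /* With a DMABUF memory type, addr[] carries per-plane buffer fds. */
    void *src_addr[CSC_MAX_PLANES] = { (void *)(long)src_y_fd, (void *)(long)src_cbcr_fd, NULL };
    void *dst_addr[CSC_MAX_PLANES] = { (void *)(long)dst_fd, NULL, NULL };
    csc_set_src_buffer(csc, src_addr, CSC_MEMORY_DMABUF);  /* CSC_MEMORY_DMABUF: assumed */
    csc_set_dst_buffer(csc, dst_addr, CSC_MEMORY_DMABUF);

    CSC_ERRORCODE ret = csc_convert(csc);

    csc_deinit(csc);
    return (ret == CSC_ErrorNone) ? 0 : -1;       /* CSC_ErrorNone: assumed name for success */
}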
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef ___SAMSUNG_DECON_H__ +#define ___SAMSUNG_DECON_H__ +#define S3C_FB_MAX_WIN (7) +#define MAX_DECON_WIN (7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define DECON_WIN_UPDATE_IDX MAX_DECON_WIN +#define MAX_BUF_PLANE_CNT (3) +typedef unsigned int u32; +#if defined(USES_ARCH_ARM64) || defined(USES_DECON_64BIT_ADDRESS) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +typedef uint64_t dma_addr_t; +#else +typedef uint32_t dma_addr_t; +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct decon_win_rect { + int x; + int y; + u32 w; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + u32 h; +}; +struct decon_rect { + int left; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int top; + int right; + int bottom; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_window { + int x; + int y; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_plane_alpha { + int channel; + unsigned char red; + unsigned char green; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned char blue; +}; +struct s3c_fb_user_chroma { + int enabled; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned char red; + unsigned char green; + unsigned char blue; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_ion_client { + int fd[MAX_BUF_PLANE_CNT]; + int offset; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum decon_pixel_format { + DECON_PIXEL_FORMAT_ARGB_8888 = 0, + DECON_PIXEL_FORMAT_ABGR_8888, + DECON_PIXEL_FORMAT_RGBA_8888, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_BGRA_8888, + DECON_PIXEL_FORMAT_XRGB_8888, + DECON_PIXEL_FORMAT_XBGR_8888, + DECON_PIXEL_FORMAT_RGBX_8888, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_BGRX_8888, + DECON_PIXEL_FORMAT_RGBA_5551, + DECON_PIXEL_FORMAT_RGB_565, + DECON_PIXEL_FORMAT_NV16, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_NV61, + DECON_PIXEL_FORMAT_YVU422_3P, + DECON_PIXEL_FORMAT_NV12, + DECON_PIXEL_FORMAT_NV21, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_NV12M, + DECON_PIXEL_FORMAT_NV21M, + DECON_PIXEL_FORMAT_YUV420, + DECON_PIXEL_FORMAT_YVU420, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_YUV420M, + DECON_PIXEL_FORMAT_YVU420M, + DECON_PIXEL_FORMAT_NV12N, + DECON_PIXEL_FORMAT_NV12N_10B, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_MAX, +}; +enum decon_blending { + DECON_BLENDING_NONE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_BLENDING_PREMULT = 1, + DECON_BLENDING_COVERAGE = 2, + DECON_BLENDING_MAX = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum otf_status { + S3C_FB_DMA, + S3C_FB_LOCAL, + S3C_FB_STOP_DMA, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + S3C_FB_READY_TO_LOCAL, +}; +enum 
vpp_rotate { + VPP_ROT_NORMAL = 0x0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + VPP_ROT_XFLIP, + VPP_ROT_YFLIP, + VPP_ROT_180, + VPP_ROT_90, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + VPP_ROT_90_XFLIP, + VPP_ROT_90_YFLIP, + VPP_ROT_270, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum vpp_csc_eq { + BT_601_NARROW = 0x0, + BT_601_WIDE, + BT_709_NARROW, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + BT_709_WIDE, +}; +enum decon_idma_type { + IDMA_G0 = 0x0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + IDMA_G1, + IDMA_VG0, + IDMA_VG1, + IDMA_VGR0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + IDMA_VGR1, + IDMA_G2, + IDMA_G3, + MAX_DECON_DMA_TYPE +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct vpp_params { + dma_addr_t addr[MAX_BUF_PLANE_CNT]; + enum vpp_rotate rot; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum vpp_csc_eq eq_mode; +}; +struct decon_frame { + int x; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int y; + u32 w; + u32 h; + u32 f_w; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + u32 f_h; +}; +struct decon_win_config { + enum { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_WIN_STATE_DISABLED = 0, + DECON_WIN_STATE_COLOR, + DECON_WIN_STATE_BUFFER, + DECON_WIN_STATE_UPDATE, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } state; + union { + __u32 color; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int fd_idma[3]; + int fence_fd; + int plane_alpha; + enum decon_blending blending; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum decon_idma_type idma_type; + enum decon_pixel_format format; + struct vpp_params vpp_parm; + struct decon_win_rect block_area; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct decon_win_rect transparent_area; + struct decon_win_rect covered_opaque_area; + struct decon_frame src; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; + struct decon_frame dst; + bool protection; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct decon_win_config_data { + int fence; + int fd_odma; + struct decon_win_config config[MAX_DECON_WIN + 1]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct decon_dual_display_blank_data { + enum { + DECON_PRIMARY_DISPLAY = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_SECONDARY_DISPLAY, + } display_type; + int blank; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum disp_pwr_mode { + DECON_POWER_MODE_OFF = 0, + DECON_POWER_MODE_DOZE, + DECON_POWER_MODE_NORMAL, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_POWER_MODE_DOZE_SUSPEND, +}; +#define S3CFB_WIN_POSITION _IOW('F', 203, struct s3c_fb_user_window) +#define S3CFB_WIN_SET_PLANE_ALPHA _IOW('F', 204, struct s3c_fb_user_plane_alpha) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_WIN_SET_CHROMA _IOW('F', 205, struct s3c_fb_user_chroma) +#define S3CFB_SET_VSYNC_INT _IOW('F', 206, __u32) +#define 
S3CFB_GET_ION_USER_HANDLE _IOWR('F', 208, struct s3c_fb_user_ion_client) +#define S3CFB_WIN_CONFIG _IOW('F', 209, struct decon_win_config_data) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_DUAL_DISPLAY_BLANK _IOW('F', 300, struct decon_dual_display_blank_data) +#define S3CFB_WIN_PSR_EXIT _IOW('F', 210, int) +#define EXYNOS_GET_HDMI_CONFIG _IOW('F', 220, struct exynos_hdmi_data) +#define EXYNOS_SET_HDMI_CONFIG _IOW('F', 221, struct exynos_hdmi_data) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_POWER_MODE _IOW('F', 223, __u32) +#endif diff --git a/include/decon_8890.h b/include/decon_8890.h new file mode 100644 index 0000000..a9af576 --- /dev/null +++ b/include/decon_8890.h @@ -0,0 +1,243 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef ___SAMSUNG_DECON_H__ +#define ___SAMSUNG_DECON_H__ +#define S3C_FB_MAX_WIN (8) +#define MAX_DECON_WIN (8) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define DECON_WIN_UPDATE_IDX MAX_DECON_WIN +#define MAX_BUF_PLANE_CNT (3) +typedef unsigned int u32; +#ifdef USES_ARCH_ARM64 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +typedef uint64_t dma_addr_t; +#else +typedef uint32_t dma_addr_t; +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct decon_win_rect { + int x; + int y; + u32 w; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + u32 h; +}; +struct decon_rect { + int left; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int top; + int right; + int bottom; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_window { + int x; + int y; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_plane_alpha { + int channel; + unsigned char red; + unsigned char green; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned char blue; +}; +struct s3c_fb_user_chroma { + int enabled; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned char red; + unsigned char green; + unsigned char blue; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_ion_client { + int fd[MAX_BUF_PLANE_CNT]; + int offset; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum decon_pixel_format { + DECON_PIXEL_FORMAT_ARGB_8888 = 0, + DECON_PIXEL_FORMAT_ABGR_8888, + DECON_PIXEL_FORMAT_RGBA_8888, +/* 
WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_BGRA_8888, + DECON_PIXEL_FORMAT_XRGB_8888, + DECON_PIXEL_FORMAT_XBGR_8888, + DECON_PIXEL_FORMAT_RGBX_8888, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_BGRX_8888, + DECON_PIXEL_FORMAT_RGBA_5551, + DECON_PIXEL_FORMAT_RGB_565, + DECON_PIXEL_FORMAT_NV16, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_NV61, + DECON_PIXEL_FORMAT_YVU422_3P, + DECON_PIXEL_FORMAT_NV12, + DECON_PIXEL_FORMAT_NV21, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_NV12M, + DECON_PIXEL_FORMAT_NV21M, + DECON_PIXEL_FORMAT_YUV420, + DECON_PIXEL_FORMAT_YVU420, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_YUV420M, + DECON_PIXEL_FORMAT_YVU420M, + DECON_PIXEL_FORMAT_NV12N, + DECON_PIXEL_FORMAT_NV12N_10B, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_PIXEL_FORMAT_MAX, +}; +enum decon_blending { + DECON_BLENDING_NONE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_BLENDING_PREMULT = 1, + DECON_BLENDING_COVERAGE = 2, + DECON_BLENDING_MAX = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum otf_status { + S3C_FB_DMA, + S3C_FB_LOCAL, + S3C_FB_STOP_DMA, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + S3C_FB_READY_TO_LOCAL, +}; +enum vpp_rotate { + VPP_ROT_NORMAL = 0x0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + VPP_ROT_XFLIP, + VPP_ROT_YFLIP, + VPP_ROT_180, + VPP_ROT_90, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + VPP_ROT_90_XFLIP, + VPP_ROT_90_YFLIP, + VPP_ROT_270, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum vpp_csc_eq { + BT_601_NARROW = 0x0, + BT_601_WIDE, + BT_709_NARROW, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + BT_709_WIDE, +}; +enum decon_idma_type { + IDMA_G0 = 0x0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + IDMA_G1, + IDMA_VG0, + IDMA_VG1, + IDMA_G2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + IDMA_G3, + IDMA_VGR0, + IDMA_VGR1, + ODMA_WB, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + MAX_DECON_DMA_TYPE +}; +struct vpp_params { + dma_addr_t addr[MAX_BUF_PLANE_CNT]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum vpp_rotate rot; + enum vpp_csc_eq eq_mode; +}; +struct decon_frame { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int x; + int y; + u32 w; + u32 h; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + u32 f_w; + u32 f_h; +}; +struct decon_win_config { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum { + DECON_WIN_STATE_DISABLED = 0, + DECON_WIN_STATE_COLOR, + DECON_WIN_STATE_BUFFER, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_WIN_STATE_UPDATE, + } state; + union { + __u32 color; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct { + int fd_idma[3]; + int fence_fd; + int plane_alpha; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum decon_blending blending; + enum decon_idma_type idma_type; + enum 
decon_pixel_format format; + struct vpp_params vpp_parm; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct decon_win_rect block_area; + struct decon_win_rect transparent_area; + struct decon_win_rect covered_opaque_area; + struct decon_frame src; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; + }; + struct decon_frame dst; + bool protection; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + bool compression; +}; +struct decon_win_config_data { + int fence; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int fd_odma; + struct decon_win_config config[MAX_DECON_WIN + 1]; +}; +struct decon_dual_display_blank_data { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum { + DECON_PRIMARY_DISPLAY = 0, + DECON_SECONDARY_DISPLAY, + } display_type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int blank; +}; +enum disp_pwr_mode { + DECON_POWER_MODE_OFF = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + DECON_POWER_MODE_DOZE, + DECON_POWER_MODE_NORMAL, + DECON_POWER_MODE_DOZE_SUSPEND, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_WIN_POSITION _IOW('F', 203, struct s3c_fb_user_window) +#define S3CFB_WIN_SET_PLANE_ALPHA _IOW('F', 204, struct s3c_fb_user_plane_alpha) +#define S3CFB_WIN_SET_CHROMA _IOW('F', 205, struct s3c_fb_user_chroma) +#define S3CFB_SET_VSYNC_INT _IOW('F', 206, __u32) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_GET_ION_USER_HANDLE _IOWR('F', 208, struct s3c_fb_user_ion_client) +#define S3CFB_WIN_CONFIG _IOW('F', 209, struct decon_win_config_data) +#define S3CFB_WIN_PSR_EXIT _IOW('F', 210, int) +#define S3CFB_DUAL_DISPLAY_BLANK _IOW('F', 300, struct decon_dual_display_blank_data) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define EXYNOS_GET_HDMI_CONFIG _IOW('F', 220, struct exynos_hdmi_data) +#define EXYNOS_SET_HDMI_CONFIG _IOW('F', 221, struct exynos_hdmi_data) +#define S3CFB_POWER_MODE _IOW('F', 223, __u32) +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ diff --git a/include/exynos-hwjpeg.h b/include/exynos-hwjpeg.h new file mode 100644 index 0000000..5e70443 --- /dev/null +++ b/include/exynos-hwjpeg.h @@ -0,0 +1,616 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __EXYNOS_HWJPEG_H__ +#define __EXYNOS_HWJPEG_H__ + +#include // size_t +/* + * exynos-hwjpeg.h does not include videodev2.h because Exynos HAL code may + * define its version of videodev2.h that may differ from + * of the current Linux version. 
+ * To prevent conflict different versions of videodev2.h, this header file does + * not include videodev2.h even though it depends on the data types defined in + * videodev2.h. + * Therefore, the source files that include this header file, they should + * include their proper version of videodev2.h. + */ +#ifndef VIDEO_MAX_PLANES +#error 'linux/videodev2.h' should be included before 'exynos-hwjpeg.h' +#endif + +#if VIDEO_MAX_PLANES < 6 +#error VIDEO_MAX_PLANES should not be smaller than 6 +#endif + +// Exynos JPEG specific device capabilities +// Defined in the driver. Not in videodev2.h +#define V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION 0x0100 +#define V4L2_CAP_EXYNOS_JPEG_B2B_COMPRESSION 0x0200 +#define V4L2_CAP_EXYNOS_JPEG_HWFC 0x0400 +#define V4L2_CAP_EXYNOS_JPEG_HWFC_EMBEDDED 0x0800 +#define V4L2_CAP_EXYNOS_JPEG_MAX_STREAMSIZE 0x1000 +#define V4L2_CAP_EXYNOS_JPEG_NO_STREAMBASE_ALIGN 0x2000 +#define V4L2_CAP_EXYNOS_JPEG_NO_IMAGEBASE_ALIGN 0x4000 +#define V4L2_CAP_EXYNOS_JPEG_NO_BUFFER_OVERRUN 0x8000 +#define V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_FROM_SOS 0x10000 +#define V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_CROP 0x20000 +#define V4L2_CAP_EXYNOS_JPEG_DOWNSCALING 0x40000 +// EXYNOS HWJPEG specific auxiliary option flags +// The flags are common to all derived classes of CHWJpegCompressor +// but if a derived class does not support for a specified flag, +// it is discarded and ignored silently. +#define EXYNOS_HWJPEG_AUXOPT_ENABLE_HWFC (1 << 4) +#define EXYNOS_HWJPEG_AUXOPT_SRC_NOCACHECLEAN (1 << 8) +#define EXYNOS_HWJPEG_AUXOPT_DST_NOCACHECLEAN (1 << 9) + +/* + * CHWJpegBase - The base class of JPEG compression and decompression + * + * This class contains the following information: + * - The open file descriptor of the device node + * - The flags to describe the state of the operations + * - The falgs to indicate the capability of HWJPEG + * This class also defines the getters and the setters of flags. + */ +class CHWJpegBase { + int m_iFD; + unsigned int m_uiDeviceCaps; + /* + * Auxiliary option flags are implementation specific to derived classes + * of CHWJpegCompressor. Even though the flags are common to all derived + * classes, they should identify their supporting flags and ignore the + * other flags. + * NOTE that the flag is volatile. That means the flags stored in + * m_uiAuxFlags is cleared when it is read by GetAuxFlags(). + * + */ + unsigned int m_uiAuxFlags; +protected: + CHWJpegBase(const char *path); + virtual ~CHWJpegBase(); + int GetDeviceFD() { return m_iFD; } + void SetDeviceCapabilities(unsigned int cap) { m_uiDeviceCaps = cap; } + unsigned int GetAuxFlags() { return m_uiAuxFlags; } +public: + unsigned int GetDeviceCapabilities() { return m_uiDeviceCaps; } + bool IsDeviceCapability(unsigned int cap_flags) { return (m_uiDeviceCaps & cap_flags) == cap_flags; } + + /* + * Okay - Test if the object is correctly initialized + * @return: true if this object is correctly initialized and ready to use. + * false, otherwise. + * + * A user that creates this object *must* test if the object is successfully + * created because some initialization in the constructor may fail. + */ + bool Okay() { return m_iFD >= 0; } + operator bool() { return Okay(); } + + /* + * SetAuxFlags - Configure HWJPEG auxiliary options + * @auxflags: a set of bit flags. The flags are prefixed by EXYNOS_HWJPEG_AUXOPT + * and defined separately in this file. + * + * SetAuxFlags() is *not* thread-safe. Racing calls to SetAuxFlags() between + * multiple threads may cause the flags inconsistent. 
Moreover, Racing between + * SetAuxFlags() call by the users and reading the flags by the libhwjpeg + * causes incomplete hwjpeg configuration. + */ + void SetAuxFlags(unsigned int auxflags); + + /* + * ClearAuxFlags - Removes HWJPEG auxiliary options + * @auxflags: a set of bit flags to clear. The flags are prefixed by EXYNOS_HWJPEG_AUXOPT + * and defined separately in this file. + * + * ClearAuxFlags() is *not* thread-safe. Racing calls to ClearAuxFlags() between + * multiple threads may cause the flags inconsistent. Moreover, Racing between + * ClearAuxFlags() call by the users and reading the flags by the libhwjpeg + * causes incomplete hwjpeg configuration. + */ + void ClearAuxFlags(unsigned int auxflags); +}; + +/* + * CHWJpegCompressor - The abstract class of HW JPEG compression accelerator + * + * This class is *not* thread-safe. If an instance of this class is handled by + * multiple threads, the users of the instance should care about the thread + * synchronization. + * + * CHWJpegCompressor is able to check if the number of configured image buffers + * are sufficient. It depends on the configured image format. Therefore, it is + * better to configure the image format and the size prior to configure image + * buffers. If back-to-back compression is required, it is *important* to know + * how many buffers are required for an image. Thus it is *strongly* recommented + * to configure image format and sizes before configuring image buffers. + */ +class CHWJpegCompressor : public CHWJpegBase { + size_t m_nLastStreamSize; + size_t m_nLastThumbStreamSize; +protected: + void SetStreamSize(size_t main_size, size_t secondary_size = 0) { + m_nLastStreamSize = main_size; + m_nLastThumbStreamSize = secondary_size; + } + + ssize_t GetStreamSize(size_t *secondary_size) { + if (secondary_size) + *secondary_size = m_nLastThumbStreamSize; + return static_cast(m_nLastStreamSize); + } +public: + CHWJpegCompressor(const char *path): CHWJpegBase(path), m_nLastStreamSize(0), m_nLastThumbStreamSize(0) { } + + /* + * SetImageFormat - Configure uncompressed image format, width and height + * @v4l2_fmt[in] : Image pixel format defined in + * @width[in] : Width of the primary uncompressed image in the number of pixels + * @height[in] : Height of the primary uncompressed image in the number of pixels + * @sec_width[in] : Width of the secondary uncompressed image in the number of pixels (optional) + * @sec_height[in] : Height of the secondary uncompressed image in the number of pixels (optional) + * @return : true if configuration of image pixel format and size is successful. + * false, otherwise. + * + * The primary and the secondary image format should be same. There is no way + * to configure different image formats for them. + */ + virtual bool SetImageFormat(unsigned int v4l2_fmt, unsigned int width, unsigned int height, + unsigned int sec_width = 0, unsigned int sec_height = 0) = 0; + + /* + * GetImageBufferSizes - Ask the required buffer sizes for the given image format + * @buf_sizes[out] : array of buffer sizes. + * @num_buffers[in/out]: number of elements in @buf_sizes intially. + * number assigned buffer sizes to @buf_sizes on return. + * @return: true if the @buf_sizes and @num_buffers are initialized successfully. + * false, otherwise + * + * It should be called after SetImageFormat() SetImageSize() are called. Otherwise, + * the returned buffer sizes are not correct. 
+ */ + virtual bool GetImageBufferSizes(size_t buf_sizes[], unsigned int *num_bufffers) = 0; + /* + * SetChromaSampFactor - Configure the chroma subsampling factor for JPEG stream + * @horizontal[in] : horizontal chroma subsampling factor + * @vertical[in] : vertical chroma subsampling factor + * @return: true if chroma subsamping factors are configured successfully, + * false if the factors are invalid. + */ + virtual bool SetChromaSampFactor(unsigned int horizontal, unsigned int vertical) = 0; + /* + * SetQuality - Configure quality factor for JPEG compression + * @quality_factor[in] : JPEG compression quality factor between 1 and 100 for the primary image + * @quality_factor2[in] : JPEG compression quality factor for the secondary image (optional) + * @return: true if quality factors are configured successfully. + * false, otherwise. + */ + virtual bool SetQuality(unsigned int quality_factor, unsigned int quality_factor2 = 0) = 0; + /* + * SetQuality - Configure quantization tables for JPEG compression + * @qtable[in] : The 128 element array of quantization tables that contributes to JPEG + * compression. The first 64 elements are the quantization tables of the luma + * component. The other 64 elements are the quantization table of the chroma + * components. All the quantizers in the tables should be specified in the + * zig-zag scan order. + * @return: true if the given quantization tables are configured successfully. + * false, otherwise. + */ + virtual bool SetQuality(const unsigned char qtable[]) { return false; }; + /* + * SetImageBuffer - Configure the uncompressed primary image buffers (userptr) + * @buffers[in] : addresses of the buffers + * @len_buffers[in] : sizes of the buffers + * @num_buffers[in] : the number of elements of @buffers and @len_buffers + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetImageBuffer(char *buffers[], size_t len_buffers[], unsigned int num_buffers) = 0; + /* + * SetImageBuffer - Configure the uncompressed primary image buffers (dmabuf) + * @buffers[in] : file descriptors of the buffers exported by dma-buf + * @len_buffers[in] : sizes of the buffers + * @num_buffers[in] : the number of elements of @buffers and @len_buffers + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetImageBuffer(int buffers[], size_t len_buffers[], unsigned int num_buffers) = 0; + /* + * SetImageBuffer2 - Configure the uncompressed secondary image buffers (userptr) + * @buffers[in] : addresses of the buffers + * @len_buffers[in] : sizes of the buffers + * @num_buffers[in] : the number of elements of @buffers and @len_buffers + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetImageBuffer2(char *buffers[], size_t len_buffers[], unsigned int num_buffers) { return false; } + /* + * SetImageBuffer2 - Configure the uncompressed secondary image buffers (dmabuf) + * @buffers[in] : file descriptors of the buffers exported by dma-buf + * @len_buffers[in] : sizes of the buffers + * @num_buffers[in] : the number of elements of @buffers and @len_buffers + * @return : true if buffer configuration is successful. + * false, otherwise. 
+ */ + virtual bool SetImageBuffer2(int buffers[], size_t len_buffers[], unsigned int num_buffers) { return false; } + /* + * SetJpegBuffer - Configure the buffer of JPEG stream of the primary image (userptr) + * @buffer [in] : The address of the buffer + * @len_buffer [in] : The size of @buffer + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetJpegBuffer(char *buffer, size_t len_buffer) = 0; + /* + * SetJpegBuffer - Configure the buffer of JPEG stream of the primary image (dmabuf) + * @buffer [in] : The file descriptor of the buffer exported by dma-buf + * @len_buffer [in] : The size of @buffer + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetJpegBuffer(int buffer, size_t len_buffer) = 0; + /* + * SetJpegBuffer2 - Configure the buffer of JPEG stream of the secondary image (userptr) + * @buffer [in] : The address of the buffer + * @len_buffer [in] : The size of @buffer + * @return : true if buffer configuration is successful. + * false, otherwise. + * The secondary image configuration is ignored if the secondary image size + * is not configured with SetImageSize(). + */ + virtual bool SetJpegBuffer2(char *buffer, size_t len_buffer) { return false; } + /* + * SetJpegBuffer2 - Configure the buffer of JPEG stream of the secondary image (dmabuf) + * @buffer [in] : The file descriptor of the buffer exported by dma-buf + * @len_buffer [in] : The size of @buffer + * @return : true if buffer configuration is successful. + * false, otherwise. + * The secondary image configuration is ignored if the secondary image size + * is not configured with SetImageSize(). + */ + virtual bool SetJpegBuffer2(int buffer, size_t len_buffer) { return false; } + /* + * Compress - Compress the given image + * secondary_stream_size[out] : The size of secondary JPEG stream + * block_mode[in] : If @block_mode is true this function does not return + * until the compression finishes or error occurrs. + * If a derived function does not support for non-block mode, + * errur is returned. + * @return : The size of the compressed JPEG stream + * (the offset of EOI from SOI plus sizeof(EOI)) + * Zero If @block_mode is false and no error is occurred. + * Negative value on error. + */ + virtual ssize_t Compress(size_t *secondary_stream_size = NULL, bool block_mode = true) = 0; + /* + * WaitForCompression - Wait for the compression finishes + * secondary_stream_size[out] : The size of secondary JPEG stream + * @return : The size of the compressed JPEG stream + * (the offset of EOI from SOI plus sizeof(EOI)) + * Negative value on error. + * + * This function waits until the HWJPEG finishes JPEG compression if the second parameter + * to Compress() is false. If the parameter is true, WaitForCompression() immeidately + * returns and the returned size will be the stream sizes obtained by the last call to + * Compress(). + */ + virtual ssize_t WaitForCompression(size_t *secondary_stream_size = NULL) { return GetStreamSize(secondary_stream_size); } + /* + * GetImageBuffers - Retrieve the configured uncompressed image buffer information (dmabuf) + * @buffers[out]: The file descriptors of the buffers exported by dma-buf + * @len_buffers[out]: The size of each buffers in @buffers + * @num_buffers[in]: The number of elements in @buffers and @len_buffers array + * return: true if retrieving the buffer information is successful. + * false if no buffer is configured or the configured buffer is userptr type. + * DEPREDCATED. 
DO NOT USE THIS FUNCTION. + * This function is just provided to support the legacy ExynosJpegEncoder API. + */ + virtual bool GetImageBuffers(int buffers[], size_t len_buffers[], unsigned int num_buffers) { return false; } + /* + * GetImageBuffers - Retrieve the configured uncompressed image buffer information (userptr) + * @buffers[out]: The addresses of the buffers + * @len_buffers[out]: The size of each buffers in @buffers + * @num_buffers[in]: The number of elements in @buffers and @len_buffers array + * return: true if retrieving the buffer information is successful. + * false if no buffer is configured or the configured buffer is dmabuf type. + * DEPREDCATED. DO NOT USE THIS FUNCTION. + * This function is just provided to support the legacy ExynosJpegEncoder API. + */ + virtual bool GetImageBuffers(char *buffers[], size_t len_buffers[], unsigned int num_buffers) { return false; } + /* + * GetJpegBuffers - Retrieve the configured JPEG stream image buffer information (dmabuf) + * @buffers[out]: The file descriptor of the buffer exported by dma-buf + * @len_buffers[out]: The size of @buffer + * return: true if retrieving the buffer information is successful. + * false if no buffer is configured or the configured buffer is userptr type. + * DEPREDCATED. DO NOT USE THIS FUNCTION. + * This function is just provided to support the legacy ExynosJpegEncoder API. + */ + virtual bool GetJpegBuffer(int *buffers, size_t *len_buffer) { return false; } + /* + * GetJpegBuffers - Retrieve the configured JPEG stream buffer information (userptr) + * @buffers[out]: The address of the buffer + * @len_buffers[out]: The size of @buffers + * return: true if retrieving the buffer information is successful. + * false if no buffer is configured or the configured buffer is dmabuf type. + * DEPREDCATED. DO NOT USE THIS FUNCTION. + * This function is just provided to support the legacy ExynosJpegEncoder API. + */ + virtual bool GetJpegBuffer(char **buffers, size_t *len_buffer) { return false; } + /* + * Release - release the buffers acquired by CHWJpegCompressor + */ + virtual void Release() { } +}; + +/* + * CHWJpegDecompressor - The abstract class of HW JPEG accelerator for decompression + * + * This class is *not* thread-safe. If an instance of this class is handled by + * multiple threads, the users of the instance should care about the thread + * synchronization. + * + * CHWJpegDecompressor supports for downscaling during decompression by 1/2, 1/4 and + * 1/8 if HWJPEG supports. The users should test if the HWJPEG supports for downscaling + * before configuring smaller output image size. + * The users also test if the HWJPEG (driver) requires the address SOI or SOS. If it + * needs SOI, there is no need to feed the driver DHT and DQT. If it needs SOS, DHT + * DQT should be informed to the driver because it is unable to find DHT and DQT + * from SOS. + * + * V4L2_CAP_EXYNOS_JPEG_DOWNSCALING is set if HWJPEG supports for downscaling. + * V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_FROM_SOS is set if HWJPEG driver needs the + * address of SOS and the users to specify DHT and DQT to the driver. 
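To make the compression flow above concrete, a hedged caller-side sketch (not part of the patch). CHWJpegV4L2Compressor is the concrete class declared later in this header, the fd/size arguments and image dimensions are placeholders, and the single image buffer assumes a one-fd NV21 allocation; GetImageBufferSizes() reports the real per-format requirement.

// Hypothetical usage of the compressor interface documented above.
#include <linux/videodev2.h>     // must be included before exynos-hwjpeg.h (see top of file)
#include "exynos-hwjpeg.h"

ssize_t CompressOneNV21Frame(int image_fd, size_t image_len,
                             char *stream_buf, size_t stream_len)
{
    CHWJpegV4L2Compressor hwjpeg;            // concrete implementation declared below
    if (!hwjpeg)                             // operator bool() == Okay()
        return -1;

    if (!hwjpeg.SetImageFormat(V4L2_PIX_FMT_NV21, 3264, 2448))
        return -1;

    int fds[] = { image_fd };
    size_t lens[] = { image_len };
    if (!hwjpeg.SetImageBuffer(fds, lens, 1) ||      // dmabuf variant
        !hwjpeg.SetJpegBuffer(stream_buf, stream_len))
        return -1;

    hwjpeg.SetQuality(90);                   // primary-image quality factor

    ssize_t size = hwjpeg.Compress();        // blocking (default) mode
    hwjpeg.Release();
    return size;                             // stream size, or a negative value on error
}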
+ */ +class CHWJpegDecompressor : public CHWJpegBase { +public: + CHWJpegDecompressor(const char *path) : CHWJpegBase(path) { } + virtual ~CHWJpegDecompressor() { } + /* + * SetImageFormat - Configure decompressed image pixel format + * @v4l2_fmt[in] : Image pixel format defined in + * @width[in] : Width of the decompressed image in the number of pixels + * @height[in] : Height of the decompressed image in the number of pixels + * @return : true if configuration of image pixel format is successful. + * false, otherwise. + * + * @width and @height can be smaller than the compressed image size specified + * by SetStreamPixelSize() if downscaling during decompression is supported. + * The subclasses should test if V4L2_CAP_EXYNOS_JPEG_DOWNSCALING is set + * in the device capabilities. Even though downscaling is supported by HWJPEG, + * it has strict limitation that the downscaling factor should be one of + * 1, 2, 4 and 8. If the specified decompressed image size is not one of + * the compressed image size divided by 1, 2, 4 or 8, decompression should fail. + */ + virtual bool SetImageFormat(unsigned int v4l2_fmt, unsigned int width, unsigned int height) = 0; + + /* + * SetImageBuffer - Configure the decompressed image buffer (userptr) + * @buffer[in] : address of the buffer + * @len_buffer[in] : size of the buffer + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetImageBuffer(char *buffer, size_t len_buffer) = 0; + + /* + * SetImageBuffer - Configure the decompressed image buffer (dmabuf) + * @buffer[in] : file descriptor of the buffer exported by dma-buf + * @len_buffer[in] : size of the buffer + * @return : true if buffer configuration is successful. + * false, otherwise. + */ + virtual bool SetImageBuffer(int buffer, size_t len_buffer) = 0; + + /* + * SetStreamPixelSize - Configure the width and the height of the compressed stream + * @width[in] : The number of horizontal pixels of the compressed image + * @height[in] : The number of vertical pixels of the compressed image + */ + virtual bool SetStreamPixelSize(unsigned int width, unsigned int height) { return true; } + + /* + * SetChromaSampFactor - Configure the chroma subsampling factor for JPEG stream + * @horizontal[in] : horizontal chroma subsampling factor + * @vertical[in] : vertical chroma subsampling factor + * @return: true if chroma subsamping factors are configured successfully, + * false if the factors are invalid. + * + * If V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_FROM_SOS is specified in the device + * capabilities, it is needed to configure chroma subsampling fractors because + * Neither of HWJPEG nor its driver is able to find the chroma subsampling factors + * of the compressed stream because it is specified in SOF and SOF is written + * ahead of SOS in the JPEG stream. + * If it is required to specify chroma subsampling factors separately, you should + * override SetChromaSampFactor(). + */ + virtual bool SetChromaSampFactor(unsigned int horizontal, unsigned int vertical) { return true; } + + /* + * SetDQT - Configure the address of DQT + * @dqt[in] : The address of DQT in the JPEG stream + * @return: true if the specified DQT has no problem. false if DQT does not exist + * in @dqt or the tables in @dqt are incomplete. + * + * If V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_FROM_SOS is specified in the device + * capabilities, the HWJPEG needs DQT separately. Therefore every subcalss + * will need to override SetDQT(). 
+ */ + virtual bool SetDQT(const char *dqt) { return true; } + + /* + * SetDHT - Configure the address of DHT + * @dht[in] : The address of DHT in the JPEG stream + * @return: true if the specified DHT has no problem. false if DHT does not exist + * in @dht or the tables in @dqt are incomplete. + * + * If V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_FROM_SOS is specified in the device + * capabilities, the HWJPEG needs DHT separately. Therefore every subcalss + * will need to override SetDHT(). + */ + virtual bool SetDHT(const char *dht) { return true; } + + /* + * Decompress - Decompress the given JPEG stream + * @buffer[in] : The buffer of JPEG stream. + * @len[in] : The length of the JPEG stream. It includes EOI marker. + * @return : true if the decompression succeeded. false, otherwise. + * + * If V4L2_CAP_EXYNOS_JPEG_DECOMPRESSION_FROM_SOS is set in the device capability + * SOS marker should be at the start of @buffer. Otherwise, SOI marker should be + * at the start. If @buffer is start with SOS marker, DHT, DQT and chroma + * subsampling factors should be separately configured with SetDHT(), SetDQT() and + * SetChromaSampFactor(), respectively. + */ + virtual bool Decompress(const char *buffer, size_t len) = 0; +}; + +class CHWJpegFlagManager { + unsigned int m_uiHWConfigFlags; +public: + CHWJpegFlagManager() : m_uiHWConfigFlags(0) { } + void SetFlag(unsigned int flag) { m_uiHWConfigFlags |= flag; } + void ClearFlag(unsigned int flag) { m_uiHWConfigFlags &= ~flag; } + bool TestFlag(unsigned int flag) { return (m_uiHWConfigFlags & flag) == flag; } + bool TestFlagEither(unsigned int flag) { return !!(m_uiHWConfigFlags & flag); } + unsigned int GetFlags() { return m_uiHWConfigFlags; } +}; +/* +class CHWJpegM2M1SHOTCompressor: public CHWJpegCompressor { +}; +*/ + +#define TO_SEC_IMG_SIZE(val) (((val) >> 16) & 0xFFFF) + +class CHWJpegV4L2Compressor : public CHWJpegCompressor, private CHWJpegFlagManager { + enum { + HWJPEG_CTRL_CHROMFACTOR = 0, + HWJPEG_CTRL_QFACTOR, + HWJPEG_CTRL_QFACTOR2, + HWJPEG_CTRL_HWFC, + HWJPEG_CTRL_NUM, + }; + + enum { + HWJPEG_FLAG_PIX_FMT = 0x1, // Set if unapplied image format exists + + HWJPEG_FLAG_QBUF_OUT = 0x100, // Set if the image buffer is queued + HWJPEG_FLAG_QBUF_CAP = 0x200, // Set if the JPEG stream buffer is queued + HWJPEG_FLAG_REQBUFS = 0x400, + HWJPEG_FLAG_STREAMING = 0x800, + + HWJPEG_FLAG_SRC_BUFFER = 0x10000, // Set if SetImageBuffer() is invoked successfully + HWJPEG_FLAG_SRC_BUFFER2 = 0x20000, // Set if SetImageBuffer2() is invoked successfully + HWJPEG_FLAG_DST_BUFFER = 0x40000, // Set if SetJpegBuffer() is invoked successfully + HWJPEG_FLAG_DST_BUFFER2 = 0x80000, // Set if SetJpegBuffer2() is invoked successfully + }; + + struct hwjpeg_v4l2_controls { + __u32 id; + __s32 value; + } m_v4l2Controls[HWJPEG_CTRL_NUM]; + + unsigned int m_uiControlsToSet; + // H/W delay of the last compressoin in usec. + // Only valid after Compression() successes. 
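Similarly, a hedged decompression sketch against the CHWJpegDecompressor interface above (not part of the patch). CHWJpegV4L2Decompressor is the concrete class declared below; the fds, lengths and dimensions are placeholders, and the stream is assumed to start at SOI so no separate DHT/DQT/chroma-factor setup is needed.

// Hypothetical usage of the decompressor interface documented above.
bool DecompressToNV21(const char *jpeg, size_t jpeg_len,
                      int out_fd, size_t out_len,
                      unsigned int width, unsigned int height)
{
    CHWJpegV4L2Decompressor hwjpeg;          // concrete implementation declared below
    if (!hwjpeg)                             // operator bool() == Okay()
        return false;

    // Downscaling (1/2, 1/4, 1/8) is only valid when the device reports
    // V4L2_CAP_EXYNOS_JPEG_DOWNSCALING; here the full size is requested.
    if (!hwjpeg.SetImageFormat(V4L2_PIX_FMT_NV21, width, height))
        return false;
    if (!hwjpeg.SetImageBuffer(out_fd, out_len))   // dmabuf variant
        return false;

    return hwjpeg.Decompress(jpeg, jpeg_len);
}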
+ unsigned int m_uiHWDelay; + + v4l2_format m_v4l2Format; // v4l2 format for the source image + v4l2_buffer m_v4l2SrcBuffer; // v4l2 source buffer + v4l2_plane m_v4l2SrcPlanes[6]; + v4l2_buffer m_v4l2DstBuffer; + v4l2_plane m_v4l2DstPlanes[2]; + + bool m_bEnableHWFC; + + bool IsB2BCompression() { + return (TO_SEC_IMG_SIZE(m_v4l2Format.fmt.pix_mp.width) + + TO_SEC_IMG_SIZE(m_v4l2Format.fmt.pix_mp.height)) != 0; + } + + // V4L2 Helpers + bool TryFormat(); + bool SetFormat(); + bool UpdateControls(); + bool ReqBufs(unsigned int count = 1); + bool StreamOn(); + bool StreamOff(); + bool QBuf(); + ssize_t DQBuf(size_t *secondary_stream_size); + bool StopStreaming(); +public: + CHWJpegV4L2Compressor(); + virtual ~CHWJpegV4L2Compressor(); + + unsigned int GetHWDelay() { return m_uiHWDelay; } + + // SetChromaSampFactor can be called during streaming + virtual bool SetChromaSampFactor(unsigned int horizontal, + unsigned int vertical); + virtual bool SetQuality(unsigned int quality_factor, + unsigned int quality_factor2 = 0); + virtual bool SetQuality(const unsigned char qtable[]); + + virtual bool SetImageFormat(unsigned int v4l2_fmt, unsigned int width, unsigned int height, + unsigned int sec_width = 0, unsigned sec_height = 0); + virtual bool GetImageBufferSizes(size_t buf_sizes[], unsigned int *num_bufffers); + virtual bool SetImageBuffer(char *buffers[], size_t len_buffers[], unsigned int num_buffers); + virtual bool SetImageBuffer(int buffers[], size_t len_buffers[], unsigned int num_buffers); + virtual bool SetImageBuffer2(char *buffers[], size_t len_buffers[], unsigned int num_buffers); + virtual bool SetImageBuffer2(int buffers[], size_t len_buffers[], unsigned int num_buffers); + virtual bool SetJpegBuffer(char *buffer, size_t len_buffer); + virtual bool SetJpegBuffer(int buffer, size_t len_buffer); + virtual bool SetJpegBuffer2(char *buffer, size_t len_buffer); + virtual bool SetJpegBuffer2(int buffer, size_t len_buffer); + virtual ssize_t Compress(size_t *secondary_stream_size = NULL, bool bock_mode = true); + virtual bool GetImageBuffers(int buffers[], size_t len_buffers[], unsigned int num_buffers); + virtual bool GetImageBuffers(char *buffers[], size_t len_buffers[], unsigned int num_buffers); + virtual bool GetJpegBuffer(char **buffer, size_t *len_buffer); + virtual bool GetJpegBuffer(int *buffer, size_t *len_buffer); + virtual ssize_t WaitForCompression(size_t *secondary_stream_size = NULL); + virtual void Release(); +}; + +class CHWJpegV4L2Decompressor : public CHWJpegDecompressor, private CHWJpegFlagManager { + enum { + HWJPEG_FLAG_OUTPUT_READY = 0x10, /* the output stream is ready */ + HWJPEG_FLAG_CAPTURE_READY = 0x20, /* the capture stream is ready */ + }; + + unsigned int m_uiHWDelay; + + v4l2_format m_v4l2Format; + v4l2_buffer m_v4l2DstBuffer; /* multi-planar foramt is not supported */ + + bool PrepareCapture(); + void CancelCapture(); + + bool PrepareStream(); + void CancelStream(); + bool QBufAndWait(const char *buffer, size_t len); +public: + CHWJpegV4L2Decompressor(); + virtual ~CHWJpegV4L2Decompressor(); + virtual bool SetImageFormat(unsigned int v4l2_fmt, unsigned int width, unsigned int height); + virtual bool SetImageBuffer(char *buffer, size_t len_buffer); + virtual bool SetImageBuffer(int buffer, size_t len_buffer); + virtual bool Decompress(const char *buffer, size_t len); + + unsigned int GetHWDelay() { return m_uiHWDelay; } +}; +#endif /* __EXYNOS_HWJPEG_H__ */ diff --git a/include/exynos_blender.h b/include/exynos_blender.h new file mode 100644 index 
0000000..a6ef6d8 --- /dev/null +++ b/include/exynos_blender.h @@ -0,0 +1,495 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file exynos_blender.h + * \brief User's API for Exynos Blender library + * \author Eunseok Choi (es10.choi@samsung.com) + * \date 2013/09/21 + * + * Revision History: + * - 2013.09.21 : Eunseok Choi (eunseok.choi@samsung.com) \n + * Create + * + */ +#ifndef __EXYNOS_BLENDER_H__ +#define __EXYNOS_BLENDER_H__ + +#include "videodev2.h" + +#define BL_MAX_PLANES 3 + +enum BL_DEVID { + DEV_UNSPECIFIED = 0, + DEV_G2D0, DEVID_G2D_END, +}; + +enum BL_OP_TYPE { + OP_SRC = 2, + OP_SRC_OVER = 4 +}; + +enum BL_ROTATE { + ORIGIN, + ROT90 = 90, + ROT180 = 180, + ROT270 = 270 +}; + +enum BL_SCALE { + NOSCALE, + NEAREST, + BILINEAR, + POLYPHASE +}; + +enum BL_REPEAT { + NOREPEAT, + NORMAL, + MIRROR = 3, + REFLECT = MIRROR, + CLAMP +}; + +enum BL_BLUESCREEN { + OPAQUE, + //! transparent mode. need bgcolor + TRANSP, + //! bluescreen mode. need bgcolor and bscolor + BLUSCR +}; + +struct bl_property { + enum BL_DEVID devid; + //! true: device open with NONBLOCK flag. + bool nonblock; +}; + +typedef void *bl_handle_t; + +#ifdef __cplusplus +extern "C" { +#endif + +/*! + * Create exynos_blender handle + * + * \ingroup exynos_blender + * + * \param prop + * blender property[in] + * + * \return + * exynos_blender handle + */ +bl_handle_t exynos_bl_create(struct bl_property *prop); + +/*! + * Destroy exynos_blender handle + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + */ +void exynos_bl_destroy(bl_handle_t handle); + +/*! + * Deactivate exynos_blender: g2d-specific for drm + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param deact + * true: deactivate, false: activated (default) + * + * \return + * error code + */ +int exynos_bl_deactivate(bl_handle_t handle, bool deact); + +/*! + * Set color fill mode and src color + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param enable + * true: color fill mode. + * if true, src_addr & src_format is ignored. + * + * \param color_argb8888 + * 32-bit color value, 'a' is msb. + * + * \return + * error code + */ +int exynos_bl_set_color_fill( + bl_handle_t handle, + bool enable, + uint32_t color_argb8888); + +/*! + * Set rotate and flip + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param rot + * 90/180/270: clockwise degree + * + * \param hflip + * true: hoizontal(y-axis) flip + * + * \param vflip + * true: vertical(x-axis) flip + * + * \return + * error code + */ +int exynox_bl_set_rotate( + bl_handle_t handle, + enum BL_ROTATE rot, + bool hflip, + bool vflip); + +/*! 
+ * Set blend op mode + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param op + * 2: SRC COPY (default), 4: SRC OVER + * + * \param premultiplied + * true: alpha premultiplied mode for src and dst + * + * \return + * error code + */ +int exynos_bl_set_blend( + bl_handle_t handle, + enum BL_OP_TYPE op, + bool premultiplied); + +/*! + * Set global alpha value + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param enable + * + * \param g_alpha + * range: 0x0~0xff. + * 0x0 for transpranet, 0xff for opaque. (default '0xff') + * + * \return + * error code + */ +int exynos_bl_set_galpha( + bl_handle_t handle, + bool enable, + unsigned char g_alpha); + +/*! + * Set dither + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param enable + * true: enable dithering effect + * + * \return + * error code + */ +int exynos_bl_set_dither(bl_handle_t handle, bool enable); + +/*! + * Set scaling ratio + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param mode + * 1: nearest, 2: bilinear, 3: polyphase + * + * \param src_w + * src width in pixels of horizontal scale ratio + * + * \param dst_w + * dst width in pixels of horizontal scale ratio + * + * \param src_h + * src height in pixels of vertical scale ratio + * + * \param dst_h + * dst height in pixels of vertical scale ratio + * + * \return + * error code + */ +int exynos_bl_set_scale( + bl_handle_t handle, + enum BL_SCALE mode, + uint32_t src_w, + uint32_t dst_w, + uint32_t src_h, + uint32_t dst_h); + +/*! + * Set repeat mode + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param mode + * 1: normal, 3: mirror(reflect), 4: clamp + * + * \return + * error code + */ +int exynos_bl_set_repeat(bl_handle_t handle, enum BL_REPEAT mode); + +/*! + * Set dst clip rect + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param enable + * true: clip rect is inside of dst crop rect. + * + * \param x + * clip left + * + * \param y + * clip top + * + * \param width + * clip width + * + * \param height + * clip height + * + * \return + * error code + */ +int exynos_bl_set_clip( + bl_handle_t handle, + bool enable, + uint32_t x, + uint32_t y, + uint32_t width, + uint32_t height); + +/*! + * Set colorspace conversion spec + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param enable + * true: user-defined, false: auto + * + * \param space + * V4L2_COLORSPACE_SMPTE170M: 601, V4L2_COLORSPACE_REC709: 709 + * + * \param wide + * true: wide, false: narrow + * + * \return + * error code + */ +int exynos_bl_set_csc_spec( + bl_handle_t handle, + bool enable, + enum v4l2_colorspace space, + bool wide); + +/*! + * Set src image format + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param width + * full width in pixels + * + * \param height + * full height in pixels + * + * \param crop_x + * left of src rect + * + * \param crop_y + * top of src rect + * + * \param crop_width + * width of src rect + * + * \param crop_height + * height of src rect + * + * \return + * error code + */ +int exynos_bl_set_src_format( + bl_handle_t handle, + unsigned int width, + unsigned int height, + unsigned int crop_x, + unsigned int crop_y, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat); + +/*! 
+ * Set dst image format + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param width + * full width in pixels + * + * \param height + * full height in pixels + * + * \param crop_x + * left of dst rect + * + * \param crop_y + * top of dst rect + * + * \param crop_width + * width of dst rect + * + * \param crop_height + * height of dst rect + * + * \return + * error code + */ +int exynos_bl_set_dst_format( + bl_handle_t handle, + unsigned int width, + unsigned int height, + unsigned int crop_x, + unsigned int crop_y, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat); + +/*! + * Set src buffer + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param addr[] + * base address: userptr(ion or malloc), ion_fd: dmabuf + * + * \param type + * V4L2_MEMORY_USERPTR: 2, V4L2_MEMORY_DMABUF: 4 + * + * \return + * error code + */ +int exynos_bl_set_src_addr( + bl_handle_t handle, + void *addr[BL_MAX_PLANES], + enum v4l2_memory type); + +/*! + * Set dst buffer + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \param addr[] + * base address: userptr(ion or malloc), fd: dmabuf(ion) + * + * \param type + * V4L2_MEMORY_USERPTR: 2, V4L2_MEMORY_DMABUF: 4 + * + * \return + * error code + */ +int exynos_bl_set_dst_addr( + bl_handle_t handle, + void *addr[BL_MAX_PLANES], + enum v4l2_memory type); + +/*! + * Start blending single frame + * + * \ingroup exynos_blender + * + * \param handle + * exynos_blender handle[in] + * + * \return + * error code + */ +int exynos_bl_do_blend(bl_handle_t handle); + +/*! + * Start 2-step(scaling & rotation) blending single frame + * + * \ingroup exynos_blender + * + * \return + * error code + */ +int exynos_bl_do_blend_fast(bl_handle_t handle); + +#ifdef __cplusplus +} +#endif + +#endif // __EXYNOS_BLENER_H__ diff --git a/include/exynos_blender_obj.h b/include/exynos_blender_obj.h new file mode 100644 index 0000000..e6cce80 --- /dev/null +++ b/include/exynos_blender_obj.h @@ -0,0 +1,211 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file exynos_blender_obj.h + * \brief Class definition for Exynos Blender library + * \author Eunseok Choi (es10.choi@samsung.com) + * \date 2013/09/21 + * + * Revision History: + * - 2013.09.21 : Eunseok Choi (eunseok.choi@samsung.com) \n + * Create + * + */ +#ifndef __EXYNOS_BLENDER_OBJ_H__ +#define __EXYNOS_BLENDER_OBJ_H__ + +#include +#include +#include +#include +#include "exynos_blender.h" + +#define BL_LOGERR(fmt, args...) \ + ((void)ALOG(LOG_ERROR, LOG_TAG, "%s: " fmt " [%s]", __func__, ##args, strerror(errno))) +#define BL_LOGE(fmt, args...) \ + ((void)ALOG(LOG_ERROR, LOG_TAG, "%s: " fmt, __func__, ##args)) + +//#define BL_DEBUG + +#ifdef BL_DEBUG +#define BL_LOGD(args...) ((void)ALOG(LOG_INFO, LOG_TAG, ##args)) +#else +#define BL_LOGD(args...) 
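A hedged caller-side sketch of the exynos_blender API above (not part of the patch). DEV_G2D0, OP_SRC_OVER, BL_MAX_PLANES and the V4L2 constants come from the headers shown; the dmabuf fds, sizes and the RGB32 source/destination formats are placeholder assumptions.

/* Hypothetical usage: SRC_OVER blend of a premultiplied ARGB source onto a larger target. */
static int blend_one_frame(int src_fd, int dst_fd)
{
    struct bl_property prop;
    prop.devid = DEV_G2D0;
    prop.nonblock = false;

    bl_handle_t bl = exynos_bl_create(&prop);
    if (bl == NULL)
        return -1;

    exynos_bl_set_blend(bl, OP_SRC_OVER, true /* premultiplied */);
    exynos_bl_set_src_format(bl, 1280, 720, 0, 0, 1280, 720, V4L2_PIX_FMT_RGB32);
    exynos_bl_set_dst_format(bl, 1920, 1080, 0, 0, 1280, 720, V4L2_PIX_FMT_RGB32);

    /* With V4L2_MEMORY_DMABUF, addr[] carries per-plane buffer fds. */
    void *src_addr[BL_MAX_PLANES] = { (void *)(long)src_fd, NULL, NULL };
    void *dst_addr[BL_MAX_PLANES] = { (void *)(long)dst_fd, NULL, NULL };
    exynos_bl_set_src_addr(bl, src_addr, V4L2_MEMORY_DMABUF);
    exynos_bl_set_dst_addr(bl, dst_addr, V4L2_MEMORY_DMABUF);

    int ret = exynos_bl_do_blend(bl);

    exynos_bl_destroy(bl);
    return ret;
}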
+#endif + +#define UNIMPL { BL_LOGE("Unimplemented Operation %p\n", this); return -1; } + +#define SRC_BUFTYPE V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE +#define DST_BUFTYPE V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE + +class CBlender { +public: + enum BL_PORT { SRC, DST, NUM_PORTS }; + +protected: + enum { BL_MAX_NODENAME = 14 }; + enum BL_FLAGS { + F_FILL, + F_ROTATE, + F_BLEND, + F_GALPHA, + F_DITHER, + F_BLUSCR, + F_SCALE, + F_REPEAT, + F_CLIP, + F_CSC_SPEC, + F_CTRL_ANY = 10, + + F_SRC_FMT, + F_DST_FMT, + F_SRC_MEMORY, + F_DST_MEMORY, + + F_SRC_REQBUFS, + F_DST_REQBUFS, + F_SRC_QBUF, + F_DST_QBUF, + F_SRC_STREAMON, + F_DST_STREAMON, + + F_FLAGS_END = 32 + }; + + struct BL_FrameInfo { + struct { + v4l2_buf_type type; + uint32_t width, height; + uint32_t crop_x, crop_y, crop_width, crop_height; + uint32_t color_format; + void *addr[BL_MAX_PLANES]; + v4l2_memory memory; + int out_num_planes; + unsigned long out_plane_size[BL_MAX_PLANES]; + } port[NUM_PORTS]; + } m_Frame; + + struct BL_Control { + struct { + bool enable; + uint32_t color_argb8888; + } fill; + BL_ROTATE rot; + bool hflip; + bool vflip; + BL_OP_TYPE op; + bool premultiplied; + struct { + bool enable; + unsigned char val; + } global_alpha; + bool dither; + struct { + BL_BLUESCREEN mode; + uint32_t bg_color; + uint32_t bs_color; + } bluescreen; + struct { + BL_SCALE mode; + uint32_t src_w; + uint32_t dst_w; + uint32_t src_h; + uint32_t dst_h; + } scale; + BL_REPEAT repeat; + struct { + bool enable; + uint32_t x; + uint32_t y; + uint32_t width; + uint32_t height; + } clip; + struct { + bool enable; // set 'true' for user-defined + enum v4l2_colorspace space; + bool wide; + } csc_spec; + } m_Ctrl; + unsigned long m_Flags; + + int m_fdBlender; + int m_iDeviceID; + int m_fdValidate; + + char m_cszNode[BL_MAX_NODENAME]; // /dev/videoXX + static const char *m_cszPortName[NUM_PORTS]; + + inline void SetFlag(int f) { m_Flags |= (1 << f); } + inline void ResetFlag(void) { m_Flags = 0; } + + inline bool IsFlagSet(int f) + { + if (f == F_CTRL_ANY) + return m_Flags & ((1 << f) - 1); + else + return m_Flags & (1 << f); + } + + inline void ClearFlag(int f) + { + if (f == F_CTRL_ANY) { + m_Flags &= ~((1 << f) - 1); + } else { + m_Flags &= ~(1 << f); + } + } + +public: + CBlender() + { + m_fdBlender = -1; + m_iDeviceID = -1; + memset(&m_Frame, 0, sizeof(m_Frame)); + memset(&m_Ctrl, 0, sizeof(m_Ctrl)); + m_Flags = 0; + m_fdValidate = 0; + memset(m_cszNode, 0, sizeof(m_cszNode)); + } + virtual ~CBlender() {}; + + bool Valid() { return (m_fdBlender >= 0) && (m_fdBlender == -m_fdValidate); } + int GetDeviceID() { return m_iDeviceID; } + + int SetColorFill(bool enable, uint32_t color_argb8888); + int SetRotate(BL_ROTATE rot, bool hflip, bool vflip); + int SetBlend(BL_OP_TYPE op, bool premultiplied); + int SetGlobalAlpha(bool enable, unsigned char g_alpha); + int SetDither(bool enable); + int SetBluescreen(BL_BLUESCREEN mode, uint32_t bg_color, uint32_t bs_color = 0); + int SetScale(BL_SCALE mode, uint32_t src_w, uint32_t dst_w, uint32_t src_h, uint32_t dst_h); + int SetRepeat(BL_REPEAT mode); + int SetClipRect(bool enable, uint32_t x, uint32_t y, uint32_t width, uint32_t height); + int SetCscSpec(bool enable, enum v4l2_colorspace space, bool wide); + + int SetAddr(BL_PORT port, void *addr[BL_MAX_PLANES], v4l2_memory type); + int SetImageFormat(BL_PORT port, unsigned int width, unsigned int height, + unsigned int crop_x, unsigned int crop_y, + unsigned int crop_width, unsigned int crop_height, + unsigned int v4l2_colorformat); + + virtual int DoStart() = 0; + 
virtual int DoStop() = 0; + virtual int Deactivate(bool deact) UNIMPL; +}; + +#endif // __EXYNOS_BLENER_OBJ_H__ diff --git a/include/exynos_format.h b/include/exynos_format.h new file mode 100644 index 0000000..3cb4c6d --- /dev/null +++ b/include/exynos_format.h @@ -0,0 +1,224 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +#ifndef _EXYNOS_FORMAT_H_ +#define _EXYNOS_FORMAT_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +enum { + /* HAL_PIXEL_FORMAT_YCbCr_422_P = 0x100, */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M = 0x101, /* HAL_PIXEL_FORMAT_YCbCr_420_P */ + /* HAL_PIXEL_FORMAT_YCbCr_420_I = 0x102, */ + HAL_PIXEL_FORMAT_EXYNOS_CbYCrY_422_I = 0x103, /* HAL_PIXEL_FORMAT_CbYCrY_422_I */ + /* HAL_PIXEL_FORMAT_CbYCrY_420_I = 0x104, */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M = 0x105, /* HAL_PIXEL_FORMAT_YCbCr_420_SP */ + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_SP = 0x106, /* HAL_PIXEL_FORMAT_YCrCb_422_SP */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED= 0x107, /* HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED */ + HAL_PIXEL_FORMAT_EXYNOS_ARGB_8888 = 0x108, /* HAL_PIXEL_FORMAT_CUSTOM_ARGB_8888 */ + // support custom format for zero copy + /* HAL_PIXEL_FORMAT_CUSTOM_YCbCr_420_SP = 0x110 */ + /* HAL_PIXEL_FORMAT_CUSTOM_YCrCb_420_SP = 0x111, */ + /* HAL_PIXEL_FORMAT_CUSTOM_YCbCr_420_SP_TILED = 0x112, */ + /* HAL_PIXEL_FORMAT_CUSTOM_YCbCr_422_SP = 0x113, */ + /* HAL_PIXEL_FORMAT_CUSTOM_YCrCb_422_SP = 0x114, */ + /* HAL_PIXEL_FORMAT_CUSTOM_YCbCr_422_I = 0x115, */ + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_I = 0x116, /* HAL_PIXEL_FORMAT_CUSTOM_YCrCb_422_I */ + /* HAL_PIXEL_FORMAT_CUSTOM_CbYCrY_422_I = 0x117, */ + HAL_PIXEL_FORMAT_EXYNOS_CrYCbY_422_I = 0x118, /* HAL_PIXEL_FORMAT_CUSTOM_CrYCbY_422_I */ + /* HAL_PIXEL_FORMAT_CUSTOM_CbYCr_422_I = 0x11B, */ + + HAL_PIXEL_FORMAT_EXYNOS_YV12_M = 0x11C, /* HAL_PIXEL_FORMAT_EXYNOS_YV12 */ + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M = 0x11D, /* HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP */ + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL = 0x11E, /* HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL */ + + /* newly added formats */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P = 0x11F, + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP = 0x120, + + /* Interlace EXYNOS_YCbCr_420_SP_M */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV = 0x121, + + /* contiguous(single fd) custom formats */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN = 0x122, + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN = 0x123, + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED = 0x124, + + /* 10-bit format (8bit + separated 2bit) */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B = 0x125, + + /* 10-bit contiguous(single fd, 8bit + separated 2bit) custom formats */ + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B = 0x126, + + HAL_PIXEL_FORMAT_EXYNOS_MAX +}; + +/* for backward compatibility */ +#define HAL_PIXEL_FORMAT_CUSTOM_YCbCr_420_SP HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M +#define HAL_PIXEL_FORMAT_CUSTOM_YCrCb_420_SP HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M + +// Gamut (colorspace range) +enum { + 
HAL_PIXEL_GAMUT_DEFAULT = 0, + // Values range 0-255 + HAL_PIXEL_GAMUT_WIDE_8, + // Values range 16-235 + HAL_PIXEL_GAMUT_NARROW_8 +}; + +// Chromaticities (colorspace parameters) +enum { + HAL_PIXEL_CHROMA_DEFAULT = 0, + // BT.601 "Standard Definition" color space + HAL_PIXEL_CHROMA_BT601_8, + // BT.709 "High Definition" color space + HAL_PIXEL_CHROMA_BT709_8 +}; + +struct ADDRS { + unsigned int addr_y; + unsigned int addr_cbcr; + unsigned int buf_idx; + unsigned int reserved; +}; + +/* 12 Y/CbCr 4:2:0 64x32 macroblocks */ +#define V4L2_PIX_FMT_NV12T v4l2_fourcc('T', 'V', '1', '2') + +#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1)) +#define ALIGN_DOWN(x, a) ((x) - (x % a)) +#ifndef ALIGN +#define ALIGN(x, a) ALIGN_UP(x, a) +#endif +#ifndef ALIGN_TO_32B +#define ALIGN_TO_32B(x) ((((x) + (1 << 5) - 1) >> 5) << 5) +#endif +#ifndef ALIGN_TO_128B +#define ALIGN_TO_128B(x) ((((x) + (1 << 7) - 1) >> 7) << 7) +#endif +#ifndef ALIGN_TO_8KB +#define ALIGN_TO_8KB(x) ((((x) + (1 << 13) - 1) >> 13) << 13) +#endif + +#define GET_32BPP_FRAME_SIZE(w, h) (((w) * (h)) << 2) +#define GET_24BPP_FRAME_SIZE(w, h) (((w) * (h)) * 3) +#define GET_16BPP_FRAME_SIZE(w, h) (((w) * (h)) << 1) + +/* + * Convert hal_pixel_format to v4l2_pixel_format. + * + * @param hal_pixel_format + * hal_pixel_format[in] + * + * @return + * v4l2_pixel_format + */ +int HAL_PIXEL_FORMAT_2_V4L2_PIX( + int hal_pixel_format); + +/* + * Convert v4l2_pixel_format to hal_pixel_format. + * + * @param v4l2_pixel_format + * v4l2_pixel_format[in] + * + * @return + * hal_pixel_format + */ +int V4L2_PIX_2_HAL_PIXEL_FORMAT( + int v4l2_pixel_format); + +/* + * Get frame_size of hal_pixel_format. + * + * @param hal_pixel_format + * hal_pixel_format[in] + * + * @param width + * width[in] + * + * @param height + * height[in] + * + * @return + * frame_size + */ +unsigned int FRAME_SIZE( + int hal_pixel_format, + int width, + int height); + +int PLANAR_FRAME_SIZE( + int hal_pixel_format, + int width, + int height, + unsigned int *luma_size, + unsigned int *chroma_size); + +int NUM_PLANES(int hal_pixel_format); + + +/* + * Get bpp and plane of v4l2_pixel_format. + * + * @param v4l2_pixel_format + * v4l2_pixel_format[in] + * + * @param bpp + * address of bpp[out] + * + * @param planes + * address of planes[out] + * + * @return + * error code + */ +int V4L2_PIX_2_YUV_INFO( + unsigned int v4l2_pixel_format, + unsigned int *bpp, + unsigned int *planes); + +/* + * Get bpp of v4l2_pixel_format. + * + * @param v4l2_pixel_format + * v4l2_pixel_format[in] + * + * @return + * bpp + */ +int get_yuv_bpp( + unsigned int v4l2_pixel_format); + +/* + * Get plane of v4l2_pixel_format. + * + * @param v4l2_pixel_format + * v4l2_pixel_format[in] + * + * @return + * num of plane + */ +int get_yuv_planes( + unsigned int v4l2_pixel_format); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/include/exynos_gscaler.h b/include/exynos_gscaler.h new file mode 100644 index 0000000..fd98633 --- /dev/null +++ b/include/exynos_gscaler.h @@ -0,0 +1,518 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file exynos_gscaler.h + * \brief header file for Gscaler HAL + * \author ShinWon Lee (shinwon.lee@samsung.com) + * \date 2012/01/09 + * + * Revision History: + * - 2012/01/09 : ShinWon Lee(shinwon.lee@samsung.com) \n + * Create + * + * - 2012/02/07 : ShinWon Lee(shinwon.lee@samsung.com) \n + * Change file name to exynos_gscaler.h + * + * - 2012/02/09 : Sangwoo, Parkk(sw5771.park@samsung.com) \n + * Use Multiple Gscaler by Multiple Process + * + * - 2012/02/20 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Add exynos_gsc_set_rotation() API + * + * - 2012/02/20 : ShinWon Lee(shinwon.lee@samsung.com) \n + * Add size constrain + * + */ + +/*! + * \defgroup exynos_gscaler + * \brief API for gscaler + * \addtogroup Exynos + */ +#include "Exynos_log.h" + +#ifndef EXYNOS_GSCALER_H_ +#define EXYNOS_GSCALER_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +//#define EXYNOS_GSC_TRACE 1 +#ifdef EXYNOS_GSC_TRACE +#define EXYNOS_GSC_LOG_TAG "Exynos_gscaler" +#define Exynos_gsc_In() Exynos_Log(EXYNOS_DEV_LOG_DEBUG, EXYNOS_GSC_LOG_TAG, "%s In , Line: %d", __FUNCTION__, __LINE__) +#define Exynos_gsc_Out() Exynos_Log(EXYNOS_DEV_LOG_DEBUG, EXYNOS_GSC_LOG_TAG, "%s Out , Line: %d", __FUNCTION__, __LINE__) +#else +#define Exynos_gsc_In() ((void *)0) +#define Exynos_gsc_Out() ((void *)0) +#endif + +typedef struct { + uint32_t x; + uint32_t y; + uint32_t w; + uint32_t h; + uint32_t fw; + uint32_t fh; + uint32_t format; + unsigned long yaddr; + unsigned long uaddr; + unsigned long vaddr; + uint32_t rot; + uint32_t cacheable; + uint32_t drmMode; + uint32_t narrowRgb; + int acquireFenceFd; + int releaseFenceFd; + int mem_type; + uint32_t pre_multi; +} exynos_mpp_img; + +enum SRC_BL_OP { + /* [0, 0] */ + SRC_BL_OP_CLR = 1, + /* [Sa, Sc] */ + SRC_BL_OP_SRC, + /* [Da, Dc] */ + SRC_BL_OP_DST, + /* [Sa + (1 - Sa)*Da, Rc = Sc + (1 - Sa)*Dc] */ + SRC_BL_OP_SRC_OVER, + /* [Sa + (1 - Sa)*Da, Rc = Dc + (1 - Da)*Sc] */ + SRC_BL_OP_DST_OVER, + /* [Sa * Da, Sc * Da] */ + SRC_BL_OP_SRC_IN, + /* [Sa * Da, Sa * Dc] */ + SRC_BL_OP_DST_IN, + /* [Sa * (1 - Da), Sc * (1 - Da)] */ + SRC_BL_OP_SRC_OUT, + /* [Da * (1 - Sa), Dc * (1 - Sa)] */ + SRC_BL_OP_DST_OUT, + /* [Da, Sc * Da + (1 - Sa) * Dc] */ + SRC_BL_OP_SRC_ATOP, + /* [Sa, Sc * (1 - Da) + Sa * Dc ] */ + SRC_BL_OP_DST_ATOP, + /* [-(Sa * Da), Sc * (1 - Da) + (1 - Sa) * Dc] */ + SRC_BL_OP_XOR, + /* [Sa + Da - Sa*Da, Sc*(1 - Da) + Dc*(1 - Sa) + min(Sc, Dc)] */ + SRC_BL_OP_DARKEN, + /* [Sa + Da - Sa*Da, Sc*(1 - Da) + Dc*(1 - Sa) + max(Sc, Dc)] */ + SRC_BL_OP_LIGHTEN, + /** [Sa * Da, Sc * Dc] */ + SRC_BL_OP_MULTIPLY, + /* [Sa + Da - Sa * Da, Sc + Dc - Sc * Dc] */ + SRC_BL_OP_SCREEN, + /* Saturate(S + D) */ + SRC_BL_OP_ADD +}; + +enum colorspace { + COLORSPACE_SMPTE170M, + COLORSPACE_SMPTE240M, + COLORSPACE_REC709, + COLORSPACE_BT878, + COLORSPACE_470_SYSTEM_M, + COLORSPACE_470_SYSTEM_BG, + COLORSPACE_JPEG, + COLORSPACE_SRGB, +}; + +struct SrcGlobalAlpha { + uint32_t enable; + unsigned int val; +}; + +struct CSC_Spec{ + uint32_t enable; // set 'true' for user-defined + enum colorspace space; + uint32_t wide; +}; + +struct SrcBlendInfo { + enum SRC_BL_OP blop; + unsigned int srcblendfmt; + unsigned int srcblendhpos; + unsigned int srcblendvpos; + unsigned int srcblendpremulti; + unsigned int srcblendstride; + unsigned int srcblendwidth; + unsigned int srcblendheight; + struct SrcGlobalAlpha globalalpha; + struct CSC_Spec cscspec; +}; + +/* + * Create libgscaler 
handle. + * Gscaler dev_num is dynamically changed. + * + * \ingroup exynos_gscaler + * + * \return + * libgscaler handle + */ +void *exynos_gsc_create( + void); + +/*! + * Create exclusive libgscaler handle. + * Other module can't use dev_num of Gscaler. + * + * \ingroup exynos_gscaler + * + * \param dev_num + * gscaler dev_num[in] + * \param gsc_mode + *It should be set to GSC_M2M_MODE or GSC_OUTPUT_MODE. + * + *\param out_mode + *It should be set to GSC_OUT_FIMD or GSC_OUT_TV. + * + * \return + * libgscaler handle + */ +void *exynos_gsc_create_exclusive( + int dev_num, + int gsc_mode, + int out_mode, + int allow_drm); + +/*! + * Destroy libgscaler handle + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + */ +void exynos_gsc_destroy( + void *handle); + +/*! + * Set csc equation property + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + * + * \param eq_auto + * csc mode (0: user, 1: auto)[in] + * + * \param range_full + * csc range (0: narrow, 1: full)[in] + * + * \param v4l2_colorspace + * ITU_R v4l2 colorspace(1: 601, 3: 709)[in] + */ +int exynos_gsc_set_csc_property( + void *handle, + unsigned int eq_auto, + unsigned int range_full, + unsigned int v4l2_colorspace); + +/*! + * Set source format. + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + * + * \param width + * image width[in] + * + * \param height + * image height[in] + * + * \param crop_left + * image left crop size[in] + * + * \param crop_top + * image top crop size[in] + * + * \param crop_width + * cropped image width[in] + * + * \param crop_height + * cropped image height[in] + * + * \param v4l2_colorformat + * color format[in] + * + * \param cacheable + * ccacheable[in] + * + * \param mode_drm + * mode_drm[in] + * + * \return + * error code + */ +int exynos_gsc_set_src_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm); + +/*! + * Set destination format. + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + * + * \param width + * image width[in] + * + * \param height + * image height[in] + * + * \param crop_left + * image left crop size[in] + * + * \param crop_top + * image top crop size[in] + * + * \param crop_width + * cropped image width[in] + * + * \param crop_height + * cropped image height[in] + * + * \param v4l2_colorformat + * color format[in] + * + * \param cacheable + * ccacheable[in] + * + * \param mode_drm + * mode_drm[in] + * + * \return + * error code + */ +int exynos_gsc_set_dst_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm); + +/*! + * Set rotation. + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + * + * \param rotation + * image rotation. It should be multiple of 90[in] + * + * \param flip_horizontal + * image flip_horizontal[in] + * + * \param flip_vertical + * image flip_vertical[in] + * + * \return + * error code + */ +int exynos_gsc_set_rotation( + void *handle, + int rotation, + int flip_horizontal, + int flip_vertical); + +/*! 
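Put together with the buffer and conversion calls declared just below (exynos_gsc_set_src_addr, exynos_gsc_set_dst_addr, exynos_gsc_convert) and the GSC_MEM_* values at the end of this header, the blocking gscaler path looks roughly as follows. This is a sketch under a few assumptions: any non-zero return is treated as failure, and dma-buf fds are assumed to ride in the addr[] slots when GSC_MEM_DMABUF is selected, which the header implies but does not state.

    #include "exynos_gscaler.h"

    /* CSC + vertical flip of one NV12M frame into RGB32, same geometry. */
    static int gsc_nv12m_to_rgb32(void *src_addr[3], void *dst_addr[3],
                                  unsigned int w, unsigned int h)
    {
        void *gsc = exynos_gsc_create();     /* any free gscaler instance */
        if (!gsc)
            return -1;

        int ret = -1;
        if (exynos_gsc_set_csc_property(gsc, 0 /* user-defined eq */, 1 /* full range */,
                                        3 /* BT.709 */) ||
            exynos_gsc_set_src_format(gsc, w, h, 0, 0, w, h,
                                      V4L2_PIX_FMT_NV12M, 1 /* cacheable */, 0 /* non-DRM */) ||
            exynos_gsc_set_dst_format(gsc, w, h, 0, 0, w, h,
                                      V4L2_PIX_FMT_RGB32, 1, 0) ||
            exynos_gsc_set_rotation(gsc, 0, 0, 1 /* vertical flip */) ||
            exynos_gsc_set_src_addr(gsc, src_addr, GSC_MEM_DMABUF, -1 /* no fence */) ||
            exynos_gsc_set_dst_addr(gsc, dst_addr, GSC_MEM_DMABUF, -1))
            goto out;

        ret = exynos_gsc_convert(gsc);       /* one-shot conversion */
    out:
        exynos_gsc_destroy(gsc);
        return ret;
    }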
+ * Set source buffer + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + * + * \param addr + * buffer pointer array[in] + * + * \param acquireFenceFd + * acquire fence fd for the buffer or -1[in] + * + * \return + * error code + */ +int exynos_gsc_set_src_addr( + void *handle, + void *addr[3], + int mem_type, + int acquireFenceFd); + +/*! + * Set destination buffer + * + * \param handle + * libgscaler handle[in] + * + * \param addr + * buffer pointer array[in] + * + * \param acquireFenceFd + * acquire fence fd for the buffer or -1[in] + * + * \return + * error code + */ +int exynos_gsc_set_dst_addr( + void *handle, + void *addr[3], + int mem_type, + int acquireFenceFd); + +/*! + * Convert color space with presetup color format + * + * \ingroup exynos_gscaler + * + * \param handle + * libgscaler handle[in] + * + * \return + * error code + */ +int exynos_gsc_convert( + void *handle); + +/* + * API for setting GSC subdev crop + * Used in OTF mode + */ +int exynos_gsc_subdev_s_crop( + void *handle, + exynos_mpp_img *src_img, + exynos_mpp_img *dst_img); + +/* +*api for setting the GSC config. +It configures the GSC for given config +*/ +int exynos_gsc_config_exclusive( + void *handle, + exynos_mpp_img *src_img, + exynos_mpp_img *dst_img); + +/* +*api for GSC-OUT run. +It queues the srcBuf to GSC and deques a buf from driver. +It should be called after configuring the GSC. +*/ +int exynos_gsc_run_exclusive( + void *handle, + exynos_mpp_img *src_img, + exynos_mpp_img *dst_img); + +/*! + * Create exclusive libgscaler blend handle. + * Other module can't use dev_num of Gscaler. + * + * \ingroup exynos_gscaler + * + * \param dev_num + * gscaler dev_num[in] + * \param gsc_mode + * \return + * libgscaler handle + */ +void *exynos_gsc_create_blend_exclusive( + int dev_num, + int gsc_mode, + int out_mode, + int allow_drm); + +/* +*api for setting the GSC blend config. +It configures the GSC for given config +*/ +int exynos_gsc_config_blend_exclusive( + void *handle, + exynos_mpp_img *src_img, + exynos_mpp_img *dst_img, + struct SrcBlendInfo *srcblendinfo); + +/* + * Blocks until the current frame is done processing. + */ +int exynos_gsc_wait_frame_done_exclusive +(void *handle); + +/* +*api for GSC stop. +It stops the GSC OUT streaming. +*/ +int exynos_gsc_stop_exclusive +(void *handle); + +/* +*api for GSC free_and_close. +*/ +int exynos_gsc_free_and_close +(void *handle); + +enum { + GSC_M2M_MODE = 0, + GSC_OUTPUT_MODE, + GSC_CAPTURE_MODE, + GSC_RESERVED_MODE, +}; + +/*flag info */ +enum { + GSC_DUMMY = 0, + GSC_OUT_FIMD, + GSC_OUT_TV, + GSC_RESERVED, +}; + +enum { + GSC_DONE_CNG_CFG = 0, + GSC_NEED_CNG_CFG, +}; + +enum { + GSC_MEM_MMAP = 1, + GSC_MEM_USERPTR, + GSC_MEM_OVERLAY, + GSC_MEM_DMABUF, +}; + +#ifdef __cplusplus +} +#endif + +#endif /*EXYNOS_GSCALER_H_*/ diff --git a/include/exynos_ion.h b/include/exynos_ion.h new file mode 100644 index 0000000..018d240 --- /dev/null +++ b/include/exynos_ion.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2014 Samsung Electronics Co., Ltd. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _LIB_ION_H_ +#define _LIB_ION_H_ + +#define ION_HEAP_SYSTEM_MASK (1 << 0) +#define ION_HEAP_EXYNOS_CONTIG_MASK (1 << 4) +#define ION_EXYNOS_VIDEO_EXT_MASK (1 << 31) +#define ION_EXYNOS_VIDEO_EXT2_MASK (1 << 29) +#define ION_EXYNOS_FIMD_VIDEO_MASK (1 << 28) +#define ION_EXYNOS_GSC_MASK (1 << 27) +#define ION_EXYNOS_MFC_OUTPUT_MASK (1 << 26) +#define ION_EXYNOS_MFC_INPUT_MASK (1 << 25) +#define ION_EXYNOS_G2D_WFD_MASK (1 << 22) +#define ION_EXYNOS_VIDEO_MASK (1 << 21) + +enum { + ION_EXYNOS_HEAP_ID_CRYPTO = 1, + ION_EXYNOS_HEAP_ID_VIDEO_FW = 2, + ION_EXYNOS_HEAP_ID_VIDEO_STREAM = 3, + ION_EXYNOS_HEAP_ID_RESERVED = 4, + ION_EXYNOS_HEAP_ID_VIDEO_FRAME = 5, + ION_EXYNOS_HEAP_ID_VIDEO_SCALER = 6, + ION_EXYNOS_HEAP_ID_VIDEO_NFW = 7, + ION_EXYNOS_HEAP_ID_GPU_CRC = 8, + ION_EXYNOS_HEAP_ID_GPU_BUFFER = 9, + ION_EXYNOS_HEAP_ID_CAMERA = 10, +}; + +#define EXYNOS_ION_HEAP_CRYPTO_MASK (1 << ION_EXYNOS_HEAP_ID_CRYPTO) +#define EXYNOS_ION_HEAP_VIDEO_FW_MASK (1 << ION_EXYNOS_HEAP_ID_VIDEO_FW) +#define EXYNOS_ION_HEAP_VIDEO_STREAM_MASK (1 << ION_EXYNOS_HEAP_ID_VIDEO_STREAM) +#define EXYNOS_ION_HEAP_VIDEO_FRAME_MASK (1 << ION_EXYNOS_HEAP_ID_VIDEO_FRAME) +#define EXYNOS_ION_HEAP_VIDEO_SCALER_MASK (1 << ION_EXYNOS_HEAP_ID_VIDEO_SCALER) +#define EXYNOS_ION_HEAP_VIDEO_NFW_MASK (1 << ION_EXYNOS_HEAP_ID_VIDEO_NFW) +#define EXYNOS_ION_HEAP_GPU_CRC (1 << ION_EXYNOS_HEAP_ID_GPU_CRC) +#define EXYNOS_ION_HEAP_GPU_BUFFER (1 << ION_EXYNOS_HEAP_ID_GPU_BUFFER) +#define EXYNOS_ION_HEAP_CAMERA (1 << ION_EXYNOS_HEAP_ID_CAMERA) + +#endif /* _LIB_ION_H_ */ diff --git a/include/exynos_scaler.h b/include/exynos_scaler.h new file mode 100644 index 0000000..ebd8062 --- /dev/null +++ b/include/exynos_scaler.h @@ -0,0 +1,455 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
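The per-heap masks defined in exynos_ion.h above are what a client passes as the heap_mask argument of ion_alloc(), declared in include/ion.h later in this patch (note that exynos_ion.h and ion.h both guard themselves with _LIB_ION_H_, so a translation unit effectively sees only whichever of the two it includes first). A minimal allocation sketch, assuming the ion.h allocator API is visible:

    #include <stddef.h>

    /* Allocate a cached buffer from the Exynos VIDEO_FRAME heap. ion_client,
     * ion_buffer, ion_client_create(), ion_alloc(), ion_client_destroy() and
     * ION_FLAG_CACHED come from include/ion.h further down in this patch. */
    static ion_buffer alloc_video_frame(size_t len)
    {
        ion_client client = ion_client_create();
        if (client < 0)
            return -1;

        ion_buffer buf = ion_alloc(client, len, 0 /* no extra alignment */,
                                   EXYNOS_ION_HEAP_VIDEO_FRAME_MASK,
                                   ION_FLAG_CACHED);

        /* Per the ion.h notes, an ion_buffer is a file descriptor of its own,
         * so it can be kept after the client is torn down. */
        ion_client_destroy(client);
        return buf;
    }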
+ * \file exynos_scaler.c + * \brief header file for Scaler HAL + * \author Sunyoung Kang (sy0816.kang@samsung.com) + * \date 2013/02/01 + * + * Revision History: + * - 2013.02.01 : Sunyoung Kang (sy0816.kang@samsung.com) \n + * Create + * + * - 2013.04.26 : Cho KyongHo (pullip.cho@samsung.com \n + * Library rewrite + * + */ + +#ifndef _EXYNOS_SCALER_H_ +#define _EXYNOS_SCALER_H_ + + +#include +#include +#include + +#include "exynos_format.h" +#include "exynos_v4l2.h" +#include "exynos_gscaler.h" + +#define SC_DEV_NODE "/dev/video" +#define SC_NODE(x) (50 + x) + +#define SC_NUM_OF_PLANES (3) + +#define V4L2_PIX_FMT_NV12_RGB32 v4l2_fourcc('N', 'V', '1', 'R') +#define V4L2_PIX_FMT_NV12N_RGB32 v4l2_fourcc('N', 'N', '1', 'R') +#define V4L2_PIX_FMT_NV12M_RGB32 v4l2_fourcc('N', 'V', 'R', 'G') +#define V4L2_PIX_FMT_NV12M_BGR32 v4l2_fourcc('N', 'V', 'B', 'G') +#define V4L2_PIX_FMT_NV12M_RGB565 v4l2_fourcc('N', 'V', 'R', '6') +#define V4L2_PIX_FMT_NV12M_RGB444 v4l2_fourcc('N', 'V', 'R', '4') +#define V4L2_PIX_FMT_NV12M_RGB555X v4l2_fourcc('N', 'V', 'R', '5') +#define V4L2_PIX_FMT_NV12MT_16X16_RGB32 v4l2_fourcc('V', 'M', 'R', 'G') +#define V4L2_PIX_FMT_NV21M_RGB32 v4l2_fourcc('V', 'N', 'R', 'G') +#define V4L2_PIX_FMT_NV21M_BGR32 v4l2_fourcc('V', 'N', 'B', 'G') +#define V4L2_PIX_FMT_NV21_RGB32 v4l2_fourcc('V', 'N', '1', 'R') +#define V4L2_PIX_FMT_YVU420_RGB32 v4l2_fourcc('Y', 'V', 'R', 'G') + +#define V4L2_CID_2D_SRC_BLEND_SET_FMT (V4L2_CID_EXYNOS_BASE + 116) +#define V4L2_CID_2D_SRC_BLEND_SET_H_POS (V4L2_CID_EXYNOS_BASE + 117) +#define V4L2_CID_2D_SRC_BLEND_SET_V_POS (V4L2_CID_EXYNOS_BASE + 118) +#define V4L2_CID_2D_SRC_BLEND_FMT_PREMULTI (V4L2_CID_EXYNOS_BASE + 119) +#define V4L2_CID_2D_SRC_BLEND_SET_STRIDE (V4L2_CID_EXYNOS_BASE + 120) +#define V4L2_CID_2D_SRC_BLEND_SET_WIDTH (V4L2_CID_EXYNOS_BASE + 121) +#define V4L2_CID_2D_SRC_BLEND_SET_HEIGHT (V4L2_CID_EXYNOS_BASE + 122) + +#ifdef SCALER_USE_LOCAL_CID +#define V4L2_CID_GLOBAL_ALPHA (V4L2_CID_EXYNOS_BASE + 1) +#define V4L2_CID_2D_BLEND_OP (V4L2_CID_EXYNOS_BASE + 103) +#define V4L2_CID_2D_COLOR_FILL (V4L2_CID_EXYNOS_BASE + 104) +#define V4L2_CID_2D_DITH (V4L2_CID_EXYNOS_BASE + 105) +#define V4L2_CID_2D_FMT_PREMULTI (V4L2_CID_EXYNOS_BASE + 106) +#endif + +#define LIBSC_V4L2_CID_DNOISE_FT (V4L2_CID_EXYNOS_BASE + 150) +#define LIBSC_M2M1SHOT_OP_FILTER_SHIFT (28) +#define LIBSC_M2M1SHOT_OP_FILTER_MASK (0xf << 28) + +// libgscaler's internal use only +typedef enum _HW_SCAL_ID { + HW_SCAL0 = 4, + HW_SCAL1, + HW_SCAL2, + HW_SCAL_MAX, +} HW_SCAL_ID; + +// argument of non-blocking api +typedef exynos_mpp_img exynos_sc_img; + +#ifdef __cplusplus +extern "C" { +#endif + +/*! + * Create libscaler handle + * + * \ingroup exynos_scaler + * + * \param dev_num + * scaler dev_num[in] + * + * \return + * libscaler handle + */ +void *exynos_sc_create(int dev_num); + +/*! + * Destroy libscaler handle + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle[in] + */ +int exynos_sc_destroy(void *handle); + +/*! + * Convert color space with presetup color format + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle[in] + * + * \return + * error code + */ +int exynos_sc_convert(void *handle); + +/*! 
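The SC_DEV_NODE / SC_NODE pair near the top of this header implies that scaler instance N sits behind V4L2 video node 50 + N; presumably this is the path exynos_sc_create() opens. A hypothetical illustration of how the two macros compose (the snprintf composition itself is an assumption, not something the header prescribes):

    #include <stdio.h>

    /* dev_num 0 -> "/dev/video50", dev_num 1 -> "/dev/video51", ... */
    static void sc_node_path(int dev_num, char *buf, size_t len)
    {
        snprintf(buf, len, "%s%d", SC_DEV_NODE, SC_NODE(dev_num));
    }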
+ * Convert color space with presetup color format + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle + * + * \param csc_range + * csc narrow/wide property + * + * \param v4l2_colorspace + * csc equation property + * + * \param filter + * denoise filter info + * + * \return + * error code + */ +int exynos_sc_set_csc_property( + void *handle, + unsigned int csc_range, + unsigned int v4l2_colorspace, + unsigned int filter); + +/*! + * Set source format. + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle[in] + * + * \param width + * image width[in] + * + * \param height + * image height[in] + * + * \param crop_left + * image left crop size[in] + * + * \param crop_top + * image top crop size[in] + * + * \param crop_width + * cropped image width[in] + * + * \param crop_height + * cropped image height[in] + * + * \param v4l2_colorformat + * color format[in] + * + * \param cacheable + * ccacheable[in] + * + * \param mode_drm + * mode_drm[in] + * + * \param premultiplied + * pre-multiplied format[in] + * + * \return + * error code + */ +int exynos_sc_set_src_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm, + unsigned int premultiplied); + +/*! + * Set destination format. + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle[in] + * + * \param width + * image width[in] + * + * \param height + * image height[in] + * + * \param crop_left + * image left crop size[in] + * + * \param crop_top + * image top crop size[in] + * + * \param crop_width + * cropped image width[in] + * + * \param crop_height + * cropped image height[in] + * + * \param v4l2_colorformat + * color format[in] + * + * \param cacheable + * ccacheable[in] + * + * \param mode_drm + * mode_drm[in] + * + * \param premultiplied + * pre-multiplied format[in] + * + * \return + * error code + */ +int exynos_sc_set_dst_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm, + unsigned int premultiplied); + +/*! + * Set source buffer + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle[in] + * + * \param addr + * buffer pointer array[in] + * + * \param mem_type + * memory type[in] + * + * \param acquireFenceFd + * acquire fence fd for the buffer or -1[in] + * + * \return + * error code + */ + +int exynos_sc_set_src_addr( + void *handle, + void *addr[SC_NUM_OF_PLANES], + int mem_type, + int acquireFenceFd); + +/*! + * Set destination buffer + * + * \param handle + * libscaler handle[in] + * + * \param addr + * buffer pointer array[in] + * + * \param mem_type + * memory type[in] + * + * \param acquireFenceFd + * acquire fence fd for the buffer or -1[in] + * + * \return + * error code + */ +int exynos_sc_set_dst_addr( + void *handle, + void *addr[SC_NUM_OF_PLANES], + int mem_type, + int acquireFenceFd); + +/*! + * Set rotation. + * + * \ingroup exynos_scaler + * + * \param handle + * libscaler handle[in] + * + * \param rot + * image rotation. 
It should be multiple of 90[in] + * + * \param flip_h + * image flip_horizontal[in] + * + * \param flip_v + * image flip_vertical[in] + * + * \return + * error code + */ +int exynos_sc_set_rotation( + void *handle, + int rot, + int flip_h, + int flip_v); + +////// non-blocking ///// + +void *exynos_sc_create_exclusive( + int dev_num, + int allow_drm); + +int exynos_sc_csc_exclusive(void *handle, + unsigned int range_full, + unsigned int v4l2_colorspace); + +int exynos_sc_config_exclusive( + void *handle, + exynos_sc_img *src_img, + exynos_sc_img *dst_img); + +int exynos_sc_run_exclusive( + void *handle, + exynos_sc_img *src_img, + exynos_sc_img *dst_img); + +void *exynos_sc_create_blend_exclusive( + int dev_num, + int allow_drm); + +int exynos_sc_config_blend_exclusive( + void *handle, + exynos_sc_img *src_img, + exynos_sc_img *dst_img, + struct SrcBlendInfo *srcblendinfo); + +int exynos_sc_wait_frame_done_exclusive +(void *handle); + +int exynos_sc_stop_exclusive +(void *handle); + +int exynos_sc_free_and_close +(void *handle); + + +/****************************************************************************** + ******** API for Copy Pixels between RGB data ******************************** + ******************************************************************************/ + +/*! + * Description of an image for both of the source and the destination. + * + * \ingroup exynos_scaler + */ +struct exynos_sc_pxinfo_img +{ + void *addr; + unsigned int width; + unsigned int height; + unsigned int crop_left; + unsigned int crop_top; + unsigned int crop_width; + unsigned int crop_height; + unsigned int pxfmt; // enum EXYNOS_SC_FMT_PXINFO +}; + +/*! + * Description of a pixel copy + * + * \ingroup exynos_scaler + */ +struct exynos_sc_pxinfo { + struct exynos_sc_pxinfo_img src; + struct exynos_sc_pxinfo_img dst; + unsigned short rotate; // 0 ~ 360 + char hflip; // non-zero value for hflip + char vflip; // non-zero value for vflip +}; + +/*! + * Pixel format definition for pixel copy + * + * \ingroup exynos_scaler + */ +enum SC_FMT_PXINFO { + EXYNOS_SC_FMT_RGB32 = 0x10, + EXYNOS_SC_FMT_BGR32, + EXYNOS_SC_FMT_RGB565, + EXYNOS_SC_FMT_RGB555X, + EXYNOS_SC_FMT_RGB444, +}; + +/*! + * Copy pixel data from RGB to RGB + * + * \ingroup exynos_scaler + * + * \param pxinfo + * information for pixel data copy [in] + * + * \param dev_num + * Scaler H/W instance number. Starts from 0 [in] + * + * \return + * true on success in copying pixel data. + * false on failure. + */ +bool exynos_sc_copy_pixels( + struct exynos_sc_pxinfo *pxinfo, + int dev_num); + +#ifdef __cplusplus +} +#endif + +#endif /* _EXYNOS_SCALER_H_ */ diff --git a/include/exynos_v4l2.h b/include/exynos_v4l2.h new file mode 100644 index 0000000..262cc1f --- /dev/null +++ b/include/exynos_v4l2.h @@ -0,0 +1,208 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
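The exynos_sc_copy_pixels() entry point documented at the end of exynos_scaler.h above is self-contained: fill in a struct exynos_sc_pxinfo for source and destination and hand it to a scaler instance. A minimal sketch, assuming both images are plain mapped user pointers and using H/W instance 0:

    #include <string.h>
    #include "exynos_scaler.h"

    /* RGB32 -> RGB565 copy with a 90-degree rotation, no flips, full-frame crops. */
    static bool copy_rgb32_to_rgb565(void *src, unsigned int src_w, unsigned int src_h,
                                     void *dst, unsigned int dst_w, unsigned int dst_h)
    {
        struct exynos_sc_pxinfo px;
        memset(&px, 0, sizeof(px));      /* crop_left/crop_top stay 0 */

        px.src.addr        = src;
        px.src.width       = src_w;
        px.src.height      = src_h;
        px.src.crop_width  = src_w;
        px.src.crop_height = src_h;
        px.src.pxfmt       = EXYNOS_SC_FMT_RGB32;

        px.dst.addr        = dst;
        px.dst.width       = dst_w;
        px.dst.height      = dst_h;
        px.dst.crop_width  = dst_w;
        px.dst.crop_height = dst_h;
        px.dst.pxfmt       = EXYNOS_SC_FMT_RGB565;

        px.rotate = 90;                  /* 0 ~ 360 per the struct comment above */

        return exynos_sc_copy_pixels(&px, 0 /* scaler H/W instance 0 */);
    }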
+ * \file exynos_v4l2.h + * \brief header file for libv4l2 + * \author Jinsung Yang (jsgood.yang@samsung.com) + * \date 2011/12/15 + * + * Revision History: + * - 2011/12/15 : Jinsung Yang (jsgood.yang@samsung.com) \n + * Initial version + * + */ + +/*! + * \defgroup exynos_v4l2 + * \brief API for v4l2 + * \addtogroup Exynos + */ + +#ifndef __EXYNOS_LIB_V4L2_H__ +#define __EXYNOS_LIB_V4L2_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +/* V4L2 */ +#include +#include "videodev2.h" /* vendor specific videodev2.h */ + +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_open(const char *filename, int oflag, ...); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_open_devname(const char *devname, int oflag, ...); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_close(int fd); +/*! \ingroup exynos_v4l2 */ +bool exynos_v4l2_enuminput(int fd, int index, char *input_name_buf); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_s_input(int fd, int index); +/*! \ingroup exynos_v4l2 */ +bool exynos_v4l2_querycap(int fd, unsigned int need_caps); +/*! \ingroup exynos_v4l2 */ +bool exynos_v4l2_enum_fmt(int fd, enum v4l2_buf_type type, unsigned int fmt); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_g_fmt(int fd, struct v4l2_format *fmt); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_s_fmt(int fd, struct v4l2_format *fmt); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_try_fmt(int fd, struct v4l2_format *fmt); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_reqbufs(int fd, struct v4l2_requestbuffers *req); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_querybuf(int fd, struct v4l2_buffer *buf); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_qbuf(int fd, struct v4l2_buffer *buf); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_dqbuf(int fd, struct v4l2_buffer *buf); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_streamon(int fd, enum v4l2_buf_type type); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_streamoff(int fd, enum v4l2_buf_type type); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_cropcap(int fd, struct v4l2_cropcap *crop); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_g_crop(int fd, struct v4l2_crop *crop); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_s_crop(int fd, struct v4l2_crop *crop); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_g_ctrl(int fd, unsigned int id, int *value); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_s_ctrl(int fd, unsigned int id, int value); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_prepare(int fd, struct v4l2_buffer *arg); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_g_parm(int fd, struct v4l2_streamparm *streamparm); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_s_parm(int fd, struct v4l2_streamparm *streamparm); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_g_ext_ctrl(int fd, struct v4l2_ext_controls *ctrl); +/*! \ingroup exynos_v4l2 */ +int exynos_v4l2_s_ext_ctrl(int fd, struct v4l2_ext_controls *ctrl); + +/* V4L2_SUBDEV */ +#include + +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_open(const char *filename, int oflag, ...); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_get_node_num(const char *devname, int oflag, ...); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_open_devname(const char *devname, int oflag, ...); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_close(int fd); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_enum_frame_size(int fd, struct v4l2_subdev_frame_size_enum *frame_size_enum); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_g_fmt(int fd, struct v4l2_subdev_format *fmt); +/*! 
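The exynos_v4l2_* wrappers above map one-to-one onto the usual V4L2 ioctls, so a device bring-up reads like a plain V4L2 sequence with the wrapper names substituted. A rough sketch of a single-planar capture setup, assuming the wrappers follow the 0-on-success convention of the underlying ioctls and that exynos_v4l2_querycap() returns true when the requested capabilities are present; real buffer export and the qbuf/dqbuf loop are left to the caller.

    #include <fcntl.h>
    #include <string.h>
    #include "exynos_v4l2.h"   /* pulls in the vendor videodev2.h */

    static int start_capture(const char *node, unsigned int w, unsigned int h)
    {
        int fd = exynos_v4l2_open(node, O_RDWR);
        if (fd < 0)
            return -1;

        if (!exynos_v4l2_querycap(fd, V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING))
            goto err;

        struct v4l2_format fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.type                = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        fmt.fmt.pix.width       = w;
        fmt.fmt.pix.height      = h;
        fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
        if (exynos_v4l2_s_fmt(fd, &fmt))
            goto err;

        struct v4l2_requestbuffers req;
        memset(&req, 0, sizeof(req));
        req.count  = 4;
        req.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_DMABUF;
        if (exynos_v4l2_reqbufs(fd, &req))
            goto err;

        if (exynos_v4l2_streamon(fd, V4L2_BUF_TYPE_VIDEO_CAPTURE))
            goto err;

        return fd;   /* caller queues/dequeues buffers and eventually streams off */
    err:
        exynos_v4l2_close(fd);
        return -1;
    }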
\ingroup exynos_v4l2 */ +int exynos_subdev_s_fmt(int fd, struct v4l2_subdev_format *fmt); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_g_crop(int fd, struct v4l2_subdev_crop *crop); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_s_crop(int fd, struct v4l2_subdev_crop *crop); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_enum_frame_interval(int fd, struct v4l2_subdev_frame_interval_enum *frame_internval_enum); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_g_frame_interval(int fd, struct v4l2_subdev_frame_interval *frame_internval_enum); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_s_frame_interval(int fd, struct v4l2_subdev_frame_interval *frame_internval_enum); +/*! \ingroup exynos_v4l2 */ +int exynos_subdev_enum_mbus_code(int fd, struct v4l2_subdev_mbus_code_enum *mbus_code_enum); + +/* MEDIA CONTORLLER */ +#include + +/*! media_link + * \ingroup exynos_v4l2 + */ +struct media_link { + struct media_pad *source; + struct media_pad *sink; + struct media_link *twin; + __u32 flags; + __u32 padding[3]; +}; + +/*! media_link + * \ingroup exynos_v4l2 + */ +struct media_pad { + struct media_entity *entity; + __u32 index; + __u32 flags; + __u32 padding[3]; +}; + +/*! media_link + * \ingroup exynos_v4l2 + */ +struct media_entity { + struct media_device *media; + struct media_entity_desc info; + struct media_pad *pads; + struct media_link *links; + unsigned int max_links; + unsigned int num_links; + + char devname[32]; + int fd; + __u32 padding[6]; +}; + +/*! media_link + * \ingroup exynos_v4l2 + */ +struct media_device { + int fd; + struct media_entity *entities; + unsigned int entities_count; + void (*debug_handler)(void *, ...); + void *debug_priv; + __u32 padding[6]; +}; + +/*! \ingroup exynos_v4l2 */ +struct media_device *exynos_media_open(const char *filename); +/*! \ingroup exynos_v4l2 */ +void exynos_media_close(struct media_device *media); +/*! \ingroup exynos_v4l2 */ +struct media_pad *exynos_media_entity_remote_source(struct media_pad *pad); +/*! \ingroup exynos_v4l2 */ +struct media_entity *exynos_media_get_entity_by_name(struct media_device *media, const char *name, size_t length); +/*! \ingroup exynos_v4l2 */ +struct media_entity *exynos_media_get_entity_by_id(struct media_device *media, __u32 id); +/*! \ingroup exynos_v4l2 */ +int exynos_media_setup_link(struct media_device *media, struct media_pad *source, struct media_pad *sink, __u32 flags); +/*! \ingroup exynos_v4l2 */ +int exynos_media_reset_links(struct media_device *media); +/*! \ingroup exynos_v4l2 */ +struct media_pad *exynos_media_parse_pad(struct media_device *media, const char *p, char **endp); +/*! \ingroup exynos_v4l2 */ +struct media_link *exynos_media_parse_link(struct media_device *media, const char *p, char **endp); +/*! \ingroup exynos_v4l2 */ +int exynos_media_parse_setup_link(struct media_device *media, const char *p, char **endp); +/*! \ingroup exynos_v4l2 */ +int exynos_media_parse_setup_links(struct media_device *media, const char *p); + +#ifdef __cplusplus +} +#endif + +#endif /* __EXYNOS_LIB_V4L2_H__ */ diff --git a/include/gralloc_priv.h b/include/gralloc_priv.h new file mode 100644 index 0000000..9379e98 --- /dev/null +++ b/include/gralloc_priv.h @@ -0,0 +1,220 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GRALLOC_PRIV_H_ +#define GRALLOC_PRIV_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +enum { + PREFER_COMPRESSION_NO_CHANGE = 0x00, + PREFER_COMPRESSION_ENABLE = 0x01, + PREFER_COMPRESSION_DISABLE = 0x02, +}; + +/*****************************************************************************/ + +struct private_module_t; +struct private_handle_t; + +struct private_module_t { + gralloc_module_t base; + + private_handle_t* framebuffer; + uint32_t flags; + uint32_t numBuffers; + uint32_t bufferMask; + pthread_mutex_t lock; + buffer_handle_t currentBuffer; + int ionfd; + + struct fb_var_screeninfo info; + struct fb_fix_screeninfo finfo; + int xres; + int yres; + int line_length; + float xdpi; + float ydpi; + float fps; + void *queue; + pthread_mutex_t queue_lock; + +}; + +/*****************************************************************************/ + +#ifdef __cplusplus +struct private_handle_t : public native_handle { +#else +struct private_handle_t { + struct native_handle nativeHandle; +#endif + + enum { + PRIV_FLAGS_FRAMEBUFFER = 0x00000001, + PRIV_FLAGS_USES_UMP = 0x00000002, + PRIV_FLAGS_USES_ION = 0x00000020 + }; + + // file-descriptors + int fd; + int fd1; + int fd2; + // ints + int magic; + int flags; + int size; + int offset; + + int format; + int width; + int height; + int stride; + int vstride; + int frameworkFormat; + + ion_user_handle_t handle; + ion_user_handle_t handle1; + ion_user_handle_t handle2; + + // FIXME: the attributes below should be out-of-line + uint64_t base __attribute__((aligned(8))); + uint64_t base1 __attribute__((aligned(8))); + uint64_t base2 __attribute__((aligned(8))); + +#ifdef __cplusplus + static inline int sNumInts() { + return (((sizeof(private_handle_t) - sizeof(native_handle_t))/sizeof(int)) - sNumFds); + } + static const int sNumFds = 3; + static const int sMagic = 0x3141592; + + private_handle_t(int fd, int size, int flags) : + fd(fd), fd1(-1), fd2(-1), magic(sMagic), flags(flags), size(size), + offset(0), format(0), internal_format(0), frameworkFormat(0), width(0), height(0), stride(0), + vstride(0), is_compressible(0), compressed_out(0), handle(0), handle1(0), handle2(0), base(0), base1(0), base2(0), prefer_compression(PREFER_COMPRESSION_NO_CHANGE), dssRatio(0) + { + version = sizeof(native_handle); + numInts = sNumInts() + 2; + numFds = sNumFds -2 ; + } + + private_handle_t(int fd, int size, int flags, int w, + int h, int format, uint64_t internal_format, int frameworkFormat, int stride, int vstride, int is_compressible) : + fd(fd), fd1(-1), fd2(-1), magic(sMagic), flags(flags), size(size), + offset(0), format(format), internal_format(internal_format), frameworkFormat(frameworkFormat), width(w), height(h), stride(stride), + vstride(vstride), is_compressible(is_compressible), compressed_out(0), handle(0), handle1(0), handle2(0), base(0), base1(0), base2(0), prefer_compression(PREFER_COMPRESSION_NO_CHANGE), dssRatio(0) + { + version = sizeof(native_handle); + numInts = sNumInts() + 2; + numFds = sNumFds - 2; + } + + private_handle_t(int fd, int fd1, int 
size, int flags, int w, + int h, int format, uint64_t internal_format, int frameworkFormat, int stride, int vstride, int is_compressible) : + fd(fd), fd1(fd1), fd2(-1), magic(sMagic), flags(flags), size(size), + offset(0), format(format), internal_format(internal_format), frameworkFormat(frameworkFormat), width(w), height(h), stride(stride), + vstride(vstride), is_compressible(is_compressible), compressed_out(0), handle(0), handle1(0), handle2(0), base(0), base1(0), base2(0), prefer_compression(PREFER_COMPRESSION_NO_CHANGE), dssRatio(0) + { + version = sizeof(native_handle); + numInts = sNumInts() + 1; + numFds = sNumFds - 1; + } + + private_handle_t(int fd, int fd1, int fd2, int size, int flags, int w, + int h, int format, uint64_t internal_format, int frameworkFormat, int stride, int vstride, int is_compressible) : + fd(fd), fd1(fd1), fd2(fd2), magic(sMagic), flags(flags), size(size), + offset(0), format(format), internal_format(internal_format), frameworkFormat(frameworkFormat), width(w), height(h), stride(stride), + vstride(vstride), is_compressible(is_compressible), compressed_out(0), handle(0), handle1(0), handle2(0), base(0), base1(0), base2(0), prefer_compression(PREFER_COMPRESSION_NO_CHANGE), dssRatio(0) + { + version = sizeof(native_handle); + numInts = sNumInts(); + numFds = sNumFds; + } + ~private_handle_t() { + magic = 0; + } + + static int validate(const native_handle* h) { + const private_handle_t* hnd = (const private_handle_t*)h; + if (!h || h->version != sizeof(native_handle) || + hnd->numInts + hnd->numFds != sNumInts() + sNumFds || + hnd->magic != sMagic) + { + ALOGE("invalid gralloc handle (at %p)", reinterpret_cast(const_cast(h))); + return -EINVAL; + } + return 0; + } + + static private_handle_t* dynamicCast(const native_handle* in) + { + if (validate(in) == 0) + return (private_handle_t*) in; + + return NULL; + } + + int lock_usage; + int lock_offset; + int lock_len; + + int dssRatio; + int cropLeft; + int cropTop; + int cropRight; + int cropBottom; + + int prefer_compression; + uint64_t internal_format; + int is_compressible; + int compressed_out; + +#endif +}; + +#define DSS_CROP_X 0 +#define DSS_CROP_Y 360 +#define DSS_CROP_W 1920 +#define DSS_CROP_H 1080 + +enum { + PRIVATE_DATA_DSS_STATUS = 0x00000001, + PRIVATE_DATA_DSS_CROP = 0x00000002 +}; + +#define CRC_LIMIT_WIDTH (720) +#define CRC_LIMIT_HEIGHT (1280) +#define CRC_TILE_SIZE (16) +#define CRC_BUFFER_KEY (0x12131415) +struct gralloc_crc_header { + int crcBufferKey; + int crcPartial; + int reserved[2]; +}; + +#endif /* GRALLOC_PRIV_H_ */ diff --git a/include/hwjpeglib-exynos.h b/include/hwjpeglib-exynos.h new file mode 100644 index 0000000..ba95320 --- /dev/null +++ b/include/hwjpeglib-exynos.h @@ -0,0 +1,177 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __HARDWARE_SAMSUNG_EXYNOS_HWJPEGDECOMPRESSOR_H__ +#define __HARDWARE_SAMSUNG_EXYNOS_HWJPEGDECOMPRESSOR_H__ + +#ifdef __cplusplus +extern "C" { + +/* + * hwjpeg_decompress_ptr - handle of decompressor instance + */ +typedef struct hwjpeg_decompressor_struct { + unsigned int image_width; /* width of the compressed image */ + unsigned int image_height; /* height of the compressed image */ + unsigned char num_components; /* number of components of the compressed image */ + unsigned char chroma_h_samp_factor; /* horizontal chroma sampling factor of the compressed image */ + unsigned char chroma_v_samp_factor; /* vertical chroma sampling factor of the compressed image */ + unsigned char scale_factor; /* down-scaling factor during decompression: one of 1, 2, 4 and 8 */ + + unsigned int output_width; /* width of the output image (image_width/scale_factor) */ + unsigned int output_height; /* height of the output image (image_height/scale_factor) */ + __u32 output_format; /* 4CC style format identifier of the output image defined in videodev2.h */ +} *hwjpeg_decompress_ptr; + +/* + * hwjpeg_create_decompress - create an instance of decompressor + * + * @return: the handle of the decompressor instance. NULL on failure. + * + * The decompresson process starts from calling this function. The return value + * is the handle of the decompressor instance. Every step of the decompression + * process needs the handle. The handle should be destroyed with + * hwjpeg_destroy_decompress() when it is no longer required. + */ +hwjpeg_decompress_ptr hwjpeg_create_decompress(void); + +/* + * hwjpeg_file_src - configure the path to a file of compressed JPEG stream + * + * @cinfo: decompressor instance handle + * @path: path to the file of compressed JPEG stream. + * It is assumed that EOI is at the end of the file. + * @return: false on failure + */ +bool hwjpeg_file_src(hwjpeg_decompress_ptr cinfo, const char *path); + +/* + * hwjpeg_dmabuf_src - configure the buffer file descriptor that contains the compressed JPEG stream + * + * @cinfo: decompressor instance handle + * @infd: the file descriptor exported by ION of the buffer that contains the compressed JPEG stream + * @insize: the length in bytes of @infd. It is assumed that EOI is at the end of the buffer. + * @dummybytes: The available dummy bytes after @insize. + * @return: false on failure + */ +bool hwjpeg_dmabuf_src(hwjpeg_decompress_ptr cinfo, int infd, size_t insize, size_t dummybytes); + +/* + * hwjpeg_mem_src - configure the buffer that contains the compressed JPEG stream + * + * @cinfo: decompressor instance handle + * @inbuffer: the address of the buffer that contains the compressed JPEG stream + * @insize: the length in bytes of @inbuffer. It is assumed that EOI is at the end of the buffer. + * @dummybytes: The available dummy bytes after @insize. + * @return: false on failure + */ +bool hwjpeg_mem_src(hwjpeg_decompress_ptr cinfo, unsigned char *inbuffer, size_t insize, size_t dummybytes); + +/* + * hwjpeg_config_image_format - configure output image format + * + * @cinfo: decompressor instance handle + * @v4l2_pix_fmt: fourcc format identifier defined in linux/videodev2.h + */ +void hwjpeg_config_image_format(hwjpeg_decompress_ptr cinfo, __u32 v4l2_pix_fmt); + +/* + * hwjpeg_mem_dst - configure the buffer to store decompressed image + * + * @cinfo: decompressor instance handle + * @outbuffer: The array of addresses of the buffers to stroe decompressed image + * The maximum number of elements of @outbuffer is 3. 
The number of elements + * of @outbuffer depends on the image format configured by hwjpeg_config_image_format(). + * @outsize: The lengths in bytes of the buffers of @outbuffer. + * @num_buffers: The number of elements in @outsizes and @outbuffer + * @return: false on failure. + */ +bool hwjpeg_mem_dst(hwjpeg_decompress_ptr cinfo, unsigned char *outbuffer[], size_t outsize[], unsigned int num_buffers); + +/* + * hwjpeg_dmabuf_dst - configure the buffer to store decompressed image + * + * @cinfo: decompressor instance handle + * @outfd: The array of file descriptors exported by ION of the buffers to stroe decompressed image + * The maximum number of elements of @outfd is 3. The number of elements of @outfd depends + * on the image format configured by hwjpeg_config_image_format(). + * @outsizes: The lengths in bytes of the buffers of @outfd. + * @num_buffers: The number of elements in @outsizes and @outfd + * @return: false on failure. + */ +bool hwjpeg_dmabuf_dst(hwjpeg_decompress_ptr cinfo, int outfd[], size_t outsize[], unsigned int num_buffers); + +/* + * hwjpeg_set_downscale_factor - configure the downscaling factor during decompression + * + * @cinfo: decompressor instance handle + * @factor: downscaling factor. @factor should be one of 1, 2, 4 and 8. + * + * Downscaling factor is the inverse number of the downscaling ratio. @cinfo->output_width and + * @cinfo->output_height is decided by @factor. + * - @cinfo->output_width = @cinfo->image_width / @factor + * - @cinfo->output_height = @cinfo->image_height / @factor + * Note that both of @cinfo->image_width / @factor and @cinfo->image_height / @factor + * should be also integers. The results should be also even number according to the + * output image format configured by hwjpeg_config_image_format(). + * Otherwise, the decompression will fail. + */ +void hwjpeg_set_downscale_factor(hwjpeg_decompress_ptr cinfo, unsigned int factor); + +/* + * hwjpeg_read_header - reads the headers of the compressed JPEG stream + * + * @cinfo: decompressor instance handle + * @return: false on failure. + * + * NOTE that the fields of hwjpeg_decompression_ptr is available after hwjpeg_read_header() + * returns true. + */ +bool hwjpeg_read_header(hwjpeg_decompress_ptr cinfo); + +/* + * hwjpeg_has_enough_stream_buffer - Confirm if the stream buffer is enough + * + * @cinfo: decompressor instance handle + * @return: true if the stream buffer is enough to decompress by H/W + * + * This function should be called after hwjpeg_read_header() is called + * successfully. + */ +bool hwjpeg_has_enough_stream_buffer(hwjpeg_decompress_ptr cinfo); + +/* + * hwjpeg_start_decompress - starts decompression + * + * @cinfo: decompressor instance handle + * @return: false on failure. + * + * This function blocks until the decompression finishes. + */ +bool hwjpeg_start_decompress(hwjpeg_decompress_ptr cinfo); + +/* + * hwjpeg_destroy_decompress - releases all resources of the decompressor instance + * + * @cinfo: decompressor instance handle to destroy + */ +void hwjpeg_destroy_decompress(hwjpeg_decompress_ptr cinfo); + +}; /* extern "C" */ +#endif /* __cplusplus */ + +#endif /*__HARDWARE_SAMSUNG_EXYNOS7420_HWJPEGDECOMPRESSOR_H__*/ diff --git a/include/ion.h b/include/ion.h new file mode 100644 index 0000000..425a270 --- /dev/null +++ b/include/ion.h @@ -0,0 +1,188 @@ +/* + * Copyright (C) 2012 Samsung Electronics Co., Ltd. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _LIB_ION_H_ +#define _LIB_ION_H_ + +#include /* size_t */ + +#define ION_FLAG_CACHED 1 +#define ION_FLAG_CACHED_NEEDS_SYNC 2 +#define ION_FLAG_PRESERVE_KMAP 4 +#define ION_FLAG_NOZEROED 8 + +#define ION_HEAP_SYSTEM_MASK (1 << 0) +#define ION_HEAP_SYSTEM_CONTIG_MASK (1 << 1) +#define ION_HEAP_EXYNOS_CONTIG_MASK (1 << 4) +#define ION_HEAP_EXYNOS_MASK (1 << 5) +#define ION_EXYNOS_VIDEO_MASK (1 << 21) +#define ION_EXYNOS_FIMD_VIDEO_MASK (1 << 28) +#define ION_EXYNOS_GSC_MASK (1 << 27) +#define ION_EXYNOS_MFC_OUTPUT_MASK (1 << 26) +#define ION_EXYNOS_MFC_INPUT_MASK (1 << 25) + + +/* ION_MSYNC_FLAGS + * values of @flags parameter to ion_msync() + * + * IMSYNC_DEV_TO_READ: Device only reads the buffer + * IMSYNC_DEV_TO_WRITE: Device may writes to the buffer + * IMSYNC_DEV_TO_RW: Device reads and writes to the buffer + * + * IMSYNC_SYNC_FOR_DEV: ion_msync() for device to access the buffer + * IMSYNC_SYNC_FOR_CPU: ion_msync() for CPU to access the buffer after device + * has accessed it. + * + * The values must be ORed with one of IMSYNC_DEV_* and one of IMSYNC_SYNC_*. + * Otherwise, ion_msync() will not effect. + */ +enum ION_MSYNC_FLAGS { + IMSYNC_DEV_TO_READ = 0, + IMSYNC_DEV_TO_WRITE = 1, + IMSYNC_DEV_TO_RW = 2, + IMSYNC_SYNC_FOR_DEV = 0x10000, + IMSYNC_SYNC_FOR_CPU = 0x20000, +}; + +struct ion_preload_object { + size_t len; + unsigned int count; +}; + +#ifdef __cplusplus +extern "C" { +#endif + +/* ion_client + * An ION client is an object or an entity that needs to use the service of + * ION and has unique address space. ion_client is an identifier of an ION + * client and it represents the ION client. + * All operations on ION needs a valid ion_client value and it can be obtained + * by ion_client_create(). + */ +typedef int ion_client; + +/* ion_buffer + * An identifier of a buffer allocated from ION. You must obtain to access + * a buffer allocated from ION. If you have an effective ion_buffer, you have + * three options to work with it. + * - To access the buffer, you can request an address (user virtual address) + * of the buffer with ion_map(). + * - To pass the buffer to the kernel, you can pass the ion_buffer to the + * kernel driver directly, if the kernel driver can work with ION. + * - To pass the buffer to other processes, you can pass the ion_buffer to + * other processes through RPC machanism such as socket communication or + * Android Binder because ion_buffer is actually an open file descripotor + * of the current process. + */ +typedef int ion_buffer; + +typedef unsigned int ion_handle; + +/* ion_client_create() + * @RETURN: new ion_client. + * netative value if creating new ion_client is failed. + * + * A call to ion_client_create() must be paired with ion_client_destroy(), + * symmetrically. ion_client_destroy() needs a valid ion_client that + * is returned by ion_client_create(). + */ +ion_client ion_client_create(void); + +/* ion_client_destroy() + * @client: An ion_client value to remove. + */ +void ion_client_destroy(ion_client client); + +/* ion_alloc() - Allocates new buffer from ION. 
+ * @client: A valid ion_client value returned by ion_client_create().
+ * @len: Size of the required buffer in bytes.
+ * @align: Alignment requirement on @len and on the start address of the allocated
+ *         buffer. If @len is not aligned to @align, ION allocates a buffer
+ *         that is aligned to @align, and the size of that buffer will be larger
+ *         than @len.
+ * @heap_mask: Mask of the heaps from which you want this allocation to be served.
+ * @flags: Additional requirements on the buffer. Pass ION_FLAG_CACHED for a
+ *         buffer you want to have a cached mapping of.
+ * @RETURN: An ion_buffer that represents the allocated buffer. It is only
+ *          unique in the context of the given client, @client.
+ *          A negative error value if the allocation failed.
+ *          See the description of ion_buffer above for detailed information.
+ */
+ion_buffer ion_alloc(ion_client client, size_t len, size_t align,
+                     unsigned int heap_mask, unsigned int flags);
+
+/* ion_free() - Frees an existing buffer that was allocated by ION
+ * @buffer: An ion_buffer of the buffer to be released.
+ */
+void ion_free(ion_buffer buffer);
+
+/* ion_map() - Obtains a virtual address of the buffer identified by @buffer
+ * @buffer: The buffer to map. The virtual address returned is allocated by the
+ *          kernel.
+ * @len: The size of the buffer to map. This must not exceed the size of the
+ *       buffer represented by @buffer. Thus you need to know the size of it
+ *       before calling this function. If @len is less than the size of the
+ *       buffer, this function maps just the size requested (@len), not the
+ *       entire buffer.
+ * @offset: How many pages to skip while mapping. @offset pages from the
+ *          start of the buffer will not be mapped.
+ * @RETURN: The start virtual address of the mapping.
+ *          MAP_FAILED if mapping fails.
+ *
+ * Note that @len + (@offset * PAGE_SIZE) must not exceed the size of the
+ * buffer.
+ */
+void *ion_map(ion_buffer buffer, size_t len, off_t offset);
+
+/* ion_unmap() - Frees the mapping created by ion_map()
+ * @addr: The address returned by ion_map().
+ * @len: The size of the buffer mapped by ion_map().
+ * @RETURN: 0 on success, and -1 on failure.
+ *          errno is also set on failure.
+ */
+int ion_unmap(void *addr, size_t len);
+
+/* ion_msync() - Makes sure that data in the buffer are visible to H/W peripherals.
+ * @client: A valid ion_client value returned by ion_client_create().
+ * @buffer: The buffer to perform ion_msync() on.
+ * @flags: Direction of access by the H/W peripheral and the CPU. See the
+ *         description of ION_MSYNC_FLAGS.
+ * @size: Size to ion_msync() in bytes.
+ * @offset: Offset in bytes into @buffer at which ion_msync() starts.
+ * @RETURN: 0 if successful. -error, otherwise.
+ *
+ * Note that @offset + @size must not exceed the size of @buffer.
+ */
+int ion_sync(ion_client client, ion_buffer buffer);
+
+/* ion_sync_range() - Makes sure the specified range in the buffer is visible to H/W
+ * @client: A valid ion_client value returned by ion_client_create().
+ * @dmabuf_fd: A valid file descriptor for the buffer (usually returned by ion_alloc())
+ * @addr: start address of the region to sync.
+ *        It must be the mapped address of the buffer specified by @dmabuf_fd.
+ * @size: size of the region to sync.
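+ *
+ * Illustrative note (editor's addition, not part of the original header): a
+ * minimal, hedged sketch of how the calls above are commonly combined when
+ * the CPU writes a cached buffer and then hands it to a device. The heap
+ * mask and flag choices below are assumptions for illustration only.
+ *
+ *   ion_client client = ion_client_create();
+ *   ion_buffer buf = ion_alloc(client, len, 0,
+ *                              ION_HEAP_SYSTEM_MASK, ION_FLAG_CACHED);
+ *   void *addr = ion_map(buf, len, 0);
+ *   ... CPU fills the mapping at addr ...
+ *   ion_sync_range(client, buf, addr, len);    make CPU writes visible to H/W
+ *   ion_unmap(addr, len);
+ *   ion_free(buf);
+ *   ion_client_destroy(client);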
+ */ +int ion_sync_range(ion_client client, int dmabuf_fd, void *addr, size_t size); + +int ion_incRef(int fd, int share_fd, ion_handle *handle); + +int ion_decRef(int fd, ion_handle handle); + +#ifdef __cplusplus +} +#endif +#endif /* _LIB_ION_H_ */ diff --git a/include/media.h b/include/media.h new file mode 100644 index 0000000..e13771d --- /dev/null +++ b/include/media.h @@ -0,0 +1,125 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_MEDIA_H +#define __LINUX_MEDIA_H +#include +#include +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#include +#define MEDIA_API_VERSION KERNEL_VERSION(0, 1, 0) +struct media_device_info { + char driver[16]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + char model[32]; + char serial[40]; + char bus_info[32]; + __u32 media_version; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 hw_revision; + __u32 driver_version; + __u32 reserved[31]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define MEDIA_ENT_ID_FLAG_NEXT (1 << 31) +#define MEDIA_ENT_TYPE_SHIFT 16 +#define MEDIA_ENT_TYPE_MASK 0x00ff0000 +#define MEDIA_ENT_SUBTYPE_MASK 0x0000ffff +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define MEDIA_ENT_T_DEVNODE (1 << MEDIA_ENT_TYPE_SHIFT) +#define MEDIA_ENT_T_DEVNODE_V4L (MEDIA_ENT_T_DEVNODE + 1) +#define MEDIA_ENT_T_DEVNODE_FB (MEDIA_ENT_T_DEVNODE + 2) +#define MEDIA_ENT_T_DEVNODE_ALSA (MEDIA_ENT_T_DEVNODE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define MEDIA_ENT_T_DEVNODE_DVB (MEDIA_ENT_T_DEVNODE + 4) +#define MEDIA_ENT_T_V4L2_SUBDEV (2 << MEDIA_ENT_TYPE_SHIFT) +#define MEDIA_ENT_T_V4L2_SUBDEV_SENSOR (MEDIA_ENT_T_V4L2_SUBDEV + 1) +#define MEDIA_ENT_T_V4L2_SUBDEV_FLASH (MEDIA_ENT_T_V4L2_SUBDEV + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define MEDIA_ENT_T_V4L2_SUBDEV_LENS (MEDIA_ENT_T_V4L2_SUBDEV + 3) +#define MEDIA_ENT_FL_DEFAULT (1 << 0) +struct media_entity_desc { + __u32 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + char name[32]; + __u32 type; + __u32 revision; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 group_id; + __u16 pads; + __u16 links; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct { + __u32 major; + __u32 minor; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } v4l; + struct { + __u32 major; + 
__u32 minor; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } fb; + struct { + __u32 card; + __u32 device; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 subdevice; + } alsa; + int dvb; + __u8 raw[184]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; +}; +#define MEDIA_PAD_FL_SINK (1 << 0) +#define MEDIA_PAD_FL_SOURCE (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct media_pad_desc { + __u32 entity; + __u16 index; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define MEDIA_LNK_FL_ENABLED (1 << 0) +#define MEDIA_LNK_FL_IMMUTABLE (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define MEDIA_LNK_FL_DYNAMIC (1 << 2) +struct media_link_desc { + struct media_pad_desc source; + struct media_pad_desc sink; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 reserved[2]; +}; +struct media_links_enum { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 entity; + struct media_pad_desc __user *pads; + struct media_link_desc __user *links; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define MEDIA_IOC_DEVICE_INFO _IOWR('|', 0x00, struct media_device_info) +#define MEDIA_IOC_ENUM_ENTITIES _IOWR('|', 0x01, struct media_entity_desc) +#define MEDIA_IOC_ENUM_LINKS _IOWR('|', 0x02, struct media_links_enum) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define MEDIA_IOC_SETUP_LINK _IOWR('|', 0x03, struct media_link_desc) +#endif diff --git a/include/s3c-fb.h b/include/s3c-fb.h new file mode 100644 index 0000000..26f85ef --- /dev/null +++ b/include/s3c-fb.h @@ -0,0 +1,126 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef __S3C_FB_H__ +#define __S3C_FB_H__ +struct s3c_fb_user_window { + int x; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int y; +}; +struct s3c_fb_user_plane_alpha { + int channel; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned char red; + unsigned char green; + unsigned char blue; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct s3c_fb_user_chroma { + int enabled; + unsigned char red; + unsigned char green; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned char blue; +}; +struct s3c_fb_user_ion_client { + int fd; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int offset; +}; +enum s3c_fb_pixel_format { + S3C_FB_PIXEL_FORMAT_RGBA_8888 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + S3C_FB_PIXEL_FORMAT_RGBX_8888 = 1, + S3C_FB_PIXEL_FORMAT_RGBA_5551 = 2, + S3C_FB_PIXEL_FORMAT_RGB_565 = 3, + S3C_FB_PIXEL_FORMAT_BGRA_8888 = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + S3C_FB_PIXEL_FORMAT_BGRX_8888 = 5, + S3C_FB_PIXEL_FORMAT_MAX = 6, +}; +enum s3c_fb_blending { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + S3C_FB_BLENDING_NONE = 0, + S3C_FB_BLENDING_PREMULT = 1, + S3C_FB_BLENDING_COVERAGE = 2, + S3C_FB_BLENDING_MAX = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct s3c_fb_win_config { + enum { + S3C_FB_WIN_STATE_DISABLED = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + S3C_FB_WIN_STATE_COLOR, + S3C_FB_WIN_STATE_BUFFER, + S3C_FB_WIN_STATE_OTF, + S3C_FB_WIN_STATE_UPDATE, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } state; + union { + __u32 color; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int fd; + __u32 offset; + __u32 stride; + enum s3c_fb_pixel_format format; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum s3c_fb_blending blending; + int fence_fd; + int plane_alpha; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; + int x; + int y; + __u32 w; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 h; +#ifdef USES_DRM_SETTING_BY_DECON + bool protection; +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define S3C_FB_MAX_WIN (5) +#ifdef USES_WINDOW_UPDATE +#define S3C_WIN_UPDATE_IDX S3C_FB_MAX_WIN +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#else +#define S3C_WIN_UPDATE_IDX (-1) +#endif +struct s3c_fb_win_config_data { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int fence; +#ifdef USES_WINDOW_UPDATE + struct s3c_fb_win_config config[S3C_FB_MAX_WIN + 1]; +#else +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct s3c_fb_win_config config[S3C_FB_MAX_WIN]; +#endif +}; +#define S3CFB_WIN_POSITION _IOW('F', 203, struct s3c_fb_user_window) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_WIN_SET_PLANE_ALPHA _IOW('F', 204, struct s3c_fb_user_plane_alpha) +#define S3CFB_WIN_SET_CHROMA _IOW('F', 205, struct s3c_fb_user_chroma) 
+#define S3CFB_SET_VSYNC_INT _IOW('F', 206, __u32) +#define S3CFB_GET_ION_USER_HANDLE _IOWR('F', 208, struct s3c_fb_user_ion_client) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define S3CFB_WIN_CONFIG _IOW('F', 209, struct s3c_fb_win_config_data) +#define S3CFB_WIN_PSR_EXIT _IOW('F', 210, int) +#endif diff --git a/include/s5p_fimc_v4l2.h b/include/s5p_fimc_v4l2.h new file mode 100644 index 0000000..d0c5d5f --- /dev/null +++ b/include/s5p_fimc_v4l2.h @@ -0,0 +1,160 @@ +/* Copyright(c) 2011 Samsung Electronics Co, Ltd. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + * + * Alternatively, Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +#ifndef _S5P_FIMC_H_ +#define _S5P_FIMC_H_ + +#include "videodev2.h" +#include "videodev2_exynos_media.h" + +/* + * G E N E R A L S + * +*/ + +/* + * P I X E L F O R M A T G U I D E + * + * The 'x' means 'DO NOT CARE' + * The '*' means 'FIMC SPECIFIC' + * For some fimc formats, we couldn't find equivalent format in the V4L2 FOURCC. + * + * FIMC TYPE PLANES ORDER V4L2_PIX_FMT + * --------------------------------------------------------- + * RGB565 x x V4L2_PIX_FMT_RGB565 + * RGB888 x x V4L2_PIX_FMT_RGB24 + * YUV420 2 LSB_CBCR V4L2_PIX_FMT_NV12 + * YUV420 2 LSB_CRCB V4L2_PIX_FMT_NV21 + * YUV420 2 MSB_CBCR V4L2_PIX_FMT_NV21X* + * YUV420 2 MSB_CRCB V4L2_PIX_FMT_NV12X* + * YUV420 3 x V4L2_PIX_FMT_YUV420 + * YUV422 1 YCBYCR V4L2_PIX_FMT_YUYV + * YUV422 1 YCRYCB V4L2_PIX_FMT_YVYU + * YUV422 1 CBYCRY V4L2_PIX_FMT_UYVY + * YUV422 1 CRYCBY V4L2_PIX_FMT_VYUY* + * YUV422 2 LSB_CBCR V4L2_PIX_FMT_NV16* + * YUV422 2 LSB_CRCB V4L2_PIX_FMT_NV61* + * YUV422 2 MSB_CBCR V4L2_PIX_FMT_NV16X* + * YUV422 2 MSB_CRCB V4L2_PIX_FMT_NV61X* + * YUV422 3 x V4L2_PIX_FMT_YUV422P + * +*/ + +/* + * V 4 L 2 F I M C E X T E N S I O N S + * +*/ +#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') + +/* FOURCC for FIMC specific */ +#define V4L2_PIX_FMT_NV12X v4l2_fourcc('N', '1', '2', 'X') +#define V4L2_PIX_FMT_NV21X v4l2_fourcc('N', '2', '1', 'X') +#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') +#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') +#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') +#define V4L2_PIX_FMT_NV16X v4l2_fourcc('N', '1', '6', 'X') +#define V4L2_PIX_FMT_NV61X v4l2_fourcc('N', '6', '1', 'X') + +/* CID extensions */ +#define V4L2_CID_ROTATION (V4L2_CID_PRIVATE_BASE + 0) +#define V4L2_CID_OVLY_MODE (V4L2_CID_PRIVATE_BASE + 9) +#define V4L2_CID_GET_PHY_SRC_YADDR (V4L2_CID_PRIVATE_BASE + 12) +#define V4L2_CID_GET_PHY_SRC_CADDR (V4L2_CID_PRIVATE_BASE + 13) +#define V4L2_CID_RESERVED_MEM_BASE_ADDR (V4L2_CID_PRIVATE_BASE + 20) +#define V4L2_CID_FIMC_VERSION (V4L2_CID_PRIVATE_BASE + 21) + +/* + * U S E R D E F I N E D T Y P E S + * +*/ +#define FIMC1_RESERVED_SIZE 32768 + +enum fimc_overlay_mode { + FIMC_OVLY_NOT_FIXED = 0x0, /* Overlay mode isn't fixed. 
*/ + FIMC_OVLY_FIFO = 0x1, /* Non-destructive Overlay with FIFO */ + FIMC_OVLY_DMA_AUTO = 0x2, /* Non-destructive Overlay with DMA */ + FIMC_OVLY_DMA_MANUAL = 0x3, /* Non-destructive Overlay with DMA */ + FIMC_OVLY_NONE_SINGLE_BUF = 0x4, /* Destructive Overlay with DMA single destination buffer */ + FIMC_OVLY_NONE_MULTI_BUF = 0x5, /* Destructive Overlay with DMA multiple dstination buffer */ +}; + +typedef unsigned int dma_addr_t; + +struct fimc_buf { + dma_addr_t base[3]; + size_t size[3]; + int planes; +}; + +struct fimc_buffer { + void *virt_addr; + void *phys_addr; + size_t length; +}; + +struct yuv_fmt_list { + const char *name; + const char *desc; + unsigned int fmt; + int bpp; + int planes; +}; + +struct img_offset { + int y_h; + int y_v; + int cb_h; + int cb_v; + int cr_h; + int cr_v; +}; + +//------------ STRUCT ---------------------------------------------------------// + +typedef struct +{ + unsigned int full_width; // Source Image Full Width (Virtual screen size) + unsigned int full_height; // Source Image Full Height (Virtual screen size) + unsigned int start_x; // Source Image Start width offset + unsigned int start_y; // Source Image Start height offset + unsigned int width; // Source Image Width + unsigned int height; // Source Image Height + unsigned int buf_addr_phy_rgb_y; // Base Address of the Source Image (RGB or Y): Physical Address + unsigned int buf_addr_phy_cb; // Base Address of the Source Image (CB Component) : Physical Address + unsigned int buf_addr_phy_cr; // Base Address of the Source Image (CR Component) : Physical Address + unsigned int color_space; // Color Space of the Source Image + unsigned int planes; // number of planes for the Image +} s5p_fimc_img_info; + +typedef struct +{ + s5p_fimc_img_info src; + s5p_fimc_img_info dst; +} s5p_fimc_params_t; + +typedef struct _s5p_fimc_t { + int dev_fd; + struct fimc_buffer out_buf; + + s5p_fimc_params_t params; + + int use_ext_out_mem; +} s5p_fimc_t; + +#endif diff --git a/include/s5p_tvout_v4l2.h b/include/s5p_tvout_v4l2.h new file mode 100644 index 0000000..d11db09 --- /dev/null +++ b/include/s5p_tvout_v4l2.h @@ -0,0 +1,218 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __S5P_TVOUT_H__ +#define __S5P_TVOUT_H__ + +#include + +#include "videodev2.h" +#include "videodev2_exynos_media.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/******************************************* + * Define + *******************************************/ +/* TVOUT control */ +#define PFX_NODE_FB "/dev/graphics/fb" + +#define PFX_NODE_MEDIADEV "/dev/media" +#define PFX_NODE_SUBDEV "/dev/v4l-subdev" +#define PFX_NODE_VIDEODEV "/dev/video" +#define PFX_ENTITY_SUBDEV_MIXER "s5p-mixer%d" +#define PFX_ENTITY_VIDEODEV_MIXER_GRP "mxr%d_graph%d" +#define PFX_ENTITY_SUBDEV_GSC_OUT "exynos-gsc-sd.%d" +#define PFX_ENTITY_VIDEODEV_GSC_OUT "exynos-gsc.%d.output" + +#define PFX_ENTITY_SUBDEV_FIMD "s5p-fimd%d" +#define PFX_ENTITY_SUBDEV_GSC_CAP "gsc-cap-subdev.%d" +#define PFX_ENTITY_VIDEODEV_GSC_CAP "exynos-gsc.%d.capture" + +/* Sub-Mixer 0 */ +#define TVOUT0_DEV_G0 "/dev/video16" +#define TVOUT0_DEV_G1 "/dev/video17" +/* Sub-Mixer 1 */ +#define TVOUT1_DEV_G0 "/dev/video18" +#define TVOUT1_DEV_G1 "/dev/video19" + +#define MIXER_V_SUBDEV_PAD_SINK (0) +#define MIXER_V_SUBDEV_PAD_SOURCE (3) +#define MIXER_G0_SUBDEV_PAD_SINK (1) +#define MIXER_G0_SUBDEV_PAD_SOURCE (4) +#define MIXER_G1_SUBDEV_PAD_SINK (2) +#define MIXER_G1_SUBDEV_PAD_SOURCE (5) + +#define GSCALER_SUBDEV_PAD_SINK (0) +#define GSCALER_SUBDEV_PAD_SOURCE (1) +#define FIMD_SUBDEV_PAD_SOURCE (0) + +#define HPD_DEV "/dev/HPD" + +/* ------------- Output -----------------*/ +/* type */ +#define V4L2_OUTPUT_TYPE_MSDMA 4 +#define V4L2_OUTPUT_TYPE_COMPOSITE 5 +#define V4L2_OUTPUT_TYPE_SVIDEO 6 +#define V4L2_OUTPUT_TYPE_YPBPR_INERLACED 7 +#define V4L2_OUTPUT_TYPE_YPBPR_PROGRESSIVE 8 +#define V4L2_OUTPUT_TYPE_RGB_PROGRESSIVE 9 +#define V4L2_OUTPUT_TYPE_DIGITAL 10 +#define V4L2_OUTPUT_TYPE_HDMI V4L2_OUTPUT_TYPE_DIGITAL +#define V4L2_OUTPUT_TYPE_HDMI_RGB 11 +#define V4L2_OUTPUT_TYPE_DVI 12 + +/* ------------- STD -------------------*/ +#define V4L2_STD_PAL_BDGHI\ + (V4L2_STD_PAL_B|V4L2_STD_PAL_D|V4L2_STD_PAL_G|V4L2_STD_PAL_H|V4L2_STD_PAL_I) + +#define V4L2_STD_480P_60_16_9 ((v4l2_std_id)0x04000000) +#define V4L2_STD_480P_60_4_3 ((v4l2_std_id)0x05000000) +#define V4L2_STD_576P_50_16_9 ((v4l2_std_id)0x06000000) +#define V4L2_STD_576P_50_4_3 ((v4l2_std_id)0x07000000) +#define V4L2_STD_720P_60 ((v4l2_std_id)0x08000000) +#define V4L2_STD_720P_50 ((v4l2_std_id)0x09000000) +#define V4L2_STD_1080P_60 ((v4l2_std_id)0x0a000000) +#define V4L2_STD_1080P_50 ((v4l2_std_id)0x0b000000) +#define V4L2_STD_1080I_60 ((v4l2_std_id)0x0c000000) +#define V4L2_STD_1080I_50 ((v4l2_std_id)0x0d000000) +#define V4L2_STD_480P_59 ((v4l2_std_id)0x0e000000) +#define V4L2_STD_720P_59 ((v4l2_std_id)0x0f000000) +#define V4L2_STD_1080I_59 ((v4l2_std_id)0x10000000) +#define V4L2_STD_1080P_59 ((v4l2_std_id)0x11000000) +#define V4L2_STD_1080P_30 ((v4l2_std_id)0x12000000) +#define V4L2_STD_TVOUT_720P_60_SBS_HALF ((v4l2_std_id)0x13000000) +#define V4L2_STD_TVOUT_720P_59_SBS_HALF ((v4l2_std_id)0x14000000) +#define V4L2_STD_TVOUT_720P_50_TB ((v4l2_std_id)0x15000000) +#define V4L2_STD_TVOUT_1080P_24_TB ((v4l2_std_id)0x16000000) +#define V4L2_STD_TVOUT_1080P_23_TB ((v4l2_std_id)0x17000000) +#define V4L2_STD_TVOUT_1080P_60_SBS_HALF ((v4l2_std_id)0x18000000) + +/* ------------- Input ------------------*/ +/* type */ +#define V4L2_INPUT_TYPE_MSDMA 3 +#define V4L2_INPUT_TYPE_FIFO 4 + +/******************************************* + * structures + *******************************************/ + +/* TVOUT */ +struct v4l2_vid_overlay_src { + void *base_y; + void *base_c; + struct 
v4l2_pix_format pix_fmt; +}; + +struct v4l2_window_s5p_tvout { + __u32 capability; + __u32 flags; + __u32 priority; + struct v4l2_window win; +}; + +struct v4l2_pix_format_s5p_tvout { + void *base_y; + void *base_c; + __u32 src_img_endian; + struct v4l2_pix_format pix_fmt; +}; + +struct vid_overlay_param { + struct v4l2_vid_overlay_src src; + struct v4l2_rect src_crop; + struct v4l2_framebuffer dst; + struct v4l2_window dst_win; +}; + +struct tvout_param { + struct v4l2_pix_format_s5p_tvout tvout_src; + struct v4l2_window_s5p_tvout tvout_rect; + struct v4l2_rect tvout_dst; +}; + +struct overlay_param { + struct v4l2_framebuffer overlay_frame; + struct v4l2_window_s5p_tvout overlay_rect; + struct v4l2_rect overlay_dst; +}; + +/* FB */ +struct s5ptvfb_user_window { + int x; + int y; +}; + +struct s5ptvfb_user_plane_alpha { + int channel; + unsigned char alpha; +}; + +struct s5ptvfb_user_chroma { + int enabled; + unsigned char red; + unsigned char green; + unsigned char blue; +}; + +enum s5ptvfb_ver_scaling_t { + VERTICAL_X1, + VERTICAL_X2, +}; + +enum s5ptvfb_hor_scaling_t { + HORIZONTAL_X1, + HORIZONTAL_X2, +}; + +struct s5ptvfb_user_scaling { + enum s5ptvfb_ver_scaling_t ver; + enum s5ptvfb_hor_scaling_t hor; +}; + +/******************************************* + * custom ioctls + *******************************************/ + +#define VIDIOC_S_BASEADDR _IOR('V', 83, int) + +#define VIDIOC_HDCP_ENABLE _IOWR('V', 100, unsigned int) +#define VIDIOC_HDCP_STATUS _IOR('V', 101, unsigned int) +#define VIDIOC_HDCP_PROT_STATUS _IOR('V', 102, unsigned int) + +#define VIDIOC_INIT_AUDIO _IOR('V', 103, unsigned int) +#define VIDIOC_AV_MUTE _IOR('V', 104, unsigned int) +#define VIDIOC_G_AVMUTE _IOR('V', 105, unsigned int) +#define HPD_GET_STATE _IOR('H', 100, unsigned int) + +#define S5PTVFB_WIN_POSITION _IOW('F', 213, struct s5ptvfb_user_window) +#define S5PTVFB_WIN_SET_PLANE_ALPHA _IOW('F', 214, struct s5ptvfb_user_plane_alpha) +#define S5PTVFB_WIN_SET_CHROMA _IOW('F', 215, struct s5ptvfb_user_chroma) + +#define S5PTVFB_SET_VSYNC_INT _IOW('F', 216, unsigned int) +#define S5PTVFB_WAITFORVSYNC _IO('F', 32) +#define S5PTVFB_WIN_SET_ADDR _IOW('F', 219, unsigned int) +#define S5PTVFB_SET_WIN_ON _IOW('F', 220, unsigned int) +#define S5PTVFB_SET_WIN_OFF _IOW('F', 221, unsigned int) +#define S5PTVFB_SCALING _IOW('F', 222, struct s5ptvfb_user_scaling) + +#ifdef __cplusplus +} +#endif + +#endif /* __S5P_TVOUT_H__ */ diff --git a/include/sec_g2d_4x.h b/include/sec_g2d_4x.h new file mode 100644 index 0000000..2ab5f26 --- /dev/null +++ b/include/sec_g2d_4x.h @@ -0,0 +1,479 @@ +/* + * Copyright 2011, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _SEC_G2D_DRIVER_H_ +#define _SEC_G2D_DRIVER_H_ + +#define SEC_G2D_DEV_NAME "/dev/fimg2d" +#define G2D_ALPHA_VALUE_MAX (255) + +/* ioctl commands */ +#define FIMG2D_IOCTL_MAGIC 'F' +#define FIMG2D_BITBLT_BLIT _IOWR(FIMG2D_IOCTL_MAGIC, 0, struct fimg2d_blit) +#define FIMG2D_BITBLT_SYNC _IOW(FIMG2D_IOCTL_MAGIC, 1, int) +#define FIMG2D_BITBLT_VERSION _IOR(FIMG2D_IOCTL_MAGIC, 2, struct fimg2d_version) +#define FIMG2D_BITBLT_ACTIVATE _IOW(FIMG2D_IOCTL_MAGIC, 3, enum driver_act) + +enum driver_act { + DRV_ACT = 0, + DRV_DEACT +}; + +struct fimg2d_version { + unsigned int hw; + unsigned int sw; +}; + +enum fimg2d_qos_level { + G2D_LV0 = 0, + G2D_LV1, + G2D_LV2, + G2D_LV3, + G2D_LV4, + G2D_LV_END +}; + +/** + * @BLIT_SYNC: sync mode, to wait for blit done irq + * @BLIT_ASYNC: async mode, not to wait for blit done irq + * + */ +enum blit_sync { + BLIT_SYNC, + BLIT_ASYNC, +}; + +/** + * @ADDR_PHYS: physical address + * @ADDR_USER: user virtual address (physically Non-contiguous) + * @ADDR_USER_RSVD: user virtual address (physically Contiguous) + * @ADDR_DEVICE: specific device virtual address + */ +enum addr_space { + ADDR_NONE, + ADDR_PHYS, + ADDR_KERN, + ADDR_USER, + ADDR_USER_RSVD, + ADDR_DEVICE, +}; + +/** + * Pixel order complies with little-endian style + * + * DO NOT CHANGE THIS ORDER + */ +enum pixel_order { + AX_RGB = 0, + RGB_AX, + AX_BGR, + BGR_AX, + ARGB_ORDER_END, + + P1_CRY1CBY0, + P1_CBY1CRY0, + P1_Y1CRY0CB, + P1_Y1CBY0CR, + P1_ORDER_END, + + P2_CRCB, + P2_CBCR, + P2_ORDER_END, +}; + +/** + * DO NOT CHANGE THIS ORDER + */ +enum color_format { + CF_XRGB_8888 = 0, + CF_ARGB_8888, + CF_RGB_565, + CF_XRGB_1555, + CF_ARGB_1555, + CF_XRGB_4444, + CF_ARGB_4444, + CF_RGB_888, + CF_YCBCR_444, + CF_YCBCR_422, + CF_YCBCR_420, + CF_A8, + CF_L8, + SRC_DST_FORMAT_END, + + CF_MSK_1BIT, + CF_MSK_4BIT, + CF_MSK_8BIT, + CF_MSK_16BIT_565, + CF_MSK_16BIT_1555, + CF_MSK_16BIT_4444, + CF_MSK_32BIT_8888, + MSK_FORMAT_END, +}; + +enum rotation { + ORIGIN, + ROT_90, /* clockwise */ + ROT_180, + ROT_270, + XFLIP, /* x-axis flip */ + YFLIP, /* y-axis flip */ +}; + +/** + * @NO_REPEAT: no effect + * @REPEAT_NORMAL: repeat horizontally and vertically + * @REPEAT_PAD: pad with pad color + * @REPEAT_REFLECT: reflect horizontally and vertically + * @REPEAT_CLAMP: pad with edge color of original image + * + * DO NOT CHANGE THIS ORDER + */ +enum repeat { + NO_REPEAT = 0, + REPEAT_NORMAL, /* default setting */ + REPEAT_PAD, + REPEAT_REFLECT, REPEAT_MIRROR = REPEAT_REFLECT, + REPEAT_CLAMP, +}; + +enum scaling { + NO_SCALING, + SCALING_NEAREST, + SCALING_BILINEAR, +}; + +/** + * @SCALING_PIXELS: ratio in pixels + * @SCALING_RATIO: ratio in fixed point 16 + */ +enum scaling_factor { + SCALING_PIXELS, + SCALING_RATIO, +}; + +/** + * premultiplied alpha + */ +enum premultiplied { + PREMULTIPLIED, + NON_PREMULTIPLIED, +}; + +/** + * @TRANSP: discard bluescreen color + * @BLUSCR: replace bluescreen color with background color + */ +enum bluescreen { + OPAQUE, + TRANSP, + BLUSCR, +}; + +/** + * DO NOT CHANGE THIS ORDER + */ +enum blit_op { + BLIT_OP_SOLID_FILL = 0, + + BLIT_OP_CLR, + BLIT_OP_SRC, BLIT_OP_SRC_COPY = BLIT_OP_SRC, + BLIT_OP_DST, + BLIT_OP_SRC_OVER, + BLIT_OP_DST_OVER, BLIT_OP_OVER_REV = BLIT_OP_DST_OVER, + BLIT_OP_SRC_IN, + BLIT_OP_DST_IN, BLIT_OP_IN_REV = BLIT_OP_DST_IN, + BLIT_OP_SRC_OUT, + BLIT_OP_DST_OUT, BLIT_OP_OUT_REV = BLIT_OP_DST_OUT, + BLIT_OP_SRC_ATOP, + BLIT_OP_DST_ATOP, BLIT_OP_ATOP_REV = BLIT_OP_DST_ATOP, + BLIT_OP_XOR, + + BLIT_OP_ADD, + BLIT_OP_MULTIPLY, + BLIT_OP_SCREEN, + 
BLIT_OP_DARKEN, + BLIT_OP_LIGHTEN, + + BLIT_OP_DISJ_SRC_OVER, + BLIT_OP_DISJ_DST_OVER, BLIT_OP_SATURATE = BLIT_OP_DISJ_DST_OVER, + BLIT_OP_DISJ_SRC_IN, + BLIT_OP_DISJ_DST_IN, BLIT_OP_DISJ_IN_REV = BLIT_OP_DISJ_DST_IN, + BLIT_OP_DISJ_SRC_OUT, + BLIT_OP_DISJ_DST_OUT, BLIT_OP_DISJ_OUT_REV = BLIT_OP_DISJ_DST_OUT, + BLIT_OP_DISJ_SRC_ATOP, + BLIT_OP_DISJ_DST_ATOP, BLIT_OP_DISJ_ATOP_REV = BLIT_OP_DISJ_DST_ATOP, + BLIT_OP_DISJ_XOR, + + BLIT_OP_CONJ_SRC_OVER, + BLIT_OP_CONJ_DST_OVER, BLIT_OP_CONJ_OVER_REV = BLIT_OP_CONJ_DST_OVER, + BLIT_OP_CONJ_SRC_IN, + BLIT_OP_CONJ_DST_IN, BLIT_OP_CONJ_IN_REV = BLIT_OP_CONJ_DST_IN, + BLIT_OP_CONJ_SRC_OUT, + BLIT_OP_CONJ_DST_OUT, BLIT_OP_CONJ_OUT_REV = BLIT_OP_CONJ_DST_OUT, + BLIT_OP_CONJ_SRC_ATOP, + BLIT_OP_CONJ_DST_ATOP, BLIT_OP_CONJ_ATOP_REV = BLIT_OP_CONJ_DST_ATOP, + BLIT_OP_CONJ_XOR, + + /* user select coefficient manually */ + BLIT_OP_USER_COEFF, + + BLIT_OP_USER_SRC_GA, + + /* Add new operation type here */ + + /* end of blit operation */ + BLIT_OP_END, + + /* driver not supporting format */ + BLIT_OP_NOT_SUPPORTED +}; + +/** + * @start: start address or unique id of image + */ +struct fimg2d_addr { + enum addr_space type; + unsigned long start; +}; + +struct fimg2d_rect { + int x1; + int y1; + int x2; /* x1 + width */ + int y2; /* y1 + height */ +}; + +/** + * pixels can be different from src, dst or clip rect + */ +struct fimg2d_scale { + enum scaling mode; + + /* ratio in pixels */ + int src_w, src_h; + int dst_w, dst_h; +}; + +struct fimg2d_clip { + bool enable; + int x1; + int y1; + int x2; /* x1 + width */ + int y2; /* y1 + height */ +}; + +struct fimg2d_repeat { + enum repeat mode; + unsigned long pad_color; +}; + +/** + * @bg_color: bg_color is valid only if bluescreen mode is BLUSCR. + */ +struct fimg2d_bluscr { + enum bluescreen mode; + unsigned long bs_color; + unsigned long bg_color; +}; + +/** + * @plane2: address info for CbCr in YCbCr 2plane mode + * @rect: crop/clip rect + * @need_cacheopr: true if cache coherency is required + */ +struct fimg2d_image { + int width; + int height; + int stride; + enum pixel_order order; + enum color_format fmt; + struct fimg2d_addr addr; + struct fimg2d_addr plane2; + struct fimg2d_rect rect; + bool need_cacheopr; +}; + +/** + * @solid_color: + * src color instead of src image / dst color instead of dst read image. + * color format and order must be ARGB8888(A is MSB). + * premultiplied format must be same to 'premult' of this struct. + * @g_alpha: global(constant) alpha. 0xff is opaque, 0 is transparnet + * @dither: dithering + * @rotate: rotation degree in clockwise + * @premult: alpha premultiplied mode for read & write + * @scaling: common scaling info for src and mask image. + * @repeat: repeat type (tile mode) + * @bluscr: blue screen and transparent mode + */ +struct fimg2d_param { + unsigned long solid_color; + unsigned char g_alpha; + bool dither; + enum rotation rotate; + enum premultiplied premult; + struct fimg2d_scale scaling; + struct fimg2d_repeat repeat; + struct fimg2d_bluscr bluscr; + struct fimg2d_clip clipping; +}; + +/** + * @op: blit operation mode + * @src: set when using src image + * @msk: set when using mask image + * @tmp: set when using 2-step blit at a single command + * @dst: dst must not be null + * * tmp image must be the same to dst except memory address + * @seq_no: user debugging info. + * for example, user can set sequence number or pid. 
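+ *
+ * Illustrative sketch (editor's addition, not part of the original driver
+ * notes): one hedged example of filling this structure for a simple
+ * source-over blit, assuming src_img and dst_img are fimg2d_image
+ * descriptors already prepared by the caller and fd is an open handle to
+ * SEC_G2D_DEV_NAME:
+ *
+ *   struct fimg2d_blit cmd;
+ *   memset(&cmd, 0, sizeof(cmd));
+ *   cmd.op = BLIT_OP_SRC_OVER;
+ *   cmd.param.g_alpha = G2D_ALPHA_VALUE_MAX;
+ *   cmd.param.premult = PREMULTIPLIED;
+ *   cmd.src = &src_img;
+ *   cmd.dst = &dst_img;
+ *   cmd.sync = BLIT_SYNC;
+ *   cmd.seq_no = getpid();
+ *   if (ioctl(fd, FIMG2D_BITBLT_BLIT, &cmd) < 0)
+ *       the blit request was rejected; check errno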
+ */ +struct fimg2d_blit { + enum blit_op op; + struct fimg2d_param param; + struct fimg2d_image *src; + struct fimg2d_image *msk; + struct fimg2d_image *tmp; + struct fimg2d_image *dst; + enum blit_sync sync; + unsigned int seq_no; + enum fimg2d_qos_level qos_lv; +}; + +struct fimg2d_blit_raw { + enum blit_op op; + struct fimg2d_param param; + struct fimg2d_image src; + struct fimg2d_image msk; + struct fimg2d_image tmp; + struct fimg2d_image dst; + enum blit_sync sync; + unsigned int seq_no; + enum fimg2d_qos_level qos_lv; +}; + +#ifdef __KERNEL__ + +/** + * @pgd: base address of arm mmu pagetable + * @ncmd: request count in blit command queue + * @wait_q: conext wait queue head + */ +struct fimg2d_context { + struct mm_struct *mm; + atomic_t ncmd; + wait_queue_head_t wait_q; +}; + +/** + * @seq_no: used for debugging + * @node: list head of blit command queue + */ +struct fimg2d_bltcmd { + enum blit_op op; + + enum premultiplied premult; + unsigned char g_alpha; + bool dither; + enum rotation rotate; + struct fimg2d_scale scaling; + struct fimg2d_repeat repeat; + struct fimg2d_bluscr bluscr; + struct fimg2d_clip clipping; + + bool srcen; + bool dsten; + bool msken; + + unsigned long solid_color; + struct fimg2d_image src; + struct fimg2d_image dst; + struct fimg2d_image msk; + + struct fimg2d_rect src_rect; + struct fimg2d_rect dst_rect; + struct fimg2d_rect msk_rect; + + size_t size_all; + struct fimg2d_cache src_cache; + struct fimg2d_cache dst_cache; + struct fimg2d_cache msk_cache; + + unsigned int seq_no; + struct fimg2d_context *ctx; + struct list_head node; +}; + +/** + * @pwron: power status for runtime pm + * @pwrlock: spinlock for runtime pm + * @mem: resource platform device + * @regs: base address of hardware + * @dev: pointer to device struct + * @err: true if hardware is timed out while blitting + * @irq: irq number + * @nctx: context count + * @busy: 1 if hardware is running + * @bltlock: spinlock for blit + * @wait_q: blit wait queue head + * @cmd_q: blit command queue + * @workqueue: workqueue_struct for kfimg2dd + */ +struct fimg2d_control { + atomic_t pwron; + spinlock_t pwrlock; + struct clk *clock; + struct device *dev; + struct resource *mem; + void __iomem *regs; + + bool err; + int irq; + atomic_t nctx; + atomic_t busy; + atomic_t active; + spinlock_t bltlock; + wait_queue_head_t wait_q; + struct list_head cmd_q; + struct workqueue_struct *work_q; + + void (*blit)(struct fimg2d_control *info); + void (*configure)(struct fimg2d_control *info, struct fimg2d_bltcmd *cmd); + void (*run)(struct fimg2d_control *info); + void (*stop)(struct fimg2d_control *info); + void (*finalize)(struct fimg2d_control *info); +}; + +inline void fimg2d_enqueue(struct fimg2d_control *info, + struct list_head *node, struct list_head *q); +inline void fimg2d_dequeue(struct fimg2d_control *info, struct list_head *node); +inline int fimg2d_queue_is_empty(struct list_head *q); +inline struct fimg2d_bltcmd * fimg2d_get_first_command(struct fimg2d_control *info); +int fimg2d_add_command(struct fimg2d_control *info, struct fimg2d_context *ctx, + struct fimg2d_blit __user *u); +inline void fimg2d_add_context(struct fimg2d_control *info, struct fimg2d_context *ctx); +inline void fimg2d_del_context(struct fimg2d_control *info, struct fimg2d_context *ctx); +int fimg2d_register_ops(struct fimg2d_control *info); +#if defined(CONFIG_OUTER_CACHE) && defined(CONFIG_ARM) +void fimg2d_clean_pagetable(struct mm_struct *mm, unsigned long addr, + unsigned long size); +#else +#define 
fimg2d_clean_pagetable(mm, addr, size) do { } while (0) +#endif + +#endif /* __KERNEL__ */ + +#endif /*_SEC_G2D_DRIVER_H_*/ diff --git a/include/sec_g2d_5x.h b/include/sec_g2d_5x.h new file mode 100644 index 0000000..9932b0b --- /dev/null +++ b/include/sec_g2d_5x.h @@ -0,0 +1,409 @@ +/* + * Copyright 2011, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _SEC_G2D_DRIVER_H_ +#define _SEC_G2D_DRIVER_H_ + +#define SEC_G2D_DEV_NAME "/dev/fimg2d" +#define G2D_ALPHA_VALUE_MAX (255) + +/* ioctl commands */ +#define FIMG2D_IOCTL_MAGIC 'F' +#define FIMG2D_BITBLT_BLIT _IOWR(FIMG2D_IOCTL_MAGIC, 0, struct fimg2d_blit) +#define FIMG2D_BITBLT_SYNC _IOW(FIMG2D_IOCTL_MAGIC, 1, int) +#define FIMG2D_BITBLT_VERSION _IOR(FIMG2D_IOCTL_MAGIC, 2, struct fimg2d_version) +#define FIMG2D_BITBLT_ACTIVATE _IOW(FIMG2D_IOCTL_MAGIC, 3, enum driver_act) + +#define MAX_SRC 3 + +enum driver_act { + DRV_ACT = 0, + DRV_DEACT +}; + +struct fimg2d_version { + unsigned int hw; + unsigned int sw; +}; + +enum fimg2d_qos_level { + G2D_LV0 = 0, + G2D_LV1, + G2D_LV2, + G2D_LV3, + G2D_LV4, + G2D_LV_END +}; + +/** + * @BLIT_SYNC: sync mode, to wait for blit done irq + * @BLIT_ASYNC: async mode, not to wait for blit done irq + * + */ +enum blit_sync { + BLIT_SYNC, + BLIT_ASYNC, +}; + +/** + * @ADDR_PHYS: physical address + * @ADDR_USER: user virtual address (physically Non-contiguous) + * @ADDR_DEVICE: specific device virtual address + */ +enum addr_space { + ADDR_NONE, + ADDR_USER, + ADDR_DEVICE, +}; + +/** + * Pixel order complies with little-endian style + * + * DO NOT CHANGE THIS ORDER + */ +enum pixel_order { + PO_ARGB = 0, + PO_BGRA, + PO_RGBA, + PO_RGB, + ARGB_ORDER_END, +}; + +/** + * DO NOT CHANGE THIS ORDER + */ +enum color_format { + CF_ARGB_8888 = 0, + CF_RGB_565, + SRC_DST_RGB_FORMAT_END, +}; + +enum rotation { + ORIGIN, + ROT_90, /* clockwise */ + ROT_180, + ROT_270, + XFLIP, /* x-axis flip */ + YFLIP, /* y-axis flip */ +}; + +/** + * @NO_REPEAT: no effect + * @REPEAT_NORMAL: repeat horizontally and vertically + * @REPEAT_PAD: pad with pad color + * @REPEAT_REFLECT: reflect horizontally and vertically + * @REPEAT_CLAMP: pad with edge color of original image + * + * DO NOT CHANGE THIS ORDER + */ +enum repeat { + NO_REPEAT = 0, + REPEAT_NORMAL, /* default setting */ + REPEAT_PAD, + REPEAT_REFLECT, REPEAT_MIRROR = REPEAT_REFLECT, + REPEAT_CLAMP, +}; + +enum scaling { + NO_SCALING, + SCALING_BILINEAR, +}; + +/** + * premultiplied alpha + */ +enum premultiplied { + PREMULTIPLIED, + NON_PREMULTIPLIED, +}; + +/** + * DO NOT CHANGE THIS ORDER + */ +enum blit_op { + BLIT_OP_NONE = 0, + + BLIT_OP_SOLID_FILL, + BLIT_OP_CLR, + BLIT_OP_SRC, BLIT_OP_SRC_COPY = BLIT_OP_SRC, + BLIT_OP_DST, + BLIT_OP_SRC_OVER, + BLIT_OP_DST_OVER, BLIT_OP_OVER_REV = BLIT_OP_DST_OVER, + BLIT_OP_SRC_IN, + BLIT_OP_DST_IN, BLIT_OP_IN_REV = BLIT_OP_DST_IN, + BLIT_OP_SRC_OUT, + BLIT_OP_DST_OUT, BLIT_OP_OUT_REV = BLIT_OP_DST_OUT, + BLIT_OP_SRC_ATOP, + BLIT_OP_DST_ATOP, BLIT_OP_ATOP_REV = BLIT_OP_DST_ATOP, + 
BLIT_OP_XOR, + + BLIT_OP_ADD, + BLIT_OP_MULTIPLY, + BLIT_OP_SCREEN, + BLIT_OP_DARKEN, + BLIT_OP_LIGHTEN, + + BLIT_OP_DISJ_SRC_OVER, + BLIT_OP_DISJ_DST_OVER, BLIT_OP_SATURATE = BLIT_OP_DISJ_DST_OVER, + BLIT_OP_DISJ_SRC_IN, + BLIT_OP_DISJ_DST_IN, BLIT_OP_DISJ_IN_REV = BLIT_OP_DISJ_DST_IN, + BLIT_OP_DISJ_SRC_OUT, + BLIT_OP_DISJ_DST_OUT, BLIT_OP_DISJ_OUT_REV = BLIT_OP_DISJ_DST_OUT, + BLIT_OP_DISJ_SRC_ATOP, + BLIT_OP_DISJ_DST_ATOP, BLIT_OP_DISJ_ATOP_REV = BLIT_OP_DISJ_DST_ATOP, + BLIT_OP_DISJ_XOR, + + BLIT_OP_CONJ_SRC_OVER, + BLIT_OP_CONJ_DST_OVER, BLIT_OP_CONJ_OVER_REV = BLIT_OP_CONJ_DST_OVER, + BLIT_OP_CONJ_SRC_IN, + BLIT_OP_CONJ_DST_IN, BLIT_OP_CONJ_IN_REV = BLIT_OP_CONJ_DST_IN, + BLIT_OP_CONJ_SRC_OUT, + BLIT_OP_CONJ_DST_OUT, BLIT_OP_CONJ_OUT_REV = BLIT_OP_CONJ_DST_OUT, + BLIT_OP_CONJ_SRC_ATOP, + BLIT_OP_CONJ_DST_ATOP, BLIT_OP_CONJ_ATOP_REV = BLIT_OP_CONJ_DST_ATOP, + BLIT_OP_CONJ_XOR, + + /* user select coefficient manually */ + BLIT_OP_USER_COEFF, + + BLIT_OP_USER_SRC_GA, + + /* Add new operation type here */ + + /* end of blit operation */ + BLIT_OP_END, + + /* driver not supporting format */ + BLIT_OP_NOT_SUPPORTED +}; + +/** + * @start: start address or unique id of image + */ +struct fimg2d_addr { + enum addr_space type; + unsigned long start; + unsigned long header; +}; + +struct fimg2d_rect { + int x1; + int y1; + int x2; /* x1 + width */ + int y2; /* y1 + height */ +}; + +/** + * pixels can be different from src, dst or clip rect + */ +struct fimg2d_scale { + enum scaling mode; + + /* ratio in pixels */ + int src_w, src_h; + int dst_w, dst_h; +}; + +struct fimg2d_clip { + bool enable; + int x1; + int y1; + int x2; /* x1 + width */ + int y2; /* y1 + height */ +}; + +struct fimg2d_repeat { + enum repeat mode; + unsigned long pad_color; +}; + +/** + * @solid_color: + * src color instead of src image / dst color instead of dst read image. + * color format and order must be ARGB8888(A is MSB). + * premultiplied format must be same to 'premult' of this struct. + * @g_alpha: global(constant) alpha. 0xff is opaque, 0 is transparnet + * @dither: dithering + * @rotate: rotation degree in clockwise + * @premult: alpha premultiplied mode for read & write + * @scaling: common scaling info for src and mask image. + * @repeat: repeat type (tile mode) + * @bluscr: blue screen and transparent mode + * @clipping: clipping rect within dst rect + */ +struct fimg2d_param { + unsigned long solid_color; + unsigned char g_alpha; + enum rotation rotate; + enum premultiplied premult; + struct fimg2d_scale scaling; + struct fimg2d_repeat repeat; + struct fimg2d_clip clipping; +}; + + +/** + * @rect: crop/clip rect + * @need_cacheopr: true if cache coherency is required + */ +struct fimg2d_image { + int layer_num; + int width; + int height; + int stride; + enum blit_op op; + enum pixel_order order; + enum color_format fmt; + struct fimg2d_param param; + struct fimg2d_addr addr; + struct fimg2d_rect rect; + bool need_cacheopr; + int acquire_fence_fd; + int release_fence_fd; + struct sync_fence *fence; +}; + +/** + * @op: blit operation mode + * @src: set when using src image + * @msk: set when using mask image + * @tmp: set when using 2-step blit at a single command + * @dst: dst must not be null + * * tmp image must be the same to dst except memory address + * @seq_no: user debugging info. + * for example, user can set sequence number or pid. 
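+ *
+ * Note (editor's addition): unlike the 4.x layout, this 5.x variant carries
+ * no top-level op/param/msk/tmp fields; the operation and parameters travel
+ * per layer inside each fimg2d_image, and up to MAX_SRC source layers are
+ * passed through src[]. A hedged single-layer sketch, assuming src_img and
+ * dst_img are caller-prepared images and fd is open on SEC_G2D_DEV_NAME:
+ *
+ *   struct fimg2d_blit cmd;
+ *   memset(&cmd, 0, sizeof(cmd));
+ *   src_img.op = BLIT_OP_SRC;
+ *   src_img.param.g_alpha = G2D_ALPHA_VALUE_MAX;
+ *   cmd.src[0] = &src_img;
+ *   cmd.dst = &dst_img;
+ *   cmd.seq_no = getpid();
+ *   ioctl(fd, FIMG2D_BITBLT_BLIT, &cmd);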
+ */ +struct fimg2d_blit { + bool use_fence; /* should be clear */ + bool dither; + struct fimg2d_image *src[MAX_SRC]; + struct fimg2d_image *dst; + unsigned int seq_no; + enum fimg2d_qos_level qos_lv; +}; + +#ifdef __KERNEL__ + +/** + * @pgd: base address of arm mmu pagetable + * @ncmd: request count in blit command queue + * @wait_q: conext wait queue head + */ +struct fimg2d_context { + struct mm_struct *mm; + atomic_t ncmd; + wait_queue_head_t wait_q; +}; + +/** + * @seq_no: used for debugging + * @node: list head of blit command queue + */ +struct fimg2d_bltcmd { + enum blit_op op; + + enum premultiplied premult; + unsigned char g_alpha; + bool dither; + enum rotation rotate; + struct fimg2d_scale scaling; + struct fimg2d_repeat repeat; + struct fimg2d_bluscr bluscr; + struct fimg2d_clip clipping; + + bool srcen; + bool dsten; + bool msken; + + unsigned long solid_color; + struct fimg2d_image src; + struct fimg2d_image dst; + struct fimg2d_image msk; + + struct fimg2d_rect src_rect; + struct fimg2d_rect dst_rect; + struct fimg2d_rect msk_rect; + + size_t size_all; + struct fimg2d_cache src_cache; + struct fimg2d_cache dst_cache; + struct fimg2d_cache msk_cache; + + unsigned int seq_no; + struct fimg2d_context *ctx; + struct list_head node; +}; + +/** + * @pwron: power status for runtime pm + * @pwrlock: spinlock for runtime pm + * @mem: resource platform device + * @regs: base address of hardware + * @dev: pointer to device struct + * @err: true if hardware is timed out while blitting + * @irq: irq number + * @nctx: context count + * @busy: 1 if hardware is running + * @bltlock: spinlock for blit + * @wait_q: blit wait queue head + * @cmd_q: blit command queue + * @workqueue: workqueue_struct for kfimg2dd + */ +struct fimg2d_control { + atomic_t pwron; + spinlock_t pwrlock; + struct clk *clock; + struct device *dev; + struct resource *mem; + void __iomem *regs; + + bool err; + int irq; + atomic_t nctx; + atomic_t busy; + atomic_t active; + spinlock_t bltlock; + wait_queue_head_t wait_q; + struct list_head cmd_q; + struct workqueue_struct *work_q; + + void (*blit)(struct fimg2d_control *info); + void (*configure)(struct fimg2d_control *info, struct fimg2d_bltcmd *cmd); + void (*run)(struct fimg2d_control *info); + void (*stop)(struct fimg2d_control *info); + void (*finalize)(struct fimg2d_control *info); +}; + +inline void fimg2d_enqueue(struct fimg2d_control *info, + struct list_head *node, struct list_head *q); +inline void fimg2d_dequeue(struct fimg2d_control *info, struct list_head *node); +inline int fimg2d_queue_is_empty(struct list_head *q); +inline struct fimg2d_bltcmd * fimg2d_get_first_command(struct fimg2d_control *info); +int fimg2d_add_command(struct fimg2d_control *info, struct fimg2d_context *ctx, + struct fimg2d_blit __user *u); +inline void fimg2d_add_context(struct fimg2d_control *info, struct fimg2d_context *ctx); +inline void fimg2d_del_context(struct fimg2d_control *info, struct fimg2d_context *ctx); +int fimg2d_register_ops(struct fimg2d_control *info); +#if defined(CONFIG_OUTER_CACHE) && defined(CONFIG_ARM) +void fimg2d_clean_pagetable(struct mm_struct *mm, unsigned long addr, + unsigned long size); +#else +#define fimg2d_clean_pagetable(mm, addr, size) do { } while (0) +#endif + +#endif /* __KERNEL__ */ + +#endif /*_SEC_G2D_DRIVER_H_*/ diff --git a/include/swconverter.h b/include/swconverter.h new file mode 100644 index 0000000..877e4ba --- /dev/null +++ b/include/swconverter.h @@ -0,0 +1,405 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. 
LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file swconverter.h + * @brief Exynos_OMX specific define. It support MFC's tiled format. + * NV12T(tiled) layout: + * Each element is not pixel. + * MFC 5.x : It is 64x32 pixel block. + * MFC 6.x : It is 16x16 pixel block. + * uv pixel block is interleaved as u v u v u v ... + * y1 y2 y7 y8 y9 y10 y15 y16 + * y3 y4 y5 y6 y11 y12 y13 y14 + * y17 y18 y23 y24 y25 y26 y31 y32 + * y19 y20 y21 y22 y27 y28 y29 y30 + * uv1 uv2 uv7 uv8 uv9 uv10 uv15 uv16 + * uv3 uv4 uv5 uv6 uv11 uv12 uv13 uv14 + * YUV420Planar(linear) layout: + * Each element is not pixel. It is 64x32 pixel block. + * y1 y2 y3 y4 y5 y6 y7 y8 + * y9 y10 y11 y12 y13 y14 y15 y16 + * y17 y18 y19 y20 y21 y22 y23 y24 + * y25 y26 y27 y28 y29 y30 y31 y32 + * u1 u2 u3 u4 u5 u6 u7 u8 + * v1 v2 v3 v4 v5 v6 v7 v8 + * YUV420Semiplanar(linear) layout: + * Each element is not pixel. It is 64x32 pixel block. + * uv pixel block is interleaved as u v u v u v ... + * y1 y2 y3 y4 y5 y6 y7 y8 + * y9 y10 y11 y12 y13 y14 y15 y16 + * y17 y18 y19 y20 y21 y22 y23 y24 + * y25 y26 y27 y28 y29 y30 y31 y32 + * uv1 uv2 uv3 uv4 uv5 uv6 uv7 uv8 + * uv9 uv10 uv11 uv12 uv13 uv14 uv15 uv16 + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +#ifndef SW_CONVERTOR_H_ +#define SW_CONVERTOR_H_ + +/*--------------------------------------------------------------------------------*/ +/* Format Conversion API */ +/*--------------------------------------------------------------------------------*/ +/* + * C code only + * De-interleaves src to dest1, dest2 + * + * @param dest1 + * Address of de-interleaved data[out] + * + * @param dest2 + * Address of de-interleaved data[out] + * + * @param src + * Address of interleaved data[in] + * + * @param src_size + * Size of interleaved data[in] + */ +void csc_deinterleave_memcpy( + unsigned char *dest1, + unsigned char *dest2, + unsigned char *src, + unsigned int src_size); + +/* + * C code or Neon + * Interleaves src1, src2 to dest + * + * @param dest + * Address of interleaved data[out] + * + * @param src1 + * Address of de-interleaved data[in] + * + * @param src2 + * Address of de-interleaved data[in] + * + * @param src_size + * Size of de-interleaved data[in] + */ +void csc_interleave_memcpy( + unsigned char *dest, + unsigned char *src1, + unsigned char *src2, + unsigned int src_size); + +/* + * C code or Neon + * Converts tiled data to linear + * 1. y of nv12t to y of yuv420p + * 2. y of nv12t to y of yuv420s + * + * @param dst + * y address of yuv420[out] + * + * @param src + * y address of nv12t[in] + * + * @param yuv420_width + * real width of yuv420[in] + * it should be even + * + * @param yuv420_height + * real height of yuv420[in] + * it should be even. + * + */ +void csc_tiled_to_linear_y( + unsigned char *y_dst, + unsigned char *y_src, + unsigned int width, + unsigned int height); + +/* + * C code or Neon + * Converts tiled data to linear + * 1. 
uv of nv12t to y of yuv420s + * + * @param dst + * uv address of yuv420s[out] + * + * @param src + * uv address of nv12t[in] + * + * @param yuv420_width + * real width of yuv420s[in] + * + * @param yuv420_height + * real height of yuv420s[in] + * + */ +void csc_tiled_to_linear_uv( + unsigned char *uv_dst, + unsigned char *uv_src, + unsigned int width, + unsigned int height); + +/* + * C code or Neon + * Converts tiled data to linear + * 1. uv of nt12t to uv of yuv420p + * + * @param u_dst + * u address of yuv420p[out] + * + * @param v_dst + * v address of yuv420p[out] + * + * @param uv_src + * uv address of nt12t[in] + * + * @param yuv420_width + * real width of yuv420p[in] + * + * @param yuv420_height + * real height of yuv420p[in] + */ +void csc_tiled_to_linear_uv_deinterleave( + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *uv_src, + unsigned int width, + unsigned int height); + +/* + * Neon only + * Converts linear data to tiled + * 1. y of yuv420 to y of nv12t + * + * @param dst + * y address of nv12t[out] + * + * @param src + * y address of yuv420[in] + * + * @param yuv420_width + * real width of yuv420[in] + * it should be even + * + * @param yuv420_height + * real height of yuv420[in] + * it should be even. + * + */ +void csc_linear_to_tiled_y( + unsigned char *y_dst, + unsigned char *y_src, + unsigned int width, + unsigned int height); + +/* + * Neon only + * Converts and interleaves linear data to tiled + * 1. uv of nv12t to uv of yuv420 + * + * @param dst + * uv address of nv12t[out] + * + * @param src + * u address of yuv420[in] + * + * @param src + * v address of yuv420[in] + * + * @param yuv420_width + * real width of yuv420[in] + * + * @param yuv420_height + * real height of yuv420[in] + * + */ +void csc_linear_to_tiled_uv( + unsigned char *uv_dst, + unsigned char *u_src, + unsigned char *v_src, + unsigned int width, + unsigned int height); + +/* + * C code only + * Converts RGB565 to YUV420P + * + * @param y_dst + * Y plane address of YUV420P[out] + * + * @param u_dst + * U plane address of YUV420P[out] + * + * @param v_dst + * V plane address of YUV420P[out] + * + * @param rgb_src + * Address of RGB565[in] + * + * @param width + * Width of RGB565[in] + * + * @param height + * Height of RGB565[in] + */ +void csc_RGB565_to_YUV420P( + unsigned char *y_dst, + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *rgb_src, + int width, + int height); + +/* + * C code only + * Converts RGB565 to YUV420SP + * + * @param y_dst + * Y plane address of YUV420SP[out] + * + * @param uv_dst + * UV plane address of YUV420SP[out] + * + * @param rgb_src + * Address of RGB565[in] + * + * @param width + * Width of RGB565[in] + * + * @param height + * Height of RGB565[in] + */ +void csc_RGB565_to_YUV420SP( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + int width, + int height); + +/* + * C code only + * Converts BGRA8888 to YUV420P + * + * @param y_dst + * Y plane address of YUV420P[out] + * + * @param u_dst + * U plane address of YUV420P[out] + * + * @param v_dst + * V plane address of YUV420P[out] + * + * @param rgb_src + * Address of BGRA8888[in] + * + * @param width + * Width of BGRA8888[in] + * + * @param height + * Height of BGRA8888[in] + */ +void csc_BGRA8888_to_YUV420P( + unsigned char *y_dst, + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height); + +/* + * C code or Neon + * Converts BGRA8888 to YUV420SP + * + * @param y_dst + * Y plane address of YUV420SP[out] + * 
+ * @param uv_dst + * UV plane address of YUV420SP[out] + * + * @param rgb_src + * Address of BGRA8888[in] + * + * @param width + * Width of BGRA8888[in] + * + * @param height + * Height of BGRA8888[in] + */ +void csc_BGRA8888_to_YUV420SP( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height); + +/* + * C code only + * Converts RGBA8888 to YUV420P + * + * @param y_dst + * Y plane address of YUV420P[out] + * + * @param u_dst + * U plane address of YUV420P[out] + * + * @param v_dst + * V plane address of YUV420P[out] + * + * @param rgb_src + * Address of RGBA8888[in] + * + * @param width + * Width of RGBA8888[in] + * + * @param height + * Height of RGBA8888[in] + */ +void csc_RGBA8888_to_YUV420P( + unsigned char *y_dst, + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height); + +/* + * C code or Neon + * Converts RGBA8888 to YUV420SP + * + * @param y_dst + * Y plane address of YUV420SP[out] + * + * @param uv_dst + * UV plane address of YUV420SP[out] + * + * @param rgb_src + * Address of RGBA8888[in] + * + * @param width + * Width of RGBA8888[in] + * + * @param height + * Height of RGBA8888[in] + */ +void csc_RGBA8888_to_YUV420SP( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height); + +#endif /*COLOR_SPACE_CONVERTOR_H_*/ diff --git a/include/v4l2-mediabus.h b/include/v4l2-mediabus.h new file mode 100644 index 0000000..09447c2 --- /dev/null +++ b/include/v4l2-mediabus.h @@ -0,0 +1,100 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_V4L2_MEDIABUS_H +#define __LINUX_V4L2_MEDIABUS_H +#include +#include "videodev2.h" +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mbus_pixelcode { + V4L2_MBUS_FMT_FIXED = 0x0001, + V4L2_MBUS_FMT_RGB444_2X8_PADHI_BE = 0x1001, + V4L2_MBUS_FMT_RGB444_2X8_PADHI_LE = 0x1002, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_RGB555_2X8_PADHI_BE = 0x1003, + V4L2_MBUS_FMT_RGB555_2X8_PADHI_LE = 0x1004, + V4L2_MBUS_FMT_BGR565_2X8_BE = 0x1005, + V4L2_MBUS_FMT_BGR565_2X8_LE = 0x1006, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_RGB565_2X8_BE = 0x1007, + V4L2_MBUS_FMT_RGB565_2X8_LE = 0x1008, + V4L2_MBUS_FMT_XRGB8888_4X8_LE = 0x1009, + V4L2_MBUS_FMT_Y8_1X8 = 0x2001, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_UYVY8_1_5X8 = 0x2002, + V4L2_MBUS_FMT_VYUY8_1_5X8 = 0x2003, + V4L2_MBUS_FMT_YUYV8_1_5X8 = 0x2004, + V4L2_MBUS_FMT_YVYU8_1_5X8 = 0x2005, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_UYVY8_2X8 = 0x2006, + V4L2_MBUS_FMT_VYUY8_2X8 = 0x2007, + V4L2_MBUS_FMT_YUYV8_2X8 = 0x2008, + V4L2_MBUS_FMT_YVYU8_2X8 = 0x2009, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_Y10_1X10 = 0x200a, + V4L2_MBUS_FMT_YUYV10_2X10 = 0x200b, + V4L2_MBUS_FMT_YVYU10_2X10 = 0x200c, + V4L2_MBUS_FMT_Y12_1X12 = 0x2013, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_UYVY8_1X16 = 0x200f, + V4L2_MBUS_FMT_VYUY8_1X16 = 0x2010, + V4L2_MBUS_FMT_YUYV8_1X16 = 0x2011, + V4L2_MBUS_FMT_YVYU8_1X16 = 0x2012, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_YUV8_1X24 = 0x2014, + V4L2_MBUS_FMT_YUYV10_1X20 = 0x200d, + V4L2_MBUS_FMT_YVYU10_1X20 = 0x200e, + V4L2_MBUS_FMT_SBGGR8_1X8 = 0x3001, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_SGBRG8_1X8 = 0x3013, + V4L2_MBUS_FMT_SGRBG8_1X8 = 0x3002, + V4L2_MBUS_FMT_SRGGB8_1X8 = 0x3014, + V4L2_MBUS_FMT_SBGGR10_DPCM8_1X8 = 0x300b, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_SGBRG10_DPCM8_1X8 = 0x300c, + V4L2_MBUS_FMT_SGRBG10_DPCM8_1X8 = 0x3009, + V4L2_MBUS_FMT_SRGGB10_DPCM8_1X8 = 0x300d, + V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_BE = 0x3003, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_SBGGR10_2X8_PADHI_LE = 0x3004, + V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_BE = 0x3005, + V4L2_MBUS_FMT_SBGGR10_2X8_PADLO_LE = 0x3006, + V4L2_MBUS_FMT_SBGGR10_1X10 = 0x3007, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_SGBRG10_1X10 = 0x300e, + V4L2_MBUS_FMT_SGRBG10_1X10 = 0x300a, + V4L2_MBUS_FMT_SRGGB10_1X10 = 0x300f, + V4L2_MBUS_FMT_SBGGR12_1X12 = 0x3008, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MBUS_FMT_SGBRG12_1X12 = 0x3010, + V4L2_MBUS_FMT_SGRBG12_1X12 = 0x3011, + V4L2_MBUS_FMT_SRGGB12_1X12 = 0x3012, + V4L2_MBUS_FMT_JPEG_1X8 = 0x4001, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_mbus_framefmt { + __u32 width; + __u32 height; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 code; + __u32 field; + __u32 
colorspace; + __u32 reserved[7]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#endif diff --git a/include/v4l2-subdev.h b/include/v4l2-subdev.h new file mode 100644 index 0000000..5708b8f --- /dev/null +++ b/include/v4l2-subdev.h @@ -0,0 +1,94 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_V4L2_SUBDEV_H +#define __LINUX_V4L2_SUBDEV_H +#include +#include +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#include "v4l2-mediabus.h" +enum v4l2_subdev_format_whence { + V4L2_SUBDEV_FORMAT_TRY = 0, + V4L2_SUBDEV_FORMAT_ACTIVE = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_subdev_format { + __u32 which; + __u32 pad; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_mbus_framefmt format; + __u32 reserved[8]; +}; +struct v4l2_subdev_crop { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 which; + __u32 pad; + struct v4l2_rect rect; + __u32 reserved[8]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_subdev_mbus_code_enum { + __u32 pad; + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 code; + __u32 reserved[9]; +}; +struct v4l2_subdev_frame_size_enum { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u32 pad; + __u32 code; + __u32 min_width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 max_width; + __u32 min_height; + __u32 max_height; + __u32 reserved[9]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_subdev_frame_interval { + __u32 pad; + struct v4l2_fract interval; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[9]; +}; +struct v4l2_subdev_frame_interval_enum { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pad; + __u32 code; + __u32 width; + __u32 height; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract interval; + __u32 reserved[9]; +}; +#define VIDIOC_SUBDEV_G_FMT _IOWR('V', 4, struct v4l2_subdev_format) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_SUBDEV_S_FMT _IOWR('V', 5, struct v4l2_subdev_format) +#define VIDIOC_SUBDEV_G_FRAME_INTERVAL _IOWR('V', 21, struct v4l2_subdev_frame_interval) +#define VIDIOC_SUBDEV_S_FRAME_INTERVAL _IOWR('V', 22, struct 
v4l2_subdev_frame_interval) +#define VIDIOC_SUBDEV_ENUM_MBUS_CODE _IOWR('V', 2, struct v4l2_subdev_mbus_code_enum) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_SUBDEV_ENUM_FRAME_SIZE _IOWR('V', 74, struct v4l2_subdev_frame_size_enum) +#define VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL _IOWR('V', 75, struct v4l2_subdev_frame_interval_enum) +#define VIDIOC_SUBDEV_G_CROP _IOWR('V', 59, struct v4l2_subdev_crop) +#define VIDIOC_SUBDEV_S_CROP _IOWR('V', 60, struct v4l2_subdev_crop) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#endif diff --git a/kernel-3.10-headers/linux/v4l2-common.h b/kernel-3.10-headers/linux/v4l2-common.h new file mode 100644 index 0000000..814d960 --- /dev/null +++ b/kernel-3.10-headers/linux/v4l2-common.h @@ -0,0 +1,45 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __V4L2_COMMON__ +#define __V4L2_COMMON__ +#define V4L2_SEL_TGT_CROP 0x0000 +#define V4L2_SEL_TGT_CROP_DEFAULT 0x0001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SEL_TGT_CROP_BOUNDS 0x0002 +#define V4L2_SEL_TGT_COMPOSE 0x0100 +#define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101 +#define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103 +#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP +#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE +#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE +#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS +#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS +#define V4L2_SEL_FLAG_GE (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SEL_FLAG_LE (1 << 1) +#define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2) +#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE +#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG +#endif diff --git a/kernel-3.10-headers/linux/v4l2-controls.h b/kernel-3.10-headers/linux/v4l2-controls.h new file mode 100644 index 0000000..eb8aebb --- /dev/null +++ b/kernel-3.10-headers/linux/v4l2-controls.h @@ -0,0 +1,901 @@ +/**************************************************************************** + 
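For reference, a minimal sketch of driving the sub-device ioctls declared above from user space: open a v4l-subdev node, fill struct v4l2_subdev_format, and issue VIDIOC_SUBDEV_S_FMT. The device path, pad number, and media-bus code are placeholders, and the helper is illustrative rather than anything used by this tree.

    #include <fcntl.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include "v4l2-subdev.h"   /* local copy above; pulls in v4l2-mediabus.h */

    /* Set the active format on pad 0 of a sub-device node, e.g. "/dev/v4l-subdev0". */
    int set_subdev_format(const char *node, unsigned int width, unsigned int height)
    {
        int fd = open(node, O_RDWR);
        if (fd < 0)
            return -1;

        struct v4l2_subdev_format fmt;
        memset(&fmt, 0, sizeof(fmt));
        fmt.which         = V4L2_SUBDEV_FORMAT_ACTIVE;   /* apply, not just try   */
        fmt.pad           = 0;
        fmt.format.width  = width;
        fmt.format.height = height;
        fmt.format.code   = V4L2_MBUS_FMT_SBGGR10_1X10;  /* placeholder mbus code */

        int ret = ioctl(fd, VIDIOC_SUBDEV_S_FMT, &fmt);
        if (ret < 0)
            perror("VIDIOC_SUBDEV_S_FMT");

        close(fd);
        return ret;
    }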
**************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_V4L2_CONTROLS_H +#define __LINUX_V4L2_CONTROLS_H +#define V4L2_CTRL_CLASS_USER 0x00980000 +#define V4L2_CTRL_CLASS_MPEG 0x00990000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 +#define V4L2_CTRL_CLASS_FM_TX 0x009b0000 +#define V4L2_CTRL_CLASS_FLASH 0x009c0000 +#define V4L2_CTRL_CLASS_JPEG 0x009d0000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_IMAGE_SOURCE 0x009e0000 +#define V4L2_CTRL_CLASS_IMAGE_PROC 0x009f0000 +#define V4L2_CTRL_CLASS_DV 0x00a00000 +#define V4L2_CTRL_CLASS_FM_RX 0x00a10000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900) +#define V4L2_CID_USER_BASE V4L2_CID_BASE +#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1) +#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1) +#define V4L2_CID_SATURATION (V4L2_CID_BASE+2) +#define V4L2_CID_HUE (V4L2_CID_BASE+3) +#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6) +#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7) +#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8) +#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10) +#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) +#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12) +#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14) +#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15) +#define V4L2_CID_GAMMA (V4L2_CID_BASE+16) +#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17) +#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18) +#define V4L2_CID_GAIN (V4L2_CID_BASE+19) +#define V4L2_CID_HFLIP (V4L2_CID_BASE+20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_VFLIP (V4L2_CID_BASE+21) +#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24) +enum v4l2_power_line_frequency { + V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1, + V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 
2, + V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25) +#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26) +#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27) +#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29) +#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30) +#define V4L2_CID_COLORFX (V4L2_CID_BASE+31) +enum v4l2_colorfx { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_NONE = 0, + V4L2_COLORFX_BW = 1, + V4L2_COLORFX_SEPIA = 2, + V4L2_COLORFX_NEGATIVE = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_EMBOSS = 4, + V4L2_COLORFX_SKETCH = 5, + V4L2_COLORFX_SKY_BLUE = 6, + V4L2_COLORFX_GRASS_GREEN = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_SKIN_WHITEN = 8, + V4L2_COLORFX_VIVID = 9, + V4L2_COLORFX_AQUA = 10, + V4L2_COLORFX_ART_FREEZE = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_SILHOUETTE = 12, + V4L2_COLORFX_SOLARIZATION = 13, + V4L2_COLORFX_ANTIQUE = 14, + V4L2_COLORFX_SET_CBCR = 15, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32) +#define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE+33) +#define V4L2_CID_ROTATE (V4L2_CID_BASE+34) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_BG_COLOR (V4L2_CID_BASE+35) +#define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE+36) +#define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE+37) +#define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE+38) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE+39) +#define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE+40) +#define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE+41) +#define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE+42) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_LASTP1 (V4L2_CID_BASE+43) +#define V4L2_CID_USER_MEYE_BASE (V4L2_CID_USER_BASE + 0x1000) +#define V4L2_CID_USER_BTTV_BASE (V4L2_CID_USER_BASE + 0x1010) +#define V4L2_CID_USER_S2255_BASE (V4L2_CID_USER_BASE + 0x1030) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_USER_SI476X_BASE (V4L2_CID_USER_BASE + 0x1040) +#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900) +#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1) +#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE+0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_stream_type { + V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, + V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, + V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, + V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, + V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE+1) +#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE+2) +#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE+3) +#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE+4) +/* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE+5) +#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE+6) +#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE+7) +enum v4l2_mpeg_stream_vbi_fmt { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, + V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, +}; +#define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE+100) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_sampling_freq { + V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0, + V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1, + V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE+101) +enum v4l2_mpeg_audio_encoding { + V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1, + V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2, + V4L2_MPEG_AUDIO_ENCODING_AAC = 3, + V4L2_MPEG_AUDIO_ENCODING_AC3 = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE+102) +enum v4l2_mpeg_audio_l1_bitrate { + V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1, + V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2, + V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3, + V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5, + V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6, + V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7, + V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9, + V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10, + V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11, + V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13, +}; +#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE+103) +enum v4l2_mpeg_audio_l2_bitrate { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1, + V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2, + V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4, + V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5, + V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6, + V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8, + V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9, + V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10, + V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12, + V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13, +}; +#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE+104) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_l3_bitrate { + V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1, + V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + 
V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3, + V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4, + V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5, + V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7, + V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8, + V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9, + V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11, + V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12, + V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE+105) +enum v4l2_mpeg_audio_mode { + V4L2_MPEG_AUDIO_MODE_STEREO = 0, + V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_MODE_DUAL = 2, + V4L2_MPEG_AUDIO_MODE_MONO = 3, +}; +#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE+106) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_mode_extension { + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0, + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1, + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3, +}; +#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE+107) +enum v4l2_mpeg_audio_emphasis { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0, + V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1, + V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE+108) +enum v4l2_mpeg_audio_crc { + V4L2_MPEG_AUDIO_CRC_NONE = 0, + V4L2_MPEG_AUDIO_CRC_CRC16 = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE+109) +#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE+110) +#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE+111) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_ac3_bitrate { + V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1, + V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3, + V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4, + V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5, + V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7, + V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8, + V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9, + V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11, + V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12, + V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13, + V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15, + V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16, + V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17, + V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_MPEG_BASE+112) +enum 
v4l2_mpeg_audio_dec_playback { + V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5, +}; +#define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_MPEG_BASE+113) +#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE+200) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_encoding { + V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0, + V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1, + V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE+201) +enum v4l2_mpeg_video_aspect { + V4L2_MPEG_VIDEO_ASPECT_1x1 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_ASPECT_4x3 = 1, + V4L2_MPEG_VIDEO_ASPECT_16x9 = 2, + V4L2_MPEG_VIDEO_ASPECT_221x100 = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE+202) +#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE+203) +#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE+204) +#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE+205) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE+206) +enum v4l2_mpeg_video_bitrate_mode { + V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0, + V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE+207) +#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE+208) +#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE+209) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE+210) +#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE+211) +#define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_MPEG_BASE+212) +#define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_MPEG_BASE+213) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_MPEG_BASE+214) +#define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_MPEG_BASE+215) +#define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_MPEG_BASE+216) +enum v4l2_mpeg_video_header_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0, + V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1, +}; +#define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_MPEG_BASE+217) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_MPEG_BASE+218) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_MPEG_BASE+219) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_MPEG_BASE+220) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_MPEG_BASE+221) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_multi_slice_mode { + V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0, + 
V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1, + V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_MPEG_BASE+222) +#define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_MPEG_BASE+223) +#define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_MPEG_BASE+224) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_MPEG_BASE+225) +#define V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER (V4L2_CID_MPEG_BASE+226) +#define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_MPEG_BASE+300) +#define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP (V4L2_CID_MPEG_BASE+301) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_MPEG_BASE+302) +#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_MPEG_BASE+303) +#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_MPEG_BASE+304) +#define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_MPEG_BASE+350) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_MPEG_BASE+351) +#define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_MPEG_BASE+352) +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_MPEG_BASE+353) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_MPEG_BASE+354) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_MPEG_BASE+355) +#define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_MPEG_BASE+356) +#define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_MPEG_BASE+357) +enum v4l2_mpeg_video_h264_entropy_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0, + V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1, +}; +#define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_MPEG_BASE+358) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_MPEG_BASE+359) +enum v4l2_mpeg_video_h264_level { + V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0, + V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2, + V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3, + V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4, + V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6, + V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7, + V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8, + V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10, + V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11, + V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12, + V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14, + V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15, +}; +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_MPEG_BASE+360) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_MPEG_BASE+361) +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_MPEG_BASE+362) +enum v4l2_mpeg_video_h264_loop_filter_mode { + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1, + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2, +}; +#define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_MPEG_BASE+363) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_h264_profile { + V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0, + V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1, + V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15, + V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16, +}; +#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_MPEG_BASE+364) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_MPEG_BASE+365) +#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_MPEG_BASE+366) +#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_MPEG_BASE+367) +enum v4l2_mpeg_video_h264_vui_sar_idc { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17, +}; +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_MPEG_BASE+368) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_MPEG_BASE+369) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_MPEG_BASE+370) +enum v4l2_mpeg_video_h264_sei_fp_arrangement_type { + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_CHECKERBOARD = 0, 
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_COLUMN = 1, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_ROW = 2, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_SIDE_BY_SIDE = 3, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TEMPORAL = 5, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_CHEKERBOARD = 0, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_COLUMN = 1, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_ROW = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_SIDE_BY_SIDE = 3, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_TOP_BOTTOM = 4, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_TEMPORAL = 5, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_MPEG_BASE+371) +#define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_MPEG_BASE+372) +enum v4l2_mpeg_video_h264_fmo_map_type { + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6, +}; +#define V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_MPEG_BASE+373) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_MPEG_BASE+374) +enum v4l2_mpeg_video_h264_fmo_change_dir { + V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0, + V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_MPEG_BASE+375) +#define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_MPEG_BASE+376) +#define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_MPEG_BASE+377) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_MPEG_BASE+378) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_MPEG_BASE+379) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_BASE+380) +enum v4l2_mpeg_video_h264_hierarchical_coding_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0, + V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1, +}; +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_BASE+381) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_BASE+382) +#define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_MPEG_BASE+400) +#define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_MPEG_BASE+401) +#define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_MPEG_BASE+402) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_MPEG_BASE+403) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_MPEG_BASE+404) +#define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL 
(V4L2_CID_MPEG_BASE+405) +enum v4l2_mpeg_video_mpeg4_level { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE (V4L2_CID_MPEG_BASE+406) +enum v4l2_mpeg_video_mpeg4_profile { + V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_MPEG_BASE+407) +#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000) +#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_cx2341x_video_spatial_filter_mode { + V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0, + V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+1) +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+2) +enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type { + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+3) +enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type { + V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+4) +enum v4l2_mpeg_cx2341x_video_temporal_filter_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0, + V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+6) +enum v4l2_mpeg_cx2341x_video_median_filter_type { + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS 
*/ + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+7) +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+8) +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+9) +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+10) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE+11) +#define V4L2_CID_MPEG_MFC51_BASE (V4L2_CTRL_CLASS_MPEG | 0x1100) +#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_MPEG_MFC51_BASE+0) +#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_MPEG_MFC51_BASE+1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_MPEG_MFC51_BASE+2) +enum v4l2_mpeg_mfc51_video_frame_skip_mode { + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0, + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2, +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_MPEG_MFC51_BASE+3) +enum v4l2_mpeg_mfc51_video_force_frame_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0, + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1, + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_MPEG_MFC51_BASE+4) +#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_MPEG_MFC51_BASE+5) +#define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_MPEG_MFC51_BASE+6) +#define V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_MPEG_MFC51_BASE+7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC51_BASE+50) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC51_BASE+51) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC51_BASE+52) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC51_BASE+53) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_MPEG_MFC51_BASE+54) +#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900) +#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1) +#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_exposure_auto_type { + V4L2_EXPOSURE_AUTO = 0, + V4L2_EXPOSURE_MANUAL = 1, + V4L2_EXPOSURE_SHUTTER_PRIORITY = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_EXPOSURE_APERTURE_PRIORITY = 3 +}; +#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2) +#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE 
TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4) +#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5) +#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6) +#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8) +#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9) +#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10) +#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12) +#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13) +#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14) +#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16) +#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17) +#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18) +#define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE (V4L2_CID_CAMERA_CLASS_BASE+20) +enum v4l2_auto_n_preset_white_balance { + V4L2_WHITE_BALANCE_MANUAL = 0, + V4L2_WHITE_BALANCE_AUTO = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_WHITE_BALANCE_INCANDESCENT = 2, + V4L2_WHITE_BALANCE_FLUORESCENT = 3, + V4L2_WHITE_BALANCE_FLUORESCENT_H = 4, + V4L2_WHITE_BALANCE_HORIZON = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_WHITE_BALANCE_DAYLIGHT = 6, + V4L2_WHITE_BALANCE_FLASH = 7, + V4L2_WHITE_BALANCE_CLOUDY = 8, + V4L2_WHITE_BALANCE_SHADE = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_WIDE_DYNAMIC_RANGE (V4L2_CID_CAMERA_CLASS_BASE+21) +#define V4L2_CID_IMAGE_STABILIZATION (V4L2_CID_CAMERA_CLASS_BASE+22) +#define V4L2_CID_ISO_SENSITIVITY (V4L2_CID_CAMERA_CLASS_BASE+23) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ISO_SENSITIVITY_AUTO (V4L2_CID_CAMERA_CLASS_BASE+24) +enum v4l2_iso_sensitivity_auto_type { + V4L2_ISO_SENSITIVITY_MANUAL = 0, + V4L2_ISO_SENSITIVITY_AUTO = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_EXPOSURE_METERING (V4L2_CID_CAMERA_CLASS_BASE+25) +enum v4l2_exposure_metering { + V4L2_EXPOSURE_METERING_AVERAGE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_EXPOSURE_METERING_CENTER_WEIGHTED = 1, + V4L2_EXPOSURE_METERING_SPOT = 2, + V4L2_EXPOSURE_METERING_MATRIX = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_SCENE_MODE (V4L2_CID_CAMERA_CLASS_BASE+26) +enum v4l2_scene_mode { + V4L2_SCENE_MODE_NONE = 0, + V4L2_SCENE_MODE_BACKLIGHT = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_SCENE_MODE_BEACH_SNOW = 2, + V4L2_SCENE_MODE_CANDLE_LIGHT = 3, + V4L2_SCENE_MODE_DAWN_DUSK = 4, + V4L2_SCENE_MODE_FALL_COLORS = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_SCENE_MODE_FIREWORKS = 6, + V4L2_SCENE_MODE_LANDSCAPE = 7, + V4L2_SCENE_MODE_NIGHT = 8, + V4L2_SCENE_MODE_PARTY_INDOOR = 9, 
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_SCENE_MODE_PORTRAIT = 10, + V4L2_SCENE_MODE_SPORTS = 11, + V4L2_SCENE_MODE_SUNSET = 12, + V4L2_SCENE_MODE_TEXT = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_3A_LOCK (V4L2_CID_CAMERA_CLASS_BASE+27) +#define V4L2_LOCK_EXPOSURE (1 << 0) +#define V4L2_LOCK_WHITE_BALANCE (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_LOCK_FOCUS (1 << 2) +#define V4L2_CID_AUTO_FOCUS_START (V4L2_CID_CAMERA_CLASS_BASE+28) +#define V4L2_CID_AUTO_FOCUS_STOP (V4L2_CID_CAMERA_CLASS_BASE+29) +#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0) +#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0) +#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1) +#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUTO_FOCUS_RANGE (V4L2_CID_CAMERA_CLASS_BASE+31) +enum v4l2_auto_focus_range { + V4L2_AUTO_FOCUS_RANGE_AUTO = 0, + V4L2_AUTO_FOCUS_RANGE_NORMAL = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_AUTO_FOCUS_RANGE_MACRO = 2, + V4L2_AUTO_FOCUS_RANGE_INFINITY = 3, +}; +#define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1) +#define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1) +#define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2) +#define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5) +#define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6) +#define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64) +#define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66) +#define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80) +#define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81) +#define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83) +#define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84) +#define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96) +#define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98) +#define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112) +enum v4l2_preemphasis { + V4L2_PREEMPHASIS_DISABLED = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PREEMPHASIS_50_uS = 1, + V4L2_PREEMPHASIS_75_uS = 2, +}; +#define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114) +#define 
V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900) +#define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1) +#define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_flash_led_mode { + V4L2_FLASH_LED_MODE_NONE, + V4L2_FLASH_LED_MODE_FLASH, + V4L2_FLASH_LED_MODE_TORCH, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2) +enum v4l2_flash_strobe_source { + V4L2_FLASH_STROBE_SOURCE_SOFTWARE, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FLASH_STROBE_SOURCE_EXTERNAL, +}; +#define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3) +#define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5) +#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6) +#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7) +#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9) +#define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10) +#define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0) +#define V4L2_FLASH_FAULT_TIMEOUT (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2) +#define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3) +#define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4) +#define V4L2_FLASH_FAULT_INDICATOR (1 << 5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11) +#define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12) +#define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900) +#define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1) +enum v4l2_jpeg_chroma_subsampling { + V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0, + V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2, + V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3, + V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4, + V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2) +#define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3) +#define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0) +#define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1) +#define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16) +#define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18) +#define V4L2_CID_IMAGE_SOURCE_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_SOURCE | 0x900) +#define V4L2_CID_IMAGE_SOURCE_CLASS (V4L2_CTRL_CLASS_IMAGE_SOURCE | 1) +#define V4L2_CID_VBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ +#define V4L2_CID_HBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 2) +#define V4L2_CID_ANALOGUE_GAIN (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 3) +#define V4L2_CID_IMAGE_PROC_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_PROC | 0x900) +#define V4L2_CID_IMAGE_PROC_CLASS (V4L2_CTRL_CLASS_IMAGE_PROC | 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_LINK_FREQ (V4L2_CID_IMAGE_PROC_CLASS_BASE + 1) +#define V4L2_CID_PIXEL_RATE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 2) +#define V4L2_CID_TEST_PATTERN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 3) +#define V4L2_CID_DV_CLASS_BASE (V4L2_CTRL_CLASS_DV | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_DV_CLASS (V4L2_CTRL_CLASS_DV | 1) +#define V4L2_CID_DV_TX_HOTPLUG (V4L2_CID_DV_CLASS_BASE + 1) +#define V4L2_CID_DV_TX_RXSENSE (V4L2_CID_DV_CLASS_BASE + 2) +#define V4L2_CID_DV_TX_EDID_PRESENT (V4L2_CID_DV_CLASS_BASE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_DV_TX_MODE (V4L2_CID_DV_CLASS_BASE + 4) +enum v4l2_dv_tx_mode { + V4L2_DV_TX_MODE_DVI_D = 0, + V4L2_DV_TX_MODE_HDMI = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_DV_TX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 5) +enum v4l2_dv_rgb_range { + V4L2_DV_RGB_RANGE_AUTO = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_DV_RGB_RANGE_LIMITED = 1, + V4L2_DV_RGB_RANGE_FULL = 2, +}; +#define V4L2_CID_DV_RX_POWER_PRESENT (V4L2_CID_DV_CLASS_BASE + 100) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_DV_RX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 101) +#define V4L2_CID_FM_RX_CLASS_BASE (V4L2_CTRL_CLASS_FM_RX | 0x900) +#define V4L2_CID_FM_RX_CLASS (V4L2_CTRL_CLASS_FM_RX | 1) +#define V4L2_CID_TUNE_DEEMPHASIS (V4L2_CID_FM_RX_CLASS_BASE + 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_deemphasis { + V4L2_DEEMPHASIS_DISABLED = V4L2_PREEMPHASIS_DISABLED, + V4L2_DEEMPHASIS_50_uS = V4L2_PREEMPHASIS_50_uS, + V4L2_DEEMPHASIS_75_uS = V4L2_PREEMPHASIS_75_uS, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_RDS_RECEPTION (V4L2_CID_FM_RX_CLASS_BASE + 2) +#endif diff --git a/kernel-3.10-headers/v4l2-dv-timings.h b/kernel-3.10-headers/v4l2-dv-timings.h new file mode 100644 index 0000000..7b82e81 --- /dev/null +++ b/kernel-3.10-headers/v4l2-dv-timings.h @@ -0,0 +1,181 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
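For reference, the control IDs above are composed as a class base plus a small offset, and user space passes the resulting V4L2_CID_* value straight to the ordinary VIDIOC_S_CTRL ioctl (struct v4l2_control and the ioctl itself come from videodev2.h). A minimal sketch, assuming a placeholder device node, an include path that may differ in this tree, and V4L2_CID_MPEG_VIDEO_GOP_SIZE picked purely as an example:

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <unistd.h>
    #include "videodev2.h"   /* assumed to pull in the V4L2_CID_* definitions */

    /* Set the encoder GOP size on a video node, e.g. "/dev/video0". */
    int set_gop_size(const char *node, int gop)
    {
        int fd = open(node, O_RDWR);
        if (fd < 0)
            return -1;

        struct v4l2_control ctrl = {
            .id    = V4L2_CID_MPEG_VIDEO_GOP_SIZE, /* (V4L2_CTRL_CLASS_MPEG | 0x900) + 203 */
            .value = gop,
        };

        int ret = ioctl(fd, VIDIOC_S_CTRL, &ctrl);
        if (ret < 0)
            perror("VIDIOC_S_CTRL");

        close(fd);
        return ret;
    }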
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef _V4L2_DV_TIMINGS_H +#define _V4L2_DV_TIMINGS_H +#if __GNUC__ < (4 || __GNUC__ == (4 && __GNUC_MINOR__ < 6)) +#define V4L2_INIT_BT_TIMINGS(_width, args...) { .bt = { _width , ## args } } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#else +#define V4L2_INIT_BT_TIMINGS(_width, args...) .bt = { _width , ## args } +#endif +#define V4L2_DV_BT_CEA_640X480P59_94 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, 25175000, 16, 96, 48, 10, 2, 33, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_720X480I59_94 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(720, 480, 1, 0, 13500000, 19, 62, 57, 4, 3, 15, 4, 3, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) } +#define V4L2_DV_BT_CEA_720X480P59_94 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(720, 480, 0, 0, 27000000, 16, 62, 60, 9, 6, 30, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +#define V4L2_DV_BT_CEA_720X576I50 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(720, 576, 1, 0, 13500000, 12, 63, 69, 2, 3, 19, 2, 3, 20, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) } +#define V4L2_DV_BT_CEA_720X576P50 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(720, 576, 0, 0, 27000000, 12, 64, 68, 5, 5, 39, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1280X720P24 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 59400000, 1760, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1280X720P25 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 2420, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +#define V4L2_DV_BT_CEA_1280X720P30 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 1760, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1280X720P50 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1280X720P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080P24 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080P25 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 528, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +#define V4L2_DV_BT_CEA_1920X1080P30 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, 
V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1920X1080I50 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 528, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) } +#define V4L2_DV_BT_CEA_1920X1080P50 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 528, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +#define V4L2_DV_BT_CEA_1920X1080I60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS | V4L2_DV_FL_HALF_LINE) } +#define V4L2_DV_BT_CEA_1920X1080P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_3840X2160P24 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(3840, 2160, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 297000000, 1276, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_3840X2160P25 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(3840, 2160, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 297000000, 1056, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_3840X2160P30 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(3840, 2160, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 297000000, 176, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_4096X2160P24 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(4096, 2160, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 297000000, 1020, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_DMT_640X350P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(640, 350, 0, V4L2_DV_HSYNC_POS_POL, 31500000, 32, 64, 96, 32, 3, 60, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_640X400P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(640, 400, 0, V4L2_DV_VSYNC_POS_POL, 31500000, 32, 64, 96, 1, 3, 41, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_720X400P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(720, 400, 0, V4L2_DV_VSYNC_POS_POL, 35500000, 36, 72, 108, 1, 3, 42, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_640X480P60 V4L2_DV_BT_CEA_640X480P59_94 +#define V4L2_DV_BT_DMT_640X480P72 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, 31500000, 24, 40, 128, 9, 3, 28, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_640X480P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, 31500000, 16, 64, 120, 1, 3, 16, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_640X480P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(640, 480, 0, 0, 36000000, 56, 56, 80, 1, 3, 25, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE 
TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_800X600P56 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 36000000, 24, 72, 128, 1, 2, 22, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_800X600P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 40000000, 40, 128, 88, 1, 4, 23, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_800X600P72 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 50000000, 56, 120, 64, 37, 6, 23, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_800X600P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 49500000, 16, 80, 160, 1, 3, 21, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_800X600P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 56250000, 32, 64, 152, 1, 3, 27, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_800X600P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(800, 600, 0, V4L2_DV_HSYNC_POS_POL, 73250000, 48, 32, 80, 3, 4, 29, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_848X480P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(848, 480, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 33750000, 16, 112, 112, 6, 8, 23, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1024X768I43 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1024, 768, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 44900000, 8, 176, 56, 0, 4, 20, 0, 4, 21, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1024X768P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1024, 768, 0, 0, 65000000, 24, 136, 160, 3, 6, 29, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1024X768P70 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1024, 768, 0, 0, 75000000, 24, 136, 144, 3, 6, 29, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1024X768P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1024, 768, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 78750000, 16, 96, 176, 1, 3, 28, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1024X768P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1024, 768, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 94500000, 48, 96, 208, 1, 3, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1024X768P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1024, 768, 0, V4L2_DV_HSYNC_POS_POL, 115500000, 48, 32, 80, 3, 4, 38, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1152X864P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1152, 864, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 108000000, 64, 128, 256, 1, 3, 32, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1280X720P60 V4L2_DV_BT_CEA_1280X720P60 +#define V4L2_DV_BT_DMT_1280X768P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_HSYNC_POS_POL, 68250000, 48, 32, 80, 3, 7, 12, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +/* WARNING: DO 
NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1280X768P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_VSYNC_POS_POL, 79500000, 64, 128, 192, 3, 7, 20, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1280X768P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_VSYNC_POS_POL, 102250000, 80, 128, 208, 3, 7, 27, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1280X768P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_VSYNC_POS_POL, 117500000, 80, 136, 216, 3, 7, 31, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1280X768P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 768, 0, V4L2_DV_HSYNC_POS_POL, 140250000, 48, 32, 80, 3, 7, 35, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1280X800P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_HSYNC_POS_POL, 71000000, 48, 32, 80, 3, 6, 14, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1280X800P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_VSYNC_POS_POL, 83500000, 72, 128, 200, 3, 6, 22, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1280X800P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_VSYNC_POS_POL, 106500000, 80, 128, 208, 3, 6, 29, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1280X800P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_VSYNC_POS_POL, 122500000, 80, 136, 216, 3, 6, 34, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1280X800P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 800, 0, V4L2_DV_HSYNC_POS_POL, 146250000, 48, 32, 80, 3, 6, 38, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1280X960P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 960, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 108000000, 96, 112, 312, 1, 3, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1280X960P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 960, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 64, 160, 224, 1, 3, 47, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1280X960P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 960, 0, V4L2_DV_HSYNC_POS_POL, 175500000, 48, 32, 80, 3, 4, 50, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1280X1024P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 1024, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 108000000, 48, 112, 248, 1, 3, 38, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1280X1024P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 1024, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 135000000, 16, 144, 248, 1, 3, 38, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1280X1024P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 1024, 0, V4L2_DV_HSYNC_POS_POL | 
V4L2_DV_VSYNC_POS_POL, 157500000, 64, 160, 224, 1, 3, 44, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1280X1024P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1280, 1024, 0, V4L2_DV_HSYNC_POS_POL, 187250000, 48, 32, 80, 3, 7, 50, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1360X768P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1360, 768, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 85500000, 64, 112, 256, 3, 6, 18, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1360X768P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1360, 768, 0, V4L2_DV_HSYNC_POS_POL, 148250000, 48, 32, 80, 3, 5, 37, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1366X768P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1366, 768, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 85500000, 70, 143, 213, 3, 3, 24, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1366X768P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1366, 768, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 72000000, 14, 56, 64, 1, 3, 28, 0, 0, 0, V4L2_DV_BT_STD_DMT, V4L2_DV_FL_REDUCED_BLANKING) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1400X1050P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_HSYNC_POS_POL, 101000000, 48, 32, 80, 3, 4, 23, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1400X1050P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_VSYNC_POS_POL, 121750000, 88, 144, 232, 3, 4, 32, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1400X1050P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_VSYNC_POS_POL, 156000000, 104, 144, 248, 3, 4, 42, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1400X1050P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_VSYNC_POS_POL, 179500000, 104, 152, 256, 3, 4, 48, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1400X1050P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1400, 1050, 0, V4L2_DV_HSYNC_POS_POL, 208000000, 48, 32, 80, 3, 4, 55, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1440X900P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_HSYNC_POS_POL, 88750000, 48, 32, 80, 3, 6, 17, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1440X900P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_VSYNC_POS_POL, 106500000, 80, 152, 232, 3, 6, 25, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1440X900P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_VSYNC_POS_POL, 136750000, 96, 152, 248, 3, 6, 33, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1440X900P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_VSYNC_POS_POL, 157000000, 104, 152, 256, 3, 6, 39, 0, 0, 0, 
V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1440X900P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1440, 900, 0, V4L2_DV_HSYNC_POS_POL, 182750000, 48, 32, 80, 3, 6, 44, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1600X900P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 900, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 108000000, 24, 80, 96, 1, 3, 96, 0, 0, 0, V4L2_DV_BT_STD_DMT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1600X1200P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 162000000, 64, 192, 304, 1, 3, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1600X1200P65 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 175500000, 64, 192, 304, 1, 3, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1600X1200P70 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 189000000, 64, 192, 304, 1, 3, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1600X1200P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 202500000, 64, 192, 304, 1, 3, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1600X1200P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 229500000, 64, 192, 304, 1, 3, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1600X1200P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1600, 1200, 0, V4L2_DV_HSYNC_POS_POL, 268250000, 48, 32, 80, 3, 4, 64, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1680X1050P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_HSYNC_POS_POL, 119000000, 48, 32, 80, 3, 6, 21, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1680X1050P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_VSYNC_POS_POL, 146250000, 104, 176, 280, 3, 6, 30, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1680X1050P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_VSYNC_POS_POL, 187000000, 120, 176, 296, 3, 6, 40, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1680X1050P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_VSYNC_POS_POL, 214750000, 128, 176, 304, 3, 6, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1680X1050P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1680, 1050, 0, V4L2_DV_HSYNC_POS_POL, 245500000, 48, 32, 80, 3, 6, 53, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1792X1344P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1792, 1344, 0, V4L2_DV_VSYNC_POS_POL, 204750000, 128, 200, 328, 1, 3, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1792X1344P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1792, 1344, 0, 
V4L2_DV_VSYNC_POS_POL, 261000000, 96, 216, 352, 1, 3, 69, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1792X1344P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1792, 1344, 0, V4L2_DV_HSYNC_POS_POL, 333250000, 48, 32, 80, 3, 4, 72, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1856X1392P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1856, 1392, 0, V4L2_DV_VSYNC_POS_POL, 218250000, 96, 224, 352, 1, 3, 43, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1856X1392P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1856, 1392, 0, V4L2_DV_VSYNC_POS_POL, 288000000, 128, 224, 352, 1, 3, 104, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1856X1392P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1856, 1392, 0, V4L2_DV_HSYNC_POS_POL, 356500000, 48, 32, 80, 3, 4, 75, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1920X1080P60 V4L2_DV_BT_CEA_1920X1080P60 +#define V4L2_DV_BT_DMT_1920X1200P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_HSYNC_POS_POL, 154000000, 48, 32, 80, 3, 6, 26, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1920X1200P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_VSYNC_POS_POL, 193250000, 136, 200, 336, 3, 6, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1920X1200P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_VSYNC_POS_POL, 245250000, 136, 208, 344, 3, 6, 46, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1920X1200P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_VSYNC_POS_POL, 281250000, 144, 208, 352, 3, 6, 53, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_1920X1200P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1200, 0, V4L2_DV_HSYNC_POS_POL, 317000000, 48, 32, 80, 3, 6, 62, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1920X1440P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1440, 0, V4L2_DV_VSYNC_POS_POL, 234000000, 128, 208, 344, 1, 3, 56, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +#define V4L2_DV_BT_DMT_1920X1440P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1440, 0, V4L2_DV_VSYNC_POS_POL, 297000000, 144, 224, 352, 1, 3, 56, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_1920X1440P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1920, 1440, 0, V4L2_DV_HSYNC_POS_POL, 380500000, 48, 32, 80, 3, 4, 78, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_2048X1152P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(2048, 1152, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 162000000, 26, 80, 96, 1, 3, 44, 0, 0, 0, V4L2_DV_BT_STD_DMT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_2560X1600P60_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_HSYNC_POS_POL, 268500000, 48, 32, 80, 3, 6, 37, 0, 0, 0, 
V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_2560X1600P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_VSYNC_POS_POL, 348500000, 192, 280, 472, 3, 6, 49, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_DMT_2560X1600P75 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_VSYNC_POS_POL, 443250000, 208, 280, 488, 3, 6, 63, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_2560X1600P85 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_VSYNC_POS_POL, 505250000, 208, 280, 488, 3, 6, 73, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, 0) } +#define V4L2_DV_BT_DMT_2560X1600P120_RB { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(2560, 1600, 0, V4L2_DV_HSYNC_POS_POL, 552750000, 48, 32, 80, 3, 6, 85, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CVT, V4L2_DV_FL_REDUCED_BLANKING) } +#define V4L2_DV_BT_DMT_1366X768P60 { .type = V4L2_DV_BT_656_1120, V4L2_INIT_BT_TIMINGS(1366, 768, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 85500000, 70, 143, 213, 3, 3, 24, 0, 0, 0, V4L2_DV_BT_STD_DMT, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1280X720P60_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1280X720P60_TB { .type = V4L2_DV_BT_TB, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1280X720P60_FP { .type = V4L2_DV_BT_FP, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1280X720P50_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1280X720P50_TB { .type = V4L2_DV_BT_TB, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +#define V4L2_DV_BT_CEA_1280X720P50_FP { .type = V4L2_DV_BT_FP, V4L2_INIT_BT_TIMINGS(1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0) } +#define V4L2_DV_BT_CEA_1920X1080P24_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080P24_TB { .type = V4L2_DV_BT_TB, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1920X1080P24_FP { .type = V4L2_DV_BT_FP, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 
4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080P30_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080P30_TB { .type = V4L2_DV_BT_TB, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080I50_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 528, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE) } +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CEA_1920X1080I60_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS | V4L2_DV_FL_HALF_LINE) } +#define V4L2_DV_BT_CEA_1920X1080P60_SB_HALF { .type = V4L2_DV_BT_SB_HALF, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#define V4L2_DV_BT_CEA_1920X1080P60_TB { .type = V4L2_DV_BT_TB, V4L2_INIT_BT_TIMINGS(1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS) } +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ diff --git a/kernel-3.10-headers/videodev2.h b/kernel-3.10-headers/videodev2.h new file mode 100644 index 0000000..9a94f6f --- /dev/null +++ b/kernel-3.10-headers/videodev2.h @@ -0,0 +1,1506 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef _UAPI__LINUX_VIDEODEV2_H +#define _UAPI__LINUX_VIDEODEV2_H +#include <sys/time.h> +#include <linux/compiler.h> +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#include <linux/ioctl.h> +#include <linux/types.h> +#include <linux/v4l2-common.h> +#include <linux/v4l2-controls.h> +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDEO_MAX_FRAME 32 +#define VIDEO_MAX_PLANES 8 +#define v4l2_fourcc(a,b,c,d) ((__u32) (a) | ((__u32) (b) << 8) | ((__u32) (c) << 16) | ((__u32) (d) << 24)) +enum v4l2_field { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_ANY = 0, + V4L2_FIELD_NONE = 1, + V4L2_FIELD_TOP = 2, + V4L2_FIELD_BOTTOM = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_INTERLACED = 4, + V4L2_FIELD_SEQ_TB = 5, + V4L2_FIELD_SEQ_BT = 6, + V4L2_FIELD_ALTERNATE = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_INTERLACED_TB = 8, + V4L2_FIELD_INTERLACED_BT = 9, +}; +#define V4L2_FIELD_HAS_TOP(field) ((field) == V4L2_FIELD_TOP || (field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FIELD_HAS_BOTTOM(field) ((field) == V4L2_FIELD_BOTTOM || (field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +#define V4L2_FIELD_HAS_BOTH(field) ((field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +enum v4l2_buf_type { + V4L2_BUF_TYPE_VIDEO_CAPTURE = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_VIDEO_OUTPUT = 2, + V4L2_BUF_TYPE_VIDEO_OVERLAY = 3, + V4L2_BUF_TYPE_VBI_CAPTURE = 4, + V4L2_BUF_TYPE_VBI_OUTPUT = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6, + V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7, + V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8, + V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10, + V4L2_BUF_TYPE_PRIVATE = 0x80, +}; +#define V4L2_TYPE_IS_MULTIPLANAR(type) ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TYPE_IS_OUTPUT(type) ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY || (type) == V4L2_BUF_TYPE_VBI_OUTPUT || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT) +enum v4l2_tuner_type { + V4L2_TUNER_RADIO = 1, + V4L2_TUNER_ANALOG_TV = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_TUNER_DIGITAL_TV = 3, +}; +enum v4l2_memory { + V4L2_MEMORY_MMAP = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MEMORY_USERPTR = 2, + V4L2_MEMORY_OVERLAY = 3, + V4L2_MEMORY_DMABUF = 4, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_colorspace { +
V4L2_COLORSPACE_DEFAULT = 0, + V4L2_COLORSPACE_SMPTE170M = 1, + V4L2_COLORSPACE_SMPTE240M = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_REC709 = 3, + V4L2_COLORSPACE_BT878 = 4, + V4L2_COLORSPACE_470_SYSTEM_M = 5, + V4L2_COLORSPACE_470_SYSTEM_BG = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_JPEG = 7, + V4L2_COLORSPACE_SRGB = 8, + V4L2_COLORSPACE_ADOBERGB = 9, + V4L2_COLORSPACE_BT2020 = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_RAW = 11, + V4L2_COLORSPACE_DCI_P3 = 12, +}; +enum v4l2_priority { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PRIORITY_UNSET = 0, + V4L2_PRIORITY_BACKGROUND = 1, + V4L2_PRIORITY_INTERACTIVE = 2, + V4L2_PRIORITY_RECORD = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE, +}; +struct v4l2_rect { + __s32 left; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 top; + __s32 width; + __s32 height; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_fract { + __u32 numerator; + __u32 denominator; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_capability { + __u8 driver[16]; + __u8 card[32]; + __u8 bus_info[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 version; + __u32 capabilities; + __u32 device_caps; + __u32 reserved[3]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CAP_VIDEO_CAPTURE 0x00000001 +#define V4L2_CAP_VIDEO_OUTPUT 0x00000002 +#define V4L2_CAP_VIDEO_OVERLAY 0x00000004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_VBI_CAPTURE 0x00000010 +#define V4L2_CAP_VBI_OUTPUT 0x00000020 +#define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 +#define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_RDS_CAPTURE 0x00000100 +#define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 +#define V4L2_CAP_HW_FREQ_SEEK 0x00000400 +#define V4L2_CAP_RDS_OUTPUT 0x00000800 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_VIDEO_CAPTURE_MPLANE 0x00001000 +#define V4L2_CAP_VIDEO_OUTPUT_MPLANE 0x00002000 +#define V4L2_CAP_VIDEO_M2M_MPLANE 0x00004000 +#define V4L2_CAP_VIDEO_M2M 0x00008000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_TUNER 0x00010000 +#define V4L2_CAP_AUDIO 0x00020000 +#define V4L2_CAP_RADIO 0x00040000 +#define V4L2_CAP_MODULATOR 0x00080000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_READWRITE 0x01000000 +#define V4L2_CAP_ASYNCIO 0x02000000 +#define V4L2_CAP_STREAMING 0x04000000 +#define V4L2_CAP_DEVICE_CAPS 0x80000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_pix_format { + __u32 width; + __u32 height; + __u32 pixelformat; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 field; + __u32 bytesperline; + __u32 sizeimage; + __u32 colorspace; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 priv; +}; +#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') +#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') +/* 
WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') +#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') +#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') +#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') +#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') +#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') +#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') +#define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') +#define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') +#define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') +#define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') +#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') +#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') +#define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') +#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') +#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') +#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') +#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') +#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') +#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') +#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') +#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') +#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') +#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') +#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') +#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') +#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') +#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') +#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') +#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') +#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') +#define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') +#define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') +#define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') +#define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') +#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') +#define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') +#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') +#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') +#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') +#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') +#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') +#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') +#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') +#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') +#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') +#define V4L2_PIX_FMT_SBGGR10ALAW8 v4l2_fourcc('a', 'B', 'A', '8') +#define V4L2_PIX_FMT_SGBRG10ALAW8 v4l2_fourcc('a', 'G', 'A', '8') +#define V4L2_PIX_FMT_SGRBG10ALAW8 v4l2_fourcc('a', 'g', 'A', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SRGGB10ALAW8 v4l2_fourcc('a', 'R', 'A', '8') +#define V4L2_PIX_FMT_SBGGR10DPCM8 v4l2_fourcc('b', 'B', 'A', '8') +#define V4L2_PIX_FMT_SGBRG10DPCM8 v4l2_fourcc('b', 'G', 'A', '8') +#define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SRGGB10DPCM8 v4l2_fourcc('b', 'R', 'A', '8') +#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') +#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') +#define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') +#define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') +#define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') +#define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') +#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') +#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') +#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') +#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') +#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') +#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') +#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') +#define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') +#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ +#define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') +#define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') +#define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') +#define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') +#define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') +#define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') +#define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') +#define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') +#define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') +#define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') +#define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') +#define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') +#define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') +#define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') +#define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') +#define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') +#define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') +#define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') +struct v4l2_fmtdesc { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u32 type; + __u32 flags; + __u8 description[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixelformat; + __u32 reserved[4]; +}; +#define V4L2_FMT_FLAG_COMPRESSED 0x0001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FMT_FLAG_EMULATED 0x0002 +enum v4l2_frmsizetypes { + V4L2_FRMSIZE_TYPE_DISCRETE = 1, + V4L2_FRMSIZE_TYPE_CONTINUOUS = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FRMSIZE_TYPE_STEPWISE = 3, +}; +struct v4l2_frmsize_discrete { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; +}; +struct v4l2_frmsize_stepwise { + __u32 min_width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 max_width; + __u32 step_width; + __u32 min_height; + __u32 max_height; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 step_height; +}; +struct v4l2_frmsizeenum { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixel_format; + __u32 type; + union { + struct v4l2_frmsize_discrete discrete; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_frmsize_stepwise stepwise; + }; + __u32 reserved[2]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_frmivaltypes { + V4L2_FRMIVAL_TYPE_DISCRETE = 1, + V4L2_FRMIVAL_TYPE_CONTINUOUS = 2, + V4L2_FRMIVAL_TYPE_STEPWISE = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ +}; +struct v4l2_frmival_stepwise { + struct v4l2_fract min; + struct v4l2_fract max; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract step; +}; +struct v4l2_frmivalenum { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixel_format; + __u32 width; + __u32 height; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_fract discrete; + struct v4l2_frmival_stepwise stepwise; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +struct v4l2_timecode { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u8 frames; + __u8 seconds; + __u8 minutes; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 hours; + __u8 userbits[4]; +}; +#define V4L2_TC_TYPE_24FPS 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_TYPE_25FPS 2 +#define V4L2_TC_TYPE_30FPS 3 +#define V4L2_TC_TYPE_50FPS 4 +#define V4L2_TC_TYPE_60FPS 5 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_FLAG_DROPFRAME 0x0001 +#define V4L2_TC_FLAG_COLORFRAME 0x0002 +#define V4L2_TC_USERBITS_field 0x000C +#define V4L2_TC_USERBITS_USERDEFINED 0x0000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_USERBITS_8BITCHARS 0x0008 +struct v4l2_jpegcompression { + int quality; + int APPn; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int APP_len; + char APP_data[60]; + int COM_len; + char COM_data[60]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 jpeg_markers; +#define V4L2_JPEG_MARKER_DHT (1 << 3) +#define V4L2_JPEG_MARKER_DQT (1 << 4) +#define V4L2_JPEG_MARKER_DRI (1 << 5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_MARKER_COM (1 << 6) +#define V4L2_JPEG_MARKER_APP (1 << 7) +}; +struct v4l2_requestbuffers { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 count; + __u32 type; + __u32 memory; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_plane { + __u32 bytesused; + __u32 length; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + __u32 mem_offset; + unsigned long userptr; + __s32 fd; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } m; + __u32 data_offset; + __u32 reserved[11]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_buffer { + __u32 index; + __u32 type; + __u32 bytesused; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 field; + struct timeval timestamp; + struct v4l2_timecode timecode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sequence; + __u32 memory; + union { + __u32 offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned long userptr; + struct v4l2_plane * planes; + __s32 fd; + } m; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 length; + __u32 input; + __u32 reserved; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_MAPPED 0x0001 +#define 
V4L2_BUF_FLAG_QUEUED 0x0002 +#define V4L2_BUF_FLAG_DONE 0x0004 +#define V4L2_BUF_FLAG_KEYFRAME 0x0008 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_PFRAME 0x0010 +#define V4L2_BUF_FLAG_BFRAME 0x0020 +#define V4L2_BUF_FLAG_ERROR 0x0040 +#define V4L2_BUF_FLAG_TIMECODE 0x0100 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_INPUT 0x0200 +#define V4L2_BUF_FLAG_PREPARED 0x0400 +#define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x0800 +#define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x1000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_TIMESTAMP_MASK 0xe000 +#define V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN 0x0000 +#define V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC 0x2000 +#define V4L2_BUF_FLAG_TIMESTAMP_COPY 0x4000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_USE_SYNC 0x8000 +struct v4l2_exportbuffer { + __u32 type; + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 plane; + __u32 flags; + __s32 fd; + __u32 reserved[11]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_framebuffer { + __u32 capability; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + void * base; + struct v4l2_pix_format fmt; +}; +#define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_CAP_CHROMAKEY 0x0002 +#define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004 +#define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008 +#define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020 +#define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040 +#define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080 +#define V4L2_FBUF_FLAG_PRIMARY 0x0001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_FLAG_OVERLAY 0x0002 +#define V4L2_FBUF_FLAG_CHROMAKEY 0x0004 +#define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008 +#define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020 +#define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040 +struct v4l2_clip { + struct v4l2_rect c; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_clip __user * next; +}; +struct v4l2_window { + struct v4l2_rect w; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 field; + __u32 chromakey; + struct v4l2_clip __user * clips; + __u32 clipcount; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + void __user * bitmap; + __u8 global_alpha; +}; +struct v4l2_captureparm { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capability; + __u32 capturemode; + struct v4l2_fract timeperframe; + __u32 extendedmode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 readbuffers; + __u32 reserved[4]; +}; +#define V4L2_MODE_HIGHQUALITY 0x0001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_TIMEPERFRAME 0x1000 +struct v4l2_outputparm { + __u32 capability; + __u32 outputmode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract timeperframe; + __u32 extendedmode; + __u32 writebuffers; + 
__u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_cropcap { + __u32 type; + struct v4l2_rect bounds; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_rect defrect; + struct v4l2_fract pixelaspect; +}; +struct v4l2_crop { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + struct v4l2_rect c; +}; +struct v4l2_selection { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 target; + __u32 flags; + struct v4l2_rect r; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[9]; +}; +typedef __u64 v4l2_std_id; +#define V4L2_STD_PAL_B ((v4l2_std_id) 0x00000001) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_B1 ((v4l2_std_id) 0x00000002) +#define V4L2_STD_PAL_G ((v4l2_std_id) 0x00000004) +#define V4L2_STD_PAL_H ((v4l2_std_id) 0x00000008) +#define V4L2_STD_PAL_I ((v4l2_std_id) 0x00000010) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_D ((v4l2_std_id) 0x00000020) +#define V4L2_STD_PAL_D1 ((v4l2_std_id) 0x00000040) +#define V4L2_STD_PAL_K ((v4l2_std_id) 0x00000080) +#define V4L2_STD_PAL_M ((v4l2_std_id) 0x00000100) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_N ((v4l2_std_id) 0x00000200) +#define V4L2_STD_PAL_Nc ((v4l2_std_id) 0x00000400) +#define V4L2_STD_PAL_60 ((v4l2_std_id) 0x00000800) +#define V4L2_STD_NTSC_M ((v4l2_std_id) 0x00001000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_NTSC_M_JP ((v4l2_std_id) 0x00002000) +#define V4L2_STD_NTSC_443 ((v4l2_std_id) 0x00004000) +#define V4L2_STD_NTSC_M_KR ((v4l2_std_id) 0x00008000) +#define V4L2_STD_SECAM_B ((v4l2_std_id) 0x00010000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_SECAM_D ((v4l2_std_id) 0x00020000) +#define V4L2_STD_SECAM_G ((v4l2_std_id) 0x00040000) +#define V4L2_STD_SECAM_H ((v4l2_std_id) 0x00080000) +#define V4L2_STD_SECAM_K ((v4l2_std_id) 0x00100000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_SECAM_K1 ((v4l2_std_id) 0x00200000) +#define V4L2_STD_SECAM_L ((v4l2_std_id) 0x00400000) +#define V4L2_STD_SECAM_LC ((v4l2_std_id) 0x00800000) +#define V4L2_STD_ATSC_8_VSB ((v4l2_std_id) 0x01000000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_ATSC_16_VSB ((v4l2_std_id) 0x02000000) +#define V4L2_STD_NTSC (V4L2_STD_NTSC_M | V4L2_STD_NTSC_M_JP | V4L2_STD_NTSC_M_KR) +#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D | V4L2_STD_SECAM_K | V4L2_STD_SECAM_K1) +#define V4L2_STD_SECAM (V4L2_STD_SECAM_B | V4L2_STD_SECAM_G | V4L2_STD_SECAM_H | V4L2_STD_SECAM_DK | V4L2_STD_SECAM_L | V4L2_STD_SECAM_LC) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_BG (V4L2_STD_PAL_B | V4L2_STD_PAL_B1 | V4L2_STD_PAL_G) +#define V4L2_STD_PAL_DK (V4L2_STD_PAL_D | V4L2_STD_PAL_D1 | V4L2_STD_PAL_K) +#define V4L2_STD_PAL (V4L2_STD_PAL_BG | V4L2_STD_PAL_DK | V4L2_STD_PAL_H | V4L2_STD_PAL_I) +#define V4L2_STD_B (V4L2_STD_PAL_B | V4L2_STD_PAL_B1 | V4L2_STD_SECAM_B) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_G (V4L2_STD_PAL_G | V4L2_STD_SECAM_G) +#define V4L2_STD_H (V4L2_STD_PAL_H | V4L2_STD_SECAM_H) +#define V4L2_STD_L 
(V4L2_STD_SECAM_L | V4L2_STD_SECAM_LC) +#define V4L2_STD_GH (V4L2_STD_G | V4L2_STD_H) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_DK (V4L2_STD_PAL_DK | V4L2_STD_SECAM_DK) +#define V4L2_STD_BG (V4L2_STD_B | V4L2_STD_G) +#define V4L2_STD_MN (V4L2_STD_PAL_M | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc | V4L2_STD_NTSC) +#define V4L2_STD_MTS (V4L2_STD_NTSC_M | V4L2_STD_PAL_M | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_525_60 (V4L2_STD_PAL_M | V4L2_STD_PAL_60 | V4L2_STD_NTSC | V4L2_STD_NTSC_443) +#define V4L2_STD_625_50 (V4L2_STD_PAL | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc | V4L2_STD_SECAM) +#define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB | V4L2_STD_ATSC_16_VSB) +#define V4L2_STD_UNKNOWN 0 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_ALL (V4L2_STD_525_60 | V4L2_STD_625_50) +struct v4l2_standard { + __u32 index; + v4l2_std_id id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[24]; + struct v4l2_fract frameperiod; + __u32 framelines; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_dv_preset { + __u32 preset; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_dv_enum_preset { + __u32 index; + __u32 preset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[32]; + __u32 width; + __u32 height; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_DV_INVALID 0 +#define V4L2_DV_480P59_94 1 +#define V4L2_DV_576P50 2 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P24 3 +#define V4L2_DV_720P25 4 +#define V4L2_DV_720P30 5 +#define V4L2_DV_720P50 6 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P59_94 7 +#define V4L2_DV_720P60 8 +#define V4L2_DV_1080I29_97 9 +#define V4L2_DV_1080I30 10 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080I25 11 +#define V4L2_DV_1080I50 12 +#define V4L2_DV_1080I60 13 +#define V4L2_DV_1080P24 14 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P25 15 +#define V4L2_DV_1080P30 16 +#define V4L2_DV_1080P50 17 +#define V4L2_DV_1080P60 18 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_480P60 19 +#define V4L2_DV_1080I59_94 20 +#define V4L2_DV_1080P59_94 21 +#define V4L2_DV_720P60_FP 22 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P60_SB_HALF 23 +#define V4L2_DV_720P60_TB 24 +#define V4L2_DV_720P59_94_FP 25 +#define V4L2_DV_720P59_94_SB_HALF 26 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P59_94_TB 27 +#define V4L2_DV_720P50_FP 28 +#define V4L2_DV_720P50_SB_HALF 29 +#define V4L2_DV_720P50_TB 30 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P24_FP 31 +#define V4L2_DV_1080P24_SB_HALF 32 +#define V4L2_DV_1080P24_TB 33 +#define V4L2_DV_1080P23_98_FP 34 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P23_98_SB_HALF 35 +#define V4L2_DV_1080P23_98_TB 36 +#define V4L2_DV_1080I60_SB_HALF 37 +#define V4L2_DV_1080I59_94_SB_HALF 38 +/* 
WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080I50_SB_HALF 39 +#define V4L2_DV_1080P60_SB_HALF 40 +#define V4L2_DV_1080P60_TB 41 +#define V4L2_DV_1080P30_FP 42 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P30_SB_HALF 43 +#define V4L2_DV_1080P30_TB 44 +#define V4L2_DV_2160P24 45 +#define V4L2_DV_2160P25 46 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_2160P30 47 +#define V4L2_DV_2160P24_1 48 +struct v4l2_bt_timings { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; + __u32 interlaced; + __u32 polarities; + __u64 pixelclock; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 hfrontporch; + __u32 hsync; + __u32 hbackporch; + __u32 vfrontporch; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 vsync; + __u32 vbackporch; + __u32 il_vfrontporch; + __u32 il_vsync; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 il_vbackporch; + __u32 standards; + __u32 flags; + __u32 reserved[14]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_DV_PROGRESSIVE 0 +#define V4L2_DV_INTERLACED 1 +#define V4L2_DV_VSYNC_POS_POL 0x00000001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_HSYNC_POS_POL 0x00000002 +#define V4L2_DV_BT_STD_CEA861 (1 << 0) +#define V4L2_DV_BT_STD_DMT (1 << 1) +#define V4L2_DV_BT_STD_CVT (1 << 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_STD_GTF (1 << 3) +#define V4L2_DV_FL_REDUCED_BLANKING (1 << 0) +#define V4L2_DV_FL_CAN_REDUCE_FPS (1 << 1) +#define V4L2_DV_FL_REDUCED_FPS (1 << 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_FL_HALF_LINE (1 << 3) +struct v4l2_dv_timings { + __u32 type; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_bt_timings bt; + __u32 reserved[32]; + }; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_656_1120 0 +#define V4L2_DV_BT_SB_HALF (1 << 8) +#define V4L2_DV_BT_TB (1 << 6) +#define V4L2_DV_BT_FP (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_enum_dv_timings { + __u32 index; + __u32 reserved[3]; + struct v4l2_dv_timings timings; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_bt_timings_cap { + __u32 min_width; + __u32 max_width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 min_height; + __u32 max_height; + __u64 min_pixelclock; + __u64 max_pixelclock; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 standards; + __u32 capabilities; + __u32 reserved[16]; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CAP_INTERLACED (1 << 0) +#define V4L2_DV_BT_CAP_PROGRESSIVE (1 << 1) +#define V4L2_DV_BT_CAP_REDUCED_BLANKING (1 << 2) +#define V4L2_DV_BT_CAP_CUSTOM (1 << 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_dv_timings_cap { + __u32 type; + __u32 reserved[3]; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + 
struct v4l2_bt_timings_cap bt; + __u32 raw_data[32]; + }; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_input { + __u32 index; + __u8 name[32]; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 audioset; + __u32 tuner; + v4l2_std_id std; + __u32 status; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capabilities; + __u32 reserved[3]; +}; +#define V4L2_INPUT_TYPE_TUNER 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_INPUT_TYPE_CAMERA 2 +#define V4L2_IN_ST_NO_POWER 0x00000001 +#define V4L2_IN_ST_NO_SIGNAL 0x00000002 +#define V4L2_IN_ST_NO_COLOR 0x00000004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_HFLIP 0x00000010 +#define V4L2_IN_ST_VFLIP 0x00000020 +#define V4L2_IN_ST_NO_H_LOCK 0x00000100 +#define V4L2_IN_ST_COLOR_KILL 0x00000200 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_NO_SYNC 0x00010000 +#define V4L2_IN_ST_NO_EQU 0x00020000 +#define V4L2_IN_ST_NO_CARRIER 0x00040000 +#define V4L2_IN_ST_MACROVISION 0x01000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_NO_ACCESS 0x02000000 +#define V4L2_IN_ST_VTR 0x04000000 +#define V4L2_IN_CAP_DV_TIMINGS 0x00000002 +#define V4L2_IN_CAP_CUSTOM_TIMINGS V4L2_IN_CAP_DV_TIMINGS +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_CAP_STD 0x00000004 +struct v4l2_output { + __u32 index; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 audioset; + __u32 modulator; + v4l2_std_id std; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capabilities; + __u32 reserved[3]; +}; +#define V4L2_OUTPUT_TYPE_MODULATOR 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_OUTPUT_TYPE_ANALOG 2 +#define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3 +#define V4L2_OUT_CAP_DV_TIMINGS 0x00000002 +#define V4L2_OUT_CAP_CUSTOM_TIMINGS V4L2_OUT_CAP_DV_TIMINGS +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_OUT_CAP_STD 0x00000004 +struct v4l2_control { + __u32 id; + __s32 value; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_ext_control { + __u32 id; + __u32 size; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved2[1]; + union { + __s32 value; + __s64 value64; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + char * string; + }; +} __attribute__((packed)); +struct v4l2_ext_controls { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 ctrl_class; + __u32 count; + __u32 error_idx; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_ext_control * controls; +}; +#define V4L2_CTRL_ID_MASK (0x0fffffff) +#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000) +enum v4l2_ctrl_type { + V4L2_CTRL_TYPE_INTEGER = 1, + V4L2_CTRL_TYPE_BOOLEAN = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_MENU = 3, + V4L2_CTRL_TYPE_BUTTON = 4, + V4L2_CTRL_TYPE_INTEGER64 = 5, + 
V4L2_CTRL_TYPE_CTRL_CLASS = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_STRING = 7, + V4L2_CTRL_TYPE_BITMASK = 8, + V4L2_CTRL_TYPE_INTEGER_MENU = 9, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_queryctrl { + __u32 id; + __u32 type; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 minimum; + __s32 maximum; + __s32 step; + __s32 default_value; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 reserved[2]; +}; +struct v4l2_querymenu { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 id; + __u32 index; + union { + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s64 value; + }; + __u32 reserved; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_DISABLED 0x0001 +#define V4L2_CTRL_FLAG_GRABBED 0x0002 +#define V4L2_CTRL_FLAG_READ_ONLY 0x0004 +#define V4L2_CTRL_FLAG_UPDATE 0x0008 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_INACTIVE 0x0010 +#define V4L2_CTRL_FLAG_SLIDER 0x0020 +#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040 +#define V4L2_CTRL_FLAG_VOLATILE 0x0080 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000 +#define V4L2_CID_MAX_CTRLS 1024 +#define V4L2_CID_PRIVATE_BASE 0x08000000 +struct v4l2_tuner { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u8 name[32]; + __u32 type; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangelow; + __u32 rangehigh; + __u32 rxsubchans; + __u32 audmode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 signal; + __s32 afc; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_modulator { + __u32 index; + __u8 name[32]; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangelow; + __u32 rangehigh; + __u32 txsubchans; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_TUNER_CAP_LOW 0x0001 +#define V4L2_TUNER_CAP_NORM 0x0002 +#define V4L2_TUNER_CAP_HWSEEK_BOUNDED 0x0004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_HWSEEK_WRAP 0x0008 +#define V4L2_TUNER_CAP_STEREO 0x0010 +#define V4L2_TUNER_CAP_LANG2 0x0020 +#define V4L2_TUNER_CAP_SAP 0x0020 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_LANG1 0x0040 +#define V4L2_TUNER_CAP_RDS 0x0080 +#define V4L2_TUNER_CAP_RDS_BLOCK_IO 0x0100 +#define V4L2_TUNER_CAP_RDS_CONTROLS 0x0200 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_FREQ_BANDS 0x0400 +#define V4L2_TUNER_CAP_HWSEEK_PROG_LIM 0x0800 +#define V4L2_TUNER_SUB_MONO 0x0001 +#define V4L2_TUNER_SUB_STEREO 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_SUB_LANG2 0x0004 +#define V4L2_TUNER_SUB_SAP 0x0004 +#define V4L2_TUNER_SUB_LANG1 0x0008 +#define V4L2_TUNER_SUB_RDS 0x0010 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_MODE_MONO 
0x0000 +#define V4L2_TUNER_MODE_STEREO 0x0001 +#define V4L2_TUNER_MODE_LANG2 0x0002 +#define V4L2_TUNER_MODE_SAP 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_MODE_LANG1 0x0003 +#define V4L2_TUNER_MODE_LANG1_LANG2 0x0004 +struct v4l2_frequency { + __u32 tuner; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 frequency; + __u32 reserved[8]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BAND_MODULATION_VSB (1 << 1) +#define V4L2_BAND_MODULATION_FM (1 << 2) +#define V4L2_BAND_MODULATION_AM (1 << 3) +struct v4l2_frequency_band { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 tuner; + __u32 type; + __u32 index; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangelow; + __u32 rangehigh; + __u32 modulation; + __u32 reserved[9]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_hw_freq_seek { + __u32 tuner; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 seek_upward; + __u32 wrap_around; + __u32 spacing; + __u32 rangelow; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangehigh; + __u32 reserved[5]; +}; +struct v4l2_rds_data { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 lsb; + __u8 msb; + __u8 block; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_MSK 0x7 +#define V4L2_RDS_BLOCK_A 0 +#define V4L2_RDS_BLOCK_B 1 +#define V4L2_RDS_BLOCK_C 2 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_D 3 +#define V4L2_RDS_BLOCK_C_ALT 4 +#define V4L2_RDS_BLOCK_INVALID 7 +#define V4L2_RDS_BLOCK_CORRECTED 0x40 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_ERROR 0x80 +struct v4l2_audio { + __u32 index; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capability; + __u32 mode; + __u32 reserved[2]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_AUDCAP_STEREO 0x00001 +#define V4L2_AUDCAP_AVL 0x00002 +#define V4L2_AUDMODE_AVL 0x00001 +struct v4l2_audioout { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u8 name[32]; + __u32 capability; + __u32 mode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define V4L2_ENC_IDX_FRAME_I (0) +#define V4L2_ENC_IDX_FRAME_P (1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_IDX_FRAME_B (2) +#define V4L2_ENC_IDX_FRAME_MASK (0xf) +struct v4l2_enc_idx_entry { + __u64 offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u64 pts; + __u32 length; + __u32 flags; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_ENC_IDX_ENTRIES (64) +struct v4l2_enc_idx { + __u32 entries; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 entries_cap; + __u32 reserved[4]; + struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_CMD_START (0) +#define 
V4L2_ENC_CMD_STOP (1) +#define V4L2_ENC_CMD_PAUSE (2) +#define V4L2_ENC_CMD_RESUME (3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0) +struct v4l2_encoder_cmd { + __u32 cmd; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct { + __u32 data[8]; + } raw; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; +}; +#define V4L2_DEC_CMD_START (0) +#define V4L2_DEC_CMD_STOP (1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_CMD_PAUSE (2) +#define V4L2_DEC_CMD_RESUME (3) +#define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0) +#define V4L2_DEC_CMD_PAUSE_TO_BLACK (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_CMD_STOP_TO_BLACK (1 << 0) +#define V4L2_DEC_CMD_STOP_IMMEDIATELY (1 << 1) +#define V4L2_DEC_START_FMT_NONE (0) +#define V4L2_DEC_START_FMT_GOP (1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_decoder_cmd { + __u32 cmd; + __u32 flags; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct { + __u64 pts; + } stop; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 speed; + __u32 format; + } start; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 data[16]; + } raw; + }; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_vbi_format { + __u32 sampling_rate; + __u32 offset; + __u32 samples_per_line; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sample_format; + __s32 start[2]; + __u32 count[2]; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define V4L2_VBI_UNSYNC (1 << 0) +#define V4L2_VBI_INTERLACED (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_sliced_vbi_format { + __u16 service_set; + __u16 service_lines[2][24]; + __u32 io_size; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define V4L2_SLICED_TELETEXT_B (0x0001) +#define V4L2_SLICED_VPS (0x0400) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SLICED_CAPTION_525 (0x1000) +#define V4L2_SLICED_WSS_625 (0x4000) +#define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525) +#define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_sliced_vbi_cap { + __u16 service_set; + __u16 service_lines[2][24]; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[3]; +}; +struct v4l2_sliced_vbi_data { + __u32 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 field; + __u32 line; + __u32 reserved; + __u8 data[48]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1) +#define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4) +#define V4L2_MPEG_VBI_IVTV_WSS_625 (5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VBI_IVTV_VPS (7) +struct v4l2_mpeg_vbi_itv0_line { + __u8 id; + __u8 data[42]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE 
TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_mpeg_vbi_itv0 { + __le32 linemask[2]; + struct v4l2_mpeg_vbi_itv0_line line[35]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_mpeg_vbi_ITV0 { + struct v4l2_mpeg_vbi_itv0_line line[36]; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0" +#define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0" +struct v4l2_mpeg_vbi_fmt_ivtv { + __u8 magic[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_mpeg_vbi_itv0 itv0; + struct v4l2_mpeg_vbi_ITV0 ITV0; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_plane_pix_format { + __u32 sizeimage; + __u16 bytesperline; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u16 reserved[7]; +} __attribute__((packed)); +struct v4l2_pix_format_mplane { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; + __u32 pixelformat; + __u32 field; + __u32 colorspace; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES]; + __u8 num_planes; + __u8 reserved[11]; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_format { + __u32 type; + union { + struct v4l2_pix_format pix; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_pix_format_mplane pix_mp; + struct v4l2_window win; + struct v4l2_vbi_format vbi; + struct v4l2_sliced_vbi_format sliced; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 raw_data[200]; + } fmt; +}; +struct v4l2_streamparm { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + union { + struct v4l2_captureparm capture; + struct v4l2_outputparm output; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 raw_data[200]; + } parm; +}; +#define V4L2_EVENT_ALL 0 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_VSYNC 1 +#define V4L2_EVENT_EOS 2 +#define V4L2_EVENT_CTRL 3 +#define V4L2_EVENT_FRAME_SYNC 4 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_PRIVATE_START 0x08000000 +struct v4l2_event_vsync { + __u8 field; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_CTRL_CH_VALUE (1 << 0) +#define V4L2_EVENT_CTRL_CH_FLAGS (1 << 1) +#define V4L2_EVENT_CTRL_CH_RANGE (1 << 2) +struct v4l2_event_ctrl { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 changes; + __u32 type; + union { + __s32 value; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s64 value64; + }; + __u32 flags; + __s32 minimum; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 maximum; + __s32 step; + __s32 default_value; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_event_frame_sync { + __u32 frame_sequence; +}; +struct v4l2_event { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + union { + struct v4l2_event_vsync vsync; + struct v4l2_event_ctrl 
ctrl; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_event_frame_sync frame_sync; + __u8 data[64]; + } u; + __u32 pending; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sequence; + struct timespec timestamp; + __u32 id; + __u32 reserved[8]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_EVENT_SUB_FL_SEND_INITIAL (1 << 0) +#define V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK (1 << 1) +struct v4l2_event_subscription { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 id; + __u32 flags; + __u32 reserved[5]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CHIP_MATCH_BRIDGE 0 +#define V4L2_CHIP_MATCH_HOST V4L2_CHIP_MATCH_BRIDGE +#define V4L2_CHIP_MATCH_I2C_DRIVER 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CHIP_MATCH_I2C_ADDR 2 +#define V4L2_CHIP_MATCH_AC97 3 +#define V4L2_CHIP_MATCH_SUBDEV 4 +struct v4l2_dbg_match { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + union { + __u32 addr; + char name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; +} __attribute__((packed)); +struct v4l2_dbg_register { + struct v4l2_dbg_match match; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 size; + __u64 reg; + __u64 val; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_dbg_chip_ident { + struct v4l2_dbg_match match; + __u32 ident; + __u32 revision; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_CHIP_FL_READABLE (1 << 0) +#define V4L2_CHIP_FL_WRITABLE (1 << 1) +struct v4l2_dbg_chip_info { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_dbg_match match; + char name[32]; + __u32 flags; + __u32 reserved[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_create_buffers { + __u32 index; + __u32 count; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 memory; + struct v4l2_format format; + __u32 reserved[8]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability) +#define VIDIOC_RESERVED _IO('V', 1) +#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc) +#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format) +#define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers) +#define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer) +#define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer) +#define VIDIOC_OVERLAY _IOW('V', 14, int) +#define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer) +#define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer) +#define VIDIOC_STREAMON _IOW('V', 18, int) +#define VIDIOC_STREAMOFF _IOW('V', 19, int) +#define VIDIOC_G_PARM _IOWR('V', 21, 
struct v4l2_streamparm) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm) +#define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id) +#define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id) +#define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input) +#define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control) +#define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control) +#define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner) +#define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio) +#define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio) +#define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu) +#define VIDIOC_G_INPUT _IOR('V', 38, int) +#define VIDIOC_S_INPUT _IOWR('V', 39, int) +#define VIDIOC_G_OUTPUT _IOR('V', 46, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_OUTPUT _IOWR('V', 47, int) +#define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output) +#define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout) +#define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator) +#define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator) +#define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency) +#define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap) +#define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop) +#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop) +#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression) +#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id) +#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format) +#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout) +#define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) +#define VIDIOC_S_PRIORITY _IOW('V', 68, __u32) +#define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_LOG_STATUS _IO('V', 70) +#define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls) +#define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls) +#define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum) +#define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum) +#define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx) +#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ +#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd) +#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register) +#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register) +#define VIDIOC_DBG_G_CHIP_IDENT _IOWR('V', 81, struct v4l2_dbg_chip_ident) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek) +#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings) +#define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings) +#define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription) +#define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription) +#define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers) +#define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection) +#define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection) +#define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd) +#define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings) +#define VIDIOC_QUERY_DV_TIMINGS _IOR('V', 99, struct v4l2_dv_timings) +#define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap) +#define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info) +#define BASE_VIDIOC_PRIVATE 192 +#endif diff --git a/kernel-3.10-headers/videodev2_exynos_media.h b/kernel-3.10-headers/videodev2_exynos_media.h new file mode 100644 index 0000000..f758601 --- /dev/null +++ b/kernel-3.10-headers/videodev2_exynos_media.h @@ -0,0 +1,371 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_VIDEODEV2_EXYNOS_MEDIA_H +#define __LINUX_VIDEODEV2_EXYNOS_MEDIA_H +#define V4L2_PIX_FMT_RGB32X v4l2_fourcc('R', 'G', 'B', 'X') +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('A', 'R', 'G', 'B') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV422V_2P v4l2_fourcc('Y', 'U', 'V', '2') +#define V4L2_PIX_FMT_YUV444_2P v4l2_fourcc('Y', 'U', '2', 'P') +#define V4L2_PIX_FMT_YVU444_2P v4l2_fourcc('Y', 'V', '2', 'P') +#define V4L2_PIX_FMT_YUV422V_3P v4l2_fourcc('Y', 'U', 'V', '3') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV444_3P v4l2_fourcc('Y', 'U', '3', 'P') +#define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') +#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') +#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_FIMV v4l2_fourcc('F', 'I', 'M', 'V') +#define V4L2_PIX_FMT_FIMV1 v4l2_fourcc('F', 'I', 'M', '1') +#define V4L2_PIX_FMT_FIMV2 v4l2_fourcc('F', 'I', 'M', '2') +#define V4L2_PIX_FMT_FIMV3 v4l2_fourcc('F', 'I', 'M', '3') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_FIMV4 v4l2_fourcc('F', 'I', 'M', '4') +#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') +#define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') +#define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_JPEG_444 v4l2_fourcc('J', 'P', 'G', '4') +#define V4L2_PIX_FMT_JPEG_422 v4l2_fourcc('J', 'P', 'G', '2') +#define V4L2_PIX_FMT_JPEG_420 v4l2_fourcc('J', 'P', 'G', '0') +#define V4L2_PIX_FMT_JPEG_GRAY v4l2_fourcc('J', 'P', 'G', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_JPEG_422V v4l2_fourcc('J', 'P', 'G', '5') +#define V4L2_PIX_FMT_JPEG_411 v4l2_fourcc('J', 'P', 'G', '1') +#define V4L2_PIX_FMT_NV12N v4l2_fourcc('N', 'N', '1', '2') +#define V4L2_PIX_FMT_NV12NT v4l2_fourcc('T', 'N', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV420N v4l2_fourcc('Y', 'N', '1', '2') +#define V4L2_PIX_FMT_NV12N_10B v4l2_fourcc('B', 'N', '1', '2') +#ifndef __ALIGN_UP +#define __ALIGN_UP(x,a) (((x) + ((a) - 1)) & ~((a) - 1)) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#endif +#define NV12N_Y_SIZE(w,h) (__ALIGN_UP((w), 16) * __ALIGN_UP((h), 16) + 256) +#define NV12N_CBCR_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w), 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +#define NV12N_CBCR_BASE(base,w,h) ((base) + NV12N_Y_SIZE((w), (h))) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define NV12N_10B_Y_2B_SIZE(w,h) ((__ALIGN_UP((w) / 4, 16) * __ALIGN_UP((h), 16) + 64)) +#define NV12N_10B_CBCR_2B_SIZE(w,h) ((__ALIGN_UP((w) / 4, 16) * (__ALIGN_UP((h), 16) / 2) + 64)) +#define NV12N_10B_CBCR_BASE(base,w,h) ((base) + NV12N_Y_SIZE((w), (h)) + NV12N_10B_Y_2B_SIZE((w), (h))) +#define YUV420N_Y_SIZE(w,h) (__ALIGN_UP((w), 16) * __ALIGN_UP((h), 16) + 256) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define YUV420N_CB_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w) / 2, 16) * (__ALIGN_UP((h), 16) / 2) + 256), 
16)) +#define YUV420N_CR_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w) / 2, 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +#define YUV420N_CB_BASE(base,w,h) ((base) + YUV420N_Y_SIZE((w), (h))) +#define YUV420N_CR_BASE(base,w,h) (YUV420N_CB_BASE((base), (w), (h)) + YUV420N_CB_SIZE((w), (h))) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_EXYNOS_BASE (V4L2_CTRL_CLASS_USER | 0x2000) +#define V4L2_CID_GLOBAL_ALPHA (V4L2_CID_EXYNOS_BASE + 1) +#define V4L2_CID_CACHEABLE (V4L2_CID_EXYNOS_BASE + 10) +#define V4L2_CID_CAM_JPEG_MEMSIZE (V4L2_CID_EXYNOS_BASE + 20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CAM_JPEG_ENCODEDSIZE (V4L2_CID_EXYNOS_BASE + 21) +#define V4L2_CID_JPEG_TABLE (V4L2_CID_EXYNOS_BASE + 22) +#define V4L2_CID_SET_SHAREABLE (V4L2_CID_EXYNOS_BASE + 40) +#define V4L2_CID_TV_LAYER_BLEND_ENABLE (V4L2_CID_EXYNOS_BASE + 50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_LAYER_BLEND_ALPHA (V4L2_CID_EXYNOS_BASE + 51) +#define V4L2_CID_TV_PIXEL_BLEND_ENABLE (V4L2_CID_EXYNOS_BASE + 52) +#define V4L2_CID_TV_CHROMA_ENABLE (V4L2_CID_EXYNOS_BASE + 53) +#define V4L2_CID_TV_CHROMA_VALUE (V4L2_CID_EXYNOS_BASE + 54) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_HPD_STATUS (V4L2_CID_EXYNOS_BASE + 55) +#define V4L2_CID_TV_LAYER_PRIO (V4L2_CID_EXYNOS_BASE + 56) +#define V4L2_CID_TV_SET_DVI_MODE (V4L2_CID_EXYNOS_BASE + 57) +#define V4L2_CID_TV_GET_DVI_MODE (V4L2_CID_EXYNOS_BASE + 58) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_SET_ASPECT_RATIO (V4L2_CID_EXYNOS_BASE + 59) +#define V4L2_CID_TV_MAX_AUDIO_CHANNELS (V4L2_CID_EXYNOS_BASE + 60) +#define V4L2_CID_TV_ENABLE_HDMI_AUDIO (V4L2_CID_EXYNOS_BASE + 61) +#define V4L2_CID_TV_SET_NUM_CHANNELS (V4L2_CID_EXYNOS_BASE + 62) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_UPDATE (V4L2_CID_EXYNOS_BASE + 63) +#define V4L2_CID_TV_SET_COLOR_RANGE (V4L2_CID_EXYNOS_BASE + 64) +#define V4L2_CID_TV_HDCP_ENABLE (V4L2_CID_EXYNOS_BASE + 65) +#define V4L2_CID_TV_HDMI_STATUS (V4L2_CID_EXYNOS_BASE + 66) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_SOURCE_PHY_ADDR (V4L2_CID_EXYNOS_BASE + 67) +#define V4L2_CID_TV_BLANK (V4L2_CID_EXYNOS_BASE + 68) +#define V4L2_CID_CSC_EQ_MODE (V4L2_CID_EXYNOS_BASE + 100) +#define V4L2_CID_CSC_EQ (V4L2_CID_EXYNOS_BASE + 101) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CSC_RANGE (V4L2_CID_EXYNOS_BASE + 102) +#define V4L2_CID_M2M_CTX_NUM (V4L2_CID_EXYNOS_BASE + 200) +#define V4L2_CID_CONTENT_PROTECTION (V4L2_CID_EXYNOS_BASE + 201) +#define V4L2_CID_2D_BLEND_OP (V4L2_CID_EXYNOS_BASE + 103) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_COLOR_FILL (V4L2_CID_EXYNOS_BASE + 104) +#define V4L2_CID_2D_DITH (V4L2_CID_EXYNOS_BASE + 105) +#define V4L2_CID_2D_FMT_PREMULTI (V4L2_CID_EXYNOS_BASE + 106) +#define V4L2_CID_2D_SRC_COLOR (V4L2_CID_EXYNOS_BASE + 107) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_SRC_COLOR (V4L2_CID_EXYNOS_BASE + 107) +#define V4L2_CID_2D_CLIP (V4L2_CID_EXYNOS_BASE + 108) +#define V4L2_CID_2D_SCALE_WIDTH (V4L2_CID_EXYNOS_BASE + 109) +#define V4L2_CID_2D_SCALE_HEIGHT (V4L2_CID_EXYNOS_BASE + 110) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE 
TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_REPEAT (V4L2_CID_EXYNOS_BASE + 111) +#define V4L2_CID_2D_SCALE_MODE (V4L2_CID_EXYNOS_BASE + 112) +#define V4L2_CID_2D_BLUESCREEN (V4L2_CID_EXYNOS_BASE + 113) +#define V4L2_CID_2D_BG_COLOR (V4L2_CID_EXYNOS_BASE + 114) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_BS_COLOR (V4L2_CID_EXYNOS_BASE + 115) +#define V4L2_CID_M2M_CTX_NUM (V4L2_CID_EXYNOS_BASE + 200) +#define V4L2_CID_CONTENT_PROTECTION (V4L2_CID_EXYNOS_BASE + 201) +#define V4L2_CID_MPEG_MFC_BASE (V4L2_CTRL_CLASS_MPEG | 0x2000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_AVAIL (V4L2_CID_MPEG_MFC_BASE + 1) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRGMENT_ID (V4L2_CID_MPEG_MFC_BASE + 2) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_INFO (V4L2_CID_MPEG_MFC_BASE + 3) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_GRID_POS (V4L2_CID_MPEG_MFC_BASE + 4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_PACKED_PB (V4L2_CID_MPEG_MFC_BASE + 5) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG (V4L2_CID_MPEG_MFC_BASE + 6) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_ENABLE (V4L2_CID_MPEG_MFC_BASE + 7) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_LUMA (V4L2_CID_MPEG_MFC_BASE + 8) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_CHROMA (V4L2_CID_MPEG_MFC_BASE + 9) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_LUMA_BOT (V4L2_CID_MPEG_MFC_BASE + 10) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_CHROMA_BOT (V4L2_CID_MPEG_MFC_BASE + 11) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_GENERATED (V4L2_CID_MPEG_MFC_BASE + 12) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE (V4L2_CID_MPEG_MFC_BASE + 13) +#define V4L2_CID_MPEG_MFC51_VIDEO_DISPLAY_STATUS (V4L2_CID_MPEG_MFC_BASE + 14) +#define V4L2_CID_MPEG_MFC51_VIDEO_LUMA_ADDR (V4L2_CID_MPEG_MFC_BASE + 15) +#define V4L2_CID_MPEG_MFC51_VIDEO_CHROMA_ADDR (V4L2_CID_MPEG_MFC_BASE + 16) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_STREAM_SIZE (V4L2_CID_MPEG_MFC_BASE + 17) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_COUNT (V4L2_CID_MPEG_MFC_BASE + 18) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TYPE (V4L2_CID_MPEG_MFC_BASE + 19) +enum v4l2_mpeg_mfc51_video_frame_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_NOT_CODED = 0, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_I_FRAME = 1, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_P_FRAME = 2, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_B_FRAME = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_SKIPPED = 4, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_OTHERS = 5, +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_INTERLACE (V4L2_CID_MPEG_MFC_BASE + 20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 21) +#define V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_TIME_RES (V4L2_CID_MPEG_MFC_BASE + 22) +#define V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_FRM_DELTA (V4L2_CID_MPEG_MFC_BASE + 23) +#define V4L2_CID_MPEG_MFC51_VIDEO_H263_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 24) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC6X_VIDEO_FRAME_DELTA 
(V4L2_CID_MPEG_MFC_BASE + 25) +#define V4L2_CID_MPEG_MFC51_VIDEO_I_PERIOD_CH V4L2_CID_MPEG_VIDEO_GOP_SIZE +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE_CH V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE +#define V4L2_CID_MPEG_MFC51_VIDEO_BIT_RATE_CH V4L2_CID_MPEG_VIDEO_BITRATE +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VIDEO_MPEG4_LEVEL_6 8 +#define V4L2_MPEG_VIDEO_HEADER_MODE_AT_THE_READY 2 +#define V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_S_B V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY +#define V4L2_CID_MPEG_VIDEO_H264_MVC_VIEW_ID (V4L2_CID_MPEG_MFC_BASE + 42) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_STATUS (V4L2_CID_MPEG_MFC_BASE + 43) +#define V4L2_CID_MPEG_MFC51_VIDEO_I_FRAME_DECODING (V4L2_CID_MPEG_MFC_BASE + 44) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 45) +#define V4L2_CID_MPEG_VIDEO_H264_PREPEND_SPSPPS_TO_IDR (V4L2_CID_MPEG_MFC_BASE + 46) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_DECODER_IMMEDIATE_DISPLAY (V4L2_CID_MPEG_MFC_BASE + 47) +#define V4L2_CID_MPEG_VIDEO_DECODER_DECODING_TIMESTAMP_MODE (V4L2_CID_MPEG_MFC_BASE + 48) +#define V4L2_CID_MPEG_VIDEO_DECODER_WAIT_DECODING_START (V4L2_CID_MPEG_MFC_BASE + 49) +#define V4L2_CID_MPEG_VIDEO_QOS_RATIO (V4L2_CID_MPEG_MFC_BASE + 50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 51) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 52) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 53) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 54) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 55) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT3 (V4L2_CID_MPEG_MFC_BASE + 56) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT4 (V4L2_CID_MPEG_MFC_BASE + 57) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT5 (V4L2_CID_MPEG_MFC_BASE + 58) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT6 (V4L2_CID_MPEG_MFC_BASE + 59) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_VERSION (V4L2_CID_MPEG_MFC_BASE + 60) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 61) +#define V4L2_CID_MPEG_VIDEO_VP8_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 62) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP8_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 63) +#define V4L2_CID_MPEG_VIDEO_VP8_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 64) +#define V4L2_CID_MPEG_VIDEO_VP8_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 65) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_OF_PARTITIONS (V4L2_CID_MPEG_MFC_BASE + 66) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_LEVEL (V4L2_CID_MPEG_MFC_BASE + 67) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_SHARPNESS (V4L2_CID_MPEG_MFC_BASE + 68) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_GOLDEN_FRAMESEL (V4L2_CID_MPEG_MFC_BASE + 69) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_GF_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 
70) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 71) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER0 (V4L2_CID_MPEG_MFC_BASE + 72) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER1 (V4L2_CID_MPEG_MFC_BASE + 73) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER2 (V4L2_CID_MPEG_MFC_BASE + 74) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 75) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_DISABLE_INTRA_MD4X4 (V4L2_CID_MPEG_MFC_BASE + 76) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_TEMPORAL_LAYER (V4L2_CID_MPEG_MFC_BASE + 77) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 78) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 79) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 80) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 81) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 82) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_GET_VERSION_INFO (V4L2_CID_MPEG_MFC_BASE + 91) +#define V4L2_CID_MPEG_MFC_GET_EXTRA_BUFFER_SIZE (V4L2_CID_MPEG_MFC_BASE + 92) +#define V4L2_CID_MPEG_MFC_SET_DUAL_DPB_MODE (V4L2_CID_MPEG_MFC_BASE + 93) +#define V4L2_CID_MPEG_MFC_SET_DYNAMIC_DPB_MODE (V4L2_CID_MPEG_MFC_BASE + 95) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_SET_USER_SHARED_HANDLE (V4L2_CID_MPEG_MFC_BASE + 96) +#define V4L2_CID_MPEG_MFC_GET_EXT_INFO (V4L2_CID_MPEG_MFC_BASE + 97) +#define V4L2_CID_MPEG_MFC_SET_BUF_PROCESS_TYPE (V4L2_CID_MPEG_MFC_BASE + 98) +#define V4L2_CID_MPEG_MFC_GET_10BIT_INFO (V4L2_CID_MPEG_MFC_BASE + 99) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_H264_ENABLE_LTR (V4L2_CID_MPEG_MFC_BASE + 100) +#define V4L2_CID_MPEG_MFC_H264_MARK_LTR (V4L2_CID_MPEG_MFC_BASE + 101) +#define V4L2_CID_MPEG_MFC_H264_USE_LTR (V4L2_CID_MPEG_MFC_BASE + 102) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB_ROW (V4L2_CID_MPEG_MFC_BASE + 103) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_H264_BASE_PRIORITY (V4L2_CID_MPEG_MFC_BASE + 104) +#define V4L2_CID_MPEG_MFC_CONFIG_QP (V4L2_CID_MPEG_MFC_BASE + 105) +#define V4L2_CID_MPEG_MFC_H264_VUI_RESTRICTION_ENABLE (V4L2_CID_MPEG_MFC_BASE + 106) +#define V4L2_CID_MPEG_MFC_GET_DRIVER_INFO (V4L2_CID_MPEG_MFC_BASE + 107) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_CONFIG_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 108) +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 110) +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 111) +#define V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 112) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 113) +#define V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 114) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 115) +#define 
V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_MFC_BASE + 116) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_hevc_hierarchical_coding_type { + V4L2_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_B = 0, + V4L2_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_P = 1, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 117) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 118) +#define V4L2_CID_MPEG_VIDEO_HEVC_PROFILE (V4L2_CID_MPEG_MFC_BASE + 120) +#define V4L2_CID_MPEG_VIDEO_HEVC_LEVEL (V4L2_CID_MPEG_MFC_BASE + 121) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 122) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TIER_FLAG (V4L2_CID_MPEG_MFC_BASE + 123) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_PARTITION_DEPTH (V4L2_CID_MPEG_MFC_BASE + 124) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 125) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_DISABLE (V4L2_CID_MPEG_MFC_BASE + 126) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_SLICE_BOUNDARY (V4L2_CID_MPEG_MFC_BASE + 127) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_BETA_OFFSET_DIV2 (V4L2_CID_MPEG_MFC_BASE + 128) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_TC_OFFSET_DIV2 (V4L2_CID_MPEG_MFC_BASE + 129) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_TYPE (V4L2_CID_MPEG_MFC_BASE + 130) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 131) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LOSSLESS_CU_ENABLE (V4L2_CID_MPEG_MFC_BASE + 132) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_CONST_INTRA_PRED_ENABLE (V4L2_CID_MPEG_MFC_BASE + 133) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WAVEFRONT_ENABLE (V4L2_CID_MPEG_MFC_BASE + 134) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LTR_ENABLE (V4L2_CID_MPEG_MFC_BASE + 135) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_USER_REF (V4L2_CID_MPEG_MFC_BASE + 136) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STORE_REF (V4L2_CID_MPEG_MFC_BASE + 137) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIGN_DATA_HIDING (V4L2_CID_MPEG_MFC_BASE + 138) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_GENERAL_PB_ENABLE (V4L2_CID_MPEG_MFC_BASE + 139) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TEMPORAL_ID_ENABLE (V4L2_CID_MPEG_MFC_BASE + 140) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STRONG_SMOTHING_FLAG (V4L2_CID_MPEG_MFC_BASE + 141) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_NUM_MERGE_MV_MINUS1 (V4L2_CID_MPEG_MFC_BASE + 142) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC_BASE + 143) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC_BASE + 144) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC_BASE + 145) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC_BASE + 146) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_INTRA_PU_SPLIT (V4L2_CID_MPEG_MFC_BASE + 147) 
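
The MFC/HEVC encoder controls above are ordinary V4L2 extended controls, so userspace programs them through VIDIOC_S_EXT_CTRLS using the struct v4l2_ext_control and struct v4l2_ext_controls definitions from the videodev2.h added earlier in this patch. The sketch below is illustrative only and not part of the imported sources: it assumes the patch's header directories are on the include path, that fd is an already opened MFC encoder video node, and that the chosen QP values are arbitrary placeholders; whether a given control is accepted in a given state is up to the MFC driver.

/* Illustrative usage sketch, not part of the imported headers. */
#include <string.h>
#include <sys/ioctl.h>
#include <videodev2.h>                 /* kernel-3.10-headers/videodev2.h from this patch */
#include <videodev2_exynos_media.h>    /* Exynos MFC control IDs defined above */

static int set_hevc_qp_range(int fd)   /* fd: open MFC encoder node (path is caller's choice) */
{
    struct v4l2_ext_control ctrl[2];
    struct v4l2_ext_controls ctrls;

    memset(ctrl, 0, sizeof(ctrl));
    memset(&ctrls, 0, sizeof(ctrls));

    ctrl[0].id = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP;   /* V4L2_CID_MPEG_MFC_BASE + 110 */
    ctrl[0].value = 10;                             /* placeholder QP */
    ctrl[1].id = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP;   /* V4L2_CID_MPEG_MFC_BASE + 111 */
    ctrl[1].value = 42;                             /* placeholder QP */

    ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG;        /* class shared by the MFC controls */
    ctrls.count = 2;
    ctrls.controls = ctrl;

    /* Single atomic update; on failure error_idx reports which control was rejected. */
    return ioctl(fd, VIDIOC_S_EXT_CTRLS, &ctrls);
}
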
+#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_TMV_PREDICTION (V4L2_CID_MPEG_MFC_BASE + 148)
+#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WITHOUT_STARTCODE_ENABLE (V4L2_CID_MPEG_MFC_BASE + 149)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_QP_INDEX_CR (V4L2_CID_MPEG_MFC_BASE + 150)
+#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_QP_INDEX_CB (V4L2_CID_MPEG_MFC_BASE + 151)
+#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIZE_OF_LENGTH_FIELD (V4L2_CID_MPEG_MFC_BASE + 152)
+#define V4L2_CID_MPEG_VIDEO_HEVC_PREPEND_SPSPPS_TO_IDR (V4L2_CID_MPEG_MFC_BASE + 153)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 154)
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 155)
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 156)
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 157)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT3 (V4L2_CID_MPEG_MFC_BASE + 158)
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT4 (V4L2_CID_MPEG_MFC_BASE + 159)
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT5 (V4L2_CID_MPEG_MFC_BASE + 160)
+#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT6 (V4L2_CID_MPEG_MFC_BASE + 161)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_VP9_VERSION (V4L2_CID_MPEG_MFC_BASE + 163)
+#define V4L2_CID_MPEG_VIDEO_VP9_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 164)
+#define V4L2_CID_MPEG_VIDEO_VP9_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 165)
+#define V4L2_CID_MPEG_VIDEO_VP9_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 166)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_VP9_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 167)
+#define V4L2_CID_MPEG_VIDEO_VP9_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 168)
+#define V4L2_CID_MPEG_VIDEO_VP9_GOLDEN_FRAMESEL (V4L2_CID_MPEG_MFC_BASE + 169)
+#define V4L2_CID_MPEG_VIDEO_VP9_GF_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 170)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHY_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 171)
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 172)
+#define V4L2_CID_MPEG_VIDEO_VP9_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 173)
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 174)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 175)
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 176)
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 177)
+#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 178)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_VP9_MAX_PARTITION_DEPTH (V4L2_CID_MPEG_MFC_BASE + 179)
+#define V4L2_CID_MPEG_VIDEO_VP9_DISABLE_INTRA_PU_SPLIT (V4L2_CID_MPEG_MFC_BASE + 180)
+#define V4L2_CID_MPEG_VIDEO_DISABLE_IVF_HEADER (V4L2_CID_MPEG_MFC_BASE + 181)
+#define V4L2_CID_MPEG_VIDEO_MATRIX_COEFFICIENTS (V4L2_CID_MPEG_MFC_BASE + 192)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 201)
+#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 202)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 203)
+#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 204)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_VP8_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 205)
+#define V4L2_CID_MPEG_VIDEO_VP9_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 206)
+#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 207)
+#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 208)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 209)
+#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 210)
+#define V4L2_CID_MPEG_VIDEO_VP8_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 211)
+#define V4L2_CID_MPEG_VIDEO_VP9_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 212)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 213)
+#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 214)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 215)
+#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 216)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 217)
+#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 218)
+#endif
diff --git a/kernel-3.18-headers/linux/v4l2-common.h b/kernel-3.18-headers/linux/v4l2-common.h
new file mode 100644
index 0000000..eaf3bfc
--- /dev/null
+++ b/kernel-3.18-headers/linux/v4l2-common.h
@@ -0,0 +1,55 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef __V4L2_COMMON__
+#define __V4L2_COMMON__
+#include
+#define V4L2_SEL_TGT_CROP 0x0000
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_SEL_TGT_CROP_DEFAULT 0x0001
+#define V4L2_SEL_TGT_CROP_BOUNDS 0x0002
+#define V4L2_SEL_TGT_COMPOSE 0x0100
+#define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102
+#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103
+#define V4L2_SEL_TGT_CROP_ACTIVE V4L2_SEL_TGT_CROP
+#define V4L2_SEL_TGT_COMPOSE_ACTIVE V4L2_SEL_TGT_COMPOSE
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_SUBDEV_SEL_TGT_CROP_ACTUAL V4L2_SEL_TGT_CROP
+#define V4L2_SUBDEV_SEL_TGT_COMPOSE_ACTUAL V4L2_SEL_TGT_COMPOSE
+#define V4L2_SUBDEV_SEL_TGT_CROP_BOUNDS V4L2_SEL_TGT_CROP_BOUNDS
+#define V4L2_SUBDEV_SEL_TGT_COMPOSE_BOUNDS V4L2_SEL_TGT_COMPOSE_BOUNDS
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_SEL_FLAG_GE (1 << 0)
+#define V4L2_SEL_FLAG_LE (1 << 1)
+#define V4L2_SEL_FLAG_KEEP_CONFIG (1 << 2)
+#define V4L2_SUBDEV_SEL_FLAG_SIZE_GE V4L2_SEL_FLAG_GE
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_SUBDEV_SEL_FLAG_SIZE_LE V4L2_SEL_FLAG_LE
+#define V4L2_SUBDEV_SEL_FLAG_KEEP_CONFIG V4L2_SEL_FLAG_KEEP_CONFIG
+struct v4l2_edid {
+ __u32 pad;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 start_block;
+ __u32 blocks;
+ __u32 reserved[5];
+ __u8 *edid;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+#endif
diff --git a/kernel-3.18-headers/linux/v4l2-controls.h b/kernel-3.18-headers/linux/v4l2-controls.h
new file mode 100644
index 0000000..fbc131f
--- /dev/null
+++ b/kernel-3.18-headers/linux/v4l2-controls.h
@@ -0,0 +1,994 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ *** This header was automatically generated from a Linux kernel header
+ *** of the same name, to make information necessary for userspace to
+ *** call into the kernel available to libc. It contains only constants,
+ *** structures, and macros generated from the original header, and thus,
+ *** contains no copyrightable information.
+ ***
+ *** To edit the content of this header, modify the corresponding
+ *** source file (e.g. under external/kernel-headers/original/) then
+ *** run bionic/libc/kernel/tools/update_all.py
+ ***
+ *** Any manual change here will be lost the next time this script will
+ *** be run. You've been warned!
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_V4L2_CONTROLS_H +#define __LINUX_V4L2_CONTROLS_H +#define V4L2_CTRL_CLASS_USER 0x00980000 +#define V4L2_CTRL_CLASS_MPEG 0x00990000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 +#define V4L2_CTRL_CLASS_FM_TX 0x009b0000 +#define V4L2_CTRL_CLASS_FLASH 0x009c0000 +#define V4L2_CTRL_CLASS_JPEG 0x009d0000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_IMAGE_SOURCE 0x009e0000 +#define V4L2_CTRL_CLASS_IMAGE_PROC 0x009f0000 +#define V4L2_CTRL_CLASS_DV 0x00a00000 +#define V4L2_CTRL_CLASS_FM_RX 0x00a10000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_RF_TUNER 0x00a20000 +#define V4L2_CTRL_CLASS_DETECT 0x00a30000 +#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900) +#define V4L2_CID_USER_BASE V4L2_CID_BASE +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1) +#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0) +#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1) +#define V4L2_CID_SATURATION (V4L2_CID_BASE+2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_HUE (V4L2_CID_BASE+3) +#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE+5) +#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE+6) +#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE+7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE+8) +#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE+9) +#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE+10) +#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE+11) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE+12) +#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE+13) +#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE+14) +#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE+15) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_GAMMA (V4L2_CID_BASE+16) +#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) +#define V4L2_CID_EXPOSURE (V4L2_CID_BASE+17) +#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE+18) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_GAIN (V4L2_CID_BASE+19) +#define V4L2_CID_HFLIP (V4L2_CID_BASE+20) +#define V4L2_CID_VFLIP (V4L2_CID_BASE+21) +#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE+24) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_power_line_frequency { + V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0, + V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1, + V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3, +}; +#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE+25) +#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE+26) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27) +#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE+28) +#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE+29) +#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE+30) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_COLORFX (V4L2_CID_BASE+31) 
+enum v4l2_colorfx { + V4L2_COLORFX_NONE = 0, + V4L2_COLORFX_BW = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_SEPIA = 2, + V4L2_COLORFX_NEGATIVE = 3, + V4L2_COLORFX_EMBOSS = 4, + V4L2_COLORFX_SKETCH = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_SKY_BLUE = 6, + V4L2_COLORFX_GRASS_GREEN = 7, + V4L2_COLORFX_SKIN_WHITEN = 8, + V4L2_COLORFX_VIVID = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_AQUA = 10, + V4L2_COLORFX_ART_FREEZE = 11, + V4L2_COLORFX_SILHOUETTE = 12, + V4L2_COLORFX_SOLARIZATION = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_ANTIQUE = 14, + V4L2_COLORFX_SET_CBCR = 15, +}; +#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE+32) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE+33) +#define V4L2_CID_ROTATE (V4L2_CID_BASE+34) +#define V4L2_CID_BG_COLOR (V4L2_CID_BASE+35) +#define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE+36) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE+37) +#define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE+38) +#define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE+39) +#define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE+40) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE+41) +#define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE+42) +#define V4L2_CID_LASTP1 (V4L2_CID_BASE+43) +#define V4L2_CID_USER_MEYE_BASE (V4L2_CID_USER_BASE + 0x1000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_USER_BTTV_BASE (V4L2_CID_USER_BASE + 0x1010) +#define V4L2_CID_USER_S2255_BASE (V4L2_CID_USER_BASE + 0x1030) +#define V4L2_CID_USER_SI476X_BASE (V4L2_CID_USER_BASE + 0x1040) +#define V4L2_CID_USER_TI_VPE_BASE (V4L2_CID_USER_BASE + 0x1050) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_USER_SAA7134_BASE (V4L2_CID_USER_BASE + 0x1060) +#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900) +#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1) +#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE+0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_stream_type { + V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, + V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, + V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, + V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, + V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE+1) +#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE+2) +#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE+3) +#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE+4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE+5) +#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE+6) +#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE+7) +enum v4l2_mpeg_stream_vbi_fmt { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, + V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, +}; +#define 
V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE+100) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_sampling_freq { + V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0, + V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1, + V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE+101) +enum v4l2_mpeg_audio_encoding { + V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1, + V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2, + V4L2_MPEG_AUDIO_ENCODING_AAC = 3, + V4L2_MPEG_AUDIO_ENCODING_AC3 = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE+102) +enum v4l2_mpeg_audio_l1_bitrate { + V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1, + V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2, + V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3, + V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5, + V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6, + V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7, + V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9, + V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10, + V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11, + V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13, +}; +#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE+103) +enum v4l2_mpeg_audio_l2_bitrate { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1, + V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2, + V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4, + V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5, + V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6, + V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8, + V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9, + V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10, + V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12, + V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13, +}; +#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE+104) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_l3_bitrate { + V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1, + V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3, + V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4, + V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5, + V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7, + V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8, + V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9, + V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR 
INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11, + V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12, + V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE+105) +enum v4l2_mpeg_audio_mode { + V4L2_MPEG_AUDIO_MODE_STEREO = 0, + V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_MODE_DUAL = 2, + V4L2_MPEG_AUDIO_MODE_MONO = 3, +}; +#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE+106) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_mode_extension { + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0, + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1, + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3, +}; +#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE+107) +enum v4l2_mpeg_audio_emphasis { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0, + V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1, + V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE+108) +enum v4l2_mpeg_audio_crc { + V4L2_MPEG_AUDIO_CRC_NONE = 0, + V4L2_MPEG_AUDIO_CRC_CRC16 = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE+109) +#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE+110) +#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE+111) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_ac3_bitrate { + V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1, + V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3, + V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4, + V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5, + V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7, + V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8, + V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9, + V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11, + V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12, + V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13, + V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15, + V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16, + V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17, + V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_MPEG_BASE+112) +enum v4l2_mpeg_audio_dec_playback { + V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5, +}; 
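/*
 * Editor's sketch, not part of the imported header: controls in the
 * V4L2_CTRL_CLASS_MPEG class (for example V4L2_CID_MPEG_VIDEO_BITRATE and
 * V4L2_CID_MPEG_VIDEO_GOP_SIZE, defined a little further down in this file)
 * are typically programmed as a batch with VIDIOC_S_EXT_CTRLS on a codec
 * node such as the Exynos MFC encoder. The fd is assumed to be an already
 * opened encoder device; the bitrate and GOP values are illustrative only.
 */
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int configure_encoder_rate_control(int fd)
{
    struct v4l2_ext_control ctrls[2] = {
        { .id = V4L2_CID_MPEG_VIDEO_BITRATE,  .value = 4000000 }, /* 4 Mbit/s */
        { .id = V4L2_CID_MPEG_VIDEO_GOP_SIZE, .value = 30 },      /* one I-frame per 30 */
    };
    struct v4l2_ext_controls arg = {
        .ctrl_class = V4L2_CTRL_CLASS_MPEG,   /* 0x00990000, defined above */
        .count      = 2,
        .controls   = ctrls,
    };
    /* Applied atomically; on failure error_idx points at the offending control. */
    return ioctl(fd, VIDIOC_S_EXT_CTRLS, &arg);
}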
+#define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_MPEG_BASE+113) +#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE+200) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_encoding { + V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0, + V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1, + V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE+201) +enum v4l2_mpeg_video_aspect { + V4L2_MPEG_VIDEO_ASPECT_1x1 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_ASPECT_4x3 = 1, + V4L2_MPEG_VIDEO_ASPECT_16x9 = 2, + V4L2_MPEG_VIDEO_ASPECT_221x100 = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE+202) +#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE+203) +#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE+204) +#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE+205) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE+206) +enum v4l2_mpeg_video_bitrate_mode { + V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0, + V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE+207) +#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE+208) +#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE+209) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE+210) +#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE+211) +#define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_MPEG_BASE+212) +#define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_MPEG_BASE+213) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_MPEG_BASE+214) +#define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_MPEG_BASE+215) +#define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_MPEG_BASE+216) +enum v4l2_mpeg_video_header_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0, + V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1, +}; +#define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_MPEG_BASE+217) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_MPEG_BASE+218) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_MPEG_BASE+219) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_MPEG_BASE+220) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_MPEG_BASE+221) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_multi_slice_mode { + V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0, + V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1, + V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_MPEG_BASE+222) +#define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_MPEG_BASE+223) +#define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_MPEG_BASE+224) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_MPEG_BASE+225) +#define V4L2_CID_MPEG_VIDEO_REPEAT_SEQ_HEADER (V4L2_CID_MPEG_BASE+226) +#define V4L2_CID_MPEG_VIDEO_MV_H_SEARCH_RANGE (V4L2_CID_MPEG_BASE+227) +#define V4L2_CID_MPEG_VIDEO_MV_V_SEARCH_RANGE (V4L2_CID_MPEG_BASE+228) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_MPEG_BASE+300) +#define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP (V4L2_CID_MPEG_BASE+301) +#define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_MPEG_BASE+302) +#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_MPEG_BASE+303) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_MPEG_BASE+304) +#define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_MPEG_BASE+350) +#define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_MPEG_BASE+351) +#define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_MPEG_BASE+352) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_MPEG_BASE+353) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_MPEG_BASE+354) +#define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_MPEG_BASE+355) +#define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_MPEG_BASE+356) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_MPEG_BASE+357) +enum v4l2_mpeg_video_h264_entropy_mode { + V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0, + V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_MPEG_BASE+358) +#define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_MPEG_BASE+359) +enum v4l2_mpeg_video_h264_level { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0, + V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1, + V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2, + V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4, + V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5, + V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6, + V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8, + V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9, + V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10, + V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12, + V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13, + V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14, + V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_MPEG_BASE+360) +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_MPEG_BASE+361) +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_MPEG_BASE+362) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_h264_loop_filter_mode { + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0, + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1, + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_MPEG_BASE+363) +enum 
v4l2_mpeg_video_h264_profile { + V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1, + V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2, + V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10, + V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14, + V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15, + V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_MPEG_BASE+364) +#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_MPEG_BASE+365) +#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_MPEG_BASE+366) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_MPEG_BASE+367) +enum v4l2_mpeg_video_h264_vui_sar_idc { + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_MPEG_BASE+368) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_MPEG_BASE+369) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_MPEG_BASE+370) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_h264_sei_fp_arrangement_type { + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_CHECKERBOARD = 0, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_COLUMN = 1, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_ROW = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + 
V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_SIDE_BY_SIDE = 3, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TOP_BOTTOM = 4, + V4L2_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE_TEMPORAL = 5, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_MPEG_BASE+371) +#define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_MPEG_BASE+372) +enum v4l2_mpeg_video_h264_fmo_map_type { + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6, +}; +#define V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_MPEG_BASE+373) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_MPEG_BASE+374) +enum v4l2_mpeg_video_h264_fmo_change_dir { + V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0, + V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_MPEG_BASE+375) +#define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_MPEG_BASE+376) +#define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_MPEG_BASE+377) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_MPEG_BASE+378) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_MPEG_BASE+379) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_BASE+380) +enum v4l2_mpeg_video_h264_hierarchical_coding_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0, + V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1, +}; +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_BASE+381) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_BASE+382) +#define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_MPEG_BASE+400) +#define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_MPEG_BASE+401) +#define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_MPEG_BASE+402) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_MPEG_BASE+403) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_MPEG_BASE+404) +#define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL (V4L2_CID_MPEG_BASE+405) +enum v4l2_mpeg_video_mpeg4_level { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE 
(V4L2_CID_MPEG_BASE+406) +enum v4l2_mpeg_video_mpeg4_profile { + V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_MPEG_BASE+407) +#define V4L2_CID_MPEG_VIDEO_VPX_NUM_PARTITIONS (V4L2_CID_MPEG_BASE+500) +enum v4l2_vp8_num_partitions { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CID_MPEG_VIDEO_VPX_1_PARTITION = 0, + V4L2_CID_MPEG_VIDEO_VPX_2_PARTITIONS = 1, + V4L2_CID_MPEG_VIDEO_VPX_4_PARTITIONS = 2, + V4L2_CID_MPEG_VIDEO_VPX_8_PARTITIONS = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_VPX_IMD_DISABLE_4X4 (V4L2_CID_MPEG_BASE+501) +#define V4L2_CID_MPEG_VIDEO_VPX_NUM_REF_FRAMES (V4L2_CID_MPEG_BASE+502) +enum v4l2_vp8_num_ref_frames { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CID_MPEG_VIDEO_VPX_1_REF_FRAME = 0, + V4L2_CID_MPEG_VIDEO_VPX_2_REF_FRAME = 1, + V4L2_CID_MPEG_VIDEO_VPX_3_REF_FRAME = 2, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_LEVEL (V4L2_CID_MPEG_BASE+503) +#define V4L2_CID_MPEG_VIDEO_VPX_FILTER_SHARPNESS (V4L2_CID_MPEG_BASE+504) +#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_REF_PERIOD (V4L2_CID_MPEG_BASE+505) +#define V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_SEL (V4L2_CID_MPEG_BASE+506) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_vp8_golden_frame_sel { + V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_PREV = 0, + V4L2_CID_MPEG_VIDEO_VPX_GOLDEN_FRAME_USE_REF_PERIOD = 1, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VPX_MIN_QP (V4L2_CID_MPEG_BASE+507) +#define V4L2_CID_MPEG_VIDEO_VPX_MAX_QP (V4L2_CID_MPEG_BASE+508) +#define V4L2_CID_MPEG_VIDEO_VPX_I_FRAME_QP (V4L2_CID_MPEG_BASE+509) +#define V4L2_CID_MPEG_VIDEO_VPX_P_FRAME_QP (V4L2_CID_MPEG_BASE+510) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VPX_PROFILE (V4L2_CID_MPEG_BASE+511) +#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000) +#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+0) +enum v4l2_mpeg_cx2341x_video_spatial_filter_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0, + V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+2) +enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type { + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3, + 
V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+3) +enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type { + V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE+4) +enum v4l2_mpeg_cx2341x_video_temporal_filter_mode { + V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE+5) +#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE+6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_cx2341x_video_median_filter_type { + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+8) +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE+9) +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE+10) +#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE+11) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_BASE (V4L2_CTRL_CLASS_MPEG | 0x1100) +#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_MPEG_MFC51_BASE+0) +#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_MPEG_MFC51_BASE+1) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_MPEG_MFC51_BASE+2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_mfc51_video_frame_skip_mode { + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0, + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1, + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_MPEG_MFC51_BASE+3) +enum v4l2_mpeg_mfc51_video_force_frame_type { + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1, + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2, +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_MPEG_MFC51_BASE+4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_MPEG_MFC51_BASE+5) +#define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_MPEG_MFC51_BASE+6) +#define 
V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_MPEG_MFC51_BASE+7) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC51_BASE+50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC51_BASE+51) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC51_BASE+52) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC51_BASE+53) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_MPEG_MFC51_BASE+54) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900) +#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1) +#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE+1) +enum v4l2_exposure_auto_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_EXPOSURE_AUTO = 0, + V4L2_EXPOSURE_MANUAL = 1, + V4L2_EXPOSURE_SHUTTER_PRIORITY = 2, + V4L2_EXPOSURE_APERTURE_PRIORITY = 3 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+2) +#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE+3) +#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+5) +#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE+6) +#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE+7) +#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+8) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+9) +#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+10) +#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+11) +#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE+12) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+13) +#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+14) +#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE+15) +#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE+16) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE+17) +#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE+18) +#define V4L2_CID_AUTO_EXPOSURE_BIAS (V4L2_CID_CAMERA_CLASS_BASE+19) +#define V4L2_CID_AUTO_N_PRESET_WHITE_BALANCE (V4L2_CID_CAMERA_CLASS_BASE+20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_auto_n_preset_white_balance { + V4L2_WHITE_BALANCE_MANUAL = 0, + V4L2_WHITE_BALANCE_AUTO = 1, + V4L2_WHITE_BALANCE_INCANDESCENT = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_WHITE_BALANCE_FLUORESCENT = 3, + V4L2_WHITE_BALANCE_FLUORESCENT_H = 4, + V4L2_WHITE_BALANCE_HORIZON = 5, + V4L2_WHITE_BALANCE_DAYLIGHT = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_WHITE_BALANCE_FLASH = 7, + V4L2_WHITE_BALANCE_CLOUDY = 8, + V4L2_WHITE_BALANCE_SHADE = 9, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_WIDE_DYNAMIC_RANGE (V4L2_CID_CAMERA_CLASS_BASE+21) +#define V4L2_CID_IMAGE_STABILIZATION (V4L2_CID_CAMERA_CLASS_BASE+22) +#define 
V4L2_CID_ISO_SENSITIVITY (V4L2_CID_CAMERA_CLASS_BASE+23) +#define V4L2_CID_ISO_SENSITIVITY_AUTO (V4L2_CID_CAMERA_CLASS_BASE+24) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_iso_sensitivity_auto_type { + V4L2_ISO_SENSITIVITY_MANUAL = 0, + V4L2_ISO_SENSITIVITY_AUTO = 1, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_EXPOSURE_METERING (V4L2_CID_CAMERA_CLASS_BASE+25) +enum v4l2_exposure_metering { + V4L2_EXPOSURE_METERING_AVERAGE = 0, + V4L2_EXPOSURE_METERING_CENTER_WEIGHTED = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_EXPOSURE_METERING_SPOT = 2, + V4L2_EXPOSURE_METERING_MATRIX = 3, +}; +#define V4L2_CID_SCENE_MODE (V4L2_CID_CAMERA_CLASS_BASE+26) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_scene_mode { + V4L2_SCENE_MODE_NONE = 0, + V4L2_SCENE_MODE_BACKLIGHT = 1, + V4L2_SCENE_MODE_BEACH_SNOW = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_SCENE_MODE_CANDLE_LIGHT = 3, + V4L2_SCENE_MODE_DAWN_DUSK = 4, + V4L2_SCENE_MODE_FALL_COLORS = 5, + V4L2_SCENE_MODE_FIREWORKS = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_SCENE_MODE_LANDSCAPE = 7, + V4L2_SCENE_MODE_NIGHT = 8, + V4L2_SCENE_MODE_PARTY_INDOOR = 9, + V4L2_SCENE_MODE_PORTRAIT = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_SCENE_MODE_SPORTS = 11, + V4L2_SCENE_MODE_SUNSET = 12, + V4L2_SCENE_MODE_TEXT = 13, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_3A_LOCK (V4L2_CID_CAMERA_CLASS_BASE+27) +#define V4L2_LOCK_EXPOSURE (1 << 0) +#define V4L2_LOCK_WHITE_BALANCE (1 << 1) +#define V4L2_LOCK_FOCUS (1 << 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUTO_FOCUS_START (V4L2_CID_CAMERA_CLASS_BASE+28) +#define V4L2_CID_AUTO_FOCUS_STOP (V4L2_CID_CAMERA_CLASS_BASE+29) +#define V4L2_CID_AUTO_FOCUS_STATUS (V4L2_CID_CAMERA_CLASS_BASE+30) +#define V4L2_AUTO_FOCUS_STATUS_IDLE (0 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_AUTO_FOCUS_STATUS_BUSY (1 << 0) +#define V4L2_AUTO_FOCUS_STATUS_REACHED (1 << 1) +#define V4L2_AUTO_FOCUS_STATUS_FAILED (1 << 2) +#define V4L2_CID_AUTO_FOCUS_RANGE (V4L2_CID_CAMERA_CLASS_BASE+31) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_auto_focus_range { + V4L2_AUTO_FOCUS_RANGE_AUTO = 0, + V4L2_AUTO_FOCUS_RANGE_NORMAL = 1, + V4L2_AUTO_FOCUS_RANGE_MACRO = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_AUTO_FOCUS_RANGE_INFINITY = 3, +}; +#define V4L2_CID_PAN_SPEED (V4L2_CID_CAMERA_CLASS_BASE+32) +#define V4L2_CID_TILT_SPEED (V4L2_CID_CAMERA_CLASS_BASE+33) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900) +#define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1) +#define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1) +#define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3) +#define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5) +#define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6) +#define V4L2_CID_RDS_TX_MONO_STEREO (V4L2_CID_FM_TX_CLASS_BASE + 7) +/* 
WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_TX_ARTIFICIAL_HEAD (V4L2_CID_FM_TX_CLASS_BASE + 8) +#define V4L2_CID_RDS_TX_COMPRESSED (V4L2_CID_FM_TX_CLASS_BASE + 9) +#define V4L2_CID_RDS_TX_DYNAMIC_PTY (V4L2_CID_FM_TX_CLASS_BASE + 10) +#define V4L2_CID_RDS_TX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_TX_CLASS_BASE + 11) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_TX_TRAFFIC_PROGRAM (V4L2_CID_FM_TX_CLASS_BASE + 12) +#define V4L2_CID_RDS_TX_MUSIC_SPEECH (V4L2_CID_FM_TX_CLASS_BASE + 13) +#define V4L2_CID_RDS_TX_ALT_FREQS_ENABLE (V4L2_CID_FM_TX_CLASS_BASE + 14) +#define V4L2_CID_RDS_TX_ALT_FREQS (V4L2_CID_FM_TX_CLASS_BASE + 15) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64) +#define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65) +#define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66) +#define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81) +#define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82) +#define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83) +#define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96) +#define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97) +#define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98) +#define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_preemphasis { + V4L2_PREEMPHASIS_DISABLED = 0, + V4L2_PREEMPHASIS_50_uS = 1, + V4L2_PREEMPHASIS_75_uS = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113) +#define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114) +#define V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1) +#define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1) +enum v4l2_flash_led_mode { + V4L2_FLASH_LED_MODE_NONE, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FLASH_LED_MODE_FLASH, + V4L2_FLASH_LED_MODE_TORCH, +}; +#define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_flash_strobe_source { + V4L2_FLASH_STROBE_SOURCE_SOFTWARE, + V4L2_FLASH_STROBE_SOURCE_EXTERNAL, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3) +#define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4) +#define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5) +#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7) +#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8) +#define 
V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9) +#define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0) +#define V4L2_FLASH_FAULT_TIMEOUT (1 << 1) +#define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2) +#define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4) +#define V4L2_FLASH_FAULT_INDICATOR (1 << 5) +#define V4L2_FLASH_FAULT_UNDER_VOLTAGE (1 << 6) +#define V4L2_FLASH_FAULT_INPUT_VOLTAGE (1 << 7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FLASH_FAULT_LED_OVER_TEMPERATURE (1 << 8) +#define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11) +#define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12) +#define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1) +#define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1) +enum v4l2_jpeg_chroma_subsampling { + V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1, + V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2, + V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3, + V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5, +}; +#define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2) +#define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4) +#define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0) +#define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1) +#define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17) +#define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18) +#define V4L2_CID_IMAGE_SOURCE_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_SOURCE | 0x900) +#define V4L2_CID_IMAGE_SOURCE_CLASS (V4L2_CTRL_CLASS_IMAGE_SOURCE | 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_VBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 1) +#define V4L2_CID_HBLANK (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 2) +#define V4L2_CID_ANALOGUE_GAIN (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 3) +#define V4L2_CID_TEST_PATTERN_RED (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TEST_PATTERN_GREENR (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 5) +#define V4L2_CID_TEST_PATTERN_BLUE (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 6) +#define V4L2_CID_TEST_PATTERN_GREENB (V4L2_CID_IMAGE_SOURCE_CLASS_BASE + 7) +#define V4L2_CID_IMAGE_PROC_CLASS_BASE (V4L2_CTRL_CLASS_IMAGE_PROC | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_IMAGE_PROC_CLASS (V4L2_CTRL_CLASS_IMAGE_PROC | 1) +#define V4L2_CID_LINK_FREQ (V4L2_CID_IMAGE_PROC_CLASS_BASE + 1) +#define V4L2_CID_PIXEL_RATE (V4L2_CID_IMAGE_PROC_CLASS_BASE + 2) +#define V4L2_CID_TEST_PATTERN (V4L2_CID_IMAGE_PROC_CLASS_BASE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
V4L2_CID_DV_CLASS_BASE (V4L2_CTRL_CLASS_DV | 0x900) +#define V4L2_CID_DV_CLASS (V4L2_CTRL_CLASS_DV | 1) +#define V4L2_CID_DV_TX_HOTPLUG (V4L2_CID_DV_CLASS_BASE + 1) +#define V4L2_CID_DV_TX_RXSENSE (V4L2_CID_DV_CLASS_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_DV_TX_EDID_PRESENT (V4L2_CID_DV_CLASS_BASE + 3) +#define V4L2_CID_DV_TX_MODE (V4L2_CID_DV_CLASS_BASE + 4) +enum v4l2_dv_tx_mode { + V4L2_DV_TX_MODE_DVI_D = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_DV_TX_MODE_HDMI = 1, +}; +#define V4L2_CID_DV_TX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 5) +enum v4l2_dv_rgb_range { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_DV_RGB_RANGE_AUTO = 0, + V4L2_DV_RGB_RANGE_LIMITED = 1, + V4L2_DV_RGB_RANGE_FULL = 2, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_DV_RX_POWER_PRESENT (V4L2_CID_DV_CLASS_BASE + 100) +#define V4L2_CID_DV_RX_RGB_RANGE (V4L2_CID_DV_CLASS_BASE + 101) +#define V4L2_CID_FM_RX_CLASS_BASE (V4L2_CTRL_CLASS_FM_RX | 0x900) +#define V4L2_CID_FM_RX_CLASS (V4L2_CTRL_CLASS_FM_RX | 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TUNE_DEEMPHASIS (V4L2_CID_FM_RX_CLASS_BASE + 1) +enum v4l2_deemphasis { + V4L2_DEEMPHASIS_DISABLED = V4L2_PREEMPHASIS_DISABLED, + V4L2_DEEMPHASIS_50_uS = V4L2_PREEMPHASIS_50_uS, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_DEEMPHASIS_75_uS = V4L2_PREEMPHASIS_75_uS, +}; +#define V4L2_CID_RDS_RECEPTION (V4L2_CID_FM_RX_CLASS_BASE + 2) +#define V4L2_CID_RDS_RX_PTY (V4L2_CID_FM_RX_CLASS_BASE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_RX_PS_NAME (V4L2_CID_FM_RX_CLASS_BASE + 4) +#define V4L2_CID_RDS_RX_RADIO_TEXT (V4L2_CID_FM_RX_CLASS_BASE + 5) +#define V4L2_CID_RDS_RX_TRAFFIC_ANNOUNCEMENT (V4L2_CID_FM_RX_CLASS_BASE + 6) +#define V4L2_CID_RDS_RX_TRAFFIC_PROGRAM (V4L2_CID_FM_RX_CLASS_BASE + 7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_RX_MUSIC_SPEECH (V4L2_CID_FM_RX_CLASS_BASE + 8) +#define V4L2_CID_RF_TUNER_CLASS_BASE (V4L2_CTRL_CLASS_RF_TUNER | 0x900) +#define V4L2_CID_RF_TUNER_CLASS (V4L2_CTRL_CLASS_RF_TUNER | 1) +#define V4L2_CID_RF_TUNER_BANDWIDTH_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 11) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RF_TUNER_BANDWIDTH (V4L2_CID_RF_TUNER_CLASS_BASE + 12) +#define V4L2_CID_RF_TUNER_LNA_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 41) +#define V4L2_CID_RF_TUNER_LNA_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 42) +#define V4L2_CID_RF_TUNER_MIXER_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 51) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RF_TUNER_MIXER_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 52) +#define V4L2_CID_RF_TUNER_IF_GAIN_AUTO (V4L2_CID_RF_TUNER_CLASS_BASE + 61) +#define V4L2_CID_RF_TUNER_IF_GAIN (V4L2_CID_RF_TUNER_CLASS_BASE + 62) +#define V4L2_CID_RF_TUNER_PLL_LOCK (V4L2_CID_RF_TUNER_CLASS_BASE + 91) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_DETECT_CLASS_BASE (V4L2_CTRL_CLASS_DETECT | 0x900) +#define V4L2_CID_DETECT_CLASS (V4L2_CTRL_CLASS_DETECT | 1) +#define V4L2_CID_DETECT_MD_MODE (V4L2_CID_DETECT_CLASS_BASE + 1) +enum v4l2_detect_md_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + 
V4L2_DETECT_MD_MODE_DISABLED = 0, + V4L2_DETECT_MD_MODE_GLOBAL = 1, + V4L2_DETECT_MD_MODE_THRESHOLD_GRID = 2, + V4L2_DETECT_MD_MODE_REGION_GRID = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_DETECT_MD_GLOBAL_THRESHOLD (V4L2_CID_DETECT_CLASS_BASE + 2) +#define V4L2_CID_DETECT_MD_THRESHOLD_GRID (V4L2_CID_DETECT_CLASS_BASE + 3) +#define V4L2_CID_DETECT_MD_REGION_GRID (V4L2_CID_DETECT_CLASS_BASE + 4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#endif diff --git a/kernel-3.18-headers/videodev2.h b/kernel-3.18-headers/videodev2.h new file mode 100644 index 0000000..3355444 --- /dev/null +++ b/kernel-3.18-headers/videodev2.h @@ -0,0 +1,1571 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef _UAPI__LINUX_VIDEODEV2_H +#define _UAPI__LINUX_VIDEODEV2_H +#include +#include +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#include +#include +#include +#include +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDEO_MAX_FRAME 32 +#define VIDEO_MAX_PLANES 8 +#define v4l2_fourcc(a,b,c,d) ((__u32) (a) | ((__u32) (b) << 8) | ((__u32) (c) << 16) | ((__u32) (d) << 24)) +#define v4l2_fourcc_be(a,b,c,d) (v4l2_fourcc(a, b, c, d) | (1 << 31)) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_field { + V4L2_FIELD_ANY = 0, + V4L2_FIELD_NONE = 1, + V4L2_FIELD_TOP = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_BOTTOM = 3, + V4L2_FIELD_INTERLACED = 4, + V4L2_FIELD_SEQ_TB = 5, + V4L2_FIELD_SEQ_BT = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_ALTERNATE = 7, + V4L2_FIELD_INTERLACED_TB = 8, + V4L2_FIELD_INTERLACED_BT = 9, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FIELD_HAS_TOP(field) ((field) == V4L2_FIELD_TOP || (field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +#define V4L2_FIELD_HAS_BOTTOM(field) ((field) == V4L2_FIELD_BOTTOM || (field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +#define V4L2_FIELD_HAS_BOTH(field) ((field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +#define V4L2_FIELD_HAS_T_OR_B(field) 
((field) == V4L2_FIELD_BOTTOM || (field) == V4L2_FIELD_TOP || (field) == V4L2_FIELD_ALTERNATE) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_buf_type { + V4L2_BUF_TYPE_VIDEO_CAPTURE = 1, + V4L2_BUF_TYPE_VIDEO_OUTPUT = 2, + V4L2_BUF_TYPE_VIDEO_OVERLAY = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_VBI_CAPTURE = 4, + V4L2_BUF_TYPE_VBI_OUTPUT = 5, + V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6, + V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8, + V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9, + V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10, + V4L2_BUF_TYPE_SDR_CAPTURE = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_PRIVATE = 0x80, +}; +#define V4L2_TYPE_IS_MULTIPLANAR(type) ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) +#define V4L2_TYPE_IS_OUTPUT(type) ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY || (type) == V4L2_BUF_TYPE_VBI_OUTPUT || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_tuner_type { + V4L2_TUNER_RADIO = 1, + V4L2_TUNER_ANALOG_TV = 2, + V4L2_TUNER_DIGITAL_TV = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_TUNER_ADC = 4, + V4L2_TUNER_RF = 5, +}; +enum v4l2_memory { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MEMORY_MMAP = 1, + V4L2_MEMORY_USERPTR = 2, + V4L2_MEMORY_OVERLAY = 3, + V4L2_MEMORY_DMABUF = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +enum v4l2_colorspace { + V4L2_COLORSPACE_DEFAULT = 0, + V4L2_COLORSPACE_SMPTE170M = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_SMPTE240M = 2, + V4L2_COLORSPACE_REC709 = 3, + V4L2_COLORSPACE_BT878 = 4, + V4L2_COLORSPACE_470_SYSTEM_M = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_470_SYSTEM_BG = 6, + V4L2_COLORSPACE_JPEG = 7, + V4L2_COLORSPACE_SRGB = 8, + V4L2_COLORSPACE_ADOBERGB = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_BT2020 = 10, + V4L2_COLORSPACE_RAW = 11, + V4L2_COLORSPACE_DCI_P3 = 12, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MAP_COLORSPACE_DEFAULT(is_sdtv,is_hdtv) ((is_sdtv) ? V4L2_COLORSPACE_SMPTE170M : ((is_hdtv) ? V4L2_COLORSPACE_REC709 : V4L2_COLORSPACE_SRGB)) +enum v4l2_ycbcr_encoding { + V4L2_YCBCR_ENC_DEFAULT = 0, + V4L2_YCBCR_ENC_601 = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_YCBCR_ENC_709 = 2, + V4L2_YCBCR_ENC_XV601 = 3, + V4L2_YCBCR_ENC_XV709 = 4, + V4L2_YCBCR_ENC_SYCC = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_YCBCR_ENC_BT2020 = 6, + V4L2_YCBCR_ENC_BT2020_CONST_LUM = 7, + V4L2_YCBCR_ENC_SMPTE240M = 8, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MAP_YCBCR_ENC_DEFAULT(colsp) (((colsp) == V4L2_COLORSPACE_REC709 || (colsp) == V4L2_COLORSPACE_DCI_P3) ? V4L2_YCBCR_ENC_709 : ((colsp) == V4L2_COLORSPACE_BT2020 ? V4L2_YCBCR_ENC_BT2020 : ((colsp) == V4L2_COLORSPACE_SMPTE240M ? 
V4L2_YCBCR_ENC_SMPTE240M : V4L2_YCBCR_ENC_601))) +enum v4l2_quantization { + V4L2_QUANTIZATION_DEFAULT = 0, + V4L2_QUANTIZATION_FULL_RANGE = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_QUANTIZATION_LIM_RANGE = 2, +}; +#define V4L2_MAP_QUANTIZATION_DEFAULT(is_rgb,colsp,ycbcr_enc) (((is_rgb) && (colsp) == V4L2_COLORSPACE_BT2020) ? V4L2_QUANTIZATION_LIM_RANGE : (((is_rgb) || (ycbcr_enc) == V4L2_YCBCR_ENC_XV601 || (ycbcr_enc) == V4L2_YCBCR_ENC_XV709 || (colsp) == V4L2_COLORSPACE_JPEG) ? V4L2_QUANTIZATION_FULL_RANGE : V4L2_QUANTIZATION_LIM_RANGE)) +enum v4l2_priority { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PRIORITY_UNSET = 0, + V4L2_PRIORITY_BACKGROUND = 1, + V4L2_PRIORITY_INTERACTIVE = 2, + V4L2_PRIORITY_RECORD = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE, +}; +struct v4l2_rect { + __s32 left; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 top; + __u32 width; + __u32 height; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_fract { + __u32 numerator; + __u32 denominator; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_capability { + __u8 driver[16]; + __u8 card[32]; + __u8 bus_info[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 version; + __u32 capabilities; + __u32 device_caps; + __u32 reserved[3]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CAP_VIDEO_CAPTURE 0x00000001 +#define V4L2_CAP_VIDEO_OUTPUT 0x00000002 +#define V4L2_CAP_VIDEO_OVERLAY 0x00000004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_VBI_CAPTURE 0x00000010 +#define V4L2_CAP_VBI_OUTPUT 0x00000020 +#define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 +#define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_RDS_CAPTURE 0x00000100 +#define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 +#define V4L2_CAP_HW_FREQ_SEEK 0x00000400 +#define V4L2_CAP_RDS_OUTPUT 0x00000800 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_VIDEO_CAPTURE_MPLANE 0x00001000 +#define V4L2_CAP_VIDEO_OUTPUT_MPLANE 0x00002000 +#define V4L2_CAP_VIDEO_M2M_MPLANE 0x00004000 +#define V4L2_CAP_VIDEO_M2M 0x00008000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_TUNER 0x00010000 +#define V4L2_CAP_AUDIO 0x00020000 +#define V4L2_CAP_RADIO 0x00040000 +#define V4L2_CAP_MODULATOR 0x00080000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_SDR_CAPTURE 0x00100000 +#define V4L2_CAP_EXT_PIX_FORMAT 0x00200000 +#define V4L2_CAP_READWRITE 0x01000000 +#define V4L2_CAP_ASYNCIO 0x02000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_STREAMING 0x04000000 +#define V4L2_CAP_DEVICE_CAPS 0x80000000 +struct v4l2_pix_format { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; + __u32 pixelformat; + __u32 field; + __u32 bytesperline; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sizeimage; + __u32 colorspace; + __u32 priv; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + 
__u32 ycbcr_enc; + __u32 quantization; +}; +#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') +#define V4L2_PIX_FMT_ARGB444 v4l2_fourcc('A', 'R', '1', '2') +#define V4L2_PIX_FMT_XRGB444 v4l2_fourcc('X', 'R', '1', '2') +#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_ARGB555 v4l2_fourcc('A', 'R', '1', '5') +#define V4L2_PIX_FMT_XRGB555 v4l2_fourcc('X', 'R', '1', '5') +#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') +#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_ARGB555X v4l2_fourcc_be('A', 'R', '1', '5') +#define V4L2_PIX_FMT_XRGB555X v4l2_fourcc_be('X', 'R', '1', '5') +#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') +#define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') +#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') +#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') +#define V4L2_PIX_FMT_ABGR32 v4l2_fourcc('A', 'R', '2', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_XBGR32 v4l2_fourcc('X', 'R', '2', '4') +#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('B', 'A', '2', '4') +#define V4L2_PIX_FMT_XRGB32 v4l2_fourcc('B', 'X', '2', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') +#define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') +#define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') +#define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') +#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') +#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') +#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') +#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') +#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') +#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') +#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') +#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') +#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') +#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') +#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') +#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') +#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') +#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') +#define 
V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') +#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') +#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') +#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') +#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') +#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') +#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') +#define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') +#define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') +#define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV16M v4l2_fourcc('N', 'M', '1', '6') +#define V4L2_PIX_FMT_NV61M v4l2_fourcc('N', 'M', '6', '1') +#define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') +#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') +#define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') +#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') +#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') +#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') +#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') +#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') +#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') +#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') +#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') +#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') +#define V4L2_PIX_FMT_SBGGR10ALAW8 v4l2_fourcc('a', 'B', 'A', '8') +#define V4L2_PIX_FMT_SGBRG10ALAW8 v4l2_fourcc('a', 'G', 'A', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGRBG10ALAW8 v4l2_fourcc('a', 'g', 'A', '8') +#define V4L2_PIX_FMT_SRGGB10ALAW8 v4l2_fourcc('a', 'R', 'A', '8') +#define V4L2_PIX_FMT_SBGGR10DPCM8 v4l2_fourcc('b', 'B', 'A', '8') +#define V4L2_PIX_FMT_SGBRG10DPCM8 v4l2_fourcc('b', 'G', 'A', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0') +#define V4L2_PIX_FMT_SRGGB10DPCM8 v4l2_fourcc('b', 'R', 'A', '8') +#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') +#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') +#define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') +#define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') +#define 
V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') +#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') +#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') +#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') +#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') +#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') +#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') +#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') +#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') +#define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') +#define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') +#define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') +#define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') +#define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') +#define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') +#define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') +#define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') +#define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') +#define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') +#define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') +#define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') +#define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') +#define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') +#define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') +#define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') +#define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') +#define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') +#define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SDR_FMT_CU8 v4l2_fourcc('C', 'U', '0', '8') +#define V4L2_SDR_FMT_CU16LE v4l2_fourcc('C', 'U', '1', '6') +#define V4L2_SDR_FMT_CS8 v4l2_fourcc('C', 'S', '0', '8') +#define V4L2_SDR_FMT_CS14LE v4l2_fourcc('C', 'S', '1', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SDR_FMT_RU12LE v4l2_fourcc('R', 'U', '1', '2') +#define V4L2_PIX_FMT_PRIV_MAGIC 0xfeedcafe +#define V4L2_PIX_FMT_FLAG_PREMUL_ALPHA 0x00000001 +struct v4l2_fmtdesc { +/* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u32 type; + __u32 flags; + __u8 description[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixelformat; + __u32 reserved[4]; +}; +#define V4L2_FMT_FLAG_COMPRESSED 0x0001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FMT_FLAG_EMULATED 0x0002 +enum v4l2_frmsizetypes { + V4L2_FRMSIZE_TYPE_DISCRETE = 1, + V4L2_FRMSIZE_TYPE_CONTINUOUS = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FRMSIZE_TYPE_STEPWISE = 3, +}; +struct v4l2_frmsize_discrete { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; +}; +struct v4l2_frmsize_stepwise { + __u32 min_width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 max_width; + __u32 step_width; + __u32 min_height; + __u32 max_height; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 step_height; +}; +struct v4l2_frmsizeenum { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixel_format; + __u32 type; + union { + struct v4l2_frmsize_discrete discrete; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_frmsize_stepwise stepwise; + }; + __u32 reserved[2]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_frmivaltypes { + V4L2_FRMIVAL_TYPE_DISCRETE = 1, + V4L2_FRMIVAL_TYPE_CONTINUOUS = 2, + V4L2_FRMIVAL_TYPE_STEPWISE = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_frmival_stepwise { + struct v4l2_fract min; + struct v4l2_fract max; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract step; +}; +struct v4l2_frmivalenum { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixel_format; + __u32 width; + __u32 height; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_fract discrete; + struct v4l2_frmival_stepwise stepwise; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +struct v4l2_timecode { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u8 frames; + __u8 seconds; + __u8 minutes; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 hours; + __u8 userbits[4]; +}; +#define V4L2_TC_TYPE_24FPS 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_TYPE_25FPS 2 +#define V4L2_TC_TYPE_30FPS 3 +#define V4L2_TC_TYPE_50FPS 4 +#define V4L2_TC_TYPE_60FPS 5 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_FLAG_DROPFRAME 0x0001 +#define V4L2_TC_FLAG_COLORFRAME 0x0002 +#define V4L2_TC_USERBITS_field 0x000C +#define V4L2_TC_USERBITS_USERDEFINED 0x0000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_USERBITS_8BITCHARS 0x0008 +struct v4l2_jpegcompression { + int quality; + int APPn; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int APP_len; + char APP_data[60]; + int COM_len; + char COM_data[60]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 jpeg_markers; +#define V4L2_JPEG_MARKER_DHT (1 << 3) 
+#define V4L2_JPEG_MARKER_DQT (1 << 4) +#define V4L2_JPEG_MARKER_DRI (1 << 5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_MARKER_COM (1 << 6) +#define V4L2_JPEG_MARKER_APP (1 << 7) +}; +struct v4l2_requestbuffers { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 count; + __u32 type; + __u32 memory; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_plane { + __u32 bytesused; + __u32 length; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + __u32 mem_offset; + unsigned long userptr; + __s32 fd; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } m; + __u32 data_offset; + __u32 reserved[11]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_buffer { + __u32 index; + __u32 type; + __u32 bytesused; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 field; + struct timeval timestamp; + struct v4l2_timecode timecode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sequence; + __u32 memory; + union { + __u32 offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + unsigned long userptr; + struct v4l2_plane * planes; + __s32 fd; + } m; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 length; + __u32 reserved2; + __u32 reserved; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_MAPPED 0x00000001 +#define V4L2_BUF_FLAG_QUEUED 0x00000002 +#define V4L2_BUF_FLAG_DONE 0x00000004 +#define V4L2_BUF_FLAG_KEYFRAME 0x00000008 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_PFRAME 0x00000010 +#define V4L2_BUF_FLAG_BFRAME 0x00000020 +#define V4L2_BUF_FLAG_ERROR 0x00000040 +#define V4L2_BUF_FLAG_TIMECODE 0x00000100 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_PREPARED 0x00000400 +#define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x00000800 +#define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x00001000 +#define V4L2_BUF_FLAG_TIMESTAMP_MASK 0x0000e000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN 0x00000000 +#define V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC 0x00002000 +#define V4L2_BUF_FLAG_TIMESTAMP_COPY 0x00004000 +#define V4L2_BUF_FLAG_USE_SYNC 0x00008000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_TSTAMP_SRC_MASK 0x00070000 +#define V4L2_BUF_FLAG_TSTAMP_SRC_EOF 0x00000000 +#define V4L2_BUF_FLAG_TSTAMP_SRC_SOE 0x00010000 +struct v4l2_exportbuffer { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 index; + __u32 plane; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 fd; + __u32 reserved[11]; +}; +struct v4l2_framebuffer { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capability; + __u32 flags; + void * base; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 width; + __u32 height; + __u32 pixelformat; + __u32 field; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 bytesperline; + __u32 sizeimage; + __u32 colorspace; + __u32 priv; +/* WARNING: DO NOT 
EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } fmt; +}; +#define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001 +#define V4L2_FBUF_CAP_CHROMAKEY 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004 +#define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008 +#define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010 +#define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040 +#define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080 +#define V4L2_FBUF_FLAG_PRIMARY 0x0001 +#define V4L2_FBUF_FLAG_OVERLAY 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_FLAG_CHROMAKEY 0x0004 +#define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008 +#define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010 +#define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040 +struct v4l2_clip { + struct v4l2_rect c; + struct v4l2_clip __user * next; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_window { + struct v4l2_rect w; + __u32 field; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 chromakey; + struct v4l2_clip __user * clips; + __u32 clipcount; + void __user * bitmap; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 global_alpha; +}; +struct v4l2_captureparm { + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capturemode; + struct v4l2_fract timeperframe; + __u32 extendedmode; + __u32 readbuffers; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[4]; +}; +#define V4L2_MODE_HIGHQUALITY 0x0001 +#define V4L2_CAP_TIMEPERFRAME 0x1000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_outputparm { + __u32 capability; + __u32 outputmode; + struct v4l2_fract timeperframe; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 extendedmode; + __u32 writebuffers; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_cropcap { + __u32 type; + struct v4l2_rect bounds; + struct v4l2_rect defrect; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract pixelaspect; +}; +struct v4l2_crop { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_rect c; +}; +struct v4l2_selection { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 target; + __u32 flags; + struct v4l2_rect r; + __u32 reserved[9]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +typedef __u64 v4l2_std_id; +#define V4L2_STD_PAL_B ((v4l2_std_id) 0x00000001) +#define V4L2_STD_PAL_B1 ((v4l2_std_id) 0x00000002) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_G ((v4l2_std_id) 0x00000004) +#define V4L2_STD_PAL_H ((v4l2_std_id) 0x00000008) +#define V4L2_STD_PAL_I ((v4l2_std_id) 0x00000010) +#define V4L2_STD_PAL_D ((v4l2_std_id) 0x00000020) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_D1 ((v4l2_std_id) 0x00000040) +#define V4L2_STD_PAL_K ((v4l2_std_id) 0x00000080) +#define V4L2_STD_PAL_M ((v4l2_std_id) 0x00000100) 
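The v4l2_fract, v4l2_captureparm and V4L2_CAP_TIMEPERFRAME definitions above are what a capture application fills in to negotiate a frame interval. A minimal sketch of that call, assuming an already-open capture fd, the v4l2_streamparm wrapper and VIDIOC_S_PARM ioctl declared further down in this header, and the usual <linux/videodev2.h> install path; the helper name set_frame_rate is illustrative and not part of this patch:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Illustrative only: request roughly `fps` frames per second on an
 * already-open V4L2 capture file descriptor. */
static int set_frame_rate(int fd, unsigned int fps)
{
    struct v4l2_streamparm parm;

    memset(&parm, 0, sizeof(parm));
    parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* timeperframe is a v4l2_fract measured in seconds per frame. */
    parm.parm.capture.timeperframe.numerator = 1;
    parm.parm.capture.timeperframe.denominator = fps;

    if (ioctl(fd, VIDIOC_S_PARM, &parm) < 0)
        return -1;

    /* Drivers that honour the request advertise V4L2_CAP_TIMEPERFRAME and
     * write back the (possibly rounded) interval they actually use. */
    if (!(parm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME))
        return -1;

    return 0;
}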
+#define V4L2_STD_PAL_N ((v4l2_std_id) 0x00000200) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_Nc ((v4l2_std_id) 0x00000400) +#define V4L2_STD_PAL_60 ((v4l2_std_id) 0x00000800) +#define V4L2_STD_NTSC_M ((v4l2_std_id) 0x00001000) +#define V4L2_STD_NTSC_M_JP ((v4l2_std_id) 0x00002000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_NTSC_443 ((v4l2_std_id) 0x00004000) +#define V4L2_STD_NTSC_M_KR ((v4l2_std_id) 0x00008000) +#define V4L2_STD_SECAM_B ((v4l2_std_id) 0x00010000) +#define V4L2_STD_SECAM_D ((v4l2_std_id) 0x00020000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_SECAM_G ((v4l2_std_id) 0x00040000) +#define V4L2_STD_SECAM_H ((v4l2_std_id) 0x00080000) +#define V4L2_STD_SECAM_K ((v4l2_std_id) 0x00100000) +#define V4L2_STD_SECAM_K1 ((v4l2_std_id) 0x00200000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_SECAM_L ((v4l2_std_id) 0x00400000) +#define V4L2_STD_SECAM_LC ((v4l2_std_id) 0x00800000) +#define V4L2_STD_ATSC_8_VSB ((v4l2_std_id) 0x01000000) +#define V4L2_STD_ATSC_16_VSB ((v4l2_std_id) 0x02000000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_NTSC (V4L2_STD_NTSC_M | V4L2_STD_NTSC_M_JP | V4L2_STD_NTSC_M_KR) +#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D | V4L2_STD_SECAM_K | V4L2_STD_SECAM_K1) +#define V4L2_STD_SECAM (V4L2_STD_SECAM_B | V4L2_STD_SECAM_G | V4L2_STD_SECAM_H | V4L2_STD_SECAM_DK | V4L2_STD_SECAM_L | V4L2_STD_SECAM_LC) +#define V4L2_STD_PAL_BG (V4L2_STD_PAL_B | V4L2_STD_PAL_B1 | V4L2_STD_PAL_G) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_DK (V4L2_STD_PAL_D | V4L2_STD_PAL_D1 | V4L2_STD_PAL_K) +#define V4L2_STD_PAL (V4L2_STD_PAL_BG | V4L2_STD_PAL_DK | V4L2_STD_PAL_H | V4L2_STD_PAL_I) +#define V4L2_STD_B (V4L2_STD_PAL_B | V4L2_STD_PAL_B1 | V4L2_STD_SECAM_B) +#define V4L2_STD_G (V4L2_STD_PAL_G | V4L2_STD_SECAM_G) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_H (V4L2_STD_PAL_H | V4L2_STD_SECAM_H) +#define V4L2_STD_L (V4L2_STD_SECAM_L | V4L2_STD_SECAM_LC) +#define V4L2_STD_GH (V4L2_STD_G | V4L2_STD_H) +#define V4L2_STD_DK (V4L2_STD_PAL_DK | V4L2_STD_SECAM_DK) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_BG (V4L2_STD_B | V4L2_STD_G) +#define V4L2_STD_MN (V4L2_STD_PAL_M | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc | V4L2_STD_NTSC) +#define V4L2_STD_MTS (V4L2_STD_NTSC_M | V4L2_STD_PAL_M | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc) +#define V4L2_STD_525_60 (V4L2_STD_PAL_M | V4L2_STD_PAL_60 | V4L2_STD_NTSC | V4L2_STD_NTSC_443) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_625_50 (V4L2_STD_PAL | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc | V4L2_STD_SECAM) +#define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB | V4L2_STD_ATSC_16_VSB) +#define V4L2_STD_UNKNOWN 0 +#define V4L2_STD_ALL (V4L2_STD_525_60 | V4L2_STD_625_50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_standard { + __u32 index; + v4l2_std_id id; + __u8 name[24]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract frameperiod; + __u32 framelines; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_bt_timings { + __u32 width; + __u32 height; + __u32 interlaced; +/* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 polarities; + __u64 pixelclock; + __u32 hfrontporch; + __u32 hsync; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 hbackporch; + __u32 vfrontporch; + __u32 vsync; + __u32 vbackporch; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 il_vfrontporch; + __u32 il_vsync; + __u32 il_vbackporch; + __u32 standards; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 reserved[14]; +} __attribute__((packed)); +#define V4L2_DV_PROGRESSIVE 0 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_INTERLACED 1 +#define V4L2_DV_VSYNC_POS_POL 0x00000001 +#define V4L2_DV_HSYNC_POS_POL 0x00000002 +#define V4L2_DV_BT_STD_CEA861 (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_STD_DMT (1 << 1) +#define V4L2_DV_BT_STD_CVT (1 << 2) +#define V4L2_DV_BT_STD_GTF (1 << 3) +#define V4L2_DV_FL_REDUCED_BLANKING (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_FL_CAN_REDUCE_FPS (1 << 1) +#define V4L2_DV_FL_REDUCED_FPS (1 << 2) +#define V4L2_DV_FL_HALF_LINE (1 << 3) +#define V4L2_DV_BT_BLANKING_WIDTH(bt) ((bt)->hfrontporch + (bt)->hsync + (bt)->hbackporch) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_FRAME_WIDTH(bt) ((bt)->width + V4L2_DV_BT_BLANKING_WIDTH(bt)) +#define V4L2_DV_BT_BLANKING_HEIGHT(bt) ((bt)->vfrontporch + (bt)->vsync + (bt)->vbackporch + (bt)->il_vfrontporch + (bt)->il_vsync + (bt)->il_vbackporch) +#define V4L2_DV_BT_FRAME_HEIGHT(bt) ((bt)->height + V4L2_DV_BT_BLANKING_HEIGHT(bt)) +struct v4l2_dv_timings { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + union { + struct v4l2_bt_timings bt; + __u32 reserved[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; +} __attribute__((packed)); +#define V4L2_DV_BT_656_1120 0 +struct v4l2_enum_dv_timings { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u32 pad; + __u32 reserved[2]; + struct v4l2_dv_timings timings; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_bt_timings_cap { + __u32 min_width; + __u32 max_width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 min_height; + __u32 max_height; + __u64 min_pixelclock; + __u64 max_pixelclock; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 standards; + __u32 capabilities; + __u32 reserved[16]; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_CAP_INTERLACED (1 << 0) +#define V4L2_DV_BT_CAP_PROGRESSIVE (1 << 1) +#define V4L2_DV_BT_CAP_REDUCED_BLANKING (1 << 2) +#define V4L2_DV_BT_CAP_CUSTOM (1 << 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_dv_timings_cap { + __u32 type; + __u32 pad; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_bt_timings_cap bt; + __u32 raw_data[32]; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_input { + __u32 index; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 audioset; + __u32 
tuner; + v4l2_std_id std; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 status; + __u32 capabilities; + __u32 reserved[3]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_INPUT_TYPE_TUNER 1 +#define V4L2_INPUT_TYPE_CAMERA 2 +#define V4L2_IN_ST_NO_POWER 0x00000001 +#define V4L2_IN_ST_NO_SIGNAL 0x00000002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_NO_COLOR 0x00000004 +#define V4L2_IN_ST_HFLIP 0x00000010 +#define V4L2_IN_ST_VFLIP 0x00000020 +#define V4L2_IN_ST_NO_H_LOCK 0x00000100 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_COLOR_KILL 0x00000200 +#define V4L2_IN_ST_NO_SYNC 0x00010000 +#define V4L2_IN_ST_NO_EQU 0x00020000 +#define V4L2_IN_ST_NO_CARRIER 0x00040000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_MACROVISION 0x01000000 +#define V4L2_IN_ST_NO_ACCESS 0x02000000 +#define V4L2_IN_ST_VTR 0x04000000 +#define V4L2_IN_CAP_DV_TIMINGS 0x00000002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_CAP_CUSTOM_TIMINGS V4L2_IN_CAP_DV_TIMINGS +#define V4L2_IN_CAP_STD 0x00000004 +struct v4l2_output { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[32]; + __u32 type; + __u32 audioset; + __u32 modulator; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + v4l2_std_id std; + __u32 capabilities; + __u32 reserved[3]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_OUTPUT_TYPE_MODULATOR 1 +#define V4L2_OUTPUT_TYPE_ANALOG 2 +#define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3 +#define V4L2_OUT_CAP_DV_TIMINGS 0x00000002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_OUT_CAP_CUSTOM_TIMINGS V4L2_OUT_CAP_DV_TIMINGS +#define V4L2_OUT_CAP_STD 0x00000004 +struct v4l2_control { + __u32 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 value; +}; +struct v4l2_ext_control { + __u32 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 size; + __u32 reserved2[1]; + union { + __s32 value; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s64 value64; + char __user * string; + __u8 __user * p_u8; + __u16 __user * p_u16; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 __user * p_u32; + void __user * ptr; + }; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_ext_controls { + __u32 ctrl_class; + __u32 count; + __u32 error_idx; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; + struct v4l2_ext_control * controls; +}; +#define V4L2_CTRL_ID_MASK (0x0fffffff) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL) +#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000) +#define V4L2_CTRL_MAX_DIMS (4) +enum v4l2_ctrl_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_INTEGER = 1, + V4L2_CTRL_TYPE_BOOLEAN = 2, + V4L2_CTRL_TYPE_MENU = 3, + V4L2_CTRL_TYPE_BUTTON = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_INTEGER64 = 5, + V4L2_CTRL_TYPE_CTRL_CLASS = 6, + 
V4L2_CTRL_TYPE_STRING = 7, + V4L2_CTRL_TYPE_BITMASK = 8, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_INTEGER_MENU = 9, + V4L2_CTRL_COMPOUND_TYPES = 0x0100, + V4L2_CTRL_TYPE_U8 = 0x0100, + V4L2_CTRL_TYPE_U16 = 0x0101, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_U32 = 0x0102, +}; +struct v4l2_queryctrl { + __u32 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u8 name[32]; + __s32 minimum; + __s32 maximum; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 step; + __s32 default_value; + __u32 flags; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_query_ext_ctrl { + __u32 id; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + char name[32]; + __s64 minimum; + __s64 maximum; + __u64 step; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s64 default_value; + __u32 flags; + __u32 elem_size; + __u32 elems; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 nr_of_dims; + __u32 dims[V4L2_CTRL_MAX_DIMS]; + __u32 reserved[32]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_querymenu { + __u32 id; + __u32 index; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[32]; + __s64 value; + }; + __u32 reserved; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_CTRL_FLAG_DISABLED 0x0001 +#define V4L2_CTRL_FLAG_GRABBED 0x0002 +#define V4L2_CTRL_FLAG_READ_ONLY 0x0004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_UPDATE 0x0008 +#define V4L2_CTRL_FLAG_INACTIVE 0x0010 +#define V4L2_CTRL_FLAG_SLIDER 0x0020 +#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_VOLATILE 0x0080 +#define V4L2_CTRL_FLAG_HAS_PAYLOAD 0x0100 +#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000 +#define V4L2_CTRL_FLAG_NEXT_COMPOUND 0x40000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MAX_CTRLS 1024 +#define V4L2_CID_PRIVATE_BASE 0x08000000 +struct v4l2_tuner { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[32]; + __u32 type; + __u32 capability; + __u32 rangelow; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangehigh; + __u32 rxsubchans; + __u32 audmode; + __s32 signal; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 afc; + __u32 reserved[4]; +}; +struct v4l2_modulator { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u8 name[32]; + __u32 capability; + __u32 rangelow; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangehigh; + __u32 txsubchans; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_LOW 0x0001 +#define V4L2_TUNER_CAP_NORM 0x0002 +#define V4L2_TUNER_CAP_HWSEEK_BOUNDED 0x0004 +#define V4L2_TUNER_CAP_HWSEEK_WRAP 0x0008 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_STEREO 0x0010 +#define V4L2_TUNER_CAP_LANG2 
0x0020 +#define V4L2_TUNER_CAP_SAP 0x0020 +#define V4L2_TUNER_CAP_LANG1 0x0040 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_RDS 0x0080 +#define V4L2_TUNER_CAP_RDS_BLOCK_IO 0x0100 +#define V4L2_TUNER_CAP_RDS_CONTROLS 0x0200 +#define V4L2_TUNER_CAP_FREQ_BANDS 0x0400 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_HWSEEK_PROG_LIM 0x0800 +#define V4L2_TUNER_CAP_1HZ 0x1000 +#define V4L2_TUNER_SUB_MONO 0x0001 +#define V4L2_TUNER_SUB_STEREO 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_SUB_LANG2 0x0004 +#define V4L2_TUNER_SUB_SAP 0x0004 +#define V4L2_TUNER_SUB_LANG1 0x0008 +#define V4L2_TUNER_SUB_RDS 0x0010 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_MODE_MONO 0x0000 +#define V4L2_TUNER_MODE_STEREO 0x0001 +#define V4L2_TUNER_MODE_LANG2 0x0002 +#define V4L2_TUNER_MODE_SAP 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_MODE_LANG1 0x0003 +#define V4L2_TUNER_MODE_LANG1_LANG2 0x0004 +struct v4l2_frequency { + __u32 tuner; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 frequency; + __u32 reserved[8]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BAND_MODULATION_VSB (1 << 1) +#define V4L2_BAND_MODULATION_FM (1 << 2) +#define V4L2_BAND_MODULATION_AM (1 << 3) +struct v4l2_frequency_band { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 tuner; + __u32 type; + __u32 index; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangelow; + __u32 rangehigh; + __u32 modulation; + __u32 reserved[9]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_hw_freq_seek { + __u32 tuner; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 seek_upward; + __u32 wrap_around; + __u32 spacing; + __u32 rangelow; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangehigh; + __u32 reserved[5]; +}; +struct v4l2_rds_data { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 lsb; + __u8 msb; + __u8 block; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_MSK 0x7 +#define V4L2_RDS_BLOCK_A 0 +#define V4L2_RDS_BLOCK_B 1 +#define V4L2_RDS_BLOCK_C 2 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_D 3 +#define V4L2_RDS_BLOCK_C_ALT 4 +#define V4L2_RDS_BLOCK_INVALID 7 +#define V4L2_RDS_BLOCK_CORRECTED 0x40 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_ERROR 0x80 +struct v4l2_audio { + __u32 index; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capability; + __u32 mode; + __u32 reserved[2]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_AUDCAP_STEREO 0x00001 +#define V4L2_AUDCAP_AVL 0x00002 +#define V4L2_AUDMODE_AVL 0x00001 +struct v4l2_audioout { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u8 name[32]; + __u32 capability; + __u32 mode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ 
+ __u32 reserved[2]; +}; +#define V4L2_ENC_IDX_FRAME_I (0) +#define V4L2_ENC_IDX_FRAME_P (1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_IDX_FRAME_B (2) +#define V4L2_ENC_IDX_FRAME_MASK (0xf) +struct v4l2_enc_idx_entry { + __u64 offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u64 pts; + __u32 length; + __u32 flags; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_ENC_IDX_ENTRIES (64) +struct v4l2_enc_idx { + __u32 entries; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 entries_cap; + __u32 reserved[4]; + struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_CMD_START (0) +#define V4L2_ENC_CMD_STOP (1) +#define V4L2_ENC_CMD_PAUSE (2) +#define V4L2_ENC_CMD_RESUME (3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0) +struct v4l2_encoder_cmd { + __u32 cmd; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct { + __u32 data[8]; + } raw; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; +}; +#define V4L2_DEC_CMD_START (0) +#define V4L2_DEC_CMD_STOP (1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_CMD_PAUSE (2) +#define V4L2_DEC_CMD_RESUME (3) +#define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0) +#define V4L2_DEC_CMD_PAUSE_TO_BLACK (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_CMD_STOP_TO_BLACK (1 << 0) +#define V4L2_DEC_CMD_STOP_IMMEDIATELY (1 << 1) +#define V4L2_DEC_START_FMT_NONE (0) +#define V4L2_DEC_START_FMT_GOP (1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_decoder_cmd { + __u32 cmd; + __u32 flags; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct { + __u64 pts; + } stop; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 speed; + __u32 format; + } start; + struct { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 data[16]; + } raw; + }; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_vbi_format { + __u32 sampling_rate; + __u32 offset; + __u32 samples_per_line; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sample_format; + __s32 start[2]; + __u32 count[2]; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define V4L2_VBI_UNSYNC (1 << 0) +#define V4L2_VBI_INTERLACED (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_VBI_ITU_525_F1_START (1) +#define V4L2_VBI_ITU_525_F2_START (264) +#define V4L2_VBI_ITU_625_F1_START (1) +#define V4L2_VBI_ITU_625_F2_START (314) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_sliced_vbi_format { + __u16 service_set; + __u16 service_lines[2][24]; + __u32 io_size; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define V4L2_SLICED_TELETEXT_B (0x0001) +#define V4L2_SLICED_VPS (0x0400) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ 
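struct v4l2_decoder_cmd above, paired with the VIDIOC_DECODER_CMD ioctl defined near the end of this header, is how userspace drains a memory-to-memory decoder before tearing it down. A minimal sketch under the same assumptions as the previous example; decoder_drain is an illustrative name, not something added by this patch:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Illustrative only: tell a mem2mem decoder to finish the buffers that are
 * already queued and then stop producing output. */
static int decoder_drain(int fd)
{
    struct v4l2_decoder_cmd cmd;

    memset(&cmd, 0, sizeof(cmd));
    cmd.cmd = V4L2_DEC_CMD_STOP; /* decode what is queued, then stop */
    cmd.stop.pts = 0;            /* 0 means: stop after the last queued frame */

    return ioctl(fd, VIDIOC_DECODER_CMD, &cmd);
}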
+#define V4L2_SLICED_CAPTION_525 (0x1000) +#define V4L2_SLICED_WSS_625 (0x4000) +#define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525) +#define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_sliced_vbi_cap { + __u16 service_set; + __u16 service_lines[2][24]; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[3]; +}; +struct v4l2_sliced_vbi_data { + __u32 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 field; + __u32 line; + __u32 reserved; + __u8 data[48]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1) +#define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4) +#define V4L2_MPEG_VBI_IVTV_WSS_625 (5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VBI_IVTV_VPS (7) +struct v4l2_mpeg_vbi_itv0_line { + __u8 id; + __u8 data[42]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_mpeg_vbi_itv0 { + __le32 linemask[2]; + struct v4l2_mpeg_vbi_itv0_line line[35]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_mpeg_vbi_ITV0 { + struct v4l2_mpeg_vbi_itv0_line line[36]; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0" +#define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0" +struct v4l2_mpeg_vbi_fmt_ivtv { + __u8 magic[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_mpeg_vbi_itv0 itv0; + struct v4l2_mpeg_vbi_ITV0 ITV0; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_plane_pix_format { + __u32 sizeimage; + __u16 bytesperline; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u16 reserved[7]; +} __attribute__((packed)); +struct v4l2_pix_format_mplane { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; + __u32 pixelformat; + __u32 field; + __u32 colorspace; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES]; + __u8 num_planes; + __u8 flags; + __u8 ycbcr_enc; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 quantization; + __u8 reserved[8]; +} __attribute__((packed)); +struct v4l2_sdr_format { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 pixelformat; + __u32 buffersize; + __u8 reserved[24]; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_format { + __u32 type; + union { + struct v4l2_pix_format pix; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_pix_format_mplane pix_mp; + struct v4l2_window win; + struct v4l2_vbi_format vbi; + struct v4l2_sliced_vbi_format sliced; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_sdr_format sdr; + __u8 raw_data[200]; + } fmt; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_streamparm { + __u32 type; + union { + struct v4l2_captureparm capture; +/* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_outputparm output; + __u8 raw_data[200]; + } parm; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_ALL 0 +#define V4L2_EVENT_VSYNC 1 +#define V4L2_EVENT_EOS 2 +#define V4L2_EVENT_CTRL 3 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_FRAME_SYNC 4 +#define V4L2_EVENT_SOURCE_CHANGE 5 +#define V4L2_EVENT_MOTION_DET 6 +#define V4L2_EVENT_PRIVATE_START 0x08000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_event_vsync { + __u8 field; +} __attribute__((packed)); +#define V4L2_EVENT_CTRL_CH_VALUE (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_CTRL_CH_FLAGS (1 << 1) +#define V4L2_EVENT_CTRL_CH_RANGE (1 << 2) +struct v4l2_event_ctrl { + __u32 changes; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + union { + __s32 value; + __s64 value64; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; + __u32 flags; + __s32 minimum; + __s32 maximum; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 step; + __s32 default_value; +}; +struct v4l2_event_frame_sync { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 frame_sequence; +}; +#define V4L2_EVENT_SRC_CH_RESOLUTION (1 << 0) +struct v4l2_event_src_change { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 changes; +}; +#define V4L2_EVENT_MD_FL_HAVE_FRAME_SEQ (1 << 0) +struct v4l2_event_motion_det { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 frame_sequence; + __u32 region_mask; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_event { + __u32 type; + union { + struct v4l2_event_vsync vsync; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_event_ctrl ctrl; + struct v4l2_event_frame_sync frame_sync; + struct v4l2_event_src_change src_change; + struct v4l2_event_motion_det motion_det; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 data[64]; + } u; + __u32 pending; + __u32 sequence; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct timespec timestamp; + __u32 id; + __u32 reserved[8]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_SUB_FL_SEND_INITIAL (1 << 0) +#define V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK (1 << 1) +struct v4l2_event_subscription { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 id; + __u32 flags; + __u32 reserved[5]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CHIP_MATCH_BRIDGE 0 +#define V4L2_CHIP_MATCH_SUBDEV 4 +#define V4L2_CHIP_MATCH_HOST V4L2_CHIP_MATCH_BRIDGE +#define V4L2_CHIP_MATCH_I2C_DRIVER 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CHIP_MATCH_I2C_ADDR 2 +#define V4L2_CHIP_MATCH_AC97 3 +struct v4l2_dbg_match { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + __u32 addr; + char name[32]; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_dbg_register { + struct v4l2_dbg_match match; + 
__u32 size; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u64 reg; + __u64 val; +} __attribute__((packed)); +#define V4L2_CHIP_FL_READABLE (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CHIP_FL_WRITABLE (1 << 1) +struct v4l2_dbg_chip_info { + struct v4l2_dbg_match match; + char name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 reserved[32]; +} __attribute__((packed)); +struct v4l2_create_buffers { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u32 count; + __u32 memory; + struct v4l2_format format; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[8]; +}; +#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability) +#define VIDIOC_RESERVED _IO('V', 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc) +#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format) +#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format) +#define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer) +#define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer) +#define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer) +#define VIDIOC_OVERLAY _IOW('V', 14, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer) +#define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer) +#define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer) +#define VIDIOC_STREAMON _IOW('V', 18, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_STREAMOFF _IOW('V', 19, int) +#define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm) +#define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm) +#define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_STD _IOW('V', 24, v4l2_std_id) +#define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard) +#define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input) +#define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control) +#define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner) +#define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner) +#define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio) +#define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl) +#define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu) +#define VIDIOC_G_INPUT _IOR('V', 38, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_INPUT _IOWR('V', 39, int) +#define VIDIOC_G_EDID _IOWR('V', 40, struct v4l2_edid) +#define VIDIOC_S_EDID _IOWR('V', 41, struct v4l2_edid) +#define VIDIOC_G_OUTPUT _IOR('V', 46, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_OUTPUT _IOWR('V', 47, int) +#define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output) +#define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout) +#define 
VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator) +#define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator) +#define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency) +#define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap) +#define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop) +#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop) +#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression) +#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id) +#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format) +#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout) +#define VIDIOC_G_PRIORITY _IOR('V', 67, __u32) +#define VIDIOC_S_PRIORITY _IOW('V', 68, __u32) +#define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_LOG_STATUS _IO('V', 70) +#define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls) +#define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls) +#define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum) +#define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum) +#define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx) +#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd) +#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register) +#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, struct v4l2_dbg_register) +#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings) +#define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings) +#define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event) +#define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription) +#define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers) +#define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer) +#define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection) +#define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd) +#define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd) +#define VIDIOC_ENUM_DV_TIMINGS _IOWR('V', 98, struct v4l2_enum_dv_timings) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QUERY_DV_TIMINGS 
_IOR('V', 99, struct v4l2_dv_timings) +#define VIDIOC_DV_TIMINGS_CAP _IOWR('V', 100, struct v4l2_dv_timings_cap) +#define VIDIOC_ENUM_FREQ_BANDS _IOWR('V', 101, struct v4l2_frequency_band) +#define VIDIOC_DBG_G_CHIP_INFO _IOWR('V', 102, struct v4l2_dbg_chip_info) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QUERY_EXT_CTRL _IOWR('V', 103, struct v4l2_query_ext_ctrl) +#define BASE_VIDIOC_PRIVATE 192 +#endif diff --git a/kernel-3.18-headers/videodev2_exynos_media.h b/kernel-3.18-headers/videodev2_exynos_media.h new file mode 100644 index 0000000..7562e2b --- /dev/null +++ b/kernel-3.18-headers/videodev2_exynos_media.h @@ -0,0 +1,53 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_VIDEODEV2_EXYNOS_MEDIA_H +#define __LINUX_VIDEODEV2_EXYNOS_MEDIA_H +#include +#define V4L2_CID_EXYNOS_BASE (V4L2_CTRL_CLASS_USER | 0x2000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CACHEABLE (V4L2_CID_EXYNOS_BASE + 10) +#define V4L2_CID_CSC_EQ_MODE (V4L2_CID_EXYNOS_BASE + 100) +#define V4L2_CID_CSC_EQ (V4L2_CID_EXYNOS_BASE + 101) +#define V4L2_CID_CSC_RANGE (V4L2_CID_EXYNOS_BASE + 102) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CONTENT_PROTECTION (V4L2_CID_EXYNOS_BASE + 201) +#define V4L2_PIX_FMT_NV12N v4l2_fourcc('N', 'N', '1', '2') +#define V4L2_PIX_FMT_NV12NT v4l2_fourcc('T', 'N', '1', '2') +#define V4L2_PIX_FMT_YUV420N v4l2_fourcc('Y', 'N', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV12N_10B v4l2_fourcc('B', 'N', '1', '2') +#ifndef __ALIGN_UP +#define __ALIGN_UP(x,a) (((x) + ((a) - 1)) & ~((a) - 1)) +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define NV12N_Y_SIZE(w,h) (__ALIGN_UP((w), 16) * __ALIGN_UP((h), 16) + 256) +#define NV12N_CBCR_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w), 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +#define NV12N_CBCR_BASE(base,w,h) ((base) + NV12N_Y_SIZE((w), (h))) +#define NV12N_10B_Y_2B_SIZE(w,h) ((__ALIGN_UP((w) / 4, 16) * __ALIGN_UP((h), 16) + 64)) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define NV12N_10B_CBCR_2B_SIZE(w,h) ((__ALIGN_UP((w) / 4, 16) * (__ALIGN_UP((h), 16) / 2) + 64)) +#define NV12N_10B_CBCR_BASE(base,w,h) ((base) + NV12N_Y_SIZE((w), (h)) + NV12N_10B_Y_2B_SIZE((w), (h))) +#define YUV420N_Y_SIZE(w,h) (__ALIGN_UP((w), 16) * __ALIGN_UP((h), 16) + 256) +#define YUV420N_CB_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w) / 2, 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +/* 
WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define YUV420N_CR_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w) / 2, 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +#define YUV420N_CB_BASE(base,w,h) ((base) + YUV420N_Y_SIZE((w), (h))) +#define YUV420N_CR_BASE(base,w,h) (YUV420N_CB_BASE((base), (w), (h)) + YUV420N_CB_SIZE((w), (h))) +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ diff --git a/kernel-3.4-headers/videodev2.h b/kernel-3.4-headers/videodev2.h new file mode 100644 index 0000000..df0ab32 --- /dev/null +++ b/kernel-3.4-headers/videodev2.h @@ -0,0 +1,2098 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef _UAPI__LINUX_VIDEODEV2_H +#define _UAPI__LINUX_VIDEODEV2_H +#include +#include +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#include +#include +#define VIDEO_MAX_FRAME 32 +#define VIDEO_MAX_PLANES 8 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VID_TYPE_CAPTURE 1 +#define VID_TYPE_TUNER 2 +#define VID_TYPE_TELETEXT 4 +#define VID_TYPE_OVERLAY 8 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VID_TYPE_CHROMAKEY 16 +#define VID_TYPE_CLIPPING 32 +#define VID_TYPE_FRAMERAM 64 +#define VID_TYPE_SCALES 128 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VID_TYPE_MONOCHROME 256 +#define VID_TYPE_SUBCAPTURE 512 +#define VID_TYPE_MPEG_DECODER 1024 +#define VID_TYPE_MPEG_ENCODER 2048 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VID_TYPE_MJPEG_DECODER 4096 +#define VID_TYPE_MJPEG_ENCODER 8192 +#define v4l2_fourcc(a,b,c,d) ((__u32) (a) | ((__u32) (b) << 8) | ((__u32) (c) << 16) | ((__u32) (d) << 24)) +enum v4l2_field { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_ANY = 0, + V4L2_FIELD_NONE = 1, + V4L2_FIELD_TOP = 2, + V4L2_FIELD_BOTTOM = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_INTERLACED = 4, + V4L2_FIELD_SEQ_TB = 5, + V4L2_FIELD_SEQ_BT = 6, + V4L2_FIELD_ALTERNATE = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FIELD_INTERLACED_TB = 8, + V4L2_FIELD_INTERLACED_BT = 9, +}; +#define V4L2_FIELD_HAS_TOP(field) ((field) == V4L2_FIELD_TOP || (field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
V4L2_FIELD_HAS_BOTTOM(field) ((field) == V4L2_FIELD_BOTTOM || (field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +#define V4L2_FIELD_HAS_BOTH(field) ((field) == V4L2_FIELD_INTERLACED || (field) == V4L2_FIELD_INTERLACED_TB || (field) == V4L2_FIELD_INTERLACED_BT || (field) == V4L2_FIELD_SEQ_TB || (field) == V4L2_FIELD_SEQ_BT) +enum v4l2_buf_type { + V4L2_BUF_TYPE_VIDEO_CAPTURE = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_VIDEO_OUTPUT = 2, + V4L2_BUF_TYPE_VIDEO_OVERLAY = 3, + V4L2_BUF_TYPE_VBI_CAPTURE = 4, + V4L2_BUF_TYPE_VBI_OUTPUT = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6, + V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7, + V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8, + V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10, + V4L2_BUF_TYPE_PRIVATE = 0x80, +}; +#define V4L2_TYPE_IS_MULTIPLANAR(type) ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TYPE_IS_OUTPUT(type) ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY || (type) == V4L2_BUF_TYPE_VBI_OUTPUT || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT) +enum v4l2_tuner_type { + V4L2_TUNER_RADIO = 1, + V4L2_TUNER_ANALOG_TV = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_TUNER_DIGITAL_TV = 3, +}; +enum v4l2_memory { + V4L2_MEMORY_MMAP = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MEMORY_USERPTR = 2, + V4L2_MEMORY_OVERLAY = 3, + V4L2_MEMORY_DMABUF = 4, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_colorspace { + V4L2_COLORSPACE_DEFAULT = 0, + V4L2_COLORSPACE_SMPTE170M = 1, + V4L2_COLORSPACE_SMPTE240M = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_REC709 = 3, + V4L2_COLORSPACE_BT878 = 4, + V4L2_COLORSPACE_470_SYSTEM_M = 5, + V4L2_COLORSPACE_470_SYSTEM_BG = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_JPEG = 7, + V4L2_COLORSPACE_SRGB = 8, + V4L2_COLORSPACE_ADOBERGB = 9, + V4L2_COLORSPACE_BT2020 = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORSPACE_RAW = 11, + V4L2_COLORSPACE_DCI_P3 = 12, +}; +enum v4l2_priority { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PRIORITY_UNSET = 0, + V4L2_PRIORITY_BACKGROUND = 1, + V4L2_PRIORITY_INTERACTIVE = 2, + V4L2_PRIORITY_RECORD = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE, +}; +struct v4l2_rect { + __s32 left; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 top; + __s32 width; + __s32 height; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_fract { + __u32 numerator; + __u32 denominator; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_capability { + __u8 driver[16]; + __u8 card[32]; + __u8 bus_info[32]; +/* 
WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 version; + __u32 capabilities; + __u32 device_caps; + __u32 reserved[3]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CAP_VIDEO_CAPTURE 0x00000001 +#define V4L2_CAP_VIDEO_OUTPUT 0x00000002 +#define V4L2_CAP_VIDEO_OVERLAY 0x00000004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_VBI_CAPTURE 0x00000010 +#define V4L2_CAP_VBI_OUTPUT 0x00000020 +#define V4L2_CAP_SLICED_VBI_CAPTURE 0x00000040 +#define V4L2_CAP_SLICED_VBI_OUTPUT 0x00000080 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_RDS_CAPTURE 0x00000100 +#define V4L2_CAP_VIDEO_OUTPUT_OVERLAY 0x00000200 +#define V4L2_CAP_HW_FREQ_SEEK 0x00000400 +#define V4L2_CAP_RDS_OUTPUT 0x00000800 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_VIDEO_CAPTURE_MPLANE 0x00001000 +#define V4L2_CAP_VIDEO_OUTPUT_MPLANE 0x00002000 +#define V4L2_CAP_TUNER 0x00010000 +#define V4L2_CAP_AUDIO 0x00020000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_RADIO 0x00040000 +#define V4L2_CAP_MODULATOR 0x00080000 +#define V4L2_CAP_READWRITE 0x01000000 +#define V4L2_CAP_ASYNCIO 0x02000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CAP_STREAMING 0x04000000 +#define V4L2_CAP_DEVICE_CAPS 0x80000000 +struct v4l2_pix_format { + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; + __u32 pixelformat; + enum v4l2_field field; + __u32 bytesperline; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sizeimage; + enum v4l2_colorspace colorspace; + __u32 priv; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') +#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') +#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') +#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') +#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') +#define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') +#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') +#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') +#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') +#define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') +#define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') +#define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') +#define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') +#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') +#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') +#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') 
+#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') +#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') +#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') +#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') +#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') +#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') +#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') +#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') +#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') +#define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') +#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') +#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') +#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') +#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') +#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') +#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') +#define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') +#define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') +#define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') +#define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') +#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') +#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') +#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') +#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') +#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') +#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') +#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') +#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') +#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') +#define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') +#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') +#define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') +#define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') +#define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', 
'4') +#define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') +#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') +#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') +#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') +#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') +#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') +#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') +#define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') +#define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') +#define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') +#define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') +#define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') +#define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') +#define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') +#define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') +#define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') +#define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') +#define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') +#define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') +#define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') +#define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') +#define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') +#define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') +#define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') +#define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') +#define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') +struct v4l2_fmtdesc { + __u32 index; + enum v4l2_buf_type type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u8 description[32]; + __u32 pixelformat; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_FMT_FLAG_COMPRESSED 0x0001 +#define V4L2_FMT_FLAG_EMULATED 0x0002 +enum v4l2_frmsizetypes { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FRMSIZE_TYPE_DISCRETE = 1, + V4L2_FRMSIZE_TYPE_CONTINUOUS = 2, + V4L2_FRMSIZE_TYPE_STEPWISE = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_frmsize_discrete { + 
__u32 width; + __u32 height; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_frmsize_stepwise { + __u32 min_width; + __u32 max_width; + __u32 step_width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 min_height; + __u32 max_height; + __u32 step_height; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_frmsizeenum { + __u32 index; + __u32 pixel_format; + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_frmsize_discrete discrete; + struct v4l2_frmsize_stepwise stepwise; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +enum v4l2_frmivaltypes { + V4L2_FRMIVAL_TYPE_DISCRETE = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FRMIVAL_TYPE_CONTINUOUS = 2, + V4L2_FRMIVAL_TYPE_STEPWISE = 3, +}; +struct v4l2_frmival_stepwise { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract min; + struct v4l2_fract max; + struct v4l2_fract step; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_frmivalenum { + __u32 index; + __u32 pixel_format; + __u32 width; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 height; + __u32 type; + union { + struct v4l2_fract discrete; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_frmival_stepwise stepwise; + }; + __u32 reserved[2]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_timecode { + __u32 type; + __u32 flags; + __u8 frames; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 seconds; + __u8 minutes; + __u8 hours; + __u8 userbits[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_TC_TYPE_24FPS 1 +#define V4L2_TC_TYPE_25FPS 2 +#define V4L2_TC_TYPE_30FPS 3 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_TYPE_50FPS 4 +#define V4L2_TC_TYPE_60FPS 5 +#define V4L2_TC_FLAG_DROPFRAME 0x0001 +#define V4L2_TC_FLAG_COLORFRAME 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TC_USERBITS_field 0x000C +#define V4L2_TC_USERBITS_USERDEFINED 0x0000 +#define V4L2_TC_USERBITS_8BITCHARS 0x0008 +struct v4l2_jpegcompression { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int quality; + int APPn; + int APP_len; + char APP_data[60]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int COM_len; + char COM_data[60]; + __u32 jpeg_markers; +#define V4L2_JPEG_MARKER_DHT (1 << 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_MARKER_DQT (1 << 4) +#define V4L2_JPEG_MARKER_DRI (1 << 5) +#define V4L2_JPEG_MARKER_COM (1 << 6) +#define V4L2_JPEG_MARKER_APP (1 << 7) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_requestbuffers { + __u32 count; + enum v4l2_buf_type type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum v4l2_memory memory; + __u32 reserved[2]; +}; +struct v4l2_plane { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 bytesused; + __u32 length; + union { + __u32 mem_offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - 
SEE TOP FOR INSTRUCTIONS */ + unsigned long userptr; + int fd; + } m; + __u32 data_offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[11]; +}; +struct v4l2_buffer { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum v4l2_buf_type type; + __u32 bytesused; + __u32 flags; + enum v4l2_field field; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct timeval timestamp; + struct v4l2_timecode timecode; + __u32 sequence; + enum v4l2_memory memory; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + __u32 offset; + unsigned long userptr; + struct v4l2_plane * planes; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + int fd; + } m; + __u32 length; + __u32 input; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved; +}; +#define V4L2_BUF_FLAG_MAPPED 0x0001 +#define V4L2_BUF_FLAG_QUEUED 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_DONE 0x0004 +#define V4L2_BUF_FLAG_KEYFRAME 0x0008 +#define V4L2_BUF_FLAG_PFRAME 0x0010 +#define V4L2_BUF_FLAG_BFRAME 0x0020 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_ERROR 0x0040 +#define V4L2_BUF_FLAG_TIMECODE 0x0100 +#define V4L2_BUF_FLAG_INPUT 0x0200 +#define V4L2_BUF_FLAG_PREPARED 0x0400 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x0800 +#define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x1000 +#define V4L2_BUF_FLAG_USE_SYNC 0x2000 +struct v4l2_exportbuffer { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 index; + __u32 plane; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 fd; + __u32 reserved[11]; +}; +struct v4l2_framebuffer { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capability; + __u32 flags; + void * base; + struct v4l2_pix_format fmt; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001 +#define V4L2_FBUF_CAP_CHROMAKEY 0x0002 +#define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008 +#define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010 +#define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020 +#define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080 +#define V4L2_FBUF_FLAG_PRIMARY 0x0001 +#define V4L2_FBUF_FLAG_OVERLAY 0x0002 +#define V4L2_FBUF_FLAG_CHROMAKEY 0x0004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008 +#define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010 +#define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020 +#define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_clip { + struct v4l2_rect c; + struct v4l2_clip __user * next; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_window { + struct v4l2_rect w; + enum v4l2_field field; + __u32 chromakey; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_clip __user * clips; + __u32 clipcount; + 
void __user * bitmap; + __u8 global_alpha; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_captureparm { + __u32 capability; + __u32 capturemode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract timeperframe; + __u32 extendedmode; + __u32 readbuffers; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_MODE_HIGHQUALITY 0x0001 +#define V4L2_CAP_TIMEPERFRAME 0x1000 +struct v4l2_outputparm { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 capability; + __u32 outputmode; + struct v4l2_fract timeperframe; + __u32 extendedmode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 writebuffers; + __u32 reserved[4]; +}; +struct v4l2_cropcap { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum v4l2_buf_type type; + struct v4l2_rect bounds; + struct v4l2_rect defrect; + struct v4l2_fract pixelaspect; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_crop { + enum v4l2_buf_type type; + struct v4l2_rect c; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_SEL_FLAG_GE 0x00000001 +#define V4L2_SEL_FLAG_LE 0x00000002 +#define V4L2_SEL_TGT_CROP_ACTIVE 0x0000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SEL_TGT_CROP_DEFAULT 0x0001 +#define V4L2_SEL_TGT_CROP_BOUNDS 0x0002 +#define V4L2_SEL_TGT_COMPOSE_ACTIVE 0x0100 +#define V4L2_SEL_TGT_COMPOSE_DEFAULT 0x0101 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SEL_TGT_COMPOSE_BOUNDS 0x0102 +#define V4L2_SEL_TGT_COMPOSE_PADDED 0x0103 +struct v4l2_selection { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 target; + __u32 flags; + struct v4l2_rect r; + __u32 reserved[9]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +typedef __u64 v4l2_std_id; +#define V4L2_STD_PAL_B ((v4l2_std_id) 0x00000001) +#define V4L2_STD_PAL_B1 ((v4l2_std_id) 0x00000002) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_G ((v4l2_std_id) 0x00000004) +#define V4L2_STD_PAL_H ((v4l2_std_id) 0x00000008) +#define V4L2_STD_PAL_I ((v4l2_std_id) 0x00000010) +#define V4L2_STD_PAL_D ((v4l2_std_id) 0x00000020) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_D1 ((v4l2_std_id) 0x00000040) +#define V4L2_STD_PAL_K ((v4l2_std_id) 0x00000080) +#define V4L2_STD_PAL_M ((v4l2_std_id) 0x00000100) +#define V4L2_STD_PAL_N ((v4l2_std_id) 0x00000200) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_Nc ((v4l2_std_id) 0x00000400) +#define V4L2_STD_PAL_60 ((v4l2_std_id) 0x00000800) +#define V4L2_STD_NTSC_M ((v4l2_std_id) 0x00001000) +#define V4L2_STD_NTSC_M_JP ((v4l2_std_id) 0x00002000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_NTSC_443 ((v4l2_std_id) 0x00004000) +#define V4L2_STD_NTSC_M_KR ((v4l2_std_id) 0x00008000) +#define V4L2_STD_SECAM_B ((v4l2_std_id) 0x00010000) +#define V4L2_STD_SECAM_D ((v4l2_std_id) 0x00020000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_SECAM_G ((v4l2_std_id) 0x00040000) +#define V4L2_STD_SECAM_H ((v4l2_std_id) 0x00080000) +#define 
V4L2_STD_SECAM_K ((v4l2_std_id) 0x00100000) +#define V4L2_STD_SECAM_K1 ((v4l2_std_id) 0x00200000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_SECAM_L ((v4l2_std_id) 0x00400000) +#define V4L2_STD_SECAM_LC ((v4l2_std_id) 0x00800000) +#define V4L2_STD_ATSC_8_VSB ((v4l2_std_id) 0x01000000) +#define V4L2_STD_ATSC_16_VSB ((v4l2_std_id) 0x02000000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_NTSC (V4L2_STD_NTSC_M | V4L2_STD_NTSC_M_JP | V4L2_STD_NTSC_M_KR) +#define V4L2_STD_SECAM_DK (V4L2_STD_SECAM_D | V4L2_STD_SECAM_K | V4L2_STD_SECAM_K1) +#define V4L2_STD_SECAM (V4L2_STD_SECAM_B | V4L2_STD_SECAM_G | V4L2_STD_SECAM_H | V4L2_STD_SECAM_DK | V4L2_STD_SECAM_L | V4L2_STD_SECAM_LC) +#define V4L2_STD_PAL_BG (V4L2_STD_PAL_B | V4L2_STD_PAL_B1 | V4L2_STD_PAL_G) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_PAL_DK (V4L2_STD_PAL_D | V4L2_STD_PAL_D1 | V4L2_STD_PAL_K) +#define V4L2_STD_PAL (V4L2_STD_PAL_BG | V4L2_STD_PAL_DK | V4L2_STD_PAL_H | V4L2_STD_PAL_I) +#define V4L2_STD_B (V4L2_STD_PAL_B | V4L2_STD_PAL_B1 | V4L2_STD_SECAM_B) +#define V4L2_STD_G (V4L2_STD_PAL_G | V4L2_STD_SECAM_G) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_H (V4L2_STD_PAL_H | V4L2_STD_SECAM_H) +#define V4L2_STD_L (V4L2_STD_SECAM_L | V4L2_STD_SECAM_LC) +#define V4L2_STD_GH (V4L2_STD_G | V4L2_STD_H) +#define V4L2_STD_DK (V4L2_STD_PAL_DK | V4L2_STD_SECAM_DK) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_BG (V4L2_STD_B | V4L2_STD_G) +#define V4L2_STD_MN (V4L2_STD_PAL_M | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc | V4L2_STD_NTSC) +#define V4L2_STD_MTS (V4L2_STD_NTSC_M | V4L2_STD_PAL_M | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc) +#define V4L2_STD_525_60 (V4L2_STD_PAL_M | V4L2_STD_PAL_60 | V4L2_STD_NTSC | V4L2_STD_NTSC_443) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_STD_625_50 (V4L2_STD_PAL | V4L2_STD_PAL_N | V4L2_STD_PAL_Nc | V4L2_STD_SECAM) +#define V4L2_STD_ATSC (V4L2_STD_ATSC_8_VSB | V4L2_STD_ATSC_16_VSB) +#define V4L2_STD_UNKNOWN 0 +#define V4L2_STD_ALL (V4L2_STD_525_60 | V4L2_STD_625_50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_standard { + __u32 index; + v4l2_std_id id; + __u8 name[24]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_fract frameperiod; + __u32 framelines; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_dv_preset { + __u32 preset; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_dv_enum_preset { + __u32 index; + __u32 preset; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 width; + __u32 height; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_INVALID 0 +#define V4L2_DV_480P59_94 1 +#define V4L2_DV_576P50 2 +#define V4L2_DV_720P24 3 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P25 4 +#define V4L2_DV_720P30 5 +#define V4L2_DV_720P50 6 +#define V4L2_DV_720P59_94 7 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P60 8 +#define V4L2_DV_1080I29_97 9 +#define V4L2_DV_1080I30 10 +#define V4L2_DV_1080I25 11 +/* WARNING: DO 
NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080I50 12 +#define V4L2_DV_1080I60 13 +#define V4L2_DV_1080P24 14 +#define V4L2_DV_1080P25 15 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P30 16 +#define V4L2_DV_1080P50 17 +#define V4L2_DV_1080P60 18 +#define V4L2_DV_480P60 19 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080I59_94 20 +#define V4L2_DV_1080P59_94 21 +#define V4L2_DV_720P60_FP 22 +#define V4L2_DV_720P60_SB_HALF 23 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P60_TB 24 +#define V4L2_DV_720P59_94_FP 25 +#define V4L2_DV_720P59_94_SB_HALF 26 +#define V4L2_DV_720P59_94_TB 27 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P50_FP 28 +#define V4L2_DV_720P50_SB_HALF 29 +#define V4L2_DV_720P50_TB 30 +#define V4L2_DV_1080P24_FP 31 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P24_SB_HALF 32 +#define V4L2_DV_1080P24_TB 33 +#define V4L2_DV_1080P23_98_FP 34 +#define V4L2_DV_1080P23_98_SB_HALF 35 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P23_98_TB 36 +#define V4L2_DV_1080I60_SB_HALF 37 +#define V4L2_DV_1080I59_94_SB_HALF 38 +#define V4L2_DV_1080I50_SB_HALF 39 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P60_SB_HALF 40 +#define V4L2_DV_1080P60_TB 41 +#define V4L2_DV_1080P30_FP 42 +#define V4L2_DV_1080P30_SB_HALF 43 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P30_TB 44 +struct v4l2_bt_timings { + __u32 width; + __u32 height; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 interlaced; + __u32 polarities; + __u64 pixelclock; + __u32 hfrontporch; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 hsync; + __u32 hbackporch; + __u32 vfrontporch; + __u32 vsync; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 vbackporch; + __u32 il_vfrontporch; + __u32 il_vsync; + __u32 il_vbackporch; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[16]; +} __attribute__((packed)); +#define V4L2_DV_PROGRESSIVE 0 +#define V4L2_DV_INTERLACED 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_VSYNC_POS_POL 0x00000001 +#define V4L2_DV_HSYNC_POS_POL 0x00000002 +struct v4l2_dv_timings { + __u32 type; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct v4l2_bt_timings bt; + __u32 reserved[32]; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_DV_BT_656_1120 0 +struct v4l2_input { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[32]; + __u32 type; + __u32 audioset; + __u32 tuner; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + v4l2_std_id std; + __u32 status; + __u32 capabilities; + __u32 reserved[3]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_INPUT_TYPE_TUNER 1 +#define V4L2_INPUT_TYPE_CAMERA 2 +#define V4L2_IN_ST_NO_POWER 0x00000001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_NO_SIGNAL 0x00000002 
+#define V4L2_IN_ST_NO_COLOR 0x00000004 +#define V4L2_IN_ST_HFLIP 0x00000010 +#define V4L2_IN_ST_VFLIP 0x00000020 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_NO_H_LOCK 0x00000100 +#define V4L2_IN_ST_COLOR_KILL 0x00000200 +#define V4L2_IN_ST_NO_SYNC 0x00010000 +#define V4L2_IN_ST_NO_EQU 0x00020000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_ST_NO_CARRIER 0x00040000 +#define V4L2_IN_ST_MACROVISION 0x01000000 +#define V4L2_IN_ST_NO_ACCESS 0x02000000 +#define V4L2_IN_ST_VTR 0x04000000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_IN_CAP_PRESETS 0x00000001 +#define V4L2_IN_CAP_CUSTOM_TIMINGS 0x00000002 +#define V4L2_IN_CAP_STD 0x00000004 +struct v4l2_output { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u8 name[32]; + __u32 type; + __u32 audioset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 modulator; + v4l2_std_id std; + __u32 capabilities; + __u32 reserved[3]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_OUTPUT_TYPE_MODULATOR 1 +#define V4L2_OUTPUT_TYPE_ANALOG 2 +#define V4L2_OUTPUT_TYPE_ANALOGVGAOVERLAY 3 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_OUT_CAP_PRESETS 0x00000001 +#define V4L2_OUT_CAP_CUSTOM_TIMINGS 0x00000002 +#define V4L2_OUT_CAP_STD 0x00000004 +struct v4l2_control { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 id; + __s32 value; +}; +struct v4l2_ext_control { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 id; + __u32 size; + __u32 reserved2[1]; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 value; + __s64 value64; + char * string; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_ext_controls { + __u32 ctrl_class; + __u32 count; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 error_idx; + __u32 reserved[2]; + struct v4l2_ext_control * controls; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_USER 0x00980000 +#define V4L2_CTRL_CLASS_MPEG 0x00990000 +#define V4L2_CTRL_CLASS_CAMERA 0x009a0000 +#define V4L2_CTRL_CLASS_FM_TX 0x009b0000 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_CLASS_FLASH 0x009c0000 +#define V4L2_CTRL_CLASS_JPEG 0x009d0000 +#define V4L2_CTRL_CLASS_CODEC 0x009e0000 +#define V4L2_CTRL_ID_MASK (0x0fffffff) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL) +#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000) +enum v4l2_ctrl_type { + V4L2_CTRL_TYPE_INTEGER = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_BOOLEAN = 2, + V4L2_CTRL_TYPE_MENU = 3, + V4L2_CTRL_TYPE_BUTTON = 4, + V4L2_CTRL_TYPE_INTEGER64 = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CTRL_TYPE_CTRL_CLASS = 6, + V4L2_CTRL_TYPE_STRING = 7, + V4L2_CTRL_TYPE_BITMASK = 8, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_queryctrl { + __u32 id; + enum v4l2_ctrl_type type; + __u8 name[32]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - 
SEE TOP FOR INSTRUCTIONS */ + __s32 minimum; + __s32 maximum; + __s32 step; + __s32 default_value; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 reserved[2]; +}; +struct v4l2_querymenu { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 id; + __u32 index; + __u8 name[32]; + __u32 reserved; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CTRL_FLAG_DISABLED 0x0001 +#define V4L2_CTRL_FLAG_GRABBED 0x0002 +#define V4L2_CTRL_FLAG_READ_ONLY 0x0004 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_UPDATE 0x0008 +#define V4L2_CTRL_FLAG_INACTIVE 0x0010 +#define V4L2_CTRL_FLAG_SLIDER 0x0020 +#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CTRL_FLAG_VOLATILE 0x0080 +#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000 +#define V4L2_CID_MAX_CTRLS 1024 +#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_USER_BASE V4L2_CID_BASE +#define V4L2_CID_PRIVATE_BASE 0x08000000 +#define V4L2_CID_USER_CLASS (V4L2_CTRL_CLASS_USER | 1) +#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE + 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CONTRAST (V4L2_CID_BASE + 1) +#define V4L2_CID_SATURATION (V4L2_CID_BASE + 2) +#define V4L2_CID_HUE (V4L2_CID_BASE + 3) +#define V4L2_CID_AUDIO_VOLUME (V4L2_CID_BASE + 5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_BALANCE (V4L2_CID_BASE + 6) +#define V4L2_CID_AUDIO_BASS (V4L2_CID_BASE + 7) +#define V4L2_CID_AUDIO_TREBLE (V4L2_CID_BASE + 8) +#define V4L2_CID_AUDIO_MUTE (V4L2_CID_BASE + 9) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_LOUDNESS (V4L2_CID_BASE + 10) +#define V4L2_CID_BLACK_LEVEL (V4L2_CID_BASE + 11) +#define V4L2_CID_AUTO_WHITE_BALANCE (V4L2_CID_BASE + 12) +#define V4L2_CID_DO_WHITE_BALANCE (V4L2_CID_BASE + 13) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RED_BALANCE (V4L2_CID_BASE + 14) +#define V4L2_CID_BLUE_BALANCE (V4L2_CID_BASE + 15) +#define V4L2_CID_GAMMA (V4L2_CID_BASE + 16) +#define V4L2_CID_WHITENESS (V4L2_CID_GAMMA) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_EXPOSURE (V4L2_CID_BASE + 17) +#define V4L2_CID_AUTOGAIN (V4L2_CID_BASE + 18) +#define V4L2_CID_GAIN (V4L2_CID_BASE + 19) +#define V4L2_CID_HFLIP (V4L2_CID_BASE + 20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_VFLIP (V4L2_CID_BASE + 21) +#define V4L2_CID_HCENTER (V4L2_CID_BASE + 22) +#define V4L2_CID_VCENTER (V4L2_CID_BASE + 23) +#define V4L2_CID_POWER_LINE_FREQUENCY (V4L2_CID_BASE + 24) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_power_line_frequency { + V4L2_CID_POWER_LINE_FREQUENCY_DISABLED = 0, + V4L2_CID_POWER_LINE_FREQUENCY_50HZ = 1, + V4L2_CID_POWER_LINE_FREQUENCY_60HZ = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_CID_POWER_LINE_FREQUENCY_AUTO = 3, +}; +#define V4L2_CID_HUE_AUTO (V4L2_CID_BASE + 25) +#define V4L2_CID_WHITE_BALANCE_TEMPERATURE (V4L2_CID_BASE + 26) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_SHARPNESS 
(V4L2_CID_BASE + 27) +#define V4L2_CID_BACKLIGHT_COMPENSATION (V4L2_CID_BASE + 28) +#define V4L2_CID_CHROMA_AGC (V4L2_CID_BASE + 29) +#define V4L2_CID_COLOR_KILLER (V4L2_CID_BASE + 30) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_COLORFX (V4L2_CID_BASE + 31) +enum v4l2_colorfx { + V4L2_COLORFX_NONE = 0, + V4L2_COLORFX_BW = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_SEPIA = 2, + V4L2_COLORFX_NEGATIVE = 3, + V4L2_COLORFX_EMBOSS = 4, + V4L2_COLORFX_SKETCH = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_SKY_BLUE = 6, + V4L2_COLORFX_GRASS_GREEN = 7, + V4L2_COLORFX_SKIN_WHITEN = 8, + V4L2_COLORFX_VIVID = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_AQUA = 10, + V4L2_COLORFX_ART_FREEZE = 11, + V4L2_COLORFX_SILHOUETTE = 12, + V4L2_COLORFX_SOLARIZATION = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_COLORFX_ANTIQUE = 14, + V4L2_COLORFX_SET_CBCR = 15, +}; +#define V4L2_CID_AUTOBRIGHTNESS (V4L2_CID_BASE + 32) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_BAND_STOP_FILTER (V4L2_CID_BASE + 33) +#define V4L2_CID_ROTATE (V4L2_CID_BASE + 34) +#define V4L2_CID_BG_COLOR (V4L2_CID_BASE + 35) +#define V4L2_CID_CHROMA_GAIN (V4L2_CID_BASE + 36) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ILLUMINATORS_1 (V4L2_CID_BASE + 37) +#define V4L2_CID_ILLUMINATORS_2 (V4L2_CID_BASE + 38) +#define V4L2_CID_MIN_BUFFERS_FOR_CAPTURE (V4L2_CID_BASE + 39) +#define V4L2_CID_MIN_BUFFERS_FOR_OUTPUT (V4L2_CID_BASE + 40) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ALPHA_COMPONENT (V4L2_CID_BASE + 41) +#define V4L2_CID_COLORFX_CBCR (V4L2_CID_BASE + 42) +#define V4L2_CID_LASTP1 (V4L2_CID_BASE + 43) +#define V4L2_CID_MPEG_BASE (V4L2_CTRL_CLASS_MPEG | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CLASS (V4L2_CTRL_CLASS_MPEG | 1) +#define V4L2_CID_MPEG_STREAM_TYPE (V4L2_CID_MPEG_BASE + 0) +enum v4l2_mpeg_stream_type { + V4L2_MPEG_STREAM_TYPE_MPEG2_PS = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_STREAM_TYPE_MPEG2_TS = 1, + V4L2_MPEG_STREAM_TYPE_MPEG1_SS = 2, + V4L2_MPEG_STREAM_TYPE_MPEG2_DVD = 3, + V4L2_MPEG_STREAM_TYPE_MPEG1_VCD = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_STREAM_TYPE_MPEG2_SVCD = 5, +}; +#define V4L2_CID_MPEG_STREAM_PID_PMT (V4L2_CID_MPEG_BASE + 1) +#define V4L2_CID_MPEG_STREAM_PID_AUDIO (V4L2_CID_MPEG_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_STREAM_PID_VIDEO (V4L2_CID_MPEG_BASE + 3) +#define V4L2_CID_MPEG_STREAM_PID_PCR (V4L2_CID_MPEG_BASE + 4) +#define V4L2_CID_MPEG_STREAM_PES_ID_AUDIO (V4L2_CID_MPEG_BASE + 5) +#define V4L2_CID_MPEG_STREAM_PES_ID_VIDEO (V4L2_CID_MPEG_BASE + 6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_STREAM_VBI_FMT (V4L2_CID_MPEG_BASE + 7) +enum v4l2_mpeg_stream_vbi_fmt { + V4L2_MPEG_STREAM_VBI_FMT_NONE = 0, + V4L2_MPEG_STREAM_VBI_FMT_IVTV = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_SAMPLING_FREQ (V4L2_CID_MPEG_BASE + 100) +enum v4l2_mpeg_audio_sampling_freq { + 
V4L2_MPEG_AUDIO_SAMPLING_FREQ_44100 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_SAMPLING_FREQ_48000 = 1, + V4L2_MPEG_AUDIO_SAMPLING_FREQ_32000 = 2, +}; +#define V4L2_CID_MPEG_AUDIO_ENCODING (V4L2_CID_MPEG_BASE + 101) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_encoding { + V4L2_MPEG_AUDIO_ENCODING_LAYER_1 = 0, + V4L2_MPEG_AUDIO_ENCODING_LAYER_2 = 1, + V4L2_MPEG_AUDIO_ENCODING_LAYER_3 = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_ENCODING_AAC = 3, + V4L2_MPEG_AUDIO_ENCODING_AC3 = 4, +}; +#define V4L2_CID_MPEG_AUDIO_L1_BITRATE (V4L2_CID_MPEG_BASE + 102) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_l1_bitrate { + V4L2_MPEG_AUDIO_L1_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_L1_BITRATE_64K = 1, + V4L2_MPEG_AUDIO_L1_BITRATE_96K = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_128K = 3, + V4L2_MPEG_AUDIO_L1_BITRATE_160K = 4, + V4L2_MPEG_AUDIO_L1_BITRATE_192K = 5, + V4L2_MPEG_AUDIO_L1_BITRATE_224K = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_256K = 7, + V4L2_MPEG_AUDIO_L1_BITRATE_288K = 8, + V4L2_MPEG_AUDIO_L1_BITRATE_320K = 9, + V4L2_MPEG_AUDIO_L1_BITRATE_352K = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L1_BITRATE_384K = 11, + V4L2_MPEG_AUDIO_L1_BITRATE_416K = 12, + V4L2_MPEG_AUDIO_L1_BITRATE_448K = 13, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_L2_BITRATE (V4L2_CID_MPEG_BASE + 103) +enum v4l2_mpeg_audio_l2_bitrate { + V4L2_MPEG_AUDIO_L2_BITRATE_32K = 0, + V4L2_MPEG_AUDIO_L2_BITRATE_48K = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_56K = 2, + V4L2_MPEG_AUDIO_L2_BITRATE_64K = 3, + V4L2_MPEG_AUDIO_L2_BITRATE_80K = 4, + V4L2_MPEG_AUDIO_L2_BITRATE_96K = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_112K = 6, + V4L2_MPEG_AUDIO_L2_BITRATE_128K = 7, + V4L2_MPEG_AUDIO_L2_BITRATE_160K = 8, + V4L2_MPEG_AUDIO_L2_BITRATE_192K = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L2_BITRATE_224K = 10, + V4L2_MPEG_AUDIO_L2_BITRATE_256K = 11, + V4L2_MPEG_AUDIO_L2_BITRATE_320K = 12, + V4L2_MPEG_AUDIO_L2_BITRATE_384K = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_L3_BITRATE (V4L2_CID_MPEG_BASE + 104) +enum v4l2_mpeg_audio_l3_bitrate { + V4L2_MPEG_AUDIO_L3_BITRATE_32K = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_40K = 1, + V4L2_MPEG_AUDIO_L3_BITRATE_48K = 2, + V4L2_MPEG_AUDIO_L3_BITRATE_56K = 3, + V4L2_MPEG_AUDIO_L3_BITRATE_64K = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_80K = 5, + V4L2_MPEG_AUDIO_L3_BITRATE_96K = 6, + V4L2_MPEG_AUDIO_L3_BITRATE_112K = 7, + V4L2_MPEG_AUDIO_L3_BITRATE_128K = 8, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_L3_BITRATE_160K = 9, + V4L2_MPEG_AUDIO_L3_BITRATE_192K = 10, + V4L2_MPEG_AUDIO_L3_BITRATE_224K = 11, + V4L2_MPEG_AUDIO_L3_BITRATE_256K = 12, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + 
V4L2_MPEG_AUDIO_L3_BITRATE_320K = 13, +}; +#define V4L2_CID_MPEG_AUDIO_MODE (V4L2_CID_MPEG_BASE + 105) +enum v4l2_mpeg_audio_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_MODE_STEREO = 0, + V4L2_MPEG_AUDIO_MODE_JOINT_STEREO = 1, + V4L2_MPEG_AUDIO_MODE_DUAL = 2, + V4L2_MPEG_AUDIO_MODE_MONO = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_AUDIO_MODE_EXTENSION (V4L2_CID_MPEG_BASE + 106) +enum v4l2_mpeg_audio_mode_extension { + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_4 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_8 = 1, + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_12 = 2, + V4L2_MPEG_AUDIO_MODE_EXTENSION_BOUND_16 = 3, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_EMPHASIS (V4L2_CID_MPEG_BASE + 107) +enum v4l2_mpeg_audio_emphasis { + V4L2_MPEG_AUDIO_EMPHASIS_NONE = 0, + V4L2_MPEG_AUDIO_EMPHASIS_50_DIV_15_uS = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_EMPHASIS_CCITT_J17 = 2, +}; +#define V4L2_CID_MPEG_AUDIO_CRC (V4L2_CID_MPEG_BASE + 108) +enum v4l2_mpeg_audio_crc { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_CRC_NONE = 0, + V4L2_MPEG_AUDIO_CRC_CRC16 = 1, +}; +#define V4L2_CID_MPEG_AUDIO_MUTE (V4L2_CID_MPEG_BASE + 109) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_AAC_BITRATE (V4L2_CID_MPEG_BASE + 110) +#define V4L2_CID_MPEG_AUDIO_AC3_BITRATE (V4L2_CID_MPEG_BASE + 111) +enum v4l2_mpeg_audio_ac3_bitrate { + V4L2_MPEG_AUDIO_AC3_BITRATE_32K = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_40K = 1, + V4L2_MPEG_AUDIO_AC3_BITRATE_48K = 2, + V4L2_MPEG_AUDIO_AC3_BITRATE_56K = 3, + V4L2_MPEG_AUDIO_AC3_BITRATE_64K = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_80K = 5, + V4L2_MPEG_AUDIO_AC3_BITRATE_96K = 6, + V4L2_MPEG_AUDIO_AC3_BITRATE_112K = 7, + V4L2_MPEG_AUDIO_AC3_BITRATE_128K = 8, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_160K = 9, + V4L2_MPEG_AUDIO_AC3_BITRATE_192K = 10, + V4L2_MPEG_AUDIO_AC3_BITRATE_224K = 11, + V4L2_MPEG_AUDIO_AC3_BITRATE_256K = 12, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_320K = 13, + V4L2_MPEG_AUDIO_AC3_BITRATE_384K = 14, + V4L2_MPEG_AUDIO_AC3_BITRATE_448K = 15, + V4L2_MPEG_AUDIO_AC3_BITRATE_512K = 16, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_AC3_BITRATE_576K = 17, + V4L2_MPEG_AUDIO_AC3_BITRATE_640K = 18, +}; +#define V4L2_CID_MPEG_AUDIO_DEC_PLAYBACK (V4L2_CID_MPEG_BASE + 112) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_audio_dec_playback { + V4L2_MPEG_AUDIO_DEC_PLAYBACK_AUTO = 0, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_STEREO = 1, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_LEFT = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_AUDIO_DEC_PLAYBACK_RIGHT = 3, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_MONO = 4, + V4L2_MPEG_AUDIO_DEC_PLAYBACK_SWAPPED_STEREO = 5, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_AUDIO_DEC_MULTILINGUAL_PLAYBACK (V4L2_CID_MPEG_BASE + 113) 
+#define V4L2_CID_MPEG_VIDEO_ENCODING (V4L2_CID_MPEG_BASE + 200) +enum v4l2_mpeg_video_encoding { + V4L2_MPEG_VIDEO_ENCODING_MPEG_1 = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_ENCODING_MPEG_2 = 1, + V4L2_MPEG_VIDEO_ENCODING_MPEG_4_AVC = 2, +}; +#define V4L2_CID_MPEG_VIDEO_ASPECT (V4L2_CID_MPEG_BASE + 201) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_aspect { + V4L2_MPEG_VIDEO_ASPECT_1x1 = 0, + V4L2_MPEG_VIDEO_ASPECT_4x3 = 1, + V4L2_MPEG_VIDEO_ASPECT_16x9 = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_ASPECT_221x100 = 3, +}; +#define V4L2_CID_MPEG_VIDEO_B_FRAMES (V4L2_CID_MPEG_BASE + 202) +#define V4L2_CID_MPEG_VIDEO_GOP_SIZE (V4L2_CID_MPEG_BASE + 203) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_GOP_CLOSURE (V4L2_CID_MPEG_BASE + 204) +#define V4L2_CID_MPEG_VIDEO_PULLDOWN (V4L2_CID_MPEG_BASE + 205) +#define V4L2_CID_MPEG_VIDEO_BITRATE_MODE (V4L2_CID_MPEG_BASE + 206) +enum v4l2_mpeg_video_bitrate_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_BITRATE_MODE_VBR = 0, + V4L2_MPEG_VIDEO_BITRATE_MODE_CBR = 1, +}; +#define V4L2_CID_MPEG_VIDEO_BITRATE (V4L2_CID_MPEG_BASE + 207) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_BITRATE_PEAK (V4L2_CID_MPEG_BASE + 208) +#define V4L2_CID_MPEG_VIDEO_TEMPORAL_DECIMATION (V4L2_CID_MPEG_BASE + 209) +#define V4L2_CID_MPEG_VIDEO_MUTE (V4L2_CID_MPEG_BASE + 210) +#define V4L2_CID_MPEG_VIDEO_MUTE_YUV (V4L2_CID_MPEG_BASE + 211) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE (V4L2_CID_MPEG_BASE + 212) +#define V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER (V4L2_CID_MPEG_BASE + 213) +#define V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB (V4L2_CID_MPEG_BASE + 214) +#define V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE (V4L2_CID_MPEG_BASE + 215) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEADER_MODE (V4L2_CID_MPEG_BASE + 216) +enum v4l2_mpeg_video_header_mode { + V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE = 0, + V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_MAX_REF_PIC (V4L2_CID_MPEG_BASE + 217) +#define V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE (V4L2_CID_MPEG_BASE + 218) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES (V4L2_CID_MPEG_BASE + 219) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB (V4L2_CID_MPEG_BASE + 220) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE (V4L2_CID_MPEG_BASE + 221) +enum v4l2_mpeg_video_multi_slice_mode { + V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_SINGLE = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_MB = 1, + V4L2_MPEG_VIDEO_MULTI_SICE_MODE_MAX_BYTES = 2, +}; +#define V4L2_CID_MPEG_VIDEO_VBV_SIZE (V4L2_CID_MPEG_BASE + 222) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_DEC_PTS (V4L2_CID_MPEG_BASE + 223) +#define V4L2_CID_MPEG_VIDEO_DEC_FRAME (V4L2_CID_MPEG_BASE + 224) +#define V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP (V4L2_CID_MPEG_BASE + 300) +#define V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP 
(V4L2_CID_MPEG_BASE + 301) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H263_B_FRAME_QP (V4L2_CID_MPEG_BASE + 302) +#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP (V4L2_CID_MPEG_BASE + 303) +#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP (V4L2_CID_MPEG_BASE + 304) +#define V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP (V4L2_CID_MPEG_BASE + 350) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP (V4L2_CID_MPEG_BASE + 351) +#define V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP (V4L2_CID_MPEG_BASE + 352) +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP (V4L2_CID_MPEG_BASE + 353) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP (V4L2_CID_MPEG_BASE + 354) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM (V4L2_CID_MPEG_BASE + 355) +#define V4L2_CID_MPEG_VIDEO_H264_CPB_SIZE (V4L2_CID_MPEG_BASE + 356) +#define V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE (V4L2_CID_MPEG_BASE + 357) +enum v4l2_mpeg_video_h264_entropy_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CAVLC = 0, + V4L2_MPEG_VIDEO_H264_ENTROPY_MODE_CABAC = 1, +}; +#define V4L2_CID_MPEG_VIDEO_H264_I_PERIOD (V4L2_CID_MPEG_BASE + 358) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_LEVEL (V4L2_CID_MPEG_BASE + 359) +enum v4l2_mpeg_video_h264_level { + V4L2_MPEG_VIDEO_H264_LEVEL_1_0 = 0, + V4L2_MPEG_VIDEO_H264_LEVEL_1B = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_1_1 = 2, + V4L2_MPEG_VIDEO_H264_LEVEL_1_2 = 3, + V4L2_MPEG_VIDEO_H264_LEVEL_1_3 = 4, + V4L2_MPEG_VIDEO_H264_LEVEL_2_0 = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_2_1 = 6, + V4L2_MPEG_VIDEO_H264_LEVEL_2_2 = 7, + V4L2_MPEG_VIDEO_H264_LEVEL_3_0 = 8, + V4L2_MPEG_VIDEO_H264_LEVEL_3_1 = 9, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_3_2 = 10, + V4L2_MPEG_VIDEO_H264_LEVEL_4_0 = 11, + V4L2_MPEG_VIDEO_H264_LEVEL_4_1 = 12, + V4L2_MPEG_VIDEO_H264_LEVEL_4_2 = 13, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LEVEL_5_0 = 14, + V4L2_MPEG_VIDEO_H264_LEVEL_5_1 = 15, +}; +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA (V4L2_CID_MPEG_BASE + 360) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA (V4L2_CID_MPEG_BASE + 361) +#define V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE (V4L2_CID_MPEG_BASE + 362) +enum v4l2_mpeg_video_h264_loop_filter_mode { + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_ENABLED = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED = 1, + V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY = 2, +}; +#define V4L2_CID_MPEG_VIDEO_H264_PROFILE (V4L2_CID_MPEG_BASE + 363) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_h264_profile { + V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE = 0, + V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE = 1, + V4L2_MPEG_VIDEO_H264_PROFILE_MAIN = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED = 3, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH = 4, + 
V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10 = 5, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422 = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE = 7, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA = 8, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA = 9, + V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA = 10, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA = 11, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE = 12, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH = 13, + V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA = 14, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH = 15, + V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH = 16, +}; +#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT (V4L2_CID_MPEG_BASE + 364) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH (V4L2_CID_MPEG_BASE + 365) +#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE (V4L2_CID_MPEG_BASE + 366) +#define V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC (V4L2_CID_MPEG_BASE + 367) +enum v4l2_mpeg_video_h264_vui_sar_idc { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_UNSPECIFIED = 0, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_1x1 = 1, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_12x11 = 2, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_10x11 = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_16x11 = 4, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_40x33 = 5, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_24x11 = 6, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_20x11 = 7, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_32x11 = 8, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_80x33 = 9, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_18x11 = 10, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_15x11 = 11, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_64x33 = 12, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_160x99 = 13, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_4x3 = 14, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_3x2 = 15, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_2x1 = 16, + V4L2_MPEG_VIDEO_H264_VUI_SAR_IDC_EXTENDED = 17, +}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP (V4L2_CID_MPEG_BASE + 400) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP (V4L2_CID_MPEG_BASE + 401) +#define V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP (V4L2_CID_MPEG_BASE + 402) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP (V4L2_CID_MPEG_BASE + 403) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP (V4L2_CID_MPEG_BASE + 404) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL (V4L2_CID_MPEG_BASE + 405) +enum v4l2_mpeg_video_mpeg4_level { + V4L2_MPEG_VIDEO_MPEG4_LEVEL_0 = 0, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_0B = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_LEVEL_1 = 2, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_2 = 3, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_3 = 4, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_3B = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_LEVEL_4 = 6, + V4L2_MPEG_VIDEO_MPEG4_LEVEL_5 = 7, 
+}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE (V4L2_CID_MPEG_BASE + 406) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_mpeg4_profile { + V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE = 0, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_SIMPLE = 1, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_CORE = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_MPEG4_PROFILE_SIMPLE_SCALABLE = 3, + V4L2_MPEG_VIDEO_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY = 4, +}; +#define V4L2_CID_MPEG_VIDEO_MPEG4_QPEL (V4L2_CID_MPEG_BASE + 407) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_BASE (V4L2_CTRL_CLASS_MPEG | 0x1000) +#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE + 0) +enum v4l2_mpeg_cx2341x_video_spatial_filter_mode { + V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_MANUAL = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_SPATIAL_FILTER_MODE_AUTO = 1, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_SPATIAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE + 1) +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_cx2341x_video_luma_spatial_filter_type { + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_1D_VERT = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_HV_SEPARABLE = 3, + V4L2_MPEG_CX2341X_VIDEO_LUMA_SPATIAL_FILTER_TYPE_2D_SYM_NON_SEPARABLE = 4, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE + 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_cx2341x_video_chroma_spatial_filter_type { + V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_CHROMA_SPATIAL_FILTER_TYPE_1D_HOR = 1, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE (V4L2_CID_MPEG_CX2341X_BASE + 4) +enum v4l2_mpeg_cx2341x_video_temporal_filter_mode { + V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_MANUAL = 0, + V4L2_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER_MODE_AUTO = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_TEMPORAL_FILTER (V4L2_CID_MPEG_CX2341X_BASE + 5) +#define V4L2_CID_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE (V4L2_CID_MPEG_CX2341X_BASE + 6) +enum v4l2_mpeg_cx2341x_video_median_filter_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_OFF = 0, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR = 1, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_VERT = 2, + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_HOR_VERT = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_CX2341X_VIDEO_MEDIAN_FILTER_TYPE_DIAG = 4, +}; +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE + 7) +#define V4L2_CID_MPEG_CX2341X_VIDEO_LUMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE + 8) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_BOTTOM (V4L2_CID_MPEG_CX2341X_BASE + 9) +#define V4L2_CID_MPEG_CX2341X_VIDEO_CHROMA_MEDIAN_FILTER_TOP (V4L2_CID_MPEG_CX2341X_BASE + 10) +#define V4L2_CID_MPEG_CX2341X_STREAM_INSERT_NAV_PACKETS (V4L2_CID_MPEG_CX2341X_BASE + 11) +#define V4L2_CID_MPEG_MFC51_BASE (V4L2_CTRL_CLASS_MPEG | 0x1100) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY (V4L2_CID_MPEG_MFC51_BASE + 0) +#define V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY_ENABLE (V4L2_CID_MPEG_MFC51_BASE + 1) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE (V4L2_CID_MPEG_MFC51_BASE + 2) +enum v4l2_mpeg_mfc51_video_frame_skip_mode { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED = 0, + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT = 1, + V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT = 2, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE (V4L2_CID_MPEG_MFC51_BASE + 3) +enum v4l2_mpeg_mfc51_video_force_frame_type { + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_DISABLED = 0, + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_I_FRAME = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE_NOT_CODED = 2, +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING (V4L2_CID_MPEG_MFC51_BASE + 4) +#define V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV (V4L2_CID_MPEG_MFC51_BASE + 5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT (V4L2_CID_MPEG_MFC51_BASE + 6) +#define V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF (V4L2_CID_MPEG_MFC51_BASE + 7) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC51_BASE + 50) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC51_BASE + 51) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC51_BASE + 52) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC51_BASE + 53) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P (V4L2_CID_MPEG_MFC51_BASE + 54) +#define V4L2_CID_CAMERA_CLASS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CAMERA_CLASS (V4L2_CTRL_CLASS_CAMERA | 1) +#define V4L2_CID_EXPOSURE_AUTO (V4L2_CID_CAMERA_CLASS_BASE + 1) +enum v4l2_exposure_auto_type { + V4L2_EXPOSURE_AUTO = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_EXPOSURE_MANUAL = 1, + V4L2_EXPOSURE_SHUTTER_PRIORITY = 2, + V4L2_EXPOSURE_APERTURE_PRIORITY = 3 +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_EXPOSURE_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE + 2) +#define V4L2_CID_EXPOSURE_AUTO_PRIORITY (V4L2_CID_CAMERA_CLASS_BASE + 3) +#define V4L2_CID_PAN_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE + 4) +#define V4L2_CID_TILT_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE + 5) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PAN_RESET (V4L2_CID_CAMERA_CLASS_BASE + 6) +#define V4L2_CID_TILT_RESET (V4L2_CID_CAMERA_CLASS_BASE + 7) +#define V4L2_CID_PAN_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE + 8) +#define V4L2_CID_TILT_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE + 9) 
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FOCUS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE + 10) +#define V4L2_CID_FOCUS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE + 11) +#define V4L2_CID_FOCUS_AUTO (V4L2_CID_CAMERA_CLASS_BASE + 12) +#define V4L2_CID_ZOOM_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE + 13) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_ZOOM_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE + 14) +#define V4L2_CID_ZOOM_CONTINUOUS (V4L2_CID_CAMERA_CLASS_BASE + 15) +#define V4L2_CID_PRIVACY (V4L2_CID_CAMERA_CLASS_BASE + 16) +#define V4L2_CID_IRIS_ABSOLUTE (V4L2_CID_CAMERA_CLASS_BASE + 17) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_IRIS_RELATIVE (V4L2_CID_CAMERA_CLASS_BASE + 18) +#define V4L2_CID_FM_TX_CLASS_BASE (V4L2_CTRL_CLASS_FM_TX | 0x900) +#define V4L2_CID_FM_TX_CLASS (V4L2_CTRL_CLASS_FM_TX | 1) +#define V4L2_CID_RDS_TX_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_RDS_TX_PI (V4L2_CID_FM_TX_CLASS_BASE + 2) +#define V4L2_CID_RDS_TX_PTY (V4L2_CID_FM_TX_CLASS_BASE + 3) +#define V4L2_CID_RDS_TX_PS_NAME (V4L2_CID_FM_TX_CLASS_BASE + 5) +#define V4L2_CID_RDS_TX_RADIO_TEXT (V4L2_CID_FM_TX_CLASS_BASE + 6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_LIMITER_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 64) +#define V4L2_CID_AUDIO_LIMITER_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 65) +#define V4L2_CID_AUDIO_LIMITER_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 66) +#define V4L2_CID_AUDIO_COMPRESSION_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 80) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_AUDIO_COMPRESSION_GAIN (V4L2_CID_FM_TX_CLASS_BASE + 81) +#define V4L2_CID_AUDIO_COMPRESSION_THRESHOLD (V4L2_CID_FM_TX_CLASS_BASE + 82) +#define V4L2_CID_AUDIO_COMPRESSION_ATTACK_TIME (V4L2_CID_FM_TX_CLASS_BASE + 83) +#define V4L2_CID_AUDIO_COMPRESSION_RELEASE_TIME (V4L2_CID_FM_TX_CLASS_BASE + 84) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_PILOT_TONE_ENABLED (V4L2_CID_FM_TX_CLASS_BASE + 96) +#define V4L2_CID_PILOT_TONE_DEVIATION (V4L2_CID_FM_TX_CLASS_BASE + 97) +#define V4L2_CID_PILOT_TONE_FREQUENCY (V4L2_CID_FM_TX_CLASS_BASE + 98) +#define V4L2_CID_TUNE_PREEMPHASIS (V4L2_CID_FM_TX_CLASS_BASE + 112) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_preemphasis { + V4L2_PREEMPHASIS_DISABLED = 0, + V4L2_PREEMPHASIS_50_uS = 1, + V4L2_PREEMPHASIS_75_uS = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_TUNE_POWER_LEVEL (V4L2_CID_FM_TX_CLASS_BASE + 113) +#define V4L2_CID_TUNE_ANTENNA_CAPACITOR (V4L2_CID_FM_TX_CLASS_BASE + 114) +#define V4L2_CID_FLASH_CLASS_BASE (V4L2_CTRL_CLASS_FLASH | 0x900) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_CLASS (V4L2_CTRL_CLASS_FLASH | 1) +#define V4L2_CID_FLASH_LED_MODE (V4L2_CID_FLASH_CLASS_BASE + 1) +enum v4l2_flash_led_mode { + V4L2_FLASH_LED_MODE_NONE, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_FLASH_LED_MODE_FLASH, + V4L2_FLASH_LED_MODE_TORCH, +}; +#define V4L2_CID_FLASH_STROBE_SOURCE (V4L2_CID_FLASH_CLASS_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_flash_strobe_source { + 
V4L2_FLASH_STROBE_SOURCE_SOFTWARE, + V4L2_FLASH_STROBE_SOURCE_EXTERNAL, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_STROBE (V4L2_CID_FLASH_CLASS_BASE + 3) +#define V4L2_CID_FLASH_STROBE_STOP (V4L2_CID_FLASH_CLASS_BASE + 4) +#define V4L2_CID_FLASH_STROBE_STATUS (V4L2_CID_FLASH_CLASS_BASE + 5) +#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_FLASH_CLASS_BASE + 6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 7) +#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 8) +#define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_FLASH_CLASS_BASE + 9) +#define V4L2_CID_FLASH_FAULT (V4L2_CID_FLASH_CLASS_BASE + 10) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FLASH_FAULT_OVER_VOLTAGE (1 << 0) +#define V4L2_FLASH_FAULT_TIMEOUT (1 << 1) +#define V4L2_FLASH_FAULT_OVER_TEMPERATURE (1 << 2) +#define V4L2_FLASH_FAULT_SHORT_CIRCUIT (1 << 3) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_FLASH_FAULT_OVER_CURRENT (1 << 4) +#define V4L2_FLASH_FAULT_INDICATOR (1 << 5) +#define V4L2_CID_FLASH_CHARGE (V4L2_CID_FLASH_CLASS_BASE + 11) +#define V4L2_CID_FLASH_READY (V4L2_CID_FLASH_CLASS_BASE + 12) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_JPEG_CLASS_BASE (V4L2_CTRL_CLASS_JPEG | 0x900) +#define V4L2_CID_JPEG_CLASS (V4L2_CTRL_CLASS_JPEG | 1) +#define V4L2_CID_JPEG_CHROMA_SUBSAMPLING (V4L2_CID_JPEG_CLASS_BASE + 1) +enum v4l2_jpeg_chroma_subsampling { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_JPEG_CHROMA_SUBSAMPLING_444 = 0, + V4L2_JPEG_CHROMA_SUBSAMPLING_422 = 1, + V4L2_JPEG_CHROMA_SUBSAMPLING_420 = 2, + V4L2_JPEG_CHROMA_SUBSAMPLING_411 = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_JPEG_CHROMA_SUBSAMPLING_410 = 4, + V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY = 5, +}; +#define V4L2_CID_JPEG_RESTART_INTERVAL (V4L2_CID_JPEG_CLASS_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_JPEG_COMPRESSION_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 3) +#define V4L2_CID_JPEG_ACTIVE_MARKER (V4L2_CID_JPEG_CLASS_BASE + 4) +#define V4L2_JPEG_ACTIVE_MARKER_APP0 (1 << 0) +#define V4L2_JPEG_ACTIVE_MARKER_APP1 (1 << 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_JPEG_ACTIVE_MARKER_COM (1 << 16) +#define V4L2_JPEG_ACTIVE_MARKER_DQT (1 << 17) +#define V4L2_JPEG_ACTIVE_MARKER_DHT (1 << 18) +struct v4l2_tuner { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u8 name[32]; + enum v4l2_tuner_type type; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangelow; + __u32 rangehigh; + __u32 rxsubchans; + __u32 audmode; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 signal; + __s32 afc; + __u32 reserved[4]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_modulator { + __u32 index; + __u8 name[32]; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 rangelow; + __u32 rangehigh; + __u32 txsubchans; + __u32 reserved[4]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_TUNER_CAP_LOW 0x0001 +#define V4L2_TUNER_CAP_NORM 0x0002 
+#define V4L2_TUNER_CAP_STEREO 0x0010 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_LANG2 0x0020 +#define V4L2_TUNER_CAP_SAP 0x0020 +#define V4L2_TUNER_CAP_LANG1 0x0040 +#define V4L2_TUNER_CAP_RDS 0x0080 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_CAP_RDS_BLOCK_IO 0x0100 +#define V4L2_TUNER_CAP_RDS_CONTROLS 0x0200 +#define V4L2_TUNER_SUB_MONO 0x0001 +#define V4L2_TUNER_SUB_STEREO 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_SUB_LANG2 0x0004 +#define V4L2_TUNER_SUB_SAP 0x0004 +#define V4L2_TUNER_SUB_LANG1 0x0008 +#define V4L2_TUNER_SUB_RDS 0x0010 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_MODE_MONO 0x0000 +#define V4L2_TUNER_MODE_STEREO 0x0001 +#define V4L2_TUNER_MODE_LANG2 0x0002 +#define V4L2_TUNER_MODE_SAP 0x0002 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_TUNER_MODE_LANG1 0x0003 +#define V4L2_TUNER_MODE_LANG1_LANG2 0x0004 +struct v4l2_frequency { + __u32 tuner; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum v4l2_tuner_type type; + __u32 frequency; + __u32 reserved[8]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_hw_freq_seek { + __u32 tuner; + enum v4l2_tuner_type type; + __u32 seek_upward; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 wrap_around; + __u32 spacing; + __u32 reserved[7]; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_rds_data { + __u8 lsb; + __u8 msb; + __u8 block; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_RDS_BLOCK_MSK 0x7 +#define V4L2_RDS_BLOCK_A 0 +#define V4L2_RDS_BLOCK_B 1 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_C 2 +#define V4L2_RDS_BLOCK_D 3 +#define V4L2_RDS_BLOCK_C_ALT 4 +#define V4L2_RDS_BLOCK_INVALID 7 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_RDS_BLOCK_CORRECTED 0x40 +#define V4L2_RDS_BLOCK_ERROR 0x80 +struct v4l2_audio { + __u32 index; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 name[32]; + __u32 capability; + __u32 mode; + __u32 reserved[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_AUDCAP_STEREO 0x00001 +#define V4L2_AUDCAP_AVL 0x00002 +#define V4L2_AUDMODE_AVL 0x00001 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_audioout { + __u32 index; + __u8 name[32]; + __u32 capability; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 mode; + __u32 reserved[2]; +}; +#define V4L2_ENC_IDX_FRAME_I (0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_IDX_FRAME_P (1) +#define V4L2_ENC_IDX_FRAME_B (2) +#define V4L2_ENC_IDX_FRAME_MASK (0xf) +struct v4l2_enc_idx_entry { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u64 offset; + __u64 pts; + __u32 length; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[2]; +}; +#define V4L2_ENC_IDX_ENTRIES (64) +struct v4l2_enc_idx { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 entries; + __u32 
entries_cap; + __u32 reserved[4]; + struct v4l2_enc_idx_entry entry[V4L2_ENC_IDX_ENTRIES]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_ENC_CMD_START (0) +#define V4L2_ENC_CMD_STOP (1) +#define V4L2_ENC_CMD_PAUSE (2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_ENC_CMD_RESUME (3) +#define V4L2_ENC_CMD_STOP_AT_GOP_END (1 << 0) +struct v4l2_encoder_cmd { + __u32 cmd; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + union { + struct { + __u32 data[8]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + } raw; + }; +}; +#define V4L2_DEC_CMD_START (0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_CMD_STOP (1) +#define V4L2_DEC_CMD_PAUSE (2) +#define V4L2_DEC_CMD_RESUME (3) +#define V4L2_DEC_CMD_START_MUTE_AUDIO (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_CMD_PAUSE_TO_BLACK (1 << 0) +#define V4L2_DEC_CMD_STOP_TO_BLACK (1 << 0) +#define V4L2_DEC_CMD_STOP_IMMEDIATELY (1 << 1) +#define V4L2_DEC_START_FMT_NONE (0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DEC_START_FMT_GOP (1) +struct v4l2_decoder_cmd { + __u32 cmd; + __u32 flags; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + union { + struct { + __u64 pts; + } stop; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct { + __s32 speed; + __u32 format; + } start; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct { + __u32 data[16]; + } raw; + }; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +struct v4l2_vbi_format { + __u32 sampling_rate; + __u32 offset; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 samples_per_line; + __u32 sample_format; + __s32 start[2]; + __u32 count[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 flags; + __u32 reserved[2]; +}; +#define V4L2_VBI_UNSYNC (1 << 0) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_VBI_INTERLACED (1 << 1) +struct v4l2_sliced_vbi_format { + __u16 service_set; + __u16 service_lines[2][24]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 io_size; + __u32 reserved[2]; +}; +#define V4L2_SLICED_TELETEXT_B (0x0001) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SLICED_VPS (0x0400) +#define V4L2_SLICED_CAPTION_525 (0x1000) +#define V4L2_SLICED_WSS_625 (0x4000) +#define V4L2_SLICED_VBI_525 (V4L2_SLICED_CAPTION_525) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_SLICED_VBI_625 (V4L2_SLICED_TELETEXT_B | V4L2_SLICED_VPS | V4L2_SLICED_WSS_625) +struct v4l2_sliced_vbi_cap { + __u16 service_set; + __u16 service_lines[2][24]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum v4l2_buf_type type; + __u32 reserved[3]; +}; +struct v4l2_sliced_vbi_data { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 id; + __u32 field; + __u32 line; + __u32 reserved; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 data[48]; +}; +#define V4L2_MPEG_VBI_IVTV_TELETEXT_B (1) +#define V4L2_MPEG_VBI_IVTV_CAPTION_525 (4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED 
CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VBI_IVTV_WSS_625 (5) +#define V4L2_MPEG_VBI_IVTV_VPS (7) +struct v4l2_mpeg_vbi_itv0_line { + __u8 id; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 data[42]; +} __attribute__((packed)); +struct v4l2_mpeg_vbi_itv0 { + __le32 linemask[2]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_mpeg_vbi_itv0_line line[35]; +} __attribute__((packed)); +struct v4l2_mpeg_vbi_ITV0 { + struct v4l2_mpeg_vbi_itv0_line line[36]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_MPEG_VBI_IVTV_MAGIC0 "itv0" +#define V4L2_MPEG_VBI_IVTV_MAGIC1 "ITV0" +struct v4l2_mpeg_vbi_fmt_ivtv { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u8 magic[4]; + union { + struct v4l2_mpeg_vbi_itv0 itv0; + struct v4l2_mpeg_vbi_ITV0 ITV0; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + }; +} __attribute__((packed)); +struct v4l2_plane_pix_format { + __u32 sizeimage; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u16 bytesperline; + __u16 reserved[7]; +} __attribute__((packed)); +struct v4l2_pix_format_mplane { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 width; + __u32 height; + __u32 pixelformat; + enum v4l2_field field; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + enum v4l2_colorspace colorspace; + struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES]; + __u8 num_planes; + __u8 reserved[11]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +struct v4l2_format { + enum v4l2_buf_type type; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_pix_format pix; + struct v4l2_pix_format_mplane pix_mp; + struct v4l2_window win; + struct v4l2_vbi_format vbi; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_sliced_vbi_format sliced; + __u8 raw_data[200]; + } fmt; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_streamparm { + enum v4l2_buf_type type; + union { + struct v4l2_captureparm capture; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_outputparm output; + __u8 raw_data[200]; + } parm; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_ALL 0 +#define V4L2_EVENT_VSYNC 1 +#define V4L2_EVENT_EOS 2 +#define V4L2_EVENT_CTRL 3 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_EVENT_FRAME_SYNC 4 +#define V4L2_EVENT_PRIVATE_START 0x08000000 +struct v4l2_event_vsync { + __u8 field; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +} __attribute__((packed)); +#define V4L2_EVENT_CTRL_CH_VALUE (1 << 0) +#define V4L2_EVENT_CTRL_CH_FLAGS (1 << 1) +struct v4l2_event_ctrl { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 changes; + __u32 type; + union { + __s32 value; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s64 value64; + }; + __u32 flags; + __s32 minimum; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __s32 maximum; + __s32 step; + __s32 default_value; +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct 
v4l2_event_frame_sync { + __u32 frame_sequence; +}; +struct v4l2_event { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + union { + struct v4l2_event_vsync vsync; + struct v4l2_event_ctrl ctrl; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + struct v4l2_event_frame_sync frame_sync; + __u8 data[64]; + } u; + __u32 pending; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 sequence; + struct timespec timestamp; + __u32 id; + __u32 reserved[8]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_EVENT_SUB_FL_SEND_INITIAL (1 << 0) +#define V4L2_EVENT_SUB_FL_ALLOW_FEEDBACK (1 << 1) +struct v4l2_event_subscription { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 type; + __u32 id; + __u32 flags; + __u32 reserved[5]; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CHIP_MATCH_HOST 0 +#define V4L2_CHIP_MATCH_I2C_DRIVER 1 +#define V4L2_CHIP_MATCH_I2C_ADDR 2 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CHIP_MATCH_AC97 3 +struct v4l2_dbg_match { + __u32 type; + union { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 addr; + char name[32]; + }; +} __attribute__((packed)); +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +struct v4l2_dbg_register { + struct v4l2_dbg_match match; + __u32 size; + __u64 reg; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u64 val; +} __attribute__((packed)); +struct v4l2_dbg_chip_ident { + struct v4l2_dbg_match match; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 ident; + __u32 revision; +} __attribute__((packed)); +struct v4l2_create_buffers { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 index; + __u32 count; + enum v4l2_memory memory; + struct v4l2_format format; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + __u32 reserved[8]; +}; +#define VIDIOC_QUERYCAP _IOR('V', 0, struct v4l2_capability) +#define VIDIOC_RESERVED _IO('V', 1) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_ENUM_FMT _IOWR('V', 2, struct v4l2_fmtdesc) +#define VIDIOC_G_FMT _IOWR('V', 4, struct v4l2_format) +#define VIDIOC_S_FMT _IOWR('V', 5, struct v4l2_format) +#define VIDIOC_REQBUFS _IOWR('V', 8, struct v4l2_requestbuffers) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QUERYBUF _IOWR('V', 9, struct v4l2_buffer) +#define VIDIOC_G_FBUF _IOR('V', 10, struct v4l2_framebuffer) +#define VIDIOC_S_FBUF _IOW('V', 11, struct v4l2_framebuffer) +#define VIDIOC_OVERLAY _IOW('V', 14, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_QBUF _IOWR('V', 15, struct v4l2_buffer) +#define VIDIOC_EXPBUF _IOWR('V', 16, struct v4l2_exportbuffer) +#define VIDIOC_DQBUF _IOWR('V', 17, struct v4l2_buffer) +#define VIDIOC_STREAMON _IOW('V', 18, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_STREAMOFF _IOW('V', 19, int) +#define VIDIOC_G_PARM _IOWR('V', 21, struct v4l2_streamparm) +#define VIDIOC_S_PARM _IOWR('V', 22, struct v4l2_streamparm) +#define VIDIOC_G_STD _IOR('V', 23, v4l2_std_id) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
VIDIOC_S_STD _IOW('V', 24, v4l2_std_id) +#define VIDIOC_ENUMSTD _IOWR('V', 25, struct v4l2_standard) +#define VIDIOC_ENUMINPUT _IOWR('V', 26, struct v4l2_input) +#define VIDIOC_G_CTRL _IOWR('V', 27, struct v4l2_control) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_CTRL _IOWR('V', 28, struct v4l2_control) +#define VIDIOC_G_TUNER _IOWR('V', 29, struct v4l2_tuner) +#define VIDIOC_S_TUNER _IOW('V', 30, struct v4l2_tuner) +#define VIDIOC_G_AUDIO _IOR('V', 33, struct v4l2_audio) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_AUDIO _IOW('V', 34, struct v4l2_audio) +#define VIDIOC_QUERYCTRL _IOWR('V', 36, struct v4l2_queryctrl) +#define VIDIOC_QUERYMENU _IOWR('V', 37, struct v4l2_querymenu) +#define VIDIOC_G_INPUT _IOR('V', 38, int) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_INPUT _IOWR('V', 39, int) +#define VIDIOC_G_OUTPUT _IOR('V', 46, int) +#define VIDIOC_S_OUTPUT _IOWR('V', 47, int) +#define VIDIOC_ENUMOUTPUT _IOWR('V', 48, struct v4l2_output) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_AUDOUT _IOR('V', 49, struct v4l2_audioout) +#define VIDIOC_S_AUDOUT _IOW('V', 50, struct v4l2_audioout) +#define VIDIOC_G_MODULATOR _IOWR('V', 54, struct v4l2_modulator) +#define VIDIOC_S_MODULATOR _IOW('V', 55, struct v4l2_modulator) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_FREQUENCY _IOWR('V', 56, struct v4l2_frequency) +#define VIDIOC_S_FREQUENCY _IOW('V', 57, struct v4l2_frequency) +#define VIDIOC_CROPCAP _IOWR('V', 58, struct v4l2_cropcap) +#define VIDIOC_G_CROP _IOWR('V', 59, struct v4l2_crop) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_CROP _IOW('V', 60, struct v4l2_crop) +#define VIDIOC_G_JPEGCOMP _IOR('V', 61, struct v4l2_jpegcompression) +#define VIDIOC_S_JPEGCOMP _IOW('V', 62, struct v4l2_jpegcompression) +#define VIDIOC_QUERYSTD _IOR('V', 63, v4l2_std_id) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_TRY_FMT _IOWR('V', 64, struct v4l2_format) +#define VIDIOC_ENUMAUDIO _IOWR('V', 65, struct v4l2_audio) +#define VIDIOC_ENUMAUDOUT _IOWR('V', 66, struct v4l2_audioout) +#define VIDIOC_G_PRIORITY _IOR('V', 67, enum v4l2_priority) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_PRIORITY _IOW('V', 68, enum v4l2_priority) +#define VIDIOC_G_SLICED_VBI_CAP _IOWR('V', 69, struct v4l2_sliced_vbi_cap) +#define VIDIOC_LOG_STATUS _IO('V', 70) +#define VIDIOC_G_EXT_CTRLS _IOWR('V', 71, struct v4l2_ext_controls) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_EXT_CTRLS _IOWR('V', 72, struct v4l2_ext_controls) +#define VIDIOC_TRY_EXT_CTRLS _IOWR('V', 73, struct v4l2_ext_controls) +#define VIDIOC_ENUM_FRAMESIZES _IOWR('V', 74, struct v4l2_frmsizeenum) +#define VIDIOC_ENUM_FRAMEINTERVALS _IOWR('V', 75, struct v4l2_frmivalenum) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_ENC_INDEX _IOR('V', 76, struct v4l2_enc_idx) +#define VIDIOC_ENCODER_CMD _IOWR('V', 77, struct v4l2_encoder_cmd) +#define VIDIOC_TRY_ENCODER_CMD _IOWR('V', 78, struct v4l2_encoder_cmd) +#define VIDIOC_DBG_S_REGISTER _IOW('V', 79, struct v4l2_dbg_register) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_DBG_G_REGISTER _IOWR('V', 80, 
struct v4l2_dbg_register) +#define VIDIOC_DBG_G_CHIP_IDENT _IOWR('V', 81, struct v4l2_dbg_chip_ident) +#define VIDIOC_S_HW_FREQ_SEEK _IOW('V', 82, struct v4l2_hw_freq_seek) +#define VIDIOC_ENUM_DV_PRESETS _IOWR('V', 83, struct v4l2_dv_enum_preset) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_S_DV_PRESET _IOWR('V', 84, struct v4l2_dv_preset) +#define VIDIOC_G_DV_PRESET _IOWR('V', 85, struct v4l2_dv_preset) +#define VIDIOC_QUERY_DV_PRESET _IOR('V', 86, struct v4l2_dv_preset) +#define VIDIOC_S_DV_TIMINGS _IOWR('V', 87, struct v4l2_dv_timings) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_G_DV_TIMINGS _IOWR('V', 88, struct v4l2_dv_timings) +#define VIDIOC_DQEVENT _IOR('V', 89, struct v4l2_event) +#define VIDIOC_SUBSCRIBE_EVENT _IOW('V', 90, struct v4l2_event_subscription) +#define VIDIOC_UNSUBSCRIBE_EVENT _IOW('V', 91, struct v4l2_event_subscription) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_CREATE_BUFS _IOWR('V', 92, struct v4l2_create_buffers) +#define VIDIOC_PREPARE_BUF _IOWR('V', 93, struct v4l2_buffer) +#define VIDIOC_G_SELECTION _IOWR('V', 94, struct v4l2_selection) +#define VIDIOC_S_SELECTION _IOWR('V', 95, struct v4l2_selection) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define VIDIOC_DECODER_CMD _IOWR('V', 96, struct v4l2_decoder_cmd) +#define VIDIOC_TRY_DECODER_CMD _IOWR('V', 97, struct v4l2_decoder_cmd) +#define BASE_VIDIOC_PRIVATE 192 +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ diff --git a/kernel-3.4-headers/videodev2_exynos_media.h b/kernel-3.4-headers/videodev2_exynos_media.h new file mode 100644 index 0000000..f0dd083 --- /dev/null +++ b/kernel-3.4-headers/videodev2_exynos_media.h @@ -0,0 +1,355 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_VIDEODEV2_EXYNOS_MEDIA_H +#define __LINUX_VIDEODEV2_EXYNOS_MEDIA_H +#define V4L2_PIX_FMT_RGB32X v4l2_fourcc('R', 'G', 'B', 'X') +#define V4L2_PIX_FMT_ARGB32 v4l2_fourcc('A', 'R', 'G', 'B') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV422V_2P v4l2_fourcc('Y', 'U', 'V', '2') +#define V4L2_PIX_FMT_YUV444_2P v4l2_fourcc('Y', 'U', '2', 'P') +#define V4L2_PIX_FMT_YVU444_2P v4l2_fourcc('Y', 'V', '2', 'P') +#define V4L2_PIX_FMT_YUV422V_3P v4l2_fourcc('Y', 'U', 'V', '3') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YUV444_3P v4l2_fourcc('Y', 'U', '3', 'P') +#define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') +#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') +#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_FIMV v4l2_fourcc('F', 'I', 'M', 'V') +#define V4L2_PIX_FMT_FIMV1 v4l2_fourcc('F', 'I', 'M', '1') +#define V4L2_PIX_FMT_FIMV2 v4l2_fourcc('F', 'I', 'M', '2') +#define V4L2_PIX_FMT_FIMV3 v4l2_fourcc('F', 'I', 'M', '3') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_FIMV4 v4l2_fourcc('F', 'I', 'M', '4') +#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') +#define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') +#define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_JPEG_444 v4l2_fourcc('J', 'P', 'G', '4') +#define V4L2_PIX_FMT_JPEG_422 v4l2_fourcc('J', 'P', 'G', '2') +#define V4L2_PIX_FMT_JPEG_420 v4l2_fourcc('J', 'P', 'G', '0') +#define V4L2_PIX_FMT_JPEG_GRAY v4l2_fourcc('J', 'P', 'G', 'G') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_JPEG_422V v4l2_fourcc('J', 'P', 'G', '5') +#define V4L2_PIX_FMT_JPEG_411 v4l2_fourcc('J', 'P', 'G', '1') +#define V4L2_PIX_FMT_NV12N v4l2_fourcc('N', 'N', '1', '2') +#define V4L2_PIX_FMT_NV12NT v4l2_fourcc('T', 'N', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_NV12N_10B v4l2_fourcc('B', 'N', '1', '2') +#define V4L2_PIX_FMT_YUV420N v4l2_fourcc('Y', 'N', '1', '2') +#ifndef __ALIGN_UP +#define __ALIGN_UP(x,a) (((x) + ((a) - 1)) & ~((a) - 1)) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#endif +#define NV12N_Y_SIZE(w,h) (__ALIGN_UP((w), 16) * __ALIGN_UP((h), 16) + 256) +#define NV12N_CBCR_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w), 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +#define NV12N_CBCR_BASE(base,w,h) ((base) + NV12N_Y_SIZE((w), (h))) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define NV12N_10B_Y_2B_SIZE(w,h) ((__ALIGN_UP((w) / 4, 16) * __ALIGN_UP((h), 16) + 64)) +#define NV12N_10B_CBCR_2B_SIZE(w,h) ((__ALIGN_UP((w) / 4, 16) * (__ALIGN_UP((h), 16) / 2) + 64)) +#define NV12N_10B_CBCR_BASE(base,w,h) ((base) + NV12N_Y_SIZE((w), (h)) + NV12N_10B_Y_2B_SIZE((w), (h))) +#define YUV420N_Y_SIZE(w,h) (__ALIGN_UP((w), 16) * __ALIGN_UP((h), 16) + 256) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define YUV420N_CB_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w) / 2, 16) * (__ALIGN_UP((h), 16) / 2) + 256), 
16)) +#define YUV420N_CR_SIZE(w,h) (__ALIGN_UP((__ALIGN_UP((w) / 2, 16) * (__ALIGN_UP((h), 16) / 2) + 256), 16)) +#define YUV420N_CB_BASE(base,w,h) ((base) + YUV420N_Y_SIZE((w), (h))) +#define YUV420N_CR_BASE(base,w,h) (YUV420N_CB_BASE((base), (w), (h)) + YUV420N_CB_SIZE((w), (h))) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_EXYNOS_BASE (V4L2_CTRL_CLASS_USER | 0x2000) +#define V4L2_CID_GLOBAL_ALPHA (V4L2_CID_EXYNOS_BASE + 1) +#define V4L2_CID_CACHEABLE (V4L2_CID_EXYNOS_BASE + 10) +#define V4L2_CID_CAM_JPEG_MEMSIZE (V4L2_CID_EXYNOS_BASE + 20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CAM_JPEG_ENCODEDSIZE (V4L2_CID_EXYNOS_BASE + 21) +#define V4L2_CID_JPEG_TABLE (V4L2_CID_EXYNOS_BASE + 22) +#define V4L2_CID_SET_SHAREABLE (V4L2_CID_EXYNOS_BASE + 40) +#define V4L2_CID_TV_LAYER_BLEND_ENABLE (V4L2_CID_EXYNOS_BASE + 50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_LAYER_BLEND_ALPHA (V4L2_CID_EXYNOS_BASE + 51) +#define V4L2_CID_TV_PIXEL_BLEND_ENABLE (V4L2_CID_EXYNOS_BASE + 52) +#define V4L2_CID_TV_CHROMA_ENABLE (V4L2_CID_EXYNOS_BASE + 53) +#define V4L2_CID_TV_CHROMA_VALUE (V4L2_CID_EXYNOS_BASE + 54) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_HPD_STATUS (V4L2_CID_EXYNOS_BASE + 55) +#define V4L2_CID_TV_LAYER_PRIO (V4L2_CID_EXYNOS_BASE + 56) +#define V4L2_CID_TV_SET_DVI_MODE (V4L2_CID_EXYNOS_BASE + 57) +#define V4L2_CID_TV_GET_DVI_MODE (V4L2_CID_EXYNOS_BASE + 58) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_SET_ASPECT_RATIO (V4L2_CID_EXYNOS_BASE + 59) +#define V4L2_CID_TV_MAX_AUDIO_CHANNELS (V4L2_CID_EXYNOS_BASE + 60) +#define V4L2_CID_TV_ENABLE_HDMI_AUDIO (V4L2_CID_EXYNOS_BASE + 61) +#define V4L2_CID_TV_SET_NUM_CHANNELS (V4L2_CID_EXYNOS_BASE + 62) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_UPDATE (V4L2_CID_EXYNOS_BASE + 63) +#define V4L2_CID_TV_SET_COLOR_RANGE (V4L2_CID_EXYNOS_BASE + 64) +#define V4L2_CID_TV_HDCP_ENABLE (V4L2_CID_EXYNOS_BASE + 65) +#define V4L2_CID_TV_HDMI_STATUS (V4L2_CID_EXYNOS_BASE + 66) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_TV_SOURCE_PHY_ADDR (V4L2_CID_EXYNOS_BASE + 67) +#define V4L2_CID_TV_BLANK (V4L2_CID_EXYNOS_BASE + 68) +#define V4L2_CID_CSC_EQ_MODE (V4L2_CID_EXYNOS_BASE + 100) +#define V4L2_CID_CSC_EQ (V4L2_CID_EXYNOS_BASE + 101) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_CSC_RANGE (V4L2_CID_EXYNOS_BASE + 102) +#define V4L2_CID_M2M_CTX_NUM (V4L2_CID_EXYNOS_BASE + 200) +#define V4L2_CID_CONTENT_PROTECTION (V4L2_CID_EXYNOS_BASE + 201) +#define V4L2_CID_2D_BLEND_OP (V4L2_CID_EXYNOS_BASE + 103) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_COLOR_FILL (V4L2_CID_EXYNOS_BASE + 104) +#define V4L2_CID_2D_DITH (V4L2_CID_EXYNOS_BASE + 105) +#define V4L2_CID_2D_FMT_PREMULTI (V4L2_CID_EXYNOS_BASE + 106) +#define V4L2_CID_2D_SRC_COLOR (V4L2_CID_EXYNOS_BASE + 107) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_SRC_COLOR (V4L2_CID_EXYNOS_BASE + 107) +#define V4L2_CID_2D_CLIP (V4L2_CID_EXYNOS_BASE + 108) +#define V4L2_CID_2D_SCALE_WIDTH (V4L2_CID_EXYNOS_BASE + 109) +#define V4L2_CID_2D_SCALE_HEIGHT (V4L2_CID_EXYNOS_BASE + 110) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE 
TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_REPEAT (V4L2_CID_EXYNOS_BASE + 111) +#define V4L2_CID_2D_SCALE_MODE (V4L2_CID_EXYNOS_BASE + 112) +#define V4L2_CID_2D_BLUESCREEN (V4L2_CID_EXYNOS_BASE + 113) +#define V4L2_CID_2D_BG_COLOR (V4L2_CID_EXYNOS_BASE + 114) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_2D_BS_COLOR (V4L2_CID_EXYNOS_BASE + 115) +#define V4L2_CID_M2M_CTX_NUM (V4L2_CID_EXYNOS_BASE + 200) +#define V4L2_CID_CONTENT_PROTECTION (V4L2_CID_EXYNOS_BASE + 201) +#define V4L2_CID_MPEG_MFC_BASE (V4L2_CTRL_CLASS_MPEG | 0x2000) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_AVAIL (V4L2_CID_MPEG_MFC_BASE + 1) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRGMENT_ID (V4L2_CID_MPEG_MFC_BASE + 2) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_INFO (V4L2_CID_MPEG_MFC_BASE + 3) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_GRID_POS (V4L2_CID_MPEG_MFC_BASE + 4) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_PACKED_PB (V4L2_CID_MPEG_MFC_BASE + 5) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG (V4L2_CID_MPEG_MFC_BASE + 6) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_ENABLE (V4L2_CID_MPEG_MFC_BASE + 7) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_LUMA (V4L2_CID_MPEG_MFC_BASE + 8) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_CHROMA (V4L2_CID_MPEG_MFC_BASE + 9) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_LUMA_BOT (V4L2_CID_MPEG_MFC_BASE + 10) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_CHROMA_BOT (V4L2_CID_MPEG_MFC_BASE + 11) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_GENERATED (V4L2_CID_MPEG_MFC_BASE + 12) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE (V4L2_CID_MPEG_MFC_BASE + 13) +#define V4L2_CID_MPEG_MFC51_VIDEO_DISPLAY_STATUS (V4L2_CID_MPEG_MFC_BASE + 14) +#define V4L2_CID_MPEG_MFC51_VIDEO_LUMA_ADDR (V4L2_CID_MPEG_MFC_BASE + 15) +#define V4L2_CID_MPEG_MFC51_VIDEO_CHROMA_ADDR (V4L2_CID_MPEG_MFC_BASE + 16) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_STREAM_SIZE (V4L2_CID_MPEG_MFC_BASE + 17) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_COUNT (V4L2_CID_MPEG_MFC_BASE + 18) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TYPE (V4L2_CID_MPEG_MFC_BASE + 19) +enum v4l2_mpeg_mfc51_video_frame_type { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_NOT_CODED = 0, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_I_FRAME = 1, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_P_FRAME = 2, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_B_FRAME = 3, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_SKIPPED = 4, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_OTHERS = 5, +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_INTERLACE (V4L2_CID_MPEG_MFC_BASE + 20) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 21) +#define V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_TIME_RES (V4L2_CID_MPEG_MFC_BASE + 22) +#define V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_FRM_DELTA (V4L2_CID_MPEG_MFC_BASE + 23) +#define V4L2_CID_MPEG_MFC51_VIDEO_H263_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 24) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC6X_VIDEO_FRAME_DELTA 
(V4L2_CID_MPEG_MFC_BASE + 25) +#define V4L2_CID_MPEG_MFC51_VIDEO_I_PERIOD_CH V4L2_CID_MPEG_VIDEO_GOP_SIZE +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE_CH V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE +#define V4L2_CID_MPEG_MFC51_VIDEO_BIT_RATE_CH V4L2_CID_MPEG_VIDEO_BITRATE +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VIDEO_MPEG4_LEVEL_6 8 +#define V4L2_MPEG_VIDEO_HEADER_MODE_AT_THE_READY 2 +#define V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_S_B V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY +#define V4L2_CID_MPEG_VIDEO_H264_MVC_VIEW_ID (V4L2_CID_MPEG_MFC_BASE + 42) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_STATUS (V4L2_CID_MPEG_MFC_BASE + 43) +#define V4L2_CID_MPEG_MFC51_VIDEO_I_FRAME_DECODING (V4L2_CID_MPEG_MFC_BASE + 44) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 45) +#define V4L2_CID_MPEG_VIDEO_H264_PREPEND_SPSPPS_TO_IDR (V4L2_CID_MPEG_MFC_BASE + 46) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_DECODER_IMMEDIATE_DISPLAY (V4L2_CID_MPEG_MFC_BASE + 47) +#define V4L2_CID_MPEG_VIDEO_DECODER_DECODING_TIMESTAMP_MODE (V4L2_CID_MPEG_MFC_BASE + 48) +#define V4L2_CID_MPEG_VIDEO_DECODER_WAIT_DECODING_START (V4L2_CID_MPEG_MFC_BASE + 49) +#define V4L2_CID_MPEG_VIDEO_QOS_RATIO (V4L2_CID_MPEG_MFC_BASE + 50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 51) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 52) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 53) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 54) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 55) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT3 (V4L2_CID_MPEG_MFC_BASE + 56) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT4 (V4L2_CID_MPEG_MFC_BASE + 57) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT5 (V4L2_CID_MPEG_MFC_BASE + 58) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT6 (V4L2_CID_MPEG_MFC_BASE + 59) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_VERSION (V4L2_CID_MPEG_MFC_BASE + 60) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 61) +#define V4L2_CID_MPEG_VIDEO_VP8_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 62) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP8_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 63) +#define V4L2_CID_MPEG_VIDEO_VP8_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 64) +#define V4L2_CID_MPEG_VIDEO_VP8_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 65) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_OF_PARTITIONS (V4L2_CID_MPEG_MFC_BASE + 66) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_LEVEL (V4L2_CID_MPEG_MFC_BASE + 67) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_SHARPNESS (V4L2_CID_MPEG_MFC_BASE + 68) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_GOLDEN_FRAMESEL (V4L2_CID_MPEG_MFC_BASE + 69) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_GF_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 
70) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 71) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER0 (V4L2_CID_MPEG_MFC_BASE + 72) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER1 (V4L2_CID_MPEG_MFC_BASE + 73) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER2 (V4L2_CID_MPEG_MFC_BASE + 74) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 75) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_DISABLE_INTRA_MD4X4 (V4L2_CID_MPEG_MFC_BASE + 76) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_TEMPORAL_LAYER (V4L2_CID_MPEG_MFC_BASE + 77) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 78) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 79) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 80) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 81) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 82) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_GET_VERSION_INFO (V4L2_CID_MPEG_MFC_BASE + 91) +#define V4L2_CID_MPEG_MFC_GET_EXTRA_BUFFER_SIZE (V4L2_CID_MPEG_MFC_BASE + 92) +#define V4L2_CID_MPEG_MFC_SET_DUAL_DPB_MODE (V4L2_CID_MPEG_MFC_BASE + 93) +#define V4L2_CID_MPEG_MFC_SET_DYNAMIC_DPB_MODE (V4L2_CID_MPEG_MFC_BASE + 95) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_SET_USER_SHARED_HANDLE (V4L2_CID_MPEG_MFC_BASE + 96) +#define V4L2_CID_MPEG_MFC_GET_EXT_INFO (V4L2_CID_MPEG_MFC_BASE + 97) +#define V4L2_CID_MPEG_MFC_SET_BUF_PROCESS_TYPE (V4L2_CID_MPEG_MFC_BASE + 98) +#define V4L2_CID_MPEG_MFC_GET_10BIT_INFO (V4L2_CID_MPEG_MFC_BASE + 99) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_H264_ENABLE_LTR (V4L2_CID_MPEG_MFC_BASE + 100) +#define V4L2_CID_MPEG_MFC_H264_MARK_LTR (V4L2_CID_MPEG_MFC_BASE + 101) +#define V4L2_CID_MPEG_MFC_H264_USE_LTR (V4L2_CID_MPEG_MFC_BASE + 102) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB_ROW (V4L2_CID_MPEG_MFC_BASE + 103) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_H264_BASE_PRIORITY (V4L2_CID_MPEG_MFC_BASE + 104) +#define V4L2_CID_MPEG_MFC_CONFIG_QP (V4L2_CID_MPEG_MFC_BASE + 105) +#define V4L2_CID_MPEG_MFC_H264_VUI_RESTRICTION_ENABLE (V4L2_CID_MPEG_MFC_BASE + 106) +#define V4L2_CID_MPEG_MFC_GET_DRIVER_INFO (V4L2_CID_MPEG_MFC_BASE + 107) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 110) +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 111) +#define V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 112) +#define V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 113) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 114) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 115) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_MFC_BASE + 116) +#define 
V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 117) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 118) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 119) +#define V4L2_CID_MPEG_VIDEO_HEVC_PROFILE (V4L2_CID_MPEG_MFC_BASE + 120) +#define V4L2_CID_MPEG_VIDEO_HEVC_LEVEL (V4L2_CID_MPEG_MFC_BASE + 121) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 122) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TIER_FLAG (V4L2_CID_MPEG_MFC_BASE + 123) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_PARTITION_DEPTH (V4L2_CID_MPEG_MFC_BASE + 124) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 125) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_DISABLE (V4L2_CID_MPEG_MFC_BASE + 126) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_SLICE_BOUNDARY (V4L2_CID_MPEG_MFC_BASE + 127) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_BETA_OFFSET_DIV2 (V4L2_CID_MPEG_MFC_BASE + 128) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_TC_OFFSET_DIV2 (V4L2_CID_MPEG_MFC_BASE + 129) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_TYPE (V4L2_CID_MPEG_MFC_BASE + 130) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 131) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LOSSLESS_CU_ENABLE (V4L2_CID_MPEG_MFC_BASE + 132) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_CONST_INTRA_PRED_ENABLE (V4L2_CID_MPEG_MFC_BASE + 133) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WAVEFRONT_ENABLE (V4L2_CID_MPEG_MFC_BASE + 134) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LTR_ENABLE (V4L2_CID_MPEG_MFC_BASE + 135) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_USER_REF (V4L2_CID_MPEG_MFC_BASE + 136) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STORE_REF (V4L2_CID_MPEG_MFC_BASE + 137) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIGN_DATA_HIDING (V4L2_CID_MPEG_MFC_BASE + 138) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_GENERAL_PB_ENABLE (V4L2_CID_MPEG_MFC_BASE + 139) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TEMPORAL_ID_ENABLE (V4L2_CID_MPEG_MFC_BASE + 140) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STRONG_SMOTHING_FLAG (V4L2_CID_MPEG_MFC_BASE + 141) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_NUM_MERGE_MV_MINUS1 (V4L2_CID_MPEG_MFC_BASE + 142) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC_BASE + 143) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC_BASE + 144) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC_BASE + 145) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC_BASE + 146) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_INTRA_PU_SPLIT (V4L2_CID_MPEG_MFC_BASE + 147) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_TMV_PREDICTION (V4L2_CID_MPEG_MFC_BASE + 148) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WITHOUT_STARTCODE_ENABLE (V4L2_CID_MPEG_MFC_BASE + 149) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE 
- SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_QP_INDEX_CR (V4L2_CID_MPEG_MFC_BASE + 150) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_QP_INDEX_CB (V4L2_CID_MPEG_MFC_BASE + 151) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIZE_OF_LENGTH_FIELD (V4L2_CID_MPEG_MFC_BASE + 152) +#define V4L2_CID_MPEG_VIDEO_VP9_VERSION (V4L2_CID_MPEG_MFC_BASE + 163) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 164) +#define V4L2_CID_MPEG_VIDEO_VP9_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 165) +#define V4L2_CID_MPEG_VIDEO_VP9_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 166) +#define V4L2_CID_MPEG_VIDEO_VP9_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 167) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 168) +#define V4L2_CID_MPEG_VIDEO_VP9_GOLDEN_FRAMESEL (V4L2_CID_MPEG_MFC_BASE + 169) +#define V4L2_CID_MPEG_VIDEO_VP9_GF_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 170) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHY_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 171) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 172) +#define V4L2_CID_MPEG_VIDEO_VP9_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 173) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 174) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 175) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 176) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 177) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 178) +#define V4L2_CID_MPEG_VIDEO_VP9_MAX_PARTITION_DEPTH (V4L2_CID_MPEG_MFC_BASE + 179) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_DISABLE_INTRA_PU_SPLIT (V4L2_CID_MPEG_MFC_BASE + 180) +#define V4L2_CID_MPEG_VIDEO_DISABLE_IVF_HEADER (V4L2_CID_MPEG_MFC_BASE + 181) +#define V4L2_CID_MPEG_VIDEO_MATRIX_COEFFICIENTS (V4L2_CID_MPEG_MFC_BASE + 192) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 201) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 202) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 203) +#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 204) +#define V4L2_CID_MPEG_VIDEO_VP8_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 205) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 206) +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 207) +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 208) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 209) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 210) +#define V4L2_CID_MPEG_VIDEO_VP8_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 211) +#define V4L2_CID_MPEG_VIDEO_VP9_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 212) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 213) +/* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 214) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 215) +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 216) +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 217) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 218) +#endif diff --git a/kernel-3.4-headers/videodev2_exynos_media_ext.h b/kernel-3.4-headers/videodev2_exynos_media_ext.h new file mode 100644 index 0000000..29efb33 --- /dev/null +++ b/kernel-3.4-headers/videodev2_exynos_media_ext.h @@ -0,0 +1,73 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_VIDEODEV2_EXYNOS_MEDIA_EXT_H +#define __LINUX_VIDEODEV2_EXYNOS_MEDIA_EXT_H +#define V4L2_CID_MPEG_VIDEO_VBV_DELAY (V4L2_CID_MPEG_MFC_BASE + 26) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING (V4L2_CID_MPEG_MFC_BASE + 27) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0 (V4L2_CID_MPEG_MFC_BASE + 28) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE (V4L2_CID_MPEG_MFC_BASE + 29) +enum v4l2_mpeg_video_h264_sei_fp_arrangement_type { + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_CHEKERBOARD = 0, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_COLUMN = 1, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_ROW = 2, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_SIDE_BY_SIDE = 3, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_TOP_BOTTOM = 4, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_TEMPORAL = 5, +}; +#define V4L2_CID_MPEG_VIDEO_H264_FMO (V4L2_CID_MPEG_MFC_BASE + 30) +#define V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE (V4L2_CID_MPEG_MFC_BASE + 31) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_h264_fmo_map_type { + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES = 0, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_SCATTERED_SLICES = 1, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_FOREGROUND_WITH_LEFT_OVER = 2, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_BOX_OUT = 3, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_RASTER_SCAN = 4, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_WIPE_SCAN = 5, + V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_EXPLICIT = 6, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define 
V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP (V4L2_CID_MPEG_MFC_BASE + 32) +#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION (V4L2_CID_MPEG_MFC_BASE + 33) +enum v4l2_mpeg_video_h264_fmo_change_dir { +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT = 0, + V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_LEFT = 1, +}; +#define V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE (V4L2_CID_MPEG_MFC_BASE + 34) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH (V4L2_CID_MPEG_MFC_BASE + 35) +#define V4L2_CID_MPEG_VIDEO_H264_ASO (V4L2_CID_MPEG_MFC_BASE + 36) +#define V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER (V4L2_CID_MPEG_MFC_BASE + 37) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING (V4L2_CID_MPEG_MFC_BASE + 38) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_MFC_BASE + 39) +enum v4l2_mpeg_video_h264_hierarchical_coding_type { + V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B = 0, + V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P = 1, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 40) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 41) +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ diff --git a/libaudio/Android.mk b/libaudio/Android.mk new file mode 100644 index 0000000..77a1727 --- /dev/null +++ b/libaudio/Android.mk @@ -0,0 +1,28 @@ +# +# Copyright 2014 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# This contains the module build definitions for the hardware-specific +# components for this device. +# +# As much as possible, those components should be built unconditionally, +# with device-specific names to avoid collisions, to avoid device-specific +# bitrot and build breakages. Building a component unconditionally does +# *not* include it on all devices, so it is safe even with hardware-specific +# components. + +LOCAL_PATH := $(call my-dir) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/libaudio/effecthal/Android.mk b/libaudio/effecthal/Android.mk new file mode 100644 index 0000000..77a1727 --- /dev/null +++ b/libaudio/effecthal/Android.mk @@ -0,0 +1,28 @@ +# +# Copyright 2014 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +# This contains the module build definitions for the hardware-specific +# components for this device. +# +# As much as possible, those components should be built unconditionally, +# with device-specific names to avoid collisions, to avoid device-specific +# bitrot and build breakages. Building a component unconditionally does +# *not* include it on all devices, so it is safe even with hardware-specific +# components. + +LOCAL_PATH := $(call my-dir) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/libaudio/effecthal/postprocessing/Android.mk b/libaudio/effecthal/postprocessing/Android.mk new file mode 100644 index 0000000..77a1727 --- /dev/null +++ b/libaudio/effecthal/postprocessing/Android.mk @@ -0,0 +1,28 @@ +# +# Copyright 2014 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# This contains the module build definitions for the hardware-specific +# components for this device. +# +# As much as possible, those components should be built unconditionally, +# with device-specific names to avoid collisions, to avoid device-specific +# bitrot and build breakages. Building a component unconditionally does +# *not* include it on all devices, so it is safe even with hardware-specific +# components. 
+ +LOCAL_PATH := $(call my-dir) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Android.mk b/libaudio/effecthal/postprocessing/aosp-effect/Android.mk new file mode 100644 index 0000000..54ac3d6 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Android.mk @@ -0,0 +1,53 @@ +ifeq ($(BOARD_USE_OFFLOAD_EFFECT),true) +LOCAL_PATH:= $(call my-dir) + +# music nxp offload bundle wrapper +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + Bundle/exynos_effectbundle.cpp + +LOCAL_C_INCLUDES += \ + external/tinyalsa/include \ + $(LOCAL_PATH)/Bundle \ + $(call include-path-for, audio-effects) + +LOCAL_CFLAGS += -fvisibility=hidden + +LOCAL_MODULE:= libhwnxpbundlewrapper + +LOCAL_MODULE_RELATIVE_PATH := soundfx + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libdl \ + libtinyalsa + +include $(BUILD_SHARED_LIBRARY) + +# music nxp offload reverb wrapper +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + Reverb/exynos_effectReverb.cpp + +LOCAL_CFLAGS += -fvisibility=hidden + +LOCAL_MODULE:= libhwnxpreverbwrapper + +LOCAL_MODULE_RELATIVE_PATH := soundfx + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libdl \ + libtinyalsa + +LOCAL_C_INCLUDES += \ + external/tinyalsa/include \ + $(LOCAL_PATH)/Reverb \ + $(LOCAL_PATH)/Bundle \ + $(call include-path-for, audio-effects) + +include $(BUILD_SHARED_LIBRARY) +endif diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM.h b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM.h new file mode 100644 index 0000000..dcb1bd2 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM.h @@ -0,0 +1,628 @@ +/* + * Copyright (C) 2004-2010 NXP Software + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/****************************************************************************************/ +/* */ +/* Header file for the application layer interface of Concert Sound, Bass Enhancement, */ +/* Equalizer, Power Spectrum Analyzer, Trebble Enhancement and volume management */ +/* bundle. */ +/* */ +/* This files includes all definitions, types, structures and function */ +/* prototypes required by the calling layer. All other types, structures and */ +/* functions are private. */ +/* */ +/****************************************************************************************/ +/* */ +/* Note: 1 */ +/* ======= */ +/* The algorithm can execute either with separate input and output buffers or with */ +/* a common buffer, i.e. the data is processed in-place. */ +/* */ +/****************************************************************************************/ +/* */ +/* Note: 2 */ +/* ======= */ +/* Three data formats are support Stereo,Mono-In-Stereo and Mono. 
The data is */ +/* interleaved as follows: */ +/* */ +/* Byte Offset Stereo Input Mono-In-Stereo Input Mono Input */ +/* =========== ============ ==================== ============== */ +/* 0 Left Sample #1 Mono Sample #1 Mono Sample #1 */ +/* 2 Right Sample #1 Mono Sample #1 Mono Sample #2 */ +/* 4 Left Sample #2 Mono Sample #2 Mono Sample #3 */ +/* 6 Right Sample #2 Mono Sample #2 Mono Sample #4 */ +/* . . . . */ +/* . . . . */ +/* */ +/****************************************************************************************/ + +#ifndef __LVM_H__ +#define __LVM_H__ + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + + +/****************************************************************************************/ +/* */ +/* Includes */ +/* */ +/****************************************************************************************/ + +#include "LVM_Types.h" + + +/****************************************************************************************/ +/* */ +/* Definitions */ +/* */ +/****************************************************************************************/ + +/* Memory table*/ +#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */ + +/* Concert Sound effect level presets */ +#define LVM_CS_EFFECT_NONE 0 /* 0% effect, minimum value */ +#define LVM_CS_EFFECT_LOW 16384 /* 50% effect */ +#define LVM_CS_EFFECT_MED 24576 /* 75% effect */ +#define LVM_CS_EFFECT_HIGH 32767 /* 100% effect, maximum value */ + +/* Treble enhancement */ +#define LVM_TE_LOW_MIPS 32767 + +/* Bass enhancement effect level presets */ +#define LVM_BE_0DB 0 /* 0dB boost, no effect */ +#define LVM_BE_3DB 3 /* +3dB boost */ +#define LVM_BE_6DB 6 /* +6dB boost */ +#define LVM_BE_9DB 9 /* +9dB boost */ +#define LVM_BE_12DB 12 /* +12dB boost */ +#define LVM_BE_15DB 15 /* +15dB boost */ + +/* N-Band Equalizer */ +#define LVM_EQ_NBANDS 5 /* Number of bands for equalizer */ + +/* Headroom management */ +#define LVM_HEADROOM_MAX_NBANDS 5 + +/****************************************************************************************/ +/* */ +/* Types */ +/* */ +/****************************************************************************************/ + +/* Instance handle */ +typedef void *LVM_Handle_t; + + +/* Status return values */ +typedef enum +{ + LVM_SUCCESS = 0, /* Successful return from a routine */ + LVM_ALIGNMENTERROR = 1, /* Memory alignment error */ + LVM_NULLADDRESS = 2, /* NULL allocation address */ + LVM_OUTOFRANGE = 3, /* Out of range control parameter */ + LVM_INVALIDNUMSAMPLES = 4, /* Invalid number of samples */ + LVM_WRONGAUDIOTIME = 5, /* Wrong time value for audio time*/ + LVM_ALGORITHMDISABLED = 6, /* Algorithm is disabled*/ + LVM_ALGORITHMPSA = 7, /* Algorithm PSA returns an error */ + LVM_RETURNSTATUS_DUMMY = LVM_MAXENUM +} LVM_ReturnStatus_en; + + +/* Buffer Management mode */ +typedef enum +{ + LVM_MANAGED_BUFFERS = 0, + LVM_UNMANAGED_BUFFERS = 1, + LVM_BUFFERS_DUMMY = LVM_MAXENUM +} LVM_BufferMode_en; + +/* Output device type */ +typedef enum +{ + LVM_HEADPHONES = 0, + LVM_EX_HEADPHONES = 1, + LVM_SPEAKERTYPE_MAX = LVM_MAXENUM +} LVM_OutputDeviceType_en; + +/* Virtualizer mode selection*/ +typedef enum +{ + LVM_CONCERTSOUND = 0, + LVM_VIRTUALIZERTYPE_DUMMY = LVM_MAXENUM +} LVM_VirtualizerType_en; + +/* N-Band Equaliser operating mode */ +typedef enum +{ + LVM_EQNB_OFF = 0, + LVM_EQNB_ON = 1, + LVM_EQNB_DUMMY = LVM_MAXENUM +} LVM_EQNB_Mode_en; + +/* Bass Enhancement operating mode */ +typedef enum +{ + LVM_BE_OFF = 0, + LVM_BE_ON = 1, + LVM_BE_DUMMY = LVM_MAXENUM +} 
LVM_BE_Mode_en; + +/* Bass Enhancement centre frequency selection control */ +typedef enum +{ + LVM_BE_CENTRE_55Hz = 0, + LVM_BE_CENTRE_66Hz = 1, + LVM_BE_CENTRE_78Hz = 2, + LVM_BE_CENTRE_90Hz = 3, + LVM_BE_CENTRE_DUMMY = LVM_MAXENUM +} LVM_BE_CentreFreq_en; + +/* Bass Enhancement HPF selection control */ +typedef enum +{ + LVM_BE_HPF_OFF = 0, + LVM_BE_HPF_ON = 1, + LVM_BE_HPF_DUMMY = LVM_MAXENUM +} LVM_BE_FilterSelect_en; + +/* Volume Control operating mode */ +typedef enum +{ + LVM_VC_OFF = 0, + LVM_VC_ON = 1, + LVM_VC_DUMMY = LVM_MAXENUM +} LVM_VC_Mode_en; + +/* Treble Enhancement operating mode */ +typedef enum +{ + LVM_TE_OFF = 0, + LVM_TE_ON = 1, + LVM_TE_DUMMY = LVM_MAXENUM +} LVM_TE_Mode_en; + +/* Headroom management operating mode */ +typedef enum +{ + LVM_HEADROOM_OFF = 0, + LVM_HEADROOM_ON = 1, + LVM_Headroom_DUMMY = LVM_MAXENUM +} LVM_Headroom_Mode_en; + +typedef enum +{ + LVM_PSA_SPEED_SLOW, /* Peak decaying at slow speed */ + LVM_PSA_SPEED_MEDIUM, /* Peak decaying at medium speed */ + LVM_PSA_SPEED_FAST, /* Peak decaying at fast speed */ + LVM_PSA_SPEED_DUMMY = LVM_MAXENUM +} LVM_PSA_DecaySpeed_en; + +typedef enum +{ + LVM_PSA_OFF = 0, + LVM_PSA_ON = 1, + LVM_PSA_DUMMY = LVM_MAXENUM +} LVM_PSA_Mode_en; + +/* Version information */ +typedef struct +{ + LVM_CHAR *pVersionNumber; /* Pointer to the version number in the format X.YY.ZZ */ + LVM_CHAR *pPlatform; /* Pointer to the library platform type */ +} LVM_VersionInfo_st; + + +/****************************************************************************************/ +/* */ +/* Structures */ +/* */ +/****************************************************************************************/ + +/* Memory table containing the region definitions */ +typedef struct +{ + LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */ +} LVM_MemTab_t; + +/* N-Band equaliser band definition */ +typedef struct +{ + LVM_INT16 Gain; /* Band gain in dB */ + LVM_UINT16 Frequency; /* Band centre frequency in Hz */ + LVM_UINT16 QFactor; /* Band quality factor (x100) */ +} LVM_EQNB_BandDef_t; + + +/* Headroom band definition */ +typedef struct +{ + LVM_UINT16 Limit_Low; /* Low frequency limit of the band in Hertz */ + LVM_UINT16 Limit_High; /* High frequency limit of the band in Hertz */ + LVM_INT16 Headroom_Offset; /* Headroom = biggest band gain - Headroom_Offset */ +} LVM_HeadroomBandDef_t; + + +/* Control Parameter structure */ +typedef struct +{ + /* General parameters */ + LVM_Mode_en OperatingMode; /* Bundle operating mode On/Bypass */ + LVM_Fs_en SampleRate; /* Sample rate */ + LVM_Format_en SourceFormat; /* Input data format */ + LVM_OutputDeviceType_en SpeakerType; /* Output device type */ + + /* Concert Sound Virtualizer parameters*/ + LVM_Mode_en VirtualizerOperatingMode; /* Virtualizer operating mode On/Off */ + LVM_VirtualizerType_en VirtualizerType; /* Virtualizer type: ConcertSound */ + LVM_UINT16 VirtualizerReverbLevel; /* Virtualizer reverb level in % */ + LVM_INT16 CS_EffectLevel; /* Concert Sound effect level */ + + /* N-Band Equaliser parameters */ + LVM_EQNB_Mode_en EQNB_OperatingMode; /* N-Band Equaliser operating mode */ + LVM_UINT16 EQNB_NBands; /* Number of bands */ + LVM_EQNB_BandDef_t *pEQNB_BandDefinition; /* Pointer to equaliser definitions */ + + /* Bass Enhancement parameters */ + LVM_BE_Mode_en BE_OperatingMode; /* Bass Enhancement operating mode */ + LVM_INT16 BE_EffectLevel; /* Bass Enhancement effect level */ + LVM_BE_CentreFreq_en BE_CentreFreq; /* Bass Enhancement centre frequency */ 
+ LVM_BE_FilterSelect_en BE_HPF; /* Bass Enhancement high pass filter selector */ + + /* Volume Control parameters */ + LVM_INT16 VC_EffectLevel; /* Volume Control setting in dBs */ + LVM_INT16 VC_Balance; /* Left Right Balance control in dB (-96 to 96 dB), -ve values reduce + Right channel while +ve value reduces Left channel*/ + + /* Treble Enhancement parameters */ + LVM_TE_Mode_en TE_OperatingMode; /* Treble Enhancement On/Off */ + LVM_INT16 TE_EffectLevel; /* Treble Enhancement gain dBs */ + + /* Spectrum Analyzer parameters Control */ + LVM_PSA_Mode_en PSA_Enable; + LVM_PSA_DecaySpeed_en PSA_PeakDecayRate; /* Peak value decay rate*/ + +} LVM_ControlParams_t; + + +/* Instance Parameter structure */ +typedef struct +{ + /* General */ + LVM_BufferMode_en BufferMode; /* Buffer management mode */ + LVM_UINT16 MaxBlockSize; /* Maximum processing block size */ + + /* N-Band Equaliser */ + LVM_UINT16 EQNB_NumBands; /* Maximum number of equaliser bands */ + + /* PSA */ + LVM_PSA_Mode_en PSA_Included; /* Controls the instance memory allocation for PSA: ON/OFF */ +} LVM_InstParams_t; + +/* Headroom management parameter structure */ +typedef struct +{ + LVM_Headroom_Mode_en Headroom_OperatingMode; /* Headroom Control On/Off */ + LVM_HeadroomBandDef_t *pHeadroomDefinition; /* Pointer to headroom bands definition */ + LVM_UINT16 NHeadroomBands; /* Number of headroom bands */ + +} LVM_HeadroomParams_t; + +/****************************************************************************************/ +/* */ +/* Function Prototypes */ +/* */ +/****************************************************************************************/ + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_GetVersionInfo */ +/* */ +/* DESCRIPTION: */ +/* This function is used to retrieve information about the library's version. */ +/* */ +/* PARAMETERS: */ +/* pVersion Pointer to an empty version info structure */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS when pVersion is NULL */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_GetVersionInfo(LVM_VersionInfo_st *pVersion); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_GetMemoryTable */ +/* */ +/* DESCRIPTION: */ +/* This function is used for memory allocation and free. It can be called in */ +/* two ways: */ +/* */ +/* hInstance = NULL Returns the memory requirements */ +/* hInstance = Instance handle Returns the memory requirements and */ +/* allocated base addresses for the instance */ +/* */ +/* When this function is called for memory allocation (hInstance=NULL) the memory */ +/* base address pointers are NULL on return. */ +/* */ +/* When the function is called for free (hInstance = Instance Handle) the memory */ +/* table returns the allocated memory and base addresses used during initialisation. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance Handle */ +/* pMemoryTable Pointer to an empty memory definition table */ +/* pInstParams Pointer to the instance parameters */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS When one of pMemoryTable or pInstParams is NULL */ +/* LVM_OUTOFRANGE When any of the Instance parameters are out of range */ +/* */ +/* NOTES: */ +/* 1. 
This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_GetMemoryTable(LVM_Handle_t hInstance, + LVM_MemTab_t *pMemoryTable, + LVM_InstParams_t *pInstParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_GetInstanceHandle */ +/* */ +/* DESCRIPTION: */ +/* This function is used to create a bundle instance. It returns the created instance */ +/* handle through phInstance. All parameters are set to their default, inactive state. */ +/* */ +/* PARAMETERS: */ +/* phInstance pointer to the instance handle */ +/* pMemoryTable Pointer to the memory definition table */ +/* pInstParams Pointer to the instance parameters */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Initialisation succeeded */ +/* LVM_NULLADDRESS One or more memory has a NULL pointer */ +/* LVM_OUTOFRANGE When any of the Instance parameters are out of range */ +/* */ +/* NOTES: */ +/* 1. This function must not be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_GetInstanceHandle(LVM_Handle_t *phInstance, + LVM_MemTab_t *pMemoryTable, + LVM_InstParams_t *pInstParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_ClearAudioBuffers */ +/* */ +/* DESCRIPTION: */ +/* This function is used to clear the internal audio buffers of the bundle. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Initialisation succeeded */ +/* LVM_NULLADDRESS Instance memory has a NULL pointer */ +/* */ +/* NOTES: */ +/* 1. This function must not be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_ClearAudioBuffers(LVM_Handle_t hInstance); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_GetControlParameters */ +/* */ +/* DESCRIPTION: */ +/* Request the LifeVibes module parameters. The current parameter set is returned */ +/* via the parameter pointer. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* pParams Pointer to an empty parameter structure */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS when any of hInstance or pParams is NULL */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_GetControlParameters(LVM_Handle_t hInstance, + LVM_ControlParams_t *pParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_SetControlParameters */ +/* */ +/* DESCRIPTION: */ +/* Sets or changes the LifeVibes module parameters. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* pParams Pointer to a parameter structure */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS When hInstance, pParams or any control pointers are NULL */ +/* LVM_OUTOFRANGE When any of the control parameters are out of range */ +/* */ +/* NOTES: */ +/* 1. 
This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_SetControlParameters(LVM_Handle_t hInstance, + LVM_ControlParams_t *pParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_Process */ +/* */ +/* DESCRIPTION: */ +/* Process function for the LifeVibes module. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* pInData Pointer to the input data */ +/* pOutData Pointer to the output data */ +/* NumSamples Number of samples in the input buffer */ +/* AudioTime Audio Time of the current input data in milli-seconds */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_INVALIDNUMSAMPLES When the NumSamples is not a valied multiple in unmanaged */ +/* buffer mode */ +/* LVM_ALIGNMENTERROR When either the input our output buffers are not 32-bit */ +/* aligned in unmanaged mode */ +/* LVM_NULLADDRESS When one of hInstance, pInData or pOutData is NULL */ +/* */ +/* NOTES: */ +/* 1. The input and output buffers must be 32-bit aligned */ +/* 2. Number of samples is defined as follows: */ +/* MONO the number of samples in the block */ +/* MONOINSTEREO the number of sample pairs in the block */ +/* STEREO the number of sample pairs in the block */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_Process(LVM_Handle_t hInstance, + const LVM_INT16 *pInData, + LVM_INT16 *pOutData, + LVM_UINT16 NumSamples, + LVM_UINT32 AudioTime); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_SetHeadroomParams */ +/* */ +/* DESCRIPTION: */ +/* This function is used to set the automatic headroom management parameters. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance Handle */ +/* pHeadroomParams Pointer to headroom parameter structure */ +/* */ +/* RETURNS: */ +/* LVM_NULLADDRESS When hInstance or pHeadroomParams is NULL */ +/* LVM_SUCCESS Succeeded */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_SetHeadroomParams( LVM_Handle_t hInstance, + LVM_HeadroomParams_t *pHeadroomParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_GetHeadroomParams */ +/* */ +/* DESCRIPTION: */ +/* This function is used to get the automatic headroom management parameters. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance Handle */ +/* pHeadroomParams Pointer to headroom parameter structure (output) */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS When hInstance or pHeadroomParams are NULL */ +/* */ +/* NOTES: */ +/* 1. 
This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_GetHeadroomParams( LVM_Handle_t hInstance, + LVM_HeadroomParams_t *pHeadroomParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_GetSpectrum */ +/* */ +/* DESCRIPTION: */ +/* This function is used to retrieve Spectral information at a given Audio time */ +/* for display usage */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance Handle */ +/* pCurrentPeaks Pointer to location where currents peaks are to be saved */ +/* pPastPeaks Pointer to location where past peaks are to be saved */ +/* pCentreFreqs Pointer to location where centre frequency of each band is */ +/* to be saved */ +/* AudioTime Audio time at which the spectral information is needed */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS If any of input addresses are NULL */ +/* LVM_WRONGAUDIOTIME Failure due to audio time error */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_GetSpectrum( LVM_Handle_t hInstance, + LVM_UINT8 *pCurrentPeaks, + LVM_UINT8 *pPastPeaks, + LVM_INT32 AudioTime); + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVM_SetVolumeNoSmoothing */ +/* */ +/* DESCRIPTION: */ +/* This function is used to set output volume without any smoothing */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance Handle */ +/* pParams Control Parameters, only volume value is used here */ +/* */ +/* RETURNS: */ +/* LVM_SUCCESS Succeeded */ +/* LVM_NULLADDRESS If any of input addresses are NULL */ +/* LVM_OUTOFRANGE When any of the control parameters are out of range */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVM_Process function */ +/* */ +/****************************************************************************************/ +LVM_ReturnStatus_en LVM_SetVolumeNoSmoothing( LVM_Handle_t hInstance, + LVM_ControlParams_t *pParams); + + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* __LVM_H__ */ diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM_Types.h b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM_Types.h new file mode 100644 index 0000000..0c6fb25 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/LVM_Types.h @@ -0,0 +1,188 @@ +/* + * Copyright (C) 2004-2010 NXP Software + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/****************************************************************************************/ +/* */ +/* Header file defining the standard LifeVibes types for use in the application layer */ +/* interface of all LifeVibes modules */ +/* */ +/****************************************************************************************/ + +#ifndef LVM_TYPES_H +#define LVM_TYPES_H + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + +#include + +/****************************************************************************************/ +/* */ +/* definitions */ +/* */ +/****************************************************************************************/ + +#define LVM_NULL 0 /* NULL pointer */ + +#define LVM_TRUE 1 /* Booleans */ +#define LVM_FALSE 0 + +#define LVM_MAXINT_8 127 /* Maximum positive integer size */ +#define LVM_MAXINT_16 32767 +#define LVM_MAXINT_32 2147483647 +#define LVM_MAXENUM 2147483647 + +#define LVM_MODULEID_MASK 0xFF00 /* Mask to extract the calling module ID from callbackId */ +#define LVM_EVENTID_MASK 0x00FF /* Mask to extract the callback event from callbackId */ + +/* Memory table*/ +#define LVM_MEMREGION_PERSISTENT_SLOW_DATA 0 /* Offset to the instance memory region */ +#define LVM_MEMREGION_PERSISTENT_FAST_DATA 1 /* Offset to the persistent data memory region */ +#define LVM_MEMREGION_PERSISTENT_FAST_COEF 2 /* Offset to the persistent coefficient memory region */ +#define LVM_MEMREGION_TEMPORARY_FAST 3 /* Offset to temporary memory region */ + +#define LVM_NR_MEMORY_REGIONS 4 /* Number of memory regions */ + +/* Memory partition type */ +#define LVM_MEM_PARTITION0 0 /* 1st memory partition */ +#define LVM_MEM_PARTITION1 1 /* 2nd memory partition */ +#define LVM_MEM_PARTITION2 2 /* 3rd memory partition */ +#define LVM_MEM_PARTITION3 3 /* 4th memory partition */ + +/* Use type */ +#define LVM_MEM_PERSISTENT 0 /* Persistent memory type */ +#define LVM_MEM_SCRATCH 4 /* Scratch memory type */ + +/* Access type */ +#define LVM_MEM_INTERNAL 0 /* Internal (fast) access memory */ +#define LVM_MEM_EXTERNAL 8 /* External (slow) access memory */ + +/* Platform specific */ +#define LVM_PERSISTENT LVM_MEM_PARTITION0+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL +#define LVM_PERSISTENT_DATA LVM_MEM_PARTITION1+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL +#define LVM_PERSISTENT_COEF LVM_MEM_PARTITION2+LVM_MEM_PERSISTENT+LVM_MEM_INTERNAL +#define LVM_SCRATCH LVM_MEM_PARTITION3+LVM_MEM_SCRATCH+LVM_MEM_INTERNAL + +/****************************************************************************************/ +/* */ +/* Basic types */ +/* */ +/****************************************************************************************/ + +typedef char LVM_CHAR; /* ASCII character */ + +typedef int8_t LVM_INT8; /* Signed 8-bit word */ +typedef uint8_t LVM_UINT8; /* Unsigned 8-bit word */ + +typedef int16_t LVM_INT16; /* Signed 16-bit word */ +typedef uint16_t LVM_UINT16; /* Unsigned 16-bit word */ + +typedef int32_t LVM_INT32; /* Signed 32-bit word */ +typedef uint32_t LVM_UINT32; /* Unsigned 32-bit word */ + + +/****************************************************************************************/ +/* */ +/* Standard Enumerated types */ +/* */ +/****************************************************************************************/ + +/* Operating mode */ +typedef enum +{ + LVM_MODE_OFF = 0, + LVM_MODE_ON = 1, + LVM_MODE_DUMMY = LVM_MAXENUM +} LVM_Mode_en; + + +/* Format */ +typedef enum +{ + LVM_STEREO = 0, + LVM_MONOINSTEREO = 1, + LVM_MONO = 2, + LVM_SOURCE_DUMMY = LVM_MAXENUM +} 
LVM_Format_en; + + +/* LVM sampling rates */ +typedef enum +{ + LVM_FS_8000 = 0, + LVM_FS_11025 = 1, + LVM_FS_12000 = 2, + LVM_FS_16000 = 3, + LVM_FS_22050 = 4, + LVM_FS_24000 = 5, + LVM_FS_32000 = 6, + LVM_FS_44100 = 7, + LVM_FS_48000 = 8, + LVM_FS_INVALID = LVM_MAXENUM-1, + LVM_FS_DUMMY = LVM_MAXENUM +} LVM_Fs_en; + + +/* Memory Types */ +typedef enum +{ + LVM_PERSISTENT_SLOW_DATA = LVM_MEMREGION_PERSISTENT_SLOW_DATA, + LVM_PERSISTENT_FAST_DATA = LVM_MEMREGION_PERSISTENT_FAST_DATA, + LVM_PERSISTENT_FAST_COEF = LVM_MEMREGION_PERSISTENT_FAST_COEF, + LVM_TEMPORARY_FAST = LVM_MEMREGION_TEMPORARY_FAST, + LVM_MEMORYTYPE_DUMMY = LVM_MAXENUM +} LVM_MemoryTypes_en; + + +/* Memory region definition */ +typedef struct +{ + LVM_UINT32 Size; /* Region size in bytes */ + LVM_MemoryTypes_en Type; /* Region type */ + void *pBaseAddress; /* Pointer to the region base address */ +} LVM_MemoryRegion_st; + + +/* Memory table containing the region definitions */ +typedef struct +{ + LVM_MemoryRegion_st Region[LVM_NR_MEMORY_REGIONS]; /* One definition for each region */ +} LVM_MemoryTable_st; + + +/****************************************************************************************/ +/* */ +/* Standard Function Prototypes */ +/* */ +/****************************************************************************************/ +typedef LVM_INT32 (*LVM_Callback)(void *pCallbackData, /* Pointer to the callback data structure */ + void *pGeneralPurpose, /* General purpose pointer (e.g. to a data structure needed in the callback) */ + LVM_INT16 GeneralPurpose ); /* General purpose variable (e.g. to be used as callback ID) */ + + +/****************************************************************************************/ +/* */ +/* End of file */ +/* */ +/****************************************************************************************/ + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* LVM_TYPES_H */ diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.cpp b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.cpp new file mode 100644 index 0000000..c6ba240 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.cpp @@ -0,0 +1,3162 @@ +/* + * Copyright (C) 2010-2010 NXP Software + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "HW_Bundle" +#define ARRAY_SIZE(array) (sizeof array / sizeof array[0]) +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +#include +#include "exynos_effectbundle.h" +#include "math.h" + + +// effect_handle_t interface implementation for bass boost +extern "C" const struct effect_interface_s gLvmHwEffectInterface; + +#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\ + if (LvmStatus == LVM_NULLADDRESS){\ + ALOGV("\tLVM_ERROR : Parameter error - "\ + "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\ + }\ + if (LvmStatus == LVM_ALIGNMENTERROR){\ + ALOGV("\tLVM_ERROR : Parameter error - "\ + "bad alignment returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\ + }\ + if (LvmStatus == LVM_INVALIDNUMSAMPLES){\ + ALOGV("\tLVM_ERROR : Parameter error - "\ + "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\ + }\ + if (LvmStatus == LVM_OUTOFRANGE){\ + ALOGV("\tLVM_ERROR : Parameter error - "\ + "out of range returned by %s in %s\n", callingFunc, calledFunc);\ + }\ + } + +// Namespaces +namespace android { +namespace { + +// Flag to allow a one time init of global memory, only happens on first call ever +int LvmhwInitFlag = LVM_FALSE; +SessionContext HwGlobalSessionMemory[LVM_MAX_SESSIONS]; +int HwSessionIndex[LVM_MAX_SESSIONS]; +#define MIXER_CARD 0 +#define MIXER_CTL_NAME "NXP BDL data" +#define BUNDLE_PARAM_MAX 35 + +/* local functions */ +#define CHECK_ARG(cond) { \ + if (!(cond)) { \ + ALOGV("\tLVM_ERROR : Invalid argument: "#cond); \ + return -EINVAL; \ + } \ +} + +// NXP HW BassBoost UUID +const effect_descriptor_t gHwBassBoostDescriptor = { + {0x0634f220, 0xddd4, 0x11db, 0xa0fc, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, + {0x0acd3de0, 0x7b93, 0x11e5, 0xbbaf, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid + EFFECT_CONTROL_API_VERSION, + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_DEVICE_IND | EFFECT_FLAG_HW_ACC_TUNNEL + |EFFECT_FLAG_VOLUME_CTRL | EFFECT_FLAG_VOLUME_IND), + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Dynamic Bass Boost", + "Offload NXP Software Ltd.", +}; + +// NXP HW Virtualizer UUID +const effect_descriptor_t gHwVirtualizerDescriptor = { + {0x37cc2c00, 0xdddd, 0x11db, 0x8577, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + {0xc09d2040, 0x7b93, 0x11e5, 0x8e60, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + EFFECT_CONTROL_API_VERSION, + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_HW_ACC_TUNNEL + |EFFECT_FLAG_VOLUME_CTRL | EFFECT_FLAG_VOLUME_IND), + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Virtualizer", + "Offload NXP Software Ltd.", +}; + +// NXP HW Equalizer UUID +const effect_descriptor_t gHwEqualizerDescriptor = { + {0x0bed4300, 0xddd6, 0x11db, 0x8f34, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type + {0xd35a7d40, 0x7b93, 0x11e5, 0x8aae, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid Eq NXP + EFFECT_CONTROL_API_VERSION, + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_HW_ACC_TUNNEL + |EFFECT_FLAG_VOLUME_CTRL | EFFECT_FLAG_VOLUME_IND), + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Equalizer", + "Offload NXP Software Ltd.", +}; + +// NXP HW Volume UUID +const effect_descriptor_t gHwVolumeDescriptor = { + {0x09e8ede0, 0xddde, 0x11db, 0xb4f6, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, + {0xf3bb1040, 0x7b93, 0x11e5, 0x9930, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b }}, //uuid VOL NXP + EFFECT_CONTROL_API_VERSION, + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_HW_ACC_TUNNEL + |EFFECT_FLAG_VOLUME_CTRL | 
EFFECT_FLAG_VOLUME_IND), + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offloadd Volume", + "Offload NXP Software Ltd.", +}; + +//--- local function prototypes +void LvmGlobalBundle_init (void); +int LvmBundle_init (EffectContext *pContext); +int LvmEffect_enable (EffectContext *pContext); +int LvmEffect_disable (EffectContext *pContext); +int Effect_setConfig (EffectContext *pContext, effect_config_t *pConfig); +void Effect_getConfig (EffectContext *pContext, effect_config_t *pConfig); +int BassBoost_setParameter (EffectContext *pContext, void *pParam, void *pValue); +int BassBoost_getParameter (EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue); +int Virtualizer_setParameter (EffectContext *pContext, void *pParam, void *pValue); +int Virtualizer_getParameter (EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue); +int Equalizer_setParameter (EffectContext *pContext, void *pParam, void *pValue); +int Equalizer_getParameter (EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue); +int Volume_setParameter (EffectContext *pContext, void *pParam, void *pValue); +int Volume_getParameter (EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue); +int Effect_setEnabled(EffectContext *pContext, bool enabled); +LVM_ReturnStatus_en Offload_SetEffect_ControlParameters(EffectContext *pContext); + +/* Effect Library Interface Implementation */ + +extern "C" int EffectHwCreate(const effect_uuid_t *uuid, + int32_t sessionId, + int32_t ioId __unused, + effect_handle_t *pHandle){ + int ret = 0; + int sessionNo; + int i; + EffectContext *pContext = NULL; + bool newBundle = false; + SessionContext *pSessionContext; + + ALOGV("\n\tEffectHwCreate start session %d", sessionId); + + if (pHandle == NULL || uuid == NULL){ + ALOGV("\tLVM_ERROR : EffectHwCreate() called with NULL pointer"); + ret = -EINVAL; + goto exit; + } + + if(LvmhwInitFlag == LVM_FALSE){ + LvmhwInitFlag = LVM_TRUE; + ALOGV("\tEffectHwCreate - Initializing all global memory"); + LvmGlobalBundle_init(); + } + + // Find next available sessionNo + for(i=0; ipBundledContext = HwGlobalSessionMemory[sessionNo].pBundledContext; + pContext->pBundledContext->SessionNo = sessionNo; + pContext->pBundledContext->SessionId = sessionId; + pContext->pBundledContext->hInstance = NULL; + pContext->pBundledContext->bVolumeEnabled = LVM_FALSE; + pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE; + pContext->pBundledContext->bBassEnabled = LVM_FALSE; + pContext->pBundledContext->bBassTempDisabled = LVM_FALSE; + pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE; + pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE; + pContext->pBundledContext->nOutputDevice = AUDIO_DEVICE_NONE; + pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE; + pContext->pBundledContext->NumberEffectsEnabled = 0; + pContext->pBundledContext->NumberEffectsCalled = 0; + pContext->pBundledContext->firstVolume = LVM_TRUE; + pContext->pBundledContext->volume = 0; + + /* Saved strength is used to return the exact strength that was used in the set to the get + * because we map the original strength range of 0:1000 to 1:15, and this will avoid + * quantisation like effect when returning + */ + pContext->pBundledContext->BassStrengthSaved = 0; + pContext->pBundledContext->VirtStrengthSaved = 0; + pContext->pBundledContext->CurPreset = PRESET_CUSTOM; + pContext->pBundledContext->levelSaved = 0; + 
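+    /* Illustrative sketch (not part of the imported NXP/Samsung source; added for clarity,
+     * assuming the usual linear integer scaling used by the AOSP LVM wrapper): the strength
+     * setters are expected to map the 0..1000 UI range onto the 1..15 LVM effect level, e.g.
+     *     ActiveParams.BE_EffectLevel = (LVM_INT16)((15 * strength) / 1000);  // 1000 -> 15, 500 -> 7
+     * With integer division the mapping is lossy, which is why BassStrengthSaved and
+     * VirtStrengthSaved keep the original 0..1000 value so the matching getter can return it
+     * without quantisation error, as described in the comment above. */
+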
pContext->pBundledContext->bMuteEnabled = LVM_FALSE; + pContext->pBundledContext->bStereoPositionEnabled = LVM_FALSE; + pContext->pBundledContext->positionSaved = 0; + pContext->pBundledContext->workBuffer = NULL; + pContext->pBundledContext->frameCount = -1; + pContext->pBundledContext->SamplesToExitCountVirt = 0; + pContext->pBundledContext->SamplesToExitCountBb = 0; + pContext->pBundledContext->SamplesToExitCountEq = 0; + + for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { + pContext->pBundledContext->bandGaindB[i] = EQNB_5BandSoftPresets[i]; + } + + ALOGV("\tEffectHwCreate - Calling LvmBundle_init"); + ret = LvmBundle_init(pContext); + + if (ret < 0){ + ALOGV("\tLVM_ERROR : EffectHwCreate() Bundle init failed"); + goto exit; + } + } + else{ + ALOGV("\tEffectHwCreate - Assigning memory for previously created effect on sessionNo %d", + sessionNo); + pContext->pBundledContext = + HwGlobalSessionMemory[sessionNo].pBundledContext; + } + ALOGV("\tEffectHwCreate - pBundledContext is %p", pContext->pBundledContext); + + pSessionContext = &HwGlobalSessionMemory[pContext->pBundledContext->SessionNo]; + + // Create each Effect + if (memcmp(uuid, &gHwBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0){ + // Create Bass Boost + ALOGD("\tEffectHwCreate - Effect to be created is LVM_BASS_BOOST"); + pSessionContext->bBassInstantiated = LVM_TRUE; + pContext->pBundledContext->SamplesToExitCountBb = 0; + + pContext->itfe = &gLvmHwEffectInterface; + pContext->EffectType = LVM_BASS_BOOST; + } else if (memcmp(uuid, &gHwVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){ + // Create Virtualizer + ALOGD("\tEffectHwCreate - Effect to be created is LVM_VIRTUALIZER"); + pSessionContext->bVirtualizerInstantiated=LVM_TRUE; + pContext->pBundledContext->SamplesToExitCountVirt = 0; + + pContext->itfe = &gLvmHwEffectInterface; + pContext->EffectType = LVM_VIRTUALIZER; + } else if (memcmp(uuid, &gHwEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0){ + // Create Equalizer + ALOGD("\tEffectHwCreate - Effect to be created is LVM_EQUALIZER"); + pSessionContext->bEqualizerInstantiated = LVM_TRUE; + pContext->pBundledContext->SamplesToExitCountEq = 0; + + pContext->itfe = &gLvmHwEffectInterface; + pContext->EffectType = LVM_EQUALIZER; + } else if (memcmp(uuid, &gHwVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0){ + // Create Volume + ALOGD("\tEffectHwCreate - Effect to be created is LVM_VOLUME"); + pSessionContext->bVolumeInstantiated = LVM_TRUE; + + pContext->itfe = &gLvmHwEffectInterface; + pContext->EffectType = LVM_VOLUME; + } + else{ + ALOGD("\tLVM_ERROR : EffectHwCreate() invalid UUID"); + ret = -EINVAL; + goto exit; + } + +exit: + if (ret != 0) { + if (pContext != NULL) { + if (newBundle) { + HwGlobalSessionMemory[sessionNo].bBundledEffectsEnabled = LVM_FALSE; + HwSessionIndex[sessionNo] = LVM_UNUSED_SESSION; + delete pContext->pBundledContext; + } + delete pContext; + } + *pHandle = (effect_handle_t)NULL; + } else { + *pHandle = (effect_handle_t)pContext; + if (pContext->EffectType == LVM_BASS_BOOST) { + ALOGV("[DEBUG]\tEffectHwCreate - Address LOG"); + ALOGV("[DEBUG]\tEffectHwCreate - pContext = %p Band = %p", pContext, pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition); + } + } + ALOGV("\tEffectHwCreate end..\n\n"); + return ret; +} /* end EffectHwCreate */ + +extern "C" int EffectHwRelease(effect_handle_t handle){ + ALOGV("\n\tEffectHwRelease start %p", handle); + EffectContext * pContext = (EffectContext *)handle; + + ALOGV("\tEffectHwRelease start handle: %p, context %p", handle, 
pContext->pBundledContext); + if (pContext == NULL){ + ALOGV("\tLVM_ERROR : EffectHwRelease called with NULL pointer"); + return -EINVAL; + } + + SessionContext *pSessionContext = &HwGlobalSessionMemory[pContext->pBundledContext->SessionNo]; + + // Clear the instantiated flag for the effect + // protect agains the case where an effect is un-instantiated without being disabled + if(pContext->EffectType == LVM_BASS_BOOST) { + ALOGV("\tEffectHwRelease LVM_BASS_BOOST Clearing global intstantiated flag"); + pSessionContext->bBassInstantiated = LVM_FALSE; + if(pContext->pBundledContext->SamplesToExitCountBb > 0){ + pContext->pBundledContext->NumberEffectsEnabled--; + } + pContext->pBundledContext->SamplesToExitCountBb = 0; + } else if(pContext->EffectType == LVM_VIRTUALIZER) { + ALOGV("\tEffectHwRelease LVM_VIRTUALIZER Clearing global intstantiated flag"); + pSessionContext->bVirtualizerInstantiated = LVM_FALSE; + if(pContext->pBundledContext->SamplesToExitCountVirt > 0){ + pContext->pBundledContext->NumberEffectsEnabled--; + } + pContext->pBundledContext->SamplesToExitCountVirt = 0; + } else if(pContext->EffectType == LVM_EQUALIZER) { + ALOGV("\tEffectHwRelease LVM_EQUALIZER Clearing global intstantiated flag"); + pSessionContext->bEqualizerInstantiated =LVM_FALSE; + if(pContext->pBundledContext->SamplesToExitCountEq > 0){ + pContext->pBundledContext->NumberEffectsEnabled--; + } + pContext->pBundledContext->SamplesToExitCountEq = 0; + } else if(pContext->EffectType == LVM_VOLUME) { + ALOGV("\tEffectHwRelease LVM_VOLUME Clearing global intstantiated flag"); + pSessionContext->bVolumeInstantiated = LVM_FALSE; + if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE){ + pContext->pBundledContext->NumberEffectsEnabled--; + } + } else { + ALOGV("\tLVM_ERROR : EffectHwRelease : Unsupported effect\n\n\n\n\n\n\n"); + } + + // Disable effect, in this case ignore errors (return codes) + // if an effect has already been disabled + Effect_setEnabled(pContext, LVM_FALSE); + + // if all effects are no longer instantiaed free the lvm memory and delete BundledEffectContext + if ((pSessionContext->bBassInstantiated == LVM_FALSE) && + (pSessionContext->bVolumeInstantiated == LVM_FALSE) && + (pSessionContext->bEqualizerInstantiated ==LVM_FALSE) && + (pSessionContext->bVirtualizerInstantiated==LVM_FALSE)) + { + // Clear the HwSessionIndex + for(int i=0; ipBundledContext->SessionId){ + HwSessionIndex[i] = LVM_UNUSED_SESSION; + ALOGV("\tEffectHwRelease: Clearing HwSessionIndex SessionNo %d for SessionId %d\n", + i, pContext->pBundledContext->SessionId); + break; + } + } + + ALOGV("\tEffectHwRelease: All effects are no longer instantiated\n"); + pSessionContext->bBundledEffectsEnabled = LVM_FALSE; + pSessionContext->pBundledContext = LVM_NULL; + + ALOGV("\tEffectHwRelease: Deleting LVM Bundle context %p\n", pContext->pBundledContext); + if (pContext->pBundledContext->workBuffer != NULL) { + free(pContext->pBundledContext->workBuffer); + } + + if (pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition != NULL) { + free(pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition); + } + /* close mixer control */ + mixer_close(pContext->pBundledContext->mixerHandle); + delete pContext->pBundledContext; + pContext->pBundledContext = LVM_NULL; + } + // free the effect context for current effect + delete pContext; + + ALOGV("\tEffectHwRelease end\n"); + return 0; + +} /* end EffectHwRelease */ + +extern "C" int EffectHwGetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor) { + const 
effect_descriptor_t *desc = NULL; + + ALOGV("\tEffectHwGetDescriptor start\n"); + if (pDescriptor == NULL || uuid == NULL){ + ALOGE("EffectHwGetDescriptor() called with NULL pointer"); + return -EINVAL; + } + + if (memcmp(uuid, &gHwBassBoostDescriptor.uuid, sizeof(effect_uuid_t)) == 0) { + desc = &gHwBassBoostDescriptor; + } else if (memcmp(uuid, &gHwVirtualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) { + desc = &gHwVirtualizerDescriptor; + } else if (memcmp(uuid, &gHwEqualizerDescriptor.uuid, sizeof(effect_uuid_t)) == 0) { + desc = &gHwEqualizerDescriptor; + } else if (memcmp(uuid, &gHwVolumeDescriptor.uuid, sizeof(effect_uuid_t)) == 0) { + desc = &gHwVolumeDescriptor; + } + + if (desc == NULL) { + return -EINVAL; + } + + ALOGV("\tEffectHwGetDescriptor end\n"); + *pDescriptor = *desc; + + return 0; +} /* end EffectHwGetDescriptor */ + +void LvmGlobalBundle_init(){ + ALOGV("\tLvmGlobalBundle_init start"); + for(int i=0; ipBundledContext->OffloadEnabled) { + /* Update common strcuture parameter to array */ + param[0] = (int32_t)pContext->pBundledContext->ActiveParams.OperatingMode; + param[1] = (int32_t)pContext->pBundledContext->ActiveParams.SampleRate; + param[2] = (int32_t)pContext->pBundledContext->ActiveParams.SourceFormat; + param[3] = (int32_t)pContext->pBundledContext->ActiveParams.SpeakerType; + param[4] = (int32_t)pContext->pBundledContext->ActiveParams.VirtualizerOperatingMode; + param[5] = (int32_t)pContext->pBundledContext->ActiveParams.VirtualizerType; + param[6] = (int32_t)pContext->pBundledContext->ActiveParams.VirtualizerReverbLevel; + param[7] = (int32_t)pContext->pBundledContext->ActiveParams.CS_EffectLevel; + param[8] = (int32_t)pContext->pBundledContext->ActiveParams.EQNB_OperatingMode; + param[9] = (int32_t)pContext->pBundledContext->ActiveParams.EQNB_NBands; + param[10] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[0].Gain; + param[11] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[0].Frequency; + param[12] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[0].QFactor; + param[13] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[1].Gain; + param[14] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[1].Frequency; + param[15] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[1].QFactor; + param[16] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[2].Gain; + param[17] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[2].Frequency; + param[18] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[2].QFactor; + param[19] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[3].Gain; + param[20] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[3].Frequency; + param[21] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[3].QFactor; + param[22] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[4].Gain; + param[23] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[4].Frequency; + param[24] = (int32_t)pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[4].QFactor; + param[25] = (int32_t)pContext->pBundledContext->ActiveParams.BE_OperatingMode; + param[26] = (int32_t)pContext->pBundledContext->ActiveParams.BE_EffectLevel; + param[27] = (int32_t)pContext->pBundledContext->ActiveParams.BE_CentreFreq; + param[28] = 
(int32_t)pContext->pBundledContext->ActiveParams.BE_HPF; + param[29] = (int32_t)pContext->pBundledContext->ActiveParams.VC_EffectLevel; + param[30] = (int32_t)pContext->pBundledContext->ActiveParams.VC_Balance; + param[31] = (int32_t)pContext->pBundledContext->ActiveParams.TE_OperatingMode; + param[32] = (int32_t)pContext->pBundledContext->ActiveParams.TE_EffectLevel; + param[33] = (int32_t)pContext->pBundledContext->ActiveParams.PSA_Enable; + param[34] = (int32_t)pContext->pBundledContext->ActiveParams.PSA_PeakDecayRate; + + if (pContext->pBundledContext->mixerCtl) { + ALOGV("\tOffload_SetEffect_ControlParameters: mixer_ctl_set_array"); + ret = mixer_ctl_set_array(pContext->pBundledContext->mixerCtl, param, ARRAY_SIZE(param)); + if (ret) { + ALOGE("%s: mixer_ctl_set_array return error(%d)", __func__, LvmStatus); + LvmStatus = LVM_OUTOFRANGE; + } else { + LvmStatus = LVM_SUCCESS; + /*for (i=0; i < 35; i++) { + ALOGD("mixer-array param[%d] = %d", i, param[i]); + }*/ + } + } + } + + ALOGV("\tOffload_SetEffect_ControlParameters Exit"); + return LvmStatus; +} + +//---------------------------------------------------------------------------- +// LvmBundle_init() +//---------------------------------------------------------------------------- +// Purpose: Initialize engine with default configuration, creates instance +// with all effects disabled. +// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int LvmBundle_init(EffectContext *pContext){ + int status; + LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */ + LVM_EQNB_BandDef_t *pBandDefs = NULL; /* Equaliser band definitions */ + + ALOGV("\tLvmBundle_init start"); + + pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ; + pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + pContext->config.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + pContext->config.inputCfg.samplingRate = 44100; + pContext->config.inputCfg.bufferProvider.getBuffer = NULL; + pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL; + pContext->config.inputCfg.bufferProvider.cookie = NULL; + pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL; + pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE; + pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + pContext->config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + pContext->config.outputCfg.samplingRate = 44100; + pContext->config.outputCfg.bufferProvider.getBuffer = NULL; + pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL; + pContext->config.outputCfg.bufferProvider.cookie = NULL; + pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL; + + CHECK_ARG(pContext != NULL); + + /* open mixer control */ + pContext->pBundledContext->mixerHandle = mixer_open(MIXER_CARD); + if (!pContext->pBundledContext->mixerHandle) { + ALOGE("%s: Failed to open mixer", __func__); + return -EINVAL; + } + + /* Get required control from mixer */ + pContext->pBundledContext->mixerCtl = mixer_get_ctl_by_name(pContext->pBundledContext->mixerHandle, MIXER_CTL_NAME); + if (!pContext->pBundledContext->mixerCtl) { + ALOGE("%s: mixer_get_ctl_by_name failed", __func__); + mixer_close(pContext->pBundledContext->mixerHandle); + return -EINVAL; + } + + /* Set the initial process parameters */ + /* General parameters */ + pContext->pBundledContext->ActiveParams.OperatingMode = LVM_MODE_ON; + pContext->pBundledContext->ActiveParams.SampleRate = LVM_FS_44100; + 
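For orientation, a hedged sketch of the tinyalsa round trip that Offload_SetEffect_ControlParameters and the mixer setup above rely on: the control parameters are packed into a flat int32 array and written to the ALSA control that the offload firmware reads. The card index and control name below are placeholders, not the MIXER_CARD / MIXER_CTL_NAME values of this patch, and the real code keeps the mixer handle and control cached in pBundledContext instead of reopening the mixer on every update.

/* Sketch only: card index and control name are assumptions. */
#include <tinyalsa/asoundlib.h>
#include <stddef.h>
#include <stdint.h>

#define SKETCH_MIXER_CARD     0                       /* assumed card index   */
#define SKETCH_MIXER_CTL_NAME "Offload Effect Param"  /* assumed control name */

static int sketch_push_params(const int32_t *param, size_t count)
{
    struct mixer *mixer = mixer_open(SKETCH_MIXER_CARD);
    if (!mixer)
        return -1;                                    /* no mixer on this card */

    struct mixer_ctl *ctl = mixer_get_ctl_by_name(mixer, SKETCH_MIXER_CTL_NAME);
    int ret = ctl ? mixer_ctl_set_array(ctl, param, count) : -1;

    mixer_close(mixer);
    return ret;                                       /* 0 on success, like mixer_ctl_set_array */
}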
pContext->pBundledContext->ActiveParams.SourceFormat = LVM_STEREO; + pContext->pBundledContext->ActiveParams.SpeakerType = LVM_HEADPHONES; + + pContext->pBundledContext->SampleRate = LVM_FS_44100; + + /* Concert Sound parameters */ + pContext->pBundledContext->ActiveParams.VirtualizerOperatingMode = LVM_MODE_OFF; + pContext->pBundledContext->ActiveParams.VirtualizerType = LVM_CONCERTSOUND; + pContext->pBundledContext->ActiveParams.VirtualizerReverbLevel = 100; + pContext->pBundledContext->ActiveParams.CS_EffectLevel = LVM_CS_EFFECT_NONE; + + /* N-Band Equaliser parameters */ + pContext->pBundledContext->ActiveParams.EQNB_OperatingMode = LVM_EQNB_OFF; + pContext->pBundledContext->ActiveParams.EQNB_NBands = FIVEBAND_NUMBANDS; + pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition = (LVM_EQNB_BandDef_t*)malloc(sizeof(LVM_EQNB_BandDef_t) * MAX_NUM_BANDS); + pBandDefs = pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition; + + for (int i=0; ipBundledContext->ActiveParams.VC_EffectLevel = 0; + pContext->pBundledContext->ActiveParams.VC_Balance = 0; + + /* Treble Enhancement parameters */ + pContext->pBundledContext->ActiveParams.TE_OperatingMode = LVM_TE_OFF; + pContext->pBundledContext->ActiveParams.TE_EffectLevel = 0; + + /* PSA Control parameters */ + pContext->pBundledContext->ActiveParams.PSA_Enable = LVM_PSA_OFF; + pContext->pBundledContext->ActiveParams.PSA_PeakDecayRate = (LVM_PSA_DecaySpeed_en)0; + + /* Bass Enhancement parameters */ + pContext->pBundledContext->ActiveParams.BE_OperatingMode = LVM_BE_OFF; + pContext->pBundledContext->ActiveParams.BE_EffectLevel = 0; + pContext->pBundledContext->ActiveParams.BE_CentreFreq = LVM_BE_CENTRE_90Hz; + pContext->pBundledContext->ActiveParams.BE_HPF = LVM_BE_HPF_ON; + + /* PSA Control parameters */ + pContext->pBundledContext->ActiveParams.PSA_Enable = LVM_PSA_OFF; + pContext->pBundledContext->ActiveParams.PSA_PeakDecayRate = LVM_PSA_SPEED_MEDIUM; + + /* TE Control parameters */ + pContext->pBundledContext->ActiveParams.TE_OperatingMode = LVM_TE_OFF; + pContext->pBundledContext->ActiveParams.TE_EffectLevel = 0; + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "LvmBundle_init") + if(LvmStatus != LVM_SUCCESS) return -EINVAL; + + ALOGV("\tNXPBundle Initial ControlParameters are configured Succesfully\n"); + ALOGV("\tLvmBundle_init End"); + return 0; +} /* end LvmBundle_init */ + + +//---------------------------------------------------------------------------- +// LvmBundle_process() +//---------------------------------------------------------------------------- +// Purpose: +// Apply LVM Bundle effects +// +// Inputs: +// pIn: pointer to stereo 16 bit input data +// pOut: pointer to stereo 16 bit output data +// frameCount: Frames to process +// pContext: effect engine context +// strength strength to be applied +// +// Outputs: +// pOut: pointer to updated stereo 16 bit output data +// +//---------------------------------------------------------------------------- + +int LvmBundle_process(LVM_INT16 *pIn __unused, + LVM_INT16 *pOut __unused, + int frameCount __unused, + EffectContext *pContext __unused){ + return 0; +} /* end LvmBundle_process */ + + +//---------------------------------------------------------------------------- +// EqualizerUpdateActiveParams() +//---------------------------------------------------------------------------- +// Purpose: Update ActiveParams for Equalizer +// +// Inputs: +// 
pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- +void EqualizerUpdateActiveParams(EffectContext *pContext) { + //LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + + /* current control settings are available in EffectContext structure use it and update + the structure before sending the parameter */ + for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { + pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[i].Frequency = EQNB_5BandPresetsFrequencies[i]; + pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[i].QFactor = EQNB_5BandPresetsQFactors[i]; + pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition[i].Gain = pContext->pBundledContext->bandGaindB[i]; + } + + //ALOGV("\tEqualizerUpdateActiveParams just Set -> %d\n", + // ActiveParams.pEQNB_BandDefinition[band].Gain); + +} + +//---------------------------------------------------------------------------- +// LvmEffect_limitLevel() +//---------------------------------------------------------------------------- +// Purpose: limit the overall level to a value less than 0 dB preserving +// the overall EQ band gain and BassBoost relative levels. +// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- +void LvmEffect_limitLevel(EffectContext *pContext) { + LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + + /* current control settings are available in EffectContext structure use it and update + the structure before sending the parameter */ + int gainCorrection = 0; + //Count the energy contribution per band for EQ and BassBoost only if they are active. + float energyContribution = 0; + float energyCross = 0; + float energyBassBoost = 0; + float crossCorrection = 0; + + //EQ contribution + if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) { + for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { + float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0; + float bandCoefficient = LimitLevel_bandEnergyCoefficient[i]; + float bandEnergy = bandFactor * bandCoefficient * bandCoefficient; + if (bandEnergy > 0) + energyContribution += bandEnergy; + } + + //cross EQ coefficients + float bandFactorSum = 0; + for (int i = 0; i < FIVEBAND_NUMBANDS-1; i++) { + float bandFactor1 = pContext->pBundledContext->bandGaindB[i]/15.0; + float bandFactor2 = pContext->pBundledContext->bandGaindB[i+1]/15.0; + + if (bandFactor1 > 0 && bandFactor2 > 0) { + float crossEnergy = bandFactor1 * bandFactor2 * + LimitLevel_bandEnergyCrossCoefficient[i]; + bandFactorSum += bandFactor1 * bandFactor2; + + if (crossEnergy > 0) + energyCross += crossEnergy; + } + } + bandFactorSum -= 1.0; + if (bandFactorSum > 0) + crossCorrection = bandFactorSum * 0.7; + } + + //BassBoost contribution + if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) { + float boostFactor = (pContext->pBundledContext->BassStrengthSaved)/1000.0; + float boostCoefficient = LimitLevel_bassBoostEnergyCoefficient; + + energyContribution += boostFactor * boostCoefficient * boostCoefficient; + + for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { + float bandFactor = pContext->pBundledContext->bandGaindB[i]/15.0; + float bandCrossCoefficient = LimitLevel_bassBoostEnergyCrossCoefficient[i]; + float bandEnergy = boostFactor * bandFactor * + bandCrossCoefficient; + if (bandEnergy > 0) + energyBassBoost += bandEnergy; + } + } + + //Virtualizer contribution + if 
(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) { + energyContribution += LimitLevel_virtualizerContribution * + LimitLevel_virtualizerContribution; + } + + double totalEnergyEstimation = sqrt(energyContribution + energyCross + energyBassBoost) - + crossCorrection; + ALOGV(" TOTAL energy estimation: %0.2f", totalEnergyEstimation); + + //roundoff + int maxLevelRound = (int)(totalEnergyEstimation + 0.99); + if (maxLevelRound + pContext->pBundledContext->volume > 0) { + gainCorrection = maxLevelRound + pContext->pBundledContext->volume; + } + + pContext->pBundledContext->ActiveParams.VC_EffectLevel = pContext->pBundledContext->volume - gainCorrection; + if (pContext->pBundledContext->ActiveParams.VC_EffectLevel < -96) { + pContext->pBundledContext->ActiveParams.VC_EffectLevel = -96; + } + ALOGV("\tVol:%d, GainCorrection: %d, Actual vol: %d", pContext->pBundledContext->volume, + gainCorrection, pContext->pBundledContext->ActiveParams.VC_EffectLevel); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "LvmEffect_limitLevel") + if(LvmStatus != LVM_SUCCESS) return; + + //ALOGV("\tLvmEffect_limitLevel just set (-96dB -> 0dB) -> %d\n",ActiveParams.VC_EffectLevel ); +#if 0 //*******SHOULD BE INFORMED TO FIRMWARE SIDE IMPLEMENTATION + if (pContext->pBundledContext->firstVolume == LVM_TRUE){ + LvmStatus = LVM_SetVolumeNoSmoothing(pContext->pBundledContext->hInstance, &ActiveParams); + LVM_ERROR_CHECK(LvmStatus, "LVM_SetVolumeNoSmoothing", "LvmBundle_process") + ALOGV("\tLVM_VOLUME: Disabling Smoothing for first volume change to remove spikes/clicks"); + pContext->pBundledContext->firstVolume = LVM_FALSE; + } +#endif +} + +//---------------------------------------------------------------------------- +// LvmEffect_enable() +//---------------------------------------------------------------------------- +// Purpose: Enable the effect in the bundle +// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int LvmEffect_enable(EffectContext *pContext){ + //ALOGV("\tLvmEffect_enable start"); + + LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */ + + /* current control settings are available in Context structure*/ + + if(pContext->EffectType == LVM_BASS_BOOST) { + ALOGV("\tLvmEffect_enable : Enabling LVM_BASS_BOOST"); + pContext->pBundledContext->ActiveParams.BE_OperatingMode = LVM_BE_ON; + } + if(pContext->EffectType == LVM_VIRTUALIZER) { + ALOGV("\tLvmEffect_enable : Enabling LVM_VIRTUALIZER"); + pContext->pBundledContext->ActiveParams.VirtualizerOperatingMode = LVM_MODE_ON; + } + if(pContext->EffectType == LVM_EQUALIZER) { + ALOGV("\tLvmEffect_enable : Enabling LVM_EQUALIZER"); + pContext->pBundledContext->ActiveParams.EQNB_OperatingMode = LVM_EQNB_ON; + } + if(pContext->EffectType == LVM_VOLUME) { + ALOGV("\tLvmEffect_enable : Enabling LVM_VOLUME"); + } + + LvmEffect_limitLevel(pContext); + //ALOGV("\tLvmEffect_enable end"); + return 0; +} + +//---------------------------------------------------------------------------- +// LvmEffect_disable() +//---------------------------------------------------------------------------- +// Purpose: Disable the effect in the bundle +// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int LvmEffect_disable(EffectContext 
*pContext){ + //ALOGV("\tLvmEffect_disable start"); + + LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */ + /* Get the current control settings from context structure*/ + + if(pContext->EffectType == LVM_BASS_BOOST) { + ALOGV("\tLvmEffect_disable : Disabling LVM_BASS_BOOST"); + pContext->pBundledContext->ActiveParams.BE_OperatingMode = LVM_BE_OFF; + } + if(pContext->EffectType == LVM_VIRTUALIZER) { + ALOGV("\tLvmEffect_disable : Disabling LVM_VIRTUALIZER"); + pContext->pBundledContext->ActiveParams.VirtualizerOperatingMode = LVM_MODE_OFF; + } + if(pContext->EffectType == LVM_EQUALIZER) { + ALOGV("\tLvmEffect_disable : Disabling LVM_EQUALIZER"); + pContext->pBundledContext->ActiveParams.EQNB_OperatingMode = LVM_EQNB_OFF; + } + if(pContext->EffectType == LVM_VOLUME) { + ALOGV("\tLvmEffect_disable : Disabling LVM_VOLUME"); + } + + LvmEffect_limitLevel(pContext); + //ALOGV("\tLvmEffect_disable end"); + return 0; +} + +//---------------------------------------------------------------------------- +// Effect_setConfig() +//---------------------------------------------------------------------------- +// Purpose: Set input and output audio configuration. +// +// Inputs: +// pContext: effect engine context +// pConfig: pointer to effect_config_t structure holding input and output +// configuration parameters +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int Effect_setConfig(EffectContext *pContext, effect_config_t *pConfig){ + LVM_Fs_en SampleRate; + //ALOGV("\tEffect_setConfig start"); + + CHECK_ARG(pContext != NULL); + CHECK_ARG(pConfig != NULL); + + CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate); + CHECK_ARG(pConfig->inputCfg.channels == pConfig->outputCfg.channels); + CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format); + CHECK_ARG(pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO); + CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE + || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE); + CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT); + + pContext->config = *pConfig; + + switch (pConfig->inputCfg.samplingRate) { + case 8000: + SampleRate = LVM_FS_8000; + pContext->pBundledContext->SamplesPerSecond = 8000*2; // 2 secs Stereo + break; + case 16000: + SampleRate = LVM_FS_16000; + pContext->pBundledContext->SamplesPerSecond = 16000*2; // 2 secs Stereo + break; + case 22050: + SampleRate = LVM_FS_22050; + pContext->pBundledContext->SamplesPerSecond = 22050*2; // 2 secs Stereo + break; + case 32000: + SampleRate = LVM_FS_32000; + pContext->pBundledContext->SamplesPerSecond = 32000*2; // 2 secs Stereo + break; + case 44100: + SampleRate = LVM_FS_44100; + pContext->pBundledContext->SamplesPerSecond = 44100*2; // 2 secs Stereo + break; + case 48000: + SampleRate = LVM_FS_48000; + pContext->pBundledContext->SamplesPerSecond = 48000*2; // 2 secs Stereo + break; + default: + ALOGV("\tEffect_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate); + return -EINVAL; + } + + if(pContext->pBundledContext->SampleRate != SampleRate){ + + LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; + + ALOGV("\tEffect_setConfig change sampling rate to %d", SampleRate); + + /* Use current control settings from context structure*/ + pContext->pBundledContext->ActiveParams.SampleRate = SampleRate; + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, 
"Offload_SetEffect_ControlParameters", "Effect_setConfig") + if(LvmStatus != LVM_SUCCESS) return -EINVAL; + + ALOGV("\tEffect_setConfig Succesfully called Offload_SetEffect_ControlParameters\n"); + pContext->pBundledContext->SampleRate = SampleRate; + + }else{ + //ALOGV("\tEffect_setConfig keep sampling rate at %d", SampleRate); + } + + //ALOGV("\tEffect_setConfig End...."); + return 0; +} /* end Effect_setConfig */ + +//---------------------------------------------------------------------------- +// Effect_getConfig() +//---------------------------------------------------------------------------- +// Purpose: Get input and output audio configuration. +// +// Inputs: +// pContext: effect engine context +// pConfig: pointer to effect_config_t structure holding input and output +// configuration parameters +// +// Outputs: +// +//---------------------------------------------------------------------------- + +void Effect_getConfig(EffectContext *pContext, effect_config_t *pConfig) +{ + *pConfig = pContext->config; +} /* end Effect_getConfig */ + +//---------------------------------------------------------------------------- +// BassGetStrength() +//---------------------------------------------------------------------------- +// Purpose: +// get the effect strength currently being used, what is actually returned is the strengh that was +// previously used in the set, this is because the app uses a strength in the range 0-1000 while +// the bassboost uses 1-15, so to avoid a quantisation the original set value is used. However the +// actual used value is checked to make sure it corresponds to the one being returned +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +uint32_t BassGetStrength(EffectContext *pContext){ + //ALOGV("\tBassGetStrength() (0-1000) -> %d\n", pContext->pBundledContext->BassStrengthSaved); + + LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */ + /* Use the current control settings from Context structure*/ + /* Check that the strength returned matches the strength that was set earlier */ + if(pContext->pBundledContext->ActiveParams.BE_EffectLevel != + (LVM_INT16)((15*pContext->pBundledContext->BassStrengthSaved)/1000)){ + ALOGV("\tLVM_ERROR : BassGetStrength module strength does not match savedStrength %d %d\n", + pContext->pBundledContext->ActiveParams.BE_EffectLevel, pContext->pBundledContext->BassStrengthSaved); + return -EINVAL; + } + + //ALOGV("\tBassGetStrength() (0-15) -> %d\n", ActiveParams.BE_EffectLevel ); + //ALOGV("\tBassGetStrength() (saved) -> %d\n", pContext->pBundledContext->BassStrengthSaved ); + return pContext->pBundledContext->BassStrengthSaved; +} /* end BassGetStrength */ + +//---------------------------------------------------------------------------- +// BassSetStrength() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the strength to the BassBosst. 
Must first be converted from the range 0-1000 to 1-15 +// +// Inputs: +// pContext: effect engine context +// strength strength to be applied +// +//---------------------------------------------------------------------------- + +void BassSetStrength(EffectContext *pContext, uint32_t strength){ + //ALOGV("\tBassSetStrength(%d)", strength); + + pContext->pBundledContext->BassStrengthSaved = (int)strength; + + LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + + /* Use the current control settings from Context Structure */ + /* Bass Enhancement parameters */ + pContext->pBundledContext->ActiveParams.BE_EffectLevel = (LVM_INT16)((15*strength)/1000); + pContext->pBundledContext->ActiveParams.BE_CentreFreq = LVM_BE_CENTRE_90Hz; + + //ALOGV("\tBassSetStrength() (0-15) -> %d\n", ActiveParams.BE_EffectLevel ); + + LvmEffect_limitLevel(pContext); +} /* end BassSetStrength */ + +//---------------------------------------------------------------------------- +// VirtualizerGetStrength() +//---------------------------------------------------------------------------- +// Purpose: +// get the effect strength currently being used, what is actually returned is the strengh that was +// previously used in the set, this is because the app uses a strength in the range 0-1000 while +// the Virtualizer uses 1-100, so to avoid a quantisation the original set value is used.However the +// actual used value is checked to make sure it corresponds to the one being returned +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +uint32_t VirtualizerGetStrength(EffectContext *pContext){ + //ALOGV("\tVirtualizerGetStrength (0-1000) -> %d\n",pContext->pBundledContext->VirtStrengthSaved); + + //ALOGV("\tVirtualizerGetStrength() (0-100) -> %d\n", ActiveParams.VirtualizerReverbLevel*10); + return pContext->pBundledContext->VirtStrengthSaved; +} /* end getStrength */ + +//---------------------------------------------------------------------------- +// VirtualizerSetStrength() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the strength to the Virtualizer. Must first be converted from the range 0-1000 to 1-15 +// +// Inputs: +// pContext: effect engine context +// strength strength to be applied +// +//---------------------------------------------------------------------------- + +void VirtualizerSetStrength(EffectContext *pContext, uint32_t strength){ + //ALOGV("\tVirtualizerSetStrength(%d)", strength); + LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + + pContext->pBundledContext->VirtStrengthSaved = (int)strength; + + /* Get the current control settings from Context structure */ + /* Virtualizer parameters */ + pContext->pBundledContext->ActiveParams.CS_EffectLevel = (int)((strength*32767)/1000); + + ALOGV("\tVirtualizerSetStrength() (0-1000) -> %d\n", strength ); + ALOGV("\tVirtualizerSetStrength() (0- 100) -> %d\n", pContext->pBundledContext->ActiveParams.CS_EffectLevel ); + + LvmEffect_limitLevel(pContext); +} /* end setStrength */ + +//---------------------------------------------------------------------------- +// VirtualizerIsDeviceSupported() +//---------------------------------------------------------------------------- +// Purpose: +// Check if an audio device type is supported by this implementation +// +// Inputs: +// deviceType the type of device that affects the processing (e.g. 
for binaural vs transaural) +// Output: +// -EINVAL if the configuration is not supported or it is unknown +// 0 if the configuration is supported +//---------------------------------------------------------------------------- +int VirtualizerIsDeviceSupported(audio_devices_t deviceType) { + switch (deviceType) { + case AUDIO_DEVICE_OUT_WIRED_HEADSET: + case AUDIO_DEVICE_OUT_WIRED_HEADPHONE: + case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES: + return 0; + default : + return -EINVAL; + } +} + +//---------------------------------------------------------------------------- +// VirtualizerIsConfigurationSupported() +//---------------------------------------------------------------------------- +// Purpose: +// Check if a channel mask + audio device type is supported by this implementation +// +// Inputs: +// channelMask the channel mask of the input to virtualize +// deviceType the type of device that affects the processing (e.g. for binaural vs transaural) +// Output: +// -EINVAL if the configuration is not supported or it is unknown +// 0 if the configuration is supported +//---------------------------------------------------------------------------- +int VirtualizerIsConfigurationSupported(audio_channel_mask_t channelMask, + audio_devices_t deviceType) { + uint32_t channelCount = audio_channel_count_from_out_mask(channelMask); + if ((channelCount == 0) || (channelCount > 2)) { + return -EINVAL; + } + + return VirtualizerIsDeviceSupported(deviceType); +} + +//---------------------------------------------------------------------------- +// VirtualizerForceVirtualizationMode() +//---------------------------------------------------------------------------- +// Purpose: +// Force the virtualization mode to that of the given audio device +// +// Inputs: +// pContext effect engine context +// forcedDevice the type of device whose virtualization mode we'll always use +// Output: +// -EINVAL if the device is not supported or is unknown +// 0 if the device is supported and the virtualization mode forced +// +//---------------------------------------------------------------------------- +int VirtualizerForceVirtualizationMode(EffectContext *pContext, audio_devices_t forcedDevice) { + ALOGV("VirtualizerForceVirtualizationMode: forcedDev=0x%x enabled=%d tmpDisabled=%d", + forcedDevice, pContext->pBundledContext->bVirtualizerEnabled, + pContext->pBundledContext->bVirtualizerTempDisabled); + int status = 0; + bool useVirtualizer = false; + + if (VirtualizerIsDeviceSupported(forcedDevice) != 0) { + if (forcedDevice != AUDIO_DEVICE_NONE) { + //forced device is not supported, make it behave as a reset of forced mode + forcedDevice = AUDIO_DEVICE_NONE; + // but return an error + status = -EINVAL; + } + } + + if (forcedDevice == AUDIO_DEVICE_NONE) { + // disabling forced virtualization mode: + // verify whether the virtualization should be enabled or disabled + if (VirtualizerIsDeviceSupported(pContext->pBundledContext->nOutputDevice) == 0) { + useVirtualizer = (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE); + } + pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_NONE; + } else { + // forcing virtualization mode: here we already know the device is supported + pContext->pBundledContext->nVirtualizerForcedDevice = AUDIO_DEVICE_OUT_WIRED_HEADPHONE; + // only enable for a supported mode, when the effect is enabled + useVirtualizer = (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE); + } + + if (useVirtualizer) { + if (pContext->pBundledContext->bVirtualizerTempDisabled == 
LVM_TRUE) { + ALOGV("\tVirtualizerForceVirtualizationMode re-enable LVM_VIRTUALIZER"); + android::LvmEffect_enable(pContext); + pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE; + } else { + ALOGV("\tVirtualizerForceVirtualizationMode leaving LVM_VIRTUALIZER enabled"); + } + } else { + if (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE) { + ALOGV("\tVirtualizerForceVirtualizationMode disable LVM_VIRTUALIZER"); + android::LvmEffect_disable(pContext); + pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE; + } else { + ALOGV("\tVirtualizerForceVirtualizationMode leaving LVM_VIRTUALIZER disabled"); + } + } + + ALOGV("\tafter VirtualizerForceVirtualizationMode: enabled=%d tmpDisabled=%d", + pContext->pBundledContext->bVirtualizerEnabled, + pContext->pBundledContext->bVirtualizerTempDisabled); + + return status; +} +//---------------------------------------------------------------------------- +// VirtualizerGetSpeakerAngles() +//---------------------------------------------------------------------------- +// Purpose: +// Get the virtual speaker angles for a channel mask + audio device type +// configuration which is guaranteed to be supported by this implementation +// +// Inputs: +// channelMask: the channel mask of the input to virtualize +// deviceType the type of device that affects the processing (e.g. for binaural vs transaural) +// Input/Output: +// pSpeakerAngles the array of integer where each speaker angle is written as a triplet in the +// following format: +// int32_t a bit mask with a single value selected for each speaker, following +// the convention of the audio_channel_mask_t type +// int32_t a value in degrees expressing the speaker azimuth, where 0 is in front +// of the user, 180 behind, -90 to the left, 90 to the right of the user +// int32_t a value in degrees expressing the speaker elevation, where 0 is the +// horizontal plane, +90 is directly above the user, -90 below +// +//---------------------------------------------------------------------------- +void VirtualizerGetSpeakerAngles(audio_channel_mask_t channelMask __unused, + audio_devices_t deviceType __unused, int32_t *pSpeakerAngles) { + // the channel count is guaranteed to be 1 or 2 + // the device is guaranteed to be of type headphone + // this virtualizer is always 2in with speakers at -90 and 90deg of azimuth, 0deg of elevation + *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_LEFT; + *pSpeakerAngles++ = -90; // azimuth + *pSpeakerAngles++ = 0; // elevation + *pSpeakerAngles++ = (int32_t) AUDIO_CHANNEL_OUT_FRONT_RIGHT; + *pSpeakerAngles++ = 90; // azimuth + *pSpeakerAngles = 0; // elevation +} + +//---------------------------------------------------------------------------- +// VirtualizerGetVirtualizationMode() +//---------------------------------------------------------------------------- +// Purpose: +// Retrieve the current device whose processing mode is used by this effect +// +// Output: +// AUDIO_DEVICE_NONE if the effect is not virtualizing +// or the device type if the effect is virtualizing +//---------------------------------------------------------------------------- +audio_devices_t VirtualizerGetVirtualizationMode(EffectContext *pContext) { + audio_devices_t virtDevice = AUDIO_DEVICE_NONE; + if ((pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) + && (pContext->pBundledContext->bVirtualizerTempDisabled == LVM_FALSE)) { + if (pContext->pBundledContext->nVirtualizerForcedDevice != AUDIO_DEVICE_NONE) { + // virtualization mode is forced, 
return that device + virtDevice = pContext->pBundledContext->nVirtualizerForcedDevice; + } else { + // no forced mode, return the current device + virtDevice = pContext->pBundledContext->nOutputDevice; + } + } + ALOGV("VirtualizerGetVirtualizationMode() returning 0x%x", virtDevice); + return virtDevice; +} + +//---------------------------------------------------------------------------- +// EqualizerGetBandLevel() +//---------------------------------------------------------------------------- +// Purpose: Retrieve the gain currently being used for the band passed in +// +// Inputs: +// band: band number +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- +int32_t EqualizerGetBandLevel(EffectContext *pContext, int32_t band){ + //ALOGV("\tEqualizerGetBandLevel -> %d\n", pContext->pBundledContext->bandGaindB[band] ); + return pContext->pBundledContext->bandGaindB[band] * 100; +} + +//---------------------------------------------------------------------------- +// EqualizerSetBandLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Sets gain value for the given band. +// +// Inputs: +// band: band number +// Gain: Gain to be applied in millibels +// pContext: effect engine context +// +// Outputs: +// +//--------------------------------------------------------------------------- +void EqualizerSetBandLevel(EffectContext *pContext, int band, short Gain){ + int gainRounded; + if(Gain > 0){ + gainRounded = (int)((Gain+50)/100); + }else{ + gainRounded = (int)((Gain-50)/100); + } + //ALOGV("\tEqualizerSetBandLevel(%d)->(%d)", Gain, gainRounded); + pContext->pBundledContext->bandGaindB[band] = gainRounded; + pContext->pBundledContext->CurPreset = PRESET_CUSTOM; + + EqualizerUpdateActiveParams(pContext); + LvmEffect_limitLevel(pContext); +} + +//---------------------------------------------------------------------------- +// EqualizerGetCentreFrequency() +//---------------------------------------------------------------------------- +// Purpose: Retrieve the frequency being used for the band passed in +// +// Inputs: +// band: band number +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- +int32_t EqualizerGetCentreFrequency(EffectContext *pContext, int32_t band){ + int32_t Frequency =0; + + LVM_EQNB_BandDef_t *BandDef; + /* Use the current control settings from context structure */ + + BandDef = pContext->pBundledContext->ActiveParams.pEQNB_BandDefinition; + Frequency = (int32_t)BandDef[band].Frequency*1000; // Convert to millibels + + //ALOGV("\tEqualizerGetCentreFrequency -> %d\n", Frequency ); + return Frequency; +} + +//---------------------------------------------------------------------------- +// EqualizerGetBandFreqRange( +//---------------------------------------------------------------------------- +// Purpose: +// +// Gets lower and upper boundaries of a band. +// For the high shelf, the low bound is the band frequency and the high +// bound is Nyquist. +// For the peaking filters, they are the gain[dB]/2 points. 
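As a quick reference for the two band-level helpers above: EqualizerSetBandLevel rounds the incoming millibel gain to whole dB away from zero before storing it in bandGaindB, and EqualizerGetBandLevel reports that stored value multiplied back by 100. A small sketch of that round trip; the helper name and harness are illustrative, only the rounding mirrors the patch.

// Sketch of the millibel <-> dB round trip used by the band-level helpers.
#include <cassert>

static int mBToRoundedDb(int gainMb) {
    return gainMb > 0 ? (gainMb + 50) / 100 : (gainMb - 50) / 100;
}

int main() {
    assert(mBToRoundedDb(150)  ==  2);      // 1.5 dB rounds up to 2 dB
    assert(mBToRoundedDb(-150) == -2);      // symmetric rounding for cuts
    assert(mBToRoundedDb(149)  ==  1);
    assert(mBToRoundedDb(150) * 100 == 200); // what the getter reports afterwards
    return 0;
}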
+// +// Inputs: +// band: band number +// pContext: effect engine context +// +// Outputs: +// pLow: lower band range +// pLow: upper band range +//---------------------------------------------------------------------------- +int32_t EqualizerGetBandFreqRange(EffectContext *pContext __unused, int32_t band, uint32_t *pLow, + uint32_t *pHi){ + *pLow = bandFreqRange[band][0]; + *pHi = bandFreqRange[band][1]; + return 0; +} + +//---------------------------------------------------------------------------- +// EqualizerGetBand( +//---------------------------------------------------------------------------- +// Purpose: +// +// Returns the band with the maximum influence on a given frequency. +// Result is unaffected by whether EQ is enabled or not, or by whether +// changes have been committed or not. +// +// Inputs: +// targetFreq The target frequency, in millihertz. +// pContext: effect engine context +// +// Outputs: +// pLow: lower band range +// pLow: upper band range +//---------------------------------------------------------------------------- +int32_t EqualizerGetBand(EffectContext *pContext __unused, uint32_t targetFreq){ + int band = 0; + + if(targetFreq < bandFreqRange[0][0]){ + return -EINVAL; + }else if(targetFreq == bandFreqRange[0][0]){ + return 0; + } + for(int i=0; i bandFreqRange[i][0])&&(targetFreq <= bandFreqRange[i][1])){ + band = i; + } + } + return band; +} + +//---------------------------------------------------------------------------- +// EqualizerGetPreset( +//---------------------------------------------------------------------------- +// Purpose: +// +// Gets the currently set preset ID. +// Will return PRESET_CUSTOM in case the EQ parameters have been modified +// manually since a preset was set. +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- +int32_t EqualizerGetPreset(EffectContext *pContext){ + return pContext->pBundledContext->CurPreset; +} + +//---------------------------------------------------------------------------- +// EqualizerSetPreset( +//---------------------------------------------------------------------------- +// Purpose: +// +// Sets the current preset by ID. +// All the band parameters will be overridden. +// +// Inputs: +// pContext: effect engine context +// preset The preset ID. +// +//---------------------------------------------------------------------------- +void EqualizerSetPreset(EffectContext *pContext, int preset){ + + //ALOGV("\tEqualizerSetPreset(%d)", preset); + pContext->pBundledContext->CurPreset = preset; + + //ActiveParams.pEQNB_BandDefinition = &BandDefs[0]; + for (int i=0; ipBundledContext->bandGaindB[i] = + EQNB_5BandSoftPresets[i + preset * FIVEBAND_NUMBANDS]; + } + + EqualizerUpdateActiveParams(pContext); + LvmEffect_limitLevel(pContext); + + //ALOGV("\tEqualizerSetPreset Succesfully called LVM_SetControlParameters\n"); + return; +} + +int32_t EqualizerGetNumPresets(){ + return sizeof(gEqualizerPresets) / sizeof(PresetConfig); +} + +//---------------------------------------------------------------------------- +// EqualizerGetPresetName( +//---------------------------------------------------------------------------- +// Purpose: +// Gets a human-readable name for a preset ID. Will return "Custom" if +// PRESET_CUSTOM is passed. +// +// Inputs: +// preset The preset ID. Must be less than number of presets. 
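EqualizerSetPreset above indexes a flat, preset-major gain table (band + preset * FIVEBAND_NUMBANDS). A minimal sketch of that layout with made-up gain values; only the indexing mirrors the patch, the table contents and names are illustrative.

// Sketch of preset-major table indexing: row = preset, column = band.
#include <cassert>

static const int kNumBands = 5;
static const int kSoftPresets[] = {
     0, 0, 0, 0, 0,     // preset 0 (illustrative values, not EQNB_5BandSoftPresets)
     3, 0, 0, 0, 3,     // preset 1
    -1, 2, 4, 2, -1,    // preset 2
};

static int presetBandGain(int preset, int band) {
    return kSoftPresets[band + preset * kNumBands];
}

int main() {
    assert(presetBandGain(1, 0) ==  3);
    assert(presetBandGain(2, 2) ==  4);
    assert(presetBandGain(0, 4) ==  0);
    return 0;
}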
+// +//------------------------------------------------------------------------- +const char * EqualizerGetPresetName(int32_t preset){ + //ALOGV("\tEqualizerGetPresetName start(%d)", preset); + if (preset == PRESET_CUSTOM) { + return "Custom"; + } else { + return gEqualizerPresets[preset].name; + } + //ALOGV("\tEqualizerGetPresetName end(%d)", preset); + return 0; +} + +//---------------------------------------------------------------------------- +// VolumeSetVolumeLevel() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// level level to be applied +// +//---------------------------------------------------------------------------- + +int VolumeSetVolumeLevel(EffectContext *pContext, int16_t level){ + + if (level > 0 || level < -9600) { + return -EINVAL; + } + + if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) { + pContext->pBundledContext->levelSaved = level / 100; + } else { + pContext->pBundledContext->volume = level / 100; + } + + LvmEffect_limitLevel(pContext); + + return 0; +} /* end VolumeSetVolumeLevel */ + +//---------------------------------------------------------------------------- +// VolumeGetVolumeLevel() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int VolumeGetVolumeLevel(EffectContext *pContext, int16_t *level){ + + if (pContext->pBundledContext->bMuteEnabled == LVM_TRUE) { + *level = pContext->pBundledContext->levelSaved * 100; + } else { + *level = pContext->pBundledContext->volume * 100; + } + return 0; +} /* end VolumeGetVolumeLevel */ + +//---------------------------------------------------------------------------- +// VolumeSetMute() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// mute: enable/disable flag +// +//---------------------------------------------------------------------------- + +int32_t VolumeSetMute(EffectContext *pContext, uint32_t mute){ + //ALOGV("\tVolumeSetMute start(%d)", mute); + + pContext->pBundledContext->bMuteEnabled = mute; + + /* Set appropriate volume level */ + if(pContext->pBundledContext->bMuteEnabled == LVM_TRUE){ + pContext->pBundledContext->levelSaved = pContext->pBundledContext->volume; + pContext->pBundledContext->volume = -96; + }else{ + pContext->pBundledContext->volume = pContext->pBundledContext->levelSaved; + } + + LvmEffect_limitLevel(pContext); + + return 0; +} /* end setMute */ + +//---------------------------------------------------------------------------- +// VolumeGetMute() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// +// Ourputs: +// mute: enable/disable flag +//---------------------------------------------------------------------------- + +int32_t VolumeGetMute(EffectContext *pContext, uint32_t *mute){ + //ALOGV("\tVolumeGetMute start"); + if((pContext->pBundledContext->bMuteEnabled == LVM_FALSE)|| + (pContext->pBundledContext->bMuteEnabled == LVM_TRUE)){ + *mute = pContext->pBundledContext->bMuteEnabled; + return 0; + }else{ + ALOGV("\tLVM_ERROR : VolumeGetMute read an invalid value from context %d", + pContext->pBundledContext->bMuteEnabled); + return -EINVAL; + } + //ALOGV("\tVolumeGetMute end"); +} /* end getMute */ + 
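Before the stereo-position helper that follows, a compact sketch of the volume/mute bookkeeping implemented by VolumeSetVolumeLevel and VolumeSetMute above: levels arrive in millibels and are stored as whole dB, and muting parks the active level in levelSaved while the applied volume drops to -96 dB. The struct and harness are illustrative, not patch code.

// Sketch of the mute/volume state handling in the Volume_* helpers.
#include <cassert>

struct VolState {
    int  volume     = 0;      // dB actually applied
    int  levelSaved = 0;      // dB remembered while muted
    bool muted      = false;
};

static void setLevelMb(VolState &s, int levelMb) {
    (s.muted ? s.levelSaved : s.volume) = levelMb / 100;   // millibels -> dB
}

static void setMute(VolState &s, bool mute) {
    s.muted = mute;
    if (mute) { s.levelSaved = s.volume; s.volume = -96; }
    else      { s.volume = s.levelSaved; }
}

int main() {
    VolState s;
    setLevelMb(s, -1200);                       // -12 dB requested
    setMute(s, true);
    assert(s.volume == -96 && s.levelSaved == -12);
    setLevelMb(s, -600);                        // a level set while muted is only parked
    setMute(s, false);
    assert(s.volume == -6);                     // unmute restores the parked level
    return 0;
}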
+int16_t VolumeConvertStereoPosition(int16_t position){ + int16_t convertedPosition = 0; + + convertedPosition = (int16_t)(((float)position/1000)*96); + return convertedPosition; + +} + +//---------------------------------------------------------------------------- +// VolumeSetStereoPosition() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// position: stereo position +// +// Outputs: +//---------------------------------------------------------------------------- + +int VolumeSetStereoPosition(EffectContext *pContext, int16_t position){ + + LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + LVM_INT16 Balance = 0; + + + + pContext->pBundledContext->positionSaved = position; + Balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved); + + //ALOGV("\tVolumeSetStereoPosition start pContext->pBundledContext->positionSaved = %d", + //pContext->pBundledContext->positionSaved); + + if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){ + + //ALOGV("\tVolumeSetStereoPosition Position to be set is %d %d\n", position, Balance); + pContext->pBundledContext->positionSaved = position; + /* Use the current control settings from Context structure */ + //ALOGV("\tVolumeSetStereoPosition curent VC_Balance: %d\n", pContext->pBundledContext->ActiveParams.VC_Balance); + + /* Volume parameters */ + pContext->pBundledContext->ActiveParams.VC_Balance = Balance; + //ALOGV("\tVolumeSetStereoPosition() (-96dB -> +96dB) -> %d\n", pContext->pBundledContext->ActiveParams.VC_Balance ); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "VolumeSetStereoPosition") + if(LvmStatus != LVM_SUCCESS) return -EINVAL; + + //ALOGV("\tVolumeSetStereoPosition Succesfully called Offload_SetEffect_ControlParameters\n"); + + } + else{ + //ALOGV("\tVolumeSetStereoPosition Position attempting to set, but not enabled %d %d\n", + //position, Balance); + } + //ALOGV("\tVolumeSetStereoPosition end pContext->pBundledContext->positionSaved = %d\n", + //pContext->pBundledContext->positionSaved); + return 0; +} /* end VolumeSetStereoPosition */ + + +//---------------------------------------------------------------------------- +// VolumeGetStereoPosition() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// position: stereo position +//---------------------------------------------------------------------------- + +int32_t VolumeGetStereoPosition(EffectContext *pContext, int16_t *position){ + //ALOGV("\tVolumeGetStereoPosition start"); + + LVM_ReturnStatus_en LvmStatus = LVM_SUCCESS; /* Function call status */ + LVM_INT16 balance; + + //ALOGV("\tVolumeGetStereoPosition start pContext->pBundledContext->positionSaved = %d", + //pContext->pBundledContext->positionSaved); + + //ALOGV("\tVolumeGetStereoPosition -> %d\n", pContext->pBundledContext->ActiveParams.VC_Balance); + //ALOGV("\tVolumeGetStereoPosition Succesfully returned from LVM_GetControlParameters\n"); + + balance = VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved); + + if(pContext->pBundledContext->bStereoPositionEnabled == LVM_TRUE){ + if(balance != pContext->pBundledContext->ActiveParams.VC_Balance){ + return -EINVAL; + } + } + *position = 
(LVM_INT16)pContext->pBundledContext->positionSaved; // Convert dB to millibels + //ALOGV("\tVolumeGetStereoPosition end returning pContext->pBundledContext->positionSaved =%d\n", + //pContext->pBundledContext->positionSaved); + return 0; +} /* end VolumeGetStereoPosition */ + +//---------------------------------------------------------------------------- +// VolumeEnableStereoPosition() +//---------------------------------------------------------------------------- +// Purpose: +// +// Inputs: +// pContext: effect engine context +// mute: enable/disable flag +// +//---------------------------------------------------------------------------- + +int32_t VolumeEnableStereoPosition(EffectContext *pContext, uint32_t enabled){ + //ALOGV("\tVolumeEnableStereoPosition start()"); + + pContext->pBundledContext->bStereoPositionEnabled = enabled; + + LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + + /* Use the current control settings from context structure */ + //ALOGV("\tVolumeEnableStereoPosition to %d, position was %d\n", + // enabled, pContext->pBundledContext->ActiveParams.VC_Balance ); + + /* Set appropriate stereo position */ + if(pContext->pBundledContext->bStereoPositionEnabled == LVM_FALSE){ + pContext->pBundledContext->ActiveParams.VC_Balance = 0; + }else{ + pContext->pBundledContext->ActiveParams.VC_Balance = + VolumeConvertStereoPosition(pContext->pBundledContext->positionSaved); + } + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "VolumeEnableStereoPosition") + if(LvmStatus != LVM_SUCCESS) return -EINVAL; + + //ALOGV("\tVolumeEnableStereoPosition Succesfully called Offload_SetEffect_ControlParameters\n"); + //ALOGV("\tVolumeEnableStereoPosition end()\n"); + return 0; +} /* end VolumeEnableStereoPosition */ + +//---------------------------------------------------------------------------- +// BassBoost_getParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Get a BassBoost parameter +// +// Inputs: +// pBassBoost - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to variable to hold retrieved value +// pValueSize - pointer to value size: maximum size as input +// +// Outputs: +// *pValue updated with parameter value +// *pValueSize updated with actual value size +// +// +// Side Effects: +// +//---------------------------------------------------------------------------- + +int BassBoost_getParameter(EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue){ + int status = 0; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + int32_t param2; + char *name; + + //ALOGV("\tBassBoost_getParameter start"); + + switch (param){ + case BASSBOOST_PARAM_STRENGTH_SUPPORTED: + if (*pValueSize != sizeof(uint32_t)){ + ALOGV("\tLVM_ERROR : BassBoost_getParameter() invalid pValueSize1 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(uint32_t); + break; + case BASSBOOST_PARAM_STRENGTH: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : BassBoost_getParameter() invalid pValueSize2 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + + default: + ALOGV("\tLVM_ERROR : BassBoost_getParameter() invalid param %d", param); + return -EINVAL; + } + + switch (param){ + case BASSBOOST_PARAM_STRENGTH_SUPPORTED: + *(uint32_t *)pValue = 1; + + //ALOGV("\tBassBoost_getParameter() 
BASSBOOST_PARAM_STRENGTH_SUPPORTED Value is %d", + // *(uint32_t *)pValue); + break; + + case BASSBOOST_PARAM_STRENGTH: + *(int16_t *)pValue = BassGetStrength(pContext); + + //ALOGV("\tBassBoost_getParameter() BASSBOOST_PARAM_STRENGTH Value is %d", + // *(int16_t *)pValue); + break; + + default: + ALOGV("\tLVM_ERROR : BassBoost_getParameter() invalid param %d", param); + status = -EINVAL; + break; + } + + //ALOGV("\tBassBoost_getParameter end"); + return status; +} /* end BassBoost_getParameter */ + +//---------------------------------------------------------------------------- +// BassBoost_setParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Set a BassBoost parameter +// +// Inputs: +// pBassBoost - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to value +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int BassBoost_setParameter (EffectContext *pContext, void *pParam, void *pValue){ + int status = 0; + int16_t strength; + int32_t *pParamTemp = (int32_t *)pParam; + + //ALOGV("\tBassBoost_setParameter start"); + + switch (*pParamTemp){ + case BASSBOOST_PARAM_STRENGTH: + strength = *(int16_t *)pValue; + //ALOGV("\tBassBoost_setParameter() BASSBOOST_PARAM_STRENGTH value is %d", strength); + //ALOGV("\tBassBoost_setParameter() Calling pBassBoost->BassSetStrength"); + BassSetStrength(pContext, (int32_t)strength); + //ALOGV("\tBassBoost_setParameter() Called pBassBoost->BassSetStrength"); + break; + default: + ALOGV("\tLVM_ERROR : BassBoost_setParameter() invalid param %d", *pParamTemp); + break; + } + + //ALOGV("\tBassBoost_setParameter end"); + return status; +} /* end BassBoost_setParameter */ + +//---------------------------------------------------------------------------- +// Virtualizer_getParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Get a Virtualizer parameter +// +// Inputs: +// pVirtualizer - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to variable to hold retrieved value +// pValueSize - pointer to value size: maximum size as input +// +// Outputs: +// *pValue updated with parameter value +// *pValueSize updated with actual value size +// +// +// Side Effects: +// +//---------------------------------------------------------------------------- + +int Virtualizer_getParameter(EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue){ + int status = 0; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + char *name; + + //ALOGV("\tVirtualizer_getParameter start"); + + switch (param){ + case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED: + if (*pValueSize != sizeof(uint32_t)){ + ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid pValueSize %d",*pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(uint32_t); + break; + case VIRTUALIZER_PARAM_STRENGTH: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid pValueSize2 %d",*pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: + // return value size can only be interpreted as relative to input value, + // deferring validity check to below + break; + case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE: + if (*pValueSize != sizeof(uint32_t)){ + ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid pValueSize %d",*pValueSize); + return -EINVAL; + } + 
*pValueSize = sizeof(uint32_t); + break; + default: + ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid param %d", param); + return -EINVAL; + } + + switch (param){ + case VIRTUALIZER_PARAM_STRENGTH_SUPPORTED: + *(uint32_t *)pValue = 1; + + //ALOGV("\tVirtualizer_getParameter() VIRTUALIZER_PARAM_STRENGTH_SUPPORTED Value is %d", + // *(uint32_t *)pValue); + break; + + case VIRTUALIZER_PARAM_STRENGTH: + *(int16_t *)pValue = VirtualizerGetStrength(pContext); + + //ALOGV("\tVirtualizer_getParameter() VIRTUALIZER_PARAM_STRENGTH Value is %d", + // *(int16_t *)pValue); + break; + + case VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES: { + const audio_channel_mask_t channelMask = (audio_channel_mask_t) *pParamTemp++; + const audio_devices_t deviceType = (audio_devices_t) *pParamTemp; + uint32_t nbChannels = audio_channel_count_from_out_mask(channelMask); + if (*pValueSize < 3 * nbChannels * sizeof(int32_t)){ + ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid pValueSize %d",*pValueSize); + return -EINVAL; + } + // verify the configuration is supported + status = VirtualizerIsConfigurationSupported(channelMask, deviceType); + if (status == 0) { + ALOGV("VIRTUALIZER_PARAM_VIRTUAL_SPEAKER_ANGLES supports mask=0x%x device=0x%x", + channelMask, deviceType); + // configuration is supported, get the angles + VirtualizerGetSpeakerAngles(channelMask, deviceType, (int32_t *)pValue); + } + } + break; + + case VIRTUALIZER_PARAM_VIRTUALIZATION_MODE: + *(uint32_t *)pValue = (uint32_t) VirtualizerGetVirtualizationMode(pContext); + break; + + default: + ALOGV("\tLVM_ERROR : Virtualizer_getParameter() invalid param %d", param); + status = -EINVAL; + break; + } + + ALOGV("\tVirtualizer_getParameter end returning status=%d", status); + return status; +} /* end Virtualizer_getParameter */ + +//---------------------------------------------------------------------------- +// Virtualizer_setParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Set a Virtualizer parameter +// +// Inputs: +// pVirtualizer - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to value +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int Virtualizer_setParameter (EffectContext *pContext, void *pParam, void *pValue){ + int status = 0; + int16_t strength; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + + //ALOGV("\tVirtualizer_setParameter start"); + + switch (param){ + case VIRTUALIZER_PARAM_STRENGTH: + strength = *(int16_t *)pValue; + //ALOGV("\tVirtualizer_setParameter() VIRTUALIZER_PARAM_STRENGTH value is %d", strength); + //ALOGV("\tVirtualizer_setParameter() Calling pVirtualizer->setStrength"); + VirtualizerSetStrength(pContext, (int32_t)strength); + ALOGV("\tVirtualizer_setParameter() Called pVirtualizer->setStrength"); + break; + + case VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE: { + const audio_devices_t deviceType = *(audio_devices_t *) pValue; + status = VirtualizerForceVirtualizationMode(pContext, deviceType); + //ALOGV("VIRTUALIZER_PARAM_FORCE_VIRTUALIZATION_MODE device=0x%x result=%d", + // deviceType, status); + } + break; + + default: + ALOGV("\tLVM_ERROR : Virtualizer_setParameter() invalid param %d", param); + break; + } + + //ALOGV("\tVirtualizer_setParameter end"); + return status; +} /* end Virtualizer_setParameter */ + +//---------------------------------------------------------------------------- +// Equalizer_getParameter() 
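The BassBoost, Virtualizer and Equalizer getters in this file are all reached through the same effect_param_t framing that Effect_command() unpacks later in the file (psize, vsize, then a value area rounded up to a 32-bit boundary). A minimal client-side sketch for the bass-boost strength query, assuming <hardware/audio_effect.h> and <audio_effects/effect_bassboost.h> plus an already created effect_handle_t; the function name and buffer sizing are illustrative only and are not code from this patch:

#include <stdint.h>
#include <hardware/audio_effect.h>
#include <audio_effects/effect_bassboost.h>

static int16_t get_bassboost_strength(effect_handle_t fx)
{
    /* one 32-bit parameter id plus a 32-bit-aligned value slot */
    uint32_t buf[(sizeof(effect_param_t) / sizeof(uint32_t)) + 2];
    effect_param_t *p = (effect_param_t *)buf;
    uint32_t replySize = sizeof(buf);

    p->psize = sizeof(int32_t);                    /* parameter id size        */
    p->vsize = sizeof(int16_t);                    /* strength is an int16_t   */
    *(int32_t *)p->data = BASSBOOST_PARAM_STRENGTH;

    /* the command buffer doubles as the reply buffer */
    if ((*fx)->command(fx, EFFECT_CMD_GET_PARAM,
                       sizeof(effect_param_t) + p->psize, p,
                       &replySize, p) != 0)
        return 0;

    /* value area starts after psize rounded up to a 32-bit boundary */
    return *(int16_t *)(p->data + sizeof(int32_t));
}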
+//---------------------------------------------------------------------------- +// Purpose: +// Get a Equalizer parameter +// +// Inputs: +// pEqualizer - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to variable to hold retrieved value +// pValueSize - pointer to value size: maximum size as input +// +// Outputs: +// *pValue updated with parameter value +// *pValueSize updated with actual value size +// +// +// Side Effects: +// +//---------------------------------------------------------------------------- +int Equalizer_getParameter(EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue){ + int status = 0; + int bMute = 0; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + int32_t param2; + char *name; + + //ALOGV("\tEqualizer_getParameter start"); + + switch (param) { + case EQ_PARAM_NUM_BANDS: + case EQ_PARAM_CUR_PRESET: + case EQ_PARAM_GET_NUM_OF_PRESETS: + case EQ_PARAM_BAND_LEVEL: + case EQ_PARAM_GET_BAND: + if (*pValueSize < sizeof(int16_t)) { + ALOGV("\tLVM_ERROR : Equalizer_getParameter() invalid pValueSize 1 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + + case EQ_PARAM_LEVEL_RANGE: + if (*pValueSize < 2 * sizeof(int16_t)) { + ALOGV("\tLVM_ERROR : Equalizer_getParameter() invalid pValueSize 2 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = 2 * sizeof(int16_t); + break; + case EQ_PARAM_BAND_FREQ_RANGE: + if (*pValueSize < 2 * sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Equalizer_getParameter() invalid pValueSize 3 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = 2 * sizeof(int32_t); + break; + + case EQ_PARAM_CENTER_FREQ: + if (*pValueSize < sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Equalizer_getParameter() invalid pValueSize 5 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int32_t); + break; + + case EQ_PARAM_GET_PRESET_NAME: + break; + + case EQ_PARAM_PROPERTIES: + if (*pValueSize < (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t)) { + ALOGV("\tLVM_ERROR : Equalizer_getParameter() invalid pValueSize 1 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = (2 + FIVEBAND_NUMBANDS) * sizeof(uint16_t); + break; + + default: + ALOGV("\tLVM_ERROR : Equalizer_getParameter unknown param %d", param); + return -EINVAL; + } + + switch (param) { + case EQ_PARAM_NUM_BANDS: + *(uint16_t *)pValue = (uint16_t)FIVEBAND_NUMBANDS; + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_NUM_BANDS %d", *(int16_t *)pValue); + break; + + case EQ_PARAM_LEVEL_RANGE: + *(int16_t *)pValue = -1500; + *((int16_t *)pValue + 1) = 1500; + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_LEVEL_RANGE min %d, max %d", + // *(int16_t *)pValue, *((int16_t *)pValue + 1)); + break; + + case EQ_PARAM_BAND_LEVEL: + param2 = *pParamTemp; + if (param2 >= FIVEBAND_NUMBANDS) { + status = -EINVAL; + break; + } + *(int16_t *)pValue = (int16_t)EqualizerGetBandLevel(pContext, param2); + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_BAND_LEVEL band %d, level %d", + // param2, *(int32_t *)pValue); + break; + + case EQ_PARAM_CENTER_FREQ: + param2 = *pParamTemp; + if (param2 >= FIVEBAND_NUMBANDS) { + status = -EINVAL; + break; + } + *(int32_t *)pValue = EqualizerGetCentreFrequency(pContext, param2); + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_CENTER_FREQ band %d, frequency %d", + // param2, *(int32_t *)pValue); + break; + + case EQ_PARAM_BAND_FREQ_RANGE: + param2 = *pParamTemp; + if (param2 >= FIVEBAND_NUMBANDS) { + status = -EINVAL; + break; + } + EqualizerGetBandFreqRange(pContext, 
param2, (uint32_t *)pValue, ((uint32_t *)pValue + 1)); + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_BAND_FREQ_RANGE band %d, min %d, max %d", + // param2, *(int32_t *)pValue, *((int32_t *)pValue + 1)); + break; + + case EQ_PARAM_GET_BAND: + param2 = *pParamTemp; + *(uint16_t *)pValue = (uint16_t)EqualizerGetBand(pContext, param2); + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_GET_BAND frequency %d, band %d", + // param2, *(uint16_t *)pValue); + break; + + case EQ_PARAM_CUR_PRESET: + *(uint16_t *)pValue = (uint16_t)EqualizerGetPreset(pContext); + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_CUR_PRESET %d", *(int32_t *)pValue); + break; + + case EQ_PARAM_GET_NUM_OF_PRESETS: + *(uint16_t *)pValue = (uint16_t)EqualizerGetNumPresets(); + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_GET_NUM_OF_PRESETS %d", *(int16_t *)pValue); + break; + + case EQ_PARAM_GET_PRESET_NAME: + param2 = *pParamTemp; + if (param2 >= EqualizerGetNumPresets()) { + //if (param2 >= 20) { // AGO FIX + status = -EINVAL; + break; + } + name = (char *)pValue; + strncpy(name, EqualizerGetPresetName(param2), *pValueSize - 1); + name[*pValueSize - 1] = 0; + *pValueSize = strlen(name) + 1; + //ALOGV("\tEqualizer_getParameter() EQ_PARAM_GET_PRESET_NAME preset %d, name %s len %d", + // param2, gEqualizerPresets[param2].name, *pValueSize); + break; + + case EQ_PARAM_PROPERTIES: { + int16_t *p = (int16_t *)pValue; + ALOGV("\tEqualizer_getParameter() EQ_PARAM_PROPERTIES"); + p[0] = (int16_t)EqualizerGetPreset(pContext); + p[1] = (int16_t)FIVEBAND_NUMBANDS; + for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { + p[2 + i] = (int16_t)EqualizerGetBandLevel(pContext, i); + } + } break; + + default: + ALOGV("\tLVM_ERROR : Equalizer_getParameter() invalid param %d", param); + status = -EINVAL; + break; + } + + //GV("\tEqualizer_getParameter end\n"); + return status; +} /* end Equalizer_getParameter */ + +//---------------------------------------------------------------------------- +// Equalizer_setParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Set a Equalizer parameter +// +// Inputs: +// pEqualizer - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to value +// +// Outputs: +// +//---------------------------------------------------------------------------- +int Equalizer_setParameter (EffectContext *pContext, void *pParam, void *pValue){ + int status = 0; + int32_t preset; + int32_t band; + int32_t level; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + + + //ALOGV("\tEqualizer_setParameter start"); + switch (param) { + case EQ_PARAM_CUR_PRESET: + preset = (int32_t)(*(uint16_t *)pValue); + + //ALOGV("\tEqualizer_setParameter() EQ_PARAM_CUR_PRESET %d", preset); + if ((preset >= EqualizerGetNumPresets())||(preset < 0)) { + status = -EINVAL; + break; + } + EqualizerSetPreset(pContext, preset); + break; + case EQ_PARAM_BAND_LEVEL: + band = *pParamTemp; + level = (int32_t)(*(int16_t *)pValue); + //ALOGV("\tEqualizer_setParameter() EQ_PARAM_BAND_LEVEL band %d, level %d", band, level); + if (band >= FIVEBAND_NUMBANDS) { + status = -EINVAL; + break; + } + EqualizerSetBandLevel(pContext, band, level); + break; + case EQ_PARAM_PROPERTIES: { + //ALOGV("\tEqualizer_setParameter() EQ_PARAM_PROPERTIES"); + int16_t *p = (int16_t *)pValue; + if ((int)p[0] >= EqualizerGetNumPresets()) { + status = -EINVAL; + break; + } + if (p[0] >= 0) { + EqualizerSetPreset(pContext, (int)p[0]); + } else { + if ((int)p[1] != FIVEBAND_NUMBANDS) { + status = 
-EINVAL; + break; + } + for (int i = 0; i < FIVEBAND_NUMBANDS; i++) { + EqualizerSetBandLevel(pContext, i, (int)p[2 + i]); + } + } + } break; + default: + ALOGV("\tLVM_ERROR : Equalizer_setParameter() invalid param %d", param); + status = -EINVAL; + break; + } + + //ALOGV("\tEqualizer_setParameter end"); + return status; +} /* end Equalizer_setParameter */ + +//---------------------------------------------------------------------------- +// Volume_getParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Get a Volume parameter +// +// Inputs: +// pVolume - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to variable to hold retrieved value +// pValueSize - pointer to value size: maximum size as input +// +// Outputs: +// *pValue updated with parameter value +// *pValueSize updated with actual value size +// +// +// Side Effects: +// +//---------------------------------------------------------------------------- + +int Volume_getParameter(EffectContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue){ + int status = 0; + int bMute = 0; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++;; + char *name; + + //ALOGV("\tVolume_getParameter start"); + + switch (param){ + case VOLUME_PARAM_LEVEL: + case VOLUME_PARAM_MAXLEVEL: + case VOLUME_PARAM_STEREOPOSITION: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Volume_getParameter() invalid pValueSize 1 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + + case VOLUME_PARAM_MUTE: + case VOLUME_PARAM_ENABLESTEREOPOSITION: + if (*pValueSize < sizeof(int32_t)){ + ALOGV("\tLVM_ERROR : Volume_getParameter() invalid pValueSize 2 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int32_t); + break; + + default: + ALOGV("\tLVM_ERROR : Volume_getParameter unknown param %d", param); + return -EINVAL; + } + + switch (param){ + case VOLUME_PARAM_LEVEL: + status = VolumeGetVolumeLevel(pContext, (int16_t *)(pValue)); + //ALOGV("\tVolume_getParameter() VOLUME_PARAM_LEVEL Value is %d", + // *(int16_t *)pValue); + break; + + case VOLUME_PARAM_MAXLEVEL: + *(int16_t *)pValue = 0; + //ALOGV("\tVolume_getParameter() VOLUME_PARAM_MAXLEVEL Value is %d", + // *(int16_t *)pValue); + break; + + case VOLUME_PARAM_STEREOPOSITION: + VolumeGetStereoPosition(pContext, (int16_t *)pValue); + //ALOGV("\tVolume_getParameter() VOLUME_PARAM_STEREOPOSITION Value is %d", + // *(int16_t *)pValue); + break; + + case VOLUME_PARAM_MUTE: + status = VolumeGetMute(pContext, (uint32_t *)pValue); + ALOGV("\tVolume_getParameter() VOLUME_PARAM_MUTE Value is %d", + *(uint32_t *)pValue); + break; + + case VOLUME_PARAM_ENABLESTEREOPOSITION: + *(int32_t *)pValue = pContext->pBundledContext->bStereoPositionEnabled; + //ALOGV("\tVolume_getParameter() VOLUME_PARAM_ENABLESTEREOPOSITION Value is %d", + // *(uint32_t *)pValue); + break; + + default: + ALOGV("\tLVM_ERROR : Volume_getParameter() invalid param %d", param); + status = -EINVAL; + break; + } + + //ALOGV("\tVolume_getParameter end"); + return status; +} /* end Volume_getParameter */ + + +//---------------------------------------------------------------------------- +// Volume_setParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Set a Volume parameter +// +// Inputs: +// pVolume - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to value +// +// Outputs: +// 
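Volume_getParameter() above reports VOLUME_PARAM_LEVEL and VOLUME_PARAM_MAXLEVEL in millibels (1 mB = 1/100 dB), with 0 mB as the maximum and -96 dB used elsewhere in this file as the effective floor. A small illustrative conversion from a linear gain to such a level; the helper name and clamping policy are assumptions of this sketch, not code from the patch:

#include <math.h>
#include <stdint.h>

/* linear gain -> level in millibels: 100 * 20 * log10(gain) */
static int16_t linear_gain_to_millibel(float gain)
{
    if (gain <= 0.0f)
        return -9600;                 /* clamp silence to the -96 dB floor */
    float mB = 2000.0f * log10f(gain);
    if (mB > 0.0f)
        mB = 0.0f;                    /* maximum level reported here is 0 mB */
    return (int16_t)lrintf(mB);
}

For example, a gain of 0.5 lands near -602 mB, i.e. roughly -6 dB.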
+//---------------------------------------------------------------------------- + +int Volume_setParameter (EffectContext *pContext, void *pParam, void *pValue){ + int status = 0; + int16_t level; + int16_t position; + uint32_t mute; + uint32_t positionEnabled; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + + //ALOGV("\tVolume_setParameter start"); + + switch (param){ + case VOLUME_PARAM_LEVEL: + level = *(int16_t *)pValue; + //ALOGV("\tVolume_setParameter() VOLUME_PARAM_LEVEL value is %d", level); + //ALOGV("\tVolume_setParameter() Calling pVolume->setVolumeLevel"); + status = VolumeSetVolumeLevel(pContext, (int16_t)level); + //ALOGV("\tVolume_setParameter() Called pVolume->setVolumeLevel"); + break; + + case VOLUME_PARAM_MUTE: + mute = *(uint32_t *)pValue; + //ALOGV("\tVolume_setParameter() Calling pVolume->setMute, mute is %d", mute); + //ALOGV("\tVolume_setParameter() Calling pVolume->setMute"); + status = VolumeSetMute(pContext, mute); + //ALOGV("\tVolume_setParameter() Called pVolume->setMute"); + break; + + case VOLUME_PARAM_ENABLESTEREOPOSITION: + positionEnabled = *(uint32_t *)pValue; + status = VolumeEnableStereoPosition(pContext, positionEnabled); + status = VolumeSetStereoPosition(pContext, pContext->pBundledContext->positionSaved); + //ALOGV("\tVolume_setParameter() VOLUME_PARAM_ENABLESTEREOPOSITION called"); + break; + + case VOLUME_PARAM_STEREOPOSITION: + position = *(int16_t *)pValue; + //ALOGV("\tVolume_setParameter() VOLUME_PARAM_STEREOPOSITION value is %d", position); + //ALOGV("\tVolume_setParameter() Calling pVolume->VolumeSetStereoPosition"); + status = VolumeSetStereoPosition(pContext, (int16_t)position); + //ALOGV("\tVolume_setParameter() Called pVolume->VolumeSetStereoPosition"); + break; + + default: + ALOGV("\tLVM_ERROR : Volume_setParameter() invalid param %d", param); + break; + } + + //ALOGV("\tVolume_setParameter end"); + return status; +} /* end Volume_setParameter */ + +/**************************************************************************************** + * Name : LVC_ToDB_s32Tos16() + * Input : Signed 32-bit integer + * Output : Signed 16-bit integer + * MSB (16) = sign bit + * (15->05) = integer part + * (04->01) = decimal part + * Returns : Db value with respect to full scale + * Description : + * Remarks : + ****************************************************************************************/ + +LVM_INT16 LVC_ToDB_s32Tos16(LVM_INT32 Lin_fix) +{ + LVM_INT16 db_fix; + LVM_INT16 Shift; + LVM_INT16 SmallRemainder; + LVM_UINT32 Remainder = (LVM_UINT32)Lin_fix; + + /* Count leading bits, 1 cycle in assembly*/ + for (Shift = 0; Shift<32; Shift++) + { + if ((Remainder & 0x80000000U)!=0) + { + break; + } + Remainder = Remainder << 1; + } + + /* + * Based on the approximation equation (for Q11.4 format): + * + * dB = -96 * Shift + 16 * (8 * Remainder - 2 * Remainder^2) + */ + db_fix = (LVM_INT16)(-96 * Shift); /* Six dB steps in Q11.4 format*/ + SmallRemainder = (LVM_INT16)((Remainder & 0x7fffffff) >> 24); + db_fix = (LVM_INT16)(db_fix + SmallRemainder ); + SmallRemainder = (LVM_INT16)(SmallRemainder * SmallRemainder); + db_fix = (LVM_INT16)(db_fix - (LVM_INT16)((LVM_UINT16)SmallRemainder >> 9)); + + /* Correct for small offset */ + db_fix = (LVM_INT16)(db_fix - 5); + + return db_fix; +} + +//---------------------------------------------------------------------------- +// Effect_setEnabled() +//---------------------------------------------------------------------------- +// Purpose: +// Enable or disable effect +// +// 
Inputs: +// pContext - pointer to effect context +// enabled - true if enabling the effect, false otherwise +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int Effect_setEnabled(EffectContext *pContext, bool enabled) +{ + ALOGV("\tEffect_setEnabled() type %d, enabled %d", pContext->EffectType, enabled); + + if (enabled) { + // Bass boost or Virtualizer can be temporarily disabled if playing over device speaker due + // to their nature. + bool tempDisabled = false; + switch (pContext->EffectType) { + case LVM_BASS_BOOST: + if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) { + ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already enabled"); + return -EINVAL; + } + if(pContext->pBundledContext->SamplesToExitCountBb <= 0){ + pContext->pBundledContext->NumberEffectsEnabled++; + } + pContext->pBundledContext->SamplesToExitCountBb = + (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1); + pContext->pBundledContext->bBassEnabled = LVM_TRUE; + tempDisabled = pContext->pBundledContext->bBassTempDisabled; + break; + case LVM_EQUALIZER: + if (pContext->pBundledContext->bEqualizerEnabled == LVM_TRUE) { + ALOGV("\tEffect_setEnabled() LVM_EQUALIZER is already enabled"); + return -EINVAL; + } + if(pContext->pBundledContext->SamplesToExitCountEq <= 0){ + pContext->pBundledContext->NumberEffectsEnabled++; + } + pContext->pBundledContext->SamplesToExitCountEq = + (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1); + pContext->pBundledContext->bEqualizerEnabled = LVM_TRUE; + break; + case LVM_VIRTUALIZER: + if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) { + ALOGV("\tEffect_setEnabled() LVM_VIRTUALIZER is already enabled"); + return -EINVAL; + } + if(pContext->pBundledContext->SamplesToExitCountVirt <= 0){ + pContext->pBundledContext->NumberEffectsEnabled++; + } + pContext->pBundledContext->SamplesToExitCountVirt = + (LVM_INT32)(pContext->pBundledContext->SamplesPerSecond*0.1); + pContext->pBundledContext->bVirtualizerEnabled = LVM_TRUE; + tempDisabled = pContext->pBundledContext->bVirtualizerTempDisabled; + break; + case LVM_VOLUME: + if (pContext->pBundledContext->bVolumeEnabled == LVM_TRUE) { + ALOGV("\tEffect_setEnabled() LVM_VOLUME is already enabled"); + return -EINVAL; + } + pContext->pBundledContext->NumberEffectsEnabled++; + pContext->pBundledContext->bVolumeEnabled = LVM_TRUE; + break; + default: + ALOGV("\tEffect_setEnabled() invalid effect type"); + return -EINVAL; + } + if (!tempDisabled) { + LvmEffect_enable(pContext); + } + } else { + switch (pContext->EffectType) { + case LVM_BASS_BOOST: + if (pContext->pBundledContext->bBassEnabled == LVM_FALSE) { + ALOGV("\tEffect_setEnabled() LVM_BASS_BOOST is already disabled"); + return -EINVAL; + } + pContext->pBundledContext->bBassEnabled = LVM_FALSE; + break; + case LVM_EQUALIZER: + if (pContext->pBundledContext->bEqualizerEnabled == LVM_FALSE) { + ALOGV("\tEffect_setEnabled() LVM_EQUALIZER is already disabled"); + return -EINVAL; + } + pContext->pBundledContext->bEqualizerEnabled = LVM_FALSE; + break; + case LVM_VIRTUALIZER: + if (pContext->pBundledContext->bVirtualizerEnabled == LVM_FALSE) { + ALOGV("\tEffect_setEnabled() LVM_VIRTUALIZER is already disabled"); + return -EINVAL; + } + pContext->pBundledContext->bVirtualizerEnabled = LVM_FALSE; + break; + case LVM_VOLUME: + if (pContext->pBundledContext->bVolumeEnabled == LVM_FALSE) { + ALOGV("\tEffect_setEnabled() LVM_VOLUME is already disabled"); + return -EINVAL; + } + 
pContext->pBundledContext->bVolumeEnabled = LVM_FALSE; + break; + default: + ALOGV("\tEffect_setEnabled() invalid effect type"); + return -EINVAL; + } + LvmEffect_disable(pContext); + } + + return 0; +} + +//---------------------------------------------------------------------------- +// LVC_Convert_VolToDb() +//---------------------------------------------------------------------------- +// Purpose: +// Convery volume in Q24 to dB +// +// Inputs: +// vol: Q.24 volume dB +// +//----------------------------------------------------------------------- + +int16_t LVC_Convert_VolToDb(uint32_t vol){ + int16_t dB; + + dB = LVC_ToDB_s32Tos16(vol <<7); + dB = (dB +8)>>4; + dB = (dB <-96) ? -96 : dB ; + + return dB; +} + +} // namespace +} // namespace + +extern "C" { +/* Effect Control Interface Implementation: Process */ +int Effect_process(effect_handle_t self, + audio_buffer_t *inBuffer __unused, + audio_buffer_t *outBuffer __unused){ + EffectContext * pContext = (EffectContext *) self; + int status = 0; + return status; +} /* end Effect_process */ + +/* Effect Control Interface Implementation: Command */ +int Effect_command(effect_handle_t self, + uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData){ + EffectContext * pContext = (EffectContext *) self; + int retsize; + + //ALOGV("\t\nEffect_command start"); + + if(pContext->EffectType == LVM_BASS_BOOST){ + //ALOGV("\tEffect_command setting command for LVM_BASS_BOOST"); + } + if(pContext->EffectType == LVM_VIRTUALIZER){ + //ALOGV("\tEffect_command setting command for LVM_VIRTUALIZER"); + } + if(pContext->EffectType == LVM_EQUALIZER){ + //ALOGV("\tEffect_command setting command for LVM_EQUALIZER"); + } + if(pContext->EffectType == LVM_VOLUME){ + //ALOGV("\tEffect_command setting command for LVM_VOLUME"); + } + + if (pContext == NULL){ + ALOGV("\tLVM_ERROR : Effect_command ERROR pContext == NULL"); + return -EINVAL; + } + + ALOGV("\tEffect_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize); + + // Incase we disable an effect, next time process is + // called the number of effect called could be greater + // pContext->pBundledContext->NumberEffectsCalled = 0; + + //ALOGV("\tEffect_command NumberEffectsCalled = %d, NumberEffectsEnabled = %d", + // pContext->pBundledContext->NumberEffectsCalled, + // pContext->pBundledContext->NumberEffectsEnabled); + + switch (cmdCode){ + case EFFECT_CMD_INIT: + if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){ + ALOGV("\tLVM_ERROR, EFFECT_CMD_INIT: ERROR for effect type %d", + pContext->EffectType); + return -EINVAL; + } + *(int *) pReplyData = 0; + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT start"); + if(pContext->EffectType == LVM_BASS_BOOST){ + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_BASS_BOOST"); + android::BassSetStrength(pContext, 0); + } + if(pContext->EffectType == LVM_VIRTUALIZER){ + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VIRTUALIZER"); + android::VirtualizerSetStrength(pContext, 0); + } + if(pContext->EffectType == LVM_EQUALIZER){ + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_EQUALIZER"); + android::EqualizerSetPreset(pContext, 0); + } + if(pContext->EffectType == LVM_VOLUME){ + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_INIT for LVM_VOLUME"); + *(int *) pReplyData = android::VolumeSetVolumeLevel(pContext, 0); + } + break; + + case EFFECT_CMD_SET_CONFIG: + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG start"); + if 
(pCmdData == NULL || cmdSize != sizeof(effect_config_t) || + pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) { + ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: " + "EFFECT_CMD_SET_CONFIG: ERROR"); + return -EINVAL; + } + *(int *) pReplyData = android::Effect_setConfig(pContext, (effect_config_t *) pCmdData); + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_CONFIG end"); + break; + + case EFFECT_CMD_GET_CONFIG: + if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) { + ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: " + "EFFECT_CMD_GET_CONFIG: ERROR"); + return -EINVAL; + } + + android::Effect_getConfig(pContext, (effect_config_t *)pReplyData); + break; + + case EFFECT_CMD_RESET: + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET start"); + android::Effect_setConfig(pContext, &pContext->config); + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_RESET end"); + break; + + case EFFECT_CMD_GET_PARAM:{ + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM start"); + + effect_param_t *p = (effect_param_t *)pCmdData; + + if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) || + cmdSize < (sizeof(effect_param_t) + p->psize) || + pReplyData == NULL || replySize == NULL || + *replySize < (sizeof(effect_param_t) + p->psize)) { + ALOGV("\tLVM_ERROR : EFFECT_CMD_GET_PARAM: ERROR"); + return -EINVAL; + } + + memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize); + + p = (effect_param_t *)pReplyData; + + int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t); + + if(pContext->EffectType == LVM_BASS_BOOST){ + p->status = android::BassBoost_getParameter(pContext, + p->data, + &p->vsize, + p->data + voffset); + //ALOGV("\tBassBoost_command EFFECT_CMD_GET_PARAM " + // "*pCmdData %d, *replySize %d, *pReplyData %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset)); + } + + if(pContext->EffectType == LVM_VIRTUALIZER){ + p->status = android::Virtualizer_getParameter(pContext, + (void *)p->data, + &p->vsize, + p->data + voffset); + + //ALOGV("\tVirtualizer_command EFFECT_CMD_GET_PARAM " + // "*pCmdData %d, *replySize %d, *pReplyData %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset)); + } + if(pContext->EffectType == LVM_EQUALIZER){ + //ALOGV("\tEqualizer_command cmdCode Case: " + // "EFFECT_CMD_GET_PARAM start"); + p->status = android::Equalizer_getParameter(pContext, + p->data, + &p->vsize, + p->data + voffset); + + //ALOGV("\tEqualizer_command EFFECT_CMD_GET_PARAM *pCmdData %d, *replySize %d, " + // "*pReplyData %08x %08x", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), *replySize, + // *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset), + // *(int32_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset + + // sizeof(int32_t))); + } + if(pContext->EffectType == LVM_VOLUME){ + //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_GET_PARAM start"); + p->status = android::Volume_getParameter(pContext, + (void *)p->data, + &p->vsize, + p->data + voffset); + + //ALOGV("\tVolume_command EFFECT_CMD_GET_PARAM " + // "*pCmdData %d, *replySize %d, *pReplyData %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset)); + } + *replySize = sizeof(effect_param_t) + 
voffset + p->vsize; + + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_GET_PARAM end"); + } break; + case EFFECT_CMD_SET_PARAM:{ + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM start"); + if(pContext->EffectType == LVM_BASS_BOOST){ + //ALOGV("\tBassBoost_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t))); + + if (pCmdData == NULL || + cmdSize != (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) || + pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : BassBoost_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR"); + return -EINVAL; + } + effect_param_t *p = (effect_param_t *) pCmdData; + + if (p->psize != sizeof(int32_t)){ + ALOGV("\tLVM_ERROR : BassBoost_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)"); + return -EINVAL; + } + + //ALOGV("\tnBassBoost_command cmdSize is %d\n" + // "\tsizeof(effect_param_t) is %d\n" + // "\tp->psize is %d\n" + // "\tp->vsize is %d" + // "\n", + // cmdSize, sizeof(effect_param_t), p->psize, p->vsize ); + + *(int *)pReplyData = android::BassBoost_setParameter(pContext, + (void *)p->data, + p->data + p->psize); + } + if(pContext->EffectType == LVM_VIRTUALIZER){ + // Warning this log will fail to properly read an int32_t value, assumes int16_t + //ALOGV("\tVirtualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t))); + + if (pCmdData == NULL || + // legal parameters are int16_t or int32_t + cmdSize > (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int32_t)) || + cmdSize < (sizeof(effect_param_t) + sizeof(int32_t) +sizeof(int16_t)) || + pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Virtualizer_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR"); + return -EINVAL; + } + effect_param_t *p = (effect_param_t *) pCmdData; + + if (p->psize != sizeof(int32_t)){ + ALOGV("\tLVM_ERROR : Virtualizer_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)"); + return -EINVAL; + } + + //ALOGV("\tnVirtualizer_command cmdSize is %d\n" + // "\tsizeof(effect_param_t) is %d\n" + // "\tp->psize is %d\n" + // "\tp->vsize is %d" + // "\n", + // cmdSize, sizeof(effect_param_t), p->psize, p->vsize ); + + *(int *)pReplyData = android::Virtualizer_setParameter(pContext, + (void *)p->data, + p->data + p->psize); + } + if(pContext->EffectType == LVM_EQUALIZER){ + //ALOGV("\tEqualizer_command cmdCode Case: " + // "EFFECT_CMD_SET_PARAM start"); + //ALOGV("\tEqualizer_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t))); + + if (pCmdData == NULL || cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) || + pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Equalizer_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR"); + return -EINVAL; + } + effect_param_t *p = (effect_param_t *) pCmdData; + + *(int *)pReplyData = android::Equalizer_setParameter(pContext, + (void *)p->data, + p->data + p->psize); + } + if(pContext->EffectType == 
LVM_VOLUME){ + //ALOGV("\tVolume_command cmdCode Case: EFFECT_CMD_SET_PARAM start"); + //ALOGV("\tVolume_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) +sizeof(int32_t))); + + if (pCmdData == NULL || + cmdSize < (sizeof(effect_param_t) + sizeof(int32_t)) || + pReplyData == NULL || replySize == NULL || + *replySize != sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Volume_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR"); + return -EINVAL; + } + effect_param_t *p = (effect_param_t *) pCmdData; + + *(int *)pReplyData = android::Volume_setParameter(pContext, + (void *)p->data, + p->data + p->psize); + } + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_PARAM end"); + } break; + + case EFFECT_CMD_ENABLE: + ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_ENABLE start"); + if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) { + ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_ENABLE: ERROR"); + return -EINVAL; + } + + *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_TRUE); + break; + + case EFFECT_CMD_DISABLE: + //ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_DISABLE start"); + if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) { + ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_DISABLE: ERROR"); + return -EINVAL; + } + *(int *)pReplyData = android::Effect_setEnabled(pContext, LVM_FALSE); + break; + + case EFFECT_CMD_SET_DEVICE: + { + ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE start"); + if (pCmdData == NULL){ + ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: EFFECT_CMD_SET_DEVICE: ERROR"); + return -EINVAL; + } + + uint32_t device = *(uint32_t *)pCmdData; + pContext->pBundledContext->nOutputDevice = (audio_devices_t) device; + + if (pContext->EffectType == LVM_BASS_BOOST) { + if((device == AUDIO_DEVICE_OUT_SPEAKER) || + (device == AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT) || + (device == AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER)){ + ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_BASS_BOOST %d", + *(int32_t *)pCmdData); + ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_BAS_BOOST"); + + // If a device doesnt support bassboost the effect must be temporarily disabled + // the effect must still report its original state as this can only be changed + // by the ENABLE/DISABLE command + + if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) { + ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_BASS_BOOST %d", + *(int32_t *)pCmdData); + android::LvmEffect_disable(pContext); + } + pContext->pBundledContext->bBassTempDisabled = LVM_TRUE; + } else { + ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_BASS_BOOST %d", + *(int32_t *)pCmdData); + + // If a device supports bassboost and the effect has been temporarily disabled + // previously then re-enable it + + if (pContext->pBundledContext->bBassEnabled == LVM_TRUE) { + ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_BASS_BOOST %d", + *(int32_t *)pCmdData); + android::LvmEffect_enable(pContext); + } + pContext->pBundledContext->bBassTempDisabled = LVM_FALSE; + } + } + if (pContext->EffectType == LVM_VIRTUALIZER) { + if (pContext->pBundledContext->nVirtualizerForcedDevice == AUDIO_DEVICE_NONE) { + // default case unless configuration is forced + if (android::VirtualizerIsDeviceSupported(device) != 0) { + ALOGV("\tEFFECT_CMD_SET_DEVICE device is invalid for LVM_VIRTUALIZER %d", + 
*(int32_t *)pCmdData); + ALOGV("\tEFFECT_CMD_SET_DEVICE temporary disable LVM_VIRTUALIZER"); + + //If a device doesnt support virtualizer the effect must be temporarily + // disabled the effect must still report its original state as this can + // only be changed by the ENABLE/DISABLE command + + if (pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE) { + ALOGV("\tEFFECT_CMD_SET_DEVICE disable LVM_VIRTUALIZER %d", + *(int32_t *)pCmdData); + android::LvmEffect_disable(pContext); + } + pContext->pBundledContext->bVirtualizerTempDisabled = LVM_TRUE; + } else { + ALOGV("\tEFFECT_CMD_SET_DEVICE device is valid for LVM_VIRTUALIZER %d", + *(int32_t *)pCmdData); + + // If a device supports virtualizer and the effect has been temporarily + // disabled previously then re-enable it + + if(pContext->pBundledContext->bVirtualizerEnabled == LVM_TRUE){ + ALOGV("\tEFFECT_CMD_SET_DEVICE re-enable LVM_VIRTUALIZER %d", + *(int32_t *)pCmdData); + android::LvmEffect_enable(pContext); + } + pContext->pBundledContext->bVirtualizerTempDisabled = LVM_FALSE; + } + } // else virtualization mode is forced to a certain device, nothing to do + } + ALOGV("\tEffect_command cmdCode Case: EFFECT_CMD_SET_DEVICE end"); + break; + } + case EFFECT_CMD_SET_VOLUME: + { + uint32_t leftVolume, rightVolume; + int16_t leftdB, rightdB; + int16_t maxdB, pandB; + int32_t vol_ret[2] = {1<<24,1<<24}; // Apply no volume + int status = 0; + LVM_ReturnStatus_en LvmStatus=LVM_SUCCESS; /* Function call status */ + + // if pReplyData is NULL, VOL_CTRL is delegated to another effect + if(pReplyData == LVM_NULL){ + break; + } + + if (pCmdData == NULL || cmdSize != 2 * sizeof(uint32_t) || pReplyData == NULL || + replySize == NULL || *replySize < 2*sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Effect_command cmdCode Case: " + "EFFECT_CMD_SET_VOLUME: ERROR"); + return -EINVAL; + } + + leftVolume = ((*(uint32_t *)pCmdData)); + rightVolume = ((*((uint32_t *)pCmdData + 1))); + + if(leftVolume == 0x1000000){ + leftVolume -= 1; + } + if(rightVolume == 0x1000000){ + rightVolume -= 1; + } + + // Convert volume to dB + leftdB = android::LVC_Convert_VolToDb(leftVolume); + rightdB = android::LVC_Convert_VolToDb(rightVolume); + + pandB = rightdB - leftdB; + + // Calculate max volume in dB + maxdB = leftdB; + if(rightdB > maxdB){ + maxdB = rightdB; + } + //ALOGV("\tEFFECT_CMD_SET_VOLUME Session: %d, SessionID: %d VOLUME is %d dB (%d), " + // "effect is %d", + //pContext->pBundledContext->SessionNo, pContext->pBundledContext->SessionId, + //(int32_t)maxdB, maxVol<<7, pContext->EffectType); + //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left is %d, Right is %d", leftVolume, rightVolume); + //ALOGV("\tEFFECT_CMD_SET_VOLUME: Left %ddB, Right %ddB, Position %ddB", + // leftdB, rightdB, pandB); + + //FIXME CHECK againmemcpy(pReplyData, vol_ret, sizeof(int32_t)*2); + android::VolumeSetVolumeLevel(pContext, (int16_t)(maxdB*100)); + + /* Use the current control settings from Context structure */ + + /* Volume parameters */ + pContext->pBundledContext->ActiveParams.VC_Balance = pandB; + ALOGV("\t\tVolumeSetStereoPosition() (-96dB -> +96dB)-> %d\n", pContext->pBundledContext->ActiveParams.VC_Balance ); + + /* Activate the initial settings */ + LvmStatus = android::Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "Effect_command") + if(LvmStatus != LVM_SUCCESS) return -EINVAL; + break; + } + case EFFECT_CMD_SET_AUDIO_MODE: + break; + case EFFECT_CMD_OFFLOAD: + if (pCmdData == NULL || cmdSize != 
sizeof(effect_offload_param_t) + || pReplyData == NULL || *replySize != sizeof(uint32_t)) { + return -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmdCode); + } else { + effect_offload_param_t* offload_param = (effect_offload_param_t*)pCmdData; + + ALOGD("%s: Command(%u)= offload %d, output %d", __func__, cmdCode, offload_param->isOffload, offload_param->ioHandle); + + pContext->pBundledContext->OffloadEnabled = offload_param->isOffload; + if (pContext->pBundledContext->OutHandle == offload_param->ioHandle) { + ALOGV("%s: This context has same output %d", __func__, offload_param->ioHandle); + } else { + pContext->pBundledContext->OutHandle = offload_param->ioHandle; + } + *(int *)pReplyData = 0; + } + break; + default: + return -EINVAL; + } + + ALOGV("\tEffect_command end...\n\n"); + return 0; +} /* end Effect_command */ + +/* Effect Control Interface Implementation: get_descriptor */ +int Effect_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor) +{ + EffectContext * pContext = (EffectContext *) self; + const effect_descriptor_t *desc; + + if (pContext == NULL || pDescriptor == NULL) { + ALOGV("Effect_getDescriptor() invalid param"); + return -EINVAL; + } + + ALOGD("%s: called", __func__); + switch(pContext->EffectType) { + case LVM_BASS_BOOST: + desc = &android::gHwBassBoostDescriptor; + ALOGD("%s: called LVM_BASS_BOOST", __func__); + break; + case LVM_VIRTUALIZER: + desc = &android::gHwVirtualizerDescriptor; + ALOGD("%s: called LVM_VIRTUALIZER", __func__); + break; + case LVM_EQUALIZER: + desc = &android::gHwEqualizerDescriptor; + ALOGD("%s: called LVM_EQUALIZER", __func__); + break; + case LVM_VOLUME: + desc = &android::gHwVolumeDescriptor; + ALOGD("%s: called LVM_VOLUME", __func__); + break; + default: + return -EINVAL; + } + + *pDescriptor = *desc; + + return 0; +} /* end Effect_getDescriptor */ + +// effect_handle_t interface implementation for effect +const struct effect_interface_s gLvmHwEffectInterface = { + Effect_process, + Effect_command, + Effect_getDescriptor, + NULL, +}; /* end gLvmHwEffectInterface */ + +// This is the only symbol that needs to be exported +__attribute__ ((visibility ("default"))) +audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { + .tag = AUDIO_EFFECT_LIBRARY_TAG, + .version = EFFECT_LIBRARY_API_VERSION, + .name = "Effect Bundle hardware Library", + .implementor = "Samsung SystemLSI", + .create_effect = android::EffectHwCreate, + .release_effect = android::EffectHwRelease, + .get_descriptor = android::EffectHwGetDescriptor, +}; + +} diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.h b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.h new file mode 100644 index 0000000..150fa83 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Bundle/exynos_effectbundle.h @@ -0,0 +1,227 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_EFFECTBUNDLE_H_ +#define ANDROID_EFFECTBUNDLE_H_ + +#include +#include +#include +#include +#include +#include + +#if __cplusplus +extern "C" { +#endif + +#define FIVEBAND_NUMBANDS 5 +#define MAX_NUM_BANDS 5 +#define MAX_CALL_SIZE 256 +#define LVM_MAX_SESSIONS 32 +#define LVM_UNUSED_SESSION INT_MAX +#define BASS_BOOST_CUP_LOAD_ARM9E 150 // Expressed in 0.1 MIPS +#define VIRTUALIZER_CUP_LOAD_ARM9E 120 // Expressed in 0.1 MIPS +#define EQUALIZER_CUP_LOAD_ARM9E 220 // Expressed in 0.1 MIPS +#define VOLUME_CUP_LOAD_ARM9E 0 // Expressed in 0.1 MIPS +#define BUNDLE_MEM_USAGE 25 // Expressed in kB +//#define LVM_PCM + +#ifndef OPENSL_ES_H_ +static const effect_uuid_t SL_IID_VOLUME_ = { 0x09e8ede0, 0xddde, 0x11db, 0xb4f6, + { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }; +const effect_uuid_t * const SL_IID_VOLUME = &SL_IID_VOLUME_; +#endif //OPENSL_ES_H_ + +typedef enum +{ + LVM_BASS_BOOST, + LVM_VIRTUALIZER, + LVM_EQUALIZER, + LVM_VOLUME +} lvm_effect_en; + +// Preset configuration. +struct PresetConfig { + // Human-readable name. + const char * name; + // An array of size nBands where each element is a configuration for the + // corresponding band. + //const BandConfig * bandConfigs; +}; + +/* BundledEffectContext : One per session */ +struct BundledEffectContext{ + LVM_Handle_t hInstance; /* Instance handle */ + int SessionNo; /* Current session number */ + int SessionId; /* Current session id */ + bool bVolumeEnabled; /* Flag for Volume */ + bool bEqualizerEnabled; /* Flag for EQ */ + bool bBassEnabled; /* Flag for Bass */ + bool bBassTempDisabled; /* Flag for Bass to be re-enabled */ + bool bVirtualizerEnabled; /* Flag for Virtualizer */ + bool bVirtualizerTempDisabled; /* Flag for effect to be re-enabled */ + audio_devices_t nOutputDevice; /* Output device for the effect */ + audio_devices_t nVirtualizerForcedDevice; /* Forced device virtualization mode*/ + int NumberEffectsEnabled; /* Effects in this session */ + int NumberEffectsCalled; /* Effects called so far */ + bool firstVolume; /* No smoothing on first Vol change */ + // Saved parameters for each effect */ + // Bass Boost + int BassStrengthSaved; /* Conversion between Get/Set */ + // Equalizer + int CurPreset; /* Current preset being used */ + // Virtualzer + int VirtStrengthSaved; /* Conversion between Get/Set */ + // Volume + int levelSaved; /* for when mute is set, level must be saved */ + int positionSaved; + bool bMuteEnabled; /* Must store as mute = -96dB level */ + bool bStereoPositionEnabled; + LVM_Fs_en SampleRate; + int SamplesPerSecond; + int SamplesToExitCountEq; + int SamplesToExitCountBb; + int SamplesToExitCountVirt; + LVM_INT16 *workBuffer; + int frameCount; + int32_t bandGaindB[FIVEBAND_NUMBANDS]; + int volume; + #ifdef LVM_PCM + FILE *PcmInPtr; + FILE *PcmOutPtr; + #endif + LVM_ControlParams_t ActiveParams; + struct mixer *mixerHandle; + struct mixer_ctl *mixerCtl; + /* output io-handle to which the effect is attached to */ + audio_io_handle_t OutHandle; + bool OffloadEnabled; +}; + +/* SessionContext : One session */ +struct SessionContext{ + bool bBundledEffectsEnabled; + bool bVolumeInstantiated; + bool bEqualizerInstantiated; + bool bBassInstantiated; + bool bVirtualizerInstantiated; + BundledEffectContext *pBundledContext; +}; + +struct EffectContext{ + const struct effect_interface_s *itfe; + effect_config_t config; + lvm_effect_en EffectType; + BundledEffectContext *pBundledContext; +}; + + +/* enumerated parameter settings for Volume effect */ +typedef enum +{ + VOLUME_PARAM_LEVEL, // type 
SLmillibel = typedef SLuint16 (set & get) + VOLUME_PARAM_MAXLEVEL, // type SLmillibel = typedef SLuint16 (get) + VOLUME_PARAM_MUTE, // type SLboolean = typedef SLuint32 (set & get) + VOLUME_PARAM_ENABLESTEREOPOSITION, // type SLboolean = typedef SLuint32 (set & get) + VOLUME_PARAM_STEREOPOSITION, // type SLpermille = typedef SLuint16 (set & get) +} t_volume_params; + +static const int PRESET_CUSTOM = -1; + +static const uint32_t bandFreqRange[FIVEBAND_NUMBANDS][2] = { + {30000, 120000}, + {120001, 460000}, + {460001, 1800000}, + {1800001, 7000000}, + {7000001, 1}}; + +//Note: If these frequencies change, please update LimitLevel values accordingly. +static const LVM_UINT16 EQNB_5BandPresetsFrequencies[] = { + 60, /* Frequencies in Hz */ + 230, + 910, + 3600, + 14000}; + +static const LVM_UINT16 EQNB_5BandPresetsQFactors[] = { + 96, /* Q factor multiplied by 100 */ + 96, + 96, + 96, + 96}; + +static const LVM_INT16 EQNB_5BandNormalPresets[] = { + 3, 0, 0, 0, 3, /* Normal Preset */ + 8, 5, -3, 5, 6, /* Classical Preset */ + 15, -6, 7, 13, 10, /* Dance Preset */ + 0, 0, 0, 0, 0, /* Flat Preset */ + 6, -2, -2, 6, -3, /* Folk Preset */ + 8, -8, 13, -1, -4, /* Heavy Metal Preset */ + 10, 6, -4, 5, 8, /* Hip Hop Preset */ + 8, 5, -4, 5, 9, /* Jazz Preset */ + -6, 4, 9, 4, -5, /* Pop Preset */ + 10, 6, -1, 8, 10}; /* Rock Preset */ + +static const LVM_INT16 EQNB_5BandSoftPresets[] = { + 3, 0, 0, 0, 3, /* Normal Preset */ + 5, 3, -2, 4, 4, /* Classical Preset */ + 6, 0, 2, 4, 1, /* Dance Preset */ + 0, 0, 0, 0, 0, /* Flat Preset */ + 3, 0, 0, 2, -1, /* Folk Preset */ + 4, 1, 9, 3, 0, /* Heavy Metal Preset */ + 5, 3, 0, 1, 3, /* Hip Hop Preset */ + 4, 2, -2, 2, 5, /* Jazz Preset */ + -1, 2, 5, 1, -2, /* Pop Preset */ + 5, 3, -1, 3, 5}; /* Rock Preset */ + +static const PresetConfig gEqualizerPresets[] = { + {"Normal"}, + {"Classical"}, + {"Dance"}, + {"Flat"}, + {"Folk"}, + {"Heavy Metal"}, + {"Hip Hop"}, + {"Jazz"}, + {"Pop"}, + {"Rock"}}; + +/* The following tables have been computed using the actual levels measured by the output of + * white noise or pink noise (IEC268-1) for the EQ and BassBoost Effects. These are estimates of + * the actual energy that 'could' be present in the given band. + * If the frequency values in EQNB_5BandPresetsFrequencies change, these values might need to be + * updated. 
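Side note on units: bandFreqRange and the values exposed through EQ_PARAM_CENTER_FREQ / EQ_PARAM_BAND_FREQ_RANGE follow the milliHertz convention of effect_equalizer.h (so {30000, 120000} is the 30 Hz to 120 Hz band around the 60 Hz centre), while EQNB_5BandPresetsFrequencies above is kept in plain Hz, so lookups typically scale by 1000. A one-line illustration with a hypothetical helper, not code from this patch:

/* centre frequency of a band in milliHertz, the unit EQ_PARAM_CENTER_FREQ uses */
static int32_t centre_frequency_mHz(int band)
{
    return (int32_t)EQNB_5BandPresetsFrequencies[band] * 1000;   /* band 0 -> 60000 mHz */
}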
+ */ + +static const float LimitLevel_bandEnergyCoefficient[FIVEBAND_NUMBANDS] = { + 7.56, 9.69, 9.59, 7.37, 2.88}; + +static const float LimitLevel_bandEnergyCrossCoefficient[FIVEBAND_NUMBANDS-1] = { + 126.0, 115.0, 125.0, 104.0 }; + +static const float LimitLevel_bassBoostEnergyCrossCoefficient[FIVEBAND_NUMBANDS] = { + 221.21, 208.10, 28.16, 0.0, 0.0 }; + +static const float LimitLevel_bassBoostEnergyCoefficient = 7.12; + +static const float LimitLevel_virtualizerContribution = 1.9; + +#if __cplusplus +} // extern "C" +#endif + +#endif /*ANDROID_EFFECTBUNDLE_H_*/ diff --git a/libaudio/effecthal/postprocessing/aosp-effect/MODULE_LICENSE_APACHE2 b/libaudio/effecthal/postprocessing/aosp-effect/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 diff --git a/libaudio/effecthal/postprocessing/aosp-effect/NOTICE b/libaudio/effecthal/postprocessing/aosp-effect/NOTICE new file mode 100644 index 0000000..c5b1efa --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Reverb/LVREV.h b/libaudio/effecthal/postprocessing/aosp-effect/Reverb/LVREV.h new file mode 100644 index 0000000..28e3369 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Reverb/LVREV.h @@ -0,0 +1,312 @@ +/* + * Copyright (C) 2004-2010 NXP Software + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/****************************************************************************************/ +/* */ +/* Header file for the application layer interface of the LVREV module */ +/* */ +/* This files includes all definitions, types, structures and function prototypes */ +/* required by the calling layer. All other types, structures and functions are */ +/* private. 
*/ +/* */ +/****************************************************************************************/ + +#ifndef __LVREV_H__ +#define __LVREV_H__ + +#ifdef __cplusplus +extern "C" { +#endif /* __cplusplus */ + + +/****************************************************************************************/ +/* */ +/* Includes */ +/* */ +/****************************************************************************************/ +#include "LVM_Types.h" + + +/****************************************************************************************/ +/* */ +/* Definitions */ +/* */ +/****************************************************************************************/ +/* General */ +#define LVREV_BLOCKSIZE_MULTIPLE 1 /* Processing block size multiple */ +#define LVREV_MAX_T60 7000 /* Maximum decay time is 7000ms */ + +/* Memory table*/ +#define LVREV_NR_MEMORY_REGIONS 4 /* Number of memory regions */ + + +/****************************************************************************************/ +/* */ +/* Types */ +/* */ +/****************************************************************************************/ +/* Instance handle */ +typedef void *LVREV_Handle_t; + + +/* Status return values */ +typedef enum +{ + LVREV_SUCCESS = 0, /* Successful return from a routine */ + LVREV_NULLADDRESS = 1, /* NULL allocation address */ + LVREV_OUTOFRANGE = 2, /* Out of range control parameter */ + LVREV_INVALIDNUMSAMPLES = 3, /* Invalid number of samples */ + LVREV_RETURNSTATUS_DUMMY = LVM_MAXENUM +} LVREV_ReturnStatus_en; + + +/* Reverb delay lines */ +typedef enum +{ + LVREV_DELAYLINES_1 = 1, /* One delay line */ + LVREV_DELAYLINES_2 = 2, /* Two delay lines */ + LVREV_DELAYLINES_4 = 4, /* Four delay lines */ + LVREV_DELAYLINES_DUMMY = LVM_MAXENUM +} LVREV_NumDelayLines_en; + + +/****************************************************************************************/ +/* */ +/* Structures */ +/* */ +/****************************************************************************************/ + +/* Memory table containing the region definitions */ +typedef struct +{ + LVM_MemoryRegion_st Region[LVREV_NR_MEMORY_REGIONS]; /* One definition for each region */ +} LVREV_MemoryTable_st; + + +/* Control Parameter structure */ +typedef struct +{ + /* General parameters */ + LVM_Mode_en OperatingMode; /* Operating mode */ + LVM_Fs_en SampleRate; /* Sample rate */ + LVM_Format_en SourceFormat; /* Source data format */ + + /* Parameters for REV */ + LVM_UINT16 Level; /* Level, 0 to 100 representing percentage of reverb */ + LVM_UINT16 LPF; /* Low pass filter, in Hz */ + LVM_UINT16 HPF; /* High pass filter, in Hz */ + LVM_UINT16 T60; /* Decay time constant, in ms */ + LVM_UINT16 Density; /* Echo density, 0 to 100 for minimum to maximum density */ + LVM_UINT16 Damping; /* Damping */ + LVM_UINT16 RoomSize; /* Simulated room size, 1 to 100 for minimum to maximum size */ + +} LVREV_ControlParams_st; + + +/* Instance Parameter structure */ +typedef struct +{ + /* General */ + LVM_UINT16 MaxBlockSize; /* Maximum processing block size */ + + /* Reverb */ + LVM_Format_en SourceFormat; /* Source data formats to support */ + LVREV_NumDelayLines_en NumDelays; /* The number of delay lines, 1, 2 or 4 */ + +} LVREV_InstanceParams_st; + + +/****************************************************************************************/ +/* */ +/* Function Prototypes */ +/* */ +/****************************************************************************************/ + 
+/****************************************************************************************/ +/* */ +/* FUNCTION: LVREV_GetMemoryTable */ +/* */ +/* DESCRIPTION: */ +/* This function is used to obtain the LVREV module memory requirements to support */ +/* memory allocation. It can also be used to return the memory base address provided */ +/* during memory allocation to support freeing of memory when the LVREV module is no */ +/* longer required. It is called in two ways: */ +/* */ +/* hInstance = NULL Returns the memory requirements */ +/* hInstance = Instance handle Returns the memory requirements and allocated */ +/* base addresses. */ +/* */ +/* When this function is called with hInstance = NULL the memory base address pointers */ +/* will be NULL on return. */ +/* */ +/* When the function is called for freeing memory, hInstance = Instance Handle the */ +/* memory table returns the allocated memory and base addresses used during */ +/* initialisation. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance Handle */ +/* pMemoryTable Pointer to an empty memory table */ +/* pInstanceParams Pointer to the instance parameters */ +/* */ +/* RETURNS: */ +/* LVREV_SUCCESS Succeeded */ +/* LVREV_NULLADDRESS When pMemoryTable is NULL */ +/* LVREV_NULLADDRESS When requesting memory requirements and pInstanceParams */ +/* is NULL */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVREV_Process function */ +/* */ +/****************************************************************************************/ +LVREV_ReturnStatus_en LVREV_GetMemoryTable(LVREV_Handle_t hInstance, + LVREV_MemoryTable_st *pMemoryTable, + LVREV_InstanceParams_st *pInstanceParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVREV_GetInstanceHandle */ +/* */ +/* DESCRIPTION: */ +/* This function is used to create a LVREV module instance. It returns the created */ +/* instance handle through phInstance. All parameters are set to invalid values, the */ +/* LVREV_SetControlParameters function must be called with a set of valid control */ +/* parameters before the LVREV_Process function can be called. */ +/* */ +/* The memory allocation must be provided by the application by filling in the memory */ +/* region base addresses in the memory table before calling this function. */ +/* */ +/* PARAMETERS: */ +/* phInstance Pointer to the instance handle */ +/* pMemoryTable Pointer to the memory definition table */ +/* pInstanceParams Pointer to the instance parameters */ +/* */ +/* RETURNS: */ +/* LVREV_SUCCESS Succeeded */ +/* LVREV_NULLADDRESS When phInstance or pMemoryTable or pInstanceParams is NULL */ +/* LVREV_NULLADDRESS When one of the memory regions has a NULL pointer */ +/* */ +/* NOTES: */ +/* */ +/****************************************************************************************/ +LVREV_ReturnStatus_en LVREV_GetInstanceHandle(LVREV_Handle_t *phInstance, + LVREV_MemoryTable_st *pMemoryTable, + LVREV_InstanceParams_st *pInstanceParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVXX_GetControlParameters */ +/* */ +/* DESCRIPTION: */ +/* Request the LVREV module control parameters. The current parameter set is returned */ +/* via the parameter pointer. 
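The memory-table and instance-creation calls above spell out NXP's caller-allocated memory model: query the table once with a NULL handle to learn each region's size, allocate the regions yourself, write the base addresses back into the table, and only then create the instance on top of that memory. A minimal caller-side sketch of that sequence (illustrative only, not part of the original header; it assumes the Size and pBaseAddress fields of LVM_MemoryRegion_st from LVM_Types.h and an arbitrary block size):

    #include <stdlib.h>
    #include "LVREV.h"

    #define SKETCH_MAX_BLOCK_SIZE 2048   /* hypothetical; the HAL code uses its own block-size constant */

    static LVREV_Handle_t create_reverb_instance(LVREV_MemoryTable_st *pMemTab)
    {
        LVREV_Handle_t          hInstance = LVM_NULL;
        LVREV_InstanceParams_st InstParams;
        int                     i;

        InstParams.MaxBlockSize = SKETCH_MAX_BLOCK_SIZE;
        InstParams.SourceFormat = LVM_STEREO;        /* worst case; mono input can still be processed */
        InstParams.NumDelays    = LVREV_DELAYLINES_4;

        /* Pass 1: hInstance == NULL returns only the memory requirements. */
        if (LVREV_GetMemoryTable(LVM_NULL, pMemTab, &InstParams) != LVREV_SUCCESS)
            return LVM_NULL;

        /* The caller owns the allocations; base addresses go back into the table. */
        for (i = 0; i < LVREV_NR_MEMORY_REGIONS; i++) {
            pMemTab->Region[i].pBaseAddress = malloc(pMemTab->Region[i].Size);
            if (pMemTab->Region[i].pBaseAddress == NULL)
                return LVM_NULL;                     /* sketch only: earlier regions leak here */
        }

        /* Pass 2: create the instance on the caller-provided regions. */
        if (LVREV_GetInstanceHandle(&hInstance, pMemTab, &InstParams) != LVREV_SUCCESS)
            return LVM_NULL;

        return hInstance;
    }

Teardown mirrors this: as documented above, calling LVREV_GetMemoryTable with the live handle hands back the same base addresses, so the caller can free each region before discarding the handle.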
*/ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* pControlParams Pointer to an empty parameter structure */ +/* */ +/* RETURNS: */ +/* LVREV_SUCCESS Succeeded */ +/* LVREV_NULLADDRESS When hInstance or pControlParams is NULL */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVREV_Process function */ +/* */ +/****************************************************************************************/ +LVREV_ReturnStatus_en LVREV_GetControlParameters(LVREV_Handle_t hInstance, + LVREV_ControlParams_st *pControlParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVREV_SetControlParameters */ +/* */ +/* DESCRIPTION: */ +/* Sets or changes the LVREV module parameters. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* pNewParams Pointer to a parameter structure */ +/* */ +/* RETURNS: */ +/* LVREV_SUCCESS Succeeded */ +/* LVREV_NULLADDRESS When hInstance or pNewParams is NULL */ +/* */ +/* NOTES: */ +/* 1. This function may be interrupted by the LVREV_Process function */ +/* */ +/****************************************************************************************/ +LVREV_ReturnStatus_en LVREV_SetControlParameters(LVREV_Handle_t hInstance, + LVREV_ControlParams_st *pNewParams); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVREV_ClearAudioBuffers */ +/* */ +/* DESCRIPTION: */ +/* This function is used to clear the internal audio buffers of the module. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* */ +/* RETURNS: */ +/* LVREV_SUCCESS Initialisation succeeded */ +/* LVREV_NULLADDRESS Instance is NULL */ +/* */ +/* NOTES: */ +/* 1. This function must not be interrupted by the LVREV_Process function */ +/* */ +/****************************************************************************************/ +LVREV_ReturnStatus_en LVREV_ClearAudioBuffers(LVREV_Handle_t hInstance); + + +/****************************************************************************************/ +/* */ +/* FUNCTION: LVREV_Process */ +/* */ +/* DESCRIPTION: */ +/* Process function for the LVREV module. */ +/* */ +/* PARAMETERS: */ +/* hInstance Instance handle */ +/* pInData Pointer to the input data */ +/* pOutData Pointer to the output data */ +/* NumSamples Number of samples in the input buffer */ +/* */ +/* RETURNS: */ +/* LVREV_SUCCESS Succeeded */ +/* LVREV_INVALIDNUMSAMPLES NumSamples was larger than the maximum block size */ +/* */ +/* NOTES: */ +/* 1. The input and output buffers must be 32-bit aligned */ +/* */ +/****************************************************************************************/ +LVREV_ReturnStatus_en LVREV_Process(LVREV_Handle_t hInstance, + const LVM_INT32 *pInData, + LVM_INT32 *pOutData, + const LVM_UINT16 NumSamples); + + +#ifdef __cplusplus +} +#endif /* __cplusplus */ + +#endif /* __LVREV_H__ */ + +/* End of file */ diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.cpp b/libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.cpp new file mode 100644 index 0000000..f487574 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.cpp @@ -0,0 +1,2238 @@ +/* + * Copyright (C) 2010-2010 NXP Software + * Copyright (C) 2009 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Exynos-Reverb" +#define ARRAY_SIZE(array) (sizeof array / sizeof array[0]) +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +#include +#include "exynos_effectReverb.h" +// from Reverb/lib +#include "LVREV.h" + +// effect_handle_t interface implementation for reverb +extern "C" const struct effect_interface_s gReverbInterface; + +#define LVM_ERROR_CHECK(LvmStatus, callingFunc, calledFunc){\ + if (LvmStatus == LVREV_NULLADDRESS){\ + ALOGV("\tLVREV_ERROR : Parameter error - "\ + "null pointer returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\ + }\ + if (LvmStatus == LVREV_INVALIDNUMSAMPLES){\ + ALOGV("\tLVREV_ERROR : Parameter error - "\ + "bad number of samples returned by %s in %s\n\n\n\n", callingFunc, calledFunc);\ + }\ + if (LvmStatus == LVREV_OUTOFRANGE){\ + ALOGV("\tLVREV_ERROR : Parameter error - "\ + "out of range returned by %s in %s\n", callingFunc, calledFunc);\ + }\ + } + +// Namespaces +namespace android { +namespace { + +/************************************************************************************/ +/* */ +/* Preset definitions */ +/* */ +/************************************************************************************/ + +const static t_reverb_settings sReverbPresets[] = { + // REVERB_PRESET_NONE: values are unused + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + // REVERB_PRESET_SMALLROOM + {-400, -600, 1100, 830, -400, 5, 500, 10, 1000, 1000}, + // REVERB_PRESET_MEDIUMROOM + {-400, -600, 1300, 830, -1000, 20, -200, 20, 1000, 1000}, + // REVERB_PRESET_LARGEROOM + {-400, -600, 1500, 830, -1600, 5, -1000, 40, 1000, 1000}, + // REVERB_PRESET_MEDIUMHALL + {-400, -600, 1800, 700, -1300, 15, -800, 30, 1000, 1000}, + // REVERB_PRESET_LARGEHALL + {-400, -600, 1800, 700, -2000, 30, -1400, 60, 1000, 1000}, + // REVERB_PRESET_PLATE + {-400, -200, 1300, 900, 0, 2, 0, 10, 1000, 750}, +}; + + +// NXP SW auxiliary environmental reverb +const effect_descriptor_t gAuxEnvReverbDescriptor = { + { 0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, { 0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e } }, + { 0x3c2d95e0, 0x932d, 0x11e5, 0xa788, { 0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b } }, + EFFECT_CONTROL_API_VERSION, + EFFECT_FLAG_TYPE_AUXILIARY | EFFECT_FLAG_HW_ACC_TUNNEL, + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Auxiliary Environmental Reverb", + "Offload NXP Software Ltd.", +}; + +// NXP SW insert environmental reverb +static const effect_descriptor_t gInsertEnvReverbDescriptor = { + {0xc2e5d5f0, 0x94bd, 0x4763, 0x9cac, {0x4e, 0x23, 0x4d, 0x06, 0x83, 0x9e}}, + {0x7cc70fa0, 0x932d, 0x11e5, 0xa5b4, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + EFFECT_CONTROL_API_VERSION, + EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_VOLUME_CTRL | EFFECT_FLAG_HW_ACC_TUNNEL, + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Insert Environmental Reverb", + "Offload NXP Software Ltd.", +}; + +// NXP SW auxiliary preset reverb +static const effect_descriptor_t gAuxPresetReverbDescriptor = { + {0x47382d60, 0xddd8, 0x11db, 0xbf3a, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + {0xa8b03600, 0x932d, 
0x11e5, 0xa1bb, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + EFFECT_CONTROL_API_VERSION, + EFFECT_FLAG_TYPE_AUXILIARY | EFFECT_FLAG_HW_ACC_TUNNEL, + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Auxiliary Preset Reverb", + "Offload NXP Software Ltd.", +}; + +// NXP SW insert preset reverb +static const effect_descriptor_t gInsertPresetReverbDescriptor = { + {0x47382d60, 0xddd8, 0x11db, 0xbf3a, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + {0xd41c3d20, 0x932d, 0x11e5, 0xa723, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, + EFFECT_CONTROL_API_VERSION, + EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_INSERT_FIRST | EFFECT_FLAG_VOLUME_CTRL | EFFECT_FLAG_HW_ACC_TUNNEL, + 0, //CPU Load information, + 1, //Memory usage for this effect, + "Offload Insert Preset Reverb", + "Offload NXP Software Ltd.", +}; + +// gDescriptors contains pointers to all defined effect descriptor in this library +static const effect_descriptor_t * const gDescriptors[] = { + &gAuxEnvReverbDescriptor, + &gInsertEnvReverbDescriptor, + &gAuxPresetReverbDescriptor, + &gInsertPresetReverbDescriptor +}; + +struct ReverbContext{ + const struct effect_interface_s *itfe; + effect_config_t config; + LVREV_Handle_t hInstance; + int16_t SavedRoomLevel; + int16_t SavedHfLevel; + int16_t SavedDecayTime; + int16_t SavedDecayHfRatio; + int16_t SavedReverbLevel; + int16_t SavedDiffusion; + int16_t SavedDensity; + bool bEnabled; + LVM_Fs_en SampleRate; + //LVM_INT32 *InFrames32; + //LVM_INT32 *OutFrames32; + bool auxiliary; + bool preset; + uint16_t curPreset; + uint16_t nextPreset; + int SamplesToExitCount; + LVM_INT16 leftVolume; + LVM_INT16 rightVolume; + LVM_INT16 prevLeftVolume; + LVM_INT16 prevRightVolume; + int volumeMode; + /* variables for offload implementation */ + LVREV_ControlParams_st ActiveParams; /* Control Parameters */ + struct mixer *mixerHandle; + struct mixer_ctl *param_mixerCtl; + struct mixer_ctl *ctx_mixerCtl; + /* output io-handle to which the effect is attached to */ + audio_io_handle_t OutHandle; + bool OffloadEnabled; + +}; + +enum { + REVERB_VOLUME_OFF, + REVERB_VOLUME_FLAT, + REVERB_VOLUME_RAMP, +}; + +#define REVERB_DEFAULT_PRESET REVERB_PRESET_NONE + + +#define REVERB_SEND_LEVEL (0x0C00) // 0.75 in 4.12 format +#define REVERB_UNIT_VOLUME (0x1000) // 1.0 in 4.12 format + +#define MIXER_CARD 0 +#define MIXER_PARAM_CTL_NAME "NXP RVB param data" +#define REVERB_PARAM_MAX 10 +#define MIXER_CTX_CTL_NAME "NXP RVB ctx data" +#define REVERB_CTX_MAX 7 + +//--- local function prototypes +int Reverb_init (ReverbContext *pContext); +void Reverb_free (ReverbContext *pContext); +int Reverb_setConfig (ReverbContext *pContext, effect_config_t *pConfig); +void Reverb_getConfig (ReverbContext *pContext, effect_config_t *pConfig); +int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue); +int Reverb_getParameter (ReverbContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue); +int Reverb_LoadPreset (ReverbContext *pContext); +LVREV_ReturnStatus_en Offload_SetEffect_ControlParameters(ReverbContext *pContext); +LVREV_ReturnStatus_en Offload_SetEffect_ContextParameters(ReverbContext *pContext); + +/* Effect Library Interface Implementation */ + +extern "C" int EffectCreate(const effect_uuid_t *uuid, + int32_t sessionId __unused, + int32_t ioId __unused, + effect_handle_t *pHandle){ + int ret; + int i; + int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *); + const effect_descriptor_t *desc; + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + 
ALOGV("\t\nEffectCreate start"); + + if (pHandle == NULL || uuid == NULL){ + ALOGV("\tLVM_ERROR : EffectCreate() called with NULL pointer"); + return -EINVAL; + } + + for (i = 0; i < length; i++) { + desc = gDescriptors[i]; + if (memcmp(uuid, &desc->uuid, sizeof(effect_uuid_t)) + == 0) { + ALOGV("\tEffectCreate - UUID matched Reverb type %d, UUID = %x", i, desc->uuid.timeLow); + break; + } + } + + if (i == length) { + return -ENOENT; + } + + ReverbContext *pContext = new ReverbContext; + + pContext->itfe = &gReverbInterface; + pContext->hInstance = NULL; + + pContext->auxiliary = false; + if ((desc->flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY){ + pContext->auxiliary = true; + ALOGV("\tEffectCreate - AUX"); + }else{ + ALOGV("\tEffectCreate - INS"); + } + + pContext->preset = false; + if (memcmp(&desc->type, SL_IID_PRESETREVERB, sizeof(effect_uuid_t)) == 0) { + pContext->preset = true; + // force reloading preset at first call to process() + pContext->curPreset = REVERB_PRESET_LAST + 1; + pContext->nextPreset = REVERB_DEFAULT_PRESET; + ALOGV("\tEffectCreate - PRESET"); + Reverb_LoadPreset(pContext); + }else{ + ALOGV("\tEffectCreate - ENVIRONMENTAL"); + } + + ALOGV("\tEffectCreate - Calling Reverb_init"); + ret = Reverb_init(pContext); + + if (ret < 0){ + ALOGV("\tLVM_ERROR : EffectCreate() init failed"); + delete pContext; + return ret; + } + + /* Send context paramets to offload reverb */ + LvmStatus = android::Offload_SetEffect_ContextParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ContextParameters", "Effect_command CMD_SET_VOLUME") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + + *pHandle = (effect_handle_t)pContext; + + // Allocate memory for reverb process (*2 is for STEREO) + //pContext->InFrames32 = (LVM_INT32 *)malloc(LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2); + //pContext->OutFrames32 = (LVM_INT32 *)malloc(LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2); + + ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext)); + ALOGV("\tEffectCreate end\n"); + return 0; +} /* end EffectCreate */ + +extern "C" int EffectRelease(effect_handle_t handle){ + ReverbContext * pContext = (ReverbContext *)handle; + + ALOGV("\tEffectRelease %p", handle); + if (pContext == NULL){ + ALOGV("\tLVM_ERROR : EffectRelease called with NULL pointer"); + return -EINVAL; + } + + //free(pContext->InFrames32); + //free(pContext->OutFrames32); + Reverb_free(pContext); + delete pContext; + return 0; +} /* end EffectRelease */ + +extern "C" int EffectGetDescriptor(const effect_uuid_t *uuid, + effect_descriptor_t *pDescriptor) { + int i; + int length = sizeof(gDescriptors) / sizeof(const effect_descriptor_t *); + + if (pDescriptor == NULL || uuid == NULL){ + ALOGV("EffectGetDescriptor() called with NULL pointer"); + return -EINVAL; + } + + for (i = 0; i < length; i++) { + if (memcmp(uuid, &gDescriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) { + *pDescriptor = *gDescriptors[i]; + ALOGV("EffectGetDescriptor - UUID matched Reverb type %d, UUID = %x", + i, gDescriptors[i]->uuid.timeLow); + return 0; + } + } + + return -EINVAL; +} /* end EffectGetDescriptor */ + +/* local functions */ +#define CHECK_ARG(cond) { \ + if (!(cond)) { \ + ALOGV("\tLVM_ERROR : Invalid argument: "#cond); \ + return -EINVAL; \ + } \ +} + +#if 0 +//---------------------------------------------------------------------------- +// MonoTo2I_32() +//---------------------------------------------------------------------------- +// Purpose: +// Convert MONO to STEREO +// 
+//---------------------------------------------------------------------------- + +void MonoTo2I_32( const LVM_INT32 *src, + LVM_INT32 *dst, + LVM_INT16 n) +{ + LVM_INT16 ii; + src += (n-1); + dst += ((n*2)-1); + + for (ii = n; ii != 0; ii--) + { + *dst = *src; + dst--; + + *dst = *src; + dst--; + src--; + } + + return; +} + +//---------------------------------------------------------------------------- +// From2iToMono_32() +//---------------------------------------------------------------------------- +// Purpose: +// Convert STEREO to MONO +// +//---------------------------------------------------------------------------- + +void From2iToMono_32( const LVM_INT32 *src, + LVM_INT32 *dst, + LVM_INT16 n) +{ + LVM_INT16 ii; + LVM_INT32 Temp; + + for (ii = n; ii != 0; ii--) + { + Temp = (*src>>1); + src++; + + Temp +=(*src>>1); + src++; + + *dst = Temp; + dst++; + } + + return; +} + +static inline int16_t clamp16(int32_t sample) +{ + if ((sample>>15) ^ (sample>>31)) + sample = 0x7FFF ^ (sample>>31); + return sample; +} +#endif +//---------------------------------------------------------------------------- +// Offload_SetEffect_ControlParameters +//---------------------------------------------------------------------------- +// Purpose: +// Send all the active control parameters to offloaded effect library +// +//---------------------------------------------------------------------------- +LVREV_ReturnStatus_en Offload_SetEffect_ControlParameters(ReverbContext *pContext){ + LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */ + int32_t param[REVERB_PARAM_MAX] = {0}; + int i, ret; + + ALOGV("\t[Reverb]Offload_SetEffect_ControlParameters Enter"); + if (pContext->OffloadEnabled) { + /* Update common strcuture parameter to array */ + param[0] = (int32_t)pContext->ActiveParams.OperatingMode; + param[1] = (int32_t)pContext->ActiveParams.SampleRate; + param[2] = (int32_t)pContext->ActiveParams.SourceFormat; + param[3] = (int32_t)pContext->ActiveParams.Level; + param[4] = (int32_t)pContext->ActiveParams.LPF; + param[5] = (int32_t)pContext->ActiveParams.HPF; + param[6] = (int32_t)pContext->ActiveParams.T60; + param[7] = (int32_t)pContext->ActiveParams.Density; + param[8] = (int32_t)pContext->ActiveParams.Damping; + param[9] = (int32_t)pContext->ActiveParams.RoomSize; + + if (pContext->param_mixerCtl) { + ALOGV("\t[Reverb]Offload_SetEffect_ControlParameters: mixer_ctl_set_array"); + ret = mixer_ctl_set_array(pContext->param_mixerCtl, param, ARRAY_SIZE(param)); + if (ret) { + ALOGE("[Reverb]%s: mixer_ctl_set_array return error(%d)", __func__, LvmStatus); + LvmStatus = LVREV_OUTOFRANGE; + } else { + LvmStatus = LVREV_SUCCESS; + for (i=0; i < REVERB_PARAM_MAX; i++) { + ALOGD("[Reverb]mixer-array param[%d] = %d", i, param[i]); + } + } + } + } + + ALOGV("\t[Reverb]Offload_SetEffect_ControlParameters Exit"); + return LvmStatus; +} + +//---------------------------------------------------------------------------- +// Offload_SetEffect_ContextParameters +//---------------------------------------------------------------------------- +// Purpose: +// Send all the context parameters to offloaded effect library +// +//---------------------------------------------------------------------------- +LVREV_ReturnStatus_en Offload_SetEffect_ContextParameters(ReverbContext *pContext){ + LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */ + int32_t param[REVERB_CTX_MAX] = {0}; + int i, ret; + + ALOGV("\t[Reverb]Offload_SetEffect_ContextParameters Enter"); + if 
(pContext->OffloadEnabled) { + /* Update common strcuture parameter to array */ + param[0] = (int32_t)pContext->auxiliary; + param[1] = (int32_t)pContext->leftVolume; + param[2] = (int32_t)pContext->rightVolume; + param[3] = (int32_t)pContext->volumeMode; + param[4] = (int32_t)pContext->bEnabled; + param[5] = (int32_t)pContext->preset; + param[6] = (int32_t)pContext->curPreset; + + if (pContext->ctx_mixerCtl) { + ALOGV("\t[Reverb]Offload_SetEffect_ContextParameters: mixer_ctl_set_array"); + ret = mixer_ctl_set_array(pContext->ctx_mixerCtl, param, ARRAY_SIZE(param)); + if (ret) { + ALOGE("[Reverb]%s: mixer_ctl_set_array return error(%d)", __func__, LvmStatus); + LvmStatus = LVREV_OUTOFRANGE; + } else { + LvmStatus = LVREV_SUCCESS; + for (i=0; i < REVERB_CTX_MAX; i++) { + ALOGD("[Reverb]mixer-array context[%d] = %d", i, param[i]); + } + } + } + } + + ALOGV("\t[Reverb]Offload_SetEffect_ControlParameters Exit"); + return LvmStatus; +} + +//---------------------------------------------------------------------------- +// process() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the Reverb +// +// Inputs: +// pIn: pointer to stereo/mono 16 bit input data +// pOut: pointer to stereo 16 bit output data +// frameCount: Frames to process +// pContext: effect engine context +// strength strength to be applied +// +// Outputs: +// pOut: pointer to updated stereo 16 bit output data +// +//---------------------------------------------------------------------------- + +int process( LVM_INT16 *pIn __unused, + LVM_INT16 *pOut __unused, + int frameCount __unused, + ReverbContext *pContext __unused){ +#if 0 + LVM_INT16 samplesPerFrame = 1; + LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; /* Function call status */ + LVM_INT16 *OutFrames16; + + + // Check that the input is either mono or stereo + if (pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO) { + samplesPerFrame = 2; + } else if (pContext->config.inputCfg.channels != AUDIO_CHANNEL_OUT_MONO) { + ALOGV("\tLVREV_ERROR : process invalid PCM format"); + return -EINVAL; + } + + OutFrames16 = (LVM_INT16 *)pContext->OutFrames32; + + // Check for NULL pointers + if((pContext->InFrames32 == NULL)||(pContext->OutFrames32 == NULL)){ + ALOGV("\tLVREV_ERROR : process failed to allocate memory for temporary buffers "); + return -EINVAL; + } + + #ifdef LVM_PCM + fwrite(pIn, frameCount*sizeof(LVM_INT16)*samplesPerFrame, 1, pContext->PcmInPtr); + fflush(pContext->PcmInPtr); + #endif + + if (pContext->preset && pContext->nextPreset != pContext->curPreset) { + Reverb_LoadPreset(pContext); + } + + + + // Convert to Input 32 bits + if (pContext->auxiliary) { + for(int i=0; iInFrames32[i] = (LVM_INT32)pIn[i]<<8; + } + } else { + // insert reverb input is always stereo + for (int i = 0; i < frameCount; i++) { + pContext->InFrames32[2*i] = (pIn[2*i] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12 + pContext->InFrames32[2*i+1] = (pIn[2*i+1] * REVERB_SEND_LEVEL) >> 4; // <<8 + >>12 + } + } + + if (pContext->preset && pContext->curPreset == REVERB_PRESET_NONE) { + memset(pContext->OutFrames32, 0, frameCount * sizeof(LVM_INT32) * 2); //always stereo here + } else { + if(pContext->bEnabled == LVM_FALSE && pContext->SamplesToExitCount > 0) { + memset(pContext->InFrames32,0,frameCount * sizeof(LVM_INT32) * samplesPerFrame); + ALOGV("\tZeroing %d samples per frame at the end of call", samplesPerFrame); + } + + /* Process the samples, producing a stereo output */ + LvmStatus = LVREV_Process(pContext->hInstance, /* Instance 
handle */ + pContext->InFrames32, /* Input buffer */ + pContext->OutFrames32, /* Output buffer */ + frameCount); /* Number of samples to read */ + } + + LVM_ERROR_CHECK(LvmStatus, "LVREV_Process", "process") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + + // Convert to 16 bits + if (pContext->auxiliary) { + for (int i=0; i < frameCount*2; i++) { //always stereo here + OutFrames16[i] = clamp16(pContext->OutFrames32[i]>>8); + } + } else { + for (int i=0; i < frameCount*2; i++) { //always stereo here + OutFrames16[i] = clamp16((pContext->OutFrames32[i]>>8) + (LVM_INT32)pIn[i]); + } + + // apply volume with ramp if needed + if ((pContext->leftVolume != pContext->prevLeftVolume || + pContext->rightVolume != pContext->prevRightVolume) && + pContext->volumeMode == REVERB_VOLUME_RAMP) { + LVM_INT32 vl = (LVM_INT32)pContext->prevLeftVolume << 16; + LVM_INT32 incl = (((LVM_INT32)pContext->leftVolume << 16) - vl) / frameCount; + LVM_INT32 vr = (LVM_INT32)pContext->prevRightVolume << 16; + LVM_INT32 incr = (((LVM_INT32)pContext->rightVolume << 16) - vr) / frameCount; + + for (int i = 0; i < frameCount; i++) { + OutFrames16[2*i] = + clamp16((LVM_INT32)((vl >> 16) * OutFrames16[2*i]) >> 12); + OutFrames16[2*i+1] = + clamp16((LVM_INT32)((vr >> 16) * OutFrames16[2*i+1]) >> 12); + + vl += incl; + vr += incr; + } + + pContext->prevLeftVolume = pContext->leftVolume; + pContext->prevRightVolume = pContext->rightVolume; + } else if (pContext->volumeMode != REVERB_VOLUME_OFF) { + if (pContext->leftVolume != REVERB_UNIT_VOLUME || + pContext->rightVolume != REVERB_UNIT_VOLUME) { + for (int i = 0; i < frameCount; i++) { + OutFrames16[2*i] = + clamp16((LVM_INT32)(pContext->leftVolume * OutFrames16[2*i]) >> 12); + OutFrames16[2*i+1] = + clamp16((LVM_INT32)(pContext->rightVolume * OutFrames16[2*i+1]) >> 12); + } + } + pContext->prevLeftVolume = pContext->leftVolume; + pContext->prevRightVolume = pContext->rightVolume; + pContext->volumeMode = REVERB_VOLUME_RAMP; + } + } + + #ifdef LVM_PCM + fwrite(OutFrames16, frameCount*sizeof(LVM_INT16)*2, 1, pContext->PcmOutPtr); + fflush(pContext->PcmOutPtr); + #endif + + // Accumulate if required + if (pContext->config.outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE){ + //ALOGV("\tBuffer access is ACCUMULATE"); + for (int i=0; ihInstance, + &MemTab, + LVM_NULL); + + LVM_ERROR_CHECK(LvmStatus, "LVM_GetMemoryTable", "Reverb_free") + + for (int i=0; imixerHandle); +} /* end Reverb_free */ + +//---------------------------------------------------------------------------- +// Reverb_setConfig() +//---------------------------------------------------------------------------- +// Purpose: Set input and output audio configuration. 
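One detail of the (disabled, #if 0) software process() path above that is easy to miss: REVERB_SEND_LEVEL (0x0C00) is 0.75 in 4.12 fixed point, so the input conversion (pIn[i] * REVERB_SEND_LEVEL) >> 4 folds the <<8 promotion to the 32-bit processing domain and the >>12 scaling by 0.75 into a single shift. Worked example: for pIn[i] = 0x4000 (half of full scale), (0x4000 * 0x0C00) >> 4 = 0x300000, which is exactly 0.75 * (0x4000 << 8).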
+// +// Inputs: +// pContext: effect engine context +// pConfig: pointer to effect_config_t structure holding input and output +// configuration parameters +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int Reverb_setConfig(ReverbContext *pContext, effect_config_t *pConfig){ + LVM_Fs_en SampleRate; + //ALOGV("\tReverb_setConfig start"); + + CHECK_ARG(pContext != NULL); + CHECK_ARG(pConfig != NULL); + + CHECK_ARG(pConfig->inputCfg.samplingRate == pConfig->outputCfg.samplingRate); + CHECK_ARG(pConfig->inputCfg.format == pConfig->outputCfg.format); + CHECK_ARG((pContext->auxiliary && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_MONO) || + ((!pContext->auxiliary) && pConfig->inputCfg.channels == AUDIO_CHANNEL_OUT_STEREO)); + CHECK_ARG(pConfig->outputCfg.channels == AUDIO_CHANNEL_OUT_STEREO); + CHECK_ARG(pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_WRITE + || pConfig->outputCfg.accessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE); + CHECK_ARG(pConfig->inputCfg.format == AUDIO_FORMAT_PCM_16_BIT); + + //ALOGV("\tReverb_setConfig calling memcpy"); + pContext->config = *pConfig; + + + switch (pConfig->inputCfg.samplingRate) { + case 8000: + SampleRate = LVM_FS_8000; + break; + case 16000: + SampleRate = LVM_FS_16000; + break; + case 22050: + SampleRate = LVM_FS_22050; + break; + case 32000: + SampleRate = LVM_FS_32000; + break; + case 44100: + SampleRate = LVM_FS_44100; + break; + case 48000: + SampleRate = LVM_FS_48000; + break; + default: + ALOGV("\rReverb_setConfig invalid sampling rate %d", pConfig->inputCfg.samplingRate); + return -EINVAL; + } + + if (pContext->SampleRate != SampleRate) { + LVREV_ReturnStatus_en LvmStatus = LVREV_SUCCESS; + + ALOGV("\tReverb_setConfig change sampling rate to %d", SampleRate); + pContext->ActiveParams.SampleRate = SampleRate; + + /* Update Reverb updated controls parameters */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "Reverb_setConfig") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + //ALOGV("\tReverb_setConfig Succesfully called Offload_SetEffect_ControlParameters\n"); + pContext->SampleRate = SampleRate; + }else{ + //ALOGV("\tReverb_setConfig keep sampling rate at %d", SampleRate); + } + + //ALOGV("\tReverb_setConfig End"); + return 0; +} /* end Reverb_setConfig */ + +//---------------------------------------------------------------------------- +// Reverb_getConfig() +//---------------------------------------------------------------------------- +// Purpose: Get input and output audio configuration. 
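Taken together, the CHECK_ARG() guards in Reverb_setConfig() above pin down what a caller must supply: equal input/output sampling rates, 16-bit PCM on both sides, mono input only for the auxiliary variant (stereo otherwise), stereo output with WRITE or ACCUMULATE access, and one of the six supported rates. A caller-side fragment for the insert (non-auxiliary) case, illustrative only and using the effect_config_t / buffer_config_t fields that the code above already relies on:

    effect_config_t cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.inputCfg.samplingRate = 48000;                    /* maps to LVM_FS_48000 in the switch above */
    cfg.inputCfg.channels     = AUDIO_CHANNEL_OUT_STEREO; /* insert reverb: stereo in, stereo out */
    cfg.inputCfg.format       = AUDIO_FORMAT_PCM_16_BIT;
    cfg.inputCfg.accessMode   = EFFECT_BUFFER_ACCESS_READ;
    cfg.outputCfg             = cfg.inputCfg;
    cfg.outputCfg.accessMode  = EFFECT_BUFFER_ACCESS_ACCUMULATE;
    if (Reverb_setConfig(pContext, &cfg) != 0) {
        /* rejected: one of the CHECK_ARG conditions or the sampling-rate switch failed */
    }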
+// +// Inputs: +// pContext: effect engine context +// pConfig: pointer to effect_config_t structure holding input and output +// configuration parameters +// +// Outputs: +// +//---------------------------------------------------------------------------- + +void Reverb_getConfig(ReverbContext *pContext, effect_config_t *pConfig) +{ + *pConfig = pContext->config; +} /* end Reverb_getConfig */ + +//---------------------------------------------------------------------------- +// Reverb_init() +//---------------------------------------------------------------------------- +// Purpose: Initialize engine with default configuration +// +// Inputs: +// pContext: effect engine context +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int Reverb_init(ReverbContext *pContext){ + int status; + + ALOGV("\tReverb_init start"); + + CHECK_ARG(pContext != NULL); + + if (pContext->hInstance != NULL){ + Reverb_free(pContext); + } + + pContext->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ; + if (pContext->auxiliary) { + pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_MONO; + } else { + pContext->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + } + + pContext->config.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + pContext->config.inputCfg.samplingRate = 44100; + pContext->config.inputCfg.bufferProvider.getBuffer = NULL; + pContext->config.inputCfg.bufferProvider.releaseBuffer = NULL; + pContext->config.inputCfg.bufferProvider.cookie = NULL; + pContext->config.inputCfg.mask = EFFECT_CONFIG_ALL; + pContext->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE; + pContext->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + pContext->config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + pContext->config.outputCfg.samplingRate = 44100; + pContext->config.outputCfg.bufferProvider.getBuffer = NULL; + pContext->config.outputCfg.bufferProvider.releaseBuffer = NULL; + pContext->config.outputCfg.bufferProvider.cookie = NULL; + pContext->config.outputCfg.mask = EFFECT_CONFIG_ALL; + + pContext->leftVolume = REVERB_UNIT_VOLUME; + pContext->rightVolume = REVERB_UNIT_VOLUME; + pContext->prevLeftVolume = REVERB_UNIT_VOLUME; + pContext->prevRightVolume = REVERB_UNIT_VOLUME; + pContext->volumeMode = REVERB_VOLUME_FLAT; + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ +#if 0 + //LVREV_ControlParams_st params; /* Control Parameters */ + LVREV_InstanceParams_st InstParams; /* Instance parameters */ + LVREV_MemoryTable_st MemTab; /* Memory allocation table */ + bool bMallocFailure = LVM_FALSE; + + /* Set the capabilities */ + InstParams.MaxBlockSize = MAX_CALL_SIZE; + InstParams.SourceFormat = LVM_STEREO; // Max format, could be mono during process + InstParams.NumDelays = LVREV_DELAYLINES_4; + + /* Allocate memory, forcing alignment */ + LvmStatus = LVREV_GetMemoryTable(LVM_NULL, + &MemTab, + &InstParams); + + LVM_ERROR_CHECK(LvmStatus, "LVREV_GetMemoryTable", "Reverb_init") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + + ALOGV("\tCreateInstance Succesfully called LVM_GetMemoryTable\n"); + + /* Allocate memory */ + for (int i=0; ihInstance = LVM_NULL; + + /* Init sets the instance handle */ + LvmStatus = LVREV_GetInstanceHandle(&pContext->hInstance, + &MemTab, + &InstParams); + + LVM_ERROR_CHECK(LvmStatus, "LVM_GetInstanceHandle", "Reverb_init") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + + ALOGV("\tReverb_init CreateInstance Succesfully called LVM_GetInstanceHandle\n"); +#endif + + /* open mixer 
control */ + pContext->mixerHandle = mixer_open(MIXER_CARD); + if (!pContext->mixerHandle) { + ALOGE("%s: Failed to open mixer", __func__); + return -EINVAL; + } + + /* Get required parameter control from mixer dai */ + pContext->param_mixerCtl = mixer_get_ctl_by_name(pContext->mixerHandle, MIXER_PARAM_CTL_NAME); + if (!pContext->param_mixerCtl) { + ALOGE("%s: mixer_get_ctl_by_name failed", __func__); + mixer_close(pContext->mixerHandle); + return -EINVAL; + } + + /* Get required context control from mixer dai */ + pContext->ctx_mixerCtl = mixer_get_ctl_by_name(pContext->mixerHandle, MIXER_CTX_CTL_NAME); + if (!pContext->ctx_mixerCtl) { + ALOGE("%s: mixer_get_ctl_by_name failed", __func__); + mixer_close(pContext->mixerHandle); + return -EINVAL; + } + + /* Set the initial process parameters */ + /* General parameters */ + pContext->ActiveParams.OperatingMode = LVM_MODE_ON; + pContext->ActiveParams.SampleRate = LVM_FS_44100; + pContext->SampleRate = LVM_FS_44100; + + if(pContext->config.inputCfg.channels == AUDIO_CHANNEL_OUT_MONO){ + pContext->ActiveParams.SourceFormat = LVM_MONO; + } else { + pContext->ActiveParams.SourceFormat = LVM_STEREO; + } + + /* Reverb parameters */ + pContext->ActiveParams.Level = 0; + pContext->ActiveParams.LPF = 23999; + pContext->ActiveParams.HPF = 50; + pContext->ActiveParams.T60 = 1490; + pContext->ActiveParams.Density = 100; + pContext->ActiveParams.Damping = 21; + pContext->ActiveParams.RoomSize = 100; + + pContext->SamplesToExitCount = (pContext->ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000; + + /* Saved strength is used to return the exact strength that was used in the set to the get + * because we map the original strength range of 0:1000 to 1:15, and this will avoid + * quantisation like effect when returning + */ + pContext->SavedRoomLevel = -6000; + pContext->SavedHfLevel = 0; + pContext->bEnabled = LVM_FALSE; + pContext->SavedDecayTime = pContext->ActiveParams.T60; + pContext->SavedDecayHfRatio = pContext->ActiveParams.Damping*20; + pContext->SavedDensity = pContext->ActiveParams.RoomSize*10; + pContext->SavedDiffusion = pContext->ActiveParams.Density*10; + pContext->SavedReverbLevel = -6000; + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "Reverb_Init") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + + ALOGV("\tReverb_init CreateInstance Succesfully called LVREV_SetControlParameters\n"); + ALOGV("\tReverb_init End"); + return 0; +} /* end Reverb_init */ + +//---------------------------------------------------------------------------- +// ReverbConvertLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Convert level from OpenSL ES format to LVM format +// +// Inputs: +// level level to be applied +// +//---------------------------------------------------------------------------- + +int16_t ReverbConvertLevel(int16_t level){ + static int16_t LevelArray[101] = + { + -12000, -4000, -3398, -3046, -2796, -2603, -2444, -2310, -2194, -2092, + -2000, -1918, -1842, -1773, -1708, -1648, -1592, -1540, -1490, -1443, + -1398, -1356, -1316, -1277, -1240, -1205, -1171, -1138, -1106, -1076, + -1046, -1018, -990, -963, -938, -912, -888, -864, -841, -818, + -796, -775, -754, -734, -714, -694, -675, -656, -638, -620, + -603, -585, -568, -552, -536, -520, -504, -489, -474, -459, + -444, -430, -416, -402, -388, -375, -361, -348, -335, -323, + -310, -298, -286, -274, -262, -250, 
-239, -228, -216, -205, + -194, -184, -173, -162, -152, -142, -132, -121, -112, -102, + -92, -82, -73, -64, -54, -45, -36, -27, -18, -9, + 0 + }; + int16_t i; + + for(i = 0; i < 101; i++) + { + if(level <= LevelArray[i]) + break; + } + return i; +} + +//---------------------------------------------------------------------------- +// ReverbConvertHFLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Convert level from OpenSL ES format to LVM format +// +// Inputs: +// level level to be applied +// +//---------------------------------------------------------------------------- + +int16_t ReverbConvertHfLevel(int16_t Hflevel){ + int16_t i; + + static LPFPair_t LPFArray[97] = + { // Limit range to 50 for LVREV parameter range + {-10000, 50}, { -5000, 50 }, { -4000, 50}, { -3000, 158}, { -2000, 502}, + {-1000, 1666},{ -900, 1897}, { -800, 2169}, { -700, 2496}, { -600, 2895}, + {-500, 3400}, { -400, 4066}, { -300, 5011}, { -200, 6537}, { -100, 9826}, + {-99, 9881 }, { -98, 9937 }, { -97, 9994 }, { -96, 10052}, { -95, 10111}, + {-94, 10171}, { -93, 10231}, { -92, 10293}, { -91, 10356}, { -90, 10419}, + {-89, 10484}, { -88, 10549}, { -87, 10616}, { -86, 10684}, { -85, 10753}, + {-84, 10823}, { -83, 10895}, { -82, 10968}, { -81, 11042}, { -80, 11117}, + {-79, 11194}, { -78, 11272}, { -77, 11352}, { -76, 11433}, { -75, 11516}, + {-74, 11600}, { -73, 11686}, { -72, 11774}, { -71, 11864}, { -70, 11955}, + {-69, 12049}, { -68, 12144}, { -67, 12242}, { -66, 12341}, { -65, 12443}, + {-64, 12548}, { -63, 12654}, { -62, 12763}, { -61, 12875}, { -60, 12990}, + {-59, 13107}, { -58, 13227}, { -57, 13351}, { -56, 13477}, { -55, 13607}, + {-54, 13741}, { -53, 13878}, { -52, 14019}, { -51, 14164}, { -50, 14313}, + {-49, 14467}, { -48, 14626}, { -47, 14789}, { -46, 14958}, { -45, 15132}, + {-44, 15312}, { -43, 15498}, { -42, 15691}, { -41, 15890}, { -40, 16097}, + {-39, 16311}, { -38, 16534}, { -37, 16766}, { -36, 17007}, { -35, 17259}, + {-34, 17521}, { -33, 17795}, { -32, 18081}, { -31, 18381}, { -30, 18696}, + {-29, 19027}, { -28, 19375}, { -27, 19742}, { -26, 20129}, { -25, 20540}, + {-24, 20976}, { -23, 21439}, { -22, 21934}, { -21, 22463}, { -20, 23031}, + {-19, 23643}, { -18, 23999} + }; + + for(i = 0; i < 96; i++) + { + if(Hflevel <= LPFArray[i].Room_HF) + break; + } + return LPFArray[i].LPF; +} + +//---------------------------------------------------------------------------- +// ReverbSetRoomHfLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the HF level to the Reverb. Must first be converted to LVM format +// +// Inputs: +// pContext: effect engine context +// level level to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetRoomHfLevel(ReverbContext *pContext, int16_t level){ + //ALOGV("\tReverbSetRoomHfLevel start (%d)", level); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + pContext->ActiveParams.LPF = ReverbConvertHfLevel(level); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetRoomHfLevel") + + //ALOGV("\tReverbSetRoomhfLevel() just Set -> %d\n", ActiveParams.LPF); + pContext->SavedHfLevel = level; + //ALOGV("\tReverbSetHfRoomLevel end.. 
saving %d", pContext->SavedHfLevel); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetRoomHfLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Get the level applied to the Revervb. Must first be converted to LVM format +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int16_t ReverbGetRoomHfLevel(ReverbContext *pContext){ + int16_t level; + //ALOGV("\tReverbGetRoomHfLevel start, saved level is %d", pContext->SavedHfLevel); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context*/ + level = ReverbConvertHfLevel(pContext->SavedHfLevel); + + //ALOGV("\tReverbGetRoomHfLevel() ActiveParams.LPFL %d, pContext->SavedHfLevel: %d, " + // "converted level: %d\n", ActiveParams.LPF, pContext->SavedHfLevel, level); + + if(pContext->ActiveParams.LPF != level){ + ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomHfLevel() has wrong level -> %d %d\n", + pContext->ActiveParams.Level, level); + } + + //ALOGV("\tReverbGetRoomHfLevel end"); + return pContext->SavedHfLevel; +} + +//---------------------------------------------------------------------------- +// ReverbSetReverbLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the level to the Reverb. Must first be converted to LVM format +// +// Inputs: +// pContext: effect engine context +// level level to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetReverbLevel(ReverbContext *pContext, int16_t level){ + //ALOGV("\n\tReverbSetReverbLevel start (%d)", level); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + LVM_INT32 CombinedLevel; // Sum of room and reverb level controls + + /* Get the current settings from context */ + // needs to subtract max levels for both RoomLevel and ReverbLevel + CombinedLevel = (level + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL; + //ALOGV("\tReverbSetReverbLevel() CombinedLevel is %d = %d + %d\n", + // CombinedLevel, level, pContext->SavedRoomLevel); + + pContext->ActiveParams.Level = ReverbConvertLevel(CombinedLevel); + + //ALOGV("\tReverbSetReverbLevel() Trying to set -> %d\n", ActiveParams.Level); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetReverbLevel") + + //ALOGV("\tReverbSetReverbLevel() just Set -> %d\n", ActiveParams.Level); + + pContext->SavedReverbLevel = level; + //ALOGV("\tReverbSetReverbLevel end pContext->SavedReverbLevel is %d\n\n", + // pContext->SavedReverbLevel); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetReverbLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Get the level applied to the Revervb. 
Must first be converted to LVM format +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int16_t ReverbGetReverbLevel(ReverbContext *pContext){ + int16_t level; + //ALOGV("\tReverbGetReverbLevel start"); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + LVM_INT32 CombinedLevel; // Sum of room and reverb level controls + + /* Get the current settings from context */ + // needs to subtract max levels for both RoomLevel and ReverbLevel + CombinedLevel = (pContext->SavedReverbLevel + pContext->SavedRoomLevel)-LVREV_MAX_REVERB_LEVEL; + + //ALOGV("\tReverbGetReverbLevel() CombinedLevel is %d = %d + %d\n", + //CombinedLevel, pContext->SavedReverbLevel, pContext->SavedRoomLevel); + level = ReverbConvertLevel(CombinedLevel); + + //ALOGV("\tReverbGetReverbLevel(): ActiveParams.Level: %d, pContext->SavedReverbLevel: %d, " + //"pContext->SavedRoomLevel: %d, CombinedLevel: %d, converted level: %d\n", + //ActiveParams.Level, pContext->SavedReverbLevel,pContext->SavedRoomLevel, CombinedLevel,level); + + if(pContext->ActiveParams.Level != level){ + ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetReverbLevel() has wrong level -> %d %d\n", + pContext->ActiveParams.Level, level); + } + + //ALOGV("\tReverbGetReverbLevel end\n"); + + return pContext->SavedReverbLevel; +} + +//---------------------------------------------------------------------------- +// ReverbSetRoomLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the level to the Reverb. Must first be converted to LVM format +// +// Inputs: +// pContext: effect engine context +// level level to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetRoomLevel(ReverbContext *pContext, int16_t level){ + //ALOGV("\tReverbSetRoomLevel start (%d)", level); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + LVM_INT32 CombinedLevel; // Sum of room and reverb level controls + + /* Get the current settings */ + // needs to subtract max levels for both RoomLevel and ReverbLevel + CombinedLevel = (level + pContext->SavedReverbLevel)-LVREV_MAX_REVERB_LEVEL; + pContext->ActiveParams.Level = ReverbConvertLevel(CombinedLevel); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetRoomLevel") + + //ALOGV("\tReverbSetRoomLevel() just Set -> %d\n", ActiveParams.Level); + + pContext->SavedRoomLevel = level; + //ALOGV("\tReverbSetRoomLevel end"); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetRoomLevel() +//---------------------------------------------------------------------------- +// Purpose: +// Get the level applied to the Revervb. 
Must first be converted to LVM format +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int16_t ReverbGetRoomLevel(ReverbContext *pContext){ + int16_t level; + //ALOGV("\tReverbGetRoomLevel start"); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + LVM_INT32 CombinedLevel; // Sum of room and reverb level controls + + /* Get the current settings from context */ + // needs to subtract max levels for both RoomLevel and ReverbLevel + CombinedLevel = (pContext->SavedRoomLevel + pContext->SavedReverbLevel-LVREV_MAX_REVERB_LEVEL); + level = ReverbConvertLevel(CombinedLevel); + + //ALOGV("\tReverbGetRoomLevel, Level = %d, pContext->SavedRoomLevel = %d, " + // "pContext->SavedReverbLevel = %d, CombinedLevel = %d, level = %d", + // ActiveParams.Level, pContext->SavedRoomLevel, + // pContext->SavedReverbLevel, CombinedLevel, level); + + if(pContext->ActiveParams.Level != level){ + ALOGV("\tLVM_ERROR : (ignore at start up) ReverbGetRoomLevel() has wrong level -> %d %d\n", + pContext->ActiveParams.Level, level); + } + + //ALOGV("\tReverbGetRoomLevel end"); + return pContext->SavedRoomLevel; +} + +//---------------------------------------------------------------------------- +// ReverbSetDecayTime() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the decay time to the Reverb. +// +// Inputs: +// pContext: effect engine context +// time decay to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetDecayTime(ReverbContext *pContext, uint32_t time){ + //ALOGV("\tReverbSetDecayTime start (%d)", time); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + if (time <= LVREV_MAX_T60) { + pContext->ActiveParams.T60 = (LVM_UINT16)time; + } + else { + pContext->ActiveParams.T60 = LVREV_MAX_T60; + } + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetDecayTime") + + //ALOGV("\tReverbSetDecayTime() just Set -> %d\n", ActiveParams.T60); + + pContext->SamplesToExitCount = (pContext->ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000; + //ALOGV("\tReverbSetDecayTime() just Set SamplesToExitCount-> %d\n",pContext->SamplesToExitCount); + pContext->SavedDecayTime = (int16_t)time; + //ALOGV("\tReverbSetDecayTime end"); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetDecayTime() +//---------------------------------------------------------------------------- +// Purpose: +// Get the decay time applied to the Revervb. 
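For a concrete feel of the tail bookkeeping in ReverbSetDecayTime() above: the requested time is clamped to LVREV_MAX_T60 (7000 ms) and the exit counter is time * samplingRate / 1000. With the defaults that Reverb_init() establishes (T60 = 1490 ms at 44100 Hz) this gives 1490 * 44100 / 1000 = 65709 samples of tail; a request of, say, 10000 ms at 48000 Hz is first clamped to 7000 ms and yields 7000 * 48000 / 1000 = 336000 samples.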
+// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +uint32_t ReverbGetDecayTime(ReverbContext *pContext){ + //ALOGV("\tReverbGetDecayTime start"); + + LVREV_ControlParams_st ActiveParams; /* Current control Parameters */ + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + if(pContext->ActiveParams.T60 != pContext->SavedDecayTime){ + // This will fail if the decay time is set to more than 7000 + ALOGV("\tLVM_ERROR : ReverbGetDecayTime() has wrong level -> %d %d\n", + pContext->ActiveParams.T60, pContext->SavedDecayTime); + } + + //ALOGV("\tReverbGetDecayTime end"); + return (uint32_t)pContext->ActiveParams.T60; +} + +//---------------------------------------------------------------------------- +// ReverbSetDecayHfRatio() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the HF decay ratio to the Reverb. +// +// Inputs: +// pContext: effect engine context +// ratio ratio to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetDecayHfRatio(ReverbContext *pContext, int16_t ratio){ + //ALOGV("\tReverbSetDecayHfRatioe start (%d)", ratio); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + pContext->ActiveParams.Damping = (LVM_INT16)(ratio/20); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetDecayHfRatio") + + //ALOGV("\tReverbSetDecayHfRatio() just Set -> %d\n", ActiveParams.Damping); + + pContext->SavedDecayHfRatio = ratio; + //ALOGV("\tReverbSetDecayHfRatio end"); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetDecayHfRatio() +//---------------------------------------------------------------------------- +// Purpose: +// Get the HF decay ratio applied to the Revervb. +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int32_t ReverbGetDecayHfRatio(ReverbContext *pContext){ + //ALOGV("\tReverbGetDecayHfRatio start"); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + if(pContext->ActiveParams.Damping != (LVM_INT16)(pContext->SavedDecayHfRatio / 20)){ + ALOGV("\tLVM_ERROR : ReverbGetDecayHfRatio() has wrong level -> %d %d\n", + pContext->ActiveParams.Damping, pContext->SavedDecayHfRatio); + } + + //ALOGV("\tReverbGetDecayHfRatio end"); + return pContext->SavedDecayHfRatio; +} + +//---------------------------------------------------------------------------- +// ReverbSetDiffusion() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the diffusion to the Reverb. 
+// +// Inputs: +// pContext: effect engine context +// level decay to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetDiffusion(ReverbContext *pContext, int16_t level){ + //ALOGV("\tReverbSetDiffusion start (%d)", level); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + pContext->ActiveParams.Density = (LVM_INT16)(level/10); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetDiffusion") + + //ALOGV("\tReverbSetDiffusion() just Set -> %d\n", ActiveParams.Density); + + pContext->SavedDiffusion = level; + //ALOGV("\tReverbSetDiffusion end"); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetDiffusion() +//---------------------------------------------------------------------------- +// Purpose: +// Get the decay time applied to the Revervb. +// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int32_t ReverbGetDiffusion(ReverbContext *pContext){ + //ALOGV("\tReverbGetDiffusion start"); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + LVM_INT16 Temp; + + /* Get the current settings from context */ + Temp = (LVM_INT16)(pContext->SavedDiffusion/10); + + if(pContext->ActiveParams.Density != Temp){ + ALOGV("\tLVM_ERROR : ReverbGetDiffusion invalid value %d %d", Temp, pContext->ActiveParams.Density); + } + + //ALOGV("\tReverbGetDiffusion end"); + return pContext->SavedDiffusion; +} + +//---------------------------------------------------------------------------- +// ReverbSetDensity() +//---------------------------------------------------------------------------- +// Purpose: +// Apply the density level the Reverb. +// +// Inputs: +// pContext: effect engine context +// level decay to be applied +// +//---------------------------------------------------------------------------- + +void ReverbSetDensity(ReverbContext *pContext, int16_t level){ + //ALOGV("\tReverbSetDensity start (%d)", level); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + /* Get the current settings from context */ + pContext->ActiveParams.RoomSize = (LVM_INT16)(((level * 99) / 1000) + 1); + + /* Activate the initial settings */ + LvmStatus = Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetDensity") + + //ALOGV("\tReverbSetDensity just Set -> %d\n", ActiveParams.RoomSize); + + pContext->SavedDensity = level; + //ALOGV("\tReverbSetDensity end"); + return; +} + +//---------------------------------------------------------------------------- +// ReverbGetDensity() +//---------------------------------------------------------------------------- +// Purpose: +// Get the density level applied to the Revervb. 
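Taken together, the setters above (ReverbSetDecayHfRatio, ReverbSetDiffusion, ReverbSetDensity) map the EnvironmentalReverb parameter ranges onto LVREV control fields with small integer scalings, and the getters, including ReverbGetDensity below, merely check the reverse mapping against the Saved* copies. A consolidated sketch of the three conversions, for reference; the struct here is a stand-in, not the real LVREV_ControlParams_st:

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in for the relevant LVREV control fields (illustrative only). */
    struct reverb_ctrl {
        int16_t Damping;   /* fed by decayHFRatio */
        int16_t Density;   /* fed by diffusion    */
        int16_t RoomSize;  /* fed by density      */
    };

    /* Same scalings the setters above use:
     *   decayHFRatio (per-mille, e.g. 830) -> Damping  = ratio / 20
     *   diffusion    (0..1000)             -> Density  = level / 10
     *   density      (0..1000)             -> RoomSize = level * 99 / 1000 + 1
     */
    static void apply_env_reverb(struct reverb_ctrl *c, int16_t decay_hf_ratio,
                                 int16_t diffusion, int16_t density)
    {
        c->Damping  = (int16_t)(decay_hf_ratio / 20);
        c->Density  = (int16_t)(diffusion / 10);
        c->RoomSize = (int16_t)(((density * 99) / 1000) + 1);
    }

    int main(void)
    {
        struct reverb_ctrl c;
        apply_env_reverb(&c, 830, 1000, 1000);
        /* prints Damping=41 Density=100 RoomSize=100 */
        printf("Damping=%d Density=%d RoomSize=%d\n", c.Damping, c.Density, c.RoomSize);
        return 0;
    }

Because the getters only invert these scalings against the saved copies, a mismatch is logged as an LVM_ERROR warning and the saved value is returned unchanged.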
+// +// Inputs: +// pContext: effect engine context +// +//---------------------------------------------------------------------------- + +int32_t ReverbGetDensity(ReverbContext *pContext){ + //ALOGV("\tReverbGetDensity start"); + + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + LVM_INT16 Temp; + /* Get the current settings from context*/ + Temp = (LVM_INT16)(((pContext->SavedDensity * 99) / 1000) + 1); + + if(Temp != pContext->ActiveParams.RoomSize){ + ALOGV("\tLVM_ERROR : ReverbGetDensity invalid value %d %d", Temp, pContext->ActiveParams.RoomSize); + } + + //ALOGV("\tReverbGetDensity end"); + return pContext->SavedDensity; +} + +//---------------------------------------------------------------------------- +// Reverb_LoadPreset() +//---------------------------------------------------------------------------- +// Purpose: +// Load a the next preset +// +// Inputs: +// pContext - handle to instance data +// +// Outputs: +// +// Side Effects: +// +//---------------------------------------------------------------------------- +int Reverb_LoadPreset(ReverbContext *pContext) +{ + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + //TODO: add reflections delay, level and reverb delay when early reflections are + // implemented + pContext->curPreset = pContext->nextPreset; + + if (pContext->curPreset != REVERB_PRESET_NONE) { + const t_reverb_settings *preset = &sReverbPresets[pContext->curPreset]; + ReverbSetRoomLevel(pContext, preset->roomLevel); + ReverbSetRoomHfLevel(pContext, preset->roomHFLevel); + ReverbSetDecayTime(pContext, preset->decayTime); + ReverbSetDecayHfRatio(pContext, preset->decayHFRatio); + //reflectionsLevel + //reflectionsDelay + ReverbSetReverbLevel(pContext, preset->reverbLevel); + // reverbDelay + ReverbSetDiffusion(pContext, preset->diffusion); + ReverbSetDensity(pContext, preset->density); + } + + LvmStatus = android::Offload_SetEffect_ContextParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ContextParameters", "Reverb_LoadPreset") + + return 0; +} + + +//---------------------------------------------------------------------------- +// Reverb_getParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Get a Reverb parameter +// +// Inputs: +// pContext - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to variable to hold retrieved value +// pValueSize - pointer to value size: maximum size as input +// +// Outputs: +// *pValue updated with parameter value +// *pValueSize updated with actual value size +// +// +// Side Effects: +// +//---------------------------------------------------------------------------- + +int Reverb_getParameter(ReverbContext *pContext, + void *pParam, + uint32_t *pValueSize, + void *pValue){ + int status = 0; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + char *name; + t_reverb_settings *pProperties; + + //ALOGV("\tReverb_getParameter start"); + if (pContext->preset) { + if (param != REVERB_PARAM_PRESET || *pValueSize < sizeof(uint16_t)) { + return -EINVAL; + } + + *(uint16_t *)pValue = pContext->nextPreset; + ALOGV("get REVERB_PARAM_PRESET, preset %d", pContext->nextPreset); + return 0; + } + + switch (param){ + case REVERB_PARAM_ROOM_LEVEL: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize1 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_ROOM_HF_LEVEL: + 
if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize12 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_DECAY_TIME: + if (*pValueSize != sizeof(uint32_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize3 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(uint32_t); + break; + case REVERB_PARAM_DECAY_HF_RATIO: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize4 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_REFLECTIONS_LEVEL: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize5 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_REFLECTIONS_DELAY: + if (*pValueSize != sizeof(uint32_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize6 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(uint32_t); + break; + case REVERB_PARAM_REVERB_LEVEL: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize7 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_REVERB_DELAY: + if (*pValueSize != sizeof(uint32_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize8 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(uint32_t); + break; + case REVERB_PARAM_DIFFUSION: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize9 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_DENSITY: + if (*pValueSize != sizeof(int16_t)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize10 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(int16_t); + break; + case REVERB_PARAM_PROPERTIES: + if (*pValueSize != sizeof(t_reverb_settings)){ + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid pValueSize11 %d", *pValueSize); + return -EINVAL; + } + *pValueSize = sizeof(t_reverb_settings); + break; + + default: + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid param %d", param); + return -EINVAL; + } + + pProperties = (t_reverb_settings *) pValue; + + switch (param){ + case REVERB_PARAM_PROPERTIES: + pProperties->roomLevel = ReverbGetRoomLevel(pContext); + pProperties->roomHFLevel = ReverbGetRoomHfLevel(pContext); + pProperties->decayTime = ReverbGetDecayTime(pContext); + pProperties->decayHFRatio = ReverbGetDecayHfRatio(pContext); + pProperties->reflectionsLevel = 0; + pProperties->reflectionsDelay = 0; + pProperties->reverbDelay = 0; + pProperties->reverbLevel = ReverbGetReverbLevel(pContext); + pProperties->diffusion = ReverbGetDiffusion(pContext); + pProperties->density = ReverbGetDensity(pContext); + + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomLevel %d", + pProperties->roomLevel); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is roomHFLevel %d", + pProperties->roomHFLevel); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayTime %d", + pProperties->decayTime); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is decayHFRatio %d", + pProperties->decayHFRatio); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reflectionsLevel %d", + pProperties->reflectionsLevel); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value 
is reflectionsDelay %d", + pProperties->reflectionsDelay); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbDelay %d", + pProperties->reverbDelay); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is reverbLevel %d", + pProperties->reverbLevel); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is diffusion %d", + pProperties->diffusion); + ALOGV("\tReverb_getParameter() REVERB_PARAM_PROPERTIES Value is density %d", + pProperties->density); + break; + + case REVERB_PARAM_ROOM_LEVEL: + *(int16_t *)pValue = ReverbGetRoomLevel(pContext); + + //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_LEVEL Value is %d", + // *(int16_t *)pValue); + break; + case REVERB_PARAM_ROOM_HF_LEVEL: + *(int16_t *)pValue = ReverbGetRoomHfLevel(pContext); + + //ALOGV("\tReverb_getParameter() REVERB_PARAM_ROOM_HF_LEVEL Value is %d", + // *(int16_t *)pValue); + break; + case REVERB_PARAM_DECAY_TIME: + *(uint32_t *)pValue = ReverbGetDecayTime(pContext); + + //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_TIME Value is %d", + // *(int32_t *)pValue); + break; + case REVERB_PARAM_DECAY_HF_RATIO: + *(int16_t *)pValue = ReverbGetDecayHfRatio(pContext); + + //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_HF_RATION Value is %d", + // *(int16_t *)pValue); + break; + case REVERB_PARAM_REVERB_LEVEL: + *(int16_t *)pValue = ReverbGetReverbLevel(pContext); + + //ALOGV("\tReverb_getParameter() REVERB_PARAM_REVERB_LEVEL Value is %d", + // *(int16_t *)pValue); + break; + case REVERB_PARAM_DIFFUSION: + *(int16_t *)pValue = ReverbGetDiffusion(pContext); + + //ALOGV("\tReverb_getParameter() REVERB_PARAM_DECAY_DIFFUSION Value is %d", + // *(int16_t *)pValue); + break; + case REVERB_PARAM_DENSITY: + *(uint16_t *)pValue = 0; + *(int16_t *)pValue = ReverbGetDensity(pContext); + //ALOGV("\tReverb_getParameter() REVERB_PARAM_DENSITY Value is %d", + // *(uint32_t *)pValue); + break; + case REVERB_PARAM_REFLECTIONS_LEVEL: + *(uint16_t *)pValue = 0; + case REVERB_PARAM_REFLECTIONS_DELAY: + *(uint32_t *)pValue = 0; + case REVERB_PARAM_REVERB_DELAY: + *(uint32_t *)pValue = 0; + break; + + default: + ALOGV("\tLVM_ERROR : Reverb_getParameter() invalid param %d", param); + status = -EINVAL; + break; + } + + //ALOGV("\tReverb_getParameter end"); + return status; +} /* end Reverb_getParameter */ + +//---------------------------------------------------------------------------- +// Reverb_setParameter() +//---------------------------------------------------------------------------- +// Purpose: +// Set a Reverb parameter +// +// Inputs: +// pContext - handle to instance data +// pParam - pointer to parameter +// pValue - pointer to value +// +// Outputs: +// +//---------------------------------------------------------------------------- + +int Reverb_setParameter (ReverbContext *pContext, void *pParam, void *pValue){ + int status = 0; + int16_t level; + int16_t ratio; + uint32_t time; + t_reverb_settings *pProperties; + int32_t *pParamTemp = (int32_t *)pParam; + int32_t param = *pParamTemp++; + + //ALOGV("\tReverb_setParameter start"); + if (pContext->preset) { + if (param != REVERB_PARAM_PRESET) { + return -EINVAL; + } + + uint16_t preset = *(uint16_t *)pValue; + ALOGV("set REVERB_PARAM_PRESET, preset %d", preset); + if (preset > REVERB_PRESET_LAST) { + return -EINVAL; + } + pContext->nextPreset = preset; + if (pContext->nextPreset != pContext->curPreset) { + //ALOGV("%s Reverb-LoadPreset called", __func__); + Reverb_LoadPreset(pContext); + } + return 0; + } + + switch (param){ + case 
REVERB_PARAM_PROPERTIES: + ALOGV("\tReverb_setParameter() REVERB_PARAM_PROPERTIES"); + pProperties = (t_reverb_settings *) pValue; + ReverbSetRoomLevel(pContext, pProperties->roomLevel); + ReverbSetRoomHfLevel(pContext, pProperties->roomHFLevel); + ReverbSetDecayTime(pContext, pProperties->decayTime); + ReverbSetDecayHfRatio(pContext, pProperties->decayHFRatio); + ReverbSetReverbLevel(pContext, pProperties->reverbLevel); + ReverbSetDiffusion(pContext, pProperties->diffusion); + ReverbSetDensity(pContext, pProperties->density); + break; + case REVERB_PARAM_ROOM_LEVEL: + level = *(int16_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_LEVEL value is %d", level); + //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomLevel"); + ReverbSetRoomLevel(pContext, level); + //ALOGV("\tReverb_setParameter() Called ReverbSetRoomLevel"); + break; + case REVERB_PARAM_ROOM_HF_LEVEL: + level = *(int16_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_ROOM_HF_LEVEL value is %d", level); + //ALOGV("\tReverb_setParameter() Calling ReverbSetRoomHfLevel"); + ReverbSetRoomHfLevel(pContext, level); + //ALOGV("\tReverb_setParameter() Called ReverbSetRoomHfLevel"); + break; + case REVERB_PARAM_DECAY_TIME: + time = *(uint32_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_TIME value is %d", time); + //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayTime"); + ReverbSetDecayTime(pContext, time); + //ALOGV("\tReverb_setParameter() Called ReverbSetDecayTime"); + break; + case REVERB_PARAM_DECAY_HF_RATIO: + ratio = *(int16_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_HF_RATIO value is %d", ratio); + //ALOGV("\tReverb_setParameter() Calling ReverbSetDecayHfRatio"); + ReverbSetDecayHfRatio(pContext, ratio); + //ALOGV("\tReverb_setParameter() Called ReverbSetDecayHfRatio"); + break; + case REVERB_PARAM_REVERB_LEVEL: + level = *(int16_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_REVERB_LEVEL value is %d", level); + //ALOGV("\tReverb_setParameter() Calling ReverbSetReverbLevel"); + ReverbSetReverbLevel(pContext, level); + //ALOGV("\tReverb_setParameter() Called ReverbSetReverbLevel"); + break; + case REVERB_PARAM_DIFFUSION: + ratio = *(int16_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DIFFUSION value is %d", ratio); + //ALOGV("\tReverb_setParameter() Calling ReverbSetDiffusion"); + ReverbSetDiffusion(pContext, ratio); + //ALOGV("\tReverb_setParameter() Called ReverbSetDiffusion"); + break; + case REVERB_PARAM_DENSITY: + ratio = *(int16_t *)pValue; + //ALOGV("\tReverb_setParameter() REVERB_PARAM_DECAY_DENSITY value is %d", ratio); + //ALOGV("\tReverb_setParameter() Calling ReverbSetDensity"); + ReverbSetDensity(pContext, ratio); + //ALOGV("\tReverb_setParameter() Called ReverbSetDensity"); + break; + break; + case REVERB_PARAM_REFLECTIONS_LEVEL: + case REVERB_PARAM_REFLECTIONS_DELAY: + case REVERB_PARAM_REVERB_DELAY: + break; + default: + ALOGV("\tLVM_ERROR : Reverb_setParameter() invalid param %d", param); + break; + } + + //ALOGV("\tReverb_setParameter end"); + return status; +} /* end Reverb_setParameter */ + +} // namespace +} // namespace + +extern "C" { +/* Effect Control Interface Implementation: Process */ +int Reverb_process(effect_handle_t self, + audio_buffer_t *inBuffer __unused, + audio_buffer_t *outBuffer __unused){ + android::ReverbContext * pContext = (android::ReverbContext *) self; + int status = 0; +#if 0 + if (pContext == NULL){ + ALOGV("\tLVM_ERROR : Reverb_process() ERROR pContext == NULL"); + return 
-EINVAL; + } + if (inBuffer == NULL || inBuffer->raw == NULL || + outBuffer == NULL || outBuffer->raw == NULL || + inBuffer->frameCount != outBuffer->frameCount){ + ALOGV("\tLVM_ERROR : Reverb_process() ERROR NULL INPUT POINTER OR FRAME COUNT IS WRONG"); + return -EINVAL; + } + //ALOGV("\tReverb_process() Calling process with %d frames", outBuffer->frameCount); + /* Process all the available frames, block processing is handled internalLY by the LVM bundle */ + status = process( (LVM_INT16 *)inBuffer->raw, + (LVM_INT16 *)outBuffer->raw, + outBuffer->frameCount, + pContext); + + if (pContext->bEnabled == LVM_FALSE) { + if (pContext->SamplesToExitCount > 0) { + pContext->SamplesToExitCount -= outBuffer->frameCount; + } else { + status = -ENODATA; + } + } +#endif + return status; +} /* end Reverb_process */ + +/* Effect Control Interface Implementation: Command */ +int Reverb_command(effect_handle_t self, + uint32_t cmdCode, + uint32_t cmdSize, + void *pCmdData, + uint32_t *replySize, + void *pReplyData){ + android::ReverbContext * pContext = (android::ReverbContext *) self; + int retsize; + LVREV_ReturnStatus_en LvmStatus=LVREV_SUCCESS; /* Function call status */ + + + if (pContext == NULL){ + ALOGV("\tLVM_ERROR : Reverb_command ERROR pContext == NULL"); + return -EINVAL; + } + + //ALOGV("\tReverb_command INPUTS are: command %d cmdSize %d",cmdCode, cmdSize); + + switch (cmdCode){ + case EFFECT_CMD_INIT: + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_INIT start"); + + if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)){ + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_INIT: ERROR"); + return -EINVAL; + } + *(int *) pReplyData = 0; + break; + + case EFFECT_CMD_SET_CONFIG: + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_SET_CONFIG start"); + if (pCmdData == NULL || cmdSize != sizeof(effect_config_t) || + pReplyData == NULL || replySize == NULL || *replySize != sizeof(int)) { + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_SET_CONFIG: ERROR"); + return -EINVAL; + } + *(int *) pReplyData = android::Reverb_setConfig(pContext, + (effect_config_t *) pCmdData); + break; + + case EFFECT_CMD_GET_CONFIG: + if (pReplyData == NULL || replySize == NULL || *replySize != sizeof(effect_config_t)) { + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_GET_CONFIG: ERROR"); + return -EINVAL; + } + + android::Reverb_getConfig(pContext, (effect_config_t *)pReplyData); + break; + + case EFFECT_CMD_RESET: + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_RESET start"); + Reverb_setConfig(pContext, &pContext->config); + break; + + case EFFECT_CMD_GET_PARAM:{ + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_GET_PARAM start"); + effect_param_t *p = (effect_param_t *)pCmdData; + + if (pCmdData == NULL || cmdSize < sizeof(effect_param_t) || + cmdSize < (sizeof(effect_param_t) + p->psize) || + pReplyData == NULL || replySize == NULL || + *replySize < (sizeof(effect_param_t) + p->psize)) { + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_GET_PARAM: ERROR"); + return -EINVAL; + } + + memcpy(pReplyData, pCmdData, sizeof(effect_param_t) + p->psize); + + p = (effect_param_t *)pReplyData; + + int voffset = ((p->psize - 1) / sizeof(int32_t) + 1) * sizeof(int32_t); + + p->status = android::Reverb_getParameter(pContext, + (void *)p->data, + &p->vsize, + p->data + voffset); + + *replySize = sizeof(effect_param_t) + voffset + p->vsize; + + //ALOGV("\tReverb_command EFFECT_CMD_GET_PARAM " + 
// "*pCmdData %d, *replySize %d, *pReplyData %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pReplyData + sizeof(effect_param_t) + voffset)); + + } break; + case EFFECT_CMD_SET_PARAM:{ + + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_SET_PARAM start"); + //ALOGV("\tReverb_command EFFECT_CMD_SET_PARAM param %d, *replySize %d, value %d ", + // *(int32_t *)((char *)pCmdData + sizeof(effect_param_t)), + // *replySize, + // *(int16_t *)((char *)pCmdData + sizeof(effect_param_t) + sizeof(int32_t))); + + if (pCmdData == NULL || (cmdSize < (sizeof(effect_param_t) + sizeof(int32_t))) || + pReplyData == NULL || replySize == NULL || *replySize != sizeof(int32_t)) { + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR"); + return -EINVAL; + } + + effect_param_t *p = (effect_param_t *) pCmdData; + + if (p->psize != sizeof(int32_t)){ + ALOGV("\t4LVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_SET_PARAM: ERROR, psize is not sizeof(int32_t)"); + return -EINVAL; + } + + //ALOGV("\tn5Reverb_command cmdSize is %d\n" + // "\tsizeof(effect_param_t) is %d\n" + // "\tp->psize is %d\n" + // "\tp->vsize is %d" + // "\n", + // cmdSize, sizeof(effect_param_t), p->psize, p->vsize ); + + *(int *)pReplyData = android::Reverb_setParameter(pContext, + (void *)p->data, + p->data + p->psize); + } break; + + case EFFECT_CMD_ENABLE: + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_ENABLE start"); + + if (pReplyData == NULL || *replySize != sizeof(int)){ + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_ENABLE: ERROR"); + return -EINVAL; + } + if(pContext->bEnabled == LVM_TRUE){ + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_ENABLE: ERROR-Effect is already enabled"); + return -EINVAL; + } + *(int *)pReplyData = 0; + pContext->bEnabled = LVM_TRUE; + /* Get the current settings from context*/ + pContext->SamplesToExitCount = + (pContext->ActiveParams.T60 * pContext->config.inputCfg.samplingRate)/1000; + // force no volume ramp for first buffer processed after enabling the effect + pContext->volumeMode = android::REVERB_VOLUME_FLAT; + //ALOGV("\tEFFECT_CMD_ENABLE SamplesToExitCount = %d", pContext->SamplesToExitCount); + + /* Send control parameters before enabling the effect */ + LvmStatus = android::Offload_SetEffect_ControlParameters(pContext); + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ControlParameters", "ReverbSetDensity") + + /* Send context paramets to offload reverb */ + LvmStatus = android::Offload_SetEffect_ContextParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ContextParameters", "Effect_command CMD_ENABLE") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + break; + case EFFECT_CMD_DISABLE: + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_DISABLE start"); + + if (pReplyData == NULL || *replySize != sizeof(int)){ + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_DISABLE: ERROR"); + return -EINVAL; + } + if(pContext->bEnabled == LVM_FALSE){ + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_DISABLE: ERROR-Effect is not yet enabled"); + return -EINVAL; + } + *(int *)pReplyData = 0; + pContext->bEnabled = LVM_FALSE; + /* Send context paramets to offload reverb */ + LvmStatus = android::Offload_SetEffect_ContextParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ContextParameters", "Effect_command CMD_DISABLE") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + 
break; + + case EFFECT_CMD_SET_VOLUME: + if (pCmdData == NULL || + cmdSize != 2 * sizeof(uint32_t)) { + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "EFFECT_CMD_SET_VOLUME: ERROR"); + return -EINVAL; + } + + + if (pReplyData != NULL) { // we have volume control + pContext->leftVolume = (LVM_INT16)((*(uint32_t *)pCmdData + (1 << 11)) >> 12); + pContext->rightVolume = (LVM_INT16)((*((uint32_t *)pCmdData + 1) + (1 << 11)) >> 12); + *(uint32_t *)pReplyData = (1 << 24); + *((uint32_t *)pReplyData + 1) = (1 << 24); + if (pContext->volumeMode == android::REVERB_VOLUME_OFF) { + // force no volume ramp for first buffer processed after getting volume control + pContext->volumeMode = android::REVERB_VOLUME_FLAT; + } + } else { // we don't have volume control + pContext->leftVolume = REVERB_UNIT_VOLUME; + pContext->rightVolume = REVERB_UNIT_VOLUME; + pContext->volumeMode = android::REVERB_VOLUME_OFF; + } + ALOGV("EFFECT_CMD_SET_VOLUME left %d, right %d mode %d", + pContext->leftVolume, pContext->rightVolume, pContext->volumeMode); + /* Send context paramets to offload reverb */ + LvmStatus = android::Offload_SetEffect_ContextParameters(pContext); + + LVM_ERROR_CHECK(LvmStatus, "Offload_SetEffect_ContextParameters", "Effect_command CMD_SET_VOLUME") + if(LvmStatus != LVREV_SUCCESS) return -EINVAL; + break; + + case EFFECT_CMD_SET_DEVICE: + case EFFECT_CMD_SET_AUDIO_MODE: + //ALOGV("\tReverb_command cmdCode Case: " + // "EFFECT_CMD_SET_DEVICE/EFFECT_CMD_SET_VOLUME/EFFECT_CMD_SET_AUDIO_MODE start"); + break; + case EFFECT_CMD_OFFLOAD: + if (pCmdData == NULL || cmdSize != sizeof(effect_offload_param_t) + || pReplyData == NULL || *replySize != sizeof(uint32_t)) { + return -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmdCode); + } else { + effect_offload_param_t* offload_param = (effect_offload_param_t*)pCmdData; + + ALOGD("%s [Reverb]: Command(%u)= offload %d, output %d", __func__, cmdCode, offload_param->isOffload, offload_param->ioHandle); + + pContext->OffloadEnabled = offload_param->isOffload; + if (pContext->OutHandle == offload_param->ioHandle) { + ALOGV("%s [Reverb]: This context has same output %d", __func__, offload_param->ioHandle); + } else { + pContext->OutHandle = offload_param->ioHandle; + } + *(int *)pReplyData = 0; + } + break; + default: + ALOGV("\tLVM_ERROR : Reverb_command cmdCode Case: " + "DEFAULT start %d ERROR",cmdCode); + return -EINVAL; + } + + //ALOGV("\tReverb_command end\n\n"); + return 0; +} /* end Reverb_command */ + +/* Effect Control Interface Implementation: get_descriptor */ +int Reverb_getDescriptor(effect_handle_t self, + effect_descriptor_t *pDescriptor) +{ + android::ReverbContext * pContext = (android::ReverbContext *)self; + const effect_descriptor_t *desc; + + if (pContext == NULL || pDescriptor == NULL) { + ALOGV("Reverb_getDescriptor() invalid param"); + return -EINVAL; + } + + if (pContext->auxiliary) { + if (pContext->preset) { + desc = &android::gAuxPresetReverbDescriptor; + } else { + desc = &android::gAuxEnvReverbDescriptor; + } + } else { + if (pContext->preset) { + desc = &android::gInsertPresetReverbDescriptor; + } else { + desc = &android::gInsertEnvReverbDescriptor; + } + } + + *pDescriptor = *desc; + + return 0; +} /* end Reverb_getDescriptor */ + +// effect_handle_t interface implementation for Reverb effect +const struct effect_interface_s gReverbInterface = { + Reverb_process, + Reverb_command, + Reverb_getDescriptor, + NULL, +}; /* end gReverbInterface */ + +// This is the only symbol that needs to be exported 
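Some context for the comment above: the effects framework loads this library dynamically and resolves exactly this one structure by name (typically via dlsym); the entry point is tagged visibility("default") while the rest of the code is built hidden, as the visualizer's Android.mk further down does with -fvisibility=hidden. A hedged sketch of how a loader might pick the symbol up; the .so path is hypothetical, and AUDIO_EFFECT_LIBRARY_INFO_SYM / AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR are assumed to come from <hardware/audio_effect.h>:

    #include <dlfcn.h>
    #include <stdio.h>
    #include <hardware/audio_effect.h>

    /* Illustrative loader: open the effect module and resolve its single exported
     * entry structure by name. The .so path is a placeholder, not the module name
     * this patch actually builds. */
    int main(void)
    {
        void *lib = dlopen("/vendor/lib/soundfx/libhwreverb.so", RTLD_NOW);  /* hypothetical path */
        if (lib == NULL) {
            fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return 1;
        }

        audio_effect_library_t *info =
                (audio_effect_library_t *)dlsym(lib, AUDIO_EFFECT_LIBRARY_INFO_SYM_AS_STR);
        if (info == NULL) {
            fprintf(stderr, "dlsym failed: %s\n", dlerror());
            dlclose(lib);
            return 1;
        }

        printf("effect library: %s (%s)\n", info->name, info->implementor);
        dlclose(lib);
        return 0;
    }

In a real system the library path comes from the platform's effects configuration (e.g. audio_effects.conf) rather than a hard-coded string.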
+__attribute__ ((visibility ("default"))) +audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { + .tag = AUDIO_EFFECT_LIBRARY_TAG, + .version = EFFECT_LIBRARY_API_VERSION, + .name = "Reverb hardware Library", + .implementor = "Samsung SystemLSI", + .create_effect = android::EffectCreate, + .release_effect = android::EffectRelease, + .get_descriptor = android::EffectGetDescriptor, +}; + +} diff --git a/libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.h b/libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.h new file mode 100644 index 0000000..42d4960 --- /dev/null +++ b/libaudio/effecthal/postprocessing/aosp-effect/Reverb/exynos_effectReverb.h @@ -0,0 +1,47 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_EXYNOS_EFFECTREVERB_H_ +#define ANDROID_EXYNOS_EFFECTREVERB_H_ + +#include +#include +#include + +#if __cplusplus +extern "C" { +#endif + +#define MAX_NUM_BANDS 5 +#define MAX_CALL_SIZE 256 +#define LVREV_MAX_T60 7000 +#define LVREV_MAX_REVERB_LEVEL 2000 +#define LVREV_MAX_FRAME_SIZE 2560 +#define LVREV_CUP_LOAD_ARM9E 470 // Expressed in 0.1 MIPS +#define LVREV_MEM_USAGE 71+(LVREV_MAX_FRAME_SIZE>>7) // Expressed in kB +//#define LVM_PCM + +typedef struct _LPFPair_t +{ + int16_t Room_HF; + int16_t LPF; +} LPFPair_t; +#if __cplusplus +} // extern "C" +#endif + + +#endif /*ANDROID_EXYNOS_EFFECTREVERB_H_*/ diff --git a/libaudio/effecthal/visualizer/Android.mk b/libaudio/effecthal/visualizer/Android.mk new file mode 100644 index 0000000..f28b8d7 --- /dev/null +++ b/libaudio/effecthal/visualizer/Android.mk @@ -0,0 +1,27 @@ +ifeq ($(BOARD_USE_OFFLOAD_EFFECT),true) +LOCAL_PATH:= $(call my-dir) + +# Exynos Offload Visualizer library +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + exynos_visualizer.c + +LOCAL_C_INCLUDES := \ + external/tinyalsa/include \ + $(call include-path-for, audio-effects) + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + liblog \ + libtinyalsa + +LOCAL_CFLAGS+= -O2 -fvisibility=hidden + +LOCAL_MODULE:= libexynosvisualizer +LOCAL_MODULE_RELATIVE_PATH := soundfx + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) +endif diff --git a/libaudio/effecthal/visualizer/MODULE_LICENSE_APACHE2 b/libaudio/effecthal/visualizer/MODULE_LICENSE_APACHE2 new file mode 100644 index 0000000..e69de29 diff --git a/libaudio/effecthal/visualizer/NOTICE b/libaudio/effecthal/visualizer/NOTICE new file mode 100644 index 0000000..c5b1efa --- /dev/null +++ b/libaudio/effecthal/visualizer/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2005-2008, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libaudio/effecthal/visualizer/exynos_visualizer.c b/libaudio/effecthal/visualizer/exynos_visualizer.c new file mode 100644 index 0000000..6e27c01 --- /dev/null +++ b/libaudio/effecthal/visualizer/exynos_visualizer.c @@ -0,0 +1,1225 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "Offload_Visualizer" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include +#include + +#include "exynos_visualizer.h" + + +/* effect_handle_t interface implementation for visualizer effect */ +extern const struct effect_interface_s effect_interface; + + +/* Exynos Offload Visualizer UUID: XX-XX-XX-XX-0002a5d5c51b */ +const effect_descriptor_t visualizer_descriptor = { + {0xe46b26a0, 0xdddd, 0x11db, 0x8afd, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // type + {0x7a8044a0, 0x1a71, 0x11e3, 0xa184, {0x00, 0x02, 0xa5, 0xd5, 0xc5, 0x1b}}, // uuid + EFFECT_CONTROL_API_VERSION, + (EFFECT_FLAG_TYPE_INSERT | EFFECT_FLAG_HW_ACC_TUNNEL), + 0, /* TODO */ + 1, + "Samsung Exynos Offload Visualizer", + "Samsung SystemLSI", +}; + +/* The list for Offloaded Effect Descriptor by Exynos EffectHAL */ +const effect_descriptor_t *descriptors[] = { + &visualizer_descriptor, + NULL, +}; + +pthread_once_t once = PTHREAD_ONCE_INIT; +int init_status; + +/* list of created effects */ +struct listnode created_effects_list; + +/* list of active output streams */ +struct listnode active_outputs_list; + +/* visualizer capture pcm handle */ +static struct pcm *pcm = NULL; + +/* lock must be held when modifying or accessing created_effects_list or active_outputs_list */ +pthread_mutex_t lock; + +pthread_mutex_t thread_lock; +pthread_cond_t cond; +pthread_t capture_thread; +bool exit_thread; +int thread_status; + + +/* + * Local functions + */ +static void init_once() +{ + list_init(&created_effects_list); + list_init(&active_outputs_list); + + pthread_mutex_init(&lock, NULL); + pthread_mutex_init(&thread_lock, NULL); + pthread_cond_init(&cond, NULL); + exit_thread = false; + thread_status = -1; + + init_status = 0; +} + +int lib_init() +{ + pthread_once(&once, init_once); + return init_status; + } + +bool effect_exists(effect_context_t *context) +{ + struct listnode *node; + + list_for_each(node, &created_effects_list) { + effect_context_t *fx_ctxt = node_to_item(node, effect_context_t, effects_list_node); + if (fx_ctxt == context) { + return true; + } + } + + return false; +} + +output_context_t *get_output(audio_io_handle_t output) +{ + struct listnode *node; + + list_for_each(node, &active_outputs_list) { + output_context_t *out_ctxt = node_to_item(node, output_context_t, outputs_list_node); + if (out_ctxt->handle == output) { + return out_ctxt; + } + } + + return NULL; +} + +void add_effect_to_output(output_context_t * output, effect_context_t *context) +{ + struct listnode *fx_node; + + list_for_each(fx_node, &output->effects_list) { + effect_context_t *fx_ctxt = node_to_item(fx_node, effect_context_t, output_node); + if (fx_ctxt == context) + return; + } + + list_add_tail(&output->effects_list, &context->output_node); + if (context->ops.start) + context->ops.start(context, output); + + return ; +} + +void remove_effect_from_output( + output_context_t * output, + effect_context_t *context) +{ + struct listnode *fx_node; + + list_for_each(fx_node, &output->effects_list) { + effect_context_t *fx_ctxt = node_to_item(fx_node, effect_context_t, output_node); + if (fx_ctxt == context) { + if (context->ops.stop) + context->ops.stop(context, output); + list_remove(&context->output_node); + return; + } + } + + return ; +} + +bool effects_enabled() +{ + struct listnode *out_node; + + list_for_each(out_node, &active_outputs_list) { + struct listnode *fx_node; + output_context_t 
*out_ctxt = node_to_item(out_node, output_context_t, outputs_list_node); + + list_for_each(fx_node, &out_ctxt->effects_list) { + effect_context_t *fx_ctxt = node_to_item(fx_node, effect_context_t, output_node); + if (fx_ctxt->state == EFFECT_STATE_ACTIVE && fx_ctxt->ops.process != NULL) + return true; + } + } + + return false; +} + +void *capture_thread_loop(void *arg __unused) +{ + int16_t data[AUDIO_CAPTURE_PERIOD_SIZE * AUDIO_CAPTURE_CHANNEL_COUNT * sizeof(int16_t)]; + audio_buffer_t buf; + buf.frameCount = AUDIO_CAPTURE_PERIOD_SIZE; + buf.s16 = data; + bool capture_enabled = false; + int ret; + + + prctl(PR_SET_NAME, (unsigned long)"Visualizer Capture", 0, 0, 0); + + ALOGD("%s: Started running Visualizer Capture Thread", __func__); + + pthread_mutex_lock(&lock); + while(!exit_thread) { + if (effects_enabled()) { + /* User start Visualizer, and compress offload playback is working */ + if (!capture_enabled) { + pcm = pcm_open(SOUND_CARD, CAPTURE_DEVICE, PCM_IN, &pcm_config_capture); + if (pcm && !pcm_is_ready(pcm)) { + ALOGE("%s: Failed to open PCM for Visualizer (%s)", __func__, pcm_get_error(pcm)); + pcm_close(pcm); + pcm = NULL; + } else { + ALOGD("%s: Opened PCM for Visualizer", __func__); + capture_enabled = true; + } + } + } else { + /* User stop Visualizer, but compress offload playback is working */ + if (capture_enabled) { + if (pcm != NULL) { + pcm_close(pcm); + pcm = NULL; + } + capture_enabled = false; + } + ALOGD("%s: Compress Offload playback is working, but visualizer is not started yet. Wait!!!", __func__); + pthread_cond_wait(&cond, &lock); + ALOGD("%s: Compress Offload playback is working, and visualizer is started. Run!!!", __func__); + } + + if (!capture_enabled) + continue; + + pthread_mutex_unlock(&lock); + ret = pcm_read(pcm, data, sizeof(data)); + pthread_mutex_lock(&lock); + if (ret == 0) { + struct listnode *out_node; + + list_for_each(out_node, &active_outputs_list) { + output_context_t *out_ctxt = node_to_item(out_node, output_context_t, outputs_list_node); + struct listnode *fx_node; + + list_for_each(fx_node, &out_ctxt->effects_list) { + effect_context_t *fx_ctxt = node_to_item(fx_node, effect_context_t, output_node); + if (fx_ctxt->ops.process != NULL) + fx_ctxt->ops.process(fx_ctxt, &buf, &buf); + } + } + } else { + if (pcm != NULL) + ALOGW("%s: read status %d %s", __func__, ret, pcm_get_error(pcm)); + else + ALOGW("%s: read status %d PCM Closed", __func__, ret); + } + } + + if (capture_enabled) { + if (pcm != NULL) { + pcm_close(pcm); + pcm = NULL; + } + capture_enabled = false; + } + pthread_mutex_unlock(&lock); + + ALOGD("%s: Stopped Visualizer Capture Thread", __func__); + return (void *)NULL; +} + +/* + * Interface from AudioHAL + */ +__attribute__ ((visibility ("default"))) +int notify_start_output(audio_io_handle_t output) +{ + int ret = 0; + struct listnode *node; + output_context_t * out_ctxt = NULL; + + ALOGD("%s: called with Audio Output Handle (%u)", __func__, output); + + if (lib_init() != 0) + return init_status; + + pthread_mutex_lock(&thread_lock); + pthread_mutex_lock(&lock); + if (get_output(output) != NULL) { + ALOGW("%s output already started", __func__); + ret = -ENOSYS; + } else { + out_ctxt = (output_context_t *) malloc(sizeof(output_context_t)); + if (out_ctxt) { + ALOGD("%s: created Output Context for Audio Handle (%u)", __func__, output); + out_ctxt->handle = output; + + list_init(&out_ctxt->effects_list); + + list_for_each(node, &created_effects_list) { + effect_context_t *fx_ctxt = node_to_item(node, effect_context_t, 
effects_list_node); + if (fx_ctxt->out_handle == output) { + ALOGD("%s: Start Effect Context for Audio Output Handle (%u)", __func__, output); + if (fx_ctxt->ops.start) + fx_ctxt->ops.start(fx_ctxt, out_ctxt); + list_add_tail(&out_ctxt->effects_list, &fx_ctxt->output_node); + } + } + + if (list_empty(&active_outputs_list)) { + exit_thread = false; + thread_status = pthread_create(&capture_thread, (const pthread_attr_t *) NULL, capture_thread_loop, NULL); + } + + list_add_tail(&active_outputs_list, &out_ctxt->outputs_list_node); + pthread_cond_signal(&cond); + } else { + ALOGE("%s: Failed to allocate memory for Output Context", __func__); + ret = -ENOMEM; + } + } + pthread_mutex_unlock(&lock); + pthread_mutex_unlock(&thread_lock); + + return ret; +} + +__attribute__ ((visibility ("default"))) +int notify_stop_output(audio_io_handle_t output) +{ + int ret = 0; + struct listnode *node; + struct listnode *fx_node; + output_context_t *out_ctxt; + + ALOGD("%s: called with Audio Output Handle (%u)", __func__, output); + + if (lib_init() != 0) + return init_status; + + pthread_mutex_lock(&thread_lock); + pthread_mutex_lock(&lock); + out_ctxt = get_output(output); + if (out_ctxt == NULL) { + ALOGE("%s: This Audio Output Handle is not started", __func__); + ret = -ENOSYS; + } else { + list_for_each(fx_node, &out_ctxt->effects_list) { + effect_context_t *fx_ctxt = node_to_item(fx_node, effect_context_t, output_node); + if (fx_ctxt->ops.stop) + fx_ctxt->ops.stop(fx_ctxt, out_ctxt); + } + + list_remove(&out_ctxt->outputs_list_node); + pthread_cond_signal(&cond); + + if (list_empty(&active_outputs_list)) { + if (thread_status == 0) { + exit_thread = true; + /* PCM should be closed here, since the active list is empty + pcm_read is returned immediately if it is wait in capture + thread */ + if (pcm != NULL) { + pcm_stop(pcm); + pcm_close(pcm); + pcm = NULL; + ALOGD("%s: Closed PCM for Visualizer", __func__); + } + pthread_mutex_unlock(&lock); + pthread_cond_signal(&cond); + pthread_join(capture_thread, (void **) NULL); + pthread_mutex_lock(&lock); + thread_status = -1; + } + } + free(out_ctxt); + } + pthread_mutex_unlock(&lock); + pthread_mutex_unlock(&thread_lock); + + return ret; +} + +/* +** Effect operation functions + */ +int set_config(effect_context_t *context, effect_config_t *config) +{ + if (config->inputCfg.samplingRate != config->outputCfg.samplingRate) return -EINVAL; + if (config->inputCfg.channels != config->outputCfg.channels) return -EINVAL; + if (config->inputCfg.format != config->outputCfg.format) return -EINVAL; + if (config->inputCfg.channels != AUDIO_CHANNEL_OUT_STEREO) return -EINVAL; + if (config->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_WRITE && + config->outputCfg.accessMode != EFFECT_BUFFER_ACCESS_ACCUMULATE) return -EINVAL; + if (config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) return -EINVAL; + + context->config = *config; + + if (context->ops.reset) + context->ops.reset(context); + + return 0; +} + +void get_config(effect_context_t *context, effect_config_t *config) +{ + *config = context->config; +} + +uint32_t visualizer_get_delta_time_ms_from_updated_time( + visualizer_context_t* visu_ctxt) +{ + uint32_t delta_ms = 0; + if (visu_ctxt->buffer_update_time.tv_sec != 0) { + struct timespec ts; + + if (clock_gettime(CLOCK_MONOTONIC, &ts) == 0) { + time_t secs = ts.tv_sec - visu_ctxt->buffer_update_time.tv_sec; + long nsec = ts.tv_nsec - visu_ctxt->buffer_update_time.tv_nsec; + if (nsec < 0) { + --secs; + nsec += 1000000000; + } + delta_ms = secs * 1000 + nsec / 1000000; 
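The millisecond delta computed just above does the nanosecond borrow by hand before converting to ms. A self-contained restatement of the same arithmetic, with a worked case in the comments; the helper name and example values are ours, not part of the patch:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    /* Illustrative helper (name is ours): milliseconds between two CLOCK_MONOTONIC
     * samples, borrowing a second when the nanosecond difference goes negative --
     * the same arithmetic visualizer_get_delta_time_ms_from_updated_time() uses.
     * Example: 2.1 s -> 3.0 s gives secs = 1, nsec = -100000000; after the borrow
     * it is secs = 0, nsec = 900000000, i.e. 900 ms. */
    static uint32_t elapsed_ms(const struct timespec *earlier, const struct timespec *later)
    {
        time_t secs = later->tv_sec - earlier->tv_sec;
        long   nsec = later->tv_nsec - earlier->tv_nsec;
        if (nsec < 0) {
            --secs;
            nsec += 1000000000L;
        }
        return (uint32_t)(secs * 1000 + nsec / 1000000);
    }

    int main(void)
    {
        struct timespec t0 = { .tv_sec = 2, .tv_nsec = 100000000L };  /* 2.1 s */
        struct timespec t1 = { .tv_sec = 3, .tv_nsec = 0 };           /* 3.0 s */
        printf("%" PRIu32 " ms\n", elapsed_ms(&t0, &t1));             /* prints 900 ms */
        return 0;
    }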
+ } + } + return delta_ms; +} + +int visualizer_reset(effect_context_t *context) +{ + visualizer_context_t * visu_ctxt = (visualizer_context_t *)context; + + visu_ctxt->capture_idx = 0; + visu_ctxt->last_capture_idx = 0; + visu_ctxt->buffer_update_time.tv_sec = 0; + visu_ctxt->latency = DSP_OUTPUT_LATENCY_MS; + memset(visu_ctxt->capture_buf, 0x80, CAPTURE_BUF_SIZE); + + return 0; +} + +int visualizer_enable(effect_context_t *context __unused) +{ + return 0; +} + +int visualizer_disable(effect_context_t *context __unused) +{ + return 0; +} + +int visualizer_start( + effect_context_t *context __unused, + output_context_t *output __unused) +{ + return 0; +} + +int visualizer_stop( + effect_context_t *context __unused, + output_context_t *output __unused) +{ + return 0; +} + +int visualizer_process( + effect_context_t *context, + audio_buffer_t *in_buf, + audio_buffer_t *out_buf) +{ + visualizer_context_t *visu_ctxt = (visualizer_context_t *)context; + + if (!effect_exists(context)) + return -EINVAL; + + if (in_buf == NULL || in_buf->raw == NULL || + out_buf == NULL || out_buf->raw == NULL || + in_buf->frameCount != out_buf->frameCount || + in_buf->frameCount == 0) { + return -EINVAL; + } + + // perform measurements if needed + if (visu_ctxt->meas_mode & MEASUREMENT_MODE_PEAK_RMS) { + // find the peak and RMS squared for the new buffer + uint32_t inIdx; + int16_t max_sample = 0; + float rms_squared_acc = 0; + for (inIdx = 0 ; inIdx < in_buf->frameCount * visu_ctxt->channel_count ; inIdx++) { + if (in_buf->s16[inIdx] > max_sample) { + max_sample = in_buf->s16[inIdx]; + } else if (-in_buf->s16[inIdx] > max_sample) { + max_sample = -in_buf->s16[inIdx]; + } + rms_squared_acc += (in_buf->s16[inIdx] * in_buf->s16[inIdx]); + } + // store the measurement + visu_ctxt->past_meas[visu_ctxt->meas_buffer_idx].peak_u16 = (uint16_t)max_sample; + visu_ctxt->past_meas[visu_ctxt->meas_buffer_idx].rms_squared = + rms_squared_acc / (in_buf->frameCount * visu_ctxt->channel_count); + visu_ctxt->past_meas[visu_ctxt->meas_buffer_idx].is_valid = true; + if (++visu_ctxt->meas_buffer_idx >= visu_ctxt->meas_wndw_size_in_buffers) { + visu_ctxt->meas_buffer_idx = 0; + } + } + + /* all code below assumes stereo 16 bit PCM output and input */ + int32_t shift; + + if (visu_ctxt->scaling_mode == VISUALIZER_SCALING_MODE_NORMALIZED) { + /* derive capture scaling factor from peak value in current buffer + * this gives more interesting captures for display. */ + shift = 32; + int len = in_buf->frameCount * 2; + int i; + for (i = 0; i < len; i++) { + int32_t smp = in_buf->s16[i]; + if (smp < 0) smp = -smp - 1; /* take care to keep the max negative in range */ + int32_t clz = __builtin_clz(smp); + if (shift > clz) shift = clz; + } + /* A maximum amplitude signal will have 17 leading zeros, which we want to + * translate to a shift of 8 (for converting 16 bit to 8 bit) */ + shift = 25 - shift; + /* Never scale by less than 8 to avoid returning unaltered PCM signal. 
*/ + if (shift < 3) { + shift = 3; + } + /* add one to combine the division by 2 needed after summing + * left and right channels below */ + shift++; + } else { + assert(visu_ctxt->scaling_mode == VISUALIZER_SCALING_MODE_AS_PLAYED); + shift = 9; + } + + uint32_t capt_idx; + uint32_t in_idx; + uint8_t *buf = visu_ctxt->capture_buf; + for (in_idx = 0, capt_idx = visu_ctxt->capture_idx; + in_idx < in_buf->frameCount; + in_idx++, capt_idx++) { + if (capt_idx >= CAPTURE_BUF_SIZE) { + /* wrap around */ + capt_idx = 0; + } + int32_t smp = in_buf->s16[2 * in_idx] + in_buf->s16[2 * in_idx + 1]; + smp = smp >> shift; + buf[capt_idx] = ((uint8_t)smp)^0x80; + } + + /* XXX the following two should really be atomic, though it probably doesn't + * matter much for visualization purposes */ + visu_ctxt->capture_idx = capt_idx; + /* update last buffer update time stamp */ + if (clock_gettime(CLOCK_MONOTONIC, &visu_ctxt->buffer_update_time) < 0) { + visu_ctxt->buffer_update_time.tv_sec = 0; + } + + if (context->state != EFFECT_STATE_ACTIVE) { + ALOGV("%s DONE inactive", __func__); + return -ENODATA; + } + + return 0; +} + +int visualizer_set_parameter( + effect_context_t *context, + effect_param_t *p, + uint32_t size __unused) +{ + visualizer_context_t *visu_ctxt = (visualizer_context_t *)context; + + if (p->psize != sizeof(uint32_t) || p->vsize != sizeof(uint32_t)) + return -EINVAL; + + switch (*(uint32_t *)p->data) { + case VISUALIZER_PARAM_CAPTURE_SIZE: + visu_ctxt->capture_size = *((uint32_t *)p->data + 1); + ALOGV("%s set capture_size = %d", __func__, visu_ctxt->capture_size); + break; + + case VISUALIZER_PARAM_SCALING_MODE: + visu_ctxt->scaling_mode = *((uint32_t *)p->data + 1); + ALOGV("%s set scaling_mode = %d", __func__, visu_ctxt->scaling_mode); + break; + + case VISUALIZER_PARAM_LATENCY: + /* Ignore latency as we capture at DSP output + * visu_ctxt->latency = *((uint32_t *)p->data + 1); */ + ALOGV("%s set latency = %d", __func__, visu_ctxt->latency); + break; + + case VISUALIZER_PARAM_MEASUREMENT_MODE: + visu_ctxt->meas_mode = *((uint32_t *)p->data + 1); + ALOGV("%s set meas_mode = %d", __func__, visu_ctxt->meas_mode); + break; + + default: + return -EINVAL; + } + + return 0; +} + +int visualizer_get_parameter( + effect_context_t *context, + effect_param_t *p, + uint32_t *size) +{ + visualizer_context_t *visu_ctxt = (visualizer_context_t *)context; + + p->status = 0; + *size = sizeof(effect_param_t) + sizeof(uint32_t); + if (p->psize != sizeof(uint32_t)) { + p->status = -EINVAL; + return 0; + } + + switch (*(uint32_t *)p->data) { + case VISUALIZER_PARAM_CAPTURE_SIZE: + ALOGV("%s get capture_size = %d", __func__, visu_ctxt->capture_size); + *((uint32_t *)p->data + 1) = visu_ctxt->capture_size; + p->vsize = sizeof(uint32_t); + *size += sizeof(uint32_t); + break; + + case VISUALIZER_PARAM_SCALING_MODE: + ALOGV("%s get scaling_mode = %d", __func__, visu_ctxt->scaling_mode); + *((uint32_t *)p->data + 1) = visu_ctxt->scaling_mode; + p->vsize = sizeof(uint32_t); + *size += sizeof(uint32_t); + break; + + case VISUALIZER_PARAM_MEASUREMENT_MODE: + ALOGV("%s get meas_mode = %d", __func__, visu_ctxt->meas_mode); + *((uint32_t *)p->data + 1) = visu_ctxt->meas_mode; + p->vsize = sizeof(uint32_t); + *size += sizeof(uint32_t); + break; + + default: + p->status = -EINVAL; + } + + return 0; +} + +int visualizer_command( + effect_context_t *context, + uint32_t cmd_code, + uint32_t cmd_size __unused, + void *cmd_data __unused, + uint32_t *reply_size, + void *reply_data) +{ + visualizer_context_t * visu_ctxt = 
(visualizer_context_t *)context; + int ret = 0; + + switch (cmd_code) { + case VISUALIZER_CMD_CAPTURE: { + if (reply_data == NULL || *reply_size != visu_ctxt->capture_size) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else { + if (context->state == EFFECT_STATE_ACTIVE) { + int32_t latency_ms = visu_ctxt->latency; + const uint32_t delta_ms = visualizer_get_delta_time_ms_from_updated_time(visu_ctxt); + latency_ms -= delta_ms; + + if (latency_ms < 0) + latency_ms = 0; + + const uint32_t delta_smp = context->config.inputCfg.samplingRate * latency_ms / 1000; + + int32_t capture_point = visu_ctxt->capture_idx - visu_ctxt->capture_size - delta_smp; + int32_t capture_size = visu_ctxt->capture_size; + /* a negative capture_point means the requested window wraps around the end of the + * circular capture buffer: copy the tail of the buffer first, then the head */ + if (capture_point < 0) { + int32_t size = -capture_point; + if (size > capture_size) + size = capture_size; + + memcpy(reply_data, visu_ctxt->capture_buf + CAPTURE_BUF_SIZE + capture_point, size); + reply_data = (void *)((size_t)reply_data + size); + capture_size -= size; + capture_point = 0; + } + memcpy(reply_data, visu_ctxt->capture_buf + capture_point, capture_size); + + + /* if audio framework has stopped playing audio although the effect is still + * active we must clear the capture buffer to return silence */ + if ((visu_ctxt->last_capture_idx == visu_ctxt->capture_idx) + && (visu_ctxt->buffer_update_time.tv_sec != 0)) { + if (delta_ms > MAX_STALL_TIME_MS) { + ALOGV("%s capture going to idle", __func__); + visu_ctxt->buffer_update_time.tv_sec = 0; + memset(reply_data, 0x80, visu_ctxt->capture_size); + } + } + visu_ctxt->last_capture_idx = visu_ctxt->capture_idx; + } else { + memset(reply_data, 0x80, visu_ctxt->capture_size); + } + } + } + break; + + case VISUALIZER_CMD_MEASURE: { + uint16_t peak_u16 = 0; + float sum_rms_squared = 0.0f; + uint8_t nb_valid_meas = 0; + + /* reset measurements if last measurement was too long ago (which implies stored + * measurements aren't relevant anymore and shouldn't bias the new one) */ + const int32_t delay_ms = visualizer_get_delta_time_ms_from_updated_time(visu_ctxt); + if (delay_ms > DISCARD_MEASUREMENTS_TIME_MS) { + uint32_t i; + ALOGV("Discarding measurements, last measurement is %dms old", delay_ms); + for (i=0 ; i < visu_ctxt->meas_wndw_size_in_buffers ; i++) { + visu_ctxt->past_meas[i].is_valid = false; + visu_ctxt->past_meas[i].peak_u16 = 0; + visu_ctxt->past_meas[i].rms_squared = 0; + } + visu_ctxt->meas_buffer_idx = 0; + } else { + /* only use actual measurements, otherwise the first RMS measure happening before + * MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS have been played will always be artificially low */ + uint32_t i; + for (i=0 ; i < visu_ctxt->meas_wndw_size_in_buffers ; i++) { + if (visu_ctxt->past_meas[i].is_valid) { + if (visu_ctxt->past_meas[i].peak_u16 > peak_u16) { + peak_u16 = visu_ctxt->past_meas[i].peak_u16; + } + sum_rms_squared += visu_ctxt->past_meas[i].rms_squared; + nb_valid_meas++; + } + } + } + + float rms = nb_valid_meas == 0 ?
0.0f : sqrtf(sum_rms_squared / nb_valid_meas); + int32_t* p_int_reply_data = (int32_t*)reply_data; + + /* convert from I16 sample values to mB and write results */ + if (rms < 0.000016f) { + p_int_reply_data[MEASUREMENT_IDX_RMS] = -9600; //-96dB + } else { + p_int_reply_data[MEASUREMENT_IDX_RMS] = (int32_t) (2000 * log10(rms / 32767.0f)); + } + + if (peak_u16 == 0) { + p_int_reply_data[MEASUREMENT_IDX_PEAK] = -9600; //-96dB + } else { + p_int_reply_data[MEASUREMENT_IDX_PEAK] = (int32_t) (2000 * log10(peak_u16 / 32767.0f)); + } + ALOGV("VISUALIZER_CMD_MEASURE peak=%d (%dmB), rms=%.1f (%dmB)", peak_u16, p_int_reply_data[MEASUREMENT_IDX_PEAK],rms, p_int_reply_data[MEASUREMENT_IDX_RMS]); + } + break; + + default: + ALOGW("%s: Not supported Command %u", __func__, cmd_code); + ret = -EINVAL; + break; + } + + return ret; +} + +int visualizer_init(effect_context_t *context) +{ + visualizer_context_t * visu_ctxt = (visualizer_context_t *)context; + int i; + + context->config.inputCfg.accessMode = EFFECT_BUFFER_ACCESS_READ; + context->config.inputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + context->config.inputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + context->config.inputCfg.samplingRate = 44100; + context->config.inputCfg.bufferProvider.getBuffer = NULL; + context->config.inputCfg.bufferProvider.releaseBuffer = NULL; + context->config.inputCfg.bufferProvider.cookie = NULL; + context->config.inputCfg.mask = EFFECT_CONFIG_ALL; + context->config.outputCfg.accessMode = EFFECT_BUFFER_ACCESS_ACCUMULATE; + context->config.outputCfg.channels = AUDIO_CHANNEL_OUT_STEREO; + context->config.outputCfg.format = AUDIO_FORMAT_PCM_16_BIT; + context->config.outputCfg.samplingRate = 44100; + context->config.outputCfg.bufferProvider.getBuffer = NULL; + context->config.outputCfg.bufferProvider.releaseBuffer = NULL; + context->config.outputCfg.bufferProvider.cookie = NULL; + context->config.outputCfg.mask = EFFECT_CONFIG_ALL; + + visu_ctxt->capture_size = VISUALIZER_CAPTURE_SIZE_MAX; + visu_ctxt->scaling_mode = VISUALIZER_SCALING_MODE_NORMALIZED; + + // measurement initialization + visu_ctxt->channel_count = audio_channel_count_from_out_mask(context->config.inputCfg.channels); + visu_ctxt->meas_mode = MEASUREMENT_MODE_NONE; + visu_ctxt->meas_wndw_size_in_buffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS; + visu_ctxt->meas_buffer_idx = 0; + + for (i = 0; i < visu_ctxt->meas_wndw_size_in_buffers; i++) { + visu_ctxt->past_meas[i].is_valid = false; + visu_ctxt->past_meas[i].peak_u16 = 0; + visu_ctxt->past_meas[i].rms_squared = 0; + } + + set_config(context, &context->config); + + return 0; +} + +int visualizer_release(effect_context_t *context __unused) +{ + return 0; +} + +/* + * Effect Control Interface Implementation + */ +/* Effect Control Interface Implementation: process */ +int effect_process( + effect_handle_t self, + audio_buffer_t *in_buffer, + audio_buffer_t *out_buffer) +{ + effect_context_t *context = (effect_context_t *)self; + + ALOGD("%s: called", __func__); + + if (context == NULL) { + return -EINVAL; + } + + if (in_buffer == NULL || in_buffer->raw == NULL || + out_buffer == NULL || out_buffer->raw == NULL || + in_buffer->frameCount != out_buffer->frameCount || + in_buffer->frameCount == 0) { + return -EINVAL; + } + + + if (context->state != EFFECT_STATE_ACTIVE) { + return -ENODATA; + } + + return 0; +} + +/* Effect Control Interface Implementation: command */ +int effect_command( + effect_handle_t self, + uint32_t cmd_code, + uint32_t cmd_size, + void *cmd_data, + uint32_t *reply_size, + void *reply_data) +{ 
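+ /* + * Dispatcher for the standard effect control interface: each EFFECT_CMD_* case below + * validates cmd_size/cmd_data and reply_size/reply_data before acting, commands that + * need no special handling here simply break with ret = 0, and proprietary commands + * (cmd_code >= EFFECT_CMD_FIRST_PROPRIETARY) are forwarded to the effect's ops.command() + * callback, and only while offload is enabled. + */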
+ effect_context_t *context = (effect_context_t *)self; + int ret = 0; + int retsize; + + if (context == NULL || context->state == EFFECT_STATE_UNINITIALIZED) { + ALOGE("%s: Invalid Parameter", __func__); + return -EINVAL; + } + + ALOGV("%s: Effect(%s), Audio Output Handle (%d)", __func__, context->desc->name, context->out_handle); + ALOGV("%s: command (%u)", __func__, cmd_code); + + switch (cmd_code) { + case EFFECT_CMD_INIT: /* 0: cmd_size = 0, cmd_data = NULL */ + if (reply_data == NULL || *reply_size != sizeof(uint32_t)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else { + *(int *)reply_data = context->ops.init(context); + context->state = EFFECT_STATE_INITIALIZED; + } + break; + + case EFFECT_CMD_SET_CONFIG: /* 1 */ + if (cmd_data == NULL || cmd_size != sizeof(effect_config_t) + || reply_data == NULL || *reply_size != sizeof(uint32_t)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else { + *(int *) reply_data = set_config(context, (effect_config_t *)cmd_data); + } + break; + + case EFFECT_CMD_RESET: /* 2: cmd_size = 0, cmd_data = NULL, reply_size = 0, reply_data = NULL */ + if (context->ops.reset) + context->ops.reset(context); + break; + + case EFFECT_CMD_ENABLE: /* 3: cmd_size = 0, cmd_data = NULL */ + if (reply_data == NULL || *reply_size != sizeof(uint32_t)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else if (context->state != EFFECT_STATE_INITIALIZED) { + ret = -ENOSYS; + ALOGE("%s: Command(%u) has Invalid State", __func__, context->state); + } else { + if (context->offload_enabled && context->ops.enable) + context->ops.enable(context); + + pthread_cond_signal(&cond); + context->state = EFFECT_STATE_ACTIVE; + *(int *)reply_data = 0; + } + break; + + case EFFECT_CMD_DISABLE: /* 4: cmd_size = 0, cmd_data = NULL */ + if (reply_data == NULL || *reply_size != sizeof(uint32_t)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else if (context->state != EFFECT_STATE_ACTIVE) { + ret = -ENOSYS; + ALOGE("%s: Command(%u) has Invalid State", __func__, context->state); + } else { + if (context->offload_enabled && context->ops.disable) + context->ops.disable(context); + + pthread_cond_signal(&cond); + context->state = EFFECT_STATE_INITIALIZED; + *(int *)reply_data = 0; + } + break; + + case EFFECT_CMD_SET_PARAM: /* 5 */ + if (cmd_data == NULL || + cmd_size != (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t)) || + reply_data == NULL || *reply_size != sizeof(uint32_t)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else { + *(int *)reply_data = 0; + + effect_param_t *p = (effect_param_t *)cmd_data; + if (p->psize != sizeof(uint32_t) || p->vsize != sizeof(uint32_t)) { + *(int *)reply_data = -EINVAL; + ALOGE("%s: Parameter in Command(%u) has Invalid Size", __func__, cmd_code); + break; + } else { + if (context->ops.set_parameter) + *(int32_t *)reply_data = context->ops.set_parameter(context, p, *reply_size); + } + } + break; + + case EFFECT_CMD_SET_PARAM_DEFERRED: /* 6 */ + case EFFECT_CMD_SET_PARAM_COMMIT: /* 7 */ + break; + + case EFFECT_CMD_GET_PARAM: /* 8 */ + if (cmd_data == NULL || + cmd_size != (int)(sizeof(effect_param_t) + sizeof(uint32_t)) || + reply_data == NULL || *reply_size < (int)(sizeof(effect_param_t) + sizeof(uint32_t) + sizeof(uint32_t))) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + }
else if (!context->offload_enabled) { + ret = -EINVAL; + ALOGE("%s: Command(%u) cannot applied because Offload not enabled yet", __func__, cmd_code); + } else { + memcpy(reply_data, cmd_data, sizeof(effect_param_t) + sizeof(uint32_t)); + + effect_param_t *p = (effect_param_t *)reply_data; + p->status = 0; + *reply_size = sizeof(effect_param_t) + sizeof(uint32_t); + if (p->psize != sizeof(uint32_t)) { + p->status = -EINVAL; + ALOGE("%s: Parameter in Command(%u) has Invalid Size", __func__, cmd_code); + break; + } else { + if (context->ops.get_parameter) + context->ops.get_parameter(context, p, reply_size); + } + } + break; + + case EFFECT_CMD_SET_DEVICE: /* 9 */ + case EFFECT_CMD_SET_VOLUME: /* 10 */ + case EFFECT_CMD_SET_AUDIO_MODE: /* 11 */ + case EFFECT_CMD_SET_CONFIG_REVERSE: /* 12 */ + case EFFECT_CMD_SET_INPUT_DEVICE: /* 13 */ + break; + + case EFFECT_CMD_GET_CONFIG: /* 14: cmd_size = 0, cmd_data = NULL */ + if (reply_data == NULL || *reply_size != sizeof(effect_config_t)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else { + get_config(context, (effect_config_t *)reply_data); + } + break; + + case EFFECT_CMD_GET_CONFIG_REVERSE: /* 15 */ + case EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS: /* 16 */ + case EFFECT_CMD_GET_FEATURE_CONFIG: /* 17 */ + case EFFECT_CMD_SET_FEATURE_CONFIG: /* 18 */ + case EFFECT_CMD_SET_AUDIO_SOURCE: /* 19 */ + break; + + case EFFECT_CMD_OFFLOAD: /* 20 */ + if (cmd_size != sizeof(effect_offload_param_t) || cmd_data == NULL + || reply_data == NULL || *reply_size != sizeof(int)) { + ret = -EINVAL; + ALOGE("%s: Command(%u) has Invalid Parameter", __func__, cmd_code); + } else { + effect_offload_param_t* offload_param = (effect_offload_param_t*)cmd_data; + + ALOGD("%s: Command(%u)= offload %d, output %d", __func__, cmd_code, offload_param->isOffload, offload_param->ioHandle); + + context->offload_enabled = offload_param->isOffload; + if (context->out_handle == offload_param->ioHandle) { + ALOGD("%s: Requested same Audio output", __func__); + } else { + ALOGD("%s: Requested to change Audio output from %d to %d", __func__, context->out_handle, offload_param->ioHandle); + context->out_handle = offload_param->ioHandle; + } + *(int *)reply_data = 0; + } + break; + + default: + if (cmd_code >= EFFECT_CMD_FIRST_PROPRIETARY && context->ops.command) { + if (context->offload_enabled) + ret = context->ops.command(context, cmd_code, cmd_size, cmd_data, reply_size, reply_data); + } else { + ALOGW("%s: Not supported Command %u", __func__, cmd_code); + ret = -EINVAL; + } + break; + } + + return ret; +} + +/* Effect Control Interface Implementation: get_descriptor */ +int effect_get_descriptor(effect_handle_t self, effect_descriptor_t *descriptor) +{ + effect_context_t *context = (effect_context_t *)self; + + ALOGD("%s: called", __func__); + + if (context == NULL || descriptor == NULL) { + ALOGV("%s: invalid param", __func__); + return -EINVAL; + } + + if (!effect_exists(context)) + return -EINVAL; + + *descriptor = *context->desc; + + return 0; +} + +/* effect_handle_t interface implementation for offload effects */ +const struct effect_interface_s effect_interface = { + effect_process, + effect_command, + effect_get_descriptor, + NULL, +}; + +/** + ** AudioEffectHAL Interface Implementation +**/ +int effect_lib_create( + const effect_uuid_t *uuid, + int32_t session_id __unused, + int32_t io_id, + effect_handle_t *handle) +{ + effect_context_t *context = 0; + int ret = 0, i; + + ALOGD("%s: called", __func__); + + if (lib_init() != 0) 
+ return init_status; + + if (handle == NULL || uuid == NULL) { + ALOGE("%s: Invalid Parameters", __func__); + return -EINVAL; + } + + /* Check UUID against the Descriptor List which this effect lib supports */ + for (i = 0; descriptors[i] != NULL; i++) { + if (memcmp(uuid, &descriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) { + ALOGV("%s: got supported descriptor", __func__); + break; + } + } + if (descriptors[i] == NULL) { + ALOGE("%s: Not supported UUID", __func__); + return -EINVAL; + } + + /* According to UUID, make Effect Context */ + if (memcmp(uuid, &visualizer_descriptor.uuid, sizeof(effect_uuid_t)) == 0) { + ALOGD("%s: Requested Visualizer for Audio IO(%d)", __func__, io_id); + visualizer_context_t *visu_ctxt = (visualizer_context_t *)calloc(1, sizeof(visualizer_context_t)); + if (visu_ctxt) { + context = (effect_context_t *)visu_ctxt; + context->ops.init = visualizer_init; + context->ops.release = visualizer_release; + context->ops.reset = visualizer_reset; + context->ops.enable = visualizer_enable; + context->ops.disable = visualizer_disable; + context->ops.start = visualizer_start; + context->ops.stop = visualizer_stop; + context->ops.process = visualizer_process; + context->ops.set_parameter = visualizer_set_parameter; + context->ops.get_parameter = visualizer_get_parameter; + context->ops.command = visualizer_command; + context->desc = &visualizer_descriptor; + } else { + ALOGE("%s: Failed to allocate memory for context", __func__); + ret = -ENOMEM; + } + } else { + ALOGE("%s: Not supported Visualizer UUID", __func__); + ret = -EINVAL; + } + + if (ret == 0) { + context->itfe = &effect_interface; + context->state = EFFECT_STATE_UNINITIALIZED; + context->out_handle = (audio_io_handle_t)io_id; + + ret = context->ops.init(context); + if (ret < 0) { + ALOGW("%s init failed", __func__); + free(context); + return ret; + } + + context->state = EFFECT_STATE_INITIALIZED; + + pthread_mutex_lock(&lock); + list_add_tail(&created_effects_list, &context->effects_list_node); + output_context_t *out_ctxt = get_output(io_id); + if (out_ctxt != NULL) { + ALOGD("%s: Got Output Context for Audio Output Handle (%d)", __func__, io_id); + add_effect_to_output(out_ctxt, context); + } + pthread_mutex_unlock(&lock); + + *handle = (effect_handle_t)context; + } + + ALOGD("%s created context %p", __func__, context); + return ret; +} + +int effect_lib_release(effect_handle_t handle) +{ + effect_context_t *context = (effect_context_t *)handle; + int ret = 0; + + ALOGD("%s: called", __func__); + + if (lib_init() != 0) + return init_status; + + if (handle == NULL) { + ALOGE("%s: Invalid Parameters", __func__); + ret = -EINVAL; + } else { + pthread_mutex_lock(&lock); + if (effect_exists(context)) { + output_context_t *out_ctxt = get_output(context->out_handle); + if (out_ctxt != NULL) + remove_effect_from_output(out_ctxt, context); + list_remove(&context->effects_list_node); + ALOGD("%s: Remove effect context from Effect List", __func__); + + if (context->ops.release) + context->ops.release(context); + context->state = EFFECT_STATE_UNINITIALIZED; + free(context); + } + pthread_mutex_unlock(&lock); + } + + return ret; +} + +int effect_lib_get_descriptor( + const effect_uuid_t *uuid, + effect_descriptor_t *descriptor) +{ + int ret = 0, i; + + ALOGD("%s: called", __func__); + + if (lib_init() != 0) + return init_status; + + if (descriptor == NULL || uuid == NULL) { + ALOGV("%s: called with NULL pointer", __func__); + ret = -EINVAL; + } else { + for (i = 0; descriptors[i] != NULL; i++) { + if (memcmp(uuid,
&descriptors[i]->uuid, sizeof(effect_uuid_t)) == 0) { + ALOGV("%s: got supported descriptor", __func__); + memcpy((void *)descriptor, (const void *)descriptors[i], sizeof(effect_descriptor_t)); + //*descriptor = *descriptors[i]; + break; + } + } + if (descriptors[i] == NULL) { + ALOGE("%s: Not supported UUID", __func__); + ret = -EINVAL; + } + } + + return ret; +} + + +/* This is the only symbol that needs to be exported */ +__attribute__ ((visibility ("default"))) +audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = { + .tag = AUDIO_EFFECT_LIBRARY_TAG, + .version = EFFECT_LIBRARY_API_VERSION, + .name = "Exynos Offload Visualizer HAL", + .implementor = "Samsung SystemLSI", + .create_effect = effect_lib_create, + .release_effect = effect_lib_release, + .get_descriptor = effect_lib_get_descriptor, +}; diff --git a/libaudio/effecthal/visualizer/exynos_visualizer.h b/libaudio/effecthal/visualizer/exynos_visualizer.h new file mode 100644 index 0000000..0fa4b0e --- /dev/null +++ b/libaudio/effecthal/visualizer/exynos_visualizer.h @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <limits.h> +#include <stdbool.h> +#include <stdint.h> +#include <time.h> + +#include <cutils/list.h> +#include <hardware/audio_effect.h> +#include <tinyalsa/asoundlib.h> + + +enum { + EFFECT_STATE_UNINITIALIZED, + EFFECT_STATE_INITIALIZED, + EFFECT_STATE_ACTIVE, +}; + +typedef struct effect_context_s effect_context_t; +typedef struct output_context_s output_context_t; + + +/* effect specific operations. Only the init() and process() operations must be defined. + * Others are optional. + */ +typedef struct effect_ops_s { + int (*init)(effect_context_t *context); + int (*release)(effect_context_t *context); + int (*reset)(effect_context_t *context); + int (*enable)(effect_context_t *context); + int (*disable)(effect_context_t *context); + int (*start)(effect_context_t *context, output_context_t *output); + int (*stop)(effect_context_t *context, output_context_t *output); + int (*process)(effect_context_t *context, audio_buffer_t *in, audio_buffer_t *out); + int (*set_parameter)(effect_context_t *context, effect_param_t *param, uint32_t size); + int (*get_parameter)(effect_context_t *context, effect_param_t *param, uint32_t *size); + int (*command)(effect_context_t *context, uint32_t cmd_code, uint32_t cmd_size, void *cmd_data, uint32_t *reply_size, void *reply_data); +} effect_ops_t; + +struct effect_context_s { + const struct effect_interface_s *itfe; + + struct listnode effects_list_node; /* node in created_effects_list */ + struct listnode output_node; /* node in output_context_t.effects_list */ + effect_config_t config; + const effect_descriptor_t *desc; + audio_io_handle_t out_handle; /* io handle of the output the effect is attached to */ + uint32_t state; + bool offload_enabled; /* when offload is enabled we process VISUALIZER_CMD_CAPTURE command. Otherwise the non-offloaded visualizer has already processed the command and we must not overwrite the reply.
*/ + effect_ops_t ops; +}; + +typedef struct output_context_s { + struct listnode outputs_list_node; /* node in active_outputs_list */ + audio_io_handle_t handle; /* io handle */ + struct listnode effects_list; /* list of effects attached to this output */ +} output_context_t; + + + +/* maximum time since last capture buffer update before resetting capture buffer. This means + that the framework has stopped playing audio and we must start returning silence */ +#define MAX_STALL_TIME_MS 1000 + +#define CAPTURE_BUF_SIZE 65536 /* "64k should be enough for everyone" */ + +#define DISCARD_MEASUREMENTS_TIME_MS 2000 /* discard measurements older than this number of ms */ + +/* maximum number of buffers for which we keep track of the measurements */ +#define MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS 25 /* note: buffer index is stored in uint8_t */ + +typedef struct buffer_stats_s { + bool is_valid; + uint16_t peak_u16; /* the positive peak of the absolute value of the samples in a buffer */ + float rms_squared; /* the average square of the samples in a buffer */ +} buffer_stats_t; + +typedef struct visualizer_context_s { + effect_context_t common; + + uint32_t capture_idx; + uint32_t capture_size; + uint32_t scaling_mode; + uint32_t last_capture_idx; + uint32_t latency; + struct timespec buffer_update_time; + uint8_t capture_buf[CAPTURE_BUF_SIZE]; + /* for measurements */ + uint8_t channel_count; /* to avoid recomputing it every time a buffer is processed */ + uint32_t meas_mode; + uint8_t meas_wndw_size_in_buffers; + uint8_t meas_buffer_idx; + buffer_stats_t past_meas[MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS]; +} visualizer_context_t; + + +#define DSP_OUTPUT_LATENCY_MS 0 /* Fudge factor for latency after capture point in audio DSP */ + +#define SOUND_CARD 0 +#define CAPTURE_DEVICE 8 + +/* Proxy port supports only MMAP read and those fixed parameters*/ +#define AUDIO_CAPTURE_CHANNEL_COUNT 2 +#define AUDIO_CAPTURE_SMP_RATE 48000 +#define AUDIO_CAPTURE_PERIOD_SIZE 1024 +#define AUDIO_CAPTURE_PERIOD_COUNT 4 + +struct pcm_config pcm_config_capture = { + .channels = AUDIO_CAPTURE_CHANNEL_COUNT, + .rate = AUDIO_CAPTURE_SMP_RATE, + .period_size = AUDIO_CAPTURE_PERIOD_SIZE, + .period_count = AUDIO_CAPTURE_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, + .start_threshold = AUDIO_CAPTURE_PERIOD_SIZE / 4, + .stop_threshold = INT_MAX, + .avail_min = AUDIO_CAPTURE_PERIOD_SIZE / 4, +}; diff --git a/libaudio/hal/Android.mk b/libaudio/hal/Android.mk new file mode 100644 index 0000000..e22ac1b --- /dev/null +++ b/libaudio/hal/Android.mk @@ -0,0 +1,72 @@ +# Copyright (C) 2014 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# +# Primary Audio HAL +# +LOCAL_PATH := $(call my-dir) +DEVICE_BASE_PATH := $(TOP)/device/samsung + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + audio_hw.c + +ifeq ($(BOARD_USE_SIPC_RIL), true) +LOCAL_SRC_FILES += \ + sec/voice_manager.c +else +LOCAL_SRC_FILES += \ + ww/voice_manager.c +endif + +LOCAL_C_INCLUDES += \ + external/tinyalsa/include \ + external/tinycompress/include \ + external/kernel-headers/original/uapi/sound \ + $(call include-path-for, audio-route) \ + $(call include-path-for, audio-utils) \ + $(DEVICE_BASE_PATH)/$(TARGET_BOOTLOADER_BOARD_NAME)/conf \ + external/expat/lib + +ifeq ($(BOARD_USE_SIPC_RIL), true) +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/sec +else +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/ww +endif + +LOCAL_SHARED_LIBRARIES := \ + liblog \ + libcutils \ + libtinyalsa \ + libtinycompress \ + libaudioroute \ + libaudioutils \ + libdl \ + libexpat + +ifeq ($(BOARD_USE_SIPC_RIL), true) +LOCAL_CFLAGS += -DSUPPORT_SIPC_RIL +endif + +LOCAL_MODULE := audio.primary.$(TARGET_BOOTLOADER_BOARD_NAME) +LOCAL_MODULE_RELATIVE_PATH := hw + +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/libaudio/hal/NOTICE b/libaudio/hal/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libaudio/hal/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libaudio/hal/audio_hw.c b/libaudio/hal/audio_hw.c new file mode 100644 index 0000000..d601d3b --- /dev/null +++ b/libaudio/hal/audio_hw.c @@ -0,0 +1,3526 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "audio_hw_primary" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include + +#include + +#include "audio_hw.h" +#include "audio_hw_def.h" + +/******************************************************************************/ +/** Note: the following macro is used for extremely verbose logging message. **/ +/** In order to run with ALOG_ASSERT turned on, we need to have LOG_NDEBUG **/ +/** set to 0; but one side effect of this is to turn all LOGV's as well. Some**/ +/** messages are so verbose that we want to suppress them even when we have **/ +/** ALOG_ASSERT turned on. Do not uncomment the #def below unless you **/ +/** really know what you are doing and want to see all of the extremely **/ +/** verbose messages. **/ +/******************************************************************************/ +//#define VERY_VERY_VERBOSE_LOGGING +#ifdef VERY_VERY_VERBOSE_LOGGING +#define ALOGVV ALOGD +#else +#define ALOGVV(a...) do { } while(0) +#endif + +//#define ROUTING_VERBOSE_LOGGING +#ifdef ROUTING_VERBOSE_LOGGING +#define ALOGRV ALOGD +#else +#define ALOGRV(a...) 
do { } while(0) +#endif + +/******************************************************************************/ +/** **/ +/** The Global Local Functions **/ +/** **/ +/******************************************************************************/ +static audio_format_t audio_pcmformat_from_alsaformat(enum pcm_format pcmformat) +{ + audio_format_t format = AUDIO_FORMAT_PCM_16_BIT; + + switch (pcmformat) { + case PCM_FORMAT_S16_LE: + format = AUDIO_FORMAT_PCM_16_BIT; + break; + case PCM_FORMAT_S32_LE: + format = AUDIO_FORMAT_PCM_32_BIT; + break; + case PCM_FORMAT_S8: + format = AUDIO_FORMAT_PCM_8_BIT; + break; + case PCM_FORMAT_S24_LE: + case PCM_FORMAT_S24_3LE: + format = AUDIO_FORMAT_PCM_24_BIT_PACKED; + break; + case PCM_FORMAT_INVALID: + case PCM_FORMAT_MAX: + format = AUDIO_FORMAT_PCM_16_BIT; + break; + } + + return format; +} + +static unsigned int get_sound_card(audio_usage_id_t usage_id) +{ + return (unsigned int)(sound_device_table[usage_id][0]); +} + +static unsigned int get_sound_device(audio_usage_id_t usage_id) +{ + return (unsigned int)(sound_device_table[usage_id][1]); +} + +/* Only Primary Output Stream can control Voice Call */ +static bool output_drives_call(struct audio_device *adev, struct stream_out *out) +{ + return out == adev->primary_output; +} + +/* Check Call Mode or not */ +static bool isCallMode(audio_usage_mode_t mode) +{ + if (mode > AUSAGE_MODE_NORMAL && mode < AUSAGE_MODE_NONE) + return true; + else + return false; +} + +/* CP Centric Call Mode or not */ +static bool isCPCallMode(audio_usage_mode_t mode) +{ + if (mode == AUSAGE_MODE_VOICE_CALL || mode == AUSAGE_MODE_LTE_CALL) + return true; + else + return false; +} + +/******************************************************************************/ +/** **/ +/** The Local Functions for Audio Usage list **/ +/** **/ +/******************************************************************************/ +static struct exynos_audio_usage *get_dangling_ausage_from_list( + struct audio_device *adev, + audio_usage_type_t usagetype, + audio_io_handle_t handle) +{ + struct exynos_audio_usage *ausage = NULL; + struct exynos_audio_usage *active_ausage = NULL; + struct listnode *ausage_node; + + list_for_each(ausage_node, &adev->audio_usage_list) { + ausage = node_to_item(ausage_node, struct exynos_audio_usage, node); + if (ausage->usage_type == usagetype) { + if ((usagetype == AUSAGE_PLAYBACK)) { + if (ausage->stream.out != NULL && + ausage->stream.out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD && + ausage->stream.out->handle != handle) { + active_ausage = ausage; + ALOGV("device-%s: [PLAYBACK] usage-id (%d) -- Active Routed device(%s) Mode(%s)!", + __func__, ausage->usage_id, device_path_table[ausage->out_device_id], + mode_table[ausage->out_device_amode]); + break; + } + } + + if (usagetype == AUSAGE_CAPTURE) { + if (ausage->stream.in != NULL && + ausage->stream.in->handle != handle) { + active_ausage = ausage; + ALOGV("device-%s: [CAPTURE] usage-id (%d) -- Active Routed device(%s) Mode(%s)!", + __func__, ausage->usage_id, device_path_table[ausage->in_device_id], + mode_table[ausage->in_device_amode]); + break; + } + } + } + } + + return active_ausage; +} + +static struct exynos_audio_usage *get_active_ausage_from_list( + struct audio_device *adev, + struct exynos_audio_usage *me, + audio_usage_type_t usagetype) +{ + struct exynos_audio_usage *ausage = NULL; + struct exynos_audio_usage *active_ausage = NULL; + struct listnode *ausage_node; + + list_for_each(ausage_node, &adev->audio_usage_list) { + ausage = 
node_to_item(ausage_node, struct exynos_audio_usage, node); + if (ausage->usage_type == usagetype) { + if ((usagetype == AUSAGE_PLAYBACK)) { + if (ausage != me && ausage->stream.out != NULL && + ausage->stream.out->sstate != STATE_STANDBY) { + active_ausage = ausage; + ALOGV("device-%s: [PLAYBACK] usage-id (%d) -- Active Routed device(%s) Mode(%s)!", + __func__, ausage->usage_id, device_path_table[ausage->out_device_id], + mode_table[ausage->out_device_amode]); + break; + } + } + + if (usagetype == AUSAGE_CAPTURE) { + if (ausage != me && ausage->stream.in != NULL && + ausage->stream.in->sstate != STATE_STANDBY) { + active_ausage = ausage; + ALOGV("device-%s: [CAPTURE] usage-id (%d) -- Active Routed device(%s) Mode(%s)!", + __func__, ausage->usage_id, device_path_table[ausage->in_device_id], + mode_table[ausage->in_device_amode]); + break; + } + } + } + } + + return active_ausage; +} + +/* + * Return Audio Usage Structure matched with Usage Type, Usage ID and IO_Handle + */ +static struct exynos_audio_usage *get_ausage_from_list( + struct audio_device *adev, + audio_usage_type_t usagetype, + audio_usage_id_t usage_id, + audio_io_handle_t handle) +{ + struct exynos_audio_usage *ausage = NULL; + struct listnode *ausage_node; + audio_io_handle_t ausage_handle = 0; + + list_for_each(ausage_node, &adev->audio_usage_list) { + ausage = node_to_item(ausage_node, struct exynos_audio_usage, node); + + if (ausage->usage_type == AUSAGE_PLAYBACK) + ausage_handle = ausage->stream.out->handle; + else if (ausage->usage_type == AUSAGE_CAPTURE) + ausage_handle = ausage->stream.in->handle; + + if (ausage->usage_type == usagetype && + ausage->usage_id == usage_id && + ausage_handle == handle) + return ausage; + } + + return NULL; +} + +static void syncup_ausage_from_list( + struct audio_device *adev, + struct exynos_audio_usage *me, + audio_usage_type_t usagetype, + device_type_t cur_device, + device_type_t new_device) +{ + struct exynos_audio_usage *ausage; + struct listnode *ausage_node; + + list_for_each(ausage_node, &adev->audio_usage_list) { + ausage = node_to_item(ausage_node, struct exynos_audio_usage, node); + if (ausage->usage_type == usagetype) { + ALOGRV("device-%s: Usage(%s) current-device(%s)'s new-device(%s) usage_type(%s)!", + __func__, usage_table[ausage->usage_id], device_path_table[cur_device], + device_path_table[new_device], (ausage->usage_type == AUSAGE_PLAYBACK ? 
"PLAYBACK" : "CAPTURE")); + if ((ausage->usage_type == AUSAGE_PLAYBACK)) { + if (cur_device != DEVICE_NONE && new_device == DEVICE_NONE) { + if (ausage == me || (ausage->stream.out != NULL && + ausage->stream.out->sstate == STATE_STANDBY)) { + ausage->out_device_id = DEVICE_NONE; + ausage->out_device_amode = AUSAGE_MODE_NONE; + ALOGRV("device-%s: Usage(%s) device(%s) Disabled!", __func__, + usage_table[ausage->usage_id], device_path_table[cur_device]); + } + } else if (cur_device == DEVICE_NONE && new_device != DEVICE_NONE) { + if (ausage->stream.out != NULL) { + ausage->out_device_id = new_device; + ausage->out_device_amode = adev->usage_amode; + ALOGRV("device-%s: Usage(%s) device(%s) Enabled!", __func__, + usage_table[ausage->usage_id], device_path_table[new_device]); + } + } + } + + if ((ausage->usage_type == AUSAGE_CAPTURE)) { + if (cur_device != DEVICE_NONE && new_device == DEVICE_NONE) { + if (ausage == me || (ausage->stream.in != NULL && + ausage->stream.in->sstate == STATE_STANDBY)) { + ausage->in_device_id = DEVICE_NONE; + ausage->in_device_amode = AUSAGE_MODE_NONE; + ALOGRV("device-%s: Usage(%s) device(%s) Disabled!", __func__, + usage_table[ausage->usage_id], device_path_table[cur_device]); + } + } else if (cur_device == DEVICE_NONE && new_device != DEVICE_NONE) { + if (ausage->stream.in != NULL) { + ausage->in_device_id = new_device; + ausage->in_device_amode = adev->usage_amode; + ALOGRV("device-%s: Usage(%s) device(%s) Enabled!", __func__, + usage_table[ausage->usage_id],device_path_table[new_device]); + } + } + } + } + } + + return ; +} + +static int add_audio_usage( + struct audio_device *adev, + audio_usage_type_t type, + void *stream) +{ + struct exynos_audio_usage *ausage_node; + struct stream_out *out = NULL; + struct stream_in *in = NULL; + int ret = 0; + + ausage_node = (struct exynos_audio_usage *)calloc(1, sizeof(struct exynos_audio_usage)); + if (ausage_node) { + ausage_node->usage_type = type; + if (type == AUSAGE_PLAYBACK) { + out = (struct stream_out *)stream; + + ausage_node->usage_id = out->ausage; + ausage_node->stream.out = out; + } else if (type == AUSAGE_CAPTURE) { + in = (struct stream_in *)stream; + + ausage_node->usage_id = in->ausage; + ausage_node->stream.in = in; + } + + if (ausage_node->usage_id == AUSAGE_PLAYBACK_PRIMARY) { + /* In case of Primary Playback Usage, it is created at first. 
*/ + ausage_node->out_device_id = DEVICE_NONE; + ausage_node->in_device_id = DEVICE_NONE; + ausage_node->out_device_amode = AUSAGE_MODE_NONE; + ausage_node->in_device_amode = AUSAGE_MODE_NONE; + } else { + /* Syncup with Primary Usage's Status */ + struct stream_out *primary_out = adev->primary_output; + struct exynos_audio_usage *primary_usage; + + primary_usage = get_ausage_from_list(adev, AUSAGE_PLAYBACK, primary_out->ausage, primary_out->handle); + if (primary_usage) { + ausage_node->out_device_id = primary_usage->out_device_id; + ausage_node->in_device_id = primary_usage->in_device_id; + ausage_node->out_device_amode = primary_usage->out_device_amode; + ausage_node->in_device_amode = primary_usage->in_device_amode; + } else + ALOGE("device-%s: There is no Primary Playback Usage!", __func__); + } + + list_add_tail(&adev->audio_usage_list, &ausage_node->node); + ALOGV("%s-%s: Added Audio Stream into Audio Usage list!", usage_table[ausage_node->usage_id], __func__); + } else { + ALOGE("device-%s: Failed to allocate Memory!", __func__); + ret = -ENOMEM; + } + + return ret; +} + +static int remove_audio_usage( + struct audio_device *adev, + audio_usage_type_t type, + void *stream) +{ + struct exynos_audio_usage *ausage_node = NULL; + struct stream_out *out = NULL; + struct stream_in *in = NULL; + audio_usage_id_t id = AUSAGE_DEFAULT; + audio_io_handle_t handle = 0; + int ret = 0; + + if (type == AUSAGE_PLAYBACK) { + out = (struct stream_out *)stream; + id = out->ausage; + handle = out->handle; + } else if (type == AUSAGE_CAPTURE) { + in = (struct stream_in *)stream; + id = in->ausage; + handle = in->handle; + } + + ausage_node = get_ausage_from_list(adev, type, id, handle); + if (ausage_node) { + list_remove(&ausage_node->node); + free(ausage_node); + ALOGV("%s-%s: Removed Audio Usage from Audio Usage list!", usage_table[id], __func__); + } else { + ALOGV("%s-%s: There is no Audio Usage in Audio Usage list!", usage_table[id], __func__); + } + + return ret; +} + +#ifdef ROUTING_VERBOSE_LOGGING +static void print_ausage(struct audio_device *adev) +{ + struct exynos_audio_usage *ausage; + struct listnode *ausage_node; + + list_for_each(ausage_node, &adev->audio_usage_list) { + ausage = node_to_item(ausage_node, struct exynos_audio_usage, node); + if (ausage->usage_type == AUSAGE_PLAYBACK) + ALOGRV("%s-%s: Audio Mode = %s, Audio Device = %s", usage_table[ausage->usage_id], + __func__, mode_table[ausage->out_device_amode], device_path_table[ausage->out_device_id]); + else + ALOGRV("%s-%s: Audio Mode = %s, Audio Device = %s", usage_table[ausage->usage_id], + __func__, mode_table[ausage->in_device_amode], device_path_table[ausage->in_device_id]); + } + + return ; + +} +#endif + +static void clean_dangling_streams( + struct audio_device *adev, + audio_usage_type_t type, + void *stream) +{ + struct exynos_audio_usage *dangling_ausage = NULL; + struct stream_out *out = NULL; + struct stream_in *in = NULL; + audio_usage_id_t id = AUSAGE_DEFAULT; + audio_io_handle_t handle = 0; + + if (type == AUSAGE_CAPTURE) { + in = (struct stream_in *)stream; + id = in->ausage; + handle = in->handle; + } + + do { + dangling_ausage = get_dangling_ausage_from_list(adev, type, handle); + if (dangling_ausage) { + if (type == AUSAGE_CAPTURE) { + struct stream_in *dangling_in = (struct stream_in *)dangling_ausage->stream.in; + + remove_audio_usage(adev, type, (void *)dangling_in); + + pthread_mutex_destroy(&dangling_in->lock); + free(dangling_in); + } +#if 0 + else if (type == AUSAGE_PLAYBACK && id == 
AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + struct stream_out *dangling_out = (struct stream_out *)dangling_ausage->stream.out; + + remove_audio_usage(adev, type, (void *)dangling_out); + + if (dangling_out->nonblock_flag) + destroy_offload_callback_thread(dangling_out); + + if (dangling_out->comprconfig.codec != NULL) { + free(dangling_out->comprconfig.codec); + dangling_out->comprconfig.codec = NULL; + } + + pthread_mutex_destroy(&dangling_out->lock); + free(dangling_out); + } +#endif + } + } while (dangling_ausage); + + return ; +} + +/******************************************************************************/ +/** **/ +/** The Local Functions for Audio Path Routing **/ +/** **/ +/******************************************************************************/ + +/* Load mixer_path.xml, open Control Device and initialize Mixer structure */ +static bool init_route(struct audio_device *adev) +{ + struct route_info *trinfo = NULL; + struct audio_route *ar = NULL; + const char *card_name = NULL; + int i, ret_stat = 0; + + /* Open Mixer & Initialize Route Path */ + trinfo = (struct route_info *)calloc(1, sizeof(struct route_info)); + if (trinfo) { + /* We are using only one Sound Card */ + adev->mixerinfo = mixer_open(SOUND_CARD0); + if (adev->mixerinfo) { + ar = audio_route_init(SOUND_CARD0, NULL); + if (!ar) { + /* Fail to open Mixer or init route */ + ALOGE("device-%s: Failed to init audio route with Mixer(%d)!", __func__, SOUND_CARD0); + mixer_close(adev->mixerinfo); + adev->mixerinfo = NULL; + free(trinfo); + return false; + } + trinfo->card_num = (unsigned int)SOUND_CARD0; + trinfo->aroute = ar; + } else { + ALOGE("device-%s: Cannot open Mixer for %d!", __func__, SOUND_CARD0); + free(trinfo); + return false; + } + } else { + ALOGE("device-%s: Failed to allocate memory for audio route!", __func__); + return false; + } + + /* Set Route Info */ + adev->rinfo = trinfo; + + return true; +} + +/* Free Mixer structure and close Control Device */ +static void deinit_route(struct audio_device *adev) +{ + struct route_info *trinfo = adev->rinfo; + struct audio_route *ar = trinfo->aroute; + + if (trinfo && ar) { + adev->vol_ctrl = NULL; + + audio_route_free(ar); + if (adev->mixerinfo) { + mixer_close(adev->mixerinfo); + adev->mixerinfo = NULL; + } + + free(trinfo); + adev->rinfo = NULL; + } + + return; +} + +static audio_usage_mode_t get_usage_mode( + struct audio_device *adev, + audio_mode_t plat_amode) +{ + audio_usage_mode_t usage_amode = AUSAGE_MODE_NONE; + + switch (plat_amode) { + case AUDIO_MODE_NORMAL: + case AUDIO_MODE_RINGTONE: + usage_amode = AUSAGE_MODE_NORMAL; + if (adev->call_state != CALL_OFF) + ALOGE("device-%s: Abnormal Call State from Normal Mode!", __func__); + break; + case AUDIO_MODE_IN_CALL: + switch (adev->call_state) { + case CP_CALL: + usage_amode = AUSAGE_MODE_VOICE_CALL; + break; + case LTE_CALL: + usage_amode = AUSAGE_MODE_LTE_CALL; + break; + case WIFI_CALL: + usage_amode = AUSAGE_MODE_WIFI_CALL; + break; + case CALL_OFF: + usage_amode = AUSAGE_MODE_NORMAL; + ALOGE("device-%s: Abnormal Call State from InCall Mode!", __func__); + break; + } + break; + case AUDIO_MODE_IN_COMMUNICATION: + usage_amode = AUSAGE_MODE_VOIP_CALL; + if (adev->call_state != CALL_OFF) + ALOGE("device-%s: Abnormal Call State from Communication Mode!", __func__); + break; + default: + usage_amode = AUSAGE_MODE_NORMAL; + } + + return usage_amode; +} + +static void make_path_name( + audio_usage_mode_t path_amode, + device_type_t device, + char *path_name) +{ + memset(path_name, 0, MAX_PATH_NAME_LEN); + + 
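+ /* The path name has the form "<mode>-<device>" (mode_path_table entry, a dash, then + * device_path_table entry); do_set_route() passes it to audio_route_apply_and_update_path() + * / audio_route_reset_and_update_path(), so it must match a path defined in the mixer + * paths XML loaded by init_route(). */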
strlcpy(path_name, mode_path_table[path_amode], MAX_PATH_NAME_LEN); + strlcat(path_name, "-", MAX_PATH_NAME_LEN); + strlcat(path_name, device_path_table[device], MAX_PATH_NAME_LEN); + + return ; +} + +static void do_set_route( + struct audio_device *adev, + audio_usage_id_t usage_id, + device_type_t device, + audio_usage_mode_t path_amode, + bool set) +{ + struct audio_route *ar = adev->rinfo->aroute; + char path_name[MAX_PATH_NAME_LEN]; + + make_path_name(path_amode, device, path_name); + if (set) + if (audio_route_apply_and_update_path(ar, path_name) < 0) + ALOGE("%s-%s: Failed to enable Audio Route(%s)", usage_table[usage_id], __func__, path_name); + else + ALOGD("%s-%s: Enabled Audio Route(%s)", usage_table[usage_id], __func__, path_name); + else + if (audio_route_reset_and_update_path(ar, path_name) < 0) + ALOGE("%s-%s: Failed to disable Audio Route(%s)", usage_table[usage_id], __func__, path_name); + else + ALOGD("%s-%s: Disabled Audio Route(%s)", usage_table[usage_id], __func__, path_name); + + return ; +} + +static int set_audio_route( + void *stream, + audio_usage_type_t usage_type, + audio_usage_id_t usage_id, + bool set) +{ + struct audio_device *adev = NULL; + struct stream_out *stream_out = NULL; + struct stream_in *stream_in = NULL; + + struct audio_route *ar = NULL; + struct exynos_audio_usage *ausage = NULL; + device_type_t cur_device = DEVICE_NONE; + device_type_t new_device = DEVICE_NONE; + audio_usage_mode_t cur_dev_amode = AUSAGE_MODE_NONE; + char cur_pathname[MAX_PATH_NAME_LEN] = "media-none"; + char new_pathname[MAX_PATH_NAME_LEN] = "media-none"; + bool disable_cur_device = false; + bool enable_new_device = false; + audio_io_handle_t handle = 0; + + int ret = 0; + + if (usage_type == AUSAGE_PLAYBACK) { + stream_out = (struct stream_out *)stream; + adev = stream_out->adev; + handle = stream_out->handle; + } + else if (usage_type == AUSAGE_CAPTURE) { + stream_in = (struct stream_in *)stream; + adev = stream_in->adev; + handle = stream_in->handle; + } + ar = adev->rinfo->aroute; + + /* Get usage pointer from the list */ + ausage = get_ausage_from_list(adev, usage_type, usage_id, handle); + if (ausage) { + /* Get current Mode & routed device information based on usage type */ + // Usage Structure has current device information + // Usage Audio Mode has current mode information + // Stream Structure has new device information + if (ausage->usage_type == AUSAGE_PLAYBACK) { + cur_device = ausage->out_device_id; + cur_dev_amode = ausage->out_device_amode; + new_device = get_device_id(ausage->stream.out->devices); + } else if (ausage->usage_type == AUSAGE_CAPTURE) { + cur_device = ausage->in_device_id; + cur_dev_amode = ausage->in_device_amode; + new_device = get_device_id(ausage->stream.in->devices); + } + ALOGD("%s-%s: [%s] current-device(%s) new-device(%s) in Mode(%s)!", usage_table[usage_id], __func__, + (ausage->usage_type == AUSAGE_PLAYBACK ? 
"PLAYBACK" : "CAPTURE"), + device_path_table[cur_device], device_path_table[new_device], mode_table[cur_dev_amode]); + + /* get audio route pathname for both current & new devices*/ + if (cur_device != DEVICE_NONE) { + make_path_name(cur_dev_amode, cur_device, cur_pathname); + ALOGRV("%s-%s: Current routed pathname: %s", usage_table[usage_id], __func__, cur_pathname); + } + + if (new_device != DEVICE_NONE) { + make_path_name(adev->usage_amode, new_device, new_pathname); + ALOGRV("%s-%s: New route pathname: %s", usage_table[usage_id], __func__, new_pathname); + } + + /* Handle request for disabling/enabling current/new routing */ + if (!set) { + /* Case: Disable Audio Path */ + ALOGRV("%s-%s: Disable audio device(%s)!", + usage_table[usage_id], __func__, device_path_table[cur_device]); + if (get_active_ausage_from_list(adev, ausage, ausage->usage_type) + || isCallMode(adev->usage_amode)) { + ALOGD("%s-%s: Current device(%s) is still in use by other usage!", + usage_table[usage_id], __func__, device_path_table[cur_device]); + } else { + if (cur_device != DEVICE_NONE) + disable_cur_device = true; + } + } else { + /* 1: Requested route is already set up */ + if (new_device == cur_device && + !strcmp(new_pathname, cur_pathname)) { + ALOGD("%s-%s: Request Audio Route [%s] is already setup", usage_table[usage_id], __func__, new_pathname); + } else { + /* need disable current and route new device */ + if (cur_device != DEVICE_NONE) + disable_cur_device = true; + enable_new_device = true; + } + } + + /* Disable current device and if it is output corresponding input device + should be disabled if active */ + if (disable_cur_device) { + if (ausage->usage_type == AUSAGE_PLAYBACK) { + struct exynos_audio_usage *active_ausage; + if (cur_device != DEVICE_NONE) { + /* Disable current routed path */ + do_set_route(adev, usage_id, cur_device, cur_dev_amode, false); + syncup_ausage_from_list(adev, ausage, ausage->usage_type, cur_device, DEVICE_NONE); + + /* check whether input device is active */ + active_ausage = get_active_ausage_from_list(adev, NULL, AUSAGE_CAPTURE); + if (active_ausage || isCallMode(adev->usage_amode)|| + isCallMode(cur_dev_amode)) { + device_type_t in_device = DEVICE_NONE; + device_type_t in_cur_device = DEVICE_NONE; /* Input device matching with disabled output device */ + audio_usage_mode_t in_dev_amode = AUSAGE_MODE_NONE; + audio_usage_id_t in_usage_id = AUSAGE_DEFAULT; + char in_active_pathname[MAX_PATH_NAME_LEN] = "media-none"; + char in_cur_pathname[MAX_PATH_NAME_LEN] = "media-none"; + + in_cur_device = get_indevice_id_from_outdevice(cur_device); + + /*get current routed in-device information */ + if (active_ausage) { + in_device = active_ausage->in_device_id; // Usage Structure has current device information + in_dev_amode = active_ausage->in_device_amode; + in_usage_id = active_ausage->usage_id; + if (in_device != DEVICE_NONE) { + make_path_name(in_dev_amode, in_device, in_active_pathname); + ALOGRV("%s-%s: Input active routed pathname: %s", usage_table[in_usage_id], __func__, in_active_pathname); + } + if (in_cur_device != DEVICE_NONE) { + make_path_name(adev->usage_amode, in_cur_device, in_cur_pathname); + ALOGRV("%s-%s: Input active routed pathname: %s", usage_table[in_usage_id], __func__, in_active_pathname); + } + } else { + in_device = in_cur_device; + in_dev_amode = cur_dev_amode; + in_usage_id = usage_id; + } + + ALOGRV("%s-%s: Disable Active Input Device if required cur_in(%s) out-in(%s) cur-routed-mode(%s) new-mode(%s)", + usage_table[usage_id], __func__, 
device_path_table[in_device], + device_path_table[get_indevice_id_from_outdevice(cur_device)], + mode_table[in_dev_amode], mode_table[adev->usage_amode]); + if ((active_ausage && (active_ausage->stream.in->source == AUDIO_SOURCE_CAMCORDER || + !strcmp(in_active_pathname, in_cur_pathname))) ) { + ALOGD("%s-%s: Skip disabling Current Active input Device(%s)", + usage_table[usage_id], __func__, device_path_table[in_device]); + } else { + ALOGD("%s-%s: Current Active input Device is Disabled (%s)", + usage_table[usage_id], __func__, device_path_table[in_device]); + if (in_device != DEVICE_NONE) { + /* Disable current routed in-device path */ + do_set_route(adev, in_usage_id, in_device, in_dev_amode, false); + syncup_ausage_from_list(adev, active_ausage, AUSAGE_CAPTURE, in_device, DEVICE_NONE); + } + } + } + } + } else { + if (cur_device != DEVICE_NONE) { + /* Disable current routed input path */ + ALOGD("%s-%s: Capture Device is Disabled (%s)", usage_table[usage_id], + __func__, device_path_table[cur_device]); + do_set_route(adev, usage_id, cur_device, cur_dev_amode, false); + syncup_ausage_from_list(adev, ausage, ausage->usage_type, cur_device, DEVICE_NONE); + } + } + } + + /* Enable new device and if it is output corresponding input device + should be enabled if active */ + if (enable_new_device) { + if (ausage->usage_type == AUSAGE_PLAYBACK) { + struct exynos_audio_usage *active_ausage; + device_type_t in_device = DEVICE_NONE; + + if (new_device != DEVICE_NONE) { + /* Enable new device routed path */ + do_set_route(adev, usage_id, new_device, adev->usage_amode, true); + syncup_ausage_from_list(adev, ausage, ausage->usage_type, DEVICE_NONE, new_device); + + /* check whether input device is active */ + active_ausage = get_active_ausage_from_list(adev, NULL, AUSAGE_CAPTURE); + if (active_ausage || isCallMode(adev->usage_amode)) { + device_type_t in_device = DEVICE_NONE; + audio_usage_id_t in_usage_id = AUSAGE_DEFAULT; + + ALOGD("%s-%s: ENABLE --Input Device for active usage ", usage_table[usage_id], __func__); + in_device = get_indevice_id_from_outdevice(new_device); + + /*get current routed in-device information */ + if (active_ausage) { + in_usage_id = active_ausage->usage_id; + } else { + in_usage_id = usage_id; + } + + if ((active_ausage && active_ausage->in_device_id == DEVICE_NONE) + || isCallMode(adev->usage_amode)) { + ALOGD("%s-%s: Input Device Enabled (%s)", usage_table[usage_id], + __func__, device_path_table[in_device]); + /* Enable new in-device routed path */ + do_set_route(adev, in_usage_id, in_device, adev->usage_amode, true); + syncup_ausage_from_list(adev, active_ausage, AUSAGE_CAPTURE, DEVICE_NONE, in_device); + } + } + } + } else { + if (new_device != DEVICE_NONE) { + /* Enable new input device routed path */ + ALOGD("%s-%s: Capture Device is Enabled (%s)", usage_table[usage_id], + __func__, device_path_table[new_device]); + do_set_route(adev, usage_id, new_device, adev->usage_amode, true); + syncup_ausage_from_list(adev, ausage, ausage->usage_type, DEVICE_NONE, new_device); + } + } + + } + } else { + ALOGW("%s-%s: Cannot find it from Audio Usage list!", usage_table[usage_id], __func__); + ret = -EINVAL; + } + +#ifdef ROUTING_VERBOSE_LOGGING + print_ausage(adev); +#endif + + return ret; +} + +/*****************************************************************************/ +/* */ +/* Local Functions for BT-SCO support */ +/* */ +/*****************************************************************************/ +static int do_start_bt_sco(struct stream_out *out) +{ + struct 
audio_device *adev = out->adev; + unsigned int sound_card; + unsigned int sound_device; + struct pcm_config pcmconfig; + int ret = 0; + + // Initialize BT-Sco sound card and device information + sound_card = BT_SCO_SOUND_CARD; + sound_device = BT_SCO_SOUND_DEVICE; + + pcmconfig = pcm_config_bt_sco; + + if (adev->pcm_btsco_out == NULL) { + /* Open BT-SCO Output */ + adev->pcm_btsco_out = pcm_open(sound_card, sound_device, PCM_OUT, &pcmconfig); + if (adev->pcm_btsco_out && !pcm_is_ready(adev->pcm_btsco_out)) { + ALOGE("BT SCO-%s: Output Device is not ready(%s)!", __func__, pcm_get_error(adev->pcm_btsco_out)); + ret = -EBADF; + goto err_out; + } + ALOGI("BT SCO-%s: PCM output device open Success!", __func__); + /* Start PCM Device */ + pcm_start(adev->pcm_btsco_out); + ALOGI("BT SCO-%s: PCM output device is started!", __func__); + } + + if (adev->pcm_btsco_in == NULL) { + /* Open BT-SCO Input */ + adev->pcm_btsco_in = pcm_open(sound_card, sound_device, PCM_IN, &pcmconfig); + if (adev->pcm_btsco_in && !pcm_is_ready(adev->pcm_btsco_in)) { + ALOGE("BT SCO-%s: Input Device is not ready(%s)!", __func__, pcm_get_error(adev->pcm_btsco_in)); + ret = -EBADF; + goto err_in; + } + ALOGI("BT SCO-%s: PCM input device open Success!", __func__); + + /* Start PCM Device */ + pcm_start(adev->pcm_btsco_in); + ALOGI("BT SCO-%s: PCM input device is started!", __func__); + } + + return ret; + +err_in: + if (adev->pcm_btsco_in) { + pcm_close(adev->pcm_btsco_in); + adev->pcm_btsco_in = NULL; + ALOGI("BT SCO-%s: PCM input device is closed!", __func__); + } + +err_out: + if (adev->pcm_btsco_out) { + pcm_close(adev->pcm_btsco_out); + adev->pcm_btsco_out = NULL; + ALOGI("BT SCO-%s: PCM output device is closed!", __func__); + } + + return ret; +} + +static int do_stop_bt_sco(struct audio_device *adev) +{ + if (adev->pcm_btsco_in) { + pcm_stop(adev->pcm_btsco_in); + pcm_close(adev->pcm_btsco_in); + adev->pcm_btsco_in = NULL; + ALOGI("BT SCO-%s: PCM input device is stopped & closed!", __func__); + } + + if (adev->pcm_btsco_out) { + pcm_stop(adev->pcm_btsco_out); + pcm_close(adev->pcm_btsco_out); + adev->pcm_btsco_out = NULL; + ALOGI("BT SCO-%s: PCM output device is stopped & closed!", __func__); + } + + return 0; +} + +/*****************************************************************************/ +/* */ +/* Local Functions for Voice Call */ +/* */ +/*****************************************************************************/ +static int do_start_voice_call(struct stream_out *out) +{ + struct audio_device *adev = out->adev; + unsigned int sound_card; + unsigned int sound_device; + struct pcm_config pcmconfig; + int ret = -EBADF; + + // Hard coded for test + sound_card = VOICE_CALL_SOUND_CARD; + sound_device = VOICE_CALL_SOUND_DEVICE; + + pcmconfig = pcm_config_vc_wb; + + if (out->devices & AUDIO_DEVICE_OUT_ALL_SCO) { + do_start_bt_sco(out); + } + + if (isCPCallMode(adev->usage_amode) && voice_is_in_call(adev->voice)) { + if (adev->pcm_voice_tx == NULL) { + /* Open Voice TX(Output) */ + adev->pcm_voice_tx = pcm_open(sound_card, sound_device, PCM_OUT, &pcmconfig); + if (adev->pcm_voice_tx && !pcm_is_ready(adev->pcm_voice_tx)) { + ALOGE("Voice Call-%s: PCM TX Device is not ready(%s)!", __func__, pcm_get_error(adev->pcm_voice_tx)); + goto err_tx; + } + ALOGI("Voice Call-%s: Success to open PCM TX Device!", __func__); + } + + if (adev->pcm_voice_rx == NULL) { + /* Open Voice RX(Input) */ + adev->pcm_voice_rx = pcm_open(sound_card, sound_device, PCM_IN, &pcmconfig); + if (adev->pcm_voice_rx && 
!pcm_is_ready(adev->pcm_voice_rx)) { + ALOGE("Voice Call-%s: PCM RX Device is not ready(%s)!", __func__, pcm_get_error(adev->pcm_voice_rx)); + goto err_rx; + } + ALOGI("Voice Call-%s: Success to open PCM RX Device!", __func__); + } + + /* Start All Devices */ + pcm_start(adev->pcm_voice_tx); + pcm_start(adev->pcm_voice_rx); + ALOGI("Voice Call-%s: Started PCM RX & TX Devices!", __func__); + ret = 0; + } + + return ret; + +err_rx: + if (adev->pcm_voice_rx) { + pcm_close(adev->pcm_voice_rx); + adev->pcm_voice_rx = NULL; + ALOGI("Voice Call-%s: PCM RX Device is closed!", __func__); + } + +err_tx: + if (adev->pcm_voice_tx) { + pcm_close(adev->pcm_voice_tx); + adev->pcm_voice_tx = NULL; + ALOGI("Voice Call-%s: PCM TX Device is closed!", __func__); + } + + return ret; +} + +static int do_stop_voice_call(struct audio_device *adev) +{ + if (adev->pcm_btsco_in || adev->pcm_btsco_out) { + /* close BT-SCO pcm first */ + do_stop_bt_sco(adev); + } + + if (adev->pcm_voice_rx) { + pcm_stop(adev->pcm_voice_rx); + pcm_close(adev->pcm_voice_rx); + adev->pcm_voice_rx = NULL; + ALOGI("Voice Call-%s: Stopped & Closed PCM RX Devices!", __func__); + } + + if (adev->pcm_voice_tx) { + pcm_stop(adev->pcm_voice_tx); + pcm_close(adev->pcm_voice_tx); + adev->pcm_voice_tx = NULL; + ALOGI("Voice Call-%s: Stopped & Closed PCM TX Devices!", __func__); + } + + return 0; +} + + +/*****************************************************************************/ +/* Local Functions for Playback Stream */ + +/* must be called with out->lock locked */ +static int send_offload_msg(struct stream_out *out, offload_msg_type_t msg) +{ + struct exynos_offload_msg *msg_node = NULL; + int ret = 0; + + msg_node = (struct exynos_offload_msg *)calloc(1, sizeof(struct exynos_offload_msg)); + if (msg_node) { + msg_node->msg = msg; + + list_add_tail(&out->offload_msg_list, &msg_node->node); + pthread_cond_signal(&out->offload_msg_cond); + + ALOGV("offload_out-%s: Sent Message = %s", __func__, offload_msg_table[msg]); + } else { + ALOGE("offload_out-%s: Failed to allocate memory for Offload MSG", __func__); + ret = -ENOMEM; + } + + return ret; +} + +/* must be called with out->lock locked */ +static offload_msg_type_t recv_offload_msg(struct stream_out *out) +{ + struct listnode *offload_msg_list = &(out->offload_msg_list); + + struct listnode *head = list_head(offload_msg_list); + struct exynos_offload_msg *msg_node = node_to_item(head, struct exynos_offload_msg, node); + offload_msg_type_t msg = msg_node->msg; + + list_remove(head); + free(msg_node); + + ALOGV("offload_out-%s: Received Message = %s", __func__, offload_msg_table[msg]); + return msg; +} + +static int do_set_volume(struct stream_out *out, float left, float right) +{ + struct audio_device *adev = out->adev; + struct mixer_ctl *ctrl; + int ret = -ENAVAIL; + int val[2]; + + ctrl = mixer_get_ctl_by_name(adev->mixerinfo, OFFLOAD_VOLUME_CONTROL_NAME); + if (ctrl) { + val[0] = (int)(left * COMPRESS_PLAYBACK_VOLUME_MAX); + val[1] = (int)(right * COMPRESS_PLAYBACK_VOLUME_MAX); + ret = mixer_ctl_set_array(ctrl, val, sizeof(val)/sizeof(val[0])); + if (ret != 0) + ALOGE("%s-%s: Fail to set Volume", usage_table[out->ausage], __func__); + else + ALOGD("%s-%s: Set Volume(%f:%f) => (%d:%d)", usage_table[out->ausage], __func__, left, right, val[0], val[1]); + } else { + ALOGE("%s-%s: Cannot find volume controller", usage_table[out->ausage], __func__); + } + + return ret; +} + +static int do_close_output_stream(struct stream_out *out) +{ + struct audio_device *adev = out->adev; + int ret = 0; + + 
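/* Note: callers hold both out->lock and adev->lock across this teardown (see out_standby() and the err_open path of do_open_output_stream()). */ +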
/* Close PCM/Compress Device */ + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + /* In cases of low_latency, deep_buffer and Aux_Digital usage, It needs to pcm_close() */ + if (out->pcminfo) { + ret = pcm_close(out->pcminfo); + out->pcminfo = NULL; + } + ALOGI("%s-%s: Closed PCM Device", usage_table[out->ausage], __func__); + } else { + /* In cases of compress_offload usage, It needs to compress_close() */ + if (out->comprinfo) { + compress_close(out->comprinfo); + out->comprinfo = NULL; + } + ALOGI("%s-%s: Closed Compress Device", usage_table[out->ausage], __func__); + } + + /* Reset Routing Path */ + /* Have to keep devices information to restart without calling set_parameter() */ + set_audio_route((void *)out, AUSAGE_PLAYBACK, out->ausage, false); + + return ret; +} + +static int do_open_output_stream(struct stream_out *out) +{ + struct audio_device *adev = out->adev; + unsigned int sound_card; + unsigned int sound_device; + unsigned int flags; + int ret = 0; + char fn[256]; + + /* Set Routing Path */ + //set_audio_route((void *)out, AUSAGE_PLAYBACK, out->ausage, true); + + /* Open PCM/Compress Device */ + sound_card = get_sound_card(out->ausage); + sound_device = get_sound_device(out->ausage); + + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (!out->pcminfo) { + /* In cases of low_latency, deep_buffer and Aux_Digital usage, It needs to pcm_open() */ + flags = PCM_OUT | PCM_MONOTONIC; + + out->pcminfo = pcm_open(sound_card, sound_device, flags, &out->pcmconfig); + if (out->pcminfo && !pcm_is_ready(out->pcminfo)) { + /* pcm_open does always return pcm structure, not NULL */ + ALOGE("%s-%s: PCM Device is not ready(%s)!", usage_table[out->ausage], __func__, pcm_get_error(out->pcminfo)); + goto err_open; + } + + //ALOGI("Refined PCM Period Size = %u, Period Count = %u", out->pcmconfig.period_size, out->pcmconfig.period_count); + snprintf(fn, sizeof(fn), "/dev/snd/pcmC%uD%u%c", sound_card, sound_device, flags & PCM_IN ? 
'c' : 'p'); + ALOGI("%s-%s: Opened PCM Device is %s", usage_table[out->ausage], __func__, fn); + + out->comprinfo = NULL; + } + } else { + if (!out->comprinfo) { + /* In cases of compress_offload usage, It needs to compress_open() */ + flags = COMPRESS_IN; + + out->comprinfo = compress_open(sound_card, sound_device, flags, &out->comprconfig); + if (out->comprinfo && !is_compress_ready(out->comprinfo)) { + /* compress_open does always return compress structure, not NULL */ + ALOGE("%s-%s: Compress Device is not ready(%s)!", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + goto err_open; + } + + //ALOGI("Refined Compress Fragment Size = %u, Fragments = %u", out->comprconfig.fragment_size, out->comprconfig.fragments); + snprintf(fn, sizeof(fn), "/dev/snd/comprC%uD%u", sound_card, sound_device); + ALOGI("%s-%s: Opened Compress Device is %s", usage_table[out->ausage], __func__, fn); + + out->pcminfo = NULL; + + /* Set Volume */ + do_set_volume(out, out->vol_left, out->vol_right); + } + } + + return ret; + +err_open: + do_close_output_stream(out); + ret = -EINVAL; + return ret; +} + +static int do_start_output_stream(struct stream_out *out) +{ + struct audio_device *adev = out->adev; + int ret = -ENOSYS; + + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { +#if 0 + if (out->pcminfo) { + ret = pcm_start(out->pcminfo); + if (ret == 0) + ALOGI("%s-%s: Started PCM Device", usage_table[out->ausage], __func__); + else + ALOGE("%s-%s: Cannot start PCM(%s)", usage_table[out->ausage], __func__, pcm_get_error(out->pcminfo)); + } +#endif + ret = 0; + } else { + if (out->comprinfo) { + if (out->nonblock_flag && out->offload_callback) { + compress_nonblock(out->comprinfo, out->nonblock_flag); + ALOGD("%s-%s: Set Nonblock mode!", usage_table[out->ausage], __func__); + } else { + compress_nonblock(out->comprinfo, 0); + ALOGD("%s-%s: Set Block mode!", usage_table[out->ausage], __func__); + } + + ret = compress_start(out->comprinfo); + if (ret == 0) + ALOGI("%s-%s: Started Compress Device", usage_table[out->ausage], __func__); + else + ALOGE("%s-%s: Cannot start Compress Offload(%s)", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + + /* Notify Offload playback started to VisualizerHAL */ + if (adev->notify_start_output_tovisualizer != NULL) { + adev->notify_start_output_tovisualizer(out->handle); + ALOGI("%s-%s: Notify Start to VisualizerHAL", usage_table[out->ausage], __func__); + } + } + } + + return ret; +} + +static int do_stop_output_stream(struct stream_out *out, bool force_stop) +{ + struct audio_device *adev = out->adev; + int ret = -ENOSYS; + + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { +#if 0 + if (out->pcminfo) { + ret = pcm_stop(out->pcminfo); + if (ret == 0) + ALOGD("%s-%s: Stopped PCM Device", usage_table[out->ausage], __func__); + else + ALOGE("%s-%s: Cannot stop PCM(%s)", usage_table[out->ausage], __func__, pcm_get_error(out->pcminfo)); + } +#endif + ret = 0; + } else { + if (out->comprinfo) { + /* Check Offload_Callback_Thread is blocked & wait to finish the action */ + if (!force_stop) { + if (out->callback_thread_blocked) { + ALOGV("%s-%s: Waiting Offload Callback Thread is done", usage_table[out->ausage], __func__); + pthread_cond_wait(&out->offload_sync_cond, &out->lock); + } + } + + /* Notify Offload playback stopped to VisualizerHAL */ + if (adev->notify_stop_output_tovisualizer != NULL) { + adev->notify_stop_output_tovisualizer(out->handle); + ALOGI("%s-%s: Notify Stop to VisualizerHAL", usage_table[out->ausage], __func__); 
+ } + + ret = compress_stop(out->comprinfo); + if (ret == 0) + ALOGD("%s-%s: Stopped Compress Device", usage_table[out->ausage], __func__); + else + ALOGE("%s-%s: Cannot stop Compress Offload(%s)", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + + out->ready_new_metadata = 1; + } + } + + return ret; +} + +static int do_write_buffer(struct stream_out *out, const void* buffer, size_t bytes) +{ + int ret = 0, wrote = 0; + + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + /* In cases of low_latency, deep_buffer and Aux_Digital usage, It needs to pcm_write() */ + if (out->pcminfo) { + ret = pcm_write(out->pcminfo, (void *)buffer, bytes); + if (ret == 0) { + ALOGVV("%s-%s: Write Success(%u bytes) to PCM Device", usage_table[out->ausage], __func__, (unsigned int)bytes); + out->written += bytes / (out->pcmconfig.channels * sizeof(short)); // convert to frame unit + ALOGVV("%s-%s: Written = %u frames", usage_table[out->ausage], __func__, (unsigned int)out->written); + wrote = bytes; + } else { + wrote = ret; + ALOGE_IF(out->err_count < MAX_ERR_COUNT, "%s-%s: Write Fail = %s", + usage_table[out->ausage], __func__, pcm_get_error(out->pcminfo)); + out->err_count++; + } + } + } else { + /* In case of compress_offload usage, It needs to compress_write() */ + if (out->comprinfo) { + if (out->ready_new_metadata) { + compress_set_gapless_metadata(out->comprinfo, &out->offload_metadata); + ALOGD("%s-%s: Sent gapless metadata(delay = %u, padding = %u) to Compress Device", + usage_table[out->ausage], __func__, out->offload_metadata.encoder_delay, + out->offload_metadata.encoder_padding); + out->ready_new_metadata = 0; + } + + wrote = compress_write(out->comprinfo, buffer, bytes); + ALOGVV("%s-%s: Write Request(%u bytes) to Compress Device, and Accepted (%u bytes)", usage_table[out->ausage], __func__, (unsigned int)bytes, wrote); + if (wrote < 0) { + ALOGE_IF(out->err_count < MAX_ERR_COUNT, "%s-%s: Error playing sample with Code(%s)", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + out->err_count++; + } else if (wrote >= 0 && wrote < (ssize_t)bytes) { + /* Compress Device has no available buffer, we have to wait */ + ALOGV("%s-%s: There are no available buffer in Compress Device, Need to wait", usage_table[out->ausage], __func__); + ret = send_offload_msg(out, OFFLOAD_MSG_WAIT_WRITE); + } + } + } + + return wrote; +} + +/* Compress Offload Specific Functions */ +static bool is_supported_compressed_format(audio_format_t format) +{ + switch (format & AUDIO_FORMAT_MAIN_MASK) { + case AUDIO_FORMAT_MP3: + case AUDIO_FORMAT_AAC: + return true; + default: + break; + } + + return false; +} + +static int get_snd_codec_id(audio_format_t format) +{ + int id = 0; + + switch (format & AUDIO_FORMAT_MAIN_MASK) { + case AUDIO_FORMAT_MP3: + id = SND_AUDIOCODEC_MP3; + break; + case AUDIO_FORMAT_AAC: + id = SND_AUDIOCODEC_AAC; + break; + default: + ALOGE("%s: Unsupported audio format", __func__); + } + + return id; +} + +static void *offload_cbthread_loop(void *context) +{ + struct stream_out *out = (struct stream_out *) context; + bool get_exit = false; + int ret = 0; + + setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_AUDIO); + set_sched_policy(0, SP_FOREGROUND); + prctl(PR_SET_NAME, (unsigned long)"Offload Callback", 0, 0, 0); + + ALOGD("%s-%s: Started running Offload Callback Thread", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + do { + offload_msg_type_t msg = OFFLOAD_MSG_INVALID; + stream_callback_event_t event; + bool need_callback = 
false; + + if (list_empty(&out->offload_msg_list)) { + ALOGV("%s-%s: transit to sleep", usage_table[out->ausage], __func__); + pthread_cond_wait(&out->offload_msg_cond, &out->lock); + ALOGV("%s-%s: transit to wake-up", usage_table[out->ausage], __func__); + } + + if (!list_empty(&out->offload_msg_list)) + msg = recv_offload_msg(out); + + if (msg == OFFLOAD_MSG_EXIT) { + get_exit = true; + continue; + } + + out->callback_thread_blocked = true; + pthread_mutex_unlock(&out->lock); + + switch (msg) { + case OFFLOAD_MSG_WAIT_WRITE: + if (out->comprinfo) { + ret = compress_wait(out->comprinfo, -1); + if (ret) { + if (out->comprinfo) + ALOGE("Error - compress_wait return %s", compress_get_error(out->comprinfo)); + else + ALOGE("Error - compress_wait return, but compress device already closed"); + } + } + + need_callback = true; + event = STREAM_CBK_EVENT_WRITE_READY; + break; + + case OFFLOAD_MSG_WAIT_PARTIAL_DRAIN: + if (out->comprinfo) { + ret = compress_next_track(out->comprinfo); + if (ret) { + if (out->comprinfo) + ALOGE("Error - compress_next_track return %s", compress_get_error(out->comprinfo)); + else + ALOGE("Error - compress_next_track return, but compress device already closed"); + } + } + + if (out->comprinfo) { + ret = compress_partial_drain(out->comprinfo); + if (ret) { + if (out->comprinfo) + ALOGE("Error - compress_partial_drain return %s", compress_get_error(out->comprinfo)); + else + ALOGE("Error - compress_partial_drain return, but compress device already closed"); + } + } + + need_callback = true; + event = STREAM_CBK_EVENT_DRAIN_READY; + + /* Resend the metadata for next iteration */ + out->ready_new_metadata = 1; + break; + + case OFFLOAD_MSG_WAIT_DRAIN: + if (out->comprinfo) { + ret = compress_drain(out->comprinfo); + if (ret) { + if (out->comprinfo) + ALOGE("Error - compress_drain return %s", compress_get_error(out->comprinfo)); + else + ALOGE("Error - compress_drain return, but compress device already closed"); + } + } + + need_callback = true; + event = STREAM_CBK_EVENT_DRAIN_READY; + break; + + default: + ALOGE("Invalid message = %u", msg); + break; + } + + pthread_mutex_lock(&out->lock); + out->callback_thread_blocked = false; + pthread_cond_signal(&out->offload_sync_cond); + + if (need_callback) { + out->offload_callback(event, NULL, out->offload_cookie); + if (event == STREAM_CBK_EVENT_DRAIN_READY) + ALOGD("%s-%s: Callback to Platform with %d", usage_table[out->ausage], __func__, event); + } + } while(!get_exit); + + /* Clean the message list */ + pthread_cond_signal(&out->offload_sync_cond); + while(!list_empty(&out->offload_msg_list)) + recv_offload_msg(out); + pthread_mutex_unlock(&out->lock); + + ALOGD("%s-%s: Stopped running Offload Callback Thread", usage_table[out->ausage], __func__); + return NULL; +} + +static int create_offload_callback_thread(struct stream_out *out) +{ + pthread_cond_init(&out->offload_msg_cond, (const pthread_condattr_t *) NULL); + pthread_cond_init(&out->offload_sync_cond, (const pthread_condattr_t *) NULL); + + pthread_create(&out->offload_callback_thread, (const pthread_attr_t *) NULL, offload_cbthread_loop, out); + out->callback_thread_blocked = false; + + return 0; +} + +static int destroy_offload_callback_thread(struct stream_out *out) +{ + int ret = 0; + + pthread_mutex_lock(&out->lock); + if (out->sstate != STATE_IDLE) { + /* Stop stream & transit to Idle */ + ALOGD("%s-%s: compress offload stream is running, will stop!", usage_table[out->ausage], __func__); + do_stop_output_stream(out, false); + out->sstate = STATE_IDLE; + 
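/* do_stop_output_stream(out, false) above has already waited on offload_sync_cond for the callback thread to finish any blocking compress call, so it is safe to queue OFFLOAD_MSG_EXIT and join the thread below. */ +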
ALOGI("%s-%s: Transit to Idle", usage_table[out->ausage], __func__); + } + ret = send_offload_msg(out, OFFLOAD_MSG_EXIT); + pthread_mutex_unlock(&out->lock); + + pthread_join(out->offload_callback_thread, (void **) NULL); + ALOGD("%s-%s: Joined Offload Callback Thread!", usage_table[out->ausage], __func__); + + pthread_cond_destroy(&out->offload_sync_cond); + pthread_cond_destroy(&out->offload_msg_cond); + + return 0; +} + + +static void check_and_set_pcm_config( + struct pcm_config *pcmconfig, + struct audio_config *config) +{ + /* Need to check which values are selected when requested configuration + is different with default configuration */ + + ALOGI("Check PCM Channel Count: Default(%d), Request(%d)", + pcmconfig->channels, audio_channel_count_from_out_mask(config->channel_mask)); + pcmconfig->channels = audio_channel_count_from_out_mask(config->channel_mask); + + ALOGI("Check PCM Smapling Rate: Default(%d), Request(%d)", pcmconfig->rate, config->sample_rate); + pcmconfig->rate = config->sample_rate; + + ALOGI("Check PCM Format: Default(%d), Request(%d)", + pcmconfig->format, pcm_format_from_audio_format(config->format)); + pcmconfig->format = pcm_format_from_audio_format(config->format); + + return; +} + +static void amplify(short *pcm_buf, size_t frames) +{ + char value[PROPERTY_VALUE_MAX]; + /* NOTICE + * Beware of clipping. + * Too much volume can cause unrecognizable sound. + */ + if (property_get("ro.fm_record_volume", value, "1.0")) { + float volume; + if (1 == sscanf(value, "%f", &volume)) { + while(frames--) { + *pcm_buf = clamp16((int32_t)(*pcm_buf * volume)); + pcm_buf++; + *pcm_buf = clamp16((int32_t)(*pcm_buf * volume)); + pcm_buf++; + } + } + } +} + +/*****************************************************************************/ +/* Local Functions for Capture Stream */ +static int do_close_input_stream(struct stream_in *in) +{ + struct audio_device *adev = in->adev; + int ret = 0; + + /* Close PCM Device */ + if (in->ausage == AUSAGE_CAPTURE_LOW_LATENCY) { + /* In cases of low_latency usage, It needs to pcm_close() */ + if (in->pcminfo) { + ret = pcm_close(in->pcminfo); + if (adev->pcm_capture == in->pcminfo) + adev->pcm_capture = NULL; + in->pcminfo = NULL; + } + ALOGI("%s-%s: Closed PCM Device", usage_table[in->ausage], __func__); + } else { + /* In cases of Error */ + ALOGE("%s-%s: Invalid Usage", usage_table[in->ausage], __func__); + ret = -EINVAL; + } + + /* Reset Routing Path */ + /* Have to keep devices information to restart without calling set_parameter() */ + set_audio_route((void *)in, AUSAGE_CAPTURE, in->ausage, false); + + return ret; +} + +static int do_open_input_stream(struct stream_in *in) +{ + struct audio_device *adev = in->adev; + unsigned int sound_card; + unsigned int sound_device; + unsigned int flags; + int ret = 0; + char fn[256]; + + /* Set Routing Path */ + //set_audio_route((void *)in, AUSAGE_CAPTURE, in->ausage, true); + + /* Check Unique PCM Device */ + if (adev->pcm_capture) { + ALOGW("%s-%s: PCM Device for Capture is already opened!!!", usage_table[in->ausage], __func__); + clean_dangling_streams(adev, AUSAGE_CAPTURE, (void *)in); + pcm_close(adev->pcm_capture); + adev->pcm_capture = NULL; + } + + /* Open PCM Device */ + sound_card = get_sound_card(in->ausage); + sound_device = get_sound_device(in->ausage); + + if (in->ausage == AUSAGE_CAPTURE_LOW_LATENCY) { + if (!in->pcminfo) { + /* In cases of low_latency usage, It needs to pcm_open() */ + flags = PCM_IN; + + in->pcminfo = pcm_open(sound_card, sound_device, flags, &in->pcmconfig); + if 
(in->pcminfo && !pcm_is_ready(in->pcminfo)) { + ALOGE("%s-%s: PCM Device is not ready(%s)!", usage_table[in->ausage], __func__, pcm_get_error(in->pcminfo)); + goto err_open; + } + + ALOGVV("%s: Refined PCM Period Size = %u, Period Count = %u", __func__, in->pcmconfig.period_size, in->pcmconfig.period_count); + snprintf(fn, sizeof(fn), "/dev/snd/pcmC%uD%u%c", sound_card, sound_device, flags & PCM_IN ? 'c' : 'p'); + ALOGI("%s-%s: Opened PCM Device is %s", usage_table[in->ausage], __func__, fn); + + adev->pcm_capture = in->pcminfo; + } + } else { + /* In cases of Error */ + ALOGE("%s-%s: Invalid Usage", usage_table[in->ausage], __func__); + in->pcminfo = NULL; + ret = -EINVAL; + } + + return ret; + +err_open: + do_close_input_stream(in); + ret = -EINVAL; + return ret; +} + +static int do_start_input_stream(struct stream_in *in) +{ + int ret = -ENOSYS; + + if (in->ausage == AUSAGE_CAPTURE_LOW_LATENCY) { + if (in->pcminfo) { + ret = pcm_start(in->pcminfo); + if (ret == 0) + ALOGI("%s-%s: Started PCM Device", usage_table[in->ausage], __func__); + else + ALOGE("%s-%s: Cannot start PCM(%s)", usage_table[in->ausage], __func__, pcm_get_error(in->pcminfo)); + } + } else { + /* In cases of Error */ + ALOGE("%s-%s: Invalid Usage", usage_table[in->ausage], __func__); + ret = -EINVAL; + } + + return ret; +} + +static int do_stop_input_stream(struct stream_in *in) +{ + int ret = -ENOSYS; + + if (in->ausage == AUSAGE_CAPTURE_LOW_LATENCY) { + if (in->pcminfo) { + ret = pcm_stop(in->pcminfo); + if (ret == 0) + ALOGI("%s-%s: Stopped PCM Device", usage_table[in->ausage], __func__); + else + ALOGE("%s-%s: Cannot stop PCM(%s)", usage_table[in->ausage], __func__, pcm_get_error(in->pcminfo)); + } + } else { + /* In cases of Error */ + ALOGE("%s-%s: Invalid Usage", usage_table[in->ausage], __func__); + ret = -EINVAL; + } + + return ret; +} + +static int do_read_buffer(struct stream_in *in, void* buffer, size_t bytes) +{ + int ret = 0, read = 0; + + if (in->ausage == AUSAGE_CAPTURE_LOW_LATENCY) { + if (in->pcminfo) { + ret = pcm_read(in->pcminfo, (void*)buffer, (unsigned int)bytes); + if (ret == 0) { + ALOGVV("%s-%s: Read Success(%u bytes) from PCM Device", usage_table[in->ausage], __func__, (unsigned int)bytes); + read = bytes; + if (in->devices == AUDIO_DEVICE_IN_FM_TUNER) { + amplify((short *) buffer, bytes / 4); + } + } else { + read = ret; + ALOGE_IF(in->err_count < MAX_ERR_COUNT, "%s-%s: Read Fail = %s", + usage_table[in->ausage], __func__, pcm_get_error(in->pcminfo)); + in->err_count++; + } + } + } + + return read; +} + +static void check_pcm_config( + struct pcm_config *pcmconfig, + struct audio_config *config) +{ + // Need to check which values are selected when requested configuration is different with default configuration + ALOGI("%s: Check PCM Channel Count: Default(%d), Request(%d)", __func__, pcmconfig->channels, audio_channel_count_from_out_mask(config->channel_mask)); + ALOGI("%s: Check PCM Smapling Rate: Default(%d), Request(%d)", __func__, pcmconfig->rate, config->sample_rate); + ALOGI("%s: Check PCM Format: Default(%d), Request(%d)", __func__, pcmconfig->format, pcm_format_from_audio_format(config->format)); + + return ; +} + +static int check_input_parameters( + uint32_t sample_rate, + audio_format_t format, + int channel_count) +{ + if (format != AUDIO_FORMAT_PCM_16_BIT) + return -EINVAL; + + if (channel_count != 2) + return -EINVAL; + + if (sample_rate != 48000) + return -EINVAL; + + return 0; +} + +static size_t get_input_buffer_size(uint32_t sample_rate, int channel_count) +{ + size_t size = 
0; + + size = (sample_rate * AUDIO_CAPTURE_PERIOD_DURATION_MSEC) / 1000;// Number of Sample during duration + size *= sizeof(short) * channel_count; // Always 16bit PCM, so 2Bytes data(Short Size) + + /* make sure the size is multiple of 32 bytes at 48 kHz mono 16-bit PCM: + * 5.000 ms => 240 samples => 15*16*1*2 = 480 Bytes, a whole multiple of 32 (15) + * 3.333 ms => 160 samples => 10*16*1*2 = 320 Bytes, a whole multiple of 32 (10) + */ + size += 0x1f; + size &= ~0x1f; + + return size; +} + +/****************************************************************************/ + +/****************************************************************************/ +/** **/ +/** The Stream_Out Function Implementation **/ +/** **/ +/****************************************************************************/ +static uint32_t out_get_sample_rate(const struct audio_stream *stream) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGV("%s-%s: exit with sample rate = %u", usage_table[out->ausage], __func__, out->sample_rate); + return out->sample_rate; +} + +static int out_set_sample_rate( + struct audio_stream *stream __unused, + uint32_t rate __unused) +{ + return -ENOSYS; +} + +static size_t out_get_buffer_size(const struct audio_stream *stream) +{ + struct stream_out *out = (struct stream_out *)stream; + + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + /* Total Buffer Size in Kernel = period_size * period_count * number of bytes per sample(frame) */ + ALOGV("%s-%s: Period Size = %u, Frame Size = %u", usage_table[out->ausage], __func__, + out->pcmconfig.period_size, (unsigned int)audio_stream_out_frame_size((const struct audio_stream_out *)stream)); + return out->pcmconfig.period_size * audio_stream_out_frame_size((const struct audio_stream_out *)stream); + } else { + /* Total Buffer Size in Kernel is fixed 4K * 5ea */ + ALOGV("%s-%s: Fragment Size = %u", usage_table[out->ausage], __func__, COMPRESS_PLAYBACK_BUFFER_SIZE); + return COMPRESS_PLAYBACK_BUFFER_SIZE; + } + + return 4096; +} + +static audio_channel_mask_t out_get_channels(const struct audio_stream *stream) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGV("%s-%s: exit with channel mask = 0x%x", usage_table[out->ausage], __func__, out->channel_mask); + return out->channel_mask; +} + +static audio_format_t out_get_format(const struct audio_stream *stream) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGV("%s-%s: exit with audio format = 0x%x", usage_table[out->ausage], __func__, out->format); + return out->format; +} + +static int out_set_format( + struct audio_stream *stream __unused, + audio_format_t format __unused) +{ + return -ENOSYS; +} + +static int out_standby(struct audio_stream *stream) +{ + struct stream_out *out = (struct stream_out *)stream; + struct audio_device *adev = out->adev; + + ALOGV("%s-%s: enter", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + if (out->sstate != STATE_STANDBY) { + /* Stop stream & transit to Idle */ + if (out->sstate != STATE_IDLE) { + ALOGV("%s-%s: stream is running, will stop!", usage_table[out->ausage], __func__); + do_stop_output_stream(out, false); + out->sstate = STATE_IDLE; + ALOGI("%s-%s: Transit to Idle", usage_table[out->ausage], __func__); + } + + /* Close device & transit to Standby */ + pthread_mutex_lock(&adev->lock); + do_close_output_stream(out); + pthread_mutex_unlock(&adev->lock); + out->sstate = STATE_STANDBY; + ALOGI("%s-%s: Transit to Standby", usage_table[out->ausage], __func__); + } + 
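/* Re-arm the error-log throttle: do_write_buffer() and out_get_presentation_position() stop logging once err_count reaches MAX_ERR_COUNT. */ +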
out->err_count = 0; + pthread_mutex_unlock(&out->lock); + + ALOGV("%s-%s: exit", usage_table[out->ausage], __func__); + return 0; +} + +static int out_dump(const struct audio_stream *stream, int fd) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGV("%s-%s: enit with fd(%d)", usage_table[out->ausage], __func__, fd); + return 0; +} + +static int out_set_parameters(struct audio_stream *stream, const char *kvpairs) +{ + struct stream_out *out = (struct stream_out *)stream; + struct audio_device *adev = out->adev; + struct str_parms *parms; + int ret = 0, process_count = 0; + char value[32]; + + ALOGD("%s-%s: enter with param = %s", usage_table[out->ausage], __func__, kvpairs); + + parms = str_parms_create_str(kvpairs); + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + struct compr_gapless_mdata tmp_mdata; + bool need_to_set_metadata = false; + + tmp_mdata.encoder_delay = 0; + tmp_mdata.encoder_padding = 0; + + /* These parameters are sended from sendMetaDataToHal() in Util.cpp when openAudioSink() is called */ + ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE, value, sizeof(value)); + if (ret >= 0) { + unsigned int bit_rate = atoi(value); + if (out->comprconfig.codec->bit_rate == bit_rate) + ALOGI("%s: Requested same bit rate(%u)", __func__, bit_rate); + + process_count++; + } + + ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_SAMPLE_RATE, value, sizeof(value)); + if (ret >= 0) { + unsigned int sample_rate = atoi(value); + if (out->sample_rate == sample_rate) + ALOGI("%s: Requested same sample rate(%u)", __func__, sample_rate); + + process_count++; + } + + ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_NUM_CHANNEL, value, sizeof(value)); + if (ret >= 0) { + unsigned int num_ch = atoi(value); + if (out->channel_mask== num_ch) + ALOGI("%s: Requested same Number of Channel(%u)", __func__, num_ch); + + process_count++; + } + + ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES, value, sizeof(value)); + if (ret >= 0) { + tmp_mdata.encoder_delay = atoi(value); + ALOGI("%s: Codec Delay Samples(%u)", __func__, tmp_mdata.encoder_delay); + need_to_set_metadata = true; + + process_count++; + } + + ret = str_parms_get_str(parms, AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES, value, sizeof(value)); + if (ret >= 0) { + tmp_mdata.encoder_padding = atoi(value); + ALOGI("%s: Codec Padding Samples(%u)", __func__, tmp_mdata.encoder_padding); + need_to_set_metadata = true; + + process_count++; + } + + if (need_to_set_metadata) { + out->offload_metadata = tmp_mdata; + out->ready_new_metadata = 1; + } + } + + ret = str_parms_get_str(parms, AUDIO_PARAMETER_STREAM_ROUTING, value, sizeof(value)); + if (ret >= 0) { + audio_devices_t req_device; + + /* AudioFlinger wants to change Audio Routing to some device */ + req_device = atoi(value); + if (req_device != AUDIO_DEVICE_NONE) { + /* Start/stop the BT SCO PCM nodes */ + if ((req_device & AUDIO_DEVICE_OUT_ALL_SCO) ^ + (out->devices & AUDIO_DEVICE_OUT_ALL_SCO)) { + if (req_device & AUDIO_DEVICE_OUT_ALL_SCO) + do_start_bt_sco(out); + else + do_stop_bt_sco(adev); + } + + ALOGD("%s-%s: Requested to change route from 0x%X to 0x%X", + usage_table[out->ausage], __func__, out->devices, req_device); + out->devices = req_device; + + /* Route the new device as requested by framework */ + pthread_mutex_lock(&adev->lock); + set_audio_route((void *)out, AUSAGE_PLAYBACK, out->ausage, true); + pthread_mutex_unlock(&adev->lock); + + if (output_drives_call(adev, out)) { + /* This output stream can be 
handled routing for voice call */ + if (adev->voice) { + if (!voice_is_in_call(adev->voice)) { + /* Check Audio Mode. If it is In-Call Mode, have to start voice call */ + if (isCPCallMode(adev->usage_amode)) { + /* Start Call */ + ret = voice_open(adev->voice); + if (ret == 0) { + adev->usage_amode = get_usage_mode(adev, adev->amode); + ALOGD("device-%s: Platform mode(%d) configured HAL mode(%s)", __func__, adev->amode, mode_table[adev->usage_amode]); + if (adev->usage_amode == AUSAGE_MODE_VOICE_CALL) + voice_set_call_mode(adev->voice, VOICE_CALL_CS); + else if (adev->usage_amode == AUSAGE_MODE_LTE_CALL) + voice_set_call_mode(adev->voice, VOICE_CALL_PS); + voice_set_path(adev->voice, out->devices); + + do_start_voice_call(out); + voice_set_volume(adev->voice, adev->voice_volume); + ALOGD("%s-%s: *** Started CP Voice Call with voice_volume:%f ***", + usage_table[out->ausage],__func__, adev->voice_volume); + } else { + ALOGE("%s-%s: Failed to open Voice Client", usage_table[out->ausage], __func__); + } + } + } else { + if (isCPCallMode(adev->usage_amode)) { + /* Now, In-Call State. Let's update RIL regarding output change */ + adev->usage_amode = get_usage_mode(adev, adev->amode); + ALOGD("device-%s: Platform mode(%d) configured HAL mode(%s)", __func__, adev->amode, mode_table[adev->usage_amode]); + if (adev->usage_amode == AUSAGE_MODE_VOICE_CALL) + voice_set_call_mode(adev->voice, VOICE_CALL_CS); + else if (adev->usage_amode == AUSAGE_MODE_LTE_CALL) + voice_set_call_mode(adev->voice, VOICE_CALL_PS); + + voice_set_path(adev->voice, out->devices); + ALOGD("%s-%s: RIL Route Updated for 0x%X", usage_table[out->ausage], __func__, out->devices); + } + } + } + } + } else { + /* When audio device will be changed, AudioFlinger requests to route with AUDIO_DEVICE_NONE */ + ALOGV("%s-%s: Requested to change route to AUDIO_DEVICE_NONE", usage_table[out->ausage], __func__); + } + + process_count++; + } + pthread_mutex_unlock(&out->lock); + + if (process_count == 0) + ALOGW("%s-%s: Not Supported param!", usage_table[out->ausage], __func__); + + str_parms_destroy(parms); + ALOGD("%s-%s: exit", usage_table[out->ausage], __func__); + return 0; +} + +static char * out_get_parameters(const struct audio_stream *stream, const char *keys) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGD("%s-%s: enter with param = %s", usage_table[out->ausage], __func__, keys); + return strdup(""); +} + +static uint32_t out_get_latency(const struct audio_stream_out *stream) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGV("%s-%s: enter", usage_table[out->ausage], __func__); + + if (out->ausage != AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + /* Basic latency(ms unit) is the delay in kernel buffer = (period * count * 1,000) / sample_rate */ + /* Do we need to add platform latency from DMA to real device(speaker) */ + return (out->pcmconfig.period_count * out->pcmconfig.period_size * 1000) / (out->pcmconfig.rate); + } else { + /* In case of Compress Offload, need to check it */ + return 100; + } + + return 0; +} + +static int out_set_volume( + struct audio_stream_out *stream, + float left, + float right) +{ + struct stream_out *out = (struct stream_out *)stream; + int ret = -ENOSYS; + + ALOGV("%s-%s: enter", usage_table[out->ausage], __func__); + + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + /* Save requested volume level to use at start */ + out->vol_left = left; + out->vol_right = right; + + ret = do_set_volume(out, left, right); + } else { + ALOGE("%s-%s: Don't support volume control for 
this stream", usage_table[out->ausage], __func__); + } + + ALOGV("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + +static ssize_t out_write( + struct audio_stream_out *stream, + const void* buffer, + size_t bytes) +{ + struct stream_out *out = (struct stream_out *)stream; + struct audio_device *adev = out->adev; + int ret = 0, wrote = 0; + + ALOGVV("%s-%s: enter", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + /* Check Device is opened */ + if (out->sstate == STATE_STANDBY) { + pthread_mutex_lock(&adev->lock); + ret = do_open_output_stream(out); + if (ret != 0) { + ALOGE("%s-%s: Fail to open Output Stream!", usage_table[out->ausage], __func__); + pthread_mutex_unlock(&adev->lock); + pthread_mutex_unlock(&out->lock); + return wrote; + } else { + out->sstate = STATE_IDLE; + ALOGI("%s-%s: Transit to Idle", usage_table[out->ausage], __func__); + } + pthread_mutex_unlock(&adev->lock); + } + + /* Transfer data before start */ + if (out->sstate > STATE_STANDBY) { + wrote = do_write_buffer(out, buffer, bytes); + if (wrote >= 0) { + if (out->sstate == STATE_IDLE) { + /* Start stream & Transit to Playing */ + ret = do_start_output_stream(out); + if (ret != 0) { + ALOGE("%s-%s: Fail to start Output Stream!", usage_table[out->ausage], __func__); + } else { + out->sstate = STATE_PLAYING; + ALOGI("%s-%s: Transit to Playing", usage_table[out->ausage], __func__); + } + } + } + } + pthread_mutex_unlock(&out->lock); + + ALOGVV("Playback Stream(%d) %s: exit", out->ausage, __func__); + return wrote; +} + +static int out_get_render_position( + const struct audio_stream_out *stream, + uint32_t *dsp_frames) +{ + struct stream_out *out = (struct stream_out *)stream; + unsigned int sample_rate = 0; + int ret = -ENAVAIL; + + ALOGVV("Playback Stream(%d) %s: enter", out->ausage, __func__); + + pthread_mutex_lock(&out->lock); + if ((out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) && (dsp_frames != NULL)) { + *dsp_frames = 0; + if (out->comprinfo) { + ret = compress_get_tstamp(out->comprinfo, (unsigned long *)dsp_frames, &sample_rate); + if (ret < 0) { + ALOGV("%s-%s: Error is %s", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + } else { + ALOGVV("%s-%s: rendered frames %u with sample_rate %u", usage_table[out->ausage], __func__, *dsp_frames, sample_rate); + ret = 0; + } + } + } else { + ALOGE("%s-%s: PCM is not support yet!", usage_table[out->ausage], __func__); + } + pthread_mutex_unlock(&out->lock); + + ALOGVV("Playback Stream(%d) %s: exit", out->ausage, __func__); + return ret; +} + +static int out_add_audio_effect( + const struct audio_stream *stream, + effect_handle_t effect) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGD("%s: exit with effect(%p)", __func__, effect); + return 0; +} + +static int out_remove_audio_effect( + const struct audio_stream *stream, + effect_handle_t effect) +{ + struct stream_out *out = (struct stream_out *)stream; + + ALOGD("%s: exit with effect(%p)", __func__, effect); + return 0; +} + +static int out_get_next_write_timestamp( + const struct audio_stream_out *stream, + int64_t *timestamp) +{ + struct stream_out *out = (struct stream_out *)stream; + + *timestamp = 0; + +// ALOGV("%s: exit", __func__); + return -EINVAL; +} + +static int out_get_presentation_position( + const struct audio_stream_out *stream, + uint64_t *frames, + struct timespec *timestamp) +{ + struct stream_out *out = (struct stream_out *)stream; + unsigned int sample_rate = 0; + unsigned int avail = 0; + unsigned long 
hw_frames; + int ret = -ENAVAIL; + + ALOGVV("%s-%s: entered", usage_table[out->ausage], __func__); + + if (frames != NULL) { + *frames = 0; + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (out->comprinfo) { + ret = compress_get_tstamp(out->comprinfo, &hw_frames, &sample_rate); + if (ret < 0) { + ALOGV_IF(out->err_count < MAX_ERR_COUNT, "%s-%s: Error is %s", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + out->err_count++; + } else { + ALOGV("%s-%s: rendered frames %lu with sample_rate %u", usage_table[out->ausage], __func__, hw_frames, sample_rate); + *frames = hw_frames; + + clock_gettime(CLOCK_MONOTONIC, timestamp); + ret = 0; + } + } else { + ALOGE("%s-%s: Compress Device is not opended yet", usage_table[out->ausage], __func__); + } + } else { + if (out->pcminfo) { + // Need to check again + ret = pcm_get_htimestamp(out->pcminfo, &avail, timestamp); + if (ret < 0) { + ALOGV_IF(out->err_count < MAX_ERR_COUNT, "%s-%s: Error is %d", usage_table[out->ausage], __func__, ret); + out->err_count++; + } else { + uint64_t kernel_buffer_size = (uint64_t)out->pcmconfig.period_size * (uint64_t)out->pcmconfig.period_count; // Total Frame Count in kernel Buffer + int64_t signed_frames = out->written - kernel_buffer_size + avail; + + ALOGVV("%s-%s: %lld frames are rendered", usage_table[out->ausage], __func__, (long long)signed_frames); + + if (signed_frames >= 0) { + *frames = (uint64_t)signed_frames; + ret = 0; + } + } + } else { + ALOGE("%s-%s: PCM Device is not opended yet", usage_table[out->ausage], __func__); + } + } + pthread_mutex_unlock(&out->lock); + } else { + ALOGE("%s-%s: Invalid Parameter with Null pointer parameter", usage_table[out->ausage], __func__); + } + + ALOGVV("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + +static int out_set_callback( + struct audio_stream_out *stream, + stream_callback_t callback, + void *cookie) +{ + struct stream_out *out = (struct stream_out *)stream; + int ret = -EINVAL; + + ALOGD("%s-%s: entered", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (callback && cookie) { + out->offload_callback = callback; + out->offload_cookie = cookie; + ret = 0; + + ALOGD("%s-%s: set callback function & cookie", usage_table[out->ausage], __func__); + } + } + pthread_mutex_unlock(&out->lock); + + ALOGD("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + +static int out_pause(struct audio_stream_out* stream) +{ + struct stream_out *out = (struct stream_out *)stream; + struct audio_device *adev = out->adev; + int ret = -ENOSYS; + + ALOGD("%s-%s: entered", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (out->comprinfo) { + if (out->sstate == STATE_PLAYING) { + if (adev->notify_stop_output_tovisualizer != NULL) { + adev->notify_stop_output_tovisualizer(out->handle); + ALOGI("%s: Notify Stop to VisualizerHAL", __func__); + } + + ret = compress_pause(out->comprinfo); + if (ret == 0) { + out->sstate = STATE_PAUSED; + ALOGI("%s-%s: Transit to Paused", usage_table[out->ausage], __func__); + } else { + ALOGD("%s-%s: Failed to pause(%s)", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + } + } else { + ALOGD("%s-%s: Abnormal State(%u) for pausing", usage_table[out->ausage], __func__, out->sstate); + } + } else { + ALOGE("%s-%s: Invalid for pausing", usage_table[out->ausage], 
__func__); + } + } + pthread_mutex_unlock(&out->lock); + + ALOGD("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + +static int out_resume(struct audio_stream_out* stream) +{ + struct stream_out *out = (struct stream_out *)stream; + struct audio_device *adev = out->adev; + int ret = -ENOSYS; + + ALOGD("%s-%s: entered", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (out->comprinfo) { + if (out->sstate == STATE_PAUSED) { + ret = compress_resume(out->comprinfo); + if (ret == 0) { + out->sstate = STATE_PLAYING; + ALOGI("%s-%s: Transit to Playing", usage_table[out->ausage], __func__); + + if (adev->notify_start_output_tovisualizer != NULL) { + adev->notify_start_output_tovisualizer(out->handle); + ALOGI("%s: Notify Start to VisualizerHAL", __func__); + } + } else { + ALOGD("%s-%s: Failed to resume(%s)", usage_table[out->ausage], __func__, compress_get_error(out->comprinfo)); + } + } else { + ALOGD("%s-%s: Abnormal State(%u) for resuming", usage_table[out->ausage], __func__, out->sstate); + } + } else { + ALOGE("%s-%s: Invalid for resuming", usage_table[out->ausage], __func__); + } + } + pthread_mutex_unlock(&out->lock); + + ALOGD("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + +static int out_drain(struct audio_stream_out* stream, audio_drain_type_t type ) +{ + struct stream_out *out = (struct stream_out *)stream; + int ret = -ENOSYS; + + ALOGD("%s-%s: entered with type = %d", usage_table[out->ausage], __func__, type); + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (out->comprinfo) { + if (out->sstate > STATE_IDLE) { + if (type == AUDIO_DRAIN_EARLY_NOTIFY) + ret = send_offload_msg(out, OFFLOAD_MSG_WAIT_PARTIAL_DRAIN); + else + ret = send_offload_msg(out, OFFLOAD_MSG_WAIT_DRAIN); + } else { + out->offload_callback(STREAM_CBK_EVENT_DRAIN_READY, NULL, out->offload_cookie); + ALOGD("%s-%s: State is IDLE. 
Return callback with drain_ready", usage_table[out->ausage], __func__); + } + } else { + ALOGE("%s-%s: Invalid for draining", usage_table[out->ausage], __func__); + } + } + pthread_mutex_unlock(&out->lock); + + ALOGD("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + +static int out_flush(struct audio_stream_out* stream) +{ + struct stream_out *out = (struct stream_out *)stream; + int ret = -ENOSYS; + + ALOGD("%s-%s: entered", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&out->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (out->comprinfo) { + if (out->sstate != STATE_IDLE) { + ret = do_stop_output_stream(out, true); + out->sstate = STATE_IDLE; + ALOGI("%s-%s: Transit to idle due to flush", usage_table[out->ausage], __func__); + } else { + ret = 0; + ALOGD("%s-%s: This stream is already stopped", usage_table[out->ausage], __func__); + } + } else { + ALOGE("%s-%s: Invalid for flushing", usage_table[out->ausage], __func__); + } + } + pthread_mutex_unlock(&out->lock); + + ALOGD("%s-%s: exit", usage_table[out->ausage], __func__); + return ret; +} + + +/****************************************************************************/ +/** **/ +/** The Stream_In Function Implementation **/ +/** **/ +/****************************************************************************/ +static uint32_t in_get_sample_rate(const struct audio_stream *stream) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGVV("%s-%s: exit with sample rate = %u", usage_table[in->ausage], __func__, in->sample_rate); + return in->sample_rate; +} + +static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate) +{ + struct stream_in *in = (struct stream_in *)stream; + (void)rate; + + ALOGVV("%s-%s: exit with %u", usage_table[in->ausage], __func__, rate); + return -ENOSYS; +} + +static size_t in_get_buffer_size(const struct audio_stream *stream) +{ + struct stream_in *in = (struct stream_in *)stream; + size_t size = 0; + int channel_count = audio_channel_count_from_in_mask(in->channel_mask); + + size = get_input_buffer_size(in->sample_rate, channel_count); + + ALOGVV("%s-%s: exit with %d", usage_table[in->ausage], __func__, (int)size); + return size; +} + +static audio_channel_mask_t in_get_channels(const struct audio_stream *stream) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGVV("%s-%s: exit with channel mask = 0x%x", usage_table[in->ausage], __func__, in->channel_mask); + return in->channel_mask; +} + +static audio_format_t in_get_format(const struct audio_stream *stream) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGVV("%s-%s: exit with audio format = 0x%x", usage_table[in->ausage], __func__, in->format); + return in->format; +} + +static int in_set_format(struct audio_stream *stream, audio_format_t format) +{ + struct stream_in *in = (struct stream_in *)stream; + (void)format; + + ALOGVV("%s-%s: enter with %d", usage_table[in->ausage], __func__, format); + + ALOGVV("%s-%s: exit", usage_table[in->ausage], __func__); + return -ENOSYS; +} + +static int in_standby(struct audio_stream *stream) +{ + struct stream_in *in = (struct stream_in *)stream; + struct audio_device *adev = in->adev; + + ALOGD("%s-%s: enter", usage_table[in->ausage], __func__); + + pthread_mutex_lock(&in->lock); + if (in->sstate != STATE_STANDBY) { + /* Stop stream & transit to Idle */ + if (in->sstate != STATE_IDLE) { + ALOGV("%s-%s: stream is running, will stop!", usage_table[in->ausage], __func__); + do_stop_input_stream(in); + in->sstate = 
STATE_IDLE; + ALOGI("%s-%s: Transit to Idle", usage_table[in->ausage], __func__); + } + + /* Close device & transit to Standby */ + pthread_mutex_lock(&adev->lock); + do_close_input_stream(in); + pthread_mutex_unlock(&adev->lock); + in->sstate = STATE_STANDBY; + ALOGI("%s-%s: Transit to Standby", usage_table[in->ausage], __func__); + } + in->err_count = 0; + pthread_mutex_unlock(&in->lock); + + ALOGD("%s-%s: exit", usage_table[in->ausage], __func__); + return 0; +} + +static int in_dump(const struct audio_stream *stream, int fd) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGV("%s-%s: enter with fd(%d)", usage_table[in->ausage], __func__, fd); + + ALOGV("%s-%s: exit", usage_table[in->ausage], __func__); + return 0; +} + +static int in_set_parameters(struct audio_stream *stream, const char *kvpairs) +{ + struct stream_in *in = (struct stream_in *)stream; + struct audio_device *adev = in->adev; + struct str_parms *parms; + int ret = 0, process_count = 0; + char value[32]; + + ALOGD("%s-%s: enter with param = %s", usage_table[in->ausage], __func__, kvpairs); + + parms = str_parms_create_str(kvpairs); + + pthread_mutex_lock(&in->lock); + ret = str_parms_get_str(parms, AUDIO_PARAMETER_STREAM_INPUT_SOURCE, value, sizeof(value)); + if (ret >= 0) { + unsigned int val; + + val = (unsigned int)atoi(value); + if ((in->source != val) && (val != 0)) { + ALOGD("%s-%s: Changing source from %d to %d", usage_table[in->ausage], __func__, in->source, val); + in->source = val; + } + + process_count++; + } + + ret = str_parms_get_str(parms, AUDIO_PARAMETER_STREAM_ROUTING, value, sizeof(value)); + if (ret >= 0) { + audio_devices_t req_device; + + /* AudioFlinger wants to change Audio Routing to some device */ + req_device = atoi(value); + if (req_device != 0) { + ALOGD("%s-%s: Requested to change route from 0x%X to 0x%X", usage_table[in->ausage], __func__, in->devices, req_device); + in->devices = req_device; + + pthread_mutex_lock(&adev->lock); + set_audio_route((void *)in, AUSAGE_CAPTURE, in->ausage, true); + pthread_mutex_unlock(&adev->lock); + } + + process_count++; + } + pthread_mutex_unlock(&in->lock); + + if (process_count == 0) + ALOGW("%s-%s: Not Supported param!", usage_table[in->ausage], __func__); + + str_parms_destroy(parms); + ALOGD("%s-%s: exit", usage_table[in->ausage], __func__); + return 0; +} + +static char * in_get_parameters( + const struct audio_stream *stream, + const char *keys) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGD("%s-%s: enter with keys(%s)", usage_table[in->ausage], __func__, keys); + + return strdup(""); +} + +static int in_set_gain(struct audio_stream_in *stream, float gain) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGV("%s-%s: enter with gain(%f)", usage_table[in->ausage], __func__, gain); + + return 0; +} + +static ssize_t in_read( + struct audio_stream_in *stream, + void* buffer, + size_t bytes) +{ + struct stream_in *in = (struct stream_in *)stream; + struct audio_device *adev = in->adev; + size_t frames_rq = bytes / audio_stream_in_frame_size(stream); + int ret = 0; + + ALOGVV("%s-%s: enter", usage_table[in->ausage], __func__); + + pthread_mutex_lock(&in->lock); + /* Check Device is opened */ + if (in->sstate == STATE_STANDBY) { + pthread_mutex_lock(&adev->lock); + ret = do_open_input_stream(in); + if (ret != 0) { + ALOGE("%s-%s: Fail to open Output Stream!", usage_table[in->ausage], __func__); + pthread_mutex_unlock(&adev->lock); + pthread_mutex_unlock(&in->lock); + return ret; + } else { + in->sstate = 
STATE_IDLE; + ALOGI("%s-%s: Transit to Idle", usage_table[in->ausage], __func__); + } + pthread_mutex_unlock(&adev->lock); + } + + /* Start stream before transferring data */ + if (in->sstate == STATE_IDLE) { + /* Start stream & transit to Playing */ + ret = do_start_input_stream(in); + if (ret != 0) { + ALOGE("%s-%s: Fail to start Input Stream!", usage_table[in->ausage], __func__); + } else { + in->sstate = STATE_PLAYING; + ALOGI("%s-%s: Transit to Capturing", usage_table[in->ausage], __func__); + } + } + + if (in->sstate == STATE_PLAYING) + do_read_buffer(in, buffer, bytes); + pthread_mutex_unlock(&in->lock); + + ALOGVV("%s-%s: exit with read data(%d Bytes)", usage_table[in->ausage], __func__, (int)bytes); + return bytes; +} + +static uint32_t in_get_input_frames_lost(struct audio_stream_in *stream) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGV("%s-%s: exit", usage_table[in->ausage], __func__); + return 0; +} + +static int in_add_audio_effect(const struct audio_stream *stream, effect_handle_t effect) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGD("%s: enter with effect(%p)", __func__, effect); + + ALOGD("%s: exit", __func__); + return 0; +} + +static int in_remove_audio_effect(const struct audio_stream *stream, effect_handle_t effect) +{ + struct stream_in *in = (struct stream_in *)stream; + + ALOGD("%s: enter with effect(%p)", __func__, effect); + + ALOGD("%s: exit", __func__); + return 0; +} + +/*****************************************************************************/ +/** **/ +/** The Audio Device Function Implementation **/ +/** **/ +/*****************************************************************************/ +static int adev_open_output_stream( + struct audio_hw_device *dev, + audio_io_handle_t handle, + audio_devices_t devices, + audio_output_flags_t flags, + struct audio_config *config, + struct audio_stream_out **stream_out, + const char *address __unused) +{ + struct audio_device *adev = (struct audio_device *)dev; + struct stream_out *out; + struct exynos_audio_usage *active_ausage; + int ret; + + ALOGD("device-%s: enter: io_handle (%d), sample_rate(%d) channel_mask(%#x) devices(%#x) flags(%#x)", + __func__, handle, config->sample_rate, config->channel_mask, devices, flags); + + *stream_out = NULL; + + /* Allocate memory for Structure audio_stream_out */ + out = (struct stream_out *)calloc(1, sizeof(struct stream_out)); + if (!out) { + ALOGE("device-%s: Fail to allocate memory for stream_out", __func__); + return -ENOMEM; + } + out->adev = adev; + + if (devices == AUDIO_DEVICE_NONE) + devices = AUDIO_DEVICE_OUT_SPEAKER; + + /* Save common parameters from Android Platform */ + out->handle = handle; + out->devices = devices; + out->flags = flags; + out->sample_rate = config->sample_rate; + out->channel_mask = config->channel_mask; + out->format = config->format; + + /* Set basic configuration from Flags */ + if ((out->flags & AUDIO_OUTPUT_FLAG_PRIMARY) != 0) { + /* Case: Normal Playback */ + ALOGD("device-%s: Requested open Primary output", __func__); + + out->ausage = AUSAGE_PLAYBACK_PRIMARY; + + /* Check whether a primary output already exists */ + if (adev->primary_output == NULL) { + adev->primary_output = out; + } else { + ALOGE("%s-%s: Primary output is already opened", usage_table[out->ausage], __func__); + ret = -EEXIST; + goto err_open; + } + + /* Set PCM Configuration */ + out->pcmconfig = pcm_config_primary; + check_and_set_pcm_config(&out->pcmconfig, config); + } else if ((out->flags & AUDIO_OUTPUT_FLAG_FAST) != 0) { + /* Case: Playback
with Low Latency */ + ALOGD("device-%s: Requested open fast output", __func__); + + out->ausage = AUSAGE_PLAYBACK_LOW_LATENCY; + + /* Set PCM Configuration */ + out->pcmconfig = pcm_config_low_latency; + check_and_set_pcm_config(&out->pcmconfig, config); + } else if ((out->flags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) { + /* Case: Playback with Deep Buffer */ + ALOGD("device-%s: Requested open Deep Buffer output", __func__); + + out->ausage = AUSAGE_PLAYBACK_DEEP_BUFFER; + + /* Set PCM Configuration */ + out->pcmconfig = pcm_config_deep_buffer; + check_and_set_pcm_config(&out->pcmconfig, config); + } else if ((out->flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) { + /* Case: Payback with Direct */ + if ((out->devices & AUDIO_DEVICE_OUT_AUX_DIGITAL) != 0) { + /* Sub-Case: Playback with Aux Digital */ + ALOGD("device-%s: Requested open Aux output", __func__); + + out->ausage = AUSAGE_PLAYBACK_AUX_DIGITAL; + } else if ((out->flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) { + /* Sub-Case: Playback with Offload */ + ALOGD("device-%s: Requested open Compress Offload output", __func__); + + out->ausage = AUSAGE_PLAYBACK_COMPR_OFFLOAD; + + if (is_supported_compressed_format(config->offload_info.format)) { + out->comprconfig.codec = (struct snd_codec *)calloc(1, sizeof(struct snd_codec)); + if (out->comprconfig.codec == NULL) { + ALOGE("%s-%s: Fail to allocate memory for Sound Codec", usage_table[out->ausage], __func__); + + ret = -ENOMEM; + goto err_open; + } + + /* Mapping function pointers in Structure audio_stream_in as real function */ + out->stream.set_callback = out_set_callback; + out->stream.pause = out_pause; + out->stream.resume = out_resume; + out->stream.drain = out_drain; + out->stream.flush = out_flush; + + if (config->offload_info.channel_mask + && (config->offload_info.channel_mask != config->channel_mask)) { + ALOGV("%s-%s: Channel Mask = Config(%u), Offload_Info(%u)", + usage_table[out->ausage], __func__, config->channel_mask, config->offload_info.channel_mask); + config->channel_mask = config->offload_info.channel_mask; + } + + if (config->offload_info.sample_rate && (config->offload_info.sample_rate != config->sample_rate)) { + ALOGV("%s-%s: Sampling Rate = Config(%u), Offload_Info(%u)", + usage_table[out->ausage], __func__, config->sample_rate, config->offload_info.sample_rate); + config->sample_rate = config->offload_info.sample_rate; + } + + /* Set Compress Offload Configuration */ + out->comprconfig.fragment_size = COMPRESS_OFFLOAD_FRAGMENT_SIZE; + out->comprconfig.fragments = COMPRESS_OFFLOAD_NUM_FRAGMENTS; + out->comprconfig.codec->id = get_snd_codec_id(config->offload_info.format); +// out->comprconfig.codec->ch_in = audio_channel_count_from_out_mask(config->channel_mask); + out->comprconfig.codec->ch_in = config->channel_mask; +// out->comprconfig.codec->ch_out = audio_channel_count_from_out_mask(config->channel_mask); + out->comprconfig.codec->ch_out = config->channel_mask; +// out->comprconfig.codec->sample_rate = compress_get_alsa_rate(config->sample_rate); + out->comprconfig.codec->sample_rate = config->sample_rate; + out->comprconfig.codec->bit_rate = config->offload_info.bit_rate; + out->comprconfig.codec->format = config->format; + + ALOGV("%s-%s: Sound Codec = ID(%u), Channel(%u), Sample Rate(%u), Bit Rate(%u)", + usage_table[out->ausage], __func__, out->comprconfig.codec->id, out->comprconfig.codec->ch_in, + out->comprconfig.codec->sample_rate, out->comprconfig.codec->bit_rate); + + if (flags & AUDIO_OUTPUT_FLAG_NON_BLOCKING) { + ALOGV("%s-%s: Need to work as 
Nonblock Mode!", usage_table[out->ausage], __func__); + out->nonblock_flag = 1; + + create_offload_callback_thread(out); + list_init(&out->offload_msg_list); + } + + out->ready_new_metadata = 1; + } else { + ALOGE("%s-%s: Unsupported Compressed Format(%x)", usage_table[out->ausage], + __func__, config->offload_info.format); + + ret = -EINVAL; + goto err_open; + } + } + } else { + /* Error Case: Not Supported usage */ + ALOGE("device-%s: Requested open un-supported output", __func__); + + ret = -EINVAL; + goto err_open; + } + + /* Mapping function pointers in Structure audio_stream_in as real function */ + out->stream.common.get_sample_rate = out_get_sample_rate; + out->stream.common.set_sample_rate = out_set_sample_rate; + out->stream.common.get_buffer_size = out_get_buffer_size; + out->stream.common.get_channels = out_get_channels; + out->stream.common.get_format = out_get_format; + out->stream.common.set_format = out_set_format; + out->stream.common.standby = out_standby; + out->stream.common.dump = out_dump; + out->stream.common.set_parameters = out_set_parameters; + out->stream.common.get_parameters = out_get_parameters; + out->stream.common.add_audio_effect = out_add_audio_effect; + out->stream.common.remove_audio_effect = out_remove_audio_effect; + out->stream.get_latency = out_get_latency; + out->stream.set_volume = out_set_volume; + out->stream.write = out_write; + out->stream.get_render_position = out_get_render_position; + out->stream.get_next_write_timestamp = out_get_next_write_timestamp; + out->stream.get_presentation_position = out_get_presentation_position; + + /* Set Platform-specific information */ + pthread_mutex_init(&out->lock, (const pthread_mutexattr_t *) NULL); + pthread_mutex_lock(&out->lock); + + out->sstate = STATE_STANDBY; // Not open Device + ALOGI("%s-%s: Transit to Standby", usage_table[out->ausage], __func__); + + pthread_mutex_lock(&adev->lock); + if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + /* Defense code to clear any dangling ausage, if media server is restarted */ + active_ausage = get_dangling_ausage_from_list(adev, AUSAGE_PLAYBACK, out->handle); + if (active_ausage) { + /* Clear dangling ausage from list, one ausage can exist at a time*/ + remove_audio_usage(adev, AUSAGE_PLAYBACK, active_ausage->stream.out); + ALOGD("%s-%s: Remove Dangling ausage from list!!", usage_table[out->ausage], __func__); + } + } + + /* Add this audio usage into Audio Usage List */ + add_audio_usage(adev, AUSAGE_PLAYBACK, (void *)out); + pthread_mutex_unlock(&adev->lock); + + pthread_mutex_unlock(&out->lock); + + /* Set Structure audio_stream_in for return */ + *stream_out = &out->stream; + + ALOGD("device-%s: Opened %s stream", __func__, usage_table[out->ausage]); + return 0; + +err_open: + free(out); + *stream_out = NULL; + + ALOGE("device-%s: Cannot open this stream as error(%d)", __func__, ret); + return ret; +} + +static void adev_close_output_stream( + struct audio_hw_device *dev, + struct audio_stream_out *stream) +{ + struct audio_device *adev = (struct audio_device *)dev; + struct stream_out *out = (struct stream_out *)stream; + audio_usage_id_t id = out->ausage; + + ALOGD("device-%s: enter with Audio Usage(%s)", __func__, usage_table[id]); + + if (out) { + out_standby(&stream->common); + + /* Clean up Platform-specific information */ + /* Remove this audio usage from Audio Usage List */ + pthread_mutex_lock(&out->lock); + pthread_mutex_lock(&adev->lock); + remove_audio_usage(adev, AUSAGE_PLAYBACK, (void *)out); + pthread_mutex_unlock(&adev->lock); + 
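As a side note to the compress-offload setup in adev_open_output_stream() above: the commented-out lines there hint that snd_codec.ch_in/ch_out were originally meant to carry channel counts (via audio_channel_count_from_out_mask()) rather than raw channel masks. The sketch below captures that variant for reference only; fill_offload_codec_sketch() is a made-up name, it assumes cfg->codec has already been allocated as in the code above and that the HAL's existing tinycompress and system audio headers are in scope, and whether the vendor's compress driver actually expects counts or masks is not stated in this patch.

    /* Sketch only: fill a compress offload codec config using channel counts.
     * Assumes cfg->codec was allocated by the caller, as in the code above. */
    static void fill_offload_codec_sketch(struct compr_config *cfg,
                                          const struct audio_config *acfg)
    {
        unsigned int ch = audio_channel_count_from_out_mask(acfg->channel_mask);

        cfg->fragment_size = COMPRESS_OFFLOAD_FRAGMENT_SIZE;
        cfg->fragments     = COMPRESS_OFFLOAD_NUM_FRAGMENTS;

        cfg->codec->id          = get_snd_codec_id(acfg->offload_info.format);
        cfg->codec->ch_in       = ch;   /* channel count, not a channel mask */
        cfg->codec->ch_out      = ch;
        cfg->codec->sample_rate = acfg->offload_info.sample_rate ?
                                  acfg->offload_info.sample_rate :
                                  acfg->sample_rate;
        cfg->codec->bit_rate    = acfg->offload_info.bit_rate;
        cfg->codec->format      = acfg->format;
    }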
pthread_mutex_unlock(&out->lock); + + /* Check close Primary Output */ + if (out->ausage == AUSAGE_PLAYBACK_PRIMARY) { + adev->primary_output = NULL; + } else if (out->ausage == AUSAGE_PLAYBACK_COMPR_OFFLOAD) { + if (out->nonblock_flag) + destroy_offload_callback_thread(out); + + pthread_mutex_lock(&out->lock); + if (out->comprconfig.codec != NULL) { + free(out->comprconfig.codec); + out->comprconfig.codec = NULL; + } + pthread_mutex_unlock(&out->lock); + } + + pthread_mutex_destroy(&out->lock); + + free(out); + } + + ALOGD("device-%s: Closed %s stream", __func__, usage_table[id]); + return; +} + +static int adev_set_parameters( + struct audio_hw_device *dev, + const char *kvpairs) +{ + struct audio_device *adev = (struct audio_device *)dev; + struct str_parms *parms; + char value[32]; + int val; + int ret = 0; + + ALOGD("device-%s: enter with key(%s)", __func__, kvpairs); + + pthread_mutex_lock(&adev->lock); + + parms = str_parms_create_str(kvpairs); + + ret = str_parms_get_str(parms, "screen_state", value, sizeof(value)); + if (ret >= 0) { + if (strcmp(value, AUDIO_PARAMETER_VALUE_ON) == 0) + adev->screen_off = false; + else + adev->screen_off = true; + } + + ret = str_parms_get_int(parms, "rotation", &val); + if (ret >= 0) { + switch (val) { + case 0: + case 90: + case 180: + case 270: + ALOGD("device-%s: Set is rotated with %d", __func__, val); + break; + default: + ALOGE("device-%s: unexpected rotation of %d", __func__, val); + } + } + + /* LTE Based Communication - CP Centric */ + ret = str_parms_get_str(parms, "VoLTEstate", value, sizeof(value)); + if (ret >= 0) { + if (!strcmp(value, "voice")) { + /* FIXME: Need to check Handover & control Voice PCM */ + adev->call_state = LTE_CALL; + ALOGD("device-%s: VoLTE Voice Call Start!!", __func__); + } else if (!strcmp(value, "end")) { + /* Need to check Handover & control Voice PCM */ + adev->call_state = CP_CALL; + ALOGD("device-%s: VoLTE Voice Call End!!", __func__); + } else + ALOGD("device-%s: Unknown VoLTE parameters = %s!!", __func__, value); + + adev->usage_amode = get_usage_mode(adev, adev->amode); + ALOGD("device-%s: Platform mode(%d) configured HAL mode(%s)", __func__, adev->amode, mode_table[adev->usage_amode]); + if (adev->usage_amode == AUSAGE_MODE_VOICE_CALL) + voice_set_call_mode(adev->voice, VOICE_CALL_CS); + else if (adev->usage_amode == AUSAGE_MODE_LTE_CALL) + voice_set_call_mode(adev->voice, VOICE_CALL_PS); + + voice_set_path(adev->voice, adev->primary_output->devices); + } + + /* WiFi Based Communication - AP Centric */ + ret = str_parms_get_str(parms, "VoWiFistate", value, sizeof(value)); + if (ret >= 0) { + if (!strcmp(value, "voice")) { + /* FIXME: Need to check Handover & control Voice PCM */ + adev->call_state = WIFI_CALL; + ALOGD("device-%s: VoWiFI Voice Call Start!!", __func__); + } else if (!strcmp(value, "end")) { + /* FIXME: Need to check Handover & control Voice PCM */ + adev->call_state = CP_CALL; + ALOGD("device-%s: VoWiFI Voice Call End!!", __func__); + } else + ALOGD("device-%s: Unknown VoWiFI parameters = %s!!", __func__, value); + + adev->usage_amode = get_usage_mode(adev, adev->amode); + ALOGD("device-%s: Platform mode(%d) configured HAL mode(%s)", __func__, adev->amode, mode_table[adev->usage_amode]); + + /* Need to re-routing */ + set_audio_route((void *)adev->primary_output, AUSAGE_PLAYBACK, adev->primary_output->ausage, true); + } + + str_parms_destroy(parms); + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit", __func__); + return ret; +} + +static char * adev_get_parameters( + const 
struct audio_hw_device *dev, + const char *keys) +{ + struct audio_device *adev = (struct audio_device *)dev; + char *str = NULL; + + ALOGD("device-%s: enter with key(%s)", __func__, keys); + + pthread_mutex_lock(&adev->lock); + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit with %s", __func__, str); + return str; +} + +static int adev_init_check(const struct audio_hw_device *dev) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + if (adev) { + if (adev->rinfo == NULL) { + ALOGE("device-%s: Audio Primary HW Device is not initialized", __func__); + ret = -EINVAL; + } else { + ALOGV("device-%s: Audio Primary HW Device is already opened & initialized", __func__); + } + } else { + ALOGE("device-%s: Audio Primary HW Device is not opened", __func__); + ret = -EINVAL; + } + + return ret; +} + +static int adev_set_voice_volume(struct audio_hw_device *dev, float volume) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter with volume level(%f)", __func__, volume); + + pthread_mutex_lock(&adev->lock); + adev->voice_volume = volume; + + if (adev->voice) + ret = voice_set_volume(adev->voice, volume); + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit", __func__); + return ret; +} + +static int adev_set_master_volume(struct audio_hw_device *dev, float volume) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter with volume level(%f)", __func__, volume); + + pthread_mutex_lock(&adev->lock); + ret = -ENOSYS; + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit, Not supported Master Volume by AudioHAL", __func__); + return ret; +} + +static int adev_get_master_volume(struct audio_hw_device *dev, float *volume) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter", __func__); + + pthread_mutex_lock(&adev->lock); + *volume = 0; + ret = -ENOSYS; + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit, Not supported Master Volume by AudioHAL", __func__); + return ret; +} + +static int adev_set_master_mute(struct audio_hw_device *dev, bool muted) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter with mute statue(%d)", __func__, muted); + + pthread_mutex_lock(&adev->lock); + ret = -ENOSYS; + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit, Not supported Master Mute by AudioHAL", __func__); + return ret; +} + +static int adev_get_master_mute(struct audio_hw_device *dev, bool *muted) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter", __func__); + + pthread_mutex_lock(&adev->lock); + *muted = false; + ret = -ENOSYS; + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit, Not supported Master Mute by AudioHAL", __func__); + return ret; +} + +static int adev_set_mode(struct audio_hw_device *dev, audio_mode_t mode) +{ + struct audio_device *adev = (struct audio_device *)dev; + + ALOGD("device-%s: enter with mode = %d", __func__, mode); + ALOGD("device-%s: previous mode = %d", __func__, adev->amode); + + pthread_mutex_lock(&adev->lock); + if (adev->amode != mode) { + if (adev->voice) { + if ((mode == AUDIO_MODE_NORMAL || mode == AUDIO_MODE_IN_COMMUNICATION) + && voice_is_in_call(adev->voice)) { + /* Change from Voice Call Mode to Normal Mode */ + /* Stop Voice Call */ + do_stop_voice_call(adev); + voice_set_audio_clock(adev->voice, VOICE_AUDIO_TURN_OFF_I2S); + 
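For reference, adev_set_parameters() above pulls keys such as "screen_state", "rotation", "VoLTEstate" and "VoWiFistate" out of a semicolon-separated key/value string with the libcutils str_parms helpers already used throughout this HAL. A minimal, self-contained sketch of that parsing pattern follows; parse_volte_state_sketch() is an illustrative name, not a function defined by this HAL.

    #include <errno.h>
    #include <stdbool.h>
    #include <string.h>
    #include <cutils/str_parms.h>   /* libcutils key/value string helpers */

    /* Sketch only: report whether a "VoLTEstate=voice" hint is present in kvpairs. */
    static int parse_volte_state_sketch(const char *kvpairs, bool *is_voice)
    {
        struct str_parms *parms = str_parms_create_str(kvpairs);
        char value[32];
        int found;

        if (parms == NULL)
            return -ENOMEM;

        found = str_parms_get_str(parms, "VoLTEstate", value, sizeof(value));
        if (found >= 0)
            *is_voice = (strcmp(value, "voice") == 0);

        str_parms_destroy(parms);
        return (found >= 0) ? 0 : -ENOENT;
    }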
voice_close(adev->voice); + ALOGD("device-%s: *** Stopped CP Voice Call ***", __func__); + + /* Changing Call State */ + if (adev->call_state == CP_CALL) adev->call_state = CALL_OFF; + } else if (mode == AUDIO_MODE_IN_CALL) { + /* Change from Normal Mode to Voice Call Mode */ + /* We cannot start Voice Call right now because we don't know which device will be used. + So, we need to delay Voice Call start when get the routing information for Voice Call */ + + /* Changing Call State */ + if (adev->call_state == CALL_OFF) adev->call_state = CP_CALL; + } + } else if (mode == AUDIO_MODE_IN_CALL) { + /* Request to change to Voice Call Mode, But Primary AudioHAL cannot support this */ + ALOGW("device-%s: CP RIL interface is NOT supported!!", __func__); + pthread_mutex_unlock(&adev->lock); + return -EINVAL; + } + + /* Changing Android Audio Mode */ + ALOGD("device-%s: changed mode from %d to %d", __func__, adev->amode, mode); + adev->amode = mode; + + /* Get Audio Usage Mode */ + adev->usage_amode = get_usage_mode(adev, adev->amode); + ALOGD("device-%s: Platform mode(%d) configured HAL mode(%s)", __func__, mode, mode_table[adev->usage_amode]); + } + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit", __func__); + return 0; +} + +static int adev_set_mic_mute(struct audio_hw_device *dev, bool state) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter with mic mute(%d)", __func__, state); + + pthread_mutex_lock(&adev->lock); + if (adev->voice) + ret = voice_set_mic_mute(adev->voice, state); + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit", __func__); + return ret; +} + +static int adev_get_mic_mute(const struct audio_hw_device *dev, bool *state) +{ + struct audio_device *adev = (struct audio_device *)dev; + int ret = 0; + + ALOGD("device-%s: enter", __func__); + + pthread_mutex_lock(&adev->lock); + if (adev->voice) + *state = voice_get_mic_mute(adev->voice); + else + *state = false; + pthread_mutex_unlock(&adev->lock); + + ALOGD("device-%s: exit", __func__); + return ret; +} + +static size_t adev_get_input_buffer_size( + const struct audio_hw_device *dev __unused, + const struct audio_config *config) +{ + size_t size = 0; + int channel_count = audio_channel_count_from_in_mask(config->channel_mask); + + size = get_input_buffer_size(config->sample_rate, channel_count); + + ALOGD("device-%s: exited with %d Bytes", __func__, (int)size); + return size; +} + +static int adev_open_input_stream( + struct audio_hw_device *dev, + audio_io_handle_t handle, + audio_devices_t devices, + struct audio_config *config, + struct audio_stream_in **stream_in, + audio_input_flags_t flags, + const char *address __unused, + audio_source_t source) +{ + struct audio_device *adev = (struct audio_device *)dev; + struct stream_in *in; + struct exynos_audio_usage *active_ausage; + int channel_count = audio_channel_count_from_in_mask(config->channel_mask); + int ret = 0; + + ALOGD("device-%s: enter: io_handle (%d), sample_rate(%d) channel_mask(%#x) devices(%#x) flags(%#x) source(%d)", + __func__, handle, config->sample_rate, config->channel_mask, devices, flags, source); + + *stream_in = NULL; + + if (check_input_parameters(config->sample_rate, config->format, channel_count) != 0) { + ALOGE("device-%s: Request has unsupported configuration!", __func__); + + config->format = audio_pcmformat_from_alsaformat(pcm_config_audio_capture.format); + config->sample_rate = (uint32_t)(pcm_config_audio_capture.rate); + config->channel_mask = 
audio_channel_in_mask_from_count(pcm_config_audio_capture.channels); + ALOGD("device-%s: Proposed configuration!", __func__); + return -EINVAL; + } + + /* Allocate memory for Structure audio_stream_in */ + in = (struct stream_in *)calloc(1, sizeof(struct stream_in)); + if (!in) { + ALOGE("device-%s: Fail to allocate memory for stream_in", __func__); + return -ENOMEM; + } + in->adev = adev; + + if (devices == AUDIO_DEVICE_NONE) + devices = AUDIO_DEVICE_IN_BUILTIN_MIC; + + /* Save common parameters from Android Platform */ + /* Mapping function pointers in Structure audio_stream_in as real function */ + in->stream.common.get_sample_rate = in_get_sample_rate; + in->stream.common.set_sample_rate = in_set_sample_rate; + in->stream.common.get_buffer_size = in_get_buffer_size; + in->stream.common.get_channels = in_get_channels; + in->stream.common.get_format = in_get_format; + in->stream.common.set_format = in_set_format; + in->stream.common.standby = in_standby; + in->stream.common.dump = in_dump; + in->stream.common.set_parameters = in_set_parameters; + in->stream.common.get_parameters = in_get_parameters; + in->stream.common.add_audio_effect = in_add_audio_effect; + in->stream.common.remove_audio_effect = in_remove_audio_effect; + in->stream.set_gain = in_set_gain; + in->stream.read = in_read; + in->stream.get_input_frames_lost = in_get_input_frames_lost; + + in->handle = handle; + in->devices = devices; + in->source = source; + in->flags = flags; + in->sample_rate = config->sample_rate; + in->channel_mask = config->channel_mask; + in->format = config->format; + + /* Set basic configuration from devices */ + { + /* Case: Capture For Recording */ + ALOGD("device-%s: Requested open Low Latency input", __func__); + + in->ausage = AUSAGE_CAPTURE_LOW_LATENCY; + + /* Set PCM Configuration */ + in->pcmconfig = pcm_config_audio_capture;// Default PCM Configuration + check_pcm_config(&in->pcmconfig, config); + } + + /* Set Platform-specific information */ + pthread_mutex_init(&in->lock, (const pthread_mutexattr_t *) NULL); + pthread_mutex_lock(&in->lock); + + in->sstate = STATE_STANDBY; + ALOGI("%s-%s: Transit to Standby", usage_table[in->ausage], __func__); + + /* Add this audio usage into Audio Usage List */ + pthread_mutex_lock(&adev->lock); + add_audio_usage(adev, AUSAGE_CAPTURE, (void *)in); + pthread_mutex_unlock(&adev->lock); + + pthread_mutex_unlock(&in->lock); + + /* Set Structure audio_stream_in for return */ + *stream_in = &in->stream; + + ALOGD("device-%s: Opened %s stream", __func__, usage_table[in->ausage]); + return 0; + +err_open: + free(in); + *stream_in = NULL; + + ALOGD("device-%s: Cannot open this stream as error(%d)", __func__, ret); + return ret; +} + +static void adev_close_input_stream( + struct audio_hw_device *dev, + struct audio_stream_in *stream) +{ + struct audio_device *adev = (struct audio_device *)dev; + struct stream_in *in = (struct stream_in *)stream; + audio_usage_id_t id = in->ausage; + + ALOGD("device-%s: enter with Audio Usage(%s)", __func__, usage_table[id]); + + if (in) { + in_standby(&stream->common); + + pthread_mutex_lock(&in->lock); + + /* Remove this audio usage from Audio Usage List */ + pthread_mutex_lock(&adev->lock); + remove_audio_usage(adev, AUSAGE_CAPTURE, (void *)in); + pthread_mutex_unlock(&adev->lock); + + pthread_mutex_unlock(&in->lock); + pthread_mutex_destroy(&in->lock); + free(in); + } + + ALOGD("device-%s: Closed %s stream", __func__, usage_table[id]); + return; +} + +static int adev_dump(const audio_hw_device_t *device, int fd) +{ + struct 
audio_device *adev = (struct audio_device *)device; + + ALOGV("device-%s: enter with file descriptor(%d)", __func__, fd); + + ALOGV("device-%s: exit - This function is not implemented yet!", __func__); + return 0; +} + +static int adev_close(hw_device_t *device) +{ + struct audio_device *adev = (struct audio_device *)device; + + ALOGV("device-%s: enter", __func__); + + if (adev) { + /* Clean up Platform-specific information */ + pthread_mutex_lock(&adev->lock); + if(adev->offload_visualizer_lib) + dlclose(adev->offload_visualizer_lib); + + if(adev->rinfo) + deinit_route(adev); + + if (adev->voice) { + voice_deinit(adev->voice); + adev->voice = NULL; + } + + pthread_mutex_unlock(&adev->lock); + pthread_mutex_destroy(&adev->lock); + + free(adev); + } + + ALOGV("device-%s: Closed Audio Primary HW Device", __func__); + return 0; +} + +static int onCallbackFromRILAudio(int event, const void *data, unsigned int datalen) +{ + switch (event) { + case VOICE_AUDIO_EVENT_RINGBACK_STATE_CHANGED: + ALOGD("device-%s: On RINGBACK_STATE_CHANGED event! Ignored", __func__); + if (data && datalen > 0) + ALOGD("device-%s: Data Length(4 expected) = %d", __func__, datalen); + break; + + case VOICE_AUDIO_EVENT_IMS_SRVCC_HANDOVER: + ALOGD("device-%s: On IMS_SRVCC_HANDOVER event!", __func__); + break; + + default: + ALOGD("device-%s: On Unsupported event (%d)!", __func__, event); + break; + } + + return 0; +} + +static int adev_open( + const hw_module_t* module, + const char* name, + hw_device_t** device) +{ + struct audio_device *adev; + + ALOGV("device-%s: enter", __func__); + + /* Check Interface Name. It must be AUDIO_HARDWARE_INTERFACE */ + if (strcmp(name, AUDIO_HARDWARE_INTERFACE) != 0) { + ALOGE("device-%s: Invalid request: Interface Name = %s", __func__, name); + return -EINVAL; + } + + /* Allocate memory for Structure audio_device */ + adev = calloc(1, sizeof(struct audio_device)); + if (!adev) { + ALOGE("device-%s: Fail to allocate memory for audio_device", __func__); + return -ENOMEM; + } + + /* Mapping function pointers in Structure audio_hw_device as real function */ + adev->hw_device.common.tag = HARDWARE_DEVICE_TAG; + adev->hw_device.common.version = AUDIO_DEVICE_API_VERSION_2_0;// Now, Must be Version 2.0 + adev->hw_device.common.module = (struct hw_module_t *) module; + adev->hw_device.common.close = adev_close; + + adev->hw_device.init_check = adev_init_check; + adev->hw_device.set_voice_volume = adev_set_voice_volume; + adev->hw_device.set_master_volume = adev_set_master_volume; + adev->hw_device.get_master_volume = adev_get_master_volume; + adev->hw_device.set_master_mute = adev_set_master_mute; + adev->hw_device.get_master_mute = adev_get_master_mute; + adev->hw_device.set_mode = adev_set_mode; + adev->hw_device.set_mic_mute = adev_set_mic_mute; + adev->hw_device.get_mic_mute = adev_get_mic_mute; + adev->hw_device.set_parameters = adev_set_parameters; + adev->hw_device.get_parameters = adev_get_parameters; + adev->hw_device.get_input_buffer_size = adev_get_input_buffer_size; + adev->hw_device.open_output_stream = adev_open_output_stream; + adev->hw_device.close_output_stream = adev_close_output_stream; + adev->hw_device.open_input_stream = adev_open_input_stream; + adev->hw_device.close_input_stream = adev_close_input_stream; + adev->hw_device.dump = adev_dump; + + /* Set Platform-specific information */ + pthread_mutex_init(&adev->lock, (const pthread_mutexattr_t *) NULL); + pthread_mutex_lock(&adev->lock); + + adev->amode = AUDIO_MODE_NORMAL; + adev->usage_amode = AUSAGE_MODE_NORMAL; + 
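For reference, both the visualizer hook set up a little further down in adev_open() and the RIL client loaded in voice_init() are resolved at run time through dlopen()/dlsym(). The sketch below shows that pattern with explicit dlerror() checking added; load_notify_sketch(), the library path and the symbol name are placeholders, not symbols provided by this HAL (audio_io_handle_t is an int typedef in the Android headers).

    #include <dlfcn.h>
    #include <stdio.h>

    typedef int (*notify_fn_t)(int io_handle);

    /* Sketch only: resolve one symbol from a shared library, with error checks.
     * In real code the returned handle must be kept (and later dlclose()d) by
     * the caller; here it is intentionally leaked on success for brevity. */
    static notify_fn_t load_notify_sketch(const char *libpath, const char *symbol)
    {
        void *handle = dlopen(libpath, RTLD_NOW);
        notify_fn_t fn;
        const char *err;

        if (!handle) {
            fprintf(stderr, "dlopen(%s) failed: %s\n", libpath, dlerror());
            return NULL;
        }

        dlerror();                          /* clear any stale error state */
        fn = (notify_fn_t)dlsym(handle, symbol);
        err = dlerror();
        if (err != NULL) {
            fprintf(stderr, "dlsym(%s) failed: %s\n", symbol, err);
            dlclose(handle);
            return NULL;
        }
        return fn;
    }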
adev->call_state = CALL_OFF; + adev->primary_output = NULL; + adev->pcm_capture = NULL; + adev->voice_volume = 0; + + /* Initialize Voice related service */ + adev->voice = voice_init(); + if (!adev->voice) + ALOGE("device-%s: Failed to init Voice Manager!", __func__); + else { + ALOGD("device-%s: Successed to init Voice Manager!", __func__); + + voice_set_callback(adev->voice, (void *)onCallbackFromRILAudio); + } + + /* Initialize Audio Route */ + if (!init_route(adev)) { + ALOGE("device-%s: Failed to init Route!", __func__); + + if (adev->voice) + voice_deinit(adev->voice); + adev->voice = NULL; + pthread_mutex_unlock(&adev->lock); + pthread_mutex_destroy(&adev->lock); + free(adev); + *device = NULL; + return -EINVAL; + } + else + ALOGD("device-%s: Successed to init Route!", __func__); + + /* Initialize Audio Usage List */ + list_init(&adev->audio_usage_list); + pthread_mutex_unlock(&adev->lock); + + /* Set Structure audio_hw_device for return */ + *device = &adev->hw_device.common; + + /* Setup the Link to communicate with VisualizerHAL */ + if (access(OFFLOAD_VISUALIZERHAL_PATH, R_OK) == 0) { + adev->offload_visualizer_lib = dlopen(OFFLOAD_VISUALIZERHAL_PATH, RTLD_NOW); + if (adev->offload_visualizer_lib) { + ALOGV("device-%s: DLOPEN is successful for %s", __func__, OFFLOAD_VISUALIZERHAL_PATH); + adev->notify_start_output_tovisualizer = (int (*)(audio_io_handle_t))dlsym(adev->offload_visualizer_lib, "notify_start_output"); + adev->notify_stop_output_tovisualizer = (int (*)(audio_io_handle_t))dlsym(adev->offload_visualizer_lib, "notify_stop_output"); + } else { + ALOGE("device-%s: DLOPEN is failed for %s", __func__, OFFLOAD_VISUALIZERHAL_PATH); + } + } + + ALOGD("device-%s: Opened Audio Primary HW Device", __func__); + return 0; +} + +/* Entry Point for AudioHAL (Primary Audio HW Module for Android) */ +static struct hw_module_methods_t hal_module_methods = { + .open = adev_open, +}; + +struct audio_module HAL_MODULE_INFO_SYM = { + .common = { + .tag = HARDWARE_MODULE_TAG, + .module_api_version = AUDIO_MODULE_API_VERSION_0_1, + .hal_api_version = HARDWARE_HAL_API_VERSION, + .id = AUDIO_HARDWARE_MODULE_ID, + .name = "Exynos Primary AudioHAL", + .author = "Samsung", + .methods = &hal_module_methods, + }, +}; diff --git a/libaudio/hal/audio_hw.h b/libaudio/hal/audio_hw.h new file mode 100644 index 0000000..aa8996b --- /dev/null +++ b/libaudio/hal/audio_hw.h @@ -0,0 +1,378 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __EXYNOS_AUDIOHAL_H__ +#define __EXYNOS_AUDIOHAL_H__ + +#include + +#include +#include + +/* Definition of AudioHAL */ +#include +#include +#include + +#include + + +/* PCM Interface of ALSA & Compress Offload Interface */ +#include +#include +#include + +/* SoC specific header */ +#include +/* Voice call - RIL interface */ +#include "voice_manager.h" + + + +/* Mixer Path configuration file for working AudioHAL */ +#define VENDOR_MIXER_XML_PATH "/vendor/etc/mixer_paths.xml" +/* Effect HAL and Visualizer library path for offload scenario */ +#define OFFLOAD_VISUALIZERHAL_PATH "/system/lib/soundfx/libexynosvisualizer.so" + + + + +/** + ** Audio Usages For AudioHAL + **/ +typedef enum { + AUSAGE_PLAYBACK, + AUSAGE_CAPTURE, +} audio_usage_type_t; + +/* Audio Usages */ +typedef enum { + AUSAGE_DEFAULT = -1, + AUSAGE_MIN = 0, + + AUSAGE_PLAYBACK_PRIMARY = 0, // For Primary Output Profile + AUSAGE_PLAYBACK_LOW_LATENCY, // For Fast Output Profile + AUSAGE_PLAYBACK_DEEP_BUFFER, // For Deep Buffer Profile + AUSAGE_PLAYBACK_COMPR_OFFLOAD, // For Compress Offload Profile + AUSAGE_PLAYBACK_AUX_DIGITAL, // For HDMI Profile + + + AUSAGE_CAPTURE_LOW_LATENCY, // For Primary Input Profile + + AUSAGE_MAX, + AUSAGE_CNT = AUSAGE_MAX +} audio_usage_id_t; + +/* usage mode definitions */ +typedef enum { + AUSAGE_MODE_NORMAL = 0, + AUSAGE_MODE_VOICE_CALL, + AUSAGE_MODE_VOIP_CALL, + AUSAGE_MODE_LTE_CALL, + AUSAGE_MODE_WIFI_CALL, + + AUSAGE_MODE_NONE, + AUSAGE_MODE_MAX, + AUSAGE_MODE_CNT = AUSAGE_MODE_MAX +} audio_usage_mode_t; + +/** + ** Stream Status + **/ +typedef enum { + STATE_STANDBY = 0, // Stream is opened, but Device(PCM or Compress) is not opened yet. + STATE_IDLE, // Stream is opened & Device(PCM or Compress) is opened. + STATE_PLAYING, // Stream is opened & Device(PCM or Compress) is opened & Device is working. 
+ STATE_PAUSED, // Stream is opened & Device(Compress) is opened & Device is pausing.(only available for Compress Offload Stream) +} stream_state_type_t; + +/** + ** Exynos Offload Message List + **/ +typedef enum { + OFFLOAD_MSG_INVALID = 0, + + OFFLOAD_MSG_WAIT_WRITE, + OFFLOAD_MSG_WAIT_DRAIN, + OFFLOAD_MSG_WAIT_PARTIAL_DRAIN, + OFFLOAD_MSG_EXIT, + + OFFLOAD_MSG_MAX, +} offload_msg_type_t; + +/** + ** Call State + **/ +typedef enum { + CALL_OFF = 0, + CP_CALL, + LTE_CALL, + WIFI_CALL, +} call_state_type_t; + +struct exynos_offload_msg { + struct listnode node; + offload_msg_type_t msg; +}; + +/** + ** Real Audio In/Output Device based on Target Device + **/ +typedef enum { + DEVICE_EARPIECE = 0, // handset or receiver + DEVICE_SPEAKER, + DEVICE_HEADSET, // headphone + mic + DEVICE_HEADPHONE, // headphone or earphone + DEVICE_SPEAKER_AND_HEADSET, + DEVICE_SPEAKER_AND_HEADPHONE, + DEVICE_BT_HEADSET, + + DEVICE_MAIN_MIC, + DEVICE_HEADSET_MIC, + DEVICE_BT_HEADSET_MIC, + + DEVICE_NONE, + DEVICE_MAX, + DEVICE_CNT = DEVICE_MAX +} device_type_t; + + +/** + ** Mapping Audio In/Output Device in Audio.h into Real In/Output Device of Set/Board + **/ +static device_type_t get_device_id(audio_devices_t devices) +{ + if (devices > AUDIO_DEVICE_BIT_IN) { + /* Input Devices */ + if (popcount(devices) == 2) { + switch (devices) { + case AUDIO_DEVICE_IN_BUILTIN_MIC: return DEVICE_MAIN_MIC; + case AUDIO_DEVICE_IN_WIRED_HEADSET: return DEVICE_HEADSET_MIC; + case AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET: return DEVICE_BT_HEADSET_MIC; + + default: return DEVICE_NONE; + } + } + } else { + /* Output Devices */ + if (popcount(devices) == 1) { + /* Single Device */ + switch (devices) { + case AUDIO_DEVICE_OUT_EARPIECE: return DEVICE_EARPIECE; + case AUDIO_DEVICE_OUT_SPEAKER: return DEVICE_SPEAKER; + case AUDIO_DEVICE_OUT_WIRED_HEADSET: return DEVICE_HEADSET; + case AUDIO_DEVICE_OUT_WIRED_HEADPHONE: return DEVICE_HEADPHONE; + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT: + return DEVICE_BT_HEADSET; + + case AUDIO_DEVICE_NONE: + default: return DEVICE_NONE; + } + } else if (popcount(devices) == 2) { + /* Dual Device */ + if (devices == (AUDIO_DEVICE_OUT_SPEAKER | AUDIO_DEVICE_OUT_WIRED_HEADSET)) + return DEVICE_SPEAKER_AND_HEADSET; + if (devices == (AUDIO_DEVICE_OUT_SPEAKER | AUDIO_DEVICE_OUT_WIRED_HEADPHONE)) + return DEVICE_SPEAKER_AND_HEADPHONE; + } + } + + return DEVICE_NONE; +} + +static device_type_t get_indevice_id_from_outdevice(device_type_t devices) +{ + switch (devices) { + case DEVICE_EARPIECE: + case DEVICE_SPEAKER: + case DEVICE_HEADPHONE: + case DEVICE_SPEAKER_AND_HEADPHONE: + return DEVICE_MAIN_MIC; + + case DEVICE_HEADSET: + case DEVICE_SPEAKER_AND_HEADSET: + return DEVICE_HEADSET_MIC; + + case DEVICE_BT_HEADSET: + return DEVICE_BT_HEADSET_MIC; + + case DEVICE_MAIN_MIC: + case DEVICE_HEADSET_MIC: + return devices; + + case DEVICE_NONE: + default: + return DEVICE_NONE; + } + + return DEVICE_NONE; +} + + +/** + ** Structure for Audio Output Stream + ** Implement audio_stream_out structure + **/ +struct stream_out { + struct audio_stream_out stream; + pthread_mutex_t lock; + + /* These variables are needed to save Android Request becuase pcm_config + and audio_config are different */ + audio_io_handle_t handle; + audio_devices_t devices; + audio_output_flags_t flags; + unsigned int sample_rate; + audio_channel_mask_t channel_mask; + audio_format_t format; + + audio_usage_id_t ausage; + stream_state_type_t sstate; + bool 
mixer_path_setup; + + /* PCM specific */ + struct pcm *pcminfo; + struct pcm_config pcmconfig; + uint64_t written; /* total frames written, not cleared when entering standby */ + + /* Offload specific */ + struct compress *comprinfo; + struct compr_config comprconfig; + int nonblock_flag; + float vol_left, vol_right; + + stream_callback_t offload_callback; + void *offload_cookie; + + pthread_t offload_callback_thread; + + pthread_cond_t offload_msg_cond; + struct listnode offload_msg_list; + + pthread_cond_t offload_sync_cond; + bool callback_thread_blocked; + + struct compr_gapless_mdata offload_metadata; + int ready_new_metadata; + + unsigned long err_count; + struct audio_device *adev; +}; + +/** + ** Structure for Audio Input Stream + ** Implement audio_stream_in structure + **/ +struct stream_in { + struct audio_stream_in stream; + pthread_mutex_t lock; + + /* These variables are needed to save Android Request becuase pcm_config + and audio_config are different */ + audio_io_handle_t handle; + audio_devices_t devices; + audio_source_t source; + audio_input_flags_t flags; + unsigned int sample_rate; + audio_channel_mask_t channel_mask; + audio_format_t format; + + audio_usage_id_t ausage; + stream_state_type_t sstate; + bool mixer_path_setup; + + /* PCM specific */ + struct pcm *pcminfo; + struct pcm_config pcmconfig; + + unsigned long err_count; + struct audio_device *adev; +}; + +/** + ** Exynos AudioHAL Usage List + **/ +union stream_ptr { + struct stream_in *in; + struct stream_out *out; +}; + +struct exynos_audio_usage { + struct listnode node; + + audio_usage_type_t usage_type; /* Audio Usage Type */ + audio_usage_id_t usage_id; /* Audio Usage ID */ + + device_type_t out_device_id; /* related out_device */ + device_type_t in_device_id; /* related in_device */ + audio_usage_mode_t out_device_amode; /*related out device usage mode */ + audio_usage_mode_t in_device_amode; /*related in device usage mode */ + + union stream_ptr stream; /* related stream_in/stream_out structure */ +}; + +/** + ** Routing Information + **/ +struct route_info { + unsigned int card_num; + struct audio_route *aroute; +}; + +/** + ** Structure for Audio Primary HW Module + ** Implement audio_hw_device structure + **/ +struct audio_device { + struct audio_hw_device hw_device; + pthread_mutex_t lock; /* see note below on mutex acquisition order */ + + audio_mode_t amode; + audio_usage_mode_t usage_amode; + call_state_type_t call_state; + bool screen_off; + + struct stream_out *primary_output; // Need to know which stream is primary + struct pcm *pcm_capture; // Capture PCM Device is unique + + struct route_info *rinfo; + struct mixer *mixerinfo; // For Volume control & Mute + struct mixer_ctl * vol_ctrl; + + struct listnode audio_usage_list; + + /* Voice */ + struct voice_manager *voice; + struct pcm *pcm_voice_rx; // PCM Device for Voice Capture + struct pcm *pcm_voice_tx; // PCM Device for Voice Playback + float voice_volume; + + /* BT-SCO */ + struct pcm *pcm_btsco_in; // PCM Device for bt-sco Capture + struct pcm *pcm_btsco_out; // PCM Device for bt-sco Playback + + /* Visualizer Library Link */ + void *offload_visualizer_lib; + int (*notify_start_output_tovisualizer)(audio_io_handle_t); + int (*notify_stop_output_tovisualizer)(audio_io_handle_t); +}; + +#define MAX_PATH_NAME_LEN 50 +#define MAX_ERR_COUNT 10 + +#endif // __EXYNOS_AUDIOHAL_H__ diff --git a/libaudio/hal/audio_hw_def.h b/libaudio/hal/audio_hw_def.h new file mode 100644 index 0000000..b711615 --- /dev/null +++ b/libaudio/hal/audio_hw_def.h @@ 
-0,0 +1,170 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __EXYNOS_AUDIOHAL_DEF_H__ +#define __EXYNOS_AUDIOHAL_DEF_H__ + +#include + +char * offload_msg_table[OFFLOAD_MSG_MAX] = { + [OFFLOAD_MSG_INVALID] = "Offload Message_Invalid", + [OFFLOAD_MSG_WAIT_WRITE] = "Offload Message_Wait to write", + [OFFLOAD_MSG_WAIT_DRAIN] = "Offload Message_Wait to drain", + [OFFLOAD_MSG_WAIT_PARTIAL_DRAIN] = "Offload Message_Wait to drain partially", + [OFFLOAD_MSG_EXIT] = "Offload Message_Wait to exit", +}; + +/** + ** Default PCM Configuration + ** + ** start_threshold: PCM Device start automatically + ** when PCM data in ALSA Buffer it equal or greater than this value. + ** stop_threshold: PCM Device stop automatically + ** when available room in ALSA Buffer it equal or greater than this value. + **/ +struct pcm_config pcm_config_primary = { + .channels = DEFAULT_OUTPUT_CHANNELS, + .rate = DEFAULT_OUTPUT_SAMPLING_RATE, + .period_size = PRIMARY_OUTPUT_PERIOD_SIZE, + .period_count = PRIMARY_OUTPUT_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, + .start_threshold = PRIMARY_OUTPUT_PERIOD_SIZE, + .stop_threshold = PRIMARY_OUTPUT_STOP_THREASHOLD, +// .silence_threshold = 0, +// .avail_min = PRIMARY_OUTPUT_PERIOD_SIZE, +}; + +struct pcm_config pcm_config_low_latency = { + .channels = DEFAULT_OUTPUT_CHANNELS, + .rate = DEFAULT_OUTPUT_SAMPLING_RATE, + .period_size = LOW_LATENCY_OUTPUT_PERIOD_SIZE, + .period_count = LOW_LATENCY_OUTPUT_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, + .start_threshold = LOW_LATENCY_OUTPUT_PERIOD_SIZE, + .stop_threshold = LOW_LATENCY_OUTPUT_STOP_THREASHOLD, +// .silence_threshold = 0, +// .avail_min = LOW_LATENCY_OUTPUT_PERIOD_SIZE, +}; + +struct pcm_config pcm_config_deep_buffer = { + .channels = DEFAULT_OUTPUT_CHANNELS, + .rate = DEFAULT_OUTPUT_SAMPLING_RATE, + .period_size = DEEP_BUFFER_OUTPUT_PERIOD_SIZE, + .period_count = DEEP_BUFFER_OUTPUT_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, + .start_threshold = DEEP_BUFFER_OUTPUT_PERIOD_SIZE, + .stop_threshold = DEEP_BUFFER_OUTPUT_STOP_THREASHOLD, +// .silence_threshold = 0, +// .avail_min = DEEP_BUFFER_OUTPUT_PERIOD_SIZE, +}; + +struct pcm_config pcm_config_audio_capture = { + .channels = DEFAULT_INPUT_CHANNELS, + .rate = DEFAULT_INPUT_SAMPLING_RATE, + .period_size = AUDIO_CAPTURE_PERIOD_SIZE, + .period_count = AUDIO_CAPTURE_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, +}; + +struct pcm_config pcm_config_vc_nb = { + .channels = DEFAULT_VOICE_CHANNELS, + .rate = NB_VOICE_SAMPLING_RATE, + .period_size = WB_VOICE_PERIOD_SIZE, + .period_count = WB_VOICE_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, +}; + +struct pcm_config pcm_config_vc_wb = { + .channels = DEFAULT_VOICE_CHANNELS, + .rate = WB_VOICE_SAMPLING_RATE, + .period_size = WB_VOICE_PERIOD_SIZE, + .period_count = WB_VOICE_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, +}; + +struct pcm_config pcm_config_bt_sco = { + .channels = DEFAULT_BT_SCO_CHANNELS, + .rate = WB_VOICE_SAMPLING_RATE, + .period_size 
= BT_SCO_PERIOD_SIZE, + .period_count = BT_SCO_PERIOD_COUNT, + .format = PCM_FORMAT_S16_LE, +}; + +/** + ** Sound Card and Sound Device for specific Audio usage + **/ +int sound_device_table[AUSAGE_MAX][2] = { + [AUSAGE_PLAYBACK_PRIMARY] = {PRIMARY_SOUND_CARD, PRIMARY_PLAYBACK_DEVICE}, + [AUSAGE_PLAYBACK_LOW_LATENCY] = {LOW_LATENCY_SOUND_CARD, LOW_LATENCY_PLAYBACK_DEVICE}, + [AUSAGE_PLAYBACK_DEEP_BUFFER] = {DEEP_BUFFER_SOUND_CARD, DEEP_BUFFER_PLAYBACK_DEVICE}, + [AUSAGE_PLAYBACK_COMPR_OFFLOAD] = {COMPR_OFFLOAD_SOUND_CARD, COMPR_OFFLOAD_PLAYBACK_DEVICE}, + [AUSAGE_PLAYBACK_AUX_DIGITAL] = {AUX_DIGITAL_SOUND_CARD, AUX_DIGITAL_PLAYBACK_DEVICE}, + [AUSAGE_CAPTURE_LOW_LATENCY] = {LOW_LATENCY_SOUND_CARD, LOW_LATENCY_CAPTURE_DEVICE}, +}; + +/** + ** Audio Usage & Mode Table for readable log messages + **/ +char * usage_table[AUSAGE_CNT] = { + [AUSAGE_PLAYBACK_PRIMARY] = "primary_out", + [AUSAGE_PLAYBACK_LOW_LATENCY] = "fast_out", + [AUSAGE_PLAYBACK_DEEP_BUFFER] = "deep_out", + [AUSAGE_PLAYBACK_COMPR_OFFLOAD] = "offload_out", + [AUSAGE_PLAYBACK_AUX_DIGITAL] = "aux_out", + [AUSAGE_CAPTURE_LOW_LATENCY] = "primary_in", +}; + +char * mode_table[AUSAGE_CNT] = { + [AUSAGE_MODE_NORMAL] = "normal", + [AUSAGE_MODE_VOICE_CALL] = "voice_call", + [AUSAGE_MODE_VOIP_CALL] = "voip_call", + [AUSAGE_MODE_LTE_CALL] = "LTE_call", + [AUSAGE_MODE_WIFI_CALL] = "WiFi_call", + [AUSAGE_MODE_NONE] = "none", +}; + +/** + ** Device Path(Codec to Device) Configuration based on Audio Input/Output Device + **/ +char * device_path_table[DEVICE_CNT] = { + [DEVICE_EARPIECE] = "handset", + [DEVICE_SPEAKER] = "speaker", + [DEVICE_HEADSET] = "headset", + [DEVICE_HEADPHONE] = "headset", + [DEVICE_SPEAKER_AND_HEADSET] = "speaker-headset", + [DEVICE_SPEAKER_AND_HEADPHONE] = "speaker-headset", + [DEVICE_BT_HEADSET] = "bt-sco-headset", + [DEVICE_MAIN_MIC] = "mic", + [DEVICE_HEADSET_MIC] = "headset-mic", + [DEVICE_BT_HEADSET_MIC] = "bt-sco-mic", + [DEVICE_NONE] = "none", +}; + +/* Audio Routing Path = Ausage_Mode or Service Name + -(Hyphen) + Device Name */ + +/** + ** Service Path(AP/CP to Codec) Configuration based on Audio Usage + **/ + char * mode_path_table[AUSAGE_CNT] = { + [AUSAGE_MODE_NORMAL] = "media", + [AUSAGE_MODE_VOICE_CALL] = "incall", + [AUSAGE_MODE_VOIP_CALL] = "communication", + [AUSAGE_MODE_LTE_CALL] = "incall", + [AUSAGE_MODE_WIFI_CALL] = "media", + [AUSAGE_MODE_NONE] = "none", +}; + +#endif // __EXYNOS_AUDIOHAL_DEF_H__ diff --git a/libaudio/hal/sec/voice_manager.c b/libaudio/hal/sec/voice_manager.c new file mode 100644 index 0000000..2bdd666 --- /dev/null +++ b/libaudio/hal/sec/voice_manager.c @@ -0,0 +1,312 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "voice_manager" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +#include + +#include "voice_manager.h" +#include + +#define VOLUME_STEPS_DEFAULT "5" +#define VOLUME_STEPS_PROPERTY "ro.config.vc_call_vol_steps" + +bool voice_is_in_call(struct voice_manager *voice) +{ + return voice->state_call; +} + +int voice_set_call_mode(struct voice_manager *voice, enum voice_call_mode cmode) +{ + int ret = 0; + + if (voice) { + voice->mode = cmode; + ALOGD("%s: Set Call Mode = %d!", __func__, voice->mode); + } + + return ret; +} + +int voice_set_mic_mute(struct voice_manager *voice, bool state) +{ + int ret = 0; + + voice->state_mic_mute = state; + if (voice->state_call) { + if (voice->rilc.ril_set_mute) + voice->rilc.ril_set_mute(voice->rilc.client, (int)state); + + ALOGD("%s: MIC Mute = %d!", __func__, state); + } + + return ret; +} + +bool voice_get_mic_mute(struct voice_manager *voice) +{ + ALOGD("%s: MIC Mute = %d!", __func__, voice->state_mic_mute); + return voice->state_mic_mute; +} + +int voice_set_volume(struct voice_manager *voice, float volume) +{ + int ret = 0; + + if (voice->state_call) { + if (voice->rilc.ril_set_volume) + voice->rilc.ril_set_volume(voice->rilc.client, voice->rilc.sound_type, (int)(volume * voice->volume_steps_max)); + + ALOGD("%s: Volume = %d(%f)!", __func__, (int)(volume * voice->volume_steps_max), volume); + } + + return ret; +} + +int voice_set_audio_clock(struct voice_manager *voice, enum ril_audio_clockmode clockmode) +{ + int ret = 0; + + if (voice->state_call) { + if (voice->rilc.ril_set_clock_sync) { + if (clockmode == VOICE_AUDIO_TURN_OFF_I2S) { + voice->rilc.ril_set_clock_sync(voice->rilc.client, SOUND_CLOCK_STOP); + ALOGD("%s: Sound Clock Stopped", __func__); + } + else if (clockmode == VOICE_AUDIO_TURN_ON_I2S) { + voice->rilc.ril_set_clock_sync(voice->rilc.client, SOUND_CLOCK_START); + ALOGD("%s: Sound Clock Started", __func__); + } + } + } + + return ret; +} + +static AudioPath map_incall_device(audio_devices_t devices) +{ + AudioPath device_type = SOUND_AUDIO_PATH_SPEAKER; + + switch(devices) { + case AUDIO_DEVICE_OUT_EARPIECE: + device_type = SOUND_AUDIO_PATH_HANDSET; + break; + case AUDIO_DEVICE_OUT_SPEAKER: + device_type = SOUND_AUDIO_PATH_SPEAKER; + break; + case AUDIO_DEVICE_OUT_WIRED_HEADSET: + device_type = SOUND_AUDIO_PATH_HEADSET; + break; + case AUDIO_DEVICE_OUT_WIRED_HEADPHONE: + device_type = SOUND_AUDIO_PATH_HEADSET; + break; + default: + device_type = SOUND_AUDIO_PATH_HANDSET; + break; + } + + return device_type; +} + +static SoundType map_sound_fromdevice(AudioPath path) +{ + SoundType sound_type = SOUND_TYPE_VOICE; + + switch(path) { + case SOUND_AUDIO_PATH_SPEAKER: + sound_type = SOUND_TYPE_SPEAKER; + break; + case SOUND_AUDIO_PATH_HEADSET: + sound_type = SOUND_TYPE_HEADSET; + break; + case SOUND_AUDIO_PATH_HANDSET: + default: + sound_type = SOUND_TYPE_VOICE; + break; + } + + return sound_type; +} + + +int voice_set_path(struct voice_manager *voice, audio_devices_t devices) +{ + int ret = 0; + AudioPath path; + + if (voice->state_call) { + /* Mapping */ + path = map_incall_device(devices); + voice->rilc.sound_type = map_sound_fromdevice(path); + + if (voice->rilc.ril_set_audio_path) { + ret = voice->rilc.ril_set_audio_path(voice->rilc.client, path, ORIGINAL_PATH); + if (ret == 0) + ALOGD("%s: Set Audio Path to %d!", __func__, path); + else { + ALOGE("%s: Failed to set path in RIL Client!", __func__); + return ret; + } + } else { + ALOGE("%s: ril_set_audio_path is not available.", 
__func__); + ret = -1; + } + } else { + ALOGE("%s: Voice is not IN_CALL", __func__); + ret = -1; + } + + return ret; +} + + +int voice_open(struct voice_manager *voice) +{ + int ret = -1; + void *client = NULL; + + if (!voice->state_call) { + if (voice->rilc.ril_open_client) { + client = voice->rilc.ril_open_client(); + if (client) { + if (voice->rilc.ril_is_connected(client)) + ALOGD("%s: RIL Client is already connected with RIL", __func__); + else if (voice->rilc.ril_connect(client)) { + ALOGD("%s: RIL Client cannot connect with RIL", __func__); + voice->rilc.ril_close_client(client); + return ret; + } + + voice->rilc.client = client; + voice->state_call = true; + ALOGD("%s: Opened RIL Client, Transit to IN_CALL!", __func__); + + ret = 0; + } else { + ALOGE("%s: Failed to open RIL Client!", __func__); + } + } else { + ALOGE("%s: ril_open_client is not available.", __func__); + } + } + + return ret; +} + +int voice_close(struct voice_manager *voice) +{ + int ret = 0; + + if (voice->state_call) { + if (voice->rilc.ril_close_client && voice->rilc.ril_disconnect) { + if (voice->rilc.ril_is_connected(voice->rilc.client)) { + voice->rilc.ril_disconnect(voice->rilc.client); + ALOGD("%s: RIL Client dis-connect with RIL", __func__); + } + + ret = voice->rilc.ril_close_client(voice->rilc.client); + if (ret == 0) { + voice->state_call = false; + ALOGD("%s: Closed RIL Client, Transit to NOT_IN_CALL!", __func__); + } else { + ALOGE("%s: Failed to close RIL Client!", __func__); + } + } else { + ALOGE("%s: ril_close_client is not available.", __func__); + ret = -1; + } + } + + return ret; +} + +int voice_set_callback(struct voice_manager * voice, void * callback_func) +{ + int ret = 0; + + return ret; +} + +void voice_deinit(struct voice_manager *voice) +{ + if (voice) { + if (voice->rilc.handle) + dlclose(voice->rilc.handle); + + free(voice); + } + + return ; +} + +struct voice_manager* voice_init(void) +{ + struct voice_manager *voice = NULL; + char property[PROPERTY_VALUE_MAX]; + + voice = calloc(1, sizeof(struct voice_manager)); + if (voice) { + if (access(RIL_CLIENT_LIBPATH, R_OK) == 0) { + voice->rilc.handle = dlopen(RIL_CLIENT_LIBPATH, RTLD_NOW); + if (voice->rilc.handle) { + voice->rilc.ril_open_client = (int (*)(void))dlsym(voice->rilc.handle, "OpenClient_RILD"); + voice->rilc.ril_close_client = (int (*)(void))dlsym(voice->rilc.handle, "CloseClient_RILD"); + voice->rilc.ril_connect = (int (*)(void))dlsym(voice->rilc.handle, "Connect_RILD"); + voice->rilc.ril_is_connected = (int (*)(void))dlsym(voice->rilc.handle, "isConnected_RILD"); + voice->rilc.ril_disconnect = (int (*)(void))dlsym(voice->rilc.handle, "Disconnect_RILD"); + voice->rilc.ril_set_volume = (int (*)(int))dlsym(voice->rilc.handle, "SetCallVolume"); + voice->rilc.ril_set_audio_path = (int (*)(int))dlsym(voice->rilc.handle, "SetCallAudioPath"); + voice->rilc.ril_set_clock_sync = (int (*)(int))dlsym(voice->rilc.handle, "SetCallClockSync"); + voice->rilc.ril_set_mute = (int (*)(int))dlsym(voice->rilc.handle, "SetMute"); + + ALOGD("%s: Successed to open SIPC RIL Client Interface!", __func__); + } else { + ALOGE("%s: Failed to open SIPC RIL Client Interface(%s)!", __func__, RIL_CLIENT_LIBPATH); + goto open_err; + } + } else { + ALOGE("%s: Failed to access SIPC RIL Client Interface(%s)!", __func__, RIL_CLIENT_LIBPATH); + goto open_err; + } + + voice->state_mic_mute = false; + voice->mode = VOICE_CALL_NONE; + voice->state_call = false; + + property_get(VOLUME_STEPS_PROPERTY, property, VOLUME_STEPS_DEFAULT); + voice->volume_steps_max = 
atoi(property); + /* this catches the case where VOLUME_STEPS_PROPERTY does not contain an integer */ + if (voice->volume_steps_max == 0) + voice->volume_steps_max = atoi(VOLUME_STEPS_DEFAULT); + } + + return voice; + +open_err: + if (voice) { + free(voice); + voice = NULL; + } + + return voice; +} diff --git a/libaudio/hal/sec/voice_manager.h b/libaudio/hal/sec/voice_manager.h new file mode 100644 index 0000000..5b89d09 --- /dev/null +++ b/libaudio/hal/sec/voice_manager.h @@ -0,0 +1,166 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __EXYNOS_VOICE_SERVICE_H__ +#define __EXYNOS_VOICE_SERVICE_H__ + +#include + +#define RIL_CLIENT_LIBPATH "/system/lib/libsecril-client.so" + + +typedef enum _SoundType { + SOUND_TYPE_VOICE, + SOUND_TYPE_SPEAKER, + SOUND_TYPE_HEADSET, + SOUND_TYPE_BTVOICE +} SoundType; + +typedef enum _AudioPath { + SOUND_AUDIO_PATH_HANDSET, + SOUND_AUDIO_PATH_HEADSET, + SOUND_AUDIO_PATH_SPEAKER, + SOUND_AUDIO_PATH_BLUETOOTH, + SOUND_AUDIO_PATH_STEREO_BT, + SOUND_AUDIO_PATH_HEADPHONE, + SOUND_AUDIO_PATH_BLUETOOTH_NO_NR, + SOUND_AUDIO_PATH_MIC1, + SOUND_AUDIO_PATH_MIC2, + SOUND_AUDIO_PATH_BLUETOOTH_WB, + SOUND_AUDIO_PATH_BLUETOOTH_WB_NO_NR, + SOUND_AUDIO_PATH_HANDSET_HAC, + SOUND_AUDIO_PATH_SLD, + SOUND_AUDIO_PATH_VOLTE_HANDSET = 30, + SOUND_AUDIO_PATH_VOLTE_HEADSET, + SOUND_AUDIO_PATH_VOLTE_SPEAKER, + SOUND_AUDIO_PATH_VOLTE_BLUETOOTH, + SOUND_AUDIO_PATH_VOLTE_STEREO_BT, + SOUND_AUDIO_PATH_VOLTE_HEADPHONE, + SOUND_AUDIO_PATH_VOLTE_BLUETOOTH_NO_NR, + SOUND_AUDIO_PATH_VOLTE_MIC1, + SOUND_AUDIO_PATH_VOLTE_MIC2, + SOUND_AUDIO_PATH_VOLTE_BLUETOOTH_WB, + SOUND_AUDIO_PATH_VOLTE_BLUETOOTH_WB_NO_NR, + SOUND_AUDIO_PATH_VOLTE_HANDSET_HAC, + SOUND_AUDIO_PATH_VOLTE_SLD, + SOUND_AUDIO_PATH_CALL_FWD = 50, + SOUND_AUDIO_PATH_HEADSET_MIC1, + SOUND_AUDIO_PATH_HEADSET_MIC2, + SOUND_AUDIO_PATH_HEADSET_MIC3 +} AudioPath; + +/* Voice Audio Multi-MIC */ +enum ril_audio_multimic { + VOICE_MULTI_MIC_OFF, + VOICE_MULTI_MIC_ON, +}; + +typedef enum _ExtraVolume { + ORIGINAL_PATH, + EXTRA_VOLUME_PATH, + EMERGENCY_PATH +} ExtraVolume; + +typedef enum _SoundClockCondition { + SOUND_CLOCK_STOP, + SOUND_CLOCK_START +} SoundClockCondition; + +typedef enum _MuteCondition { + TX_UNMUTE, /* 0x00: TX UnMute */ + TX_MUTE, /* 0x01: TX Mute */ + RX_UNMUTE, /* 0x02: RX UnMute */ + RX_MUTE, /* 0x03: RX Mute */ + RXTX_UNMUTE, /* 0x04: RXTX UnMute */ + RXTX_MUTE, /* 0x05: RXTX Mute */ +} MuteCondition; + +enum ril_audio_clockmode { + VOICE_AUDIO_TURN_OFF_I2S, + VOICE_AUDIO_TURN_ON_I2S +}; + +/* Voice Call Mode */ +enum voice_call_mode { + VOICE_CALL_NONE = 0, + VOICE_CALL_CS, // CS(Circit Switched) Call + VOICE_CALL_PS, // PS(Packet Switched) Call + VOICE_CALL_MAX, +}; + +/* Event from RIL Audio Client */ +#define VOICE_AUDIO_EVENT_BASE 10000 +#define VOICE_AUDIO_EVENT_RINGBACK_STATE_CHANGED (VOICE_AUDIO_EVENT_BASE + 1) +#define VOICE_AUDIO_EVENT_IMS_SRVCC_HANDOVER (VOICE_AUDIO_EVENT_BASE + 2) + + +struct rilclient_intf { + /* The 
pointer of interface library for RIL Client*/ + void *handle; + + /* The SIPC RIL Client Handle */ + /* This will be used as parameter of RIL Client Functions */ + void *client; + + SoundType sound_type; + + /* Function pointers */ + void *(*ril_open_client)(void); + int (*ril_close_client)(void *); + int (*ril_connect)(void *); + int (*ril_is_connected)(void *); + int (*ril_disconnect)(void *); + int (*ril_set_volume)(void *, SoundType, int); + int (*ril_set_audio_path)(void *, AudioPath, ExtraVolume); + int (*ril_set_clock_sync)(void *, SoundClockCondition); + int (*ril_set_mute)(void *, MuteCondition); +}; + + +struct voice_manager { + struct rilclient_intf rilc; + + bool state_call; // Current Call Status + enum voice_call_mode mode; // Current Call Mode + bool state_mic_mute; // Current Main MIC Mute Status + + int volume_steps_max; // Voice Volume maximum steps + + int (*callback)(int, const void *, unsigned int); // Callback Function Pointer +}; + + +/* General Functions */ +bool voice_is_in_call(struct voice_manager *voice); +int voice_set_call_mode(struct voice_manager *voice, enum voice_call_mode cmode); + +/* RIL Audio Client related Functions */ +int voice_open(struct voice_manager * voice); +int voice_close(struct voice_manager * voice); +int voice_set_callback(struct voice_manager * voice, void * callback_func); + +int voice_set_volume(struct voice_manager *voice, float volume); +int voice_set_path(struct voice_manager * voice, audio_devices_t devices); +int voice_set_multimic(struct voice_manager *voice, enum ril_audio_multimic mmic); +int voice_set_mic_mute(struct voice_manager *voice, bool state); +bool voice_get_mic_mute(struct voice_manager *voice); +int voice_set_audio_clock(struct voice_manager *voice, enum ril_audio_clockmode clockmode); + +/* Voice Manager related Functions */ +void voice_deinit(struct voice_manager *voice); +struct voice_manager * voice_init(void); + +#endif // __EXYNOS_VOICE_SERVICE_H__ diff --git a/libaudio/hal/ww/voice_manager.c b/libaudio/hal/ww/voice_manager.c new file mode 100644 index 0000000..2b9bf70 --- /dev/null +++ b/libaudio/hal/ww/voice_manager.c @@ -0,0 +1,337 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +#define LOG_TAG "voice_manager" +//#define LOG_NDEBUG 0 + +#include +#include +#include +#include +#include + +#include + +#include "voice_manager.h" +#include + +#define VOLUME_STEPS_DEFAULT "5" +#define VOLUME_STEPS_PROPERTY "ro.config.vc_call_vol_steps" + +bool voice_is_in_call(struct voice_manager *voice) +{ + return voice->state_call; +} + +int voice_set_call_mode(struct voice_manager *voice, enum voice_call_mode cmode) +{ + int ret = 0; + + if (voice) { + voice->mode = cmode; + ALOGD("%s: Set Call Mode = %d!", __func__, voice->mode); + } + + return ret; +} + +int voice_callback(void * handle, int event, const void *data, unsigned int datalen) +{ + struct voice_manager *voice = (struct voice_manager *)handle; + int (*funcp)(int, const void *, unsigned int) = NULL; + + ALOGD("%s: Called Callback Function from RIL Audio Client!", __func__); + if (voice) { + switch (event) { + case VOICE_AUDIO_EVENT_RINGBACK_STATE_CHANGED: + ALOGD("%s: Received RINGBACK_STATE_CHANGED event!", __func__); + break; + + case VOICE_AUDIO_EVENT_IMS_SRVCC_HANDOVER: + ALOGD("%s: Received IMS_SRVCC_HANDOVER event!", __func__); + break; + + default: + ALOGD("%s: Received Unsupported event (%d)!", __func__, event); + return 0; + } + + funcp = voice->callback; + funcp(event, data, datalen); + } + + return 0; +} + +int voice_set_mic_mute(struct voice_manager *voice, bool state) +{ + int ret = 0; + + voice->state_mic_mute = state; + if (voice->state_call) { + if (voice->rilc.ril_set_mute) { + if (state) + voice->rilc.ril_set_mute(VOICE_AUDIO_MUTE_ENABLED); + else + voice->rilc.ril_set_mute(VOICE_AUDIO_MUTE_DISABLED); + } + ALOGD("%s: MIC Mute = %d!", __func__, state); + } + + return ret; +} + +bool voice_get_mic_mute(struct voice_manager *voice) +{ + ALOGD("%s: MIC Mute = %d!", __func__, voice->state_mic_mute); + return voice->state_mic_mute; +} + +int voice_set_volume(struct voice_manager *voice, float volume) +{ + int vol, ret = 0; + + if (voice->state_call) { + if (voice->rilc.ril_set_audio_volume) + voice->rilc.ril_set_audio_volume((int)(volume * voice->volume_steps_max)); + + ALOGD("%s: Volume = %d(%f)!", __func__, (int)(volume * voice->volume_steps_max), volume); + } + + return ret; +} + +int voice_set_audio_clock(struct voice_manager *voice, enum ril_audio_clockmode clockmode) +{ + int ret = 0; + + if (voice->state_call) { + if (voice->rilc.ril_set_audio_clock) + voice->rilc.ril_set_audio_clock((int)clockmode); + + ALOGD("%s: AudioClock Mode = %s!", __func__, (clockmode? 
"ON" : "OFF")); + } + + return ret; +} + +static enum ril_audio_path map_incall_device(struct voice_manager *voice, audio_devices_t devices) +{ + enum ril_audio_path device_type = VOICE_AUDIO_PATH_HANDSET; + + switch(devices) { + case AUDIO_DEVICE_OUT_EARPIECE: + if (voice->mode == VOICE_CALL_CS) + device_type = VOICE_AUDIO_PATH_HANDSET; + else + device_type = VOICE_AUIDO_PATH_VOLTE_HANDSET; + break; + + case AUDIO_DEVICE_OUT_SPEAKER: + if (voice->mode == VOICE_CALL_CS) + device_type = VOICE_AUIDO_PATH_SPEAKRERPHONE; + else + device_type = VOICE_AUIDO_PATH_VOLTE_SPEAKRERPHONE; + break; + + case AUDIO_DEVICE_OUT_WIRED_HEADSET: + case AUDIO_DEVICE_OUT_WIRED_HEADPHONE: + if (voice->mode == VOICE_CALL_CS) + device_type = VOICE_AUIDO_PATH_HEADSET; + else + device_type = VOICE_AUIDO_PATH_VOLTE_HEADSET; + break; + + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT: + if (voice->mode == VOICE_CALL_CS) + device_type = VOICE_AUIDO_PATH_STEREO_BLUETOOTH; + else + device_type = VOICE_AUIDO_PATH_VOLTE_STEREO_BLUETOOTH; + break; + + default: + if (voice->mode == VOICE_CALL_CS) + device_type = VOICE_AUDIO_PATH_HANDSET; + else + device_type = VOICE_AUIDO_PATH_VOLTE_HANDSET; + break; + } + + return device_type; +} + +int voice_set_path(struct voice_manager *voice, audio_devices_t devices) +{ + int ret = 0; + enum ril_audio_path path; + + if (voice->state_call) { + /* Mapping */ + path = map_incall_device(voice, devices); + + if (voice->rilc.ril_set_audio_path) { + ret = voice->rilc.ril_set_audio_path(path); + if (ret == 0) { + ALOGD("%s: Set Audio Path to %d!", __func__, path); + } else { + ALOGE("%s: Failed to set path in RIL Client!", __func__); + return ret; + } + } else { + ALOGE("%s: ril_set_audio_path is not available.", __func__); + ret = -1; + } + } else { + ALOGE("%s: Voice is not IN_CALL", __func__); + ret = -1; + } + + return ret; +} + + +int voice_open(struct voice_manager *voice) +{ + int ret = 0; + + if (!voice->state_call) { + if (voice->rilc.ril_open_client) { + ret = voice->rilc.ril_open_client(); + if (ret == 0) { + voice->state_call = true; + ALOGD("%s: Opened RIL Client, Transit to IN_CALL!", __func__); + } else { + ALOGE("%s: Failed to open RIL Client!", __func__); + } + } else { + ALOGE("%s: ril_open_client is not available.", __func__); + ret = -1; + } + } + + return ret; +} + +int voice_close(struct voice_manager *voice) +{ + int ret = 0; + + if (voice->state_call) { + if (voice->rilc.ril_close_client) { + ret = voice->rilc.ril_close_client(); + if (ret == 0) { + voice->state_call = false; + ALOGD("%s: Closed RIL Client, Transit to NOT_IN_CALL!", __func__); + } else { + ALOGE("%s: Failed to close RIL Client!", __func__); + } + } else { + ALOGE("%s: ril_close_client is not available.", __func__); + ret = -1; + } + } + + return ret; +} + +int voice_set_callback(struct voice_manager * voice, void * callback_func) +{ + int ret = 0; + + if (voice->rilc.ril_register_callback) { + ret = voice->rilc.ril_register_callback((void *)voice, (int *)voice_callback); + if (ret == 0) { + ALOGD("%s: Succeded to register Callback Function!", __func__); + voice->callback = callback_func; + } + else + ALOGE("%s: Failed to register Callback Function!", __func__); + } + else { + ALOGE("%s: ril_register_callback is not available.", __func__); + ret = -1; + } + + return ret; +} + +void voice_deinit(struct voice_manager *voice) +{ + if (voice) { + if (voice->rilc.handle) + dlclose(voice->rilc.handle); + + free(voice); + } + + 
return ; +} + +struct voice_manager* voice_init(void) +{ + struct voice_manager *voice = NULL; + char property[PROPERTY_VALUE_MAX]; + + voice = calloc(1, sizeof(struct voice_manager)); + if (voice) { + if (access(RIL_CLIENT_LIBPATH, R_OK) == 0) { + voice->rilc.handle = dlopen(RIL_CLIENT_LIBPATH, RTLD_NOW); + if (voice->rilc.handle) { + voice->rilc.ril_open_client = (int (*)(void))dlsym(voice->rilc.handle, "Open"); + voice->rilc.ril_close_client = (int (*)(void))dlsym(voice->rilc.handle, "Close"); + voice->rilc.ril_register_callback = (int (*)(void *, int *))dlsym(voice->rilc.handle, "RegisterEventCallback"); + voice->rilc.ril_set_audio_volume = (int (*)(int))dlsym(voice->rilc.handle, "SetAudioVolume"); + voice->rilc.ril_set_audio_path = (int (*)(int))dlsym(voice->rilc.handle, "SetAudioPath"); + voice->rilc.ril_set_multi_mic = (int (*)(int))dlsym(voice->rilc.handle, "SetMultiMic"); + voice->rilc.ril_set_mute = (int (*)(int))dlsym(voice->rilc.handle, "SetMute"); + voice->rilc.ril_set_audio_clock = (int (*)(int))dlsym(voice->rilc.handle, "SetAudioClock"); + voice->rilc.ril_set_audio_loopback = (int (*)(int, int))dlsym(voice->rilc.handle, "SetAudioLoopback"); + + ALOGD("%s: Successed to open SMI RIL Client Interface!", __func__); + } else { + ALOGE("%s: Failed to open SMI RIL Client Interface(%s)!", __func__, RIL_CLIENT_LIBPATH); + goto open_err; + } + } else { + ALOGE("%s: Failed to access SMI RIL Client Interface(%s)!", __func__, RIL_CLIENT_LIBPATH); + goto open_err; + } + + voice->state_call = false; + voice->mode = VOICE_CALL_NONE; + voice->state_mic_mute = false; + + property_get(VOLUME_STEPS_PROPERTY, property, VOLUME_STEPS_DEFAULT); + voice->volume_steps_max = atoi(property); + /* this catches the case where VOLUME_STEPS_PROPERTY does not contain an integer */ + if (voice->volume_steps_max == 0) + voice->volume_steps_max = atoi(VOLUME_STEPS_DEFAULT); + + voice->callback = NULL; + } + + return voice; + +open_err: + if (voice) { + free(voice); + voice = NULL; + } + + return voice; +} diff --git a/libaudio/hal/ww/voice_manager.h b/libaudio/hal/ww/voice_manager.h new file mode 100644 index 0000000..357167a --- /dev/null +++ b/libaudio/hal/ww/voice_manager.h @@ -0,0 +1,187 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __EXYNOS_VOICE_SERVICE_H__ +#define __EXYNOS_VOICE_SERVICE_H__ + +#include + +#define RIL_CLIENT_LIBPATH "/system/lib/libsmiril-audio.so" + + +/* Syncup with RIL Audio Client */ + +/* Voice Audio Path */ +enum ril_audio_path { + VOICE_AUIDO_PATH_NONE = 0, + + VOICE_AUDIO_PATH_HANDSET = 1, + VOICE_AUIDO_PATH_HEADSET = 2, + VOICE_AUIDO_PATH_HANDSFREE = 3, + VOICE_AUIDO_PATH_BLUETOOTH = 4, + VOICE_AUIDO_PATH_STEREO_BLUETOOTH = 5, + VOICE_AUIDO_PATH_SPEAKRERPHONE = 6, + VOICE_AUIDO_PATH_35PI_HEADSET = 7, + VOICE_AUIDO_PATH_BT_NS_EC_OFF = 8, + VOICE_AUIDO_PATH_WB_BLUETOOTH = 9, + VOICE_AUIDO_PATH_WB_BT_NS_EC_OFF = 10, + VOICE_AUIDO_PATH_HANDSET_HAC = 11, + + VOICE_AUIDO_PATH_VOLTE_HANDSET = 65, + VOICE_AUIDO_PATH_VOLTE_HEADSET = 66, + VOICE_AUIDO_PATH_VOLTE_HFK = 67, + VOICE_AUIDO_PATH_VOLTE_BLUETOOTH = 68, + VOICE_AUIDO_PATH_VOLTE_STEREO_BLUETOOTH = 69, + VOICE_AUIDO_PATH_VOLTE_SPEAKRERPHONE = 70, + VOICE_AUIDO_PATH_VOLTE_35PI_HEADSET = 71, + VOICE_AUIDO_PATH_VOLTE_BT_NS_EC_OFF = 72, + VOICE_AUIDO_PATH_VOLTE_WB_BLUETOOTH = 73, + VOICE_AUIDO_PATH_VOLTE_WB_BT_NS_EC_OFF = 74, + VOICE_AUIDO_PATH_MAX +}; + +/* Voice Audio Multi-MIC */ +enum ril_audio_multimic { + VOICE_MULTI_MIC_OFF, + VOICE_MULTI_MIC_ON, +}; + +/* Voice Audio Volume */ +enum ril_audio_volume { + VOICE_AUDIO_VOLUME_INVALID = -1, + VOICE_AUDIO_VOLUME_LEVEL0 = 0, + VOICE_AUDIO_VOLUME_LEVEL1, + VOICE_AUDIO_VOLUME_LEVEL2, + VOICE_AUDIO_VOLUME_LEVEL3, + VOICE_AUDIO_VOLUME_LEVEL4, + VOICE_AUDIO_VOLUME_LEVEL5, + VOICE_AUDIO_VOLUME_LEVEL_MAX = VOICE_AUDIO_VOLUME_LEVEL5, +}; + +/* Voice Audio Mute */ +enum ril_audio_mute { + VOICE_AUDIO_MUTE_DISABLED, + VOICE_AUDIO_MUTE_ENABLED, +}; + +/* Voice Audio Clock */ +enum ril_audio_clockmode { + VOICE_AUDIO_TURN_OFF_I2S, + VOICE_AUDIO_TURN_ON_I2S, +}; + +/* Voice Loopback */ +enum ril_audio_loopback { + VOICE_AUDIO_LOOPBACK_STOP, + VOICE_AUDIO_LOOPBACK_START, +}; + +enum ril_audio_loopback_path { + VOICE_AUDIO_LOOPBACK_PATH_NA = 0, //0: N/A + + VOICE_AUDIO_LOOPBACK_PATH_HANDSET = 1, //1: handset + VOICE_AUDIO_LOOPBACK_PATH_HEADSET = 2, //2: headset + VOICE_AUDIO_LOOPBACK_PATH_HANDSFREE = 3, //3: handsfree + VOICE_AUDIO_LOOPBACK_PATH_BT = 4, //4: Bluetooth + VOICE_AUDIO_LOOPBACK_PATH_STEREO_BT = 5, //5: stereo Bluetooth + VOICE_AUDIO_LOOPBACK_PATH_SPK = 6, //6: speaker phone + VOICE_AUDIO_LOOPBACK_PATH_35PI_HEADSET = 7, //7: 3.5pi headset + VOICE_AUDIO_LOOPBACK_PATH_BT_NS_EC_OFF = 8, //8: BT NS/EC off + VOICE_AUDIO_LOOPBACK_PATH_WB_BT = 9, //9: WB Bluetooth + VOICE_AUDIO_LOOPBACK_PATH_WB_BT_NS_EC_OFF = 10, //10: WB BT NS/EC + VOICE_AUDIO_LOOPBACK_PATH_HANDSET_HAC = 11, //11: handset HAC + + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_HANDSET = 65, //65: VOLTE handset + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_HEADSET = 66, //66: VOLTE headset + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_HANDSFREE = 67, //67: VOLTE hands + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_BT = 68, //68: VOLTE Bluetooth + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_STEREO_BT = 69, //69: VOLTE stere + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_SPK = 70, //70: VOLTE speaker phone + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_35PI_HEADSET = 71, //71: VOLTE 3.5pi + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_BT_NS_EC_OFF = 72, //72: VOLTE BT NS + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_WB_BT = 73, //73: VOLTE WB Blueto + VOICE_AUDIO_LOOPBACK_PATH_VOLTE_WB_BT_NS_EC_OFF = 74, //74: VOLTE W + + VOICE_AUDIO_LOOPBACK_PATH_HEADSET_MIC1 = 129, //129: Headset ? MIC1 + VOICE_AUDIO_LOOPBACK_PATH_HEADSET_MIC2 = 130, //130: Headset ? MIC2 + VOICE_AUDIO_LOOPBACK_PATH_HEADSET_MIC3 = 131, //131: Headset ? 
MIC3 +}; + +/* Voice Call Mode */ +enum voice_call_mode { + VOICE_CALL_NONE = 0, + VOICE_CALL_CS, // CS(Circuit Switched) Call + VOICE_CALL_PS, // PS(Packet Switched) Call + VOICE_CALL_MAX, +}; + +/* Event from RIL Audio Client */ +#define VOICE_AUDIO_EVENT_BASE 10000 +#define VOICE_AUDIO_EVENT_RINGBACK_STATE_CHANGED (VOICE_AUDIO_EVENT_BASE + 1) +#define VOICE_AUDIO_EVENT_IMS_SRVCC_HANDOVER (VOICE_AUDIO_EVENT_BASE + 2) + +/* RIL Audio Client Interface Structure */ +struct rilclient_intf { + /* The pointer of interface library for RIL Client*/ + void *handle; + + /* Function pointers */ + int (*ril_open_client)(void); + int (*ril_close_client)(void); + int (*ril_register_callback)(void *, int *); + int (*ril_set_audio_volume)(int); + int (*ril_set_audio_path)(int); + int (*ril_set_multi_mic)(int); + int (*ril_set_mute)(int); + int (*ril_set_audio_clock)(int); + int (*ril_set_audio_loopback)(int, int); +}; + + +struct voice_manager { + struct rilclient_intf rilc; + + bool state_call; // Current Call Status + enum voice_call_mode mode; // Current Call Mode + bool state_mic_mute; // Current Main MIC Mute Status + + int volume_steps_max; // Voice Volume maximum steps + + int (*callback)(int, const void *, unsigned int); // Callback Function Pointer +}; + + +/* General Functions */ +bool voice_is_in_call(struct voice_manager *voice); +int voice_set_call_mode(struct voice_manager *voice, enum voice_call_mode cmode); + +/* RIL Audio Client related Functions */ +int voice_open(struct voice_manager * voice); +int voice_close(struct voice_manager * voice); +int voice_set_callback(struct voice_manager * voice, void * callback_func); + +int voice_set_volume(struct voice_manager *voice, float volume); +int voice_set_path(struct voice_manager * voice, audio_devices_t devices); +int voice_set_multimic(struct voice_manager *voice, enum ril_audio_multimic mmic); +int voice_set_mic_mute(struct voice_manager *voice, bool state); +bool voice_get_mic_mute(struct voice_manager *voice); +int voice_set_audio_clock(struct voice_manager *voice, enum ril_audio_clockmode clockmode); + +/* Voice Manager related Functions */ +void voice_deinit(struct voice_manager *voice); +struct voice_manager * voice_init(void); + +#endif // __EXYNOS_VOICE_SERVICE_H__ diff --git a/libcamera/34xx/ExynosCameraActivityControl.cpp b/libcamera/34xx/ExynosCameraActivityControl.cpp new file mode 100644 index 0000000..ff5329b --- /dev/null +++ b/libcamera/34xx/ExynosCameraActivityControl.cpp @@ -0,0 +1,851 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License.
+*/ + + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityControl" + +#include "ExynosCameraActivityControl.h" + +namespace android { + +ExynosCameraActivityControl::ExynosCameraActivityControl(__unused int cameraId) +{ + flagAutoFocusRunning = false; + touchAFMode = false; + touchAFModeForFlash = false; + + m_autofocusMgr = new ExynosCameraActivityAutofocus(); + m_flashMgr = new ExynosCameraActivityFlash(); + m_specialCaptureMgr = new ExynosCameraActivitySpecialCapture(); + m_uctlMgr = new ExynosCameraActivityUCTL(); + m_focusMode = FOCUS_MODE_AUTO; + m_fpsValue = 1; + m_halVersion = IS_HAL_VER_1_0; +} + +ExynosCameraActivityControl::~ExynosCameraActivityControl() +{ + if (m_autofocusMgr != NULL) { + delete m_autofocusMgr; + m_autofocusMgr = NULL; + } + + if (m_flashMgr != NULL) { + delete m_flashMgr; + m_flashMgr = NULL; + } + + if (m_specialCaptureMgr != NULL) { + delete m_specialCaptureMgr; + m_specialCaptureMgr = NULL; + } + + if (m_uctlMgr != NULL) { + delete m_uctlMgr; + m_uctlMgr = NULL; + } +} + +bool ExynosCameraActivityControl::autoFocus(int focusMode, int focusType) +{ + bool ret = true; + int newfocusMode = 0; + int currentAutofocusState = ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_NONE; + int newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_BASE; + int oldMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_BASE; + bool flagAutoFocusTringger = false; + bool flagPreFlash = false; + bool flagLockFocus = false; + + if (focusType == AUTO_FOCUS_SERVICE) { + m_flashMgr->updateAeState(); + } + ALOGD("DEBUG(%s[%d]):NeedCaptureFlash=%d", __FUNCTION__, __LINE__, m_flashMgr->getNeedCaptureFlash()); + if (m_flashMgr->getNeedCaptureFlash() == true) { + /* start Pre-flash */ + if (startPreFlash(this->getAutoFocusMode()) != NO_ERROR) + ALOGE("ERR(%s[%d]):start Pre-flash Fail", __FUNCTION__, __LINE__); + } + + if (focusType == AUTO_FOCUS_HAL) + newfocusMode = FOCUS_MODE_AUTO; + else + newfocusMode = this->getAutoFocusMode(); + + oldMgrAutofocusMode = m_autofocusMgr->getAutofocusMode(); + ALOGD("DEBUG(%s[%d]):newfocusMode=%d", __FUNCTION__, __LINE__, newfocusMode); + + switch (newfocusMode) { + case FOCUS_MODE_AUTO: + if (touchAFMode == true) + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_TOUCH; + else + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_AUTO; + break; + case FOCUS_MODE_INFINITY: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_INFINITY; + break; + case FOCUS_MODE_MACRO: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_MACRO; + break; + case FOCUS_MODE_CONTINUOUS_VIDEO: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO; + break; + case FOCUS_MODE_CONTINUOUS_PICTURE: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE; + break; + case FOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO; + break; + case FOCUS_MODE_TOUCH: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_TOUCH; + break; + case FOCUS_MODE_FIXED: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED; + break; + case FOCUS_MODE_EDOF: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF; + break; + default: + ALOGE("ERR(%s):Unsupported focusMode=%d", __FUNCTION__, newfocusMode); + return false; + break; + } + if (focusMode == newfocusMode) { + switch (newMgrAutofocusMode) 
{ + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + flagLockFocus = true; + break; + default: + break; + } + } + + /* + * Applications can call autoFocus(AutoFocusCallback) in this mode. + * If the autofocus is in the middle of scanning, + * the focus callback will return when it completes + * If the autofocus is not scanning, + * the focus callback will immediately return with a boolean + * that indicates whether the focus is sharp or not. + */ + + /* + * But, When Continuous af is running, + * auto focus api can be triggered, + * and then, af will be lock. (af lock) + */ + switch (newMgrAutofocusMode) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + flagAutoFocusTringger = false; + break; + default: + switch(oldMgrAutofocusMode) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + flagAutoFocusTringger = true; + break; + default: + if (oldMgrAutofocusMode == newMgrAutofocusMode) { + currentAutofocusState = m_autofocusMgr->getCurrentState(); + + if (currentAutofocusState != ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SCANNING) + flagAutoFocusTringger = true; + else + flagAutoFocusTringger = false; + } else { + flagAutoFocusTringger = true; + } + break; + } + break; + } + + if (flagAutoFocusTringger == true) { + if (m_autofocusMgr->flagAutofocusStart() == true) + m_autofocusMgr->stopAutofocus(); + + m_autofocusMgr->setAutofocusMode(newMgrAutofocusMode); + + m_autofocusMgr->startAutofocus(); + } else { + m_autofocusMgr->setAutofocusMode(newMgrAutofocusMode); + + switch (newMgrAutofocusMode) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF: + ret = false; + goto done; + break; + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + currentAutofocusState = m_autofocusMgr->getCurrentState(); + + if (m_autofocusMgr->flagLockAutofocus() == true && + currentAutofocusState != ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SUCCEESS) { + /* for make it fail */ + currentAutofocusState = ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_FAIL; + } + + if (currentAutofocusState == ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SUCCEESS) { + ret = true; + goto done; + } else if (currentAutofocusState == ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_FAIL) { + ret = false; + goto done; + } + + break; + default: + break; + } + } + + ret = m_autofocusMgr->getAutofocusResult(flagLockFocus); + +done : + /* The focus position is locked after autoFocus call with CAF + * But, the focus position is not locked 
when cancelAutoFocus after TAF is called + * CF) If cancelAutoFocus is called when TAF, the focusMode is changed from TAF to CAF */ + + if (flagLockFocus) { + m_autofocusMgr->lockAutofocus(); + } + + /* Stop pre flash */ + stopPreFlash(); + + return ret; +} + +bool ExynosCameraActivityControl::cancelAutoFocus(void) +{ + /* + * Cancels any auto-focus function in progress. + * Whether or not auto-focus is currently in progress, + * this function will return the focus position to the default. + * If the camera does not support auto-focus, this is a no-op. + */ + + touchAFMode = false; + touchAFModeForFlash = false; + int mode = m_autofocusMgr->getAutofocusMode(); + + switch (m_autofocusMgr->getAutofocusMode()) { + /* If applications want to resume the continuous focus, cancelAutoFocus must be called */ + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + if (m_autofocusMgr->flagLockAutofocus() == true) { + m_autofocusMgr->unlockAutofocus(); + + if (m_autofocusMgr->flagAutofocusStart() == false) + m_autofocusMgr->startAutofocus(); + } + break; + default: + if (m_autofocusMgr->flagLockAutofocus() == true) + m_autofocusMgr->unlockAutofocus(); + + if (m_autofocusMgr->flagAutofocusStart() == true) + m_autofocusMgr->stopAutofocus(); + + break; + } + + enum ExynosCameraActivityFlash::FLASH_TRIGGER triggerPath; + m_flashMgr->getFlashTrigerPath(&triggerPath); + + if ((triggerPath == ExynosCameraActivityFlash::FLASH_TRIGGER_LONG_BUTTON) || + ((m_flashMgr->getNeedCaptureFlash() == true) && (m_flashMgr->getFlashStatus()!= AA_FLASHMODE_OFF))) + this->cancelFlash(); + else + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_OFF); + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_OFF); + + return true; +} + +void ExynosCameraActivityControl::setAutoFocusMode(int focusMode) +{ + int newMgrAutofocusMode = 0; + int oldMgrAutofocusMode = m_autofocusMgr->getAutofocusMode(); + + m_focusMode = focusMode; + + ALOGD("DEBUG(%s[%d]):m_focusMode=%d", __FUNCTION__, __LINE__, m_focusMode); + + switch (m_focusMode) { + case FOCUS_MODE_INFINITY: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_INFINITY; + break; + case FOCUS_MODE_FIXED: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED; + break; + case FOCUS_MODE_EDOF: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF; + break; + case FOCUS_MODE_CONTINUOUS_VIDEO: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO; + break; + case FOCUS_MODE_CONTINUOUS_PICTURE: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE; + break; + case FOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + newMgrAutofocusMode = ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO; + break; + default: + break; + } + + ALOGD("DEBUG(%s[%d]):newMgrAutofocusMode=%d, oldMgrAutofocusMode=%d", + __FUNCTION__, __LINE__, newMgrAutofocusMode, oldMgrAutofocusMode); + + if (oldMgrAutofocusMode != newMgrAutofocusMode) { + if (m_autofocusMgr->flagLockAutofocus() == true) + m_autofocusMgr->unlockAutofocus(); + } + + if (newMgrAutofocusMode) { /* Continuous autofocus, infinity, fixed ... */ + /* + * If applications want to resume the continuous focus, + * cancelAutoFocus must be called. + * Restarting the preview will not resume the continuous autofocus. 
+ * To stop continuous focus, applications should change the focus mode to other modes. + */ + bool flagRestartAutofocus = false; + + if (oldMgrAutofocusMode == newMgrAutofocusMode) { + if (m_autofocusMgr->flagAutofocusStart() == false && + m_autofocusMgr->flagLockAutofocus() == false) + flagRestartAutofocus = true; + else + flagRestartAutofocus = false; + } else { + flagRestartAutofocus = true; + } + + if (flagRestartAutofocus == true && + m_autofocusMgr->flagAutofocusStart() == true) + m_autofocusMgr->stopAutofocus(); + + if (oldMgrAutofocusMode != newMgrAutofocusMode) + if (m_autofocusMgr->setAutofocusMode(newMgrAutofocusMode) == false) + ALOGE("ERR(%s):setAutofocusMode fail", __FUNCTION__); + + if (flagRestartAutofocus == true) + m_autofocusMgr->startAutofocus(); + + enum ExynosCameraActivityFlash::FLASH_TRIGGER triggerPath; + m_flashMgr->getFlashTrigerPath(&triggerPath); + + if ((triggerPath == ExynosCameraActivityFlash::FLASH_TRIGGER_LONG_BUTTON) || + ((m_flashMgr->getNeedCaptureFlash() == true) && (m_flashMgr->getFlashStatus() != AA_FLASHMODE_OFF))) { + this->cancelFlash(); + } else { + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_OFF); + } + + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_OFF); + + } else { /* single autofocus */ + switch (oldMgrAutofocusMode) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_INFINITY: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_FIXED: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_EDOF: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + if (m_autofocusMgr->flagAutofocusStart() == true) + m_autofocusMgr->stopAutofocus(); + + break; + default: + break; + } + } +} + +int ExynosCameraActivityControl::getAutoFocusMode(void) +{ + return m_focusMode; +} + +void ExynosCameraActivityControl::setAutoFcousArea(ExynosRect2 rect, int weight) +{ + m_autofocusMgr->setFocusAreas(rect, weight); +} + +void ExynosCameraActivityControl::setAutoFocusMacroPosition( + int oldAutoFocusMacroPosition, + int autoFocusMacroPosition) +{ + int macroPosition = ExynosCameraActivityAutofocus::AUTOFOCUS_MACRO_POSITION_BASE; + + switch (autoFocusMacroPosition) { + case 1: + macroPosition = ExynosCameraActivityAutofocus::AUTOFOCUS_MACRO_POSITION_CENTER; + break; + case 2: + macroPosition = ExynosCameraActivityAutofocus::AUTOFOCUS_MACRO_POSITION_CENTER_UP; + break; + default: + break; + } + + m_autofocusMgr->setMacroPosition(macroPosition); + + /* if macro option change, we need to restart CAF */ + if (oldAutoFocusMacroPosition != autoFocusMacroPosition) { + if ((m_autofocusMgr->getAutofocusMode() == ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE || + m_autofocusMgr->getAutofocusMode() == ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_VIDEO || + m_autofocusMgr->getAutofocusMode() == ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO) && + m_autofocusMgr->flagAutofocusStart() == true && + m_autofocusMgr->flagLockAutofocus() == false) { + m_autofocusMgr->stopAutofocus(); + m_autofocusMgr->startAutofocus(); + } + } +} + +int ExynosCameraActivityControl::getCAFResult(void) +{ + return m_autofocusMgr->getCAFResult(); +} + +void ExynosCameraActivityControl::stopAutoFocus(void) +{ + if (m_autofocusMgr->flagLockAutofocus() == true) + m_autofocusMgr->unlockAutofocus(); + if (m_autofocusMgr->flagAutofocusStart() 
== true) + m_autofocusMgr->stopAutofocus(); +} + +bool ExynosCameraActivityControl::setFlashMode(int flashMode) +{ + enum flash_mode m_flashMode; + enum aa_ae_flashmode aeflashMode; + + switch (flashMode) { + case FLASH_MODE_OFF: + m_flashMode = ::CAM2_FLASH_MODE_OFF; + aeflashMode = ::AA_FLASHMODE_OFF; + + m_flashMgr->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_OFF); + break; + case FLASH_MODE_AUTO: + m_flashMode = ::CAM2_FLASH_MODE_SINGLE; + /* aeflashMode = ::AA_FLASHMODE_AUTO; */ + aeflashMode = ::AA_FLASHMODE_CAPTURE; + + m_flashMgr->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_AUTO); + break; + case FLASH_MODE_ON: + m_flashMode = ::CAM2_FLASH_MODE_SINGLE; + /* aeflashMode = ::AA_FLASHMODE_ON; */ + aeflashMode = ::AA_FLASHMODE_ON_ALWAYS; + + m_flashMgr->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_ON); + break; + case FLASH_MODE_TORCH: + m_flashMode = ::CAM2_FLASH_MODE_TORCH; + aeflashMode = ::AA_FLASHMODE_ON_ALWAYS; + + m_flashMgr->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_TORCH); + break; + case FLASH_MODE_RED_EYE: + default: + ALOGE("ERR(%s):Unsupported value(%d)", __FUNCTION__, flashMode); + return false; + break; + } + return true; +} + +status_t ExynosCameraActivityControl::startPreFlash(int focusMode) +{ + enum ExynosCameraActivityFlash::FLASH_TRIGGER triggerPath; + bool flagPreFlash = false; + int ret = NO_ERROR; + + if (m_flashMgr->getFlashWaitCancel() == true) + m_flashMgr->setFlashWaitCancel(false); + + m_flashMgr->setCaptureStatus(true); + m_flashMgr->setMainFlashFiring(false); + m_flashMgr->getFlashTrigerPath(&triggerPath); + + if (touchAFModeForFlash == false && triggerPath != ExynosCameraActivityFlash::FLASH_TRIGGER_SHORT_BUTTON) { + if (focusMode == FOCUS_MODE_AUTO) { + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_LONG_BUTTON); + ALOGD("DEBUG(%s):FLASH_TRIGGER_LONG_BUTTON !!!", __FUNCTION__); + flagPreFlash = true; + } else { + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_SHORT_BUTTON); + ALOGD("DEBUG(%s):FLASH_TRIGGER_SHORT_BUTTON !!!", __FUNCTION__); + flagPreFlash = false; + } + } else { + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_TOUCH_DISPLAY); + ALOGD("DEBUG(%s):FLASH_TRIGGER_TOUCH_DISPLAY !!!", __FUNCTION__); + flagPreFlash = true; + } + + if (flagPreFlash == true) { + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_PRE_START); + + if (m_flashMgr->waitAeDone() == false) { + ALOGE("ERR(%s):waitAeDone() fail", __FUNCTION__); + ret = INVALID_OPERATION; + } + } + + return ret; +} + +bool ExynosCameraActivityControl::flagFocusing(struct camera2_shot_ext *shot_ext, int focusMode) +{ + bool ret = false; + + if (shot_ext == NULL) { + ALOGE("ERR(%s):shot_ext === NULL", __func__); + return false; + } + + switch (focusMode) { + case FOCUS_MODE_INFINITY: + case FOCUS_MODE_MACRO: + case FOCUS_MODE_FIXED: + case FOCUS_MODE_EDOF: + case FOCUS_MODE_CONTINUOUS_VIDEO: + return false; + default: + break; + } + + ExynosCameraActivityAutofocus::AUTOFOCUS_STATE autoFocusState = m_autofocusMgr->afState2AUTOFOCUS_STATE(shot_ext->shot.dm.aa.afState); + switch(autoFocusState) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SCANNING: + ret = true; + break; + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_NONE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SUCCEESS: + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_FAIL: + ret = false; + break; + default: + ALOGD("DEBUG(%s):invalid autoFocusState(%d)", __func__, autoFocusState); + ret = false; + 
break; + } + + return ret; +} + +void ExynosCameraActivityControl::stopPreFlash(void) +{ + enum ExynosCameraActivityFlash::FLASH_STEP flashStep; + m_flashMgr->getFlashStep(&flashStep); + + if (flashStep == ExynosCameraActivityFlash::FLASH_STEP_PRE_START) + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_PRE_DONE); +} + +bool ExynosCameraActivityControl::waitFlashMainReady() +{ + int totalWaitingTime = 0; + + while ((totalWaitingTime <= FLASH_WAITING_SLEEP_TIME * 30) && (m_flashMgr->checkPreFlash() == false)) { + usleep(FLASH_WAITING_SLEEP_TIME); + totalWaitingTime += FLASH_WAITING_SLEEP_TIME; + } + + if (FLASH_WAITING_SLEEP_TIME * 30 < totalWaitingTime) { + ALOGE("ERR(%s):waiting too much (%d msec)", __FUNCTION__, totalWaitingTime); + } + + return m_flashMgr->waitMainReady(); +} + +int ExynosCameraActivityControl::startMainFlash(void) +{ + int totalWaitingTime = 0; + int waitCount = 0; + unsigned int shotFcount = 0; + enum ExynosCameraActivityFlash::FLASH_TRIGGER triggerPath; + enum ExynosCameraActivityFlash::FLASH_STEP flashStep; + + m_flashMgr->getFlashTrigerPath(&triggerPath); + + /* check preflash to prevent FW lock-up */ + do { + usleep(FLASH_WAITING_SLEEP_TIME); + totalWaitingTime += FLASH_WAITING_SLEEP_TIME; + } while ((totalWaitingTime <= FLASH_WAITING_SLEEP_TIME * 30) && (m_flashMgr->checkPreFlash() == false)); + + if (m_flashMgr->checkPreFlash() == false) { + ALOGD("DEBUG(%s): preflash is not done : m_flashMgr->checkPreFlash(%d)", __FUNCTION__, m_flashMgr->checkPreFlash()); + return -1; + } + + if (m_flashMgr->waitMainReady() == false) + ALOGW("WARN(%s):waitMainReady() timeout", __FUNCTION__); + + if (m_flashMgr->getFlashWaitCancel() == true) { + ALOGD("DEBUG(%s): getFlashWaitCancel(%d)", __FUNCTION__, m_flashMgr->getFlashWaitCancel()); + return -1; + } + + m_flashMgr->getFlashStep(&flashStep); + if (flashStep == ExynosCameraActivityFlash::FLASH_STEP_OFF) { + ALOGD("DEBUG(%s): getFlashStep is changed FLASH_STEP_OFF", __FUNCTION__); + return -1; + } + m_flashMgr->setMainFlashFiring(true); + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_MAIN_START); + + /* get best shot frame count */ + totalWaitingTime = 0; + waitCount = 0; + m_flashMgr->resetShotFcount(); + do { + waitCount = m_flashMgr->getWaitingCount(); + if (0 < waitCount) { + usleep(FLASH_WAITING_SLEEP_TIME); + totalWaitingTime += FLASH_WAITING_SLEEP_TIME; + } + } while (0 < waitCount && totalWaitingTime <= FLASH_MAX_WAITING_TIME); + + if (0 < waitCount || FLASH_MAX_WAITING_TIME < totalWaitingTime) + ALOGE("ERR(%s):waiting too much (%d msec)", __FUNCTION__, totalWaitingTime); + + shotFcount = m_flashMgr->getShotFcount(); + + return shotFcount; +} + +void ExynosCameraActivityControl::stopMainFlash(void) +{ + m_flashMgr->setCaptureStatus(false); + m_flashMgr->setMainFlashFiring(false); + + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_OFF); + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_OFF); +} + +void ExynosCameraActivityControl::cancelFlash(void) +{ + ALOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_CANCEL); + m_flashMgr->setFlashWaitCancel(true); +} + +void ExynosCameraActivityControl::setHdrMode(bool hdrMode) +{ + if (hdrMode) + m_specialCaptureMgr->setCaptureMode(ExynosCameraActivitySpecialCapture::SCAPTURE_MODE_HDR); + else + m_specialCaptureMgr->setCaptureMode(ExynosCameraActivitySpecialCapture::SCAPTURE_MODE_NONE); +} + +int ExynosCameraActivityControl::getHdrFcount(int index) +{ 
+ int startFcount = 0; + int totalWaitingTime = 0; + unsigned int shotFcount = 0; + + do { + startFcount = m_specialCaptureMgr->getHdrStartFcount(index); + if (startFcount == 0) { + usleep(HDR_WAITING_SLEEP_TIME); + totalWaitingTime += HDR_WAITING_SLEEP_TIME; + } + } while (startFcount == 0 && totalWaitingTime <= HDR_MAX_WAITING_TIME); + + if (startFcount == 0 || totalWaitingTime >= HDR_MAX_WAITING_TIME) + ALOGE("ERR(%s):waiting too much (%d msec)", __FUNCTION__, totalWaitingTime); + + shotFcount = startFcount + m_specialCaptureMgr->getHdrWaitFcount(); + + return shotFcount; +} + +void ExynosCameraActivityControl::activityBeforeExecFunc(int pipeId, void *args) +{ + switch(pipeId) { + case PIPE_FLITE: + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_BEFORE, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_BEFORE, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_BEFORE, args); + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_BEFORE, args); + break; + case PIPE_3AA: + case PIPE_3AA_ISP: + if (m_halVersion != IS_HAL_VER_3_2) { + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE, args); + } else { + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE_HAL3, args); + } + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE, args); + /* Added it for FD-AF. 2015/05/04*/ + case PIPE_VRA: + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE, args); + break; + /* to set metadata of ISP buffer */ + case PIPE_POST_3AA_ISP: + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE, args); + break; + /* TO DO : seperate 3aa & isp pipe */ +/* + case PIPE_3AA: + case PIPE_3AA_ISP: + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_ISP_BEFORE, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_ISP_BEFORE, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_ISP_BEFORE, args); + break; +*/ + case PIPE_ISPC: + case PIPE_SCC: + if (m_halVersion != IS_HAL_VER_3_2) + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_BEFORE, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_BEFORE, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_BEFORE, args); + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_BEFORE, args); + break; + case PIPE_SCP: + if (m_halVersion != IS_HAL_VER_3_2) + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_BEFORE, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_BEFORE, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_BEFORE, args); + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_BEFORE, args); + break; + /* HW FD Orientation is enabled 2014/04/28 */ + case PIPE_ISP: + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_BEFORE, args); + break; + default: + break; + } +} + +void ExynosCameraActivityControl::activityAfterExecFunc(int pipeId, void *args) +{ + switch(pipeId) { + case PIPE_FLITE: + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_AFTER, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_AFTER, 
args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_AFTER, args); + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SENSOR_AFTER, args); + break; + case PIPE_3AA: + case PIPE_3AA_ISP: + if (m_halVersion != IS_HAL_VER_3_2) { + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_AFTER, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_AFTER, args); + } else { + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_AFTER_HAL3, args); + } + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_AFTER, args); + break; + case PIPE_POST_3AA_ISP: + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_3A_AFTER, args); + break; + /* TO DO : seperate 3aa & isp pipe + case PIPE_3AA: + case PIPE_3AA_ISP: + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_ISP_AFTER, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_ISP_AFTER, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_ISP_AFTER, args); + break; + */ + case PIPE_ISPC: + case PIPE_SCC: + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_AFTER, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_AFTER, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_AFTER, args); + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCC_AFTER, args); + break; + case PIPE_SCP: + m_autofocusMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_AFTER, args); + m_flashMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_AFTER, args); + m_specialCaptureMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_AFTER, args); + m_uctlMgr->execFunction(ExynosCameraActivityBase::CALLBACK_TYPE_SCP_AFTER, args); + break; + default: + break; + } +} + +ExynosCameraActivityFlash *ExynosCameraActivityControl::getFlashMgr(void) +{ + return m_flashMgr; +} + +ExynosCameraActivitySpecialCapture *ExynosCameraActivityControl::getSpecialCaptureMgr(void) +{ + return m_specialCaptureMgr; +} + +ExynosCameraActivityAutofocus*ExynosCameraActivityControl::getAutoFocusMgr(void) +{ + return m_autofocusMgr; +} + +ExynosCameraActivityUCTL*ExynosCameraActivityControl::getUCTLMgr(void) +{ + return m_uctlMgr; +} + +void ExynosCameraActivityControl::setFpsValue(int fpsValue) +{ + m_fpsValue = fpsValue; + + m_autofocusMgr->setFpsValue(m_fpsValue); + m_flashMgr->setFpsValue(m_fpsValue); + + ALOGI("INFO(%s[%d]): m_fpsValue(%d)", __FUNCTION__, __LINE__, m_fpsValue); +} + +int ExynosCameraActivityControl::getFpsValue() +{ + return m_fpsValue; +} + +void ExynosCameraActivityControl::setHalVersion(int halVersion) +{ + m_halVersion = halVersion; + ALOGI("INFO(%s[%d]): m_halVersion(%d)", __FUNCTION__, __LINE__, m_halVersion); + + return; +} +}; /* namespace android */ + diff --git a/libcamera/34xx/ExynosCameraActivityControl.h b/libcamera/34xx/ExynosCameraActivityControl.h new file mode 100644 index 0000000..b9c3cef --- /dev/null +++ b/libcamera/34xx/ExynosCameraActivityControl.h @@ -0,0 +1,124 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_ACTIVITY_CONTROL_H +#define EXYNOS_CAMERA_ACTIVITY_CONTROL_H + +#include +#include +#include +#include +#include + +#include "ExynosCameraActivityAutofocus.h" +#include "ExynosCameraActivityFlash.h" +#include "ExynosCameraActivitySpecialCapture.h" +#include "ExynosCameraActivityUCTL.h" +#include "ExynosCameraSensorInfo.h" +#include "ExynosRect.h" + +namespace android{ + +enum auto_focus_type { + AUTO_FOCUS_SERVICE = 0, + AUTO_FOCUS_HAL, +}; + +class ExynosCameraActivityControl { +public: + +public: + /* Constructor */ + ExynosCameraActivityControl(int cameraId); + + /* Destructor */ + virtual ~ExynosCameraActivityControl(); + /* Destroy the instance */ + bool destroy(void); + /* Check if the instance was created */ + bool flagCreate(void); + /* Starts camera auto-focus and registers a callback function to run when the camera is focused. */ + bool autoFocus(int focusMode, int focusType); + /* Cancel auto-focus operation */ + bool cancelAutoFocus(void); + /* Set auto-focus mode */ + void setAutoFocusMode(int focusMode); + int getAutoFocusMode(void); + /* Set auto-focus area */ + void setAutoFcousArea(ExynosRect2 rect, int weight); + /* Sets auto-focus macro position */ + void setAutoFocusMacroPosition( + int oldautoFocusMacroPosition, + int autoFocusMacroPosition); + int getCAFResult(void); + /* Check Whether auto-focus running */ + bool flagFocusing(struct camera2_shot_ext *shot_ext, int focusMode); + /* Stop auto-focus */ + void stopAutoFocus(void); + /* Sets flash mode */ + bool setFlashMode(int flashMode); + /* Start pre flash */ + status_t startPreFlash(int focusMode); + /* Stop pre flash */ + void stopPreFlash(void); + /* Start main flash */ + bool waitFlashMainReady(); + + /* Start main flash */ + int startMainFlash(void); + /* Stop main flash */ + void stopMainFlash(void); + /* Cancel flash */ + void cancelFlash(void); + /* Sets HDR mode */ + void setHdrMode(bool hdrMode); + int getHdrFcount(int index); + + /* Sets FPS Value */ + void setFpsValue(int fpsValue); + int getFpsValue(); + + void setHalVersion(int halVersion); + + void activityBeforeExecFunc(int pipeId, void *args); + void activityAfterExecFunc(int pipeId, void *args); + ExynosCameraActivityFlash *getFlashMgr(void); + ExynosCameraActivitySpecialCapture *getSpecialCaptureMgr(void); + ExynosCameraActivityAutofocus *getAutoFocusMgr(void); + ExynosCameraActivityUCTL *getUCTLMgr(void); + +public: + bool flagAutoFocusRunning; + bool touchAFMode; + bool touchAFModeForFlash; + +private: + ExynosCameraActivityAutofocus *m_autofocusMgr; + ExynosCameraActivityFlash *m_flashMgr; + ExynosCameraActivitySpecialCapture * m_specialCaptureMgr; + ExynosCameraActivityUCTL * m_uctlMgr; + + unsigned int m_fpsValue; + int m_focusMode; + + int m_halVersion; +}; + +} + +#endif + diff --git a/libcamera/34xx/ExynosCameraDefine.h b/libcamera/34xx/ExynosCameraDefine.h new file mode 100644 index 0000000..5ff4316 --- /dev/null +++ b/libcamera/34xx/ExynosCameraDefine.h @@ -0,0 +1,130 @@ + +#ifndef EXYNOS_CAMERA_CLASS_COMMON_DEFINE +#define EXYNOS_CAMERA_CLASS_COMMON_DEFINE + +#include +#include +#include 
+#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include "csc.h" + +#include "ExynosCameraParameters.h" + +#ifdef USE_CAMERA2_API_SUPPORT +#include "ExynosCamera3FrameFactory.h" +#else +#include "ExynosCameraFrameFactory.h" +#include "ExynosCameraFrameFactoryPreview.h" +#include "ExynosCameraFrameFactory3aaIspM2M.h" +#include "ExynosCameraFrameFactory3aaIspM2MTpu.h" +#include "ExynosCameraFrameFactory3aaIspOtf.h" +#include "ExynosCameraFrameFactory3aaIspOtfTpu.h" +#include "ExynosCameraFrameReprocessingFactory.h" +#include "ExynosCameraFrameFactoryVision.h" +#include "ExynosCameraFrameFactoryFront.h" +#endif + +#include "ExynosCameraMemory.h" +#include "ExynosCameraBufferManager.h" +#include "ExynosCameraBufferLocker.h" +#include "ExynosCameraActivityControl.h" +#include "ExynosCameraScalableSensor.h" +#include "ExynosCameraFrameSelector.h" + +namespace android { + +#ifdef BURST_CAPTURE +#define BURST_SAVE_PATH_PHONE "/data/media/0" +#define BURST_SAVE_PATH_EXT "/mnt/extSdCard" +#define BURST_CAPTURE_FILEPATH_SIZE 100 +#endif + +typedef struct ExynosCameraJpegCallbackBuffer { + ExynosCameraBuffer buffer; + int callbackNumber; +} jpeg_callback_buffer_t; + +typedef ExynosCameraList frame_queue_t; +typedef ExynosCameraList buffer_queue_t; + +#ifdef USE_CAMERA2_API_SUPPORT +#else +typedef ExynosCameraList framefactory_queue_t; +#endif + +typedef ExynosCameraList jpeg_callback_queue_t; +typedef ExynosCameraList postview_callback_queue_t; +typedef ExynosCameraList thumbnail_callback_queue_t; + +typedef ExynosCameraList capture_queue_t; + + +typedef enum buffer_direction_type { + SRC_BUFFER_DIRECTION = 0, + DST_BUFFER_DIRECTION = 1, + INVALID_BUFFER_DIRECTION, +} buffer_direction_type_t; + +enum jpeg_save_thread { + JPEG_SAVE_THREAD0 = 0, + JPEG_SAVE_THREAD1 = 1, + JPEG_SAVE_THREAD2, + JPEG_SAVE_THREAD_MAX_COUNT, +}; +/* +typedef struct { + uint32_t frameNumber; + camera3_stream_buffer streamBuffer; +} result_buffer_info_t; +*/ +/* +enum FRAME_FACTORY_TYPE { + FRAME_FACTORY_TYPE_CAPTURE_PREVIEW = 0, + FRAME_FACTORY_TYPE_RECORDING_PREVIEW, + FRAME_FACTORY_TYPE_DUAL_PREVIEW, + FRAME_FACTORY_TYPE_REPROCESSING, + FRAME_FACTORY_TYPE_VISION, + FRAME_FACTORY_TYPE_MAX, +}; +*/ +enum FRAME_FACTORY_TYPE { + FRAME_FACTORY_TYPE_3AA_ISP_M2M = 0, + FRAME_FACTORY_TYPE_3AA_ISP_M2M_TPU, + FRAME_FACTORY_TYPE_3AA_ISP_OTF, + FRAME_FACTORY_TYPE_3AA_ISP_OTF_TPU, + FRAME_FACTORY_TYPE_CAPTURE_PREVIEW, + FRAME_FACTORY_TYPE_RECORDING_PREVIEW, + FRAME_FACTORY_TYPE_DUAL_PREVIEW, + FRAME_FACTORY_TYPE_REPROCESSING, + FRAME_FACTORY_TYPE_REPROCESSING_NV21, + FRAME_FACTORY_TYPE_VISION, + FRAME_FACTORY_TYPE_MAX, +}; + +enum EXYNOS_CAMERA_STREAM_CHARACTERISTICS_ID { + HAL_STREAM_ID_RAW = 0, + HAL_STREAM_ID_PREVIEW = 1, + HAL_STREAM_ID_VIDEO = 2, + HAL_STREAM_ID_JPEG = 3, + HAL_STREAM_ID_CALLBACK = 4, + HAL_STREAM_ID_ZSL_INPUT = 5, + HAL_STREAM_ID_ZSL_OUTPUT = 6, + HAL_STREAM_ID_MAX = 7, +}; + +//typedef ExynosCameraList framefactory_queue_t; + +} + +#endif diff --git a/libcamera/34xx/ExynosCameraMetadata.h b/libcamera/34xx/ExynosCameraMetadata.h new file mode 100644 index 0000000..0f98744 --- /dev/null +++ b/libcamera/34xx/ExynosCameraMetadata.h @@ -0,0 +1,40 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_METADATA_H +#define EXYNOS_CAMERA_METADATA_H + +#include "fimc-is-metadata.h" + +namespace android { + +class ExynosCameraMetadata { +public: + ExynosCameraMetadata(); + virtual ~ExynosCameraMetadata(); + + bool create(); + + status_t setCropRegion(); + status_t getCropRegion(); + +private: + struct camera2_shot_ext m_metadata; +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/34xx/ExynosCameraParameters.h b/libcamera/34xx/ExynosCameraParameters.h new file mode 100644 index 0000000..6f6eff3 --- /dev/null +++ b/libcamera/34xx/ExynosCameraParameters.h @@ -0,0 +1,460 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PARAMETERS_H +#define EXYNOS_CAMERA_PARAMETERS_H + +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "ExynosCameraConfig.h" + +#include "ExynosCameraSensorInfoBase.h" +#include "ExynosCameraCounter.h" +#include "fimc-is-metadata.h" +#include "ExynosRect.h" +#include "exynos_format.h" +#include "ExynosExif.h" +#include "ExynosCameraUtils.h" +//#include "ExynosCameraUtilsModule.h" +#include "ExynosCameraActivityControl.h" +#include "ExynosCameraAutoTimer.h" +#include + + +#define EXYNOS_CONFIG_DEFINED (-1) +#define EXYNOS_CONFIG_NOTDEFINED (-2) + +#define STATE_REG_RTHDR_AUTO (1<<20) +#define STATE_REG_NEED_LLS (1<<18) +#define STATE_REG_ZOOM_INDOOR (1<<16) +#define STATE_REG_ZOOM_OUTDOOR (1<<14) +#define STATE_REG_ZOOM (1<<12) +#define STATE_REG_RTHDR_ON (1<<10) +#define STATE_REG_RECORDINGHINT (1<<8) +#define STATE_REG_DUAL_RECORDINGHINT (1<<6) +#define STATE_REG_UHD_RECORDING (1<<4) +#define STATE_REG_DUAL_MODE (1<<2) +#define STATE_REG_FLAG_REPROCESSING (1) + + +#define STATE_STILL_PREVIEW (0) +#define STATE_STILL_PREVIEW_WDR_ON (STATE_REG_RTHDR_ON) +#define STATE_STILL_PREVIEW_WDR_AUTO (STATE_REG_RTHDR_AUTO) +#define STATE_STILL_PREVIEW_LLS (STATE_REG_NEED_LLS) + +#define STATE_STILL_CAPTURE (STATE_REG_FLAG_REPROCESSING) +#define STATE_STILL_CAPTURE_ZOOM (STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM) +#define STATE_STILL_CAPTURE_ZOOM_OUTDOOR (STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM_OUTDOOR) +#define STATE_STILL_CAPTURE_ZOOM_INDOOR (STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM_INDOOR) +#define STATE_VIDEO_CAPTURE (STATE_REG_FLAG_REPROCESSING|STATE_REG_RECORDINGHINT) +#define STATE_STILL_CAPTURE_LLS (STATE_REG_FLAG_REPROCESSING|STATE_REG_NEED_LLS) + +#define STATE_STILL_CAPTURE_WDR_ON (STATE_REG_RTHDR_ON|STATE_REG_FLAG_REPROCESSING) +#define STATE_STILL_CAPTURE_WDR_ON_ZOOM 
(STATE_REG_RTHDR_ON|STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM) +#define STATE_STILL_CAPTURE_WDR_ON_ZOOM_OUTDOOR (STATE_REG_RTHDR_ON|STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM_OUTDOOR) +#define STATE_STILL_CAPTURE_WDR_ON_ZOOM_INDOOR (STATE_REG_RTHDR_ON|STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM_INDOOR) +#define STATE_VIDEO_CAPTURE_WDR_ON (STATE_REG_RTHDR_ON|STATE_REG_FLAG_REPROCESSING|STATE_REG_RECORDINGHINT) +#define STATE_VIDEO_CAPTURE_WDR_ON_LLS (STATE_REG_RTHDR_ON|STATE_REG_FLAG_REPROCESSING|STATE_REG_NEED_LLS) + +#define STATE_STILL_CAPTURE_WDR_AUTO (STATE_REG_RTHDR_AUTO|STATE_REG_FLAG_REPROCESSING) +#define STATE_STILL_CAPTURE_WDR_AUTO_ZOOM (STATE_REG_RTHDR_AUTO|STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM) +#define STATE_STILL_CAPTURE_WDR_AUTO_ZOOM_OUTDOOR (STATE_REG_RTHDR_AUTO|STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM_OUTDOOR) +#define STATE_STILL_CAPTURE_WDR_AUTO_ZOOM_INDOOR (STATE_REG_RTHDR_AUTO|STATE_REG_FLAG_REPROCESSING|STATE_REG_ZOOM_INDOOR) +#define STATE_VIDEO_CAPTURE_WDR_AUTO (STATE_REG_RTHDR_AUTO|STATE_REG_FLAG_REPROCESSING|STATE_REG_RECORDINGHINT) +#define STATE_STILL_CAPTURE_WDR_AUTO_LLS (STATE_REG_RTHDR_AUTO|STATE_REG_FLAG_REPROCESSING|STATE_REG_NEED_LLS) + +#define STATE_VIDEO (STATE_REG_RECORDINGHINT) +#define STATE_VIDEO_WDR_ON (STATE_REG_RECORDINGHINT|STATE_REG_RTHDR_ON) +#define STATE_VIDEO_WDR_AUTO (STATE_REG_RECORDINGHINT|STATE_REG_RTHDR_AUTO) + +#define STATE_DUAL_VIDEO (STATE_REG_DUAL_RECORDINGHINT|STATE_REG_DUAL_MODE) +#define STATE_DUAL_VIDEO_CAPTURE (STATE_REG_DUAL_RECORDINGHINT|STATE_REG_DUAL_MODE|STATE_REG_FLAG_REPROCESSING) +#define STATE_DUAL_STILL_PREVIEW (STATE_REG_DUAL_MODE) +#define STATE_DUAL_STILL_CAPTURE (STATE_REG_DUAL_MODE|STATE_REG_FLAG_REPROCESSING) + +#define STATE_UHD_PREVIEW (STATE_REG_UHD_RECORDING) +#define STATE_UHD_PREVIEW_WDR_ON (STATE_REG_UHD_RECORDING|STATE_REG_RTHDR_ON) +#define STATE_UHD_PREVIEW_WDR_AUTO (STATE_REG_UHD_RECORDING|STATE_REG_RTHDR_AUTO) +#define STATE_UHD_VIDEO (STATE_REG_UHD_RECORDING|STATE_REG_RECORDINGHINT) +#define STATE_UHD_VIDEO_WDR_ON (STATE_REG_UHD_RECORDING|STATE_REG_RECORDINGHINT|STATE_REG_RTHDR_ON) +#define STATE_UHD_VIDEO_WDR_AUTO (STATE_REG_UHD_RECORDING|STATE_REG_RECORDINGHINT|STATE_REG_RTHDR_AUTO) + +#define STATE_UHD_PREVIEW_CAPTURE (STATE_REG_UHD_RECORDING|STATE_REG_FLAG_REPROCESSING) +#define STATE_UHD_PREVIEW_CAPTURE_WDR_ON (STATE_REG_UHD_RECORDING|STATE_REG_FLAG_REPROCESSING|STATE_REG_RTHDR_ON) +#define STATE_UHD_PREVIEW_CAPTURE_WDR_AUTO (STATE_REG_UHD_RECORDING|STATE_REG_FLAG_REPROCESSING|STATE_REG_RTHDR_AUTO) +#define STATE_UHD_VIDEO_CAPTURE (STATE_REG_UHD_RECORDING|STATE_REG_RECORDINGHINT|STATE_REG_FLAG_REPROCESSING) +#define STATE_UHD_VIDEO_CAPTURE_WDR_ON (STATE_REG_UHD_RECORDING|STATE_REG_RECORDINGHINT|STATE_REG_FLAG_REPROCESSING|STATE_REG_RTHDR_ON) +#define STATE_UHD_VIDEO_CAPTURE_WDR_AUTO (STATE_REG_UHD_RECORDING|STATE_REG_RECORDINGHINT|STATE_REG_FLAG_REPROCESSING|STATE_REG_RTHDR_AUTO) + + +namespace android { + +using namespace std; + +namespace CONFIG_MODE { + enum MODE { + NORMAL = 0x00, + HIGHSPEED_60, + HIGHSPEED_120, + HIGHSPEED_240, + MAX + }; +}; + +struct CONFIG_PIPE { + uint32_t prepare[MAX_PIPE_NUM_REPROCESSING]; +}; + +struct CONFIG_BUFFER { + uint32_t num_sensor_buffers; + uint32_t num_bayer_buffers; + uint32_t init_bayer_buffers; + uint32_t num_3aa_buffers; + uint32_t num_hwdis_buffers; + uint32_t num_vra_buffers; + uint32_t num_preview_buffers; + uint32_t num_preview_cb_buffers; + uint32_t num_picture_buffers; + uint32_t num_reprocessing_buffers; + uint32_t 
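[Editorial sketch, not part of the patch] The STATE_REG_* bit flags and the STATE_* composites above encode the current shooting condition as a single word built with bitwise OR. A minimal, self-contained illustration of that composition; the flag values are mirrored from the #defines above, the helper name is hypothetical.

#include <cstdint>
#include <cstdio>

namespace {
// Values mirrored from the STATE_REG_* defines above.
constexpr uint32_t REG_RTHDR_AUTO        = 1u << 20;
constexpr uint32_t REG_RECORDINGHINT     = 1u << 8;
constexpr uint32_t REG_FLAG_REPROCESSING = 1u;

// Hypothetical helper: build a state word from the current conditions.
uint32_t buildState(bool reprocessing, bool recordingHint, bool rtHdrAuto)
{
    uint32_t state = 0;
    if (reprocessing)  state |= REG_FLAG_REPROCESSING;
    if (recordingHint) state |= REG_RECORDINGHINT;
    if (rtHdrAuto)     state |= REG_RTHDR_AUTO;
    return state;
}
} // namespace

int main()
{
    // A video snapshot with RT-HDR in auto mode...
    uint32_t state = buildState(true, true, true);

    // ...matches STATE_VIDEO_CAPTURE_WDR_AUTO
    //  = STATE_REG_RTHDR_AUTO | STATE_REG_FLAG_REPROCESSING | STATE_REG_RECORDINGHINT.
    constexpr uint32_t STATE_VIDEO_CAPTURE_WDR_AUTO =
        REG_RTHDR_AUTO | REG_FLAG_REPROCESSING | REG_RECORDINGHINT;

    std::printf("match: %d\n", state == STATE_VIDEO_CAPTURE_WDR_AUTO);
    return 0;
}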
num_recording_buffers; + uint32_t num_fastaestable_buffer; + uint32_t reprocessing_bayer_hold_count; + uint32_t front_num_bayer_buffers; + uint32_t front_num_picture_buffers; + uint32_t preview_buffer_margin; + /* for USE_CAMERA2_API_SUPPORT */ + uint32_t num_request_raw_buffers; + uint32_t num_request_preview_buffers; + uint32_t num_request_callback_buffers; + uint32_t num_request_video_buffers; + uint32_t num_request_jpeg_buffers; + uint32_t num_min_block_request; + uint32_t num_max_block_request; + /* for USE_CAMERA2_API_SUPPORT */ +}; + +struct CONFIG_BUFFER_PIPE { + struct CONFIG_PIPE pipeInfo; + struct CONFIG_BUFFER bufInfo; +}; + +struct ExynosConfigInfo { + struct CONFIG_BUFFER_PIPE *current; + struct CONFIG_BUFFER_PIPE info[CONFIG_MODE::MAX]; + uint32_t mode; +}; + +class IMetadata { +public: + IMetadata(){}; + virtual ~IMetadata(){}; + + virtual status_t duplicateCtrlMetadata(void *buf) = 0; +}; + +class IHWConfig { +public: + IHWConfig(){}; + virtual ~IHWConfig(){}; + virtual bool getUsePureBayerReprocessing(void) = 0; + virtual bool isSccCapture(void) = 0; + virtual bool isReprocessing(void) = 0; + virtual void getHwBayerCropRegion(int *w, int *h, int *x, int *y) = 0; + virtual void getHwPreviewSize(int *w, int *h) = 0; + virtual void getVideoSize(int *w, int *h) = 0; + virtual void getPictureSize(int *w, int *h) = 0; + virtual bool getHWVdisMode(void) = 0; + virtual status_t getPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) = 0; + virtual status_t getPictureBdsSize(ExynosRect *dstRect) = 0; + virtual void getHwSensorSize(int *w, int *h) = 0; + virtual bool isReprocessing3aaIspOTF(void) = 0; + virtual int getHwPreviewFormat(void) = 0; + virtual bool getDvfsLock(void) = 0; + virtual void getMaxSensorSize(int *w, int *h) = 0; + virtual status_t getPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) = 0; + virtual status_t getPreviewBdsSize(ExynosRect *dstRect) = 0; + virtual bool getUseDynamicScc(void) = 0; + virtual bool getUseDynamicBayerVideoSnapShot(void) = 0; + virtual int getHWVdisFormat(void) = 0; + virtual void getHwVraInputSize(int *w, int *h) = 0; + virtual int getHwVraInputFormat(void) = 0; + virtual uint32_t getBnsScaleRatio(void) = 0; + virtual bool needGSCForCapture(int camId) = 0; + virtual bool getSetFileCtlMode(void) = 0; + virtual bool getSetFileCtl3AA_ISP(void) = 0; + virtual bool getSetFileCtl3AA(void) = 0; + virtual bool getSetFileCtlISP(void) = 0; + virtual bool getSetFileCtlSCP(void) = 0; + virtual uint64_t getCaptureExposureTime(void) = 0; + virtual bool isUseEarlyFrameReturn(void) {return false;}; +}; + +class IModeConfig { +public: + IModeConfig(){}; + virtual ~IModeConfig(){}; + + virtual bool getHdrMode(void) = 0; + virtual bool getRecordingHint(void) = 0; + virtual int getFocusMode(void) = 0; + virtual int getZoomLevel(void) = 0; + virtual void getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange) = 0; + virtual float getZoomRatio(int zoom) = 0; + virtual struct ExynosConfigInfo *getConfig() = 0; + virtual void setFlipHorizontal(int val) = 0; + virtual int getFlipHorizontal(void) = 0; + virtual void setFlipVertical(int val) = 0; + virtual int getFlipVertical(void) = 0; + virtual int getPictureFormat(void) = 0; + virtual status_t getFdMeta(bool reprocessing, void *buf) = 0; + virtual float getMaxZoomRatio(void)= 0; + virtual int getHalVersion(void) = 0; + + + +// virtual int getGrallocUsage(void) = 0; +// virtual int getGrallocLockUsage(void) = 0; + virtual int getHDRDelay(void) = 0; + virtual int 
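[Editorial sketch, not part of the patch] CONFIG_MODE, CONFIG_BUFFER_PIPE and ExynosConfigInfo above keep one buffer/prepare table per operating mode (normal, 60/120/240 fps high speed) plus a `current` pointer into that table. A reduced sketch of how such a table might be switched at mode change; the field set is cut down, the setter name and the buffer counts are illustrative only.

#include <cstdint>
#include <cstdio>

namespace CONFIG_MODE { enum MODE { NORMAL = 0x00, HIGHSPEED_60, HIGHSPEED_120, HIGHSPEED_240, MAX }; }

struct ConfigBuffer {            // reduced stand-in for CONFIG_BUFFER_PIPE
    uint32_t num_sensor_buffers;
    uint32_t num_preview_buffers;
};

struct ConfigInfo {              // mirrors ExynosConfigInfo: per-mode table + current pointer
    ConfigBuffer *current;
    ConfigBuffer  info[CONFIG_MODE::MAX];
    uint32_t      mode;
};

// Hypothetical helper: point `current` at the table entry for the requested mode.
bool setConfigMode(ConfigInfo *config, uint32_t mode)
{
    if (config == nullptr || mode >= CONFIG_MODE::MAX)
        return false;
    config->mode    = mode;
    config->current = &config->info[mode];
    return true;
}

int main()
{
    ConfigInfo config = {};
    config.info[CONFIG_MODE::NORMAL]        = {6, 8};    // example counts, not from the BSP
    config.info[CONFIG_MODE::HIGHSPEED_120] = {12, 16};

    setConfigMode(&config, CONFIG_MODE::HIGHSPEED_120);
    std::printf("preview buffers now: %u\n", config.current->num_preview_buffers);
    return 0;
}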
getReprocessingBayerHoldCount(void) = 0; + virtual int getFastenAeFps(void) = 0; + + virtual int getPerFrameControlPipe(void) = 0; + virtual int getPerFrameControlReprocessingPipe(void) = 0; + + virtual int getPerFrameInfo3AA(void) = 0; + virtual int getPerFrameInfoIsp(void) = 0; + virtual int getPerFrameInfoDis(void) = 0; + virtual int getPerFrameInfoReprocessingPure3AA(void) = 0; + virtual int getPerFrameInfoReprocessingPureIsp(void) = 0; + + virtual int getScalerNodeNumPicture(void) = 0; + + virtual bool isOwnScc(int cameraId) = 0; +}; + +class IModeVendorConfig { +public: + IModeVendorConfig(){}; + virtual ~IModeVendorConfig(){}; + + virtual int getShotMode(void) = 0; +// virtual bool getOISCaptureModeOn(void) = 0; +// virtual void setOISCaptureModeOn(bool enable) = 0; + virtual int getSeriesShotCount(void) = 0; + virtual bool getHighResolutionCallbackMode(void) = 0; +}; + +class IActivityController { +public: + IActivityController(){}; + virtual ~IActivityController(){}; + + virtual ExynosCameraActivityControl *getActivityControl(void) = 0; + +}; + +class IJpegConfig { +public: + IJpegConfig(){}; + virtual ~IJpegConfig(){}; + + virtual int getJpegQuality(void) = 0; + virtual int getThumbnailQuality(void) = 0; + virtual status_t getFixedExifInfo(exif_attribute_t *exifInfo) = 0; + virtual debug_attribute_t *getDebugAttribute(void) = 0; + virtual void getThumbnailSize(int *w, int *h) = 0; + virtual void setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot) = 0; +}; + +class ISensorStaticInfo { +public: + ISensorStaticInfo(){}; + virtual ~ISensorStaticInfo(){}; + + virtual struct ExynosSensorInfoBase *getSensorStaticInfo() = 0; +}; + +class ExynosCameraParameters : public IMetadata, public IHWConfig, public IModeConfig, public IModeVendorConfig, public IActivityController, public IJpegConfig, public ISensorStaticInfo { +public: + ExynosCameraParameters(){}; + virtual ~ExynosCameraParameters(){}; + +//class Interface Metadata + virtual status_t duplicateCtrlMetadata(void *buf); + +//class Interface HWConfig + virtual bool getUsePureBayerReprocessing(void) = 0; + virtual bool isSccCapture(void) = 0; + virtual bool isReprocessing(void) = 0; + virtual void getHwBayerCropRegion(int *w, int *h, int *x, int *y) = 0; + virtual void getHwPreviewSize(int *w, int *h) = 0; + virtual void getPictureSize(int *w, int *h) = 0; + virtual void getYuvSize(int *w, int *h, int index) = 0; + virtual bool getHWVdisMode(void) = 0; + virtual status_t getPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) = 0; + virtual void getHwSensorSize(int *w, int *h) = 0; + virtual status_t getPictureBdsSize(ExynosRect *dstRect) = 0; + virtual bool isReprocessing3aaIspOTF(void) = 0; + virtual int getHwPreviewFormat(void) = 0; + virtual bool getDvfsLock(void) = 0; + virtual void getMaxSensorSize(int *w, int *h) = 0; + virtual status_t getPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) = 0; + virtual status_t getPreviewBdsSize(ExynosRect *dstRect) = 0; + virtual bool getUseDynamicScc(void) = 0; + virtual bool getUseDynamicBayerVideoSnapShot(void) = 0; + virtual int getHWVdisFormat(void) = 0; + virtual void getHwVraInputSize(int *w, int *h) = 0; + virtual int getHwVraInputFormat(void) = 0; + virtual uint32_t getBnsScaleRatio(void) = 0; + virtual bool needGSCForCapture(int camId) = 0; + virtual bool getSetFileCtlMode(void) = 0; + virtual bool getSetFileCtl3AA_ISP(void) = 0; + virtual bool getSetFileCtl3AA(void) = 0; + virtual 
bool getSetFileCtlISP(void) = 0; + virtual bool getSetFileCtlSCP(void) = 0; + virtual uint64_t getCaptureExposureTime(void) = 0; + virtual bool isUseEarlyFrameReturn(void) {return false;}; + virtual bool isUseThumbnailHWFC(void) = 0; + virtual bool isMcscVraOtf(void) = 0; + + virtual int32_t getLongExposureShotCount(void) = 0; +//class Interface ModeConfig + virtual bool getHdrMode(void) = 0; + virtual bool getRecordingHint(void) = 0; + virtual int getFocusMode(void) = 0; + virtual int getZoomLevel(void) = 0; + virtual void getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange) = 0; + virtual float getZoomRatio(int zoom) = 0; + virtual struct ExynosConfigInfo *getConfig() = 0; + virtual void setFlipHorizontal(int val) = 0; + virtual int getFlipHorizontal(void) = 0; + virtual void setFlipVertical(int val) = 0; + virtual int getFlipVertical(void) = 0; + virtual int getPictureFormat(void) = 0; + virtual int getHwPictureFormat(void) = 0; + + virtual int getHalVersion(void) = 0; + +#ifdef DEBUG_RAWDUMP + virtual bool checkBayerDumpEnable(void) = 0; +#endif/* DEBUG_RAWDUMP */ +#ifdef RAWDUMP_CAPTURE + virtual void setRawCaptureModeOn(bool enable) = 0; + virtual bool getRawCaptureModeOn(void) = 0; +#endif /* RAWDUMP_CAPTURE */ + +// virtual int getGrallocUsage(void) = 0; +// virtual int getGrallocLockUsage(void) = 0; + virtual int getHDRDelay(void) = 0; + virtual int getReprocessingBayerHoldCount(void) = 0; + virtual int getFastenAeFps(void) = 0; + virtual int getPerFrameControlPipe(void) = 0; + virtual int getPerFrameControlReprocessingPipe(void) = 0; + virtual int getPerFrameInfo3AA(void) = 0; + virtual int getPerFrameInfoIsp(void) = 0; + virtual int getPerFrameInfoDis(void) = 0; + virtual int getPerFrameInfoReprocessingPure3AA(void) = 0; + virtual int getPerFrameInfoReprocessingPureIsp(void) = 0; + + virtual int getScalerNodeNumPicture(void) = 0; + virtual int getScalerNodeNumPreview(void) = 0; + virtual int getScalerNodeNumVideo(void) = 0; + virtual bool isOwnScc(int cameraId) = 0; + + virtual bool getTpuEnabledMode(void) = 0; + + virtual int getCameraId(void) = 0; + + + +//class Interface ModeVendorConfig + virtual int getShotMode(void) = 0; + virtual bool getOISCaptureModeOn(void){return false;}; + virtual void setOISCaptureModeOn(__unused bool enable){}; + virtual int getSeriesShotCount(void) = 0; + virtual bool getHighResolutionCallbackMode(void) = 0; + + virtual void setNormalBestFrameCount(__unused uint32_t count){}; + virtual uint32_t getNormalBestFrameCount(void){return 0;}; + virtual void resetNormalBestFrameCount(void){}; + virtual void setSCPFrameCount(__unused uint32_t count){}; + virtual uint32_t getSCPFrameCount(void){return 0;}; + virtual void resetSCPFrameCount(void){}; + virtual void setBayerFrameCount(__unused uint32_t count){}; + virtual uint32_t getBayerFrameCount(void){return 0;}; + virtual void resetBayerFrameCount(void){}; + virtual bool getUseBestPic(void){return false;}; + virtual int getSeriesShotMode(void) = 0; + + virtual void setLLSCaptureCount(__unused int count){}; + virtual int getLLSCaptureCount(){return 0;}; + +//class Interface ActivityController + virtual ExynosCameraActivityControl *getActivityControl(void) = 0; + +//class Interface JpegConfig + virtual int getJpegQuality(void) = 0; + virtual int getThumbnailQuality(void) = 0; + virtual status_t getFixedExifInfo(exif_attribute_t *exifInfo) = 0; + virtual debug_attribute_t *getDebugAttribute(void) = 0; + virtual void getThumbnailSize(int *w, int *h) = 0; + virtual void 
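[Editorial sketch, not part of the patch] ExynosCameraParameters is declared purely as an aggregation of the small interfaces above (IMetadata, IHWConfig, IModeConfig, IJpegConfig, ...), so individual pipeline components can depend on just the slice they need. A heavily reduced mirror of that shape with a trivial concrete implementation; every name below is illustrative, and the real class carries far more methods.

#include <cstdio>

// Reduced mirrors of two of the interfaces declared above.
class IModeConfigLite {
public:
    virtual ~IModeConfigLite() {}
    virtual bool getRecordingHint(void) = 0;
    virtual int  getZoomLevel(void) = 0;
};

class IJpegConfigLite {
public:
    virtual ~IJpegConfigLite() {}
    virtual int getJpegQuality(void) = 0;
};

// A concrete parameters object implements every slice at once,
// the way ExynosCameraParameters aggregates its interfaces.
class DemoParameters : public IModeConfigLite, public IJpegConfigLite {
public:
    virtual bool getRecordingHint(void) { return m_recordingHint; }
    virtual int  getZoomLevel(void)     { return m_zoomLevel; }
    virtual int  getJpegQuality(void)   { return 95; }

    void setRecordingHint(bool hint) { m_recordingHint = hint; }
private:
    bool m_recordingHint = false;
    int  m_zoomLevel = 0;
};

// A JPEG encoder path only needs the JPEG slice.
static void encode(IJpegConfigLite *jpegConfig)
{
    std::printf("encoding at quality %d\n", jpegConfig->getJpegQuality());
}

int main()
{
    DemoParameters params;
    params.setRecordingHint(true);
    encode(&params);
    return 0;
}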
setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot) = 0; + virtual status_t setMarkingOfExifFlash(int flag) = 0; + virtual int getMarkingOfExifFlash(void) = 0; + +//Sensor Static Info + virtual struct ExynosSensorInfoBase *getSensorStaticInfo() = 0; + +}; + + +#if 0 +class ExynosCameraConfiguration { +public: + /* Constructor */ + ExynosCameraConfiguration(int cameraId); + + /* Destructor */ + virtual ~ExynosCameraConfiguration(); + + status_t addKeyTable(int *keyTable); + int getKey(int key); + /* status_t setKey(int key, int value); */ + +protected: + typedef map ConfigMap; +}; +#endif + + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/ExynosCameraScalableSensor.cpp b/libcamera/34xx/ExynosCameraScalableSensor.cpp new file mode 100644 index 0000000..a71da6e --- /dev/null +++ b/libcamera/34xx/ExynosCameraScalableSensor.cpp @@ -0,0 +1,66 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera" +#include + +#include "ExynosCameraScalableSensor.h" + +namespace android { + +ExynosCameraScalableSensor::ExynosCameraScalableSensor() +{ + m_mode = EXYNOS_CAMERA_SCALABLE_NONE; + m_isAllocedPreview = false; + m_state = 0; +} + +ExynosCameraScalableSensor::~ExynosCameraScalableSensor() +{ +} + +int ExynosCameraScalableSensor::getState() +{ + return m_state; +} + +int ExynosCameraScalableSensor::getMode() +{ + return m_mode; +} + +void ExynosCameraScalableSensor::setMode(int Mode) +{ + m_mode = Mode; + + return; +} + +bool ExynosCameraScalableSensor::isAllocedPreview() +{ + return m_isAllocedPreview; +} + +void ExynosCameraScalableSensor::setAllocedPreivew(bool value) +{ + m_isAllocedPreview = value; + + return; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/ExynosCameraScalableSensor.h b/libcamera/34xx/ExynosCameraScalableSensor.h new file mode 100644 index 0000000..fd8b05e --- /dev/null +++ b/libcamera/34xx/ExynosCameraScalableSensor.h @@ -0,0 +1,63 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + + +#ifndef EXYNOS_CAMERA_SCALABLE_SENSOR_H +#define EXYNOS_CAMERA_SCALABLE_SENSOR_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +namespace android { + +#define EXYNOS_CAMERA_SCALABLE_NONE (0) +#define EXYNOS_CAMERA_SCALABLE_CHANGING (1) +#define EXYNOS_CAMERA_SCALABLE_2M (2) +#define EXYNOS_CAMERA_SCALABLE_13M (3) + +#define EXYNOS_CAMERA_SCALABLE_2M_2_13M (11) +#define EXYNOS_CAMERA_SCALABLE_13M_2_2M (12) + + +class ExynosCameraScalableSensor { +public: + ExynosCameraScalableSensor(); + virtual ~ExynosCameraScalableSensor(); + int getState(void); + int getMode(void); + void setMode(int Mode); + bool isAllocedPreview(void); + void setAllocedPreivew(bool value); +private: + int m_state; + int m_mode; + bool m_isAllocedPreview; +}; + +}; +#endif /* EXYNOS_CAMERA_SCALABLE_SENSOR_H */ + diff --git a/libcamera/34xx/ExynosCameraUtilsModule.cpp b/libcamera/34xx/ExynosCameraUtilsModule.cpp new file mode 100644 index 0000000..b4400f4 --- /dev/null +++ b/libcamera/34xx/ExynosCameraUtilsModule.cpp @@ -0,0 +1,628 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraUtilsModule" +#include + +#include "ExynosCameraUtilsModule.h" + +////////////////////////////////////////////////////////////////////////////////////////////////// +// HACK +////////////// +#define isOwnScc(cameraId) ((cameraId == CAMERA_ID_BACK) ? 
MAIN_CAMERA_HAS_OWN_SCC : FRONT_CAMERA_HAS_OWN_SCC) + +namespace android { + +void updateNodeGroupInfoMainPreview( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSize, + __unused ExynosRect bdsSize, + int previewW, int previewH, + __unused int pictureW, __unused int pictureH) +{ + ALOGV("Leader before (%d, %d, %d, %d)(%d, %d, %d, %d)(%d %d)", + node_group_info_3aa->leader.input.cropRegion[0], + node_group_info_3aa->leader.input.cropRegion[1], + node_group_info_3aa->leader.input.cropRegion[2], + node_group_info_3aa->leader.input.cropRegion[3], + node_group_info_3aa->leader.output.cropRegion[0], + node_group_info_3aa->leader.output.cropRegion[1], + node_group_info_3aa->leader.output.cropRegion[2], + node_group_info_3aa->leader.output.cropRegion[3], + node_group_info_3aa->leader.request, + node_group_info_3aa->leader.vid); + + /* Leader : 3AA : BCrop */ + node_group_info_3aa->leader.input.cropRegion[0] = bayerCropSize.x; + node_group_info_3aa->leader.input.cropRegion[1] = bayerCropSize.y; + node_group_info_3aa->leader.input.cropRegion[2] = bayerCropSize.w; + node_group_info_3aa->leader.input.cropRegion[3] = bayerCropSize.h; + node_group_info_3aa->leader.output.cropRegion[0] = node_group_info_3aa->leader.input.cropRegion[0]; + node_group_info_3aa->leader.output.cropRegion[1] = node_group_info_3aa->leader.input.cropRegion[1]; + node_group_info_3aa->leader.output.cropRegion[2] = node_group_info_3aa->leader.input.cropRegion[2]; + node_group_info_3aa->leader.output.cropRegion[3] = node_group_info_3aa->leader.input.cropRegion[3]; + + /* Capture 0 : 3AC -[X] - output cropX, cropY should be Zero */ + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].input.cropRegion[0] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].input.cropRegion[1] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].input.cropRegion[2] = node_group_info_3aa->leader.input.cropRegion[2]; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].input.cropRegion[3] = node_group_info_3aa->leader.input.cropRegion[3]; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].output.cropRegion[0] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].output.cropRegion[1] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].output.cropRegion[2] = node_group_info_3aa->leader.input.cropRegion[2]; + node_group_info_3aa->capture[PERFRAME_BACK_3AC_POS].output.cropRegion[3] = node_group_info_3aa->leader.input.cropRegion[3]; + + /* Capture 1 : 3AP - [BDS] */ + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].input.cropRegion[0] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].input.cropRegion[1] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].input.cropRegion[2] = bayerCropSize.w; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].input.cropRegion[3] = bayerCropSize.h; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[0] = 0; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[1] = 0; +#if (defined(CAMERA_HAS_OWN_BDS) && (CAMERA_HAS_OWN_BDS)) + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[2] = (bayerCropSize.w < bdsSize.w) ? bayerCropSize.w : bdsSize.w; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[3] = (bayerCropSize.h < bdsSize.h) ? 
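[Editorial sketch, not part of the patch] The isOwnScc() macro above, flagged as a HACK in the source, keys the SCC-ownership decision on the camera ID; MAIN_CAMERA_HAS_OWN_SCC and FRONT_CAMERA_HAS_OWN_SCC presumably come from ExynosCameraConfig.h, which is not in this hunk. The same selection written as a typed inline function, with placeholder values standing in for the config constants:

#include <cstdio>

// Placeholder values; in the BSP these come from the per-SoC config headers.
enum { CAMERA_ID_BACK = 0, CAMERA_ID_FRONT = 1 };
static const bool MAIN_CAMERA_HAS_OWN_SCC  = false;
static const bool FRONT_CAMERA_HAS_OWN_SCC = false;

// Same decision as the isOwnScc(cameraId) macro, but type checked.
static inline bool isOwnScc(int cameraId)
{
    return (cameraId == CAMERA_ID_BACK) ? MAIN_CAMERA_HAS_OWN_SCC
                                        : FRONT_CAMERA_HAS_OWN_SCC;
}

int main()
{
    std::printf("back SCC: %d, front SCC: %d\n",
                isOwnScc(CAMERA_ID_BACK), isOwnScc(CAMERA_ID_FRONT));
    return 0;
}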
bayerCropSize.h : bdsSize.h; +#else + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[2] = bayerCropSize.w; + node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[3] = bayerCropSize.h; +#endif + /* Leader : ISP */ + node_group_info_isp->leader.input.cropRegion[0] = 0; + node_group_info_isp->leader.input.cropRegion[1] = 0; + node_group_info_isp->leader.input.cropRegion[2] = node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[2]; + node_group_info_isp->leader.input.cropRegion[3] = node_group_info_3aa->capture[PERFRAME_BACK_3AP_POS].output.cropRegion[3]; + node_group_info_isp->leader.output.cropRegion[0] = 0; + node_group_info_isp->leader.output.cropRegion[1] = 0; + node_group_info_isp->leader.output.cropRegion[2] = node_group_info_isp->leader.input.cropRegion[2]; + node_group_info_isp->leader.output.cropRegion[3] = node_group_info_isp->leader.input.cropRegion[3]; + + /* Capture : ISPP */ + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].input.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].input.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].input.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].input.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].output.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].output.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].output.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_BACK_ISPP_POS].output.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + /* Capture 0 : SCP - [scaling] */ + if (isOwnScc(cameraId) == true) { + /* HACK: When Driver do not support SCP scaling */ + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[2] = previewW; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[3] = previewH; + } else { + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + } + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].output.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].output.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].output.cropRegion[2] = + (node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[2] < (unsigned)previewW ? node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[2] : previewW); + node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].output.cropRegion[3] = + (node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[3] < (unsigned)previewH ? 
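[Editorial sketch, not part of the patch] In updateNodeGroupInfoMainPreview() above, the 3AP output crop is clamped to min(bayer crop, BDS target) when CAMERA_HAS_OWN_BDS is set, and the SCP output is likewise clamped to the preview size, so each stage only ever scales down. The same clamp isolated below; the width/height numbers are examples, not taken from the BSP.

#include <algorithm>
#include <cstdio>

struct Size { int w; int h; };

// Downscale-only clamp used for the 3AP (BDS) and SCP output crops above.
static Size clampToTarget(Size in, Size target)
{
    return Size{ std::min(in.w, target.w), std::min(in.h, target.h) };
}

int main()
{
    Size bayerCrop = {4128, 2322};   // example sensor crop
    Size bdsSize   = {1920, 1080};   // example BDS target
    Size preview   = {1280,  720};   // example preview size

    Size bdsOut = clampToTarget(bayerCrop, bdsSize);   // 3AP output
    Size scpOut = clampToTarget(bdsOut, preview);      // SCP output
    std::printf("3AP: %dx%d, SCP: %dx%d\n", bdsOut.w, bdsOut.h, scpOut.w, scpOut.h);
    return 0;
}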
node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[3] : previewH); + + /* + * HACK + * in OTF case, we need to set perframe size on 3AA. + * This set 3aa perframe size by isp perframe size. + * The reason of hack is that worry about modify and code sync. + */ + for (int i = 0; i < 4; i++) { + node_group_info_3aa->capture[PERFRAME_BACK_SCP_POS].input. cropRegion[i] = node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].input. cropRegion[i]; + node_group_info_3aa->capture[PERFRAME_BACK_SCP_POS].output.cropRegion[i] = node_group_info_isp->capture[PERFRAME_BACK_SCP_POS].output.cropRegion[i]; + } + + ALOGV("Leader after (%d, %d, %d, %d)(%d, %d, %d, %d)(%d %d)", + node_group_info_3aa->leader.input.cropRegion[0], + node_group_info_3aa->leader.input.cropRegion[1], + node_group_info_3aa->leader.input.cropRegion[2], + node_group_info_3aa->leader.input.cropRegion[3], + node_group_info_3aa->leader.output.cropRegion[0], + node_group_info_3aa->leader.output.cropRegion[1], + node_group_info_3aa->leader.output.cropRegion[2], + node_group_info_3aa->leader.output.cropRegion[3], + node_group_info_3aa->leader.request, + node_group_info_3aa->leader.vid); +} + + +void updateNodeGroupInfoReprocessing( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSizePreview, + ExynosRect bayerCropSizePicture, + ExynosRect bdsSize, + int pictureW, int pictureH, + bool pureBayerReprocessing, + bool flag3aaIspOtf) +{ + int perFramePos = 0; + ALOGV("Leader before (%d, %d, %d, %d)(%d, %d, %d, %d)(%d %d)", + node_group_info_3aa->leader.input.cropRegion[0], + node_group_info_3aa->leader.input.cropRegion[1], + node_group_info_3aa->leader.input.cropRegion[2], + node_group_info_3aa->leader.input.cropRegion[3], + node_group_info_3aa->leader.output.cropRegion[0], + node_group_info_3aa->leader.output.cropRegion[1], + node_group_info_3aa->leader.output.cropRegion[2], + node_group_info_3aa->leader.output.cropRegion[3], + node_group_info_3aa->leader.request, + node_group_info_3aa->leader.vid); + + if (pureBayerReprocessing == true) { + /* Leader : 3AA */ + node_group_info_3aa->leader.input.cropRegion[0] = bayerCropSizePicture.x; + node_group_info_3aa->leader.input.cropRegion[1] = bayerCropSizePicture.y; + node_group_info_3aa->leader.input.cropRegion[2] = bayerCropSizePicture.w; + node_group_info_3aa->leader.input.cropRegion[3] = bayerCropSizePicture.h; + node_group_info_3aa->leader.output.cropRegion[0] = node_group_info_3aa->leader.input.cropRegion[0]; + node_group_info_3aa->leader.output.cropRegion[1] = node_group_info_3aa->leader.input.cropRegion[1]; + node_group_info_3aa->leader.output.cropRegion[2] = node_group_info_3aa->leader.input.cropRegion[2]; + node_group_info_3aa->leader.output.cropRegion[3] = node_group_info_3aa->leader.input.cropRegion[3]; + + perFramePos = PERFRAME_REPROCESSING_3AP_POS; + + /* Capture 1 : 3AC - [BDS] */ + node_group_info_3aa->capture[perFramePos].input.cropRegion[0] = 0; + node_group_info_3aa->capture[perFramePos].input.cropRegion[1] = 0; + node_group_info_3aa->capture[perFramePos].input.cropRegion[2] = bayerCropSizePicture.w; + node_group_info_3aa->capture[perFramePos].input.cropRegion[3] = bayerCropSizePicture.h; + node_group_info_3aa->capture[perFramePos].output.cropRegion[0] = 0; + node_group_info_3aa->capture[perFramePos].output.cropRegion[1] = 0; + node_group_info_3aa->capture[perFramePos].output.cropRegion[2] = (bayerCropSizePicture.w < bdsSize.w) ? 
bayerCropSizePicture.w : bdsSize.w; + node_group_info_3aa->capture[perFramePos].output.cropRegion[3] = (bayerCropSizePicture.h < bdsSize.h) ? bayerCropSizePicture.h : bdsSize.h; + + /* Leader : ISP */ + node_group_info_isp->leader.input.cropRegion[0] = 0; + node_group_info_isp->leader.input.cropRegion[1] = 0; + node_group_info_isp->leader.input.cropRegion[2] = node_group_info_3aa->capture[perFramePos].output.cropRegion[2]; + node_group_info_isp->leader.input.cropRegion[3] = node_group_info_3aa->capture[perFramePos].output.cropRegion[3]; + node_group_info_isp->leader.output.cropRegion[0] = 0; + node_group_info_isp->leader.output.cropRegion[1] = 0; + node_group_info_isp->leader.output.cropRegion[2] = node_group_info_isp->leader.input.cropRegion[2]; + node_group_info_isp->leader.output.cropRegion[3] = node_group_info_isp->leader.input.cropRegion[3]; + + /* Capture 1 : SCC */ + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + if (isOwnScc(cameraId) == true) { + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2] = pictureW; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3] = pictureH; + } else { + /* ISPC does not support scaling */ + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3]; + } + + /* + * HACK + * in OTF case, we need to set perframe size on 3AA. + * just set 3aa_isp otf size by isp perframe size. + * The reason of hack is + * worry about modify and code sync. 
+ */ + if (flag3aaIspOtf == true) { + /* Capture 1 : ISPC */ + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0]; + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1]; + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2]; + node_group_info_3aa->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3]; + } + } else { + /* Leader : ISP */ + node_group_info_isp->leader.input.cropRegion[0] = 0; + node_group_info_isp->leader.input.cropRegion[1] = 0; + node_group_info_isp->leader.input.cropRegion[2] = bayerCropSizePreview.w; + node_group_info_isp->leader.input.cropRegion[3] = bayerCropSizePreview.h; + node_group_info_isp->leader.output.cropRegion[0] = 0; + node_group_info_isp->leader.output.cropRegion[1] = 0; + node_group_info_isp->leader.output.cropRegion[2] = node_group_info_isp->leader.input.cropRegion[2]; + node_group_info_isp->leader.output.cropRegion[3] = node_group_info_isp->leader.input.cropRegion[3]; + + /* Capture 1 : SCC */ + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0] = (bayerCropSizePreview.w - bayerCropSizePicture.w) / 2; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1] = (bayerCropSizePreview.h - bayerCropSizePicture.h) / 2; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2] = bayerCropSizePicture.w; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3] = bayerCropSizePicture.h; + + if (isOwnScc(cameraId) == true) { + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2] = pictureW; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3] = pictureH; + } else { + /* ISPC does not support scaling */ + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3] = node_group_info_isp->capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3]; + } + } + + ALOGV("Leader after (%d, 
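[Editorial sketch, not part of the patch] In the non-pure-bayer reprocessing branch above, the SCC input crop is centred inside the preview bayer crop: the offset on each axis is (preview - picture) / 2 and the crop keeps the picture dimensions. Isolated below; the sizes are examples only.

#include <cstdio>

struct Rect { int x, y, w, h; };

// Centre a picture-sized crop inside the preview bayer crop,
// as done for PERFRAME_REPROCESSING_SCC_POS above.
static Rect centerCrop(Rect preview, Rect picture)
{
    Rect out;
    out.x = (preview.w - picture.w) / 2;
    out.y = (preview.h - picture.h) / 2;
    out.w = picture.w;
    out.h = picture.h;
    return out;
}

int main()
{
    Rect previewCrop = {0, 0, 4128, 2322};  // example values
    Rect pictureCrop = {0, 0, 4096, 2304};
    Rect sccInput = centerCrop(previewCrop, pictureCrop);
    std::printf("SCC input: %d,%d %dx%d\n", sccInput.x, sccInput.y, sccInput.w, sccInput.h);
    return 0;
}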
%d, %d, %d)(%d, %d, %d, %d)(%d %d)", + node_group_info_3aa->leader.input.cropRegion[0], + node_group_info_3aa->leader.input.cropRegion[1], + node_group_info_3aa->leader.input.cropRegion[2], + node_group_info_3aa->leader.input.cropRegion[3], + node_group_info_3aa->leader.output.cropRegion[0], + node_group_info_3aa->leader.output.cropRegion[1], + node_group_info_3aa->leader.output.cropRegion[2], + node_group_info_3aa->leader.output.cropRegion[3], + node_group_info_3aa->leader.request, + node_group_info_3aa->leader.vid); +} + +void updateNodeGroupInfoFront( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSize, + __unused ExynosRect bdsSize, + int previewW, int previewH, + __unused int pictureW, __unused int pictureH) +{ + + /* Leader : 3AA : BCrop */ + node_group_info_3aa->leader.input.cropRegion[0] = bayerCropSize.x; + node_group_info_3aa->leader.input.cropRegion[1] = bayerCropSize.y; + node_group_info_3aa->leader.input.cropRegion[2] = bayerCropSize.w; + node_group_info_3aa->leader.input.cropRegion[3] = bayerCropSize.h; + node_group_info_3aa->leader.output.cropRegion[0] = node_group_info_3aa->leader.input.cropRegion[0]; + node_group_info_3aa->leader.output.cropRegion[1] = node_group_info_3aa->leader.input.cropRegion[1]; + node_group_info_3aa->leader.output.cropRegion[2] = node_group_info_3aa->leader.input.cropRegion[2]; + node_group_info_3aa->leader.output.cropRegion[3] = node_group_info_3aa->leader.input.cropRegion[3]; + + /* Capture 0 :3AP : BDS */ + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].input.cropRegion[0] = 0; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].input.cropRegion[1] = 0; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].input.cropRegion[2] = node_group_info_3aa->leader.output.cropRegion[2]; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].input.cropRegion[3] = node_group_info_3aa->leader.output.cropRegion[3]; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[0] = 0; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[1] = 0; +#if (defined(CAMERA_HAS_OWN_BDS) && (CAMERA_HAS_OWN_BDS)) + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[2] = (bayerCropSize.w < bdsSize.w)? bayerCropSize.w : bdsSize.w; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[3] = (bayerCropSize.h < bdsSize.h)? 
bayerCropSize.h : bdsSize.h; +#else + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[2] = bayerCropSize.w; + node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[3] = bayerCropSize.h; +#endif + + /* Leader : ISP */ + node_group_info_isp->leader.input.cropRegion[0] = 0; + node_group_info_isp->leader.input.cropRegion[1] = 0; + node_group_info_isp->leader.input.cropRegion[2] = node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[2]; + node_group_info_isp->leader.input.cropRegion[3] = node_group_info_3aa->capture[PERFRAME_FRONT_3AP_POS].output.cropRegion[3]; + node_group_info_isp->leader.output.cropRegion[0] = 0; + node_group_info_isp->leader.output.cropRegion[1] = 0; + node_group_info_isp->leader.output.cropRegion[2] = node_group_info_isp->leader.input.cropRegion[2]; + node_group_info_isp->leader.output.cropRegion[3] = node_group_info_isp->leader.input.cropRegion[3]; + + /* Capture : ISPP */ + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].input.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].input.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].input.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].input.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].output.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].output.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].output.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_FRONT_ISPP_POS].output.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + /* Capture 0 : SCC */ + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[0] = node_group_info_isp->leader.output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[1] = node_group_info_isp->leader.output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[2] = node_group_info_isp->leader.output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[3] = node_group_info_isp->leader.output.cropRegion[3]; + + if (isOwnScc(cameraId) == true) { + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[2] = previewW; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[3] = previewH; + } else { + /* ISPC does not support scaling */ + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[0] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[1] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[2] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[3] = 
node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].input.cropRegion[3]; + } + + /* Capture 1 : SCP */ + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[0] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[1] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[2] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[3] = node_group_info_isp->capture[PERFRAME_FRONT_SCC_POS].output.cropRegion[3]; + + if (isOwnScc(cameraId) == true) { + /* HACK: When Driver do not support SCP scaling */ + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[0] = node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[0]; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[1] = node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[1]; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[2] = node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[2]; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[3] = node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[3]; + } else { + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[0] = 0; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[1] = 0; + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[2] = + (node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[2] < (unsigned)previewW ? node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[2] : previewW); + node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[3] = + (node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[3] < (unsigned)previewH ? node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input.cropRegion[3] : previewH); + } + + /* + * HACK + * in OTF case, we need to set perframe size on 3AA + * just set 3aa perframe size by isp perframe size. + * The reason of hack is + * worry about modify and code sync. + */ + for (int i = 0; i < 4; i++) { + node_group_info_3aa->capture[PERFRAME_FRONT_SCP_POS].input. cropRegion[i] = node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].input. 
cropRegion[i]; + node_group_info_3aa->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[i] = node_group_info_isp->capture[PERFRAME_FRONT_SCP_POS].output.cropRegion[i]; + } +} + +void ExynosCameraNodeGroup::updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH) +{ + if (cameraId == CAMERA_ID_BACK) { + updateNodeGroupInfoMainPreview( + cameraId, + node_group_info_3aa, + node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + } else { + updateNodeGroupInfoFront( + cameraId, + node_group_info_3aa, + node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + } + + // m_dump("3AA", cameraId, node_group_info_3aa); + // m_dump("ISP", cameraId, node_group_info_isp); +} + +void ExynosCameraNodeGroup::updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSizePreview, + ExynosRect bayerCropSizePicture, + ExynosRect bdsSize, + int pictureW, int pictureH, + bool pureBayerReprocessing, + bool flag3aaIspOtf) +{ + updateNodeGroupInfoReprocessing( + cameraId, + node_group_info_3aa, + node_group_info_isp, + bayerCropSizePreview, + bayerCropSizePicture, + bdsSize, + pictureW, pictureH, + pureBayerReprocessing, + flag3aaIspOtf); + + // m_dump("3AA", cameraId, node_group_info_3aa); + // m_dump("ISP", cameraId, node_group_info_isp); +} + +void ExynosCameraNodeGroup::m_dump(const char *name, int cameraId, camera2_node_group *node_group_info) +{ + + ALOGD("[CAM_ID(%d)][%s]-DEBUG(%s[%d]):node_group_info->leader(in : %d, %d, %d, %d) -> (out : %d, %d, %d, %d)(request : %d, vid : %d)", + cameraId, + name, + __FUNCTION__, __LINE__, + node_group_info->leader.input.cropRegion[0], + node_group_info->leader.input.cropRegion[1], + node_group_info->leader.input.cropRegion[2], + node_group_info->leader.input.cropRegion[3], + node_group_info->leader.output.cropRegion[0], + node_group_info->leader.output.cropRegion[1], + node_group_info->leader.output.cropRegion[2], + node_group_info->leader.output.cropRegion[3], + node_group_info->leader.request, + node_group_info->leader.vid); + + for (int i = 0; i < CAPTURE_NODE_MAX; i++) { + ALOGD("[CAM_ID(%d)][%s]-DEBUG(%s[%d]):node_group_info->capture[%d](in : %d, %d, %d, %d) -> (out : %d, %d, %d, %d)(request : %d, vid : %d)", + cameraId, + name, + __FUNCTION__, __LINE__, + i, + node_group_info->capture[i].input.cropRegion[0], + node_group_info->capture[i].input.cropRegion[1], + node_group_info->capture[i].input.cropRegion[2], + node_group_info->capture[i].input.cropRegion[3], + node_group_info->capture[i].output.cropRegion[0], + node_group_info->capture[i].output.cropRegion[1], + node_group_info->capture[i].output.cropRegion[2], + node_group_info->capture[i].output.cropRegion[3], + node_group_info->capture[i].request, + node_group_info->capture[i].vid); + } +} + +void ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH) +{ + camera2_node_group node_group_info_isp; + memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group)); + + if (cameraId == CAMERA_ID_BACK) { + updateNodeGroupInfoMainPreview( + cameraId, + node_group_info, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, 
previewH, + pictureW, pictureH); + } else { + updateNodeGroupInfoFront( + cameraId, + node_group_info, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + } + + // m_dump("3AA", cameraId, node_group_info); +} + +void ExynosCameraNodeGroupISP::updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH, + bool dis) +{ + camera2_node_group node_group_info_3aa; + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + + int ispW = previewW; + int ispH = previewH; + + if (dis == true) { + ispW = bdsSize.w; + ispH = bdsSize.h; + } + + if (cameraId == CAMERA_ID_BACK) { + updateNodeGroupInfoMainPreview( + cameraId, + &node_group_info_3aa, + node_group_info, + bayerCropSize, + bdsSize, + ispW, ispH, + pictureW, pictureH); + } else { + updateNodeGroupInfoFront( + cameraId, + &node_group_info_3aa, + node_group_info, + bayerCropSize, + bdsSize, + ispW, ispH, + pictureW, pictureH); + } + + // m_dump("ISP", cameraId, node_group_info); +} + +void ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH, + bool dis) +{ + camera2_node_group node_group_info_3aa; + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + + if (cameraId == CAMERA_ID_BACK) { + updateNodeGroupInfoMainPreview( + cameraId, + &node_group_info_3aa, + node_group_info, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + } else { + updateNodeGroupInfoFront( + cameraId, + &node_group_info_3aa, + node_group_info, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + } + + /* + * make DIS output smaller than output. + * (DIS output = DIS input / HW_VDIS_RATIO) + */ + if (dis == true) { + node_group_info->leader.output.cropRegion[2] = ALIGN_UP((int)(node_group_info->leader.input.cropRegion[2] / HW_VDIS_W_RATIO), 2); + node_group_info->leader.output.cropRegion[3] = ALIGN_UP((int)(node_group_info->leader.input.cropRegion[3] / HW_VDIS_H_RATIO), 2); + } + + /* + * In case of DIS, + * DIS's output crop size must be same SCP's input crop size. + * because SCP's input image comes from DIS output filtering. + * (SCP input = DIS output) + */ + for (int i = 0; i < 4; i++) + node_group_info->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[i] = node_group_info->leader.output.cropRegion[i]; + + + // m_dump("DIS", cameraId, node_group_info); +} + +}; /* namespace android */ diff --git a/libcamera/34xx/ExynosCameraUtilsModule.h b/libcamera/34xx/ExynosCameraUtilsModule.h new file mode 100644 index 0000000..2874c7a --- /dev/null +++ b/libcamera/34xx/ExynosCameraUtilsModule.h @@ -0,0 +1,153 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
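[Editorial sketch, not part of the patch] ExynosCameraNodeGroupDIS::updateNodeGroupInfo() above shrinks the DIS output by HW_VDIS_W_RATIO / HW_VDIS_H_RATIO, aligns it up to an even number with ALIGN_UP, and then sets the SCP input crop equal to that DIS output. ALIGN_UP and the ratio values live in headers outside this hunk, so both are re-stated here with assumed definitions (a 1.2x VDIS margin is a common choice, but the real ratios are SoC specific).

#include <cstdio>

// Assumed definitions; the real ones live in the BSP's utility/config headers.
#define ALIGN_UP(x, a)   (((x) + (a) - 1) / (a) * (a))
static const double HW_VDIS_W_RATIO = 1.2;   // assumed value
static const double HW_VDIS_H_RATIO = 1.2;   // assumed value

int main()
{
    int inW = 1920, inH = 1080;              // DIS input (example)
    int outW = ALIGN_UP((int)(inW / HW_VDIS_W_RATIO), 2);
    int outH = ALIGN_UP((int)(inH / HW_VDIS_H_RATIO), 2);
    // The SCP input crop is then set equal to this DIS output.
    std::printf("DIS out / SCP in: %dx%d\n", outW, outH);
    return 0;
}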
+*/ + +#ifndef EXYNOS_CAMERA_UTILS_MODULE_H +#define EXYNOS_CAMERA_UTILS_MODULE_H + +#include +#include +#include + + +#include "ExynosCameraConfig.h" + +#include "ExynosRect.h" +#include "fimc-is-metadata.h" +/* HACK: BDS for FHD in Helsinki Prime */ +#include "ExynosCameraUtils.h" + +namespace android { + +/* + * Will deprecated this API. + * Please, use ExynosCameraNodeGroup class. + */ +void updateNodeGroupInfoMainPreview( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH); + +void updateNodeGroupInfoReprocessing( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSizePreview, + ExynosRect bayerCropSizePicture, + ExynosRect bdsSize, + int pictureW, int pictureH, + bool pureBayerReprocessing, + bool flag3aaIspOtf); + +void updateNodeGroupInfoFront( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH); + +class ExynosCameraNodeGroup { +private: + ExynosCameraNodeGroup(); + virtual ~ExynosCameraNodeGroup(); + +public: + /* this is for preview */ + static void updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH); + + /* this is for reprocessing */ + static void updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info_3aa, + camera2_node_group *node_group_info_isp, + ExynosRect bayerCropSizePreview, + ExynosRect bayerCropSizePicture, + ExynosRect bdsSize, + int pictureW, int pictureH, + bool pureBayerReprocessing, + bool flag3aaIspOtf); + +protected: + static void m_dump(const char *name, int cameraId, camera2_node_group *node_group_info); +}; + +class ExynosCameraNodeGroup3AA : public ExynosCameraNodeGroup { +private: + ExynosCameraNodeGroup3AA(); + virtual ~ExynosCameraNodeGroup3AA(); + +public: + /* this is for preview */ + static void updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH); +}; + +class ExynosCameraNodeGroupISP : public ExynosCameraNodeGroup { +private: + ExynosCameraNodeGroupISP(); + virtual ~ExynosCameraNodeGroupISP(); + +public: + /* this is for preview */ + static void updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH, + bool dis); +}; + +class ExynosCameraNodeGroupDIS : public ExynosCameraNodeGroup { +private: + ExynosCameraNodeGroupDIS(); + virtual ~ExynosCameraNodeGroupDIS(); + +public: + /* this is for preview */ + static void updateNodeGroupInfo( + int cameraId, + camera2_node_group *node_group_info, + ExynosRect bayerCropSize, + ExynosRect bdsSize, + int previewW, int previewH, + int pictureW, int pictureH, + bool dis = false); +}; + +}; /* namespace android */ + +#endif + diff --git a/libcamera/34xx/fimc-is-metadata.h b/libcamera/34xx/fimc-is-metadata.h new file mode 100644 index 0000000..95fc5d2 --- /dev/null +++ b/libcamera/34xx/fimc-is-metadata.h @@ -0,0 +1,1965 @@ +/* Copyright (c) 2015 Samsung Electronics Co, Ltd. 
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + + * + + * Alternatively, Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*2012.04.18 Version 0.1 Initial Release*/ +/*2012.04.23 Version 0.2 Added static metadata (draft)*/ +/*2012.07.04 Version 0.3 Applied google's undocumented changes (draft)*/ +/*2012.07.11 Version 0.4 Added FD parameters */ +/*2012.07.27 Version 0.5 Modified HSB control and DM */ +/*2012.08.28 Version 0.6 Added AA_SCENE_MODE_NIGHT_CAPTURE */ + +#if defined(SUPPORT_HAL3_3_METADATA) +#include "fimc-is-metadata_for_hal3.3.h" +#else + +#ifndef FIMC_IS_METADATA_H_ +#define FIMC_IS_METADATA_H_ + +#ifndef _LINUX_TYPES_H +typedef unsigned char uint8_t; +typedef unsigned short uint16_t; +typedef signed short int16_t; +typedef signed int int32_t; +typedef unsigned int uint32_t; +/*typedef unsigned long long uint64_t;*/ +#endif + +struct rational { + uint32_t num; + uint32_t den; +}; + +#define CAMERA2_MAX_AVAILABLE_MODE 21 +#define CAMERA2_MAX_FACES 16 +#define CAMERA2_MAX_VENDER_LENGTH 400 +#define CAPTURE_NODE_MAX 4 +#define CAMERA2_MAX_PDAF_MULTIROI_COLUMN 9 +#define CAMERA2_MAX_PDAF_MULTIROI_ROW 5 +#define CAMERA2_MAX_UCTL_VENDER_LENGTH 32 + + +#define OPEN_MAGIC_NUMBER 0x01020304 +#define SHOT_MAGIC_NUMBER 0x23456789 + +/* + *controls/dynamic metadata + */ + +/* android.request */ +enum metadata_mode { + METADATA_MODE_NONE, + METADATA_MODE_FULL +}; + +enum is_subscenario_id { + ISS_SUB_SCENARIO_STILL_PREVIEW = 0, /* 0: still preview */ + ISS_SUB_SCENARIO_VIDEO = 1, /* 1: video */ + ISS_SUB_SCENARIO_DUAL_STILL = 2, /* 2: dual still preview */ + ISS_SUB_SCENARIO_DUAL_VIDEO = 3, /* 3: dual video */ + ISS_SUB_SCENARIO_VIDEO_HIGH_SPEED = 4, /* 4: video high speed */ + ISS_SUB_SCENARIO_STILL_CAPTURE = 5, /* 5: still capture */ + ISS_SUB_SCENARIO_FHD_60FPS = 6, /* 6: video FHD 60fps */ + ISS_SUB_SCENARIO_UHD_30FPS = 7, /* 7: video UHD 30fps */ + ISS_SUB_SCENARIO_WVGA_300FPS = 8, /* 8: video WVGA 300fps */ + ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_ON = 9, + ISS_SUB_SCENARIO_VIDEO_WDR_ON = 10, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON = 11, + ISS_SUB_SCENARIO_UHD_30FPS_WDR_ON = 12, + ISS_SUB_SCENARIO_STILL_CAPTURE_ZOOM = 13, + ISS_SUB_SCENARIO_STILL_CAPTURE_ZOOM_OUTDOOR = 14, + ISS_SUB_SCENARIO_STILL_CAPTURE_ZOOM_INDOOR = 15, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_ZOOM = 16, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_ZOOM_OUTDOOR = 17, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_ZOOM_INDOOR = 18, + ISS_SUB_SCENARIO_STILL_CAPTURE_LLS = 19, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_LLS = 20, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO_ZOOM = 21, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO_ZOOM_OUTDOOR = 22, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO_ZOOM_INDOOR = 23, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO = 24, + ISS_SUB_SCENARIO_VIDEO_WDR_AUTO = 25, + ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_AUTO = 26, + ISS_SUB_SCENARIO_FHD_240FPS = 27, + + 
ISS_SUB_SCENARIO_FRONT_VT1 = 31, /* 31: front camera VT1 */ + ISS_SUB_SCENARIO_FRONT_VT2 = 32, /* 32: front camera VT2 */ + ISS_SUB_SCENARIO_FRONT_SMART_STAY = 33, /* 33: front camera smart stay */ + ISS_SUB_SCENARIO_FRONT_PANORAMA = 34, /* 34: front camera front panorama */ + ISS_SUB_SCENARIO_FRONT_C2_OFF_STILL_PREVIEW = 35, /* 35: C2 off front still preview */ + ISS_SUB_SCENARIO_FRONT_C2_OFF_STILL_CAPTURE = 36, /* 36: C2 off front still capture */ + ISS_SUB_SCENARIO_FRONT_C2_OFF_VIDEO = 37, /* 37: C2 off front video */ + ISS_SUB_SCENARIO_FRONT_VT4 = 38, /* 38: front camera VT4 */ + ISS_SUB_SCENARIO_FRONT_VT1_STILL_CAPTURE = 39, /* 39: front camera VT1 still capture */ + ISS_SUB_END, +}; + +enum available_capabilities { + REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE =0, + REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR, + REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING, + REQUEST_AVAILABLE_CAPABILITIES_RAW, + REQUEST_AVAILABLE_CAPABILITIES_ZSL, +}; +struct camera2_request_ctl { + uint32_t frameCount; + uint32_t id; + enum metadata_mode metadataMode; + + /* vendor feature */ + uint32_t vendor_frameCount; +}; + +struct camera2_request_dm { + uint32_t frameCount; + uint32_t id; + enum metadata_mode metadataMode; + uint8_t pipelineDepth; + + /* vendor feature */ + uint32_t vendor_frameCount; +}; + +struct camera2_request_sm { + uint32_t maxNumOutputStreams[3]; + uint32_t maxNumOutputRaw; + uint32_t maxNumOutputProc; + uint32_t maxNumOutputProcStalling; + uint32_t maxNumInputStreams; + uint8_t pipelineMaxDepth; + uint32_t partialResultCount; + uint8_t availableCapabilities[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t availableRequestKeys[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t availableResultKeys[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t availableCharacteristicsKeys[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +struct camera2_entry_ctl { + /** \brief + per-frame control for entry control + \remarks + low parameter is 0bit ~ 31bit flag + high parameter is 32bit ~ 63bit flag + */ + uint32_t lowIndexParam; + uint32_t highIndexParam; + uint32_t parameter[2048]; +}; + +struct camera2_entry_dm { + uint32_t lowIndexParam; + uint32_t highIndexParam; +}; + +/* android.lens */ +enum optical_stabilization_mode { + OPTICAL_STABILIZATION_MODE_OFF = 0, + OPTICAL_STABILIZATION_MODE_ON = 1, + + /* vendor feature */ + OPTICAL_STABILIZATION_MODE_STILL = 100, // Still mode + OPTICAL_STABILIZATION_MODE_STILL_ZOOM, // Still Zoom mode + OPTICAL_STABILIZATION_MODE_VIDEO, // Recording mode + OPTICAL_STABILIZATION_MODE_SINE_X, // factory mode x + OPTICAL_STABILIZATION_MODE_SINE_Y, // factory mode y + OPTICAL_STABILIZATION_MODE_CENTERING, // Centering mode + OPTICAL_STABILIZATION_MODE_VDIS, + OPTICAL_STABILIZATION_MODE_VIDEO_RATIO_4_3, // Recording mode +}; + +enum lens_state { + LENS_STATE_STATIONARY = 0, + LENS_STATE_MOVING, +}; + +enum lens_focus_distance_calibration { + LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0, + LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE, + LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED, +}; + +enum lens_facing { + LENS_FACING_BACK, + LENS_FACING_FRONT +}; + +struct camera2_lens_ctl { + float aperture; + float filterDensity; + float focalLength; + float focusDistance; + enum optical_stabilization_mode opticalStabilizationMode; +}; + +struct camera2_lens_dm { + float aperture; + float filterDensity; + float focalLength; + float focusDistance; + float focusRange[2]; + enum optical_stabilization_mode opticalStabilizationMode; + enum lens_state state; +}; + +struct camera2_lens_sm { + 
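+	/*
+	 * Editorial note (not part of the original source): throughout this header
+	 * each android.* category is described by up to three structs - *_ctl holds
+	 * the per-frame controls set by the HAL, *_dm the dynamic (result) metadata
+	 * returned for that frame, and *_sm (such as this lens block) the static
+	 * capability metadata. The *_uctl/*_udm/*_usm structs further below are the
+	 * ispfw-specific ("user-defined") counterparts.
+	 */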
float availableApertures[CAMERA2_MAX_AVAILABLE_MODE]; /* assuming 1 aperture */ + float availableFilterDensities[CAMERA2_MAX_AVAILABLE_MODE]; /* assuming 1 ND filter value */ + float availableFocalLength[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableOpticalStabilization[CAMERA2_MAX_AVAILABLE_MODE]; /* assuming # of enum optical_stabilization_mode */ + float hyperfocalDistance; + float minimumFocusDistance; + uint32_t shadingMapSize[2]; + enum lens_focus_distance_calibration focusDistanceCalibration; + enum lens_facing facing; + float opticalAxisAngle[2]; /* degrees: [0~90, 0~360] */ + float position[3]; +}; + +/* android.sensor */ +enum sensor_test_pattern_mode { + SENSOR_TEST_PATTERN_MODE_OFF = 1, + SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, + SENSOR_TEST_PATTERN_MODE_COLOR_BARS, + SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, + SENSOR_TEST_PATTERN_MODE_PN9, + SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 257, +}; + +enum sensor_colorfilterarrangement { + SENSOR_COLORFILTERARRANGEMENT_RGGB, + SENSOR_COLORFILTERARRANGEMENT_GRBG, + SENSOR_COLORFILTERARRANGEMENT_GBRG, + SENSOR_COLORFILTERARRANGEMENT_BGGR, + SENSOR_COLORFILTERARRANGEMENT_RGB +}; + +enum sensor_timestamp_calibration { + SENSOR_INFO_TIMESTAMP_CALIBRATION_UNCALIBRATED = 0, + SENSOR_INFO_TIMESTAMP_CALIBRATION_CALIBRATED, +}; + +enum sensor_ref_illuminant { + SENSOR_ILLUMINANT_DAYLIGHT = 1, + SENSOR_ILLUMINANT_FLUORESCENT = 2, + SENSOR_ILLUMINANT_TUNGSTEN = 3, + SENSOR_ILLUMINANT_FLASH = 4, + SENSOR_ILLUMINANT_FINE_WEATHER = 9, + SENSOR_ILLUMINANT_CLOUDY_WEATHER = 10, + SENSOR_ILLUMINANT_SHADE = 11, + SENSOR_ILLUMINANT_DAYLIGHT_FLUORESCENT = 12, + SENSOR_ILLUMINANT_DAY_WHITE_FLUORESCENT = 13, + SENSOR_ILLUMINANT_COOL_WHITE_FLUORESCENT = 14, + SENSOR_ILLUMINANT_WHITE_FLUORESCENT = 15, + SENSOR_ILLUMINANT_STANDARD_A = 17, + SENSOR_ILLUMINANT_STANDARD_B = 18, + SENSOR_ILLUMINANT_STANDARD_C = 19, + SENSOR_ILLUMINANT_D55 = 20, + SENSOR_ILLUMINANT_D65 = 21, + SENSOR_ILLUMINANT_D75 = 22, + SENSOR_ILLUMINANT_D50 = 23, + SENSOR_ILLUMINANT_ISO_STUDIO_TUNGSTEN = 24 +}; + +struct camera2_sensor_ctl { + uint64_t exposureTime; /* unit : nano */ + uint64_t frameDuration; /* unit : nano */ + uint32_t sensitivity; /* unit : ISO arithmetic units (Min <= 100, Max >= 1600) */ + int32_t testPatternData[4]; /*[R, G_even, G_odd, B] */ // TODO: [API32] not implemented yet + enum sensor_test_pattern_mode testPatternMode; // TODO: [API32] not implemented yet +}; + +struct camera2_sensor_dm { + uint64_t exposureTime; + uint64_t frameDuration; + uint32_t sensitivity; + uint64_t timeStamp; + float temperature; + struct rational neutralColorPoint[3]; /* [R, G, B] */ + double noiseProfile[4][2]; + /* [4] # of Color Filter Arrangement channel */ + /* [2] sensor amplification (S) and sensor readout noise (O) */ + float profileHueSatMap[2][2][2][3]; + /* cylindrical coordinate */ + /* [2] TODO: # of samples profileHueSatMapDimensions[0]*/ + /* [2] TODO: # of samples profileHueSatMapDimensions[1]*/ + /* [2] TODO: # of samples profileHueSatMapDimensions[2]*/ + /* [3]*/ + float profileToneCurve[32][2]; + float greenSplit; + int32_t testPatternData[4]; + enum sensor_test_pattern_mode testPatternMode; + uint64_t rollingShutterSkew; +}; + +struct camera2_sensor_sm { + uint32_t activeArraySize[4]; + uint32_t sensitivityRange[2]; + enum sensor_colorfilterarrangement colorFilterArrangement; + uint64_t exposureTimeRange[2]; /* unit : nano */ + uint64_t maxFrameDuration; /* unit : nano */ + float physicalSize[2]; + uint32_t pixelArraySize[2]; + uint32_t whiteLevel; + enum 
sensor_timestamp_calibration timestampCalibration; + enum sensor_ref_illuminant referenceIlluminant1; + enum sensor_ref_illuminant referenceIlluminant2; + struct rational calibrationTransform1[9]; + struct rational calibrationTransform2[9]; + struct rational colorTransform1[9]; + struct rational colorTransform2[9]; + struct rational forwardMatrix1[9]; + struct rational forwardMatrix2[9]; + struct rational baseGainFactor; + uint32_t blackLevelPattern[4]; + uint32_t maxAnalogSensitivity; + uint32_t orientation; + uint32_t profileHueSatMapDimensions[3]; /* [hue, saturation, value] */ + uint32_t availableTestPatternModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.flash */ +enum flash_mode { + CAM2_FLASH_MODE_NONE = 0, + CAM2_FLASH_MODE_OFF, + CAM2_FLASH_MODE_SINGLE, + CAM2_FLASH_MODE_TORCH, + + /* vendor feature */ + CAM2_FLASH_MODE_BEST = 100, + CAM2_FLASH_MODE_LCD = 101, +}; + +enum flash_state { + FLASH_STATE_UNAVAILABLE = 0, + FLASH_STATE_CHARGING, + FLASH_STATE_READY, + FLASH_STATE_FIRED, + FLASH_STATE_PARTIAL, +}; + +enum capture_state { + CAPTURE_STATE_NONE = 0, + CAPTURE_STATE_FLASH = 1, + CAPTURE_STATE_HDR_DARK = 12, + CAPTURE_STATE_HDR_NORMAL = 13, + CAPTURE_STATE_HDR_BRIGHT = 14, + CAPTURE_STATE_ZSL_LIKE = 20, + CAPTURE_STATE_STREAM_ON_CAPTURE = 30, + CAPTURE_STATE_RAW_CAPTURE = 100, +}; /* firingStable state */ + +enum flash_info_available { + FLASH_INFO_AVAILABLE_FALSE = 0, + FLASH_INFO_AVAILABLE_TRUE, +}; + +struct camera2_flash_ctl { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; +}; + +struct camera2_flash_dm { + uint32_t firingPower; /*10 is max power*/ + uint64_t firingTime; /*unit : microseconds*/ + enum flash_mode flashMode; + enum flash_state flashState; + + /* vendor feature */ + uint32_t vendor_firingStable; /*1 : stable, 0 : unstable*/ + uint32_t vendor_decision; /*1 : success, 0 : fail*/ + uint32_t vendor_flashReady; /*0: None, 1 : pre, 2 : main flash ready*/ + uint32_t vendor_flashOffReady; /*0: None, 1 : pre, 2 : main flash off ready*/ +}; + +struct camera2_flash_sm { + enum flash_info_available available; + uint64_t chargeDuration; + uint8_t colorTemperature; + uint8_t maxEnergy; +}; + +/* android.hotpixel */ +enum processing_mode { + PROCESSING_MODE_OFF = 1, + PROCESSING_MODE_FAST, + PROCESSING_MODE_HIGH_QUALITY +}; + +struct camera2_hotpixel_ctl { + enum processing_mode mode; +}; + +struct camera2_hotpixel_dm { + enum processing_mode mode; +}; + +struct camera2_hotpixel_sm { + uint8_t availableHotPixelModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.demosaic */ +enum demosaic_processing_mode { + DEMOSAIC_PROCESSING_MODE_FAST = 1, + DEMOSAIC_PROCESSING_MODE_HIGH_QUALITY +}; + +struct camera2_demosaic_ctl { + enum demosaic_processing_mode mode; +}; + +struct camera2_demosaic_dm { + enum demosaic_processing_mode mode; +}; + +/* android.noiseReduction */ +struct camera2_noisereduction_ctl { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_noisereduction_dm { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_noisereduction_sm { + uint8_t availableNoiseReductionModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.shading */ +struct camera2_shading_ctl { + enum processing_mode mode; + uint8_t strength; /* Range: 1 ~ 10 */ +}; + +struct camera2_shading_dm { + enum processing_mode mode; + uint8_t strength; /* Range: 1 ~ 10 */ +}; + +/* android.colorCorrection */ +enum colorcorrection_mode { + COLORCORRECTION_MODE_TRANSFORM_MATRIX = 1, + COLORCORRECTION_MODE_FAST, + 
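+	/*
+	 * Editorial note (not part of the original source): by standard camera2
+	 * semantics, the transform[9] (3x3, row-major) and gains[4]
+	 * ([R, G_even, G_odd, B]) fields of the ctl/dm structs below are only
+	 * honored when this mode is TRANSFORM_MATRIX; FAST and HIGH_QUALITY leave
+	 * color correction to the ISP. This is a reading of the field layout, not
+	 * something stated in this header.
+	 */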
COLORCORRECTION_MODE_HIGH_QUALITY, +}; + +struct camera2_colorcorrection_ctl { + enum colorcorrection_mode mode; + struct rational transform[9]; + float gains[4]; + enum processing_mode aberrationCorrectionMode; + + /* vendor feature */ + uint32_t vendor_hue; + uint32_t vendor_saturation; + uint32_t vendor_brightness; + uint32_t vendor_contrast; +}; + +struct camera2_colorcorrection_dm { + enum colorcorrection_mode mode; + struct rational transform[9]; + float gains[4]; + enum processing_mode aberrationCorrectionMode; + + /* vendor feature */ + uint32_t vendor_hue; + uint32_t vendor_saturation; + uint32_t vendor_brightness; + uint32_t vendor_contrast; +}; + +struct camera2_colorcorrection_sm { + uint8_t availableModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableAberrationCorrectionModes[CAMERA2_MAX_AVAILABLE_MODE]; + + /* vendor feature */ + uint32_t vendor_hueRange[2]; + uint32_t vendor_saturationRange[2]; + uint32_t vendor_brightnessRange[2]; + uint32_t vendor_contrastRange[2]; +}; + +/* android.tonemap */ +enum tonemap_mode { + TONEMAP_MODE_CONTRAST_CURVE = 1, + TONEMAP_MODE_FAST, + TONEMAP_MODE_HIGH_QUALITY, +}; + +struct camera2_tonemap_ctl { + /* assuming maxCurvePoints = 64 */ + float curveBlue[64]; + float curveGreen[64]; + float curveRed[64]; + float curve; + enum tonemap_mode mode; +}; + +struct camera2_tonemap_dm { + /* assuming maxCurvePoints = 64 */ + float curveBlue[64]; + float curveGreen[64]; + float curveRed[64]; + float curve; + enum tonemap_mode mode; +}; + +struct camera2_tonemap_sm { + uint32_t maxCurvePoints; + uint8_t availableToneMapModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.edge */ +struct camera2_edge_ctl { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_edge_dm { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_edge_sm { + uint8_t availableEdgeModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.scaler */ +struct camera2_scaler_ctl { + uint32_t cropRegion[4]; + /* cropRegion[0]: X offset, cropRegion[1]: Y offset + * cropRegion[2]: width, cropRegion[3]: height + */ +}; + +struct camera2_scaler_dm { + uint32_t cropRegion[4]; + /* cropRegion[0]: X offset, cropRegion[1]: Y offset + * cropRegion[2]: width, cropRegion[3]: height + */ +}; + +enum available_stream_configurations { + SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0, + SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT, +}; + +enum scaler_cropping_type { + SCALER_CROPPING_TYPE_CENTER_ONLY = 0, + SCALER_CROPPING_TYPE_FREEFORM, +}; + +struct camera2_scaler_sm { + float availableMaxDigitalZoom; + int32_t availableInputOutputFormatsMap[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableStreamConfigurations[CAMERA2_MAX_AVAILABLE_MODE][4]; /* [format, width, height, out/in] */ + uint64_t availableMinFrameDurations[CAMERA2_MAX_AVAILABLE_MODE][4]; /* [format, width, height, ns] */ + uint64_t availableStallDurations[CAMERA2_MAX_AVAILABLE_MODE][4]; /* [format, width, height, ns] */ + int32_t streamConfigurationMap; + enum scaler_cropping_type croppingType; +}; + +/* android.jpeg */ +struct camera2_jpeg_ctl { + uint8_t gpsLocation; + double gpsCoordinates[3]; + uint8_t* gpsProcessingMethod; + uint64_t gpsTimestamp; + uint32_t orientation; + uint8_t quality; + uint8_t thumbnailQuality; + uint32_t thumbnailSize[2]; +}; + +struct camera2_jpeg_dm { + uint8_t gpsLocation; + double gpsCoordinates[3]; + uint8_t* gpsProcessingMethod; + uint64_t gpsTimestamp; + uint32_t orientation; + uint8_t quality; + uint32_t size; /* unit: byte(1000000 => 1 MB) */ + uint8_t 
thumbnailQuality; + uint32_t thumbnailSize[2]; +}; + +struct camera2_jpeg_sm { + uint32_t availableThumbnailSizes[8][2]; + uint32_t maxSize; /* unit: byte */ +}; + +/* android.statistics */ +enum facedetect_mode { + FACEDETECT_MODE_OFF = 1, + FACEDETECT_MODE_SIMPLE, + FACEDETECT_MODE_FULL +}; + +enum stats_mode { + STATS_MODE_OFF = 1, + STATS_MODE_ON +}; + +enum stats_scene_flicker { + STATISTICS_SCENE_FLICKER_NONE = 1, + STATISTICS_SCENE_FLICKER_50HZ, + STATISTICS_SCENE_FLICKER_60HZ, +}; + +enum stats_lowlightmode { + STATE_LLS_LEVEL_ZSL = 0, + STATE_LLS_LEVEL_LOW = 1, + STATE_LLS_LEVEL_HIGH = 2, + STATE_LLS_LEVEL_SIS = 3, + STATE_LLS_LEVEL_ZSL_LIKE = 4, + STATE_LLS_LEVEL_ZSL_FLASH = 16, +}; + +enum stats_wdrAutoState { + STATE_WDR_AUTO_OFF = 1, + STATE_WDR_AUTO_REQUIRED = 2, +}; + +struct camera2_stats_ctl { + enum facedetect_mode faceDetectMode; + enum stats_mode histogramMode; + enum stats_mode sharpnessMapMode; + enum stats_mode hotPixelMapMode; + enum stats_mode lensShadingMapMode; +}; + +struct camera2_stats_dm { + enum facedetect_mode faceDetectMode; + uint32_t faceIds[CAMERA2_MAX_FACES]; + uint32_t faceLandmarks[CAMERA2_MAX_FACES][6]; + uint32_t faceRectangles[CAMERA2_MAX_FACES][4]; + uint8_t faceScores[CAMERA2_MAX_FACES]; + uint32_t faces[CAMERA2_MAX_FACES]; + uint32_t histogram[3 * 256]; + enum stats_mode histogramMode; + int32_t sharpnessMap[2][2][3]; + + enum stats_mode sharpnessMapMode; + uint8_t lensShadingCorrectionMap; + float lensShadingMap[2][2][4]; + /* [2] TODO: lens_sm.shadingMapSize[0] */ + /* [2] TODO: lens_sm.shadingMapSize[1] */ + /* [4] [R, G_even, G_odd, B] */ + enum stats_scene_flicker sceneFlicker; + enum stats_mode hotPixelMapMode; + int32_t hotPixelMap[CAMERA2_MAX_AVAILABLE_MODE][2]; + enum stats_mode lensShadingMapMode; + + /* vendor feature */ + enum stats_lowlightmode vendor_LowLightMode; + uint32_t vendor_lls_tuning_set_index; + uint32_t vendor_lls_brightness_index; + enum stats_wdrAutoState vendor_wdrAutoState; +}; + +struct camera2_stats_sm { + /*assuming supported modes = 3;*/ + uint8_t availableFaceDetectModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t histogramBucketCount; + uint32_t maxFaceCount; + uint32_t maxHistogramCount; + uint32_t maxSharpnessMapValue; + uint32_t sharpnessMapSize[2]; /* at least 32 x 32 */ + uint32_t availableHotPixelMapModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.control */ +enum aa_capture_intent { + AA_CAPTURE_INTENT_CUSTOM = 0, + AA_CAPTURE_INTENT_PREVIEW, + AA_CAPTURE_INTENT_STILL_CAPTURE, + AA_CAPTURE_INTENT_VIDEO_RECORD, + AA_CAPTURE_INTENT_VIDEO_SNAPSHOT, + AA_CAPTURE_INTENT_ZERO_SHUTTER_LAG, + AA_CAPTURE_INTENT_MANUAL, + + /* vendor feature */ + AA_CAPTURE_INTENT_STILL_CAPTURE_OIS_SINGLE = 100, + AA_CAPTURE_INTENT_STILL_CAPTURE_OIS_MULTI, + AA_CAPTURE_INTENT_STILL_CAPTURE_OIS_BEST, + AA_CAPTRUE_INTENT_STILL_CAPTURE_COMP_BYPASS, + AA_CAPTRUE_INTENT_STILL_CAPTURE_DYNAMIC_SHOT = 105, +}; + +enum aa_mode { + AA_CONTROL_OFF = 1, + AA_CONTROL_AUTO, + AA_CONTROL_USE_SCENE_MODE, + AA_CONTROL_OFF_KEEP_STATE, +}; + +enum aa_scene_mode { + AA_SCENE_MODE_DISABLED = 1, + AA_SCENE_MODE_FACE_PRIORITY, + AA_SCENE_MODE_ACTION, + AA_SCENE_MODE_PORTRAIT, + AA_SCENE_MODE_LANDSCAPE, + AA_SCENE_MODE_NIGHT, + AA_SCENE_MODE_NIGHT_PORTRAIT, + AA_SCENE_MODE_THEATRE, + AA_SCENE_MODE_BEACH, + AA_SCENE_MODE_SNOW, + AA_SCENE_MODE_SUNSET, + AA_SCENE_MODE_STEADYPHOTO, + AA_SCENE_MODE_FIREWORKS, + AA_SCENE_MODE_SPORTS, + AA_SCENE_MODE_PARTY, + AA_SCENE_MODE_CANDLELIGHT, + AA_SCENE_MODE_BARCODE, + AA_SCENE_MODE_HIGH_SPEED_VIDEO, + 
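+	/*
+	 * Editorial note (not part of the original source): as in standard camera2,
+	 * a scene mode is only applied when camera2_aa_ctl mode is set to
+	 * AA_CONTROL_USE_SCENE_MODE; the values from 100 upward are vendor
+	 * extensions with no framework equivalent.
+	 */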
AA_SCENE_MODE_HDR, + + /* vendor feature */ + AA_SCENE_MODE_NIGHT_CAPTURE = 100, + AA_SCENE_MODE_ANTISHAKE, + AA_SCENE_MODE_LLS, + AA_SCENE_MODE_FDAE, + AA_SCENE_MODE_DUAL, + AA_SCENE_MODE_DRAMA, + AA_SCENE_MODE_ANIMATED, + AA_SCENE_MODE_PANORAMA, + AA_SCENE_MODE_GOLF, + AA_SCENE_MODE_PREVIEW, + AA_SCENE_MODE_VIDEO, + AA_SCENE_MODE_SLOWMOTION_2, + AA_SCENE_MODE_SLOWMOTION_4_8, + AA_SCENE_MODE_DUAL_PREVIEW, + AA_SCENE_MODE_DUAL_VIDEO, + AA_SCENE_MODE_120_PREVIEW, + AA_SCENE_MODE_LIGHT_TRACE, + AA_SCENE_MODE_FOOD, + AA_SCENE_MODE_AQUA +}; + +enum aa_effect_mode { + AA_EFFECT_OFF = 1, + AA_EFFECT_MONO, + AA_EFFECT_NEGATIVE, + AA_EFFECT_SOLARIZE, + AA_EFFECT_SEPIA, + AA_EFFECT_POSTERIZE, + AA_EFFECT_WHITEBOARD, + AA_EFFECT_BLACKBOARD, + AA_EFFECT_AQUA, + + /* vendor feature */ + AA_EFFECT_EMBOSS = 100, + AA_EFFECT_EMBOSS_MONO, + AA_EFFECT_SKETCH, + AA_EFFECT_RED_YELLOW_POINT, + AA_EFFECT_GREEN_POINT, + AA_EFFECT_BLUE_POINT, + AA_EFFECT_MAGENTA_POINT, + AA_EFFECT_WARM_VINTAGE, + AA_EFFECT_COLD_VINTAGE, + AA_EFFECT_WASHED, + AA_EFFECT_BEAUTY_FACE, + +}; + +enum aa_ae_lock { + AA_AE_LOCK_OFF = 1, + AA_AE_LOCK_ON, +}; + +enum aa_aemode { + AA_AEMODE_OFF = 1, + AA_AEMODE_ON, + AA_AEMODE_ON_AUTO_FLASH, + AA_AEMODE_ON_ALWAYS_FLASH, + AA_AEMODE_ON_AUTO_FLASH_REDEYE, + + /* vendor feature */ + AA_AEMODE_CENTER = 100, + AA_AEMODE_AVERAGE, + AA_AEMODE_MATRIX, + AA_AEMODE_SPOT, + AA_AEMODE_CENTER_TOUCH, + AA_AEMODE_AVERAGE_TOUCH, + AA_AEMODE_MATRIX_TOUCH, + AA_AEMODE_SPOT_TOUCH, + UNKNOWN_AA_AE_MODE +}; + +enum aa_ae_flashmode { + /*all flash control stop*/ + AA_FLASHMODE_OFF = 1, + /*flash start*/ + AA_FLASHMODE_START, + /*flash cancle*/ + AA_FLASHMODE_CANCEL, + /*internal 3A can control flash*/ + AA_FLASHMODE_ON, + /*internal 3A can do auto flash algorithm*/ + AA_FLASHMODE_AUTO, + /*internal 3A can fire flash by auto result*/ + AA_FLASHMODE_CAPTURE, + /*internal 3A can control flash forced*/ + AA_FLASHMODE_ON_ALWAYS +}; + +enum aa_ae_antibanding_mode { + AA_AE_ANTIBANDING_OFF = 1, + AA_AE_ANTIBANDING_50HZ, + AA_AE_ANTIBANDING_60HZ, + AA_AE_ANTIBANDING_AUTO, + + /* vendor feature */ + AA_AE_ANTIBANDING_AUTO_50HZ = 100, /*50Hz + Auto*/ + AA_AE_ANTIBANDING_AUTO_60HZ /*60Hz + Auto*/ +}; + +enum aa_awb_lock { + AA_AWB_LOCK_OFF = 1, + AA_AWB_LOCK_ON, +}; + +enum aa_awbmode { + AA_AWBMODE_OFF = 1, + AA_AWBMODE_WB_AUTO, + AA_AWBMODE_WB_INCANDESCENT, + AA_AWBMODE_WB_FLUORESCENT, + AA_AWBMODE_WB_WARM_FLUORESCENT, + AA_AWBMODE_WB_DAYLIGHT, + AA_AWBMODE_WB_CLOUDY_DAYLIGHT, + AA_AWBMODE_WB_TWILIGHT, + AA_AWBMODE_WB_SHADE +}; + +enum aa_ae_precapture_trigger { + AA_AE_PRECAPTURE_TRIGGER_IDLE = 0, + AA_AE_PRECAPTURE_TRIGGER_START, +}; + +enum aa_afmode { + AA_AFMODE_OFF = 1, + AA_AFMODE_AUTO, /* Start @ (afTrigger == 1) */ + AA_AFMODE_MACRO, /* Start @ (afTrigger == 1) */ + AA_AFMODE_CONTINUOUS_VIDEO, + AA_AFMODE_CONTINUOUS_PICTURE, + AA_AFMODE_EDOF, /* Not supported yet */ +}; + +enum aa_afmode_option_bit { + AA_AFMODE_OPTION_BIT_VIDEO = 0, + AA_AFMODE_OPTION_BIT_MACRO = 1, + AA_AFMODE_OPTION_BIT_FACE = 2, + AA_AFMODE_OPTION_BIT_DELAYED = 3, + AA_AFMODE_OPTION_BIT_OUT_FOCUSING = 4, + AA_AFMODE_OPTION_BIT_OBJECT_TRACKING = 5, +}; + +enum aa_afmode_ext { + AA_AFMODE_EXT_OFF = 1, + /* Increase macro range for special app */ + AA_AFMODE_EXT_ADVANCED_MACRO_FOCUS = 2, + /* Set AF region for OCR */ + AA_AFMODE_EXT_FOCUS_LOCATION = 3, +}; + +enum aa_af_trigger { + AA_AF_TRIGGER_IDLE = 0, + AA_AF_TRIGGER_START, + AA_AF_TRIGGER_CANCEL, +}; + +enum aa_afstate { + AA_AFSTATE_INACTIVE = 1, + AA_AFSTATE_PASSIVE_SCAN, + 
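+	/*
+	 * Editorial note (not part of the original source): these values mirror the
+	 * Android camera2 AF state machine. In AUTO/MACRO modes an
+	 * AA_AF_TRIGGER_START request drives INACTIVE -> ACTIVE_SCAN ->
+	 * FOCUSED_LOCKED or NOT_FOCUSED_LOCKED, and AA_AF_TRIGGER_CANCEL returns to
+	 * INACTIVE; the CONTINUOUS_* modes cycle through the PASSIVE_* states on
+	 * their own.
+	 */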
AA_AFSTATE_PASSIVE_FOCUSED, + AA_AFSTATE_ACTIVE_SCAN, + AA_AFSTATE_FOCUSED_LOCKED, + AA_AFSTATE_NOT_FOCUSED_LOCKED, + AA_AFSTATE_PASSIVE_UNFOCUSED, +}; + +enum ae_state { + AE_STATE_INACTIVE = 1, + AE_STATE_SEARCHING, + AE_STATE_CONVERGED, + AE_STATE_LOCKED, + AE_STATE_FLASH_REQUIRED, + AE_STATE_PRECAPTURE, + AE_STATE_LOCKED_CONVERGED = 10, + AE_STATE_LOCKED_FLASH_REQUIRED, + AE_STATE_SEARCHING_FLASH_REQUIRED, +}; + +enum awb_state { + AWB_STATE_INACTIVE = 1, + AWB_STATE_SEARCHING, + AWB_STATE_CONVERGED, + AWB_STATE_LOCKED +}; + +enum aa_videostabilization_mode { + VIDEO_STABILIZATION_MODE_OFF = 0, + VIDEO_STABILIZATION_MODE_ON, +}; + +enum aa_isomode { + AA_ISOMODE_AUTO = 1, + AA_ISOMODE_MANUAL, +}; + +enum aa_cameraid { + AA_CAMERAID_FRONT = 1, + AA_CAMERAID_REAR, +}; + +enum aa_videomode { + AA_VIDEOMODE_OFF = 0, + AA_VIDEOMODE_ON, +}; + +enum aa_ae_facemode { + AA_AE_FACEMODE_OFF = 0, + AA_AE_FACEMODE_ON, +}; + +struct camera2_aa_ctl { + enum aa_ae_antibanding_mode aeAntibandingMode; + int32_t aeExpCompensation; + enum aa_ae_lock aeLock; + enum aa_aemode aeMode; + uint32_t aeRegions[5]; /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/ + uint32_t aeTargetFpsRange[2]; + enum aa_ae_precapture_trigger aePrecaptureTrigger; + enum aa_afmode afMode; + uint32_t afRegions[5]; /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/ + enum aa_af_trigger afTrigger; + enum aa_awb_lock awbLock; + enum aa_awbmode awbMode; + uint32_t awbRegions[5]; /* not supported */ + enum aa_capture_intent captureIntent; + enum aa_effect_mode effectMode; + enum aa_mode mode; + enum aa_scene_mode sceneMode; + enum aa_videostabilization_mode videoStabilizationMode; + + /* vendor feature */ + float vendor_aeExpCompensationStep; + uint32_t vendor_afmode_option; + enum aa_afmode_ext vendor_afmode_ext; + enum aa_ae_flashmode vendor_aeflashMode; + enum aa_isomode vendor_isoMode; + uint32_t vendor_isoValue; + int32_t vendor_awbValue; + enum aa_cameraid vendor_cameraId; + enum aa_videomode vendor_videoMode; + enum aa_ae_facemode vendor_aeFaceMode; + enum aa_afstate vendor_afState; + int32_t vendor_exposureValue; + uint32_t vendor_touchAeDone; + uint32_t vendor_touchBvChange; + uint32_t vendor_captureCount; + uint32_t vendor_captureExposureTime; + uint32_t vendor_reserved[4]; +}; + +struct camera2_aa_dm { + enum aa_ae_antibanding_mode aeAntibandingMode; + int32_t aeExpCompensation; + enum aa_ae_lock aeLock; + enum aa_aemode aeMode; + uint32_t aeRegions[5]; /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/ + uint32_t aeTargetFpsRange[2]; + enum aa_ae_precapture_trigger aePrecaptureTrigger; + enum ae_state aeState; + enum aa_afmode afMode; + uint32_t afRegions[5]; /*5 per region(x1,y1,x2,y2,weight). 
currently assuming 1 region*/ + enum aa_af_trigger afTrigger; + enum aa_afstate afState; + enum aa_awb_lock awbLock; + enum aa_awbmode awbMode; + uint32_t awbRegions[5]; + enum aa_capture_intent captureIntent; + enum awb_state awbState; + enum aa_effect_mode effectMode; + enum aa_mode mode; + enum aa_scene_mode sceneMode; + enum aa_videostabilization_mode videoStabilizationMode; + + /* vendor feature */ + float vendor_aeExpCompensationStep; + uint32_t vendor_afmode_option; + enum aa_afmode_ext vendor_afmode_ext; + enum aa_ae_flashmode vendor_aeflashMode; + enum aa_isomode vendor_isoMode; + uint32_t vendor_isoValue; + int32_t vendor_awbValue; + enum aa_cameraid vendor_cameraId; + enum aa_videomode vendor_videoMode; + enum aa_ae_facemode vendor_aeFaceMode; + enum aa_afstate vendor_afState; + int32_t vendor_exposureValue; + uint32_t vendor_touchAeDone; + uint32_t vendor_touchBvChange; + uint32_t vendor_captureCount; + uint32_t vendor_captureExposureTime; + uint32_t vendor_reserved[4]; +}; + +struct camera2_aa_sm { + uint8_t aeAvailableAntibandingModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t aeAvailableModes[CAMERA2_MAX_AVAILABLE_MODE]; /*assuming # of available ae modes = 8*/ + uint32_t aeAvailableTargetFpsRanges[CAMERA2_MAX_AVAILABLE_MODE][2]; + int32_t aeCompensationRange[2]; + struct rational aeCompensationStep; + uint8_t afAvailableModes[CAMERA2_MAX_AVAILABLE_MODE]; /*assuming # of afAvailableModes = 4*/ + uint8_t availableEffects[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableSceneModes[CAMERA2_MAX_AVAILABLE_MODE]; /*assuming # of available scene modes = 10*/ + uint8_t availableVideoStabilizationModes[4]; /*assuming # of availableVideoStabilizationModes = 4*/ + uint8_t awbAvailableModes[CAMERA2_MAX_AVAILABLE_MODE]; /*assuming # of awbAvailableModes = 10*/ + uint32_t maxRegions[3]; + uint32_t maxRegionsAe; + uint32_t maxRegionsAwb; + uint32_t maxRegionsAf; // TODO: [API32] not implemented yet + uint8_t sceneModeOverrides[CAMERA2_MAX_AVAILABLE_MODE][3]; /* [AE, AWB, AF] */ // TODO: [API32] not implemented yet + uint32_t availableHighSpeedVideoConfigurations[CAMERA2_MAX_AVAILABLE_MODE][4]; /* [width, height, fps_min, fps_max] */ // TODO: [API32] not implemented yet + + /* vendor feature */ + uint32_t vendor_isoRange[2]; +}; + +/* android.led */ +enum led_transmit { + TRANSMIT_OFF = 0, + TRANSMIT_ON, +}; + +struct camera2_led_ctl { + enum led_transmit transmit; +}; + +struct camera2_led_dm { + enum led_transmit transmit; +}; + +struct camera2_led_sm { + uint8_t availableLeds[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.info */ +enum info_supported_hardware_level { + INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0, + INFO_SUPPORTED_HARDWARE_LEVEL_FULL, + INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, +}; + +struct camera2_info_sm { + enum info_supported_hardware_level supportedHardwareLevel; +}; + +/* android.blacklevel */ +enum blacklevel_lock { + BLACK_LEVEL_LOCK_OFF = 0, + BLACK_LEVEL_LOCK_ON, +}; + +struct camera2_blacklevel_ctl { + enum blacklevel_lock lock; +}; + +struct camera2_blacklevel_dm { + enum blacklevel_lock lock; +}; + +/* android.sync */ +enum sync_frame_number { + SYNC_FRAME_NUMBER_CONVERGING = -1, + SYNC_FRAME_NUMBER_UNKNOWN = -2, +}; + +enum sync_max_latency { + SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, + SYNC_MAX_LATENCY_UNKNOWN = -1, +}; + +struct camera2_sync_ctl { + int64_t frameNumber; +}; + +struct camera2_sync_dm { + int32_t maxLatency; +}; + +struct camera2_lens_usm { + uint32_t focusDistanceFrameDelay; +}; + +struct camera2_sensor_usm { + uint32_t exposureTimeFrameDelay; + 
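+	/*
+	 * Editorial note / assumption (not part of the original source): the
+	 * *FrameDelay fields appear to publish, per control, how many frames pass
+	 * before a newly requested value takes effect on the sensor, so the HAL can
+	 * line results up with requests. This is inferred from the field names and
+	 * is not documented here.
+	 */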
uint32_t frameDurationFrameDelay; + uint32_t sensitivityFrameDelay; +}; + +struct camera2_flash_usm { + uint32_t flashModeFrameDelay; + uint32_t firingPowerFrameDelay; + uint64_t firingTimeFrameDelay; +}; + +struct camera2_ctl { + struct camera2_colorcorrection_ctl color; + struct camera2_aa_ctl aa; + struct camera2_demosaic_ctl demosaic; + struct camera2_edge_ctl edge; + struct camera2_flash_ctl flash; + struct camera2_hotpixel_ctl hotpixel; + struct camera2_jpeg_ctl jpeg; + struct camera2_lens_ctl lens; + struct camera2_noisereduction_ctl noise; + struct camera2_request_ctl request; + struct camera2_scaler_ctl scaler; + struct camera2_sensor_ctl sensor; + struct camera2_shading_ctl shading; + struct camera2_stats_ctl stats; + struct camera2_tonemap_ctl tonemap; + struct camera2_led_ctl led; + struct camera2_blacklevel_ctl blacklevel; + struct camera2_sync_ctl sync; + + /* vendor feature */ + struct camera2_entry_ctl vendor_entry; +}; + +struct camera2_dm { + struct camera2_colorcorrection_dm color; + struct camera2_aa_dm aa; + struct camera2_demosaic_dm demosaic; + struct camera2_edge_dm edge; + struct camera2_flash_dm flash; + struct camera2_hotpixel_dm hotpixel; + struct camera2_jpeg_dm jpeg; + struct camera2_lens_dm lens; + struct camera2_noisereduction_dm noise; + struct camera2_request_dm request; + struct camera2_scaler_dm scaler; + struct camera2_sensor_dm sensor; + struct camera2_shading_dm shading; + struct camera2_stats_dm stats; + struct camera2_tonemap_dm tonemap; + struct camera2_led_dm led; + struct camera2_blacklevel_dm blacklevel; + struct camera2_sync_dm sync; + + /* vendor feature */ + struct camera2_entry_dm vendor_entry; +}; + +struct camera2_sm { + struct camera2_colorcorrection_sm color; + struct camera2_aa_sm aa; + struct camera2_edge_sm edge; + struct camera2_flash_sm flash; + struct camera2_hotpixel_sm hotpixel; + struct camera2_jpeg_sm jpeg; + struct camera2_lens_sm lens; + struct camera2_noisereduction_sm noise; + struct camera2_request_sm request; + struct camera2_scaler_sm scaler; + struct camera2_sensor_sm sensor; + struct camera2_stats_sm stats; + struct camera2_tonemap_sm tonemap; + struct camera2_led_sm led; + struct camera2_info_sm info; + + /** User-defined(ispfw specific) static metadata. 
*/ + struct camera2_lens_usm lensUd; + struct camera2_sensor_usm sensorUd; + struct camera2_flash_usm flashUd; +}; + +struct camera2_obj_af_info { + int32_t focusState; + int32_t focusROILeft; + int32_t focusROIRight; + int32_t focusROITop; + int32_t focusROIBottom; + int32_t focusWeight; + int32_t w_movement; + int32_t h_movement; + int32_t w_velocity; + int32_t h_velocity; +}; + +struct camera2_hrm_sensor_info { + uint32_t visible_data; + uint32_t ir_data; + int32_t status; +}; + +struct camera2_illuminaion_sensor_info { + uint16_t visible_cdata; + uint16_t visible_rdata; + uint16_t visible_gdata; + uint16_t visible_bdata; + uint16_t visible_gain; + uint16_t visible_exptime; + uint16_t ir_north; + uint16_t ir_south; + uint16_t ir_east; + uint16_t ir_west; + uint16_t ir_gain; + uint16_t ir_exptime; +}; + +struct camera2_gyro_sensor_info { + float x; + float y; + float z; +}; + +struct camera2_aa_uctl { + struct camera2_obj_af_info af_data; + struct camera2_hrm_sensor_info hrmInfo; + struct camera2_illuminaion_sensor_info illuminationInfo; + struct camera2_gyro_sensor_info gyroInfo; +}; + +struct camera2_aa_udm { + struct camera2_obj_af_info af_data; + struct camera2_hrm_sensor_info hrmInfo; + struct camera2_illuminaion_sensor_info illuminationInfo; + struct camera2_gyro_sensor_info gyroInfo; +}; + +/** \brief + User-defined control for lens. + */ +struct camera2_lens_uctl { + /** It depends by posSize */ + uint32_t pos; + /** It depends by af algorithm(AF pos bit. normally 8 or 9 or 10) */ + uint32_t posSize; + /** It depends by af algorithm */ + uint32_t direction; + /** Some actuator support slew rate control. */ + uint32_t slewRate; + /** */ + uint32_t oisCoefVal; +}; + +/** \brief + User-defined metadata for lens. + */ +struct camera2_lens_udm { + /** It depends by posSize */ + uint32_t pos; + /** It depends by af algorithm(AF pos bit. normally 8 or 9 or 10) */ + uint32_t posSize; + /** It depends by af algorithm */ + uint32_t direction; + /** Some actuator support slew rate control. */ + uint32_t slewRate; + /** */ + uint32_t oisCoefVal; +}; + +/** \brief + User-defined metadata for ae. + */ +struct camera2_ae_udm { + /** vendor specific length */ + uint32_t vsLength; + /** vendor specific data array */ + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; +}; + +/** \brief + User-defined metadata for awb. + */ +struct camera2_awb_udm { + /** vendor specific length */ + uint32_t vsLength; + /** vendor specific data array */ + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; +}; + +/** \brief + User-defined metadata for af. + */ +struct camera2_af_udm { + /** vendor specific length */ + uint32_t vsLength; + /** vendor specific data array */ + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; + int32_t lensPositionInfinity; + int32_t lensPositionMacro; + int32_t lensPositionCurrent; +}; + +/** \brief + User-defined metadata for anti-shading. + */ +struct camera2_as_udm { + /** vendor specific length */ + uint32_t vsLength; + /** vendor specific data array */ + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; +}; + +/** \brief + User-defined metadata for anti-shading. + */ +struct camera2_ipc_udm { + /** vendor specific length */ + uint32_t vsLength; + /** vendor specific data array */ + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; +}; + +/** \brief + User-defined metadata for aa. 
+ */ +struct camera2_internal_udm { + /** vendor specific data array */ + uint32_t ProcessedFrameInfo; + uint32_t vendorSpecific1[CAMERA2_MAX_VENDER_LENGTH]; + uint32_t vendorSpecific2[CAMERA2_MAX_VENDER_LENGTH]; + /* + vendorSpecific2[0] : 3aaIspSircSdk + vendorSpedific2[1] : IspTpuSirdSdk + vendorSpecific2[100] : exposure + vendorSpecific2[101] : iso(gain) + vendorSpecific2[102] : Bv + vendorSpecific2[103] : Tv + */ +}; + +/** \brief + User-defined control for sensor. + */ +struct camera2_sensor_uctl { + /** Dynamic frame duration. + This feature is decided to max. value between + 'sensor.exposureTime'+alpha and 'sensor.frameDuration'. + */ + uint64_t dynamicFrameDuration; + uint32_t analogGain; + uint32_t digitalGain; + uint64_t longExposureTime; /* For supporting WDR */ + uint64_t shortExposureTime; + uint32_t longAnalogGain; + uint32_t shortAnalogGain; + uint32_t longDigitalGain; + uint32_t shortDigitalGain; + + // ctl params for backward compatibility + uint32_t exposureTime; + uint32_t frameDuration; + uint32_t sensitivity; +}; + +struct camera2_sensor_udm { + /** Dynamic frame duration. + This feature is decided to max. value between + 'sensor.exposureTime'+alpha and 'sensor.frameDuration'. + */ + uint64_t dynamicFrameDuration; + uint32_t analogGain; + uint32_t digitalGain; + uint64_t longExposureTime; + uint64_t shortExposureTime; + uint32_t longAnalogGain; + uint32_t shortAnalogGain; + uint32_t longDigitalGain; + uint32_t shortDigitalGain; + uint64_t timeStampBoot; +}; + +struct camera2_scaler_uctl { + /* Input Image Address */ + uint32_t sourceAddress[4]; + /** \brief + target address for next frame. + \remarks + [0] invalid address, stop + [others] valid address + */ + uint32_t txcTargetAddress[4]; /* 3AA capture DMA */ + uint32_t txpTargetAddress[4]; /* 3AA preview DMA */ + uint32_t ixcTargetAddress[4]; + uint32_t ixpTargetAddress[4]; + uint32_t sccTargetAddress[4]; + uint32_t scpTargetAddress[4]; + uint32_t orientation; +}; + +struct camera2_flash_uctl { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; +}; + +struct camera2_flash_udm { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; +}; + +enum companion_drc_mode { + COMPANION_DRC_OFF = 1, + COMPANION_DRC_ON, +}; + +enum companion_wdr_mode { + COMPANION_WDR_OFF = 1, + COMPANION_WDR_ON = 2, + COMPANION_WDR_AUTO = 3, + TOTALCOUNT_COMPANION_WDR, + COMPANION_WDR_UNKNOWN, +}; + +enum companion_paf_mode { + COMPANION_PAF_OFF = 1, + COMPANION_PAF_ON, +}; + +enum companion_bypass_mode { + COMPANION_FULL_BYPASS_OFF = 1, + COMPANION_FULL_BYPASS_ON, +}; + +enum companion_lsc_mode { + COMPANION_LSC_OFF = 1, + COMPANION_LSC_ON, +}; + +enum companion_bpc_mode { + COMPANION_BPC_OFF = 1, + COMPANION_BPC_ON, +}; + +enum camera_op_mode { + CAMERA_OP_MODE_GED = 0, // default + CAMERA_OP_MODE_TW, + CAMERA_OP_MODE_HAL3_GED, +}; + +struct camera2_companion_uctl { + enum companion_drc_mode drc_mode; + enum companion_wdr_mode wdr_mode; + enum companion_paf_mode paf_mode; + enum companion_lsc_mode lsc_mode; // lsc on/off + enum companion_bpc_mode bpc_mode; // bpc on/off + enum companion_bypass_mode bypass_mode; // full bypass +}; + +struct camera2_pdaf_single_result { + uint16_t mode; + uint16_t goalPos; + uint16_t reliability; + uint16_t currentPos; +}; + +struct camera2_pdaf_multi_result { + uint16_t mode; + uint16_t goalPos; + uint16_t reliability; +}; + +struct camera2_pdaf_udm { + uint16_t numCol; /* width of PDAF map, 0 means no multi PDAF data */ + uint16_t numRow; /* height of PDAF map, 0 
means no multi PDAF data */ + struct camera2_pdaf_multi_result multiResult[CAMERA2_MAX_PDAF_MULTIROI_COLUMN][CAMERA2_MAX_PDAF_MULTIROI_ROW]; + struct camera2_pdaf_single_result singleResult; + uint16_t lensPosResolution; /* 1023(unsigned 10bit) */ +}; + +struct camera2_companion_udm { + enum companion_drc_mode drc_mode; + enum companion_wdr_mode wdr_mode; + enum companion_paf_mode paf_mode; + enum companion_lsc_mode lsc_mode; // lsc on/off + enum companion_bpc_mode bpc_mode; // bpc on/off + enum companion_bypass_mode bypass_mode; // full bypass + struct camera2_pdaf_udm pdaf; +}; + +struct camera2_fd_uctl +{ + enum facedetect_mode faceDetectMode; + uint32_t faceIds[CAMERA2_MAX_FACES]; + uint32_t faceLandmarks[CAMERA2_MAX_FACES][6]; + uint32_t faceRectangles[CAMERA2_MAX_FACES][4]; + uint8_t faceScores[CAMERA2_MAX_FACES]; + uint32_t faces[CAMERA2_MAX_FACES]; + + uint32_t vendorSpecific[CAMERA2_MAX_UCTL_VENDER_LENGTH]; + /* --------------------------------------------------------- + vendorSpecific[0] = fdMapAddress[0]; + vendorSpecific[1] = fdMapAddress[1]; + vendorSpecific[2] = fdMapAddress[2]; + vendorSpecific[4] = fdMapAddress[4]; + vendorSpecific[5] = fdMapAddress[5]; + vendorSpecific[6] = fdMapAddress[6]; + vendorSpecific[7] = fdMapAddress[7]; + vendorSpecific[8] = fdYMapAddress; + vendorSpecific[9] = fdCoefK; + vendorSpecific[10] = fdUp; + vendorSpecific[11] = fdShift; + vendorSpecific[12] ~ vendorSpecific[31] : reserved + --------------------------------------------------------- + */ +}; + +struct camera2_fd_udm +{ + uint32_t vendorSpecific[CAMERA2_MAX_UCTL_VENDER_LENGTH]; + /* --------------------------------------------------------- + vendorSpecific[0] = fdSat; + vendorSpecific[1] ~ vendorSpecific[31] : reserved + --------------------------------------------------------- + */ +}; + +enum camera2_drc_mode { + DRC_OFF = 1, + DRC_ON, +}; + +struct camera2_drc_uctl { + enum camera2_drc_mode uDrcEn; +}; + +enum camera_vt_mode { + VT_MODE_OFF = 0, + VT_MODE_1, /* QCIF ~ QVGA */ + VT_MODE_2, /* QVGA ~ VGA*/ + VT_MODE_3, /* Reserved : Smart Stay */ + VT_MODE_4, /* VGA ~ HD */ +}; + +/** \brief + User-defined control area. + \remarks + sensor, lens, flash category is empty value. + It should be filled by a5 for SET_CAM_CONTROL command. + Other category is filled already from host. + */ +struct camera2_uctl { + /** \brief + Set sensor, lens, flash control for next frame. + \remarks + This flag can be combined. + [0 bit] lens + [1 bit] sensor + [2 bit] flash + */ + uint32_t uUpdateBitMap; + + /** For debugging */ + uint32_t uFrameNumber; + + struct camera2_aa_uctl aaUd; + + /** ispfw specific control(user-defined) of lens. */ + struct camera2_lens_uctl lensUd; + /** ispfw specific control(user-defined) of sensor. */ + struct camera2_sensor_uctl sensorUd; + /** ispfw specific control(user-defined) of flash. */ + struct camera2_flash_uctl flashUd; + + struct camera2_scaler_uctl scalerUd; + /** ispfw specific control(user-defined) of Bcrop1. 
*/ + struct camera2_companion_uctl companionUd; + struct camera2_fd_uctl fdUd; + struct camera2_drc_uctl drcUd; + enum camera_vt_mode vtMode; + float zoomRatio; + uint32_t reserved[9]; +}; + +struct camera2_me_udm { + uint32_t vendorSpecific[200]; +}; + +struct camera2_udm { + struct camera2_aa_udm aa; + struct camera2_lens_udm lens; + struct camera2_sensor_udm sensor; + struct camera2_flash_udm flash; + struct camera2_ae_udm ae; + struct camera2_awb_udm awb; + struct camera2_af_udm af; + struct camera2_as_udm as; + struct camera2_ipc_udm ipc; + struct camera2_internal_udm internal; + struct camera2_companion_udm companion; + struct camera2_fd_udm fd; + struct camera2_me_udm me; + enum camera_vt_mode vtMode; + float zoomRatio; + uint32_t reserved[9]; +}; + +struct camera2_shot { + /*google standard area*/ + struct camera2_ctl ctl; + struct camera2_dm dm; + /*user defined area*/ + struct camera2_uctl uctl; + struct camera2_udm udm; + /*magic : 23456789*/ + uint32_t magicNumber; +}; + +struct camera2_node_input { + /** \brief + intput crop region + \remarks + [0] x axis + [1] y axie + [2] width + [3] height + */ + uint32_t cropRegion[4]; +}; + +struct camera2_node_output { + /** \brief + output crop region + \remarks + [0] x axis + [1] y axie + [2] width + [3] height + */ + uint32_t cropRegion[4]; +}; + +struct camera2_node { + /** \brief + video node id + \remarks + [x] video node id + */ + uint32_t vid; + + /** \brief + stream control + \remarks + [0] disable stream out + [1] enable stream out + */ + uint32_t request; + + struct camera2_node_input input; + struct camera2_node_output output; +}; + +struct camera2_node_group { + /** \brief + output device node + \remarks + this node can pull in image + */ + struct camera2_node leader; + + /** \brief + capture node list + \remarks + this node can get out image + 3AAC, 3AAP, SCC, SCP, VDISC + */ + struct camera2_node capture[CAPTURE_NODE_MAX]; +}; + +/** \brief + Structure for interfacing between HAL and driver. + */ +struct camera2_shot_ext { + /* + * --------------------------------------------------------------------- + * HAL Control Part + * --------------------------------------------------------------------- + */ + + /** \brief + setfile change + \remarks + [x] mode for setfile + */ + uint32_t setfile; + + /** \brief + node group control + \remarks + per frame control + */ + struct camera2_node_group node_group; + + /** \brief + post processing control(DRC) + \remarks + [0] bypass off + [1] bypass on + */ + uint32_t drc_bypass; + + /** \brief + post processing control(DIS) + \remarks + [0] bypass off + [1] bypass on + */ + uint32_t dis_bypass; + + /** \brief + post processing control(3DNR) + \remarks + [0] bypass off + [1] bypass on + */ + uint32_t dnr_bypass; + + /** \brief + post processing control(FD) + \remarks + [0] bypass off + [1] bypass on + */ + uint32_t fd_bypass; + + /* + * --------------------------------------------------------------------- + * DRV Control Part + * --------------------------------------------------------------------- + */ + + /** \brief + requested frames state. + driver return the information everytime + when dequeue is requested. 
+ \remarks + [X] count + */ + uint32_t free_cnt; + uint32_t request_cnt; + uint32_t process_cnt; + uint32_t complete_cnt; + + /** \brief + shot validation + \remarks + [0] valid + [1] invalid + */ + uint32_t invalid; + + /* reserved for future */ + uint32_t reserved[14]; + + /** \brief + processing time debugging + \remarks + taken time(unit : struct timeval) + [0][x] flite start + [1][x] flite end + [2][x] DRV Shot + [3][x] DRV Shot done + [4][x] DRV Meta done + */ + uint32_t timeZone[10][2]; + + /* + * --------------------------------------------------------------------- + * Camera API + * --------------------------------------------------------------------- + */ + + struct camera2_shot shot; +}; + +/** \brief + stream structure for scaler. + */ +struct camera2_stream { + /** \brief + this address for verifying conincidence of index and address + \remarks + [X] kernel virtual address for this buffer + */ + uint32_t address; + + /** \brief + this frame count is from FLITE through dm.request.fcount, + this count increases every frame end. initial value is 1. + \remarks + [X] frame count + */ + uint32_t fcount; + + /** \brief + this request count is from HAL through ctl.request.fcount, + this count is the unique. + \remarks + [X] request count + */ + uint32_t rcount; + + /** \brief + frame index of isp framemgr. + this value is for driver internal debugging + \remarks + [X] frame index + */ + uint32_t findex; + + /** \brief + frame validation of isp framemgr. + this value is for driver and HAL internal debugging + \remarks + [X] frame valid + */ + uint32_t fvalid; + + /** \brief + output crop region + this value mean the output image places the axis of memory space + \remarks + [0] crop x axis + [1] crop y axis + [2] width + [3] height + */ + uint32_t input_crop_region[4]; + uint32_t output_crop_region[4]; +}; + +#define CAM_LENS_CMD (0x1 << 0x0) +#define CAM_SENSOR_CMD (0x1 << 0x1) +#define CAM_FLASH_CMD (0x1 << 0x2) + +/* typedefs below are for firmware sources */ + +typedef enum metadata_mode metadata_mode_t; +typedef struct camera2_request_ctl camera2_request_ctl_t; +typedef struct camera2_request_dm camera2_request_dm_t; +typedef enum optical_stabilization_mode optical_stabilization_mode_t; +typedef enum lens_facing lens_facing_t; +typedef struct camera2_entry_ctl camera2_entry_ctl_t; +typedef struct camera2_entry_dm camera2_entry_dm_t; +typedef struct camera2_lens_ctl camera2_lens_ctl_t; +typedef struct camera2_lens_dm camera2_lens_dm_t; +typedef struct camera2_lens_sm camera2_lens_sm_t; +typedef enum sensor_colorfilterarrangement sensor_colorfilterarrangement_t; +typedef enum sensor_ref_illuminant sensor_ref_illuminant_t; +typedef struct camera2_sensor_ctl camera2_sensor_ctl_t; +typedef struct camera2_sensor_dm camera2_sensor_dm_t; +typedef struct camera2_sensor_sm camera2_sensor_sm_t; +typedef enum flash_mode flash_mode_t; +typedef struct camera2_flash_ctl camera2_flash_ctl_t; +typedef struct camera2_flash_dm camera2_flash_dm_t; +typedef struct camera2_flash_sm camera2_flash_sm_t; +typedef enum processing_mode processing_mode_t; +typedef struct camera2_hotpixel_ctl camera2_hotpixel_ctl_t; +typedef struct camera2_hotpixel_dm camera2_hotpixel_dm_t; +typedef struct camera2_hotpixel_sm camera2_hotpixel_sm_t; + +typedef struct camera2_demosaic_ctl camera2_demosaic_ctl_t; +typedef struct camera2_demosaic_dm camera2_demosaic_dm_t; +typedef struct camera2_noisereduction_ctl camera2_noisereduction_ctl_t; +typedef struct camera2_noisereduction_dm camera2_noisereduction_dm_t; +typedef struct 
camera2_noisereduction_sm camera2_noisereduction_sm_t; +typedef struct camera2_shading_ctl camera2_shading_ctl_t; +typedef struct camera2_shading_dm camera2_shading_dm_t; +typedef enum colorcorrection_mode colorcorrection_mode_t; +typedef struct camera2_colorcorrection_ctl camera2_colorcorrection_ctl_t; +typedef struct camera2_colorcorrection_dm camera2_colorcorrection_dm_t; +typedef struct camera2_colorcorrection_sm camera2_colorcorrection_sm_t; +typedef enum tonemap_mode tonemap_mode_t; +typedef struct camera2_tonemap_ctl camera2_tonemap_ctl_t; +typedef struct camera2_tonemap_dm camera2_tonemap_dm_t; +typedef struct camera2_tonemap_sm camera2_tonemap_sm_t; + +typedef struct camera2_edge_ctl camera2_edge_ctl_t; +typedef struct camera2_edge_dm camera2_edge_dm_t; +typedef struct camera2_edge_sm camera2_edge_sm_t; +typedef struct camera2_scaler_ctl camera2_scaler_ctl_t; +typedef struct camera2_scaler_dm camera2_scaler_dm_t; +typedef struct camera2_jpeg_ctl camera2_jpeg_ctl_t; +typedef struct camera2_jpeg_dm camera2_jpeg_dm_t; +typedef struct camera2_jpeg_sm camera2_jpeg_sm_t; +typedef enum facedetect_mode facedetect_mode_t; +typedef enum stats_mode stats_mode_t; +typedef struct camera2_stats_ctl camera2_stats_ctl_t; +typedef struct camera2_stats_dm camera2_stats_dm_t; +typedef struct camera2_stats_sm camera2_stats_sm_t; +typedef enum aa_capture_intent aa_capture_intent_t; +typedef enum aa_mode aa_mode_t; +typedef enum aa_scene_mode aa_scene_mode_t; +typedef enum aa_effect_mode aa_effect_mode_t; +typedef enum aa_aemode aa_aemode_t; +typedef enum aa_ae_antibanding_mode aa_ae_antibanding_mode_t; +typedef enum aa_awbmode aa_awbmode_t; +typedef enum aa_afmode aa_afmode_t; +typedef enum aa_afstate aa_afstate_t; +typedef struct camera2_aa_ctl camera2_aa_ctl_t; +typedef struct camera2_aa_dm camera2_aa_dm_t; +typedef struct camera2_aa_sm camera2_aa_sm_t; +typedef struct camera2_lens_usm camera2_lens_usm_t; +typedef struct camera2_sensor_usm camera2_sensor_usm_t; +typedef struct camera2_flash_usm camera2_flash_usm_t; +typedef struct camera2_ctl camera2_ctl_t; +typedef struct camera2_uctl camera2_uctl_t; +typedef struct camera2_dm camera2_dm_t; +typedef struct camera2_sm camera2_sm_t; + +typedef struct camera2_scaler_sm camera2_scaler_sm_t; +typedef struct camera2_scaler_uctl camera2_scaler_uctl_t; + +typedef struct camera2_fd_uctl camera2_fd_uctl_t; +typedef struct camera2_fd_udm camera2_fd_udm_t; + +typedef struct camera2_sensor_uctl camera2_sensor_uctl_t; + +typedef struct camera2_aa_uctl camera2_aa_uctl_t; +typedef struct camera2_aa_udm camera2_aa_udm_t; + +typedef struct camera2_me_udm camera2_me_udm_t; + +typedef struct camera2_lens_uctl camera2_lens_uctl_t; +typedef struct camera2_lens_udm camera2_lens_udm_t; + +typedef struct camera2_ae_udm camera2_ae_udm_t; +typedef struct camera2_awb_udm camera2_awb_udm_t; +typedef struct camera2_af_udm camera2_af_udm_t; +typedef struct camera2_as_udm camera2_as_udm_t; +typedef struct camera2_ipc_udm camera2_ipc_udm_t; + +typedef struct camera2_internal_udm camera2_internal_udm_t; + +typedef struct camera2_flash_uctl camera2_flash_uctl_t; + +typedef struct camera2_shot camera2_shot_t; + +#endif +#endif //#if !defined(SUPPORT_HAL3_3_METADATA) diff --git a/libcamera/34xx/fimc-is-metadata_for_hal3.3.h b/libcamera/34xx/fimc-is-metadata_for_hal3.3.h new file mode 100644 index 0000000..6daf410 --- /dev/null +++ b/libcamera/34xx/fimc-is-metadata_for_hal3.3.h @@ -0,0 +1,1903 @@ +/* Copyright (c) 2015 Samsung Electronics Co, Ltd. 
+ * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 as + * published by the Free Software Foundation. + + * + + * Alternatively, Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*2012.04.18 Version 0.1 Initial Release*/ +/*2012.04.23 Version 0.2 Added static metadata (draft)*/ +/*2012.07.04 Version 0.3 Applied google's undocumented changes (draft)*/ +/*2012.07.11 Version 0.4 Added FD parameters */ +/*2012.07.27 Version 0.5 Modified HSB control and DM */ +/*2012.08.28 Version 0.6 Added AA_SCENE_MODE_NIGHT_CAPTURE */ +/*2012.10.15 Version 0.7 Added contrast in colorcorrection */ +/*2012.10.22 Version 0.8 Added analogGain and digitalGain in sensor control */ +/*2012.10.30 Version 0.9 Added disTargetAddress in camera2_scaler_uctl */ +/*2012.10.31 Version 0.10 Added user-defined metadata for 3A */ +/*2012.11.27 Version 0.11 Added image effects */ +/*2012.12.10 Version 0.12 Modified aa_aemode */ + +#ifndef FIMC_IS_METADATA_H_ +#define FIMC_IS_METADATA_H_ + +#ifndef _LINUX_TYPES_H +typedef unsigned char uint8_t; +typedef unsigned short uint16_t; +typedef signed short int16_t; +typedef signed int int32_t; +typedef unsigned int uint32_t; +/*typedef unsigned long long uint64_t;*/ +#endif + +struct rational { + int32_t num; + int32_t den; +}; + +#define CAMERA2_MAX_AVAILABLE_MODE 21 +#define CAMERA2_MAX_FACES 16 +#define CAMERA2_MAX_VENDER_LENGTH 400 +#define CAPTURE_NODE_MAX 5 +#define CAMERA2_MAX_PDAF_MULTIROI_COLUMN 13 +#define CAMERA2_MAX_PDAF_MULTIROI_ROW 9 +#define CAMERA2_MAX_UCTL_VENDER_LENGTH 32 + +#define CAMERA2_MAX_UCTL_VENDOR2_LENGTH 400 +#define CAMERA2_MAX_UDM_VENDOR2_LENGTH 32 +#define OPEN_MAGIC_NUMBER 0x01020304 +#define SHOT_MAGIC_NUMBER 0x23456789 + +#include "ExynosCameraConfig.h" + +/* + *controls/dynamic metadata + */ + +/* android.request */ + +enum metadata_mode { + METADATA_MODE_NONE, + METADATA_MODE_FULL +}; + +enum is_subscenario_id { + ISS_SUB_SCENARIO_STILL_PREVIEW = 0, /* 0: still preview */ + ISS_SUB_SCENARIO_VIDEO = 1, /* 1: video */ + ISS_SUB_SCENARIO_DUAL_STILL = 2, /* 2: dual still preview */ + ISS_SUB_SCENARIO_DUAL_VIDEO = 3, /* 3: dual video */ + ISS_SUB_SCENARIO_VIDEO_HIGH_SPEED = 4, /* 4: video high speed */ + ISS_SUB_SCENARIO_STILL_CAPTURE = 5, /* 5: still capture */ + ISS_SUB_SCENARIO_FHD_60FPS = 6, /* 6: video FHD 60fps */ + ISS_SUB_SCENARIO_UHD_30FPS = 7, /* 7: video UHD 30fps */ + ISS_SUB_SCENARIO_WVGA_300FPS = 8, /* 8: video WVGA 300fps */ + ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_ON = 9, + ISS_SUB_SCENARIO_VIDEO_WDR_ON = 10, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON = 11, + ISS_SUB_SCENARIO_UHD_30FPS_WDR_ON = 12, + ISS_SUB_SCENARIO_STILL_CAPTURE_ZOOM = 13, + ISS_SUB_SCENARIO_STILL_CAPTURE_ZOOM_INDOOR = 14, + ISS_SUB_SCENARIO_STILL_CAPTURE_ZOOM_OUTDOOR = 15, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_ZOOM = 16, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_ZOOM_INDOOR = 17, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_ZOOM_OUTDOOR = 18, + ISS_SUB_SCENARIO_STILL_CAPTURE_LLS = 
19, + ISS_SUB_SCENARIO_MERGED_STILL_CAPTURE_WDR_AUTO = ISS_SUB_SCENARIO_STILL_CAPTURE_LLS, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_LLS = 20, + ISS_SUB_SCENARIO_MERGED_STILL_CAPTURE_WDR_ON = ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON_LLS, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO_ZOOM = 21, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO_ZOOM_INDOOR = 22, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO_ZOOM_OUTDOOR = 23, + ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO = 24, + ISS_SUB_SCENARIO_VIDEO_WDR_AUTO = 25, + ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_AUTO = 26, + ISS_SUB_SCENARIO_FHD_240FPS = 27, + ISS_SUB_SCENARIO_MERGED_STILL_CAPTURE = 28, + ISS_SUB_SCENARIO_STILL_CAPTURE_SHARPEN = 29, + ISS_SUB_SCENARIO_STILL_CAPTURE_LONG = 30, + ISS_SUB_SCENARIO_STILL_CAPTURE_MANUAL_ISO = 31, + + ISS_SUB_SCENARIO_FRONT_VT1 = 31, /* 31: front camera VT1 */ + ISS_SUB_SCENARIO_FRONT_VT2 = 32, /* 32: front camera VT2 */ + ISS_SUB_SCENARIO_FRONT_SMART_STAY = 33, /* 33: front camera smart stay */ + ISS_SUB_SCENARIO_FRONT_PANORAMA = 34, /* 34: front camera front panorama */ + ISS_SUB_SCENARIO_FRONT_C2_OFF_STILL_PREVIEW = 35, /* 35: C2 off front still preview */ + ISS_SUB_SCENARIO_FRONT_C2_OFF_STILL_CAPTURE = 36, /* 36: C2 off front still capture */ + ISS_SUB_SCENARIO_FRONT_C2_OFF_VIDEO = 37, /* 37: C2 off front video */ + ISS_SUB_SCENARIO_FRONT_VT4 = 38, /* 38: front camera VT4 */ + ISS_SUB_SCENARIO_FRONT_VT1_STILL_CAPTURE = 39, /* 39: front camera VT1 still capture */ + ISS_SUB_END, +}; + +enum available_capabilities { + REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE = 0, + REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR, + REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING, + REQUEST_AVAILABLE_CAPABILITIES_RAW, + REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING, + REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS, + REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE, + REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING, + REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT, + REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO, +}; + +struct camera2_request_ctl { + uint32_t frameCount; + uint32_t id; + enum metadata_mode metadataMode; + + /* vendor feature */ + uint32_t vendor_frameCount; +}; + +struct camera2_request_dm { + uint32_t frameCount; + uint32_t id; + enum metadata_mode metadataMode; + uint8_t pipelineDepth; + uint32_t vendor_frameCount; +}; + +struct camera2_request_sm { + uint32_t maxNumOutputStreams[3]; + uint32_t maxNumOutputRaw; + uint32_t maxNumOutputProc; + uint32_t maxNumOutputProcStalling; + uint32_t maxNumInputStreams; + uint8_t pipelineMaxDepth; + uint32_t partialResultCount; + uint8_t availableCapabilities[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t availableRequestKeys[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t availableResultKeys[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t availableCharacteristicsKeys[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +struct camera2_entry_ctl { + uint32_t lowIndexParam; + uint32_t highIndexParam; + uint32_t parameter[2048]; +}; + +struct camera2_entry_dm { + uint32_t lowIndexParam; + uint32_t highIndexParam; +}; + +/* android.lens */ + +enum optical_stabilization_mode { + OPTICAL_STABILIZATION_MODE_OFF = 0, + OPTICAL_STABILIZATION_MODE_ON = 1, + + /* vendor feature */ + OPTICAL_STABILIZATION_MODE_STILL = 100, // Still mode + OPTICAL_STABILIZATION_MODE_STILL_ZOOM, // Still Zoom mode + OPTICAL_STABILIZATION_MODE_VIDEO, // Recording mode + OPTICAL_STABILIZATION_MODE_SINE_X, // factory mode x + OPTICAL_STABILIZATION_MODE_SINE_Y, // factory mode y + OPTICAL_STABILIZATION_MODE_CENTERING, // Centering mode 
+ OPTICAL_STABILIZATION_MODE_VDIS, // VDIS mode + OPTICAL_STABILIZATION_MODE_VIDEO_RATIO_4_3, // Recording mode(VGA) +}; + +enum lens_state { + LENS_STATE_STATIONARY = 0, + LENS_STATE_MOVING, +}; + + +enum lens_focus_distance_calibration { + LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0, + LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE, + LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED, +}; + +enum lens_facing { + LENS_FACING_BACK, + LENS_FACING_FRONT, + LENS_FACING_EXTERNAL, +}; + +struct camera2_lens_ctl { + float aperture; + float filterDensity; + float focalLength; + float focusDistance; + enum optical_stabilization_mode opticalStabilizationMode; +}; + +struct camera2_lens_dm { + float aperture; + float filterDensity; + float focalLength; + float focusDistance; + float focusRange[2]; + enum optical_stabilization_mode opticalStabilizationMode; + enum lens_state state; + float poseRotation[4]; + float poseTranslation[3]; + float intrinsicCalibration[5]; + float radialDistortion[6]; +}; + +struct camera2_lens_sm { + float availableApertures[CAMERA2_MAX_AVAILABLE_MODE]; + float availableFilterDensities[CAMERA2_MAX_AVAILABLE_MODE]; + float availableFocalLength[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableOpticalStabilization[CAMERA2_MAX_AVAILABLE_MODE]; + float hyperfocalDistance; + float minimumFocusDistance; + uint32_t shadingMapSize[2]; + enum lens_focus_distance_calibration focusDistanceCalibration; + enum lens_facing facing; + float poseRotation[4]; + float poseTranslation[3]; + float intrinsicCalibration[5]; + float radialDistortion[6]; +}; + +/* android.sensor */ + +enum sensor_test_pattern_mode { + SENSOR_TEST_PATTERN_MODE_OFF = 1, + SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, + SENSOR_TEST_PATTERN_MODE_COLOR_BARS, + SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, + SENSOR_TEST_PATTERN_MODE_PN9, + SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 257, +}; + +enum sensor_colorfilterarrangement { + SENSOR_COLORFILTERARRANGEMENT_RGGB, + SENSOR_COLORFILTERARRANGEMENT_GRBG, + SENSOR_COLORFILTERARRANGEMENT_GBRG, + SENSOR_COLORFILTERARRANGEMENT_BGGR, + SENSOR_COLORFILTERARRANGEMENT_RGB +}; + +enum sensor_timestamp_calibration { + SENSOR_INFO_TIMESTAMP_CALIBRATION_UNCALIBRATED = 0, + SENSOR_INFO_TIMESTAMP_CALIBRATION_CALIBRATED, +}; + +enum sensor_lensshading_applied { + SENSOR_INFO_LENS_SHADING_APPLIED_FALSE = 0, + SENSOR_INFO_LENS_SHADING_APPLIED_TRUE, +}; + +enum sensor_ref_illuminant { + SENSOR_ILLUMINANT_DAYLIGHT = 1, + SENSOR_ILLUMINANT_FLUORESCENT = 2, + SENSOR_ILLUMINANT_TUNGSTEN = 3, + SENSOR_ILLUMINANT_FLASH = 4, + SENSOR_ILLUMINANT_FINE_WEATHER = 9, + SENSOR_ILLUMINANT_CLOUDY_WEATHER = 10, + SENSOR_ILLUMINANT_SHADE = 11, + SENSOR_ILLUMINANT_DAYLIGHT_FLUORESCENT = 12, + SENSOR_ILLUMINANT_DAY_WHITE_FLUORESCENT = 13, + SENSOR_ILLUMINANT_COOL_WHITE_FLUORESCENT = 14, + SENSOR_ILLUMINANT_WHITE_FLUORESCENT = 15, + SENSOR_ILLUMINANT_STANDARD_A = 17, + SENSOR_ILLUMINANT_STANDARD_B = 18, + SENSOR_ILLUMINANT_STANDARD_C = 19, + SENSOR_ILLUMINANT_D55 = 20, + SENSOR_ILLUMINANT_D65 = 21, + SENSOR_ILLUMINANT_D75 = 22, + SENSOR_ILLUMINANT_D50 = 23, + SENSOR_ILLUMINANT_ISO_STUDIO_TUNGSTEN = 24 +}; + +struct camera2_sensor_ctl { + uint64_t exposureTime; + uint64_t frameDuration; + uint32_t sensitivity; + int32_t testPatternData[4]; + enum sensor_test_pattern_mode testPatternMode; +}; + +struct camera2_sensor_dm { + uint64_t exposureTime; + uint64_t frameDuration; + uint32_t sensitivity; + uint64_t timeStamp; + float temperature; + struct rational neutralColorPoint[3]; + double noiseProfile[4][2]; + float 
profileHueSatMap[2][2][2][3]; + float profileToneCurve[32][2]; + float greenSplit; + int32_t testPatternData[4]; + enum sensor_test_pattern_mode testPatternMode; + uint64_t rollingShutterSkew; +}; + +struct camera2_sensor_sm { + uint32_t activeArraySize[4]; + uint32_t preCorrectionActiveArraySize[4]; + uint32_t sensitivityRange[2]; + enum sensor_colorfilterarrangement colorFilterArrangement; + uint64_t exposureTimeRange[2]; + uint64_t maxFrameDuration; + float physicalSize[2]; + uint32_t pixelArraySize[2]; + uint32_t whiteLevel; + enum sensor_timestamp_calibration timestampCalibration; + enum sensor_lensshading_applied lensShadingApplied; + enum sensor_ref_illuminant referenceIlluminant1; + enum sensor_ref_illuminant referenceIlluminant2; + struct rational calibrationTransform1[9]; + struct rational calibrationTransform2[9]; + struct rational colorTransform1[9]; + struct rational colorTransform2[9]; + struct rational forwardMatrix1[9]; + struct rational forwardMatrix2[9]; + struct rational baseGainFactor; + uint32_t blackLevelPattern[4]; + uint32_t maxAnalogSensitivity; + uint32_t orientation; + uint32_t profileHueSatMapDimensions[3]; + uint32_t availableTestPatternModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + + + +/* android.flash */ + +enum flash_mode { + CAM2_FLASH_MODE_NONE = 0, + CAM2_FLASH_MODE_OFF = 1, + CAM2_FLASH_MODE_SINGLE, + CAM2_FLASH_MODE_TORCH, + + /* vendor feature */ + CAM2_FLASH_MODE_BEST = 100, + CAM2_FLASH_MODE_LCD = 101, +}; + +enum flash_state { + FLASH_STATE_UNAVAILABLE = 0, + FLASH_STATE_CHARGING, + FLASH_STATE_READY, + FLASH_STATE_FIRED, + FLASH_STATE_PARTIAL, +}; + +enum capture_state { + CAPTURE_STATE_NONE = 0, + CAPTURE_STATE_FLASH = 1, + CAPTURE_STATE_HDR_DARK = 12, + CAPTURE_STATE_HDR_NORMAL = 13, + CAPTURE_STATE_HDR_BRIGHT = 14, + CAPTURE_STATE_ZSL_LIKE = 20, + CAPTURE_STATE_STREAM_ON_CAPTURE = 30, + CAPTURE_STATE_RAW_CAPTURE = 100, +}; /* firingStable state */ + +enum flash_info_available { + FLASH_INFO_AVAILABLE_FALSE = 0, + FLASH_INFO_AVAILABLE_TRUE, +}; + +struct camera2_flash_ctl { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; +}; + +struct camera2_flash_dm { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; + enum flash_state flashState; + + /* vendor feature */ + uint32_t vendor_firingStable; + uint32_t vendor_decision; + uint32_t vendor_flashReady; + uint32_t vendor_flashOffReady; +}; + +struct camera2_flash_sm { + enum flash_info_available available; + uint64_t chargeDuration; + uint8_t colorTemperature; + uint8_t maxEnergy; +}; + + +/* android.hotpixel */ + +enum processing_mode { + PROCESSING_MODE_OFF = 1, + PROCESSING_MODE_FAST, + PROCESSING_MODE_HIGH_QUALITY, + PROCESSING_MODE_MINIMAL, +}; + + +struct camera2_hotpixel_ctl { + enum processing_mode mode; +}; + +struct camera2_hotpixel_dm { + enum processing_mode mode; +}; + +struct camera2_hotpixel_sm { + uint8_t availableHotPixelModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + + + +/* android.demosaic */ + +enum demosaic_processing_mode { + DEMOSAIC_PROCESSING_MODE_FAST = 1, + DEMOSAIC_PROCESSING_MODE_HIGH_QUALITY +}; + +struct camera2_demosaic_ctl { + enum demosaic_processing_mode mode; +}; + +struct camera2_demosaic_dm { + enum demosaic_processing_mode mode; +}; + + + +/* android.noiseReduction */ + +struct camera2_noisereduction_ctl { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_noisereduction_dm { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_noisereduction_sm { + uint8_t 
availableNoiseReductionModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + + + +/* android.shading */ + +struct camera2_shading_ctl { + enum processing_mode mode; + uint8_t strength; /* Range: 1 ~ 10 */ // TODO: [API32] not implemented yet +}; + +struct camera2_shading_dm { + enum processing_mode mode; + uint8_t strength; /* Range: 1 ~ 10 */ // TODO: [API32] not implemented yet +}; + +struct camera2_shading_sm { + enum processing_mode availableModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + + +/* android.colorCorrection */ + +enum colorcorrection_mode { + COLORCORRECTION_MODE_TRANSFORM_MATRIX = 1, + COLORCORRECTION_MODE_FAST, + COLORCORRECTION_MODE_HIGH_QUALITY, +}; + + +struct camera2_colorcorrection_ctl { + enum colorcorrection_mode mode; + struct rational transform[9]; + float gains[4]; + enum processing_mode aberrationCorrectionMode; + + /* vendor feature */ + uint32_t vendor_hue; + uint32_t vendor_saturation; + uint32_t vendor_brightness; + uint32_t vendor_contrast; +}; + +struct camera2_colorcorrection_dm { + enum colorcorrection_mode mode; + struct rational transform[9]; + float gains[4]; + enum processing_mode aberrationCorrectionMode; + + /* vendor feature */ + uint32_t vendor_hue; + uint32_t vendor_saturation; + uint32_t vendor_brightness; + uint32_t vendor_contrast; +}; + +struct camera2_colorcorrection_sm { + uint8_t availableModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableAberrationCorrectionModes[CAMERA2_MAX_AVAILABLE_MODE]; + + /* vendor feature */ + uint32_t vendor_hueRange[2]; + uint32_t vendor_saturationRange[2]; + uint32_t vendor_brightnessRange[2]; + uint32_t vendor_contrastRange[2]; +}; + + +/* android.tonemap */ + +enum tonemap_mode { + TONEMAP_MODE_CONTRAST_CURVE = 1, + TONEMAP_MODE_FAST, + TONEMAP_MODE_HIGH_QUALITY, + TONEMAP_MODE_GAMMA_VALUE, + TONEMAP_MODE_PRESET_CURVE, +}; + +enum tonemap_presetCurve { + TONEMAP_PRESET_CURVE_SRGB, + TONEMAP_PRESET_CURVE_REC709, +}; + +struct camera2_tonemap_ctl { + float curveBlue[64]; + float curveGreen[64]; + float curveRed[64]; + float curve; + enum tonemap_mode mode; + float gamma; + enum tonemap_presetCurve presetCurve; +}; + +struct camera2_tonemap_dm { + float curveBlue[64]; + float curveGreen[64]; + float curveRed[64]; + float curve; + enum tonemap_mode mode; + float gamma; + enum tonemap_presetCurve presetCurve; +}; + +struct camera2_tonemap_sm { + uint32_t maxCurvePoints; + uint8_t availableToneMapModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.edge */ + +struct camera2_edge_ctl { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_edge_dm { + enum processing_mode mode; + uint8_t strength; +}; + +struct camera2_edge_sm { + uint8_t availableEdgeModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + + + +/* android.scaler */ + +struct camera2_scaler_ctl { + uint32_t cropRegion[4]; +}; + +struct camera2_scaler_dm { + uint32_t cropRegion[4]; +}; + +enum available_stream_configurations { + SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0, + SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT, +}; + +enum scaler_cropping_type { + SCALER_CROPPING_TYPE_CENTER_ONLY = 0, + SCALER_CROPPING_TYPE_FREEFORM, +}; + +struct camera2_scaler_sm { + float availableMaxDigitalZoom; + int32_t availableInputOutputFormatsMap; + uint8_t availableStreamConfigurations[CAMERA2_MAX_AVAILABLE_MODE][4]; + uint64_t availableMinFrameDurations[CAMERA2_MAX_AVAILABLE_MODE][4]; + uint64_t availableStallDurations[CAMERA2_MAX_AVAILABLE_MODE][4]; + int32_t streamConfigurationMap; + enum scaler_cropping_type croppingType; +}; + +/* android.jpeg */ +struct 
camera2_jpeg_ctl { + uint8_t gpsLocation; + double gpsCoordinates[3]; + uint8_t gpsProcessingMethod[32]; + uint64_t gpsTimestamp; + uint32_t orientation; + uint8_t quality; + uint8_t thumbnailQuality; + uint32_t thumbnailSize[2]; +}; + +struct camera2_jpeg_dm { + uint8_t gpsLocation; + double gpsCoordinates[3]; + uint8_t gpsProcessingMethod[32]; + uint64_t gpsTimestamp; + uint32_t orientation; + uint8_t quality; + uint32_t size; + uint8_t thumbnailQuality; + uint32_t thumbnailSize[2]; +}; + +struct camera2_jpeg_sm { + uint32_t availableThumbnailSizes[8][2]; + uint32_t maxSize; +}; + +/* android.statistics */ + +enum facedetect_mode { + FACEDETECT_MODE_OFF = 1, + FACEDETECT_MODE_SIMPLE, + FACEDETECT_MODE_FULL +}; + +enum stats_mode { + STATS_MODE_OFF = 1, + STATS_MODE_ON +}; + + +enum stats_scene_flicker { + STATISTICS_SCENE_FLICKER_NONE = 1, + STATISTICS_SCENE_FLICKER_50HZ, + STATISTICS_SCENE_FLICKER_60HZ, +}; + +enum stats_lowlightmode { + STATE_LLS_LEVEL_ZSL = 0, + STATE_LLS_LEVEL_LOW = 1, + STATE_LLS_LEVEL_HIGH = 2, + STATE_LLS_LEVEL_SIS = 3, + STATE_LLS_LEVEL_ZSL_LIKE = 4, + STATE_LLS_LEVEL_ZSL_LIKE1 = 7, + STATE_LLS_LEVEL_SHARPEN_SINGLE = 8, + STATE_LLS_MANUAL_ISO = 9, + STATE_LLS_LEVEL_FLASH = 16, + STATE_LLS_LEVEL_MULTI_MERGE_2 = 18, + STATE_LLS_LEVEL_MULTI_MERGE_3 = 19, + STATE_LLS_LEVEL_MULTI_MERGE_4 = 20, + STATE_LLS_LEVEL_MULTI_PICK_2 = 34, + STATE_LLS_LEVEL_MULTI_PICK_3 = 35, + STATE_LLS_LEVEL_MULTI_PICK_4 = 36, + STATE_LLS_LEVEL_MULTI_MERGE_INDICATOR_2 = 50, + STATE_LLS_LEVEL_MULTI_MERGE_INDICATOR_3 = 51, + STATE_LLS_LEVEL_MULTI_MERGE_INDICATOR_4 = 52, + STATE_LLS_LEVEL_FLASH_2 = 66, + STATE_LLS_LEVEL_FLASH_3 = 67, + STATE_LLS_LEVEL_FLASH_4 = 68, + STATE_LLS_LEVEL_MULTI_MERGE_INDICATOR_LOW_2 = 82, + STATE_LLS_LEVEL_MULTI_MERGE_INDICATOR_LOW_3 = 83, + STATE_LLS_LEVEL_MULTI_MERGE_INDICATOR_LOW_4 = 84, + STATE_LLS_LEVEL_FLASH_LOW_2 = 98, + STATE_LLS_LEVEL_FLASH_LOW_3 = 99, + STATE_LLS_LEVEL_FLASH_LOW_4 = 100, + STATE_LLS_LEVEL_DUMMY = 150, +}; + +enum stats_wdrAutoState { + STATE_WDR_AUTO_OFF = 1, + STATE_WDR_AUTO_REQUIRED = 2, +}; + +struct camera2_stats_ctl { + enum facedetect_mode faceDetectMode; + enum stats_mode histogramMode; + enum stats_mode sharpnessMapMode; + enum stats_mode hotPixelMapMode; + enum stats_mode lensShadingMapMode; +}; + + +struct camera2_stats_dm { + enum facedetect_mode faceDetectMode; + uint32_t faceIds[CAMERA2_MAX_FACES]; + uint32_t faceLandmarks[CAMERA2_MAX_FACES][6]; + uint32_t faceRectangles[CAMERA2_MAX_FACES][4]; + uint8_t faceScores[CAMERA2_MAX_FACES]; + uint32_t faces[CAMERA2_MAX_FACES]; + uint32_t histogram[3 * 256]; + enum stats_mode histogramMode; + int32_t sharpnessMap[2][2][3]; + enum stats_mode sharpnessMapMode; + uint8_t lensShadingCorrectionMap; + float lensShadingMap[2][2][4]; + enum stats_scene_flicker sceneFlicker; + enum stats_mode hotPixelMapMode; + int32_t hotPixelMap[CAMERA2_MAX_AVAILABLE_MODE][2]; + enum stats_mode lensShadingMapMode; + + /* vendor feature */ + enum stats_lowlightmode vendor_LowLightMode; + uint32_t vendor_lls_tuning_set_index; + uint32_t vendor_lls_brightness_index; + enum stats_wdrAutoState vendor_wdrAutoState; +}; + + +struct camera2_stats_sm { + uint8_t availableFaceDetectModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t histogramBucketCount; + uint32_t maxFaceCount; + uint32_t maxHistogramCount; + uint32_t maxSharpnessMapValue; + uint32_t sharpnessMapSize[2]; + uint32_t availableHotPixelMapModes[CAMERA2_MAX_AVAILABLE_MODE]; + enum stats_mode availableLensShadingMapModes[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* 
android.control */ + +enum aa_capture_intent { + AA_CAPTURE_INTENT_CUSTOM = 0, + AA_CAPTURE_INTENT_PREVIEW, + AA_CAPTURE_INTENT_STILL_CAPTURE, + AA_CAPTURE_INTENT_VIDEO_RECORD, + AA_CAPTURE_INTENT_VIDEO_SNAPSHOT, + AA_CAPTURE_INTENT_ZERO_SHUTTER_LAG, + AA_CAPTURE_INTENT_MANUAL, + + /* vendor feature */ + AA_CAPTURE_INTENT_STILL_CAPTURE_OIS_SINGLE = 100, + AA_CAPTURE_INTENT_STILL_CAPTURE_OIS_MULTI, + AA_CAPTURE_INTENT_STILL_CAPTURE_OIS_BEST, + AA_CAPTRUE_INTENT_STILL_CAPTURE_COMP_BYPASS, + AA_CAPTRUE_INTENT_STILL_CAPTURE_OIS_DEBLUR, + AA_CAPTRUE_INTENT_STILL_CAPTURE_DEBLUR_DYNAMIC_SHOT, + AA_CAPTRUE_INTENT_STILL_CAPTURE_OIS_DYNAMIC_SHOT, + AA_CAPTRUE_INTENT_STILL_CAPTURE_EXPOSURE_DYNAMIC_SHOT, + AA_CAPTURE_INTENT_STILL_CAPTURE_CANCEL, +}; + +enum aa_mode { + AA_CONTROL_OFF = 1, + AA_CONTROL_AUTO, + AA_CONTROL_USE_SCENE_MODE, + AA_CONTROL_OFF_KEEP_STATE, +}; + +enum aa_scene_mode { + AA_SCENE_MODE_DISABLED = 1, + AA_SCENE_MODE_FACE_PRIORITY, + AA_SCENE_MODE_ACTION, + AA_SCENE_MODE_PORTRAIT, + AA_SCENE_MODE_LANDSCAPE, + AA_SCENE_MODE_NIGHT, + AA_SCENE_MODE_NIGHT_PORTRAIT, + AA_SCENE_MODE_THEATRE, + AA_SCENE_MODE_BEACH, + AA_SCENE_MODE_SNOW, + AA_SCENE_MODE_SUNSET, + AA_SCENE_MODE_STEADYPHOTO, + AA_SCENE_MODE_FIREWORKS, + AA_SCENE_MODE_SPORTS, + AA_SCENE_MODE_PARTY, + AA_SCENE_MODE_CANDLELIGHT, + AA_SCENE_MODE_BARCODE, + AA_SCENE_MODE_HIGH_SPEED_VIDEO, + AA_SCENE_MODE_HDR, + AA_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT, + + /* vendor feature */ + AA_SCENE_MODE_NIGHT_CAPTURE = 100, + AA_SCENE_MODE_ANTISHAKE, + AA_SCENE_MODE_LLS, + AA_SCENE_MODE_FDAE, + AA_SCENE_MODE_DUAL, + AA_SCENE_MODE_DRAMA, + AA_SCENE_MODE_ANIMATED, + AA_SCENE_MODE_PANORAMA, + AA_SCENE_MODE_GOLF, + AA_SCENE_MODE_PREVIEW, + AA_SCENE_MODE_VIDEO, + AA_SCENE_MODE_SLOWMOTION_2, + AA_SCENE_MODE_SLOWMOTION_4_8, + AA_SCENE_MODE_DUAL_PREVIEW, + AA_SCENE_MODE_DUAL_VIDEO, + AA_SCENE_MODE_120_PREVIEW, + AA_SCENE_MODE_LIGHT_TRACE, + AA_SCENE_MODE_FOOD, + AA_SCENE_MODE_AQUA, + AA_SCENE_MODE_THERMAL, + AA_SCENE_MODE_VIDEO_COLLAGE, + AA_SCENE_MODE_PRO_MODE, +}; + +enum aa_effect_mode { + AA_EFFECT_OFF = 1, + AA_EFFECT_MONO, + AA_EFFECT_NEGATIVE, + AA_EFFECT_SOLARIZE, + AA_EFFECT_SEPIA, + AA_EFFECT_POSTERIZE, + AA_EFFECT_WHITEBOARD, + AA_EFFECT_BLACKBOARD, + AA_EFFECT_AQUA, + + /* vendor feature */ + AA_EFFECT_EMBOSS = 100, + AA_EFFECT_EMBOSS_MONO, + AA_EFFECT_SKETCH, + AA_EFFECT_RED_YELLOW_POINT, + AA_EFFECT_GREEN_POINT, + AA_EFFECT_BLUE_POINT, + AA_EFFECT_MAGENTA_POINT, + AA_EFFECT_WARM_VINTAGE, + AA_EFFECT_COLD_VINTAGE, + AA_EFFECT_WASHED, + AA_EFFECT_BEAUTY_FACE, + +}; + +enum aa_ae_lock { + AA_AE_LOCK_OFF = 1, + AA_AE_LOCK_ON, +}; + +enum aa_aemode { + AA_AEMODE_OFF = 1, + AA_AEMODE_ON, + AA_AEMODE_ON_AUTO_FLASH, + AA_AEMODE_ON_ALWAYS_FLASH, + AA_AEMODE_ON_AUTO_FLASH_REDEYE, + + /* vendor feature */ + AA_AEMODE_CENTER = 100, + AA_AEMODE_AVERAGE, + AA_AEMODE_MATRIX, + AA_AEMODE_SPOT, + AA_AEMODE_CENTER_TOUCH, + AA_AEMODE_AVERAGE_TOUCH, + AA_AEMODE_MATRIX_TOUCH, + AA_AEMODE_SPOT_TOUCH, + UNKNOWN_AA_AE_MODE +}; + +enum aa_ae_flashmode { + /*all flash control stop*/ + AA_FLASHMODE_OFF = 1, + /*flash start*/ + AA_FLASHMODE_START, + /*flash cancle*/ + AA_FLASHMODE_CANCEL, + /*internal 3A can control flash*/ + AA_FLASHMODE_ON, + /*internal 3A can do auto flash algorithm*/ + AA_FLASHMODE_AUTO, + /*internal 3A can fire flash by auto result*/ + AA_FLASHMODE_CAPTURE, + /*internal 3A can control flash forced*/ + AA_FLASHMODE_ON_ALWAYS +}; + +enum aa_ae_antibanding_mode { + AA_AE_ANTIBANDING_OFF = 1, + AA_AE_ANTIBANDING_50HZ, + AA_AE_ANTIBANDING_60HZ, + 
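/* Editor's note (illustrative sketch, not part of this patch): most of the 3A enums in
 * this section start at 1, which appears to offset them by +1 from the corresponding
 * ANDROID_CONTROL_* metadata values that start at 0 (e.g. AA_CONTROL_OFF = 1 vs.
 * ANDROID_CONTROL_MODE_OFF = 0, AA_AEMODE_ON_AUTO_FLASH = 3 vs.
 * ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH = 2). A HAL-side conversion would then be a
 * simple +1 remap, as sketched below for the non-vendor (< 100) range. The +1 mapping
 * is an observation from the numbering here, not a documented contract, and the helper
 * name is hypothetical.
 *
 *   static enum aa_aemode toFwAeMode(uint8_t androidAeMode) {
 *       // Only valid for the standard Android AE modes, not the vendor values >= 100.
 *       return (enum aa_aemode)(androidAeMode + 1);
 *   }
 */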
AA_AE_ANTIBANDING_AUTO, + + /* vendor feature */ + AA_AE_ANTIBANDING_AUTO_50HZ = 100, /* 50Hz + Auto */ + AA_AE_ANTIBANDING_AUTO_60HZ /* 60Hz + Auto */ +}; + +enum aa_awb_lock { + AA_AWB_LOCK_OFF = 1, + AA_AWB_LOCK_ON, +}; + +enum aa_awbmode { + AA_AWBMODE_OFF = 1, + AA_AWBMODE_WB_AUTO, + AA_AWBMODE_WB_INCANDESCENT, + AA_AWBMODE_WB_FLUORESCENT, + AA_AWBMODE_WB_WARM_FLUORESCENT, + AA_AWBMODE_WB_DAYLIGHT, + AA_AWBMODE_WB_CLOUDY_DAYLIGHT, + AA_AWBMODE_WB_TWILIGHT, + AA_AWBMODE_WB_SHADE, + AA_AWBMODE_WB_CUSTOM_K +}; + +enum aa_ae_precapture_trigger { + AA_AE_PRECAPTURE_TRIGGER_IDLE = 0, + AA_AE_PRECAPTURE_TRIGGER_START, + AA_AE_PRECAPTURE_TRIGGER_CANCEL, +}; + +enum aa_afmode { + AA_AFMODE_OFF = 1, + AA_AFMODE_AUTO, + AA_AFMODE_MACRO, + AA_AFMODE_CONTINUOUS_VIDEO, + AA_AFMODE_CONTINUOUS_PICTURE, + AA_AFMODE_EDOF, +}; + +enum aa_afmode_option_bit { + AA_AFMODE_OPTION_BIT_VIDEO = 0, + AA_AFMODE_OPTION_BIT_MACRO = 1, + AA_AFMODE_OPTION_BIT_FACE = 2, + AA_AFMODE_OPTION_BIT_DELAYED = 3, + AA_AFMODE_OPTION_BIT_OUT_FOCUSING = 4, + AA_AFMODE_OPTION_BIT_OBJECT_TRACKING = 5, + AA_AFMODE_OPTION_BIT_AF_ROI_NO_CONV = 6, + AA_AFMODE_OPTION_BIT_MULTI_AF = 7, +}; + +enum aa_afmode_ext { + AA_AFMODE_EXT_OFF = 1, + AA_AFMODE_EXT_ADVANCED_MACRO_FOCUS = 2, + AA_AFMODE_EXT_FOCUS_LOCATION = 3, +}; + +enum aa_af_trigger { + AA_AF_TRIGGER_IDLE = 0, + AA_AF_TRIGGER_START, + AA_AF_TRIGGER_CANCEL, +}; + +enum aa_afstate { + AA_AFSTATE_INACTIVE = 1, + AA_AFSTATE_PASSIVE_SCAN, + AA_AFSTATE_PASSIVE_FOCUSED, + AA_AFSTATE_ACTIVE_SCAN, + AA_AFSTATE_FOCUSED_LOCKED, + AA_AFSTATE_NOT_FOCUSED_LOCKED, + AA_AFSTATE_PASSIVE_UNFOCUSED, +}; + +enum ae_state { + AE_STATE_INACTIVE = 1, + AE_STATE_SEARCHING, + AE_STATE_CONVERGED, + AE_STATE_LOCKED, + AE_STATE_FLASH_REQUIRED, + AE_STATE_PRECAPTURE, + AE_STATE_LOCKED_CONVERGED = 10, + AE_STATE_LOCKED_FLASH_REQUIRED, + AE_STATE_SEARCHING_FLASH_REQUIRED, +}; + +enum awb_state { + AWB_STATE_INACTIVE = 1, + AWB_STATE_SEARCHING, + AWB_STATE_CONVERGED, + AWB_STATE_LOCKED +}; + +enum aa_videostabilization_mode { + VIDEO_STABILIZATION_MODE_OFF = 0, + VIDEO_STABILIZATION_MODE_ON, +}; + +enum aa_isomode { + AA_ISOMODE_AUTO = 1, + AA_ISOMODE_MANUAL, +}; + +enum aa_cameraid { + AA_CAMERAID_FRONT = 1, + AA_CAMERAID_REAR, +}; + +enum aa_videomode { + AA_VIDEOMODE_OFF = 0, + AA_VIDEOMODE_ON, +}; + +enum aa_ae_facemode { + AA_AE_FACEMODE_OFF = 0, + AA_AE_FACEMODE_ON, +}; + +enum aa_ae_lockavailable { + AE_LOCK_AVAILABLE_FALSE = 0, + AE_LOCK_AVAILABLE_TRUE, +}; + +enum aa_awb_lockavailable { + AWB_LOCK_AVAILABLE_FALSE = 0, + AWB_LOCK_AVAILABLE_TRUE, +}; + +enum aa_available_mode { + AA_OFF = 0, + AA_AUTO, + /* AA_USE_SCENE_MODE, + * AA_OFF_KEEP_STATE, */ +}; + +struct camera2_aa_ctl { + enum aa_ae_antibanding_mode aeAntibandingMode; + int32_t aeExpCompensation; + enum aa_ae_lock aeLock; + enum aa_aemode aeMode; + uint32_t aeRegions[5]; + uint32_t aeTargetFpsRange[2]; + enum aa_ae_precapture_trigger aePrecaptureTrigger; + enum aa_afmode afMode; + uint32_t afRegions[5]; + enum aa_af_trigger afTrigger; + enum aa_awb_lock awbLock; + enum aa_awbmode awbMode; + uint32_t awbRegions[5]; + enum aa_capture_intent captureIntent; + enum aa_effect_mode effectMode; + enum aa_mode mode; + enum aa_scene_mode sceneMode; + enum aa_videostabilization_mode videoStabilizationMode; + + /* vendor feature */ + float vendor_aeExpCompensationStep; + uint32_t vendor_afmode_option; + enum aa_afmode_ext vendor_afmode_ext; + enum aa_ae_flashmode vendor_aeflashMode; + enum aa_isomode vendor_isoMode; + uint32_t vendor_isoValue; + int32_t 
vendor_awbValue; + enum aa_cameraid vendor_cameraId; + enum aa_videomode vendor_videoMode; + enum aa_ae_facemode vendor_aeFaceMode; + enum aa_afstate vendor_afState; + int32_t vendor_exposureValue; + uint32_t vendor_touchAeDone; + uint32_t vendor_touchBvChange; + uint32_t vendor_captureCount; + uint32_t vendor_captureExposureTime; + uint32_t vendor_reserved[10]; +}; + +struct camera2_aa_dm { + enum aa_ae_antibanding_mode aeAntibandingMode; + int32_t aeExpCompensation; + enum aa_ae_lock aeLock; + enum aa_aemode aeMode; + uint32_t aeRegions[5]; + uint32_t aeTargetFpsRange[2]; + enum aa_ae_precapture_trigger aePrecaptureTrigger; + enum ae_state aeState; + enum aa_afmode afMode; + uint32_t afRegions[5]; + enum aa_af_trigger afTrigger; + enum aa_afstate afState; + enum aa_awb_lock awbLock; + enum aa_awbmode awbMode; + uint32_t awbRegions[5]; + enum aa_capture_intent captureIntent; + enum awb_state awbState; + enum aa_effect_mode effectMode; + enum aa_mode mode; + enum aa_scene_mode sceneMode; + enum aa_videostabilization_mode videoStabilizationMode; + + /* vendor feature */ + float vendor_aeExpCompensationStep; + uint32_t vendor_afmode_option; + enum aa_afmode_ext vendor_afmode_ext; + enum aa_ae_flashmode vendor_aeflashMode; + enum aa_isomode vendor_isoMode; + uint32_t vendor_isoValue; + int32_t vendor_awbValue; + enum aa_cameraid vendor_cameraId; + enum aa_videomode vendor_videoMode; + enum aa_ae_facemode vendor_aeFaceMode; + enum aa_afstate vendor_afState; + int32_t vendor_exposureValue; + uint32_t vendor_touchAeDone; + uint32_t vendor_touchBvChange; + uint32_t vendor_captureCount; + uint32_t vendor_captureExposureTime; + uint32_t vendor_reserved[10]; +}; + +struct camera2_aa_sm { + uint8_t aeAvailableAntibandingModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t aeAvailableModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t aeAvailableTargetFpsRanges[CAMERA2_MAX_AVAILABLE_MODE][2]; + int32_t aeCompensationRange[2]; + struct rational aeCompensationStep; + uint8_t afAvailableModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableEffects[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableSceneModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint8_t availableVideoStabilizationModes[4]; + uint8_t awbAvailableModes[CAMERA2_MAX_AVAILABLE_MODE]; + uint32_t maxRegions[3]; + uint32_t maxRegionsAe; + uint32_t maxRegionsAwb; + uint32_t maxRegionsAf; + uint8_t sceneModeOverrides[CAMERA2_MAX_AVAILABLE_MODE][3]; + uint32_t availableHighSpeedVideoConfigurations[CAMERA2_MAX_AVAILABLE_MODE][5]; + enum aa_ae_lockavailable aeLockAvailable; + enum aa_awb_lockavailable awbLockAvailable; + enum aa_available_mode availableModes; + + /* vendor feature */ + uint32_t vendor_isoRange[2]; +}; + +/* android.led */ + +enum led_transmit { + TRANSMIT_OFF = 0, + TRANSMIT_ON, +}; + +struct camera2_led_ctl { + enum led_transmit transmit; +}; + +struct camera2_led_dm { + enum led_transmit transmit; +}; + +struct camera2_led_sm { + uint8_t availableLeds[CAMERA2_MAX_AVAILABLE_MODE]; +}; + +/* android.info */ + +enum info_supported_hardware_level { + INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0, + INFO_SUPPORTED_HARDWARE_LEVEL_FULL, + INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, +}; + +struct camera2_info_sm { + enum info_supported_hardware_level supportedHardwareLevel; +}; + +/* android.blacklevel */ + +enum blacklevel_lock { + BLACK_LEVEL_LOCK_OFF = 0, + BLACK_LEVEL_LOCK_ON, +}; + +struct camera2_blacklevel_ctl { + enum blacklevel_lock lock; +}; + +struct camera2_blacklevel_dm { + enum blacklevel_lock lock; +}; + +/* android.reprocess */ + +struct 
camera2_reprocess_ctl { + float effectiveExposureFactor; +}; + +struct camera2_reprocess_dm { + float effectiveExposureFactor; +}; + +struct camera2_reprocess_sm { + uint32_t maxCaptureStall; +}; + +/* android.depth */ + +enum depth_available_depth_stream_config { + DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT, + DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_INPUT, +}; + +enum depth_depth_is_exclusive { + DEPTH_DEPTH_IS_EXCLUSIVE_FALSE, + DEPTH_DEPTH_IS_EXCLUSIVE_TRUE, +}; + +struct camera2_depth_sm { + uint32_t maxDepthSamples; + enum depth_available_depth_stream_config availableDepthStreamConfigurations[CAMERA2_MAX_AVAILABLE_MODE][4]; + uint64_t availableDepthMinFrameDurations[CAMERA2_MAX_AVAILABLE_MODE][4]; + uint64_t availableDepthStallDurations[CAMERA2_MAX_AVAILABLE_MODE][4]; + enum depth_depth_is_exclusive depthIsExclusive; +}; + +/* android.sync */ + +enum sync_frame_number { + SYNC_FRAME_NUMBER_CONVERGING = -1, + SYNC_FRAME_NUMBER_UNKNOWN = -2, +}; + +enum sync_max_latency { + SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0, + SYNC_MAX_LATENCY_UNKNOWN = -1, +}; + +struct camera2_sync_ctl { + int64_t frameNumber; +}; + +struct camera2_sync_dm { + int32_t maxLatency; +}; + +struct camera2_lens_usm { + uint32_t focusDistanceFrameDelay; +}; + +struct camera2_sensor_usm { + uint32_t exposureTimeFrameDelay; + uint32_t frameDurationFrameDelay; + uint32_t sensitivityFrameDelay; +}; + +struct camera2_flash_usm { + uint32_t flashModeFrameDelay; + uint32_t firingPowerFrameDelay; + uint64_t firingTimeFrameDelay; +}; + +struct camera2_ctl { + struct camera2_colorcorrection_ctl color; + struct camera2_aa_ctl aa; + struct camera2_demosaic_ctl demosaic; + struct camera2_edge_ctl edge; + struct camera2_flash_ctl flash; + struct camera2_hotpixel_ctl hotpixel; + struct camera2_jpeg_ctl jpeg; + struct camera2_lens_ctl lens; + struct camera2_noisereduction_ctl noise; + struct camera2_request_ctl request; + struct camera2_scaler_ctl scaler; + struct camera2_sensor_ctl sensor; + struct camera2_shading_ctl shading; + struct camera2_stats_ctl stats; + struct camera2_tonemap_ctl tonemap; + struct camera2_led_ctl led; + struct camera2_blacklevel_ctl blacklevel; + struct camera2_sync_ctl sync; + struct camera2_reprocess_ctl reprocess; + + /* vendor feature */ + struct camera2_entry_ctl vendor_entry; +}; + +struct camera2_dm { + struct camera2_colorcorrection_dm color; + struct camera2_aa_dm aa; + struct camera2_demosaic_dm demosaic; + struct camera2_edge_dm edge; + struct camera2_flash_dm flash; + struct camera2_hotpixel_dm hotpixel; + struct camera2_jpeg_dm jpeg; + struct camera2_lens_dm lens; + struct camera2_noisereduction_dm noise; + struct camera2_request_dm request; + struct camera2_scaler_dm scaler; + struct camera2_sensor_dm sensor; + struct camera2_shading_dm shading; + struct camera2_stats_dm stats; + struct camera2_tonemap_dm tonemap; + struct camera2_led_dm led; + struct camera2_blacklevel_dm blacklevel; + struct camera2_sync_dm sync; + struct camera2_reprocess_dm reprocess; + + /* vendor feature */ + struct camera2_entry_dm vendor_entry; +}; + +struct camera2_sm { + struct camera2_colorcorrection_sm color; + struct camera2_aa_sm aa; + struct camera2_edge_sm edge; + struct camera2_flash_sm flash; + struct camera2_hotpixel_sm hotpixel; + struct camera2_jpeg_sm jpeg; + struct camera2_lens_sm lens; + struct camera2_noisereduction_sm noise; + struct camera2_request_sm request; + struct camera2_scaler_sm scaler; + struct camera2_sensor_sm sensor; + struct camera2_shading_sm shading; + struct 
camera2_stats_sm stats; + struct camera2_tonemap_sm tonemap; + struct camera2_led_sm led; + struct camera2_info_sm info; + struct camera2_reprocess_sm reprocess; + struct camera2_depth_sm depth; + + /** User-defined(ispfw specific) static metadata. */ + struct camera2_lens_usm lensUd; + struct camera2_sensor_usm sensorUd; + struct camera2_flash_usm flashUd; +}; + +struct camera2_obj_af_info { + int32_t focusState; + int32_t focusROILeft; + int32_t focusROIRight; + int32_t focusROITop; + int32_t focusROIBottom; + int32_t focusWeight; + int32_t w_movement; + int32_t h_movement; + int32_t w_velocity; + int32_t h_velocity; +}; + +struct camera2_hrm_sensor_info { + uint32_t visible_data; + uint32_t ir_data; + uint32_t flicker_data; // 0: No flicker detect, 100: 50Hz, 120: 60Hz + int32_t status; +}; + +struct camera2_illuminaion_sensor_info { + uint16_t visible_cdata; + uint16_t visible_rdata; + uint16_t visible_gdata; + uint16_t visible_bdata; + uint16_t visible_gain; + uint16_t visible_exptime; + uint16_t ir_north; + uint16_t ir_south; + uint16_t ir_east; + uint16_t ir_west; + uint16_t ir_gain; + uint16_t ir_exptime; +}; + +struct camera2_gyro_sensor_info { + float x; + float y; + float z; +}; + +struct camera2_accelerometer_sensor_info { + float x; + float y; + float z; +}; + +struct camera2_aa_uctl { + struct camera2_obj_af_info af_data; + struct camera2_hrm_sensor_info hrmInfo; + struct camera2_illuminaion_sensor_info illuminationInfo; + struct camera2_gyro_sensor_info gyroInfo; + struct camera2_accelerometer_sensor_info accInfo; +}; + +struct camera2_aa_udm { + struct camera2_obj_af_info af_data; + struct camera2_hrm_sensor_info hrmInfo; + struct camera2_illuminaion_sensor_info illuminationInfo; + struct camera2_gyro_sensor_info gyroInfo; + struct camera2_accelerometer_sensor_info accInfo; +}; + +struct camera2_lens_uctl { + uint32_t pos; + uint32_t posSize; + uint32_t direction; + uint32_t slewRate; + uint32_t oisCoefVal; +}; + +struct camera2_lens_udm { + uint32_t pos; + uint32_t posSize; + uint32_t direction; + uint32_t slewRate; + uint32_t oisCoefVal; +}; + +struct camera2_ae_udm { + uint32_t vsLength; + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; + uint32_t vs2Length; + uint32_t vendorSpecific2[CAMERA2_MAX_UDM_VENDOR2_LENGTH]; +}; + +struct camera2_awb_udm { + uint32_t vsLength; + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; + uint32_t vs2Length; + uint32_t vendorSpecific2[CAMERA2_MAX_UDM_VENDOR2_LENGTH]; +}; + +struct camera2_af_uctl { + uint32_t vs2Length; + uint32_t vendorSpecific2[CAMERA2_MAX_UCTL_VENDOR2_LENGTH]; +}; + +struct camera2_af_udm { + uint32_t vsLength; + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; + int32_t lensPositionInfinity; + int32_t lensPositionMacro; + int32_t lensPositionCurrent; + uint32_t vs2Length; + uint32_t vendorSpecific2[CAMERA2_MAX_UDM_VENDOR2_LENGTH]; +}; + +struct camera2_as_udm { + uint32_t vsLength; + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; +}; + +struct camera2_ipc_udm { + uint32_t vsLength; + uint32_t vendorSpecific[CAMERA2_MAX_VENDER_LENGTH]; +}; + +struct camera2_internal_udm { + uint32_t ProcessedFrameInfo; + uint32_t vendorSpecific1[CAMERA2_MAX_VENDER_LENGTH]; + uint32_t vendorSpecific2[CAMERA2_MAX_VENDER_LENGTH]; + /* + vendorSpecific2[0] : 3aaIspSircSdk + vendorSpedific2[1] : IspTpuSirdSdk + vendorSpecific2[100] : exposure + vendorSpecific2[101] : iso(gain) + vendorSpecific2[102] : Bv + vendorSpecific2[103] : Tv + */ +}; + +struct camera2_sensor_uctl { + uint64_t dynamicFrameDuration; + uint32_t 
analogGain; + uint32_t digitalGain; + uint64_t longExposureTime; + uint64_t shortExposureTime; + uint32_t longAnalogGain; + uint32_t shortAnalogGain; + uint32_t longDigitalGain; + uint32_t shortDigitalGain; + uint64_t exposureTime; + uint32_t frameDuration; + uint32_t sensitivity; +}; + +struct camera2_sensor_udm { + uint64_t dynamicFrameDuration; + uint32_t analogGain; + uint32_t digitalGain; + uint64_t longExposureTime; + uint64_t shortExposureTime; + uint32_t longAnalogGain; + uint32_t shortAnalogGain; + uint32_t longDigitalGain; + uint32_t shortDigitalGain; + uint64_t timeStampBoot; +}; + +struct camera2_scaler_uctl { + uint32_t sourceAddress[4]; + uint32_t txcTargetAddress[4]; + uint32_t txpTargetAddress[4]; + uint32_t ixcTargetAddress[4]; + uint32_t ixpTargetAddress[4]; + uint32_t sccTargetAddress[4]; + uint32_t scpTargetAddress[4]; + uint32_t sc0TargetAddress[4]; + uint32_t sc1TargetAddress[4]; + uint32_t sc2TargetAddress[4]; + uint32_t sc3TargetAddress[4]; + uint32_t sc4TargetAddress[4]; + uint32_t orientation; +}; + +struct camera2_flash_uctl { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; +}; + +struct camera2_flash_udm { + uint32_t firingPower; + uint64_t firingTime; + enum flash_mode flashMode; +}; + +enum companion_drc_mode { + COMPANION_DRC_OFF = 1, + COMPANION_DRC_ON, +}; + +enum companion_wdr_mode { + COMPANION_WDR_OFF = 1, + COMPANION_WDR_ON = 2, + COMPANION_WDR_AUTO = 3, + TOTALCOUNT_COMPANION_WDR, + COMPANION_WDR_UNKNOWN, +}; + +enum companion_paf_mode { + COMPANION_PAF_OFF = 1, + COMPANION_PAF_ON, +}; + +enum companion_caf_mode { + COMPANION_CAF_OFF = 1, + COMPANION_CAF_ON, +}; + +enum companion_bypass_mode { + COMPANION_FULL_BYPASS_OFF = 1, + COMPANION_FULL_BYPASS_ON, +}; + +enum companion_lsc_mode { + COMPANION_LSC_OFF = 1, + COMPANION_LSC_ON, +}; + +enum companion_bpc_mode { + COMPANION_BPC_OFF = 1, + COMPANION_BPC_ON, +}; + +enum companion_disparity_mode { + COMPANION_DISPARITY_OFF = 1, + COMPANION_DISPARITY_SAD, + COMPANION_DISPARITY_CENSUS_MEAN, + COMPANION_DISPARITY_CENSUS_CENTER // Disparity mode default +}; + +enum camera_flash_mode { + CAMERA_FLASH_MODE_OFF = 0, + CAMERA_FLASH_MODE_AUTO, + CAMERA_FLASH_MODE_ON, + CAMERA_FLASH_MODE_RED_EYE, + CAMERA_FLASH_MODE_TORCH +}; + +enum camera_op_mode { + CAMERA_OP_MODE_GED = 0, // default + CAMERA_OP_MODE_TW, + CAMERA_OP_MODE_HAL3_GED, +}; + +struct camera2_companion_uctl { + enum companion_drc_mode drc_mode; + enum companion_wdr_mode wdr_mode; + enum companion_paf_mode paf_mode; + enum companion_caf_mode caf_mode; + enum companion_lsc_mode lsc_mode; + enum companion_bpc_mode bpc_mode; + enum companion_bypass_mode bypass_mode; + enum companion_disparity_mode disparity_mode; +}; + +struct camera2_pdaf_single_result { + uint16_t mode; + uint16_t goalPos; + uint16_t reliability; + uint16_t currentPos; +}; + +struct camera2_pdaf_multi_result { + uint16_t mode; + uint16_t goalPos; + uint16_t reliability; + uint16_t focused; +}; + +struct camera2_pdaf_udm { + uint16_t numCol; + uint16_t numRow; + struct camera2_pdaf_multi_result multiResult[CAMERA2_MAX_PDAF_MULTIROI_ROW][CAMERA2_MAX_PDAF_MULTIROI_COLUMN]; + struct camera2_pdaf_single_result singleResult; + uint16_t lensPosResolution; +}; + +struct camera2_companion_udm { + enum companion_drc_mode drc_mode; + enum companion_wdr_mode wdr_mode; + enum companion_paf_mode paf_mode; + enum companion_caf_mode caf_mode; + enum companion_lsc_mode lsc_mode; + enum companion_bpc_mode bpc_mode; + enum companion_bypass_mode bypass_mode; + struct 
camera2_pdaf_udm pdaf; + enum companion_disparity_mode disparity_mode; +}; + +struct camera2_fd_uctl { + enum facedetect_mode faceDetectMode; + uint32_t faceIds[CAMERA2_MAX_FACES]; + uint32_t faceLandmarks[CAMERA2_MAX_FACES][6]; + uint32_t faceRectangles[CAMERA2_MAX_FACES][4]; + uint8_t faceScores[CAMERA2_MAX_FACES]; + uint32_t faces[CAMERA2_MAX_FACES]; + uint32_t vendorSpecific[CAMERA2_MAX_UCTL_VENDER_LENGTH]; +/* --------------------------------------------------------- + vendorSpecific[0] = fdMapAddress[0]; + vendorSpecific[1] = fdMapAddress[1]; + vendorSpecific[2] = fdMapAddress[2]; + vendorSpecific[4] = fdMapAddress[4]; + vendorSpecific[5] = fdMapAddress[5]; + vendorSpecific[6] = fdMapAddress[6]; + vendorSpecific[7] = fdMapAddress[7]; + vendorSpecific[8] = fdYMapAddress; + vendorSpecific[9] = fdCoefK; + vendorSpecific[10] = fdUp; + vendorSpecific[11] = fdShift; + vendorSpecific[12] ~ vendorSpecific[31] : reserved + --------------------------------------------------------- +*/ +}; + +struct camera2_fd_udm { + uint32_t vendorSpecific[CAMERA2_MAX_UCTL_VENDER_LENGTH]; +/* --------------------------------------------------------- + vendorSpecific[0] = fdSat; + vendorSpecific[1] ~ vendorSpecific[31] : reserved + --------------------------------------------------------- +*/ +}; + +enum camera2_drc_mode { + DRC_OFF = 1, + DRC_ON, +}; + +struct camera2_drc_uctl { + enum camera2_drc_mode uDrcEn; +}; + +enum camera_vt_mode { + VT_MODE_OFF = 0, + VT_MODE_1, + VT_MODE_2, + VT_MODE_3, + VT_MODE_4, +}; + +struct camera2_uctl { + uint32_t uUpdateBitMap; + uint32_t uFrameNumber; + struct camera2_aa_uctl aaUd; + struct camera2_af_uctl af; + struct camera2_lens_uctl lensUd; + struct camera2_sensor_uctl sensorUd; + struct camera2_flash_uctl flashUd; + struct camera2_scaler_uctl scalerUd; + struct camera2_companion_uctl companionUd; + struct camera2_fd_uctl fdUd; + struct camera2_drc_uctl drcUd; + enum camera_vt_mode vtMode; + float zoomRatio; + enum camera_flash_mode flashMode; + enum camera_op_mode opMode; + uint32_t reserved[8]; +}; + +struct camera2_udm { + struct camera2_aa_udm aa; + struct camera2_lens_udm lens; + struct camera2_sensor_udm sensor; + struct camera2_flash_udm flash; + struct camera2_ae_udm ae; + struct camera2_awb_udm awb; + struct camera2_af_udm af; + struct camera2_as_udm as; + struct camera2_ipc_udm ipc; + struct camera2_internal_udm internal; + struct camera2_companion_udm companion; + struct camera2_fd_udm fd; + enum camera_vt_mode vtMode; + float zoomRatio; + enum camera_flash_mode flashMode; + enum camera_op_mode opMode; + uint32_t reserved[8]; +}; + +struct camera2_shot { + struct camera2_ctl ctl; + struct camera2_dm dm; + struct camera2_uctl uctl; + struct camera2_udm udm; + uint32_t magicNumber; +}; + +struct camera2_node_input { + uint32_t cropRegion[4]; +}; + +struct camera2_node_output { + uint32_t cropRegion[4]; +}; + +struct camera2_node { + uint32_t vid; + uint32_t request; + struct camera2_node_input input; + struct camera2_node_output output; +}; + +struct camera2_node_group { + struct camera2_node leader; + struct camera2_node capture[CAPTURE_NODE_MAX]; +}; + +struct camera2_shot_ext { + uint32_t setfile; + struct camera2_node_group node_group; + uint32_t drc_bypass; + uint32_t dis_bypass; + uint32_t dnr_bypass; + uint32_t fd_bypass; + uint32_t free_cnt; + uint32_t request_cnt; + uint32_t process_cnt; + uint32_t complete_cnt; + uint32_t invalid; + uint32_t reserved[14]; + uint32_t timeZone[10][2]; + struct camera2_shot shot; +}; + +struct camera2_stream { + uint32_t 
address; + uint32_t fcount; + uint32_t rcount; + uint32_t findex; + uint32_t fvalid; + uint32_t input_crop_region[4]; + uint32_t output_crop_region[4]; +}; + +#define CAM_LENS_CMD (0x1 << 0x0) +#define CAM_SENSOR_CMD (0x1 << 0x1) +#define CAM_FLASH_CMD (0x1 << 0x2) + +/* typedefs below are for firmware sources */ + +typedef enum metadata_mode metadata_mode_t; +typedef struct camera2_request_ctl camera2_request_ctl_t; +typedef struct camera2_request_dm camera2_request_dm_t; +typedef enum optical_stabilization_mode optical_stabilization_mode_t; +typedef enum lens_facing lens_facing_t; +typedef struct camera2_entry_ctl camera2_entry_ctl_t; +typedef struct camera2_entry_dm camera2_entry_dm_t; +typedef struct camera2_lens_ctl camera2_lens_ctl_t; +typedef struct camera2_lens_dm camera2_lens_dm_t; +typedef struct camera2_lens_sm camera2_lens_sm_t; +typedef enum sensor_colorfilterarrangement sensor_colorfilterarrangement_t; +typedef enum sensor_lensshading_applied sensor_lensshading_applied_t; +typedef enum sensor_ref_illuminant sensor_ref_illuminant_t; +typedef struct camera2_sensor_ctl camera2_sensor_ctl_t; +typedef struct camera2_sensor_dm camera2_sensor_dm_t; +typedef struct camera2_sensor_sm camera2_sensor_sm_t; +typedef enum flash_mode flash_mode_t; +typedef struct camera2_flash_ctl camera2_flash_ctl_t; +typedef struct camera2_flash_dm camera2_flash_dm_t; +typedef struct camera2_flash_sm camera2_flash_sm_t; +typedef enum processing_mode processing_mode_t; +typedef struct camera2_hotpixel_ctl camera2_hotpixel_ctl_t; +typedef struct camera2_hotpixel_dm camera2_hotpixel_dm_t; +typedef struct camera2_hotpixel_sm camera2_hotpixel_sm_t; + +typedef struct camera2_demosaic_ctl camera2_demosaic_ctl_t; +typedef struct camera2_demosaic_dm camera2_demosaic_dm_t; +typedef struct camera2_noisereduction_ctl camera2_noisereduction_ctl_t; +typedef struct camera2_noisereduction_dm camera2_noisereduction_dm_t; +typedef struct camera2_noisereduction_sm camera2_noisereduction_sm_t; +typedef struct camera2_shading_ctl camera2_shading_ctl_t; +typedef struct camera2_shading_dm camera2_shading_dm_t; +typedef enum colorcorrection_mode colorcorrection_mode_t; +typedef struct camera2_colorcorrection_ctl camera2_colorcorrection_ctl_t; +typedef struct camera2_colorcorrection_dm camera2_colorcorrection_dm_t; +typedef struct camera2_colorcorrection_sm camera2_colorcorrection_sm_t; +typedef enum tonemap_mode tonemap_mode_t; +typedef enum tonemap_presetCurve tonemap_presetCurve_t; +typedef struct camera2_tonemap_ctl camera2_tonemap_ctl_t; +typedef struct camera2_tonemap_dm camera2_tonemap_dm_t; +typedef struct camera2_tonemap_sm camera2_tonemap_sm_t; + +typedef struct camera2_edge_ctl camera2_edge_ctl_t; +typedef struct camera2_edge_dm camera2_edge_dm_t; +typedef struct camera2_edge_sm camera2_edge_sm_t; +typedef struct camera2_scaler_ctl camera2_scaler_ctl_t; +typedef struct camera2_scaler_dm camera2_scaler_dm_t; +typedef struct camera2_jpeg_ctl camera2_jpeg_ctl_t; +typedef struct camera2_jpeg_dm camera2_jpeg_dm_t; +typedef struct camera2_jpeg_sm camera2_jpeg_sm_t; +typedef enum facedetect_mode facedetect_mode_t; +typedef enum stats_mode stats_mode_t; +typedef struct camera2_stats_ctl camera2_stats_ctl_t; +typedef struct camera2_stats_dm camera2_stats_dm_t; +typedef struct camera2_stats_sm camera2_stats_sm_t; +typedef enum aa_capture_intent aa_capture_intent_t; +typedef enum aa_mode aa_mode_t; +typedef enum aa_scene_mode aa_scene_mode_t; +typedef enum aa_effect_mode aa_effect_mode_t; +typedef enum aa_aemode aa_aemode_t; 
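/* Editor's note (illustrative sketch, not part of this patch): camera2_shot_ext above is
 * the per-frame metadata container the HAL exchanges with the driver. A typical
 * preparation step zero-fills it, stamps the magic number, and sets the 3A controls and
 * node-group request flags. METADATA_MAGIC_NUMBER is assumed to be defined earlier in
 * this header; the specific control values below are placeholders.
 *
 *   struct camera2_shot_ext shotExt;
 *   memset(&shotExt, 0, sizeof(shotExt));                     // requires <string.h>
 *   shotExt.shot.magicNumber          = METADATA_MAGIC_NUMBER; // assumed define
 *   shotExt.shot.ctl.aa.mode          = AA_CONTROL_AUTO;
 *   shotExt.shot.ctl.aa.aeMode        = AA_AEMODE_CENTER;
 *   shotExt.shot.ctl.aa.afMode        = AA_AFMODE_CONTINUOUS_PICTURE;
 *   shotExt.shot.ctl.sensor.frameDuration = 33333333ULL;       // ~30 fps, assuming ns
 *   shotExt.node_group.leader.request = 1;                      // request output from the leader node
 */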
+typedef enum aa_ae_antibanding_mode aa_ae_antibanding_mode_t; +typedef enum aa_awbmode aa_awbmode_t; +typedef enum aa_afmode aa_afmode_t; +typedef enum aa_afstate aa_afstate_t; +typedef enum aa_ae_lockavailable aa_ae_lockavailable_t; +typedef enum aa_awb_lockavailable aa_awb_lockavailable_t; +typedef enum aa_available_mode aa_available_mode_t; +typedef struct camera2_aa_ctl camera2_aa_ctl_t; +typedef struct camera2_aa_dm camera2_aa_dm_t; +typedef struct camera2_aa_sm camera2_aa_sm_t; +typedef struct camera2_lens_usm camera2_lens_usm_t; +typedef struct camera2_sensor_usm camera2_sensor_usm_t; +typedef struct camera2_flash_usm camera2_flash_usm_t; +typedef struct camera2_ctl camera2_ctl_t; +typedef struct camera2_uctl camera2_uctl_t; +typedef struct camera2_dm camera2_dm_t; +typedef struct camera2_sm camera2_sm_t; + +typedef struct camera2_reprocess_ctl camera2_reprocess_ctl_t; +typedef struct camera2_reprocess_dm camera2_reprocess_dm_t; +typedef struct camera2_reprocess_sm camera2_reprocess_sm_t; +typedef enum depth_available_depth_stream_config depth_available_depth_stream_config_t; +typedef enum depth_depth_is_exclusive depth_depth_is_exclusive_t; +typedef struct camera2_depth_sm camera2_depth_ctl_t; + +typedef struct camera2_scaler_sm camera2_scaler_sm_t; +typedef struct camera2_scaler_uctl camera2_scaler_uctl_t; + +typedef struct camera2_fd_uctl camera2_fd_uctl_t; +typedef struct camera2_fd_udm camera2_fd_udm_t; + +typedef struct camera2_sensor_uctl camera2_sensor_uctl_t; + +typedef struct camera2_aa_uctl camera2_aa_uctl_t; +typedef struct camera2_aa_udm camera2_aa_udm_t; + +typedef struct camera2_lens_uctl camera2_lens_uctl_t; +typedef struct camera2_lens_udm camera2_lens_udm_t; + +typedef struct camera2_ae_udm camera2_ae_udm_t; +typedef struct camera2_awb_udm camera2_awb_udm_t; +typedef struct camera2_af_udm camera2_af_udm_t; +typedef struct camera2_as_udm camera2_as_udm_t; +typedef struct camera2_ipc_udm camera2_ipc_udm_t; +typedef struct camera2_udm camera2_udm_t; + +typedef struct camera2_internal_udm camera2_internal_udm_t; + +typedef struct camera2_flash_uctl camera2_flash_uctl_t; + +typedef struct camera2_companion_udm camera2_companion_udm_t; + +typedef struct camera2_shot camera2_shot_t; +#endif diff --git a/libcamera/34xx/hal1/ExynosCamera.cpp b/libcamera/34xx/hal1/ExynosCamera.cpp new file mode 100644 index 0000000..0618008 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCamera.cpp @@ -0,0 +1,4661 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera" +#include + +#include "ExynosCamera.h" + +namespace android { + +#ifdef MONITOR_LOG_SYNC +uint32_t ExynosCamera::cameraSyncLogId = 0; +#endif + +ExynosCamera::ExynosCamera(int cameraId, camera_device_t *dev) +{ + ExynosCameraActivityUCTL *uctlMgr = NULL; + + BUILD_DATE(); + + checkAndroidVersion(); + + m_cameraId = cameraId; + m_dev = dev; + + initialize(); +} + +void ExynosCamera::initialize() +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + int ret = 0; + + ExynosCameraActivityUCTL *uctlMgr = NULL; + memset(m_name, 0x00, sizeof(m_name)); + + m_parameters = new ExynosCamera1Parameters(m_cameraId); + CLOGD("DEBUG(%s):Parameters(Id=%d) created", __FUNCTION__, m_cameraId); + + m_parameters->setHalVersion(IS_HAL_VER_1_0); + + m_exynosCameraActivityControl = m_parameters->getActivityControl(); + + m_previewFrameFactory = NULL; + m_reprocessingFrameFactory = NULL; + /* vision */ + m_visionFrameFactory= NULL; + + m_ionAllocator = NULL; + m_grAllocator = NULL; + m_mhbAllocator = NULL; + + m_frameMgr = NULL; + + m_createInternalBufferManager(&m_bayerBufferMgr, "BAYER_BUF"); + m_createInternalBufferManager(&m_3aaBufferMgr, "3A1_BUF"); + m_createInternalBufferManager(&m_ispBufferMgr, "ISP_BUF"); + m_createInternalBufferManager(&m_hwDisBufferMgr, "HW_DIS_BUF"); + m_createInternalBufferManager(&m_vraBufferMgr, "VRA_BUF"); + + /* reprocessing Buffer */ + m_createInternalBufferManager(&m_ispReprocessingBufferMgr, "ISP_RE_BUF"); + m_createInternalBufferManager(&m_sccReprocessingBufferMgr, "SCC_RE_BUF"); + + m_createInternalBufferManager(&m_sccBufferMgr, "SCC_BUF"); + m_createInternalBufferManager(&m_gscBufferMgr, "GSC_BUF"); + m_createInternalBufferManager(&m_jpegBufferMgr, "JPEG_BUF"); + m_createInternalBufferManager(&m_thumbnailBufferMgr, "THUMBNAIL_BUF"); + + /* preview Buffer */ + m_scpBufferMgr = NULL; + m_createInternalBufferManager(&m_previewCallbackBufferMgr, "PREVIEW_CB_BUF"); + m_createInternalBufferManager(&m_highResolutionCallbackBufferMgr, "HIGH_RESOLUTION_CB_BUF"); + m_fdCallbackHeap = NULL; + + /* recording Buffer */ + m_recordingCallbackHeap = NULL; + m_createInternalBufferManager(&m_recordingBufferMgr, "REC_BUF"); + + m_createThreads(); + + m_pipeFrameDoneQ = new frame_queue_t; + dstIspReprocessingQ = new frame_queue_t; + dstSccReprocessingQ = new frame_queue_t; + dstGscReprocessingQ = new frame_queue_t; + m_zoomPreviwWithCscQ = new frame_queue_t; + dstJpegReprocessingQ = new frame_queue_t; + /* vision */ + m_pipeFrameVisionDoneQ = new frame_queue_t; + + m_frameFactoryQ = new framefactory_queue_t; + m_facedetectQ = new frame_queue_t; + m_facedetectQ->setWaitTime(500000000); + + m_previewQ = new frame_queue_t; + m_previewQ->setWaitTime(500000000); + + m_vraThreadQ = new frame_queue_t; + m_vraThreadQ->setWaitTime(500000000); + m_vraGscDoneQ = new frame_queue_t; + m_vraGscDoneQ->setWaitTime(500000000); + m_vraPipeDoneQ = new frame_queue_t; + m_vraPipeDoneQ->setWaitTime(500000000); + + m_previewCallbackGscFrameDoneQ = new frame_queue_t; + m_recordingQ = new frame_queue_t; + m_recordingQ->setWaitTime(500000000); + m_postPictureQ = new frame_queue_t(m_postPictureThread); + for(int i = 0 ; i < MAX_NUM_PIPES ; i++ ) { + m_mainSetupQ[i] = new frame_queue_t; + m_mainSetupQ[i]->setWaitTime(500000000); + } + m_jpegCallbackQ = new jpeg_callback_queue_t; + m_postviewCallbackQ = new postview_callback_queue_t; + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + 
m_jpegSaveQ[threadNum] = new jpeg_callback_queue_t; + m_jpegSaveQ[threadNum]->setWaitTime(2000000000); + m_burst[threadNum] = false; + m_running[threadNum] = false; + } + + dstIspReprocessingQ->setWaitTime(20000000); + dstSccReprocessingQ->setWaitTime(50000000); + dstGscReprocessingQ->setWaitTime(500000000); + dstJpegReprocessingQ->setWaitTime(500000000); + /* vision */ + m_pipeFrameVisionDoneQ->setWaitTime(2000000000); + + m_jpegCallbackQ->setWaitTime(1000000000); + m_postviewCallbackQ->setWaitTime(1000000000); + + memset(&m_frameMetadata, 0, sizeof(camera_frame_metadata_t)); + memset(m_faces, 0, sizeof(camera_face_t) * NUM_OF_DETECTED_FACES); + + m_exitAutoFocusThread = false; + m_autoFocusRunning = false; + m_previewEnabled = false; + m_pictureEnabled = false; + m_recordingEnabled = false; + m_zslPictureEnabled = false; + m_flagStartFaceDetection = false; + m_flagLLSStart = false; + m_flagLightCondition = false; + m_fdThreshold = 0; + m_captureSelector = NULL; + m_sccCaptureSelector = NULL; + m_autoFocusType = 0; + m_hdrEnabled = false; + m_doCscRecording = true; + m_recordingBufferCount = NUM_RECORDING_BUFFERS; + m_frameSkipCount = 0; + m_isZSLCaptureOn = false; + m_isSuccessedBufferAllocation = false; + m_skipCount = 0; + +#ifdef FPS_CHECK + for (int i = 0; i < DEBUG_MAX_PIPE_NUM; i++) + m_debugFpsCount[i] = 0; +#endif + + m_stopBurstShot = false; + m_disablePreviewCB = false; + m_checkFirstFrameLux = false; + + m_callbackState = 0; + m_callbackStateOld = 0; + m_callbackMonitorCount = 0; + + m_highResolutionCallbackRunning = false; + m_highResolutionCallbackQ = new frame_queue_t(m_highResolutionCallbackThread); + m_highResolutionCallbackQ->setWaitTime(500000000); + m_skipReprocessing = false; + m_isFirstStart = true; + m_parameters->setIsFirstStartFlag(m_isFirstStart); +#ifdef RAWDUMP_CAPTURE + m_RawCaptureDumpQ = new frame_queue_t(m_RawCaptureDumpThread); +#endif +#ifdef RAWDUMP_CAPTURE + ExynosCameraActivitySpecialCapture *m_sCapture = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + m_sCapture->resetRawCaptureFcount(); +#endif + + m_exynosconfig = NULL; + m_setConfigInform(); + + m_setFrameManager(); + + + /* HACK Reset Preview Flag*/ + m_resetPreview = false; + + m_dynamicSccCount = 0; + m_previewBufferCount = NUM_PREVIEW_BUFFERS; + + /* HACK for CTS2.0 */ + m_oldPreviewW = 0; + m_oldPreviewH = 0; + +#ifdef FIRST_PREVIEW_TIME_CHECK + m_flagFirstPreviewTimerOn = false; +#endif + + /* init infomation of fd orientation*/ + m_parameters->setDeviceOrientation(0); + uctlMgr = m_exynosCameraActivityControl->getUCTLMgr(); + if (uctlMgr != NULL) + uctlMgr->setDeviceRotation(m_parameters->getFdOrientation()); +#ifdef MONITOR_LOG_SYNC + m_syncLogDuration = 0; +#endif + vendorSpecificConstructor(m_cameraId, m_dev); + + m_callbackCookie = 0; + m_fliteFrameCount = 0; + m_3aa_ispFrameCount = 0; + m_ispFrameCount = 0; + m_sccFrameCount = 0; + m_scpFrameCount = 0; + m_vraRunningCount = 0; + m_lastRecordingTimeStamp = 0; + m_recordingStartTimeStamp = 0; + m_fdCallbackHeap = 0; + m_burstSaveTimerTime = 0; + m_burstDuration = 0; + m_burstInitFirst = 0; + m_burstRealloc = 0; + m_displayPreviewToggle = 0; + m_hdrSkipedFcount = 0; + m_curMinFps = 0; + m_visionFps = 0; + m_visionAe = 0; + m_hackForAlignment = 0; + m_recordingFrameSkipCount = 0; + m_faceDetected = false; + m_flagThreadStop = false; + m_isNeedAllocPictureBuffer = false; + m_isCancelBurstCapture = false; + m_isTryStopFlash = false; + m_notifyCb = NULL; + m_dataCb = NULL; + m_dataCbTimestamp = NULL; + m_getMemoryCb = NULL; + 
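    /* Editor's note (not part of this patch): the setWaitTime() arguments used in this
     * function (20000000, 500000000, 1000000000, 2000000000) appear to be nanosecond
     * dequeue timeouts, i.e. 20 ms, 500 ms, 1 s and 2 s. If that reading is correct,
     * named constants would make the intent clearer, e.g.:
     *
     *   static const uint64_t WAIT_500MS_NS = 500000000ULL;  // hypothetical constant
     *   m_previewQ->setWaitTime(WAIT_500MS_NS);
     */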
m_previewWindow = NULL; + m_initFrameFactory(); + + m_tempshot = new struct camera2_shot_ext; + m_fdmeta_shot = new struct camera2_shot_ext; + m_meta_shot = new struct camera2_shot_ext; +} + +status_t ExynosCamera::m_setConfigInform() { + struct ExynosConfigInfo exynosConfig; + memset((void *)&exynosConfig, 0x00, sizeof(exynosConfig)); + + exynosConfig.mode = CONFIG_MODE::NORMAL; + + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_sensor_buffers = (getCameraId() == CAMERA_ID_BACK) ? NUM_SENSOR_BUFFERS : FRONT_NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_bayer_buffers = (getCameraId() == CAMERA_ID_BACK) ? NUM_BAYER_BUFFERS : FRONT_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.init_bayer_buffers = INIT_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_3aa_buffers = (getCameraId() == CAMERA_ID_BACK) ? NUM_3AA_BUFFERS : FRONT_NUM_3AA_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_hwdis_buffers = NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_vra_buffers = NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_preview_buffers = NUM_PREVIEW_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_picture_buffers = (getCameraId() == CAMERA_ID_BACK) ? NUM_PICTURE_BUFFERS : FRONT_NUM_PICTURE_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_reprocessing_buffers = NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_recording_buffers = NUM_RECORDING_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_fastaestable_buffer = INITIAL_SKIP_FRAME; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.reprocessing_bayer_hold_count = REPROCESSING_BAYER_HOLD_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.preview_buffer_margin = NUM_PREVIEW_BUFFERS_MARGIN; + + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_FLITE] = (getCameraId() == CAMERA_ID_BACK) ? PIPE_FLITE_PREPARE_COUNT : PIPE_FLITE_FRONT_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_3AC] = PIPE_3AC_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_3AA] = PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_3AA_ISP] = PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_ISP] = PIPE_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_ISPC] = (getCameraId() == CAMERA_ID_BACK) ? PIPE_3AA_ISP_PREPARE_COUNT : PIPE_FLITE_FRONT_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_SCC] = (getCameraId() == CAMERA_ID_BACK) ? PIPE_3AA_ISP_PREPARE_COUNT : PIPE_FLITE_FRONT_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_SCP] = (getCameraId() == CAMERA_ID_BACK) ? PIPE_SCP_PREPARE_COUNT : PIPE_SCP_FRONT_PREPARE_COUNT; + + /* reprocessing */ + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_SCP_REPROCESSING] = PIPE_SCP_REPROCESSING_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_ISPC_REPROCESSING] = PIPE_SCC_REPROCESSING_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_SCC_REPROCESSING] = PIPE_SCC_REPROCESSING_PREPARE_COUNT; + +#if (USE_HIGHSPEED_RECORDING) + /* Config HIGH_SPEED 60 buffer & pipe info */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_sensor_buffers = (getCameraId() == CAMERA_ID_BACK) ? 
FPS60_NUM_NUM_SENSOR_BUFFERS : FPS60_FRONT_NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_bayer_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS60_NUM_NUM_BAYER_BUFFERS : FPS60_FRONT_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.init_bayer_buffers = FPS60_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_3aa_buffers = FPS60_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_hwdis_buffers = FPS60_NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_vra_buffers = FPS60_NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_preview_buffers = FPS60_NUM_PREVIEW_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_picture_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS60_NUM_PICTURE_BUFFERS : FPS60_FRONT_NUM_PICTURE_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_reprocessing_buffers = FPS60_NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_recording_buffers = FPS60_NUM_RECORDING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_fastaestable_buffer = FPS60_INITIAL_SKIP_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.reprocessing_bayer_hold_count = FPS60_REPROCESSING_BAYER_HOLD_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.preview_buffer_margin = FPS60_NUM_PREVIEW_BUFFERS_MARGIN; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_FLITE] = FPS60_PIPE_FLITE_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_3AC] = FPS60_PIPE_3AC_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_3AA] = FPS60_PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_3AA_ISP] = FPS60_PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_SCP] = FPS60_PIPE_SCP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_SCP_REPROCESSING] = FPS60_PIPE_SCP_REPROCESSING_PREPARE_COUNT; + + /* Config HIGH_SPEED 120 buffer & pipe info */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_sensor_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS120_NUM_NUM_SENSOR_BUFFERS : FPS120_FRONT_NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_bayer_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS120_NUM_NUM_BAYER_BUFFERS : FPS120_FRONT_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.init_bayer_buffers = FPS120_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_3aa_buffers = FPS120_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_hwdis_buffers = FPS120_NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_vra_buffers = FPS120_NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_preview_buffers = (FPS120_NUM_PREVIEW_BUFFERS > MAX_BUFFERS) ? MAX_BUFFERS : FPS120_NUM_PREVIEW_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_picture_buffers = (getCameraId() == CAMERA_ID_BACK) ? 
FPS120_NUM_PICTURE_BUFFERS : FPS120_FRONT_NUM_PICTURE_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_reprocessing_buffers = FPS120_NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_recording_buffers = FPS120_NUM_RECORDING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_fastaestable_buffer = FPS120_INITIAL_SKIP_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.reprocessing_bayer_hold_count = FPS120_REPROCESSING_BAYER_HOLD_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.preview_buffer_margin = FPS120_NUM_PREVIEW_BUFFERS_MARGIN; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_FLITE] = FPS120_PIPE_FLITE_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_3AC] = FPS120_PIPE_3AC_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_3AA] = FPS120_PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_3AA_ISP] = FPS120_PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_SCP] = FPS120_PIPE_SCP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_SCP_REPROCESSING] = FPS120_PIPE_SCP_REPROCESSING_PREPARE_COUNT; +#ifdef SUPPORT_HIGH_SPEED_240FPS + /* Config HIGH_SPEED 240 buffer & pipe info */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_sensor_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS240_NUM_NUM_SENSOR_BUFFERS : FPS240_FRONT_NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_bayer_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS240_NUM_NUM_BAYER_BUFFERS : FPS240_FRONT_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.init_bayer_buffers = FPS240_INIT_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_3aa_buffers = FPS240_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_hwdis_buffers = FPS240_NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_vra_buffers = FPS240_NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_preview_buffers = (FPS240_NUM_PREVIEW_BUFFERS > MAX_BUFFERS) ? MAX_BUFFERS : FPS240_NUM_PREVIEW_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_picture_buffers = (getCameraId() == CAMERA_ID_BACK) ? 
FPS240_NUM_PICTURE_BUFFERS : FPS240_FRONT_NUM_PICTURE_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_reprocessing_buffers = FPS240_NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_recording_buffers = FPS240_NUM_RECORDING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.num_fastaestable_buffer = FPS240_INITIAL_SKIP_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.reprocessing_bayer_hold_count = FPS240_REPROCESSING_BAYER_HOLD_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].bufInfo.preview_buffer_margin = FPS240_NUM_PREVIEW_BUFFERS_MARGIN; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].pipeInfo.prepare[PIPE_FLITE] = FPS240_PIPE_FLITE_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].pipeInfo.prepare[PIPE_3AC] = FPS240_PIPE_3AC_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].pipeInfo.prepare[PIPE_3AA] = FPS240_PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].pipeInfo.prepare[PIPE_3AA_ISP] = FPS240_PIPE_3AA_ISP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].pipeInfo.prepare[PIPE_SCP] = FPS240_PIPE_SCP_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_240].pipeInfo.prepare[PIPE_SCP_REPROCESSING] = FPS240_PIPE_SCP_REPROCESSING_PREPARE_COUNT; +#endif +#endif + + m_parameters->setConfig(&exynosConfig); + + m_exynosconfig = m_parameters->getConfig(); + + return NO_ERROR; +} + +void ExynosCamera::m_createThreads(void) +{ + m_mainThread = new mainCameraThread(this, &ExynosCamera::m_mainThreadFunc, "ExynosCameraThread", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThread created", __FUNCTION__); + + m_previewThread = new mainCameraThread(this, &ExynosCamera::m_previewThreadFunc, "previewThread", PRIORITY_DISPLAY); + CLOGD("DEBUG(%s):previewThread created", __FUNCTION__); + + /* + * In here, we cannot know single, dual scenario. + * So, make all threads. 
+ */ + /* if (m_parameters->isFlite3aaOtf() == true) { */ + if (1) { + m_mainSetupQThread[INDEX(PIPE_FLITE)] = new mainCameraThread(this, &ExynosCamera::m_mainThreadQSetupFLITE, "mainThreadQSetupFLITE", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThreadQSetupFLITEThread created", __FUNCTION__); + +/* Change 3AA_ISP, 3AC, SCP to ISP */ +/* + m_mainSetupQThread[INDEX(PIPE_3AC)] = new mainCameraThread(this, &ExynosCamera::m_mainThreadQSetup3AC, "mainThreadQSetup3AC", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThreadQSetup3ACThread created", __FUNCTION__); + + m_mainSetupQThread[INDEX(PIPE_3AA_ISP)] = new mainCameraThread(this, &ExynosCamera::m_mainThreadQSetup3AA_ISP, "mainThreadQSetup3AA_ISP", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThreadQSetup3AA_ISPThread created", __FUNCTION__); + + m_mainSetupQThread[INDEX(PIPE_ISP)] = new mainCameraThread(this, &ExynosCamera::m_mainThreadQSetupISP, "mainThreadQSetupISP", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThreadQSetupISPThread created", __FUNCTION__); + + m_mainSetupQThread[INDEX(PIPE_SCP)] = new mainCameraThread(this, &ExynosCamera::m_mainThreadQSetupSCP, "mainThreadQSetupSCP", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThreadQSetupSCPThread created", __FUNCTION__); +*/ + + m_mainSetupQThread[INDEX(PIPE_3AA)] = new mainCameraThread(this, &ExynosCamera::m_mainThreadQSetup3AA, "mainThreadQSetup3AA", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):mainThreadQSetup3AAThread created", __FUNCTION__); + + } + m_setBuffersThread = new mainCameraThread(this, &ExynosCamera::m_setBuffersThreadFunc, "setBuffersThread"); + CLOGD("DEBUG(%s):setBuffersThread created", __FUNCTION__); + + m_startPictureInternalThread = new mainCameraThread(this, &ExynosCamera::m_startPictureInternalThreadFunc, "startPictureInternalThread"); + CLOGD("DEBUG(%s):startPictureInternalThread created", __FUNCTION__); + + m_startPictureBufferThread = new mainCameraThread(this, &ExynosCamera::m_startPictureBufferThreadFunc, "startPictureBufferThread"); + CLOGD("DEBUG(%s):startPictureBufferThread created", __FUNCTION__); + + m_prePictureThread = new mainCameraThread(this, &ExynosCamera::m_prePictureThreadFunc, "prePictureThread"); + CLOGD("DEBUG(%s):prePictureThread created", __FUNCTION__); + + m_pictureThread = new mainCameraThread(this, &ExynosCamera::m_pictureThreadFunc, "PictureThread"); + CLOGD("DEBUG(%s):pictureThread created", __FUNCTION__); + + m_postPictureThread = new mainCameraThread(this, &ExynosCamera::m_postPictureThreadFunc, "postPictureThread"); + CLOGD("DEBUG(%s):postPictureThread created", __FUNCTION__); + + m_vraThread = new mainCameraThread(this, &ExynosCamera::m_vraThreadFunc, "vraThread"); + CLOGD("DEBUG(%s[%d]):recordingThread created", __FUNCTION__, __LINE__); + + m_recordingThread = new mainCameraThread(this, &ExynosCamera::m_recordingThreadFunc, "recordingThread"); + CLOGD("DEBUG(%s):recordingThread created", __FUNCTION__); + + m_autoFocusThread = new mainCameraThread(this, &ExynosCamera::m_autoFocusThreadFunc, "AutoFocusThread"); + CLOGD("DEBUG(%s):autoFocusThread created", __FUNCTION__); + + m_autoFocusContinousThread = new mainCameraThread(this, &ExynosCamera::m_autoFocusContinousThreadFunc, "AutoFocusContinousThread"); + CLOGD("DEBUG(%s):autoFocusContinousThread created", __FUNCTION__); + + m_facedetectThread = new mainCameraThread(this, &ExynosCamera::m_facedetectThreadFunc, "FaceDetectThread"); + CLOGD("DEBUG(%s):FaceDetectThread created", __FUNCTION__); + + m_monitorThread = new mainCameraThread(this, 
&ExynosCamera::m_monitorThreadFunc, "monitorThread"); + CLOGD("DEBUG(%s):monitorThread created", __FUNCTION__); + + m_framefactoryThread = new mainCameraThread(this, &ExynosCamera::m_frameFactoryInitThreadFunc, "FrameFactoryInitThread"); + CLOGD("DEBUG(%s):FrameFactoryInitThread created", __FUNCTION__); + + m_jpegCallbackThread = new mainCameraThread(this, &ExynosCamera::m_jpegCallbackThreadFunc, "jpegCallbackThread"); + CLOGD("DEBUG(%s):jpegCallbackThread created", __FUNCTION__); + + /* saveThread */ + char threadName[20]; + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + snprintf(threadName, sizeof(threadName), "jpegSaveThread%d", threadNum); + m_jpegSaveThread[threadNum] = new mainCameraThread(this, &ExynosCamera::m_jpegSaveThreadFunc, threadName); + CLOGD("DEBUG(%s):%s created", __FUNCTION__, threadName); + } + + /* high resolution preview callback Thread */ + m_highResolutionCallbackThread = new mainCameraThread(this, &ExynosCamera::m_highResolutionCallbackThreadFunc, "m_highResolutionCallbackThread"); + CLOGD("DEBUG(%s):highResolutionCallbackThread created", __FUNCTION__); + + /* vision */ + m_visionThread = new mainCameraThread(this, &ExynosCamera::m_visionThreadFunc, "VisionThread", PRIORITY_URGENT_DISPLAY); + CLOGD("DEBUG(%s):visionThread created", __FUNCTION__); + + /* Shutter callback */ + m_shutterCallbackThread = new mainCameraThread(this, &ExynosCamera::m_shutterCallbackThreadFunc, "shutterCallbackThread"); + CLOGD("DEBUG(%s):shutterCallbackThread created", __FUNCTION__); + +} + +status_t ExynosCamera::m_setupFrameFactory(void) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = NO_ERROR; + + if (m_parameters->getVisionMode() == true) { + /* about vision */ + if( m_frameFactory[FRAME_FACTORY_TYPE_VISION] == NULL) { + m_frameFactory[FRAME_FACTORY_TYPE_VISION] = new ExynosCameraFrameFactoryVision(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_VISION]->setFrameManager(m_frameMgr); + } + m_visionFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_VISION]; + + if (m_frameFactory[FRAME_FACTORY_TYPE_VISION] != NULL && m_frameFactory[FRAME_FACTORY_TYPE_VISION]->isCreated() == false) { + CLOGD("DEBUG(%s[%d]):setupFrameFactory pushProcessQ(%d)", __FUNCTION__, __LINE__, FRAME_FACTORY_TYPE_VISION); + m_frameFactoryQ->pushProcessQ(&m_frameFactory[FRAME_FACTORY_TYPE_VISION]); + } + } else { + /* about preview */ + if (m_parameters->getDualMode() == true) { + m_previewFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_DUAL_PREVIEW]; + } else if (m_parameters->getTpuEnabledMode() == true) { + if (m_parameters->is3aaIspOtf() == true) + m_previewFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_OTF_TPU]; + else + m_previewFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_M2M_TPU]; + } else { + if (m_parameters->is3aaIspOtf() == true) + m_previewFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_OTF]; + else + m_previewFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_M2M]; + } + + /* find previewFrameFactory and push */ + for (int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) { + if (m_previewFrameFactory == m_frameFactory[i]) { + if (m_frameFactory[i] != NULL && m_frameFactory[i]->isCreated() == false) { + CLOGD("DEBUG(%s[%d]):setupFrameFactory pushProcessQ(%d)", __FUNCTION__, __LINE__, i); + m_frameFactoryQ->pushProcessQ(&m_frameFactory[i]); + } + break; + } + } + + /* about reprocessing */ + if (m_parameters->isReprocessing() == true) { 
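+        /*
+         * Reprocessing (still-capture) factories are queued here with the same
+         * deferred-creation pattern used for the preview factory above: any
+         * factory that is not yet created is pushed to m_frameFactoryQ, and the
+         * actual create() happens later in m_frameFactoryInitThreadFunc.
+         *
+         * Preview factory selection above, for reference:
+         *   dual mode                -> FRAME_FACTORY_TYPE_DUAL_PREVIEW
+         *   TPU on,  3AA-ISP OTF     -> FRAME_FACTORY_TYPE_3AA_ISP_OTF_TPU
+         *   TPU on,  3AA-ISP M2M     -> FRAME_FACTORY_TYPE_3AA_ISP_M2M_TPU
+         *   TPU off, 3AA-ISP OTF     -> FRAME_FACTORY_TYPE_3AA_ISP_OTF
+         *   TPU off, 3AA-ISP M2M     -> FRAME_FACTORY_TYPE_3AA_ISP_M2M
+         */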
+ int numOfReprocessingFactory = m_parameters->getNumOfReprocessingFactory(); + + for (int i = FRAME_FACTORY_TYPE_REPROCESSING; i < numOfReprocessingFactory + FRAME_FACTORY_TYPE_REPROCESSING; i++) { + if (m_frameFactory[i] != NULL && m_frameFactory[i]->isCreated() == false) { + CLOGD("DEBUG(%s[%d]):setupFrameFactory pushProcessQ(%d)", __FUNCTION__, __LINE__, FRAME_FACTORY_TYPE_REPROCESSING); + m_frameFactoryQ->pushProcessQ(&m_frameFactory[i]); + } + } + } + } + + /* + * disable until multi-instace is possible. + */ + /* + for (int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) { + if (m_frameFactory[i] != NULL && m_frameFactory[i]->isCreated() == false) { + CLOGD("DEBUG(%s[%d]):setupFrameFactory pushProcessQ(%d)", __FUNCTION__, __LINE__, i); + m_frameFactoryQ->pushProcessQ(&m_frameFactory[i]); + } else { + CLOGD("DEBUG(%s[%d]):setupFrameFactory no Push(%d)", __FUNCTION__, __LINE__, i); + } + } + */ + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCamera::m_initFrameFactory(void) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + status_t ret = NO_ERROR; + ExynosCameraFrameFactory *factory = NULL; + + m_previewFrameFactory = NULL; + m_pictureFrameFactory = NULL; + m_reprocessingFrameFactory = NULL; + m_visionFrameFactory = NULL; + + for(int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) + m_frameFactory[i] = NULL; + + /* + * new all FrameFactories. + * because this called on open(). so we don't know current scenario + */ + + factory = new ExynosCameraFrameFactory3aaIspM2M(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_M2M] = factory; + /* hack : for dual */ + if (getCameraId() == CAMERA_ID_FRONT) { + factory = new ExynosCameraFrameFactoryFront(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_DUAL_PREVIEW] = factory; + } else { + factory = new ExynosCameraFrameFactory3aaIspM2M(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_DUAL_PREVIEW] = factory; + } + + factory = new ExynosCameraFrameFactory3aaIspM2MTpu(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_M2M_TPU] = factory; + + factory = new ExynosCameraFrameFactory3aaIspOtf(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_OTF] = factory; + + factory = new ExynosCameraFrameFactory3aaIspOtfTpu(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_3AA_ISP_OTF_TPU] = factory; + + factory = new ExynosCameraFrameReprocessingFactory(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING] = factory; + + factory = new ExynosCameraFrameReprocessingFactoryNV21(m_cameraId, m_parameters); + m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING_NV21] = factory; + + for (int i = 0 ; i < FRAME_FACTORY_TYPE_MAX ; i++) { + factory = m_frameFactory[i]; + if( factory != NULL ) + factory->setFrameManager(m_frameMgr); + } + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCamera::m_deinitFrameFactory(void) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = NO_ERROR; + ExynosCameraFrameFactory *frameFactory = NULL; + + for (int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) { + if (m_frameFactory[i] != NULL) { + frameFactory = m_frameFactory[i]; + + for (int k = i + 1; k < FRAME_FACTORY_TYPE_MAX; k++) { + if (frameFactory == m_frameFactory[k]) { + CLOGD("DEBUG(%s[%d]): m_frameFactory index(%d) and index(%d) are same instance, set index(%d) = 
NULL", + __FUNCTION__, __LINE__, i, k, k); + m_frameFactory[k] = NULL; + } + } + + if (m_frameFactory[i]->isCreated() == true) { + ret = m_frameFactory[i]->destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_frameFactory[%d] destroy fail", __FUNCTION__, __LINE__, i); + } + + SAFE_DELETE(m_frameFactory[i]); + + CLOGD("DEBUG(%s[%d]):m_frameFactory[%d] destroyed", __FUNCTION__, __LINE__, i); + } + } + + m_previewFrameFactory = NULL; + m_pictureFrameFactory = NULL; + m_reprocessingFrameFactory = NULL; + m_visionFrameFactory = NULL; + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCamera::m_setFrameManager() +{ + sp worker; + m_frameMgr = new ExynosCameraFrameManager("FRAME MANAGER", m_cameraId, FRAMEMGR_OPER::SLIENT, 100, 150); + + worker = new CreateWorker("CREATE FRAME WORKER", m_cameraId, FRAMEMGR_OPER::SLIENT, 200); + m_frameMgr->setWorker(FRAMEMGR_WORKER::CREATE, worker); + + worker = new DeleteWorker("DELETE FRAME WORKER", m_cameraId, FRAMEMGR_OPER::SLIENT); + m_frameMgr->setWorker(FRAMEMGR_WORKER::DELETE, worker); + + sp key = new KeyBox("FRAME KEYBOX", m_cameraId); + + m_frameMgr->setKeybox(key); + + return NO_ERROR; +} + + +ExynosCamera::~ExynosCamera() +{ + this->release(); +} + +void ExynosCamera::release() +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + int ret = 0; + + m_stopCompanion(); + + if (m_frameMgr != NULL) { + m_frameMgr->stop(); + } + + /* release all framefactory */ + m_deinitFrameFactory(); + + if (m_parameters != NULL) { + delete m_parameters; + m_parameters = NULL; + CLOGD("DEBUG(%s):Parameters(Id=%d) destroyed", __FUNCTION__, m_cameraId); + } + + /* free all buffers */ + m_releaseBuffers(); + + if (m_ionAllocator != NULL) { + delete m_ionAllocator; + m_ionAllocator = NULL; + } + + if (m_grAllocator != NULL) { + delete m_grAllocator; + m_grAllocator = NULL; + } + + if (m_mhbAllocator != NULL) { + delete m_mhbAllocator; + m_mhbAllocator = NULL; + } + + if (m_pipeFrameDoneQ != NULL) { + delete m_pipeFrameDoneQ; + m_pipeFrameDoneQ = NULL; + } + + if (m_zoomPreviwWithCscQ != NULL) { + delete m_zoomPreviwWithCscQ; + m_zoomPreviwWithCscQ = NULL; + } + + /* vision */ + if (m_pipeFrameVisionDoneQ != NULL) { + delete m_pipeFrameVisionDoneQ; + m_pipeFrameVisionDoneQ = NULL; + } + + if (dstIspReprocessingQ != NULL) { + delete dstIspReprocessingQ; + dstIspReprocessingQ = NULL; + } + + if (dstSccReprocessingQ != NULL) { + delete dstSccReprocessingQ; + dstSccReprocessingQ = NULL; + } + + if (dstGscReprocessingQ != NULL) { + delete dstGscReprocessingQ; + dstGscReprocessingQ = NULL; + } + +#ifdef STK_PICTURE + if (dstStkPictureQ != NULL) { + delete dstStkPictureQ; + dstStkPictureQ = NULL; + } +#endif + + if (dstJpegReprocessingQ != NULL) { + delete dstJpegReprocessingQ; + dstJpegReprocessingQ = NULL; + } + + if (m_postPictureQ != NULL) { + delete m_postPictureQ; + m_postPictureQ = NULL; + } + + if (m_jpegCallbackQ != NULL) { + delete m_jpegCallbackQ; + m_jpegCallbackQ = NULL; + } + + if (m_postviewCallbackQ != NULL) { + delete m_postviewCallbackQ; + m_postviewCallbackQ = NULL; + } + + if (m_facedetectQ != NULL) { + delete m_facedetectQ; + m_facedetectQ = NULL; + } + + if (m_previewQ != NULL) { + delete m_previewQ; + m_previewQ = NULL; + } + + if (m_vraThreadQ != NULL) { + delete m_vraThreadQ; + m_vraThreadQ = NULL; + } + + if (m_vraGscDoneQ != NULL) { + delete m_vraGscDoneQ; + m_vraGscDoneQ = NULL; + } + + if (m_vraPipeDoneQ != NULL) { + delete m_vraPipeDoneQ; + m_vraPipeDoneQ = NULL; + } + + for(int i = 0 ; i < MAX_NUM_PIPES ; 
i++ ) { + if (m_mainSetupQ[i] != NULL) { + delete m_mainSetupQ[i]; + m_mainSetupQ[i] = NULL; + } + } + + if (m_previewCallbackGscFrameDoneQ != NULL) { + delete m_previewCallbackGscFrameDoneQ; + m_previewCallbackGscFrameDoneQ = NULL; + } + + if (m_recordingQ != NULL) { + delete m_recordingQ; + m_recordingQ = NULL; + } + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + if (m_jpegSaveQ[threadNum] != NULL) { + delete m_jpegSaveQ[threadNum]; + m_jpegSaveQ[threadNum] = NULL; + } + } + + if (m_highResolutionCallbackQ != NULL) { + delete m_highResolutionCallbackQ; + m_highResolutionCallbackQ = NULL; + } + + if (m_frameFactoryQ != NULL) { + delete m_frameFactoryQ; + m_frameFactoryQ = NULL; + CLOGD("DEBUG(%s):FrameFactoryQ destroyed", __FUNCTION__); + } + + if (m_bayerBufferMgr != NULL) { + delete m_bayerBufferMgr; + m_bayerBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(bayerBufferMgr) destroyed", __FUNCTION__); + } + + if (m_3aaBufferMgr != NULL) { + delete m_3aaBufferMgr; + m_3aaBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(3aaBufferMgr) destroyed", __FUNCTION__); + } + + if (m_ispBufferMgr != NULL) { + delete m_ispBufferMgr; + m_ispBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(ispBufferMgr) destroyed", __FUNCTION__); + } + + if (m_hwDisBufferMgr != NULL) { + delete m_hwDisBufferMgr; + m_hwDisBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(m_hwDisBufferMgr) destroyed", __FUNCTION__); + } + + if (m_scpBufferMgr != NULL) { + delete m_scpBufferMgr; + m_scpBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(scpBufferMgr) destroyed", __FUNCTION__); + } + + if (m_vraBufferMgr != NULL) { + delete m_vraBufferMgr; + m_vraBufferMgr = NULL; + CLOGD("DEBUG(%s[%d]):BufferManager(vraBufferMgr) destroyed", __FUNCTION__, __LINE__); + } + + if (m_ispReprocessingBufferMgr != NULL) { + delete m_ispReprocessingBufferMgr; + m_ispReprocessingBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(ispReprocessingBufferMgr) destroyed", __FUNCTION__); + } + + if (m_sccReprocessingBufferMgr != NULL) { + delete m_sccReprocessingBufferMgr; + m_sccReprocessingBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(sccReprocessingBufferMgr) destroyed", __FUNCTION__); + } + + if (m_sccBufferMgr != NULL) { + delete m_sccBufferMgr; + m_sccBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(sccBufferMgr) destroyed", __FUNCTION__); + } + + if (m_gscBufferMgr != NULL) { + delete m_gscBufferMgr; + m_gscBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(gscBufferMgr) destroyed", __FUNCTION__); + } + + if (m_jpegBufferMgr != NULL) { + delete m_jpegBufferMgr; + m_jpegBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(jpegBufferMgr) destroyed", __FUNCTION__); + } + + if (m_thumbnailBufferMgr != NULL) { + delete m_thumbnailBufferMgr; + m_thumbnailBufferMgr = NULL; + CLOGD("DEBUG(%s[%d]):BufferManager(thumbnailBufferMgr) destroyed", __FUNCTION__, __LINE__); + } + + if (m_previewCallbackBufferMgr != NULL) { + delete m_previewCallbackBufferMgr; + m_previewCallbackBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(previewCallbackBufferMgr) destroyed", __FUNCTION__); + } + + if (m_highResolutionCallbackBufferMgr != NULL) { + delete m_highResolutionCallbackBufferMgr; + m_highResolutionCallbackBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(m_highResolutionCallbackBufferMgr) destroyed", __FUNCTION__); + } + + if (m_recordingBufferMgr != NULL) { + delete m_recordingBufferMgr; + m_recordingBufferMgr = NULL; + CLOGD("DEBUG(%s):BufferManager(recordingBufferMgr) destroyed", __FUNCTION__); + } 
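+    /*
+     * The buffer-manager teardown above repeats the same "delete, NULL, log"
+     * pattern for every manager. A small helper could express this once; a
+     * minimal sketch (hypothetical, not part of the imported BSP sources):
+     *
+     *   template <typename T>
+     *   static void destroyBufferMgr(T *&mgr, const char *name)
+     *   {
+     *       if (mgr != NULL) {
+     *           delete mgr;
+     *           mgr = NULL;
+     *           ALOGD("DEBUG(release):BufferManager(%s) destroyed", name);
+     *       }
+     *   }
+     *
+     * The remaining members below (capture selectors, recording callback heap,
+     * frame manager, metadata shots) are released before
+     * vendorSpecificDestructor() runs.
+     */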
+ + if (m_captureSelector != NULL) { + delete m_captureSelector; + m_captureSelector = NULL; + } + + if (m_sccCaptureSelector != NULL) { + delete m_sccCaptureSelector; + m_sccCaptureSelector = NULL; + } + + if (m_recordingCallbackHeap != NULL) { + m_recordingCallbackHeap->release(m_recordingCallbackHeap); + delete m_recordingCallbackHeap; + m_recordingCallbackHeap = NULL; + CLOGD("DEBUG(%s):BufferManager(recordingCallbackHeap) destroyed", __FUNCTION__); + } + + m_isFirstStart = true; + m_previewBufferCount = NUM_PREVIEW_BUFFERS; + + if (m_frameMgr != NULL) { + delete m_frameMgr; + m_frameMgr = NULL; + } + + if (m_tempshot != NULL) { + delete m_tempshot; + m_tempshot = NULL; + } + + if (m_fdmeta_shot != NULL) { + delete m_fdmeta_shot; + m_fdmeta_shot = NULL; + } + + if (m_meta_shot != NULL) { + delete m_meta_shot; + m_meta_shot = NULL; + } + + vendorSpecificDestructor(); + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); +} + +int ExynosCamera::getCameraId() const +{ + return m_cameraId; +} + +int ExynosCamera::getShotBufferIdex() const +{ + return NUM_PLANES(V4L2_PIX_2_HAL_PIXEL_FORMAT(SCC_OUTPUT_COLOR_FMT)); +} + +void ExynosCamera::setCallbacks( + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + int ret = 0; + + m_notifyCb = notify_cb; + m_dataCb = data_cb; + m_dataCbTimestamp = data_cb_timestamp; + m_getMemoryCb = get_memory; + m_callbackCookie = user; + + if (m_mhbAllocator == NULL) + m_mhbAllocator = new ExynosCameraMHBAllocator(); + + ret = m_mhbAllocator->init(get_memory); + if (ret < 0) { + CLOGE("ERR(%s[%d]:m_mhbAllocator init failed", __FUNCTION__, __LINE__); + } +} + +void ExynosCamera::enableMsgType(int32_t msgType) +{ + if (m_parameters) { + CLOGV("INFO(%s[%d]): enable Msg (%x)", __FUNCTION__, __LINE__, msgType); + m_parameters->enableMsgType(msgType); + } +} + +void ExynosCamera::disableMsgType(int32_t msgType) +{ + if (m_parameters) { + CLOGV("INFO(%s[%d]): disable Msg (%x)", __FUNCTION__, __LINE__, msgType); + m_parameters->disableMsgType(msgType); + } +} + +bool ExynosCamera::msgTypeEnabled(int32_t msgType) +{ + bool IsEnabled = false; + + if (m_parameters) { + CLOGV("INFO(%s[%d]): Msg type enabled (%x)", __FUNCTION__, __LINE__, msgType); + IsEnabled = m_parameters->msgTypeEnabled(msgType); + } + + return IsEnabled; +} + +bool ExynosCamera::previewEnabled() +{ + CLOGI("INFO(%s[%d]):m_previewEnabled=%d", + __FUNCTION__, __LINE__, (int)m_previewEnabled); + + /* in scalable mode, we should controll out state */ + if (m_parameters != NULL && + (m_parameters->getScalableSensorMode() == true) && + (m_scalableSensorMgr.getMode() == EXYNOS_CAMERA_SCALABLE_CHANGING)) + return true; + else + return m_previewEnabled; +} + +status_t ExynosCamera::storeMetaDataInBuffers(__unused bool enable) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + return OK; +} + +bool ExynosCamera::recordingEnabled() +{ + bool ret = m_getRecordingEnabled(); + CLOGI("INFO(%s[%d]):m_recordingEnabled=%d", + __FUNCTION__, __LINE__, (int)ret); + + return ret; +} + +void ExynosCamera::releaseRecordingFrame(const void *opaque) +{ + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return; + } + } + + if (m_getRecordingEnabled() == false) { + 
CLOGW("WARN(%s[%d]):m_recordingEnabled equals false", __FUNCTION__, __LINE__); + /* m_stopRecordingInternal() will wait for recording frame release */ + /* return; */ + } + + if (m_recordingCallbackHeap == NULL) { + CLOGW("WARN(%s[%d]):recordingCallbackHeap equals NULL", __FUNCTION__, __LINE__); + return; + } + + bool found = false; + struct addrs *recordAddrs = (struct addrs *)m_recordingCallbackHeap->data; + struct addrs *releaseFrame = (struct addrs *)opaque; + + if (recordAddrs != NULL) { + for (int32_t i = 0; i < m_recordingBufferCount; i++) { + if ((char *)(&(recordAddrs[i])) == (char *)opaque) { + found = true; + CLOGV("DEBUG(%s[%d]):releaseFrame->bufIndex=%d, fdY=%d", + __FUNCTION__, __LINE__, releaseFrame->bufIndex, releaseFrame->fdPlaneY); + + if (m_doCscRecording == true) { + m_releaseRecordingBuffer(releaseFrame->bufIndex); + } else { + m_recordingTimeStamp[releaseFrame->bufIndex] = 0L; + m_recordingBufAvailable[releaseFrame->bufIndex] = true; + } + + break; + } + m_isFirstStart = false; + if (m_parameters != NULL) { + m_parameters->setIsFirstStartFlag(m_isFirstStart); + } + } + } + + if (found == false) + CLOGW("WARN(%s[%d]):**** releaseFrame not founded ****", __FUNCTION__, __LINE__); + +} + +status_t ExynosCamera::autoFocus() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return INVALID_OPERATION; + } + } + + if (m_previewEnabled == false) { + CLOGE("ERR(%s[%d]): preview is not enabled", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (m_autoFocusRunning == false) { + m_autoFocusType = AUTO_FOCUS_SERVICE; + m_autoFocusThread->requestExitAndWait(); + m_autoFocusThread->run(PRIORITY_DEFAULT); + } else { + CLOGW("WRN(%s[%d]): auto focus is inprogressing", __FUNCTION__, __LINE__); + } + +#if 0 // not used. + if (m_parameters != NULL) { + if (m_parameters->getFocusMode() == FOCUS_MODE_AUTO) { + CLOGI("INFO(%s[%d]) ae awb lock", __FUNCTION__, __LINE__); + m_parameters->m_setAutoExposureLock(true); + m_parameters->m_setAutoWhiteBalanceLock(true); + } + } +#endif + + return NO_ERROR; +} + +status_t ExynosCamera::cancelAutoFocus() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return INVALID_OPERATION; + } + } + + m_autoFocusLock.lock(); + m_autoFocusRunning = false; + m_autoFocusLock.unlock(); + +#if 0 // not used. + if (m_parameters != NULL) { + if (m_parameters->getFocusMode() == FOCUS_MODE_AUTO) { + CLOGI("INFO(%s[%d]) ae awb unlock", __FUNCTION__, __LINE__); + m_parameters->m_setAutoExposureLock(false); + m_parameters->m_setAutoWhiteBalanceLock(false); + } + } +#endif + + if (m_exynosCameraActivityControl->cancelAutoFocus() == false) { + CLOGE("ERR(%s):Fail on m_secCamera->cancelAutoFocus()", __FUNCTION__); + return UNKNOWN_ERROR; + } + + /* if autofocusThread is running, we should be wait to receive the AF reseult. 
*/ + m_autoFocusLock.lock(); + m_autoFocusLock.unlock(); + + return NO_ERROR; +} + +status_t ExynosCamera::cancelPicture() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + /* android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); */ + + return NO_ERROR; + } + } + +/* + m_takePictureCounter.clearCount(); + m_reprocessingCounter.clearCount(); + m_pictureCounter.clearCount(); + m_jpegCounter.clearCount(); +*/ + + return NO_ERROR; +} + +CameraParameters ExynosCamera::getParameters() const +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + return m_parameters->getParameters(); +} + +int ExynosCamera::getMaxNumDetectedFaces(void) +{ + return m_parameters->getMaxNumDetectedFaces(); +} + +bool ExynosCamera::m_startFaceDetection(bool toggle) +{ + CLOGD("DEBUG(%s[%d]) toggle : %d", __FUNCTION__, __LINE__, toggle); + + if (toggle == true) { + m_parameters->setFdEnable(true); + m_parameters->setFdMode(FACEDETECT_MODE_FULL); + if (m_parameters->isMcscVraOtf() == false) + m_previewFrameFactory->startThread(PIPE_VRA); + } else { + m_parameters->setFdEnable(false); + m_parameters->setFdMode(FACEDETECT_MODE_OFF); + if (m_parameters->isMcscVraOtf() == false) { + m_vraThread->requestExit(); + + if (m_vraThreadQ != NULL) + m_clearList(m_vraThreadQ); + + if (m_vraGscDoneQ != NULL) + m_clearList(m_vraGscDoneQ); + + if (m_vraPipeDoneQ != NULL) + m_clearList(m_vraPipeDoneQ); + } + } + + memset(&m_frameMetadata, 0, sizeof(camera_frame_metadata_t)); + + return true; +} + +bool ExynosCamera::startFaceDetection(void) +{ + if (m_flagStartFaceDetection == true) { + CLOGD("DEBUG(%s):Face detection already started..", __FUNCTION__); + return true; + } + + /* FD-AE is always on */ +#ifdef USE_FD_AE +#else + m_startFaceDetection(true); +#endif + +#ifdef ENABLE_FDAF_WITH_FD + /* Enable FD-AF according to FD condition */ + if(m_parameters->getFdEnable() == 0) +#else + /* Block FD-AF except for special shot modes */ + if(m_parameters->getShotMode() == SHOT_MODE_BEAUTY_FACE || + m_parameters->getShotMode() == SHOT_MODE_SELFIE_ALARM) +#endif + { + ExynosCameraActivityAutofocus *autoFocusMgr = m_exynosCameraActivityControl->getAutoFocusMgr(); + + if (autoFocusMgr->setFaceDetection(true) == false) { + CLOGE("ERR(%s[%d]):setFaceDetection(%d)", __FUNCTION__, __LINE__, true); + } else { + /* restart CAF when FD mode changed */ + switch (autoFocusMgr->getAutofocusMode()) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + if (autoFocusMgr->flagAutofocusStart() == true && + autoFocusMgr->flagLockAutofocus() == false) { + autoFocusMgr->stopAutofocus(); + autoFocusMgr->startAutofocus(); + } + break; + default: + break; + } + } + } + + if (m_facedetectQ->getSizeOfProcessQ() > 0) { + CLOGE("ERR(%s[%d]):startFaceDetection recordingQ(%d)", __FUNCTION__, __LINE__, m_facedetectQ->getSizeOfProcessQ()); + /* + * just empty q on m_facedetectQ. 
+ * m_clearList() can make deadlock by accessing frame + * deleted on stopPreview() + */ + /* m_clearList(m_facedetectQ); */ + m_facedetectQ->release(); + } + + m_flagStartFaceDetection = true; + + if (m_facedetectThread->isRunning() == false) + m_facedetectThread->run(); + + return true; +} + +bool ExynosCamera::stopFaceDetection(void) +{ + if (m_flagStartFaceDetection == false) { + CLOGD("DEBUG(%s [%d]):Face detection already stopped..", __FUNCTION__, __LINE__); + return true; + } + + ExynosCameraActivityAutofocus *autoFocusMgr = m_exynosCameraActivityControl->getAutoFocusMgr(); + + if (autoFocusMgr->setFaceDetection(false) == false) { + CLOGE("ERR(%s[%d]):setFaceDetection(%d)", __FUNCTION__, __LINE__, false); + } else { + /* restart CAF when FD mode changed */ + switch (autoFocusMgr->getAutofocusMode()) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case ExynosCameraActivityAutofocus::AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + if (autoFocusMgr->flagAutofocusStart() == true && + autoFocusMgr->flagLockAutofocus() == false) { + autoFocusMgr->stopAutofocus(); + autoFocusMgr->startAutofocus(); + } + break; + default: + break; + } + } + + /* FD-AE is always on */ +#ifdef USE_FD_AE +#else + m_startFaceDetection(false); +#endif + m_flagStartFaceDetection = false; + + return true; +} + +bool ExynosCamera::m_getRecordingEnabled(void) +{ + Mutex::Autolock lock(m_recordingStateLock); + return m_recordingEnabled; +} + +void ExynosCamera::m_setRecordingEnabled(bool enable) +{ + Mutex::Autolock lock(m_recordingStateLock); + m_recordingEnabled = enable; + return; +} + +int ExynosCamera::m_calibratePosition(int w, int new_w, int pos) +{ + return (float)(pos * new_w) / (float)w; +} + +bool ExynosCamera::m_facedetectThreadFunc(void) +{ + int32_t status = 0; + bool ret = true; + + int index = 0; + int count = 0; + + ExynosCameraFrame *newFrame = NULL; + uint32_t frameCnt = 0; + + if (m_previewEnabled == false) { + CLOGD("DEBUG(%s):preview is stopped, thread stop", __FUNCTION__); + ret = false; + goto func_exit; + } + + status = m_facedetectQ->waitAndPopProcessQ(&newFrame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d) m_flagStartFaceDetection(%d)", __FUNCTION__, __LINE__, m_flagThreadStop, m_flagStartFaceDetection); + ret = false; + goto func_exit; + } + + if (status < 0) { + if (status == TIMED_OUT) { + /* Face Detection time out is not meaningful */ + } else { + /* TODO: doing exception handling */ + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + } + goto func_exit; + } + + count = m_facedetectQ->getSizeOfProcessQ(); + if (count >= MAX_FACEDETECT_THREADQ_SIZE) { + CLOGE("ERR(%s[%d]):m_facedetectQ skipped QSize(%d)", __FUNCTION__, __LINE__, count); + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + for (int i = 0 ; i < count-1 ; i++) { + m_facedetectQ->popProcessQ(&newFrame); + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + } + m_facedetectQ->popProcessQ(&newFrame); + } + + if (newFrame != NULL) { + status = m_doFdCallbackFunc(newFrame); + if (status < 0) { + CLOGE("ERR(%s[%d]) m_doFdCallbackFunc failed(%d).", __FUNCTION__, __LINE__, status); + } + } + + if (m_facedetectQ->getSizeOfProcessQ() > 0) { + ret = true; + } + + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + +func_exit: + + return ret; +} + +status_t ExynosCamera::generateFrame(int32_t 
frameCount, ExynosCameraFrame **newFrame) +{ + Mutex::Autolock lock(m_frameLock); + + int ret = 0; + *newFrame = NULL; + + if (frameCount >= 0) { + ret = m_searchFrameFromList(&m_processList, frameCount, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):searchFrameFromList fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + if (*newFrame == NULL) { + *newFrame = m_previewFrameFactory->createNewFrame(); + + if (*newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return UNKNOWN_ERROR; + } + + bool flagRequested = false; + + if (m_parameters->isOwnScc(getCameraId()) == true) + flagRequested = (*newFrame)->getRequest(PIPE_SCC); + else + flagRequested = (*newFrame)->getRequest(PIPE_ISPC); + + if (flagRequested == true) { + m_dynamicSccCount++; + CLOGV("DEBUG(%s[%d]):dynamicSccCount inc(%d) frameCount(%d)", __FUNCTION__, __LINE__, m_dynamicSccCount, (*newFrame)->getFrameCount()); + } + + m_processList.push_back(*newFrame); + } + + return ret; +} + +status_t ExynosCamera::generateFrameSccScp(uint32_t pipeId, uint32_t *frameCount, ExynosCameraFrame **frame) +{ + int ret = 0; + int regenerateFrame = 0; + int count = *frameCount; + ExynosCameraFrame *newframe = NULL; + + do { + *frame = NULL; + ret = generateFrame(count, frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail, pipeID: %d", __FUNCTION__, __LINE__, pipeId); + return ret; + } + + newframe = *frame; + if (((PIPE_SCP == pipeId) && newframe->getScpDrop()) || + ((m_cameraId == CAMERA_ID_FRONT) && (PIPE_SCC == pipeId) && (newframe->getSccDrop() == true)) || + ((m_cameraId == CAMERA_ID_FRONT) && (PIPE_ISPC == pipeId) && (newframe->getIspcDrop() == true))) { + count++; + + ret = newframe->setEntityState(pipeId, ENTITY_STATE_FRAME_SKIP); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState fail, pipeId(%d), state(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, ENTITY_STATE_FRAME_SKIP, ret); + *frameCount = count; + return ret; + } + + /* let m_mainThreadFunc handle the processlist cleaning part */ + m_pipeFrameDoneQ->pushProcessQ(&newframe); + + regenerateFrame = 1; + continue; + } + regenerateFrame = 0; + } while (regenerateFrame); + + *frameCount = count; + return NO_ERROR; +} + +status_t ExynosCamera::m_setupEntity( + uint32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer *srcBuf, + ExynosCameraBuffer *dstBuf) +{ + int ret = 0; + entity_buffer_state_t entityBufferState; + + /* set SRC buffer */ + ret = newFrame->getSrcBufferState(pipeId, &entityBufferState); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBufferState fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + if (entityBufferState == ENTITY_BUFFER_STATE_REQUESTED) { + ret = m_setSrcBuffer(pipeId, newFrame, srcBuf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setSrcBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + } + + /* set DST buffer */ + ret = newFrame->getDstBufferState(pipeId, &entityBufferState); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBufferState fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + if (entityBufferState == ENTITY_BUFFER_STATE_REQUESTED) { + ret = m_setDstBuffer(pipeId, newFrame, dstBuf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + } + + ret = newFrame->setEntityState(pipeId, ENTITY_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState(ENTITY_STATE_PROCESSING) fail, 
pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCamera::m_setSrcBuffer( + uint32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer *buffer) +{ + int ret = 0; + int bufIndex = -1; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer srcBuf; + + if (buffer == NULL) { + buffer = &srcBuf; + + ret = m_getBufferManager(pipeId, &bufferMgr, SRC_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBufferManager(SRC) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + if (bufferMgr == NULL) { + CLOGE("ERR(%s[%d]):buffer manager is NULL, pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + /* get buffers */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, newFrame->getFrameCount(), ret); + bufferMgr->dump(); + return ret; + } + } + + /* set buffers */ + ret = newFrame->setSrcBuffer(pipeId, *buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setSrcBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCamera::m_setDstBuffer( + uint32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer *buffer) +{ + int ret = 0; + int bufIndex = -1; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer dstBuf; + + if (buffer == NULL) { + buffer = &dstBuf; + + ret = m_getBufferManager(pipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBufferManager(DST) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + if (bufferMgr == NULL) { + CLOGE("ERR(%s[%d]):buffer manager is NULL, pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + /* get buffers */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, buffer); + if (ret < 0) { + ExynosCameraFrameEntity *curEntity = newFrame->searchEntityByPipeId(pipeId); + if (curEntity != NULL) { + if (curEntity->getBufType() == ENTITY_BUFFER_DELIVERY) { + CLOGV("DEBUG(%s[%d]): pipe(%d) buffer is empty for delivery", __FUNCTION__, __LINE__, pipeId); + buffer->index = -1; + } else { + CLOGE("ERR(%s[%d]):getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, newFrame->getFrameCount(), ret); + return ret; + } + } else { + CLOGE("ERR(%s[%d]):curEntity is NULL, pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + return ret; + } + } + } + + /* set buffers */ + ret = newFrame->setDstBuffer(pipeId, *buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCamera::generateFrameReprocessing(ExynosCameraFrame **newFrame) +{ + Mutex::Autolock lock(m_frameLock); + + int ret = 0; + struct ExynosCameraBuffer tempBuffer; + int bufIndex = -1; + + /* 1. 
Make Frame */ + *newFrame = m_reprocessingFrameFactory->createNewFrame(); + if (*newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return UNKNOWN_ERROR; + } + + return NO_ERROR; +} + +status_t ExynosCamera::m_startPictureInternal(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int ret = 0; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int hwPictureW, hwPictureH; + int maxThumbnailW = 0, maxThumbnailH = 0; + int planeCount = 1; + int minBufferCount = 1; + int maxBufferCount = 1; + int numOfReprocessingFactory = 0; + int pictureFormat = m_parameters->getHwPictureFormat(); + bool needMmap = false; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + ExynosCameraBufferManager *taaBufferManager[MAX_NODE]; + ExynosCameraBufferManager *ispBufferManager[MAX_NODE]; + ExynosCameraBufferManager *mcscBufferManager[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + taaBufferManager[i] = NULL; + ispBufferManager[i] = NULL; + mcscBufferManager[i] = NULL; + } + + if (m_zslPictureEnabled == true) { + CLOGD("DEBUG(%s[%d]): zsl picture is already initialized", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + if (m_parameters->isReprocessing() == true) { + if( m_parameters->getHighSpeedRecording() ) { + m_parameters->getHwSensorSize(&hwPictureW, &hwPictureH); + CLOGI("(%s):HW Picture(HighSpeed) width x height = %dx%d", __FUNCTION__, hwPictureW, hwPictureH); + } else { + m_parameters->getMaxPictureSize(&hwPictureW, &hwPictureH); + CLOGI("(%s):HW Picture width x height = %dx%d", __FUNCTION__, hwPictureW, hwPictureH); + } + + m_parameters->getMaxThumbnailSize(&maxThumbnailW, &maxThumbnailH); + + if (m_parameters->isUseYuvReprocessingForThumbnail() == true) + needMmap = true; + else + needMmap = false; + + if (m_parameters->isHWFCEnabled() == true + && m_parameters->getHighResolutionCallbackMode() == false) { + allocMode = BUFFER_MANAGER_ALLOCATION_ATONCE; + } + + if (SCC_OUTPUT_COLOR_FMT == V4L2_PIX_FMT_NV21M) { + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN); + planeSize[1] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) / 2; + planeCount = 3; + } else if (SCC_OUTPUT_COLOR_FMT == V4L2_PIX_FMT_NV21) { + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) * 3 / 2; + planeCount = 2; + } else { + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) * 2; + planeCount = 2; + } + minBufferCount = 1; + maxBufferCount = NUM_PICTURE_BUFFERS; + + if (m_parameters->getHighResolutionCallbackMode() == true) { + /* SCC Reprocessing Buffer realloc for high resolution callback */ + minBufferCount = 2; + } + + ret = m_allocBuffers(m_sccReprocessingBufferMgr, planeCount, planeSize, bytesPerLine, + minBufferCount, maxBufferCount, type, allocMode, true, needMmap); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_sccReprocessingBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", + __FUNCTION__, __LINE__, minBufferCount, maxBufferCount); + return ret; + } + + if (m_parameters->getUsePureBayerReprocessing() == true) { + ret = m_setReprocessingBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setReprocessing Buffer fail", __FUNCTION__, __LINE__); + return ret; + } + } + + /* Reprocessing Thumbnail Buffer */ + if 
(pictureFormat == V4L2_PIX_FMT_NV21M) { + planeCount = 3; + planeSize[0] = maxThumbnailW * maxThumbnailH; + planeSize[1] = maxThumbnailW * maxThumbnailH / 2; + } else { + planeCount = 2; + planeSize[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), maxThumbnailW, maxThumbnailH); + } + minBufferCount = 1; + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + + ret = m_allocBuffers(m_thumbnailBufferMgr, planeCount, planeSize, bytesPerLine, + minBufferCount, maxBufferCount, type, allocMode, true, needMmap); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_thumbnailBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", + __FUNCTION__, __LINE__, minBufferCount, maxBufferCount); + return ret; + } + } + + numOfReprocessingFactory = m_parameters->getNumOfReprocessingFactory(); + + for (int i = FRAME_FACTORY_TYPE_REPROCESSING; i < numOfReprocessingFactory + FRAME_FACTORY_TYPE_REPROCESSING; i++) { + if (m_frameFactory[i]->isCreated() == false) { + ret = m_frameFactory[i]->create(); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory->create() failed", __FUNCTION__); + return ret; + } + + } + + if (i == FRAME_FACTORY_TYPE_REPROCESSING) { + m_reprocessingFrameFactory = m_frameFactory[i]; + m_pictureFrameFactory = m_reprocessingFrameFactory; + } + CLOGD("DEBUG(%s[%d]):FrameFactory(pictureFrameFactory) created", __FUNCTION__, __LINE__); + /* If we want set buffer namanger from m_getBufferManager, use this */ +#if 0 + ret = m_getBufferManager(PIPE_3AA_REPROCESSING, bufferManager[m_reprocessingFrameFactory->getNodeType(PIPE_3AA_REPROCESSING)], SRC_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBufferManager() fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, PIPE_3AA_REPROCESSING, ret); + return ret; + } + + ret = m_getBufferManager(PIPE_3AA_REPROCESSING, bufferManager[m_reprocessingFrameFactory->getNodeType(PIPE_3AP_REPROCESSING)], DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBufferManager() fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, PIPE_3AP_REPROCESSING, ret); + return ret; + } +#else + if (m_parameters->isReprocessing3aaIspOTF() == false) { + taaBufferManager[m_frameFactory[i]->getNodeType(PIPE_3AA_REPROCESSING)] = m_bayerBufferMgr; + taaBufferManager[m_frameFactory[i]->getNodeType(PIPE_3AP_REPROCESSING)] = m_ispReprocessingBufferMgr; + } else { + taaBufferManager[m_frameFactory[i]->getNodeType(PIPE_3AA_REPROCESSING)] = m_bayerBufferMgr; + taaBufferManager[m_frameFactory[i]->getNodeType(PIPE_ISPC_REPROCESSING)] = m_sccReprocessingBufferMgr; + + taaBufferManager[OUTPUT_NODE] = m_bayerBufferMgr; + taaBufferManager[CAPTURE_NODE] = m_sccReprocessingBufferMgr; + } +#endif + + ret = m_frameFactory[i]->setBufferManagerToPipe(taaBufferManager, PIPE_3AA_REPROCESSING); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory->setBufferManagerToPipe() failed", __FUNCTION__); + return ret; + } + + ispBufferManager[m_frameFactory[i]->getNodeType(PIPE_ISP_REPROCESSING)] = m_ispReprocessingBufferMgr; + ispBufferManager[m_frameFactory[i]->getNodeType(PIPE_ISPC_REPROCESSING)] = m_sccReprocessingBufferMgr; + + ret = m_frameFactory[i]->setBufferManagerToPipe(ispBufferManager, PIPE_ISP_REPROCESSING); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory->setBufferManagerToPipe() failed", __FUNCTION__); + return ret; + } + + mcscBufferManager[m_frameFactory[i]->getNodeType(PIPE_MCSC_REPROCESSING)] = m_sccBufferMgr; + 
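+        /*
+         * The assignments below wire the MCSC output and the HWFC (hardware
+         * flow control) JPEG path for reprocessing: the YUV reprocessing output
+         * feeds the JPEG source node, the thumbnail manager feeds both thumbnail
+         * nodes, and the encoded result lands in m_jpegBufferMgr. This is
+         * presumably why allocMode is switched to
+         * BUFFER_MANAGER_ALLOCATION_ATONCE above when isHWFCEnabled() is true.
+         */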
mcscBufferManager[m_frameFactory[i]->getNodeType(PIPE_MCSC0_REPROCESSING)] = m_sccReprocessingBufferMgr; + mcscBufferManager[m_frameFactory[i]->getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING)] = m_sccReprocessingBufferMgr; + mcscBufferManager[m_frameFactory[i]->getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING)] = m_thumbnailBufferMgr; + mcscBufferManager[m_frameFactory[i]->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)] = m_jpegBufferMgr; + mcscBufferManager[m_frameFactory[i]->getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING)] = m_thumbnailBufferMgr; + + ret = m_frameFactory[i]->setBufferManagerToPipe(mcscBufferManager, PIPE_MCSC_REPROCESSING); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory->setBufferManagerToPipe() failed", __FUNCTION__); + return ret; + } + + ret = m_frameFactory[i]->initPipes(); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory->initPipes() failed", __FUNCTION__); + return ret; + } + + ret = m_frameFactory[i]->preparePipes(); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory preparePipe fail", __FUNCTION__); + return ret; + } + + /* stream on pipes */ + ret = m_frameFactory[i]->startPipes(); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory startPipe fail", __FUNCTION__); + return ret; + } + } + + m_zslPictureEnabled = true; + + /* + * Make remained frameFactory here. + * in case of reprocessing capture, make here. + */ + m_framefactoryThread->run(); + + return NO_ERROR; +} + +bool ExynosCamera::m_mainThreadQSetup3AA() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = true; + int pipeId_3AA = PIPE_3AA; + int pipeId_3AC = PIPE_3AC; + int pipeId_ISP = PIPE_ISP; + int pipeId_DIS = PIPE_DIS; + int pipeIdCsc = 0; + int maxbuffers = 0; + int retrycount = 0; + + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrame *newframe = NULL; + nsecs_t timeStamp = 0; + int frameCount = -1; + + CLOGV("INFO(%s[%d]):wait previewCancelQ", __FUNCTION__, __LINE__); + ret = m_mainSetupQ[INDEX(pipeId_3AA)]->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto func_exit; + } + + /* HACK : + * Prevent to deliver the changed size with per-frame control + * before the H/W size setting is finished. + */ + if (m_parameters->getPreviewSizeChanged() == true) { + CLOGI("INFO(%s[%d]):Preview size is changed. 
Skip to generate new frame", + __FUNCTION__, __LINE__); + goto func_exit; + } + + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + do { + ret = generateFrame(m_3aa_ispFrameCount, &newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + usleep(100); + } + if(++retrycount >= 10) { + goto func_exit; + } + } while((ret < 0) && (retrycount < 10)); + + if (newframe == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = m_setupEntity(pipeId_3AA, newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + } + + + if (m_parameters->is3aaIspOtf() == true) { + if (m_parameters->isMcscVraOtf() == true) + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId_3AA); + + if (m_parameters->getTpuEnabledMode() == true) + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId_DIS); + } else { + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, pipeId_3AA); + + if (m_parameters->getTpuEnabledMode() == true) { + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, pipeId_ISP); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId_DIS); + } else { + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId_ISP); + } + } + + if (m_parameters->isMcscVraOtf() == false) { + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_VRA); + } + + m_previewFrameFactory->pushFrameToPipe(&newframe, pipeId_3AA); + m_3aa_ispFrameCount++; + +func_exit: + if( frame != NULL ) { + frame->decRef(); + m_frameMgr->deleteFrame(frame);; + frame = NULL; + } + + return loop; +} + +bool ExynosCamera::m_mainThreadQSetup3AA_ISP() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = true; + int pipeId = PIPE_3AA_ISP; + int pipeIdCsc = 0; + int maxbuffers = 0; + + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrame *newframe = NULL; + nsecs_t timeStamp = 0; + int frameCount = -1; + + CLOGV("INFO(%s[%d]):wait previewCancelQ", __FUNCTION__, __LINE__); + ret = m_mainSetupQ[INDEX(pipeId)]->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto func_exit; + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + while (m_3aaBufferMgr->getNumOfAvailableBuffer() > 0 && + m_ispBufferMgr->getNumOfAvailableBuffer() > 0) { + ret = generateFrame(m_3aa_ispFrameCount, &newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + goto func_exit; + } + + if (newframe == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = m_setupEntity(pipeId, newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + } + + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId); + 
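+        /*
+         * Each prepared frame is handed to the 3AA_ISP pipe below; completion
+         * is reported back on m_pipeFrameDoneQ (consumed by m_mainThreadFunc).
+         * The enclosing while loop only keeps producing frames while both the
+         * 3AA and ISP buffer managers still have free buffers, so this setup
+         * thread throttles itself against buffer availability.
+         */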
m_previewFrameFactory->pushFrameToPipe(&newframe, pipeId); + + m_3aa_ispFrameCount++; + + } + +func_exit: + if( frame != NULL ) { + frame->decRef(); + m_frameMgr->deleteFrame(frame);; + frame = NULL; + } + + /* + if (m_mainSetupQ[INDEX(pipeId)]->getSizeOfProcessQ() <= 0) + loop = false; + */ + + return loop; +} + +bool ExynosCamera::m_mainThreadQSetupISP() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = false; + int pipeId = PIPE_ISP; + + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + nsecs_t timeStamp = 0; + int frameCount = -1; + + CLOGV("INFO(%s[%d]):wait previewCancelQ", __FUNCTION__, __LINE__); + ret = m_mainSetupQ[INDEX(pipeId)]->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto func_exit; + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + +func_exit: + if( frame != NULL ) { + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + + if (m_mainSetupQ[INDEX(pipeId)]->getSizeOfProcessQ() > 0) + loop = true; + + return loop; +} + +bool ExynosCamera::m_mainThreadQSetupFLITE() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = true; + int pipeId = PIPE_FLITE; + int pipeIdCsc = 0; + int maxbuffers = 0; + + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrame *newframe = NULL; + nsecs_t timeStamp = 0; + int frameCount = -1; + + CLOGV("INFO(%s[%d]):wait previewCancelQ", __FUNCTION__, __LINE__); + ret = m_mainSetupQ[INDEX(pipeId)]->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto func_exit; + } + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + goto func_exit; + } + + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + if (m_bayerBufferMgr->getNumOfAvailableBuffer() > 0) { + ret = generateFrame(m_fliteFrameCount, &newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = m_setupEntity(pipeId, newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + } + ret = newframe->getDstBuffer(pipeId, &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId); + m_previewFrameFactory->pushFrameToPipe(&newframe, pipeId); + + m_fliteFrameCount++; + } + +func_exit: + if( frame != NULL ) { + frame->decRef(); + m_frameMgr->deleteFrame(frame);; + frame = NULL; + } + + /* + if (m_mainSetupQ[INDEX(pipeId)]->getSizeOfProcessQ() <= 0) + loop = false; + */ + + return loop; +} + +bool ExynosCamera::m_mainThreadQSetup3AC() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = true; + int pipeId = PIPE_3AC; + int pipeIdCsc = 0; + int maxbuffers = 0; + + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrame *newframe = NULL; + nsecs_t 
timeStamp = 0; + int frameCount = -1; + + CLOGV("INFO(%s[%d]):wait previewCancelQ", __FUNCTION__, __LINE__); + ret = m_mainSetupQ[INDEX(pipeId)]->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto func_exit; + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + if (m_bayerBufferMgr->getNumOfAvailableBuffer() > 0) { + do { + ret = generateFrame(m_fliteFrameCount, &newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = m_setupEntity(pipeId, newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + } + ret = newframe->getDstBuffer(pipeId, &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId); + m_previewFrameFactory->pushFrameToPipe(&newframe, pipeId); + + m_fliteFrameCount++; + //} while (m_bayerBufferMgr->getAllocatedBufferCount() - PIPE_3AC_PREPARE_COUNT + // < m_bayerBufferMgr->getNumOfAvailableBuffer()); + } while (0 < m_bayerBufferMgr->getNumOfAvailableBuffer()); + } + +func_exit: + if( frame != NULL ) { + frame->decRef(); + m_frameMgr->deleteFrame(frame);; + } + + /* + if (m_mainSetupQ[INDEX(pipeId)]->getSizeOfProcessQ() <= 0) + loop = false; + */ + + return loop; +} + +bool ExynosCamera::m_mainThreadQSetupSCP() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = true; + int pipeId = PIPE_SCP; + int pipeIdCsc = 0; + int maxbuffers = 0; + + camera2_node_group node_group_info_isp; + ExynosCameraBuffer resultBuffer; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrame *newframe = NULL; + nsecs_t timeStamp = 0; + int frameCount = -1; + int frameGen = 1; + + CLOGV("INFO(%s[%d]):wait previewCancelQ", __FUNCTION__, __LINE__); + ret = m_mainSetupQ[INDEX(pipeId)]->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto func_exit; + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = generateFrameSccScp(pipeId, &m_scpFrameCount, &newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrameSccScp fail", __FUNCTION__, __LINE__); + goto func_exit; + } + + if( frame->getFrameState() == FRAME_STATE_SKIPPED ) { + ret = frame->getDstBuffer(pipeId, &resultBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + ret = m_setupEntity(pipeId, newframe, NULL, &resultBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + } + ret = newframe->getDstBuffer(pipeId, &resultBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + } else { + ret = m_setupEntity(pipeId, newframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + } + ret = newframe->getDstBuffer(pipeId, &resultBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + 
} + } + + /*check preview drop...*/ + ret = newframe->getDstBuffer(pipeId, &resultBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + if (resultBuffer.index < 0) { + newframe->setRequest(pipeId, false); + newframe->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + node_group_info_isp.capture[PERFRAME_BACK_SCP_POS].request = 0; + newframe->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + m_previewFrameFactory->dump(); + + if (m_getRecordingEnabled() == true + && m_parameters->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) + && m_parameters->getHighSpeedRecording() == false) { + CLOGW("WARN(%s[%d]):Recording preview drop. Failed to get preview buffer. FrameSkipCount(%d)", + __FUNCTION__, __LINE__, FRAME_SKIP_COUNT_RECORDING); + /* when preview buffer is not ready, we should drop preview to make preview buffer ready */ + m_parameters->setFrameSkipCount(FRAME_SKIP_COUNT_RECORDING); + } else { + CLOGW("WARN(%s[%d]):Preview drop. Failed to get preview buffer. FrameSkipCount (%d)", + __FUNCTION__, __LINE__, FRAME_SKIP_COUNT_PREVIEW); + /* when preview buffer is not ready, we should drop preview to make preview buffer ready */ + m_parameters->setFrameSkipCount(FRAME_SKIP_COUNT_PREVIEW); + } + } + + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId); + m_previewFrameFactory->pushFrameToPipe(&newframe, pipeId); + + m_scpFrameCount++; + +func_exit: + if( frame != NULL ) { + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + + /* + if (m_mainSetupQ[INDEX(pipeId)]->getSizeOfProcessQ() <= 0) + loop = false; + */ + + return loop; +} + +bool ExynosCamera::m_mainThreadFunc(void) +{ + int ret = 0; + int index = 0; + ExynosCameraFrame *newFrame = NULL; + uint32_t frameCnt = 0; + + if (m_previewEnabled == false) { + CLOGD("DEBUG(%s):preview is stopped, thread stop", __FUNCTION__); + return false; + } + + ret = m_pipeFrameDoneQ->waitAndPopProcessQ(&newFrame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + return false; + } + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return true; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + return true; + } + + frameCnt = newFrame->getFrameCount(); + +/* HACK Reset Preview Flag*/ +#if 0 + if (m_parameters->getRestartPreview() == true) { + m_resetPreview = true; + ret = m_restartPreviewInternal(); + m_resetPreview = false; + CLOGE("INFO(%s[%d]) m_resetPreview(%d)", __FUNCTION__, __LINE__, m_resetPreview); + if (ret < 0) + CLOGE("(%s[%d]): restart preview internal fail", __FUNCTION__, __LINE__); + + return true; + } +#endif + + if (m_parameters->isFlite3aaOtf() == true) { + ret = m_handlePreviewFrame(newFrame); + } else { + if (m_parameters->getDualMode()) + ret = m_handlePreviewFrameFrontDual(newFrame); + else + ret = m_handlePreviewFrameFront(newFrame); + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):handle preview frame fail", __FUNCTION__, __LINE__); + return ret; + } + + /* Below code block is moved to m_handlePreviewFrame() and m_handlePreviewFrameFront() functions + * because we want to delete the frame as soon as the setFrameState(FRAME_STATE_COMPLETE) is called. 
+ * Otherwise, some other thread might be executed between "setFrameState(FRAME_STATE_COMPLETE)" and + * "delete frame" statements and might delete the same frame. This would cause the second "delete frame" + * (trying to delete the same frame) to behave abnormally since that frame is already deleted. + */ +#if 0 + /* Don't use this lock */ + m_frameFliteDeleteBetweenPreviewReprocessing.lock(); + if (newFrame->isComplete() == true) { + ret = m_removeFrameFromList(&m_processList, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + if (newFrame->getFrameLockState() == false) + { + delete newFrame; + newFrame = NULL; + } + } + m_frameFliteDeleteBetweenPreviewReprocessing.unlock(); +#endif + + + /* + * HACK + * Because we use MCpipe, we don't use a separate pipe_scc, + * so we will not hit inputFrameQ's failure issue. + */ + /* m_checkFpsAndUpdatePipeWaitTime(); */ + + if(getCameraId() == CAMERA_ID_BACK) { + m_autoFocusContinousQ.pushProcessQ(&frameCnt); + } + + return true; +} + +bool ExynosCamera::m_frameFactoryInitThreadFunc(void) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + bool loop = false; + status_t ret = NO_ERROR; + + ExynosCameraFrameFactory *framefactory = NULL; + + ret = m_frameFactoryQ->waitAndPopProcessQ(&framefactory); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + return false; + } + + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (framefactory == NULL) { + CLOGE("ERR(%s[%d]):framefactory is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + if (framefactory->isCreated() == false) { + CLOGD("DEBUG(%s[%d]):framefactory create", __FUNCTION__, __LINE__); + framefactory->create(false); + } else { + CLOGD("DEBUG(%s[%d]):framefactory already created", __FUNCTION__, __LINE__); + } + +func_exit: + if (0 < m_frameFactoryQ->getSizeOfProcessQ()) { + if (m_previewEnabled == true) { + loop = true; + } else { + CLOGW("WARN(%s[%d]):Sudden stopPreview, so stop making frameFactory (m_frameFactoryQ->getSizeOfProcessQ() : %d)", + __FUNCTION__, __LINE__, m_frameFactoryQ->getSizeOfProcessQ()); + loop = false; + } + } + + CLOGI("INFO(%s[%d]):m_framefactoryThread end loop(%d) m_frameFactoryQ(%d), m_previewEnabled(%d)", + __FUNCTION__, __LINE__, loop, m_frameFactoryQ->getSizeOfProcessQ(), m_previewEnabled); + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return loop; +} + +status_t ExynosCamera::m_setCallbackBufferInfo(ExynosCameraBuffer *callbackBuf, char *baseAddr) +{ + /* + * If it is not 16-aligned, shrink it down to 16 alignment. ex) 1080 -> 1072 + * But memory is laid out in the Android format, so the non-aligned area will be black. 
+ */ + int dst_width = 0, dst_height = 0, dst_crop_width = 0, dst_crop_height = 0; + int dst_format = m_parameters->getPreviewFormat(); + + m_parameters->getPreviewSize(&dst_width, &dst_height); + dst_crop_width = dst_width; + dst_crop_height = dst_height; + + if (dst_format == V4L2_PIX_FMT_NV21 || + dst_format == V4L2_PIX_FMT_NV21M) { + + callbackBuf->size[0] = (dst_width * dst_height); + callbackBuf->size[1] = (dst_width * dst_height) / 2; + + callbackBuf->addr[0] = baseAddr; + callbackBuf->addr[1] = callbackBuf->addr[0] + callbackBuf->size[0]; + } else if (dst_format == V4L2_PIX_FMT_YVU420 || + dst_format == V4L2_PIX_FMT_YVU420M) { + callbackBuf->size[0] = dst_width * dst_height; + callbackBuf->size[1] = dst_width / 2 * dst_height / 2; + callbackBuf->size[2] = callbackBuf->size[1]; + + callbackBuf->addr[0] = baseAddr; + callbackBuf->addr[1] = callbackBuf->addr[0] + callbackBuf->size[0]; + callbackBuf->addr[2] = callbackBuf->addr[1] + callbackBuf->size[1]; + } + + CLOGV("DEBUG(%s): dst_size(%dx%d), dst_crop_size(%dx%d)", __FUNCTION__, dst_width, dst_height, dst_crop_width, dst_crop_height); + + return NO_ERROR; +} + +status_t ExynosCamera::m_doZoomPrviewWithCSC(int32_t pipeId, int32_t gscPipe, ExynosCameraFrame *frame) +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + int32_t previewFormat = 0; + status_t ret = NO_ERROR; + ExynosRect srcRect, dstRect; + ExynosCameraBuffer srcBuf; + ExynosCameraBuffer dstBuf; + uint32_t *output = NULL; + struct camera2_stream *meta = NULL; + int previewH, previewW; + int bufIndex = -1; + int waitCount = 0; + int scpNodeIndex = -1; + int srcNodeIndex = -1; + int srcFmt = -1; + + previewFormat = m_parameters->getHwPreviewFormat(); + m_parameters->getHwPreviewSize(&previewW, &previewH); + + /* To pass the CTS2.0 test "CameraDeviceTest": + * when the preview size changes repeatedly, + * prevent the Pipe CSC dqbuf from running before the H/W size setting is finished. + */ + CLOGV("DEBUG(%s[%d]): getPreviewSizeChanged() : %d", + __FUNCTION__, __LINE__, m_parameters->getPreviewSizeChanged()); + + if (m_parameters->getPreviewSizeChanged() == true) { + CLOGW("INFO(%s[%d]):Preview size is changed. Skipping doZoomPrviewWithCSC", + __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if ((m_parameters->getPreviewSizeChanged() == false) && + (m_oldPreviewW != (unsigned)previewW) && + (m_oldPreviewH != (unsigned)previewH)) { + CLOGI("INFO(%s[%d]):HW Preview size is changed. 
Update size(%d x %d)", + __FUNCTION__, __LINE__, previewW, previewH); + + m_oldPreviewW = previewW; + m_oldPreviewH = previewH; + } + + /* get Scaler src Buffer Node Index*/ + if (m_parameters->getDualMode() != true) { + srcNodeIndex = m_previewFrameFactory->getNodeType(PIPE_SCP); + srcFmt = previewFormat; + scpNodeIndex = m_previewFrameFactory->getNodeType(PIPE_SCP); + } else { + srcFmt = previewFormat; + return true; + } + + /* get scaler source buffer */ + srcBuf.index = -1; + ret = frame->getDstBuffer(pipeId, &srcBuf, srcNodeIndex); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)",__FUNCTION__, __LINE__, pipeId, ret); + return ret; + } + + /* getMetadata to get buffer size */ + meta = (struct camera2_stream*)srcBuf.addr[srcBuf.planeCount-1]; + if (meta == NULL) { + CLOGE("ERR(%s[%d]):srcBuf.addr is NULL, srcBuf.addr(0x%x)",__FUNCTION__, __LINE__, srcBuf.addr[srcBuf.planeCount-1]); + return INVALID_OPERATION; + } + + output = meta->output_crop_region; + + /* check scaler conditions( compare the crop size & format ) */ + if (output[2] == (unsigned)previewW && output[3] == (unsigned)previewH && srcFmt == previewFormat +#ifdef USE_PREVIEW_CROP_FOR_ROATAION + && m_parameters->getRotationProperty() != FORCE_PREVIEW_BUFFER_CROP_ROTATION_270 +#endif + ) { + /* HACK for CTS2.0 */ + m_oldPreviewW = previewW; + m_oldPreviewH = previewH; + + return ret; + } + + /* wait unitil get buffers */ + do { + dstBuf.index = -2; + waitCount++; + + if (m_scpBufferMgr->getNumOfAvailableBuffer() > 0) + m_scpBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuf); + + if (dstBuf.index < 0) { + usleep(WAITING_TIME); + + if (waitCount % 20 == 0) { + CLOGW("WRN(%s[%d]):retry JPEG getBuffer(%d) m_zoomPreviwWithCscQ->getSizeOfProcessQ(%d)", + __FUNCTION__, __LINE__, bufIndex, m_zoomPreviwWithCscQ->getSizeOfProcessQ()); + m_scpBufferMgr->dump(); + } + } else { + break; + } + /* this will retry until 300msec */ + } while (waitCount < (TOTAL_WAITING_TIME / WAITING_TIME) && previewEnabled() == false); + + if (bufIndex == -1) { + m_scpBufferMgr->cancelBuffer(srcBuf.index); + CLOGE("ERR(%s[%d]):Failed to get the gralloc Buffer for GSC dstBuf ",__FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* csc and scaling */ + srcRect.x = 0; + srcRect.y = 0; + srcRect.w = output[2]; + srcRect.h = output[3]; + srcRect.fullW = output[2]; + srcRect.fullH = output[3]; + srcRect.colorFormat = srcFmt; + +#ifdef USE_PREVIEW_CROP_FOR_ROATAION + if (m_parameters->getRotationProperty() == FORCE_PREVIEW_BUFFER_CROP_ROTATION_270) { + int old_width = srcRect.w; + if (srcRect.w > srcRect.h) { + srcRect.w = ALIGN_DOWN(srcRect.h*srcRect.h / srcRect.w, GSCALER_IMG_ALIGN); + srcRect.x = ALIGN_DOWN((old_width-srcRect.w) / 2, GSCALER_IMG_ALIGN); + } + } +#endif + + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = previewW; + dstRect.h = previewH; + dstRect.fullW = previewW; + dstRect.fullH = previewH; + dstRect.colorFormat = previewFormat; + + CLOGV("DEBUG(%s[%d]): srcBuf(%d) dstBuf(%d) (%d, %d, %d, %d) format(%d) actual(%x) -> (%d, %d, %d, %d) format(%d) actual(%x)", + __FUNCTION__, __LINE__, srcBuf.index, dstBuf.index, + srcRect.x, srcRect.y, srcRect.w, srcRect.h, srcFmt, V4L2_PIX_2_HAL_PIXEL_FORMAT(srcFmt), + dstRect.x, dstRect.y, dstRect.w, dstRect.h, previewFormat, V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat)); + + ret = frame->setSrcRect(gscPipe, srcRect); + ret = frame->setDstRect(gscPipe, dstRect); + + ret = m_setupEntity(gscPipe, frame, &srcBuf, &dstBuf); + if (ret < 0) { + 
CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, gscPipe, ret); + } + +#ifdef USE_PREVIEW_CROP_FOR_ROATAION + if (m_parameters->getRotationProperty() == FORCE_PREVIEW_BUFFER_CROP_ROTATION_270) { + ret = frame->setRotation(gscPipe, 270); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setRotation(%d) fail", __FUNCTION__, __LINE__, 270); + ret = INVALID_OPERATION; + goto func_exit; + } + } +#endif + + m_previewFrameFactory->setOutputFrameQToPipe(m_zoomPreviwWithCscQ, gscPipe); + m_previewFrameFactory->pushFrameToPipe(&frame, gscPipe); + + /* wait GSC done */ + CLOGV("INFO(%s[%d]):wait GSC output", __FUNCTION__, __LINE__); + waitCount = 0; + + do { + ret = m_zoomPreviwWithCscQ->waitAndPopProcessQ(&frame); + waitCount++; + } while (ret == TIMED_OUT && waitCount < 100 && m_flagThreadStop != true); + + if (ret < 0) { + CLOGW("WARN(%s[%d]):GSC wait and pop return, ret(%d)", __FUNCTION__, __LINE__, ret); + } + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + /* copy metadata src to dst*/ + memcpy(dstBuf.addr[dstBuf.planeCount-1],srcBuf.addr[srcBuf.planeCount-1], sizeof(struct camera2_stream)); + + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + /* in case of dual front path buffer returned frameSelector, do not return buffer. */ + } else { + m_scpBufferMgr->cancelBuffer(srcBuf.index); + } + + ret = frame->setDstBufferState(pipeId, ENTITY_BUFFER_STATE_REQUESTED, scpNodeIndex); + if (ret != NO_ERROR) { + CLOGE("ERR(%s): setDstBufferState state fail", __FUNCTION__); + return ret; + } + + ret = frame->setDstBuffer(pipeId, dstBuf, scpNodeIndex); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setdst Buffer failed(%d)", __FUNCTION__, __LINE__, ret); + } + + ret = frame->setDstBufferState(pipeId, ENTITY_BUFFER_STATE_COMPLETE, scpNodeIndex); + if (ret != NO_ERROR) { + CLOGE("ERR(%s): setDstBufferState state fail", __FUNCTION__); + return ret; + } + +func_exit: + + CLOGV("DEBUG(%s[%d]):--OUT--", __FUNCTION__, __LINE__); + + return ret; +} + +bool ExynosCamera::m_setBuffersThreadFunc(void) +{ + int ret = 0; + + ret = m_setBuffers(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setBuffers failed, releaseBuffer", __FUNCTION__, __LINE__); + + /* TODO: Need release buffers and error exit */ + + m_releaseBuffers(); + m_isSuccessedBufferAllocation = false; + return false; + } + + m_isSuccessedBufferAllocation = true; + return false; +} + +bool ExynosCamera::m_startPictureInternalThreadFunc(void) +{ + int ret = 0; + + ret = m_startPictureInternal(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setBuffers failed", __FUNCTION__, __LINE__); + + /* TODO: Need release buffers and error exit */ + + return false; + } + + return false; +} + +bool ExynosCamera::m_prePictureThreadFunc(void) +{ + bool loop = false; + bool isProcessed = true; + + ExynosCameraDurationTimer m_burstPrePictureTimer; + uint64_t m_burstPrePictureTimerTime; + uint64_t burstWaitingTime; + + status_t ret = NO_ERROR; + + uint64_t seriesShotDuration = m_parameters->getSeriesShotDuration(); + + m_burstPrePictureTimer.start(); + + if (m_parameters->isReprocessing()) + loop = m_reprocessingPrePictureInternal(); + else + loop = m_prePictureInternal(&isProcessed); + + m_burstPrePictureTimer.stop(); + m_burstPrePictureTimerTime = m_burstPrePictureTimer.durationUsecs(); + + if(isProcessed && loop && seriesShotDuration > 0 && m_burstPrePictureTimerTime < seriesShotDuration) { + CLOGD("DEBUG(%s[%d]): The time between shots is too short(%lld)us. 
Extended to (%lld)us" + , __FUNCTION__, __LINE__, m_burstPrePictureTimerTime, seriesShotDuration); + + burstWaitingTime = seriesShotDuration - m_burstPrePictureTimerTime; + usleep(burstWaitingTime); + } + + return loop; +} + +/* + * pIsProcessed : out parameter + * true if the frame is properly handled. + * false if frame processing is failed or there is no frame to process + */ +bool ExynosCamera::m_prePictureInternal(bool* pIsProcessed) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + bool loop = false; + ExynosCameraFrame *newFrame = NULL; + camera2_shot_ext *shot_ext = NULL; + + ExynosCameraBuffer fliteReprocessingBuffer; + ExynosCameraBuffer ispReprocessingBuffer; +#ifdef DEBUG_RAWDUMP + ExynosCameraBuffer bayerBuffer; + ExynosCameraBuffer pictureBuffer; + ExynosCameraFrame *inListFrame = NULL; + ExynosCameraFrame *bayerFrame = NULL; +#endif + int pipeId = 0; + int bufPipeId = 0; + bool isSrc = false; + int retryCount = 3; + + if (m_hdrEnabled) + retryCount = 15; + + if (m_parameters->isOwnScc(getCameraId()) == true) + bufPipeId = PIPE_SCC; + else if (m_parameters->isUsing3acForIspc() == true) + bufPipeId = PIPE_3AC; + else + bufPipeId = PIPE_ISPC; + + if (m_parameters->is3aaIspOtf() == true) + pipeId = PIPE_3AA; + else + pipeId = PIPE_ISP; + + int postProcessQSize = m_postPictureQ->getSizeOfProcessQ(); + if (postProcessQSize > 2) { + CLOGW("DEBUG(%s[%d]): post picture is delayed(stacked %d frames), skip", __FUNCTION__, __LINE__, postProcessQSize); + usleep(WAITING_TIME); + goto CLEAN; + } + + if (m_parameters->getRecordingHint() == true + && m_parameters->isUsing3acForIspc() == true) + m_sccCaptureSelector->clearList(pipeId, false, m_previewFrameFactory->getNodeType(bufPipeId)); + + newFrame = m_sccCaptureSelector->selectFrames(m_reprocessingCounter.getCount(), pipeId, isSrc, retryCount, m_pictureFrameFactory->getNodeType(bufPipeId)); + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + newFrame->frameLock(); + + CLOGI("DEBUG(%s[%d]):Frame Count (%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + + m_postProcessList.push_back(newFrame); + + if ((m_parameters->getHighResolutionCallbackMode() == true) && + (m_highResolutionCallbackRunning == true)) { + m_highResolutionCallbackQ->pushProcessQ(&newFrame); + } else { + dstSccReprocessingQ->pushProcessQ(&newFrame); + } + + m_reprocessingCounter.decCount(); + + CLOGI("INFO(%s[%d]):prePicture complete, remaining count(%d)", __FUNCTION__, __LINE__, m_reprocessingCounter.getCount()); + + if (m_hdrEnabled) { + ExynosCameraActivitySpecialCapture *m_sCaptureMgr; + + m_sCaptureMgr = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + + if (m_reprocessingCounter.getCount() == 0) + m_sCaptureMgr->setCaptureStep(ExynosCameraActivitySpecialCapture::SCAPTURE_STEP_OFF); + } + + if ((m_parameters->getHighResolutionCallbackMode() == true) && + (m_highResolutionCallbackRunning == true)) + loop = true; + + if (m_reprocessingCounter.getCount() > 0) { + loop = true; + + } + + *pIsProcessed = true; +#ifdef DEBUG_RAWDUMP + retryCount = 30; /* 200ms x 30 */ + bayerBuffer.index = -2; + + m_captureSelector->setWaitTime(200000000); + bayerFrame = m_captureSelector->selectFrames(1, PIPE_FLITE, isSrc, retryCount); + if (bayerFrame == NULL) { + CLOGE("ERR(%s[%d]):bayerFrame is NULL", __FUNCTION__, __LINE__); + } else { + ret = bayerFrame->getDstBuffer(PIPE_FLITE, &bayerBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]): 
getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, PIPE_FLITE, ret); + } else { + if (m_parameters->checkBayerDumpEnable()) { + int sensorMaxW, sensorMaxH; + int sensorMarginW, sensorMarginH; + bool bRet; + char filePath[70]; + int fliteFcount = 0; + int pictureFcount = 0; + + camera2_shot_ext *shot_ext = NULL; + + ret = newFrame->getDstBuffer(PIPE_3AA, &pictureBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, PIPE_3AA, ret); + } + + shot_ext = (camera2_shot_ext *)(bayerBuffer.addr[1]); + if (shot_ext != NULL) + fliteFcount = shot_ext->shot.dm.request.frameCount; + else + ALOGE("ERR(%s[%d]):fliteBayerBuffer is null", __FUNCTION__, __LINE__); + + shot_ext->shot.dm.request.frameCount = 0; + shot_ext = (camera2_shot_ext *)(pictureBuffer.addr[1]); + if (shot_ext != NULL) + pictureFcount = shot_ext->shot.dm.request.frameCount; + else + ALOGE("ERR(%s[%d]):PictureBuffer is null", __FUNCTION__, __LINE__); + + CLOGD("DEBUG(%s[%d]):bayer fcount(%d) picture fcount(%d)", __FUNCTION__, __LINE__, fliteFcount, pictureFcount); + /* The driver frame count is used to check the match between the 3AA frame and the FLITE frame. + if the match fails then the bayer buffer does not correspond to the capture output and hence + not written to the file */ + if (fliteFcount == pictureFcount) { + memset(filePath, 0, sizeof(filePath)); + snprintf(filePath, sizeof(filePath), "/data/media/0/RawCapture%d_%d.raw",m_cameraId, pictureFcount); + + bRet = dumpToFile((char *)filePath, + bayerBuffer.addr[0], + bayerBuffer.size[0]); + if (bRet != true) + CLOGE("couldn't make a raw file"); + } + } + + if (bayerFrame != NULL) { + ret = m_bayerBufferMgr->putBuffer(bayerBuffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret < 0) { + ALOGE("ERR(%s[%d]):putBuffer failed Index is %d", __FUNCTION__, __LINE__, bayerBuffer.index); + m_bayerBufferMgr->printBufferState(); + m_bayerBufferMgr->printBufferQState(); + } + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + bayerFrame->decRef(); +#endif + m_frameMgr->deleteFrame(bayerFrame); + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", __FUNCTION__, __LINE__, bayerFrame->getFrameCount()); + } + bayerFrame = NULL; + } + } + } +#endif + return loop; + +CLEAN: + if (newFrame != NULL) { + newFrame->printEntity(); + CLOGD("DEBUG(%s[%d]): Picture frame delete(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + if (m_hdrEnabled) { + ExynosCameraActivitySpecialCapture *m_sCaptureMgr; + + m_sCaptureMgr = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + + if (m_reprocessingCounter.getCount() == 0) + m_sCaptureMgr->setCaptureStep(ExynosCameraActivitySpecialCapture::SCAPTURE_STEP_OFF); + } + + if (m_reprocessingCounter.getCount() > 0) + loop = true; + + CLOGI("INFO(%s[%d]): prePicture fail, remaining count(%d)", __FUNCTION__, __LINE__, m_reprocessingCounter.getCount()); + *pIsProcessed = false; // Notify failure + return loop; + +} + + +bool ExynosCamera::m_highResolutionCallbackThreadFunc(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + int loop = false; + int retryCountGSC = 4; + + ExynosCameraFrame *newFrame = NULL; + camera2_stream *shot_stream = NULL; + + ExynosCameraBuffer sccReprocessingBuffer; + ExynosCameraBuffer highResolutionCbBuffer; + + int cbPreviewW = 0, cbPreviewH = 0; + int previewFormat = 0; + ExynosRect 
srcRect, dstRect; + m_parameters->getPreviewSize(&cbPreviewW, &cbPreviewH); + previewFormat = m_parameters->getPreviewFormat(); + + int pipeId_scc = 0; + int pipeId_gsc = 0; + + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int planeCount = getYuvPlaneCount(previewFormat); + int minBufferCount = 1; + int maxBufferCount = 1; + int buffer_idx = getShotBufferIdex(); + + sccReprocessingBuffer.index = -2; + highResolutionCbBuffer.index = -2; + + if (m_parameters->isUseYuvReprocessing() == true) { + pipeId_scc = PIPE_MCSC_REPROCESSING; + pipeId_gsc = PIPE_GSC_REPROCESSING; + } else if (m_parameters->isUsing3acForIspc() == true) { + pipeId_scc = PIPE_3AA; + pipeId_gsc = PIPE_GSC_PICTURE; + } else { + pipeId_scc = (m_parameters->isOwnScc(getCameraId()) == true) ? PIPE_SCC_REPROCESSING : PIPE_ISP_REPROCESSING; + pipeId_gsc = PIPE_GSC_REPROCESSING; + } + + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + if (m_parameters->getHighResolutionCallbackMode() == false && + m_highResolutionCallbackRunning == false) { + CLOGD("DEBUG(%s[%d]): High Resolution Callback Stop", __FUNCTION__, __LINE__); + goto CLEAN; + } + + ret = getYuvPlaneSize(previewFormat, planeSize, cbPreviewW, cbPreviewH); + if (ret < 0) { + CLOGE("ERR(%s[%d]): BAD value, format(%x), size(%dx%d)", + __FUNCTION__, __LINE__, previewFormat, cbPreviewW, cbPreviewH); + return ret; + } + + /* wait SCC */ + CLOGV("INFO(%s[%d]):wait SCC output", __FUNCTION__, __LINE__); + ret = m_highResolutionCallbackQ->waitAndPopProcessQ(&newFrame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + goto CLEAN; + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + // TODO: doing exception handling + goto CLEAN; + } + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + ret = newFrame->setEntityState(pipeId_scc, ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState(ENTITY_STATE_PROCESSING) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + return ret; + } + CLOGV("INFO(%s[%d]):SCC output done", __FUNCTION__, __LINE__); + + if (m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) { + /* get GSC src buffer */ + if (m_parameters->isUseYuvReprocessing() == true) { + ret = newFrame->getDstBuffer(pipeId_scc, &highResolutionCbBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_MCSC0_REPROCESSING)); + } else if (m_parameters->isUsing3acForIspc() == true) + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + else + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_SCC_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + + if (m_parameters->isUseYuvReprocessing() == false) { + shot_stream = (struct camera2_stream *)(sccReprocessingBuffer.addr[buffer_idx]); + if (shot_stream == NULL) { + CLOGE("ERR(%s[%d]):shot_stream is NULL. 
buffer(%d)", + __FUNCTION__, __LINE__, sccReprocessingBuffer.index); + goto CLEAN; + } + + /* alloc GSC buffer */ + if (m_highResolutionCallbackBufferMgr->isAllocated() == false) { + ret = m_allocBuffers(m_highResolutionCallbackBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, false, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_highResolutionCallbackBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", + __FUNCTION__, __LINE__, minBufferCount, maxBufferCount); + return ret; + } + } + + /* get GSC dst buffer */ + int bufIndex = -2; + m_highResolutionCallbackBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &highResolutionCbBuffer); + } + + /* get preview callback heap */ + camera_memory_t *previewCallbackHeap = NULL; + previewCallbackHeap = m_getMemoryCb(highResolutionCbBuffer.fd[0], highResolutionCbBuffer.size[0], 1, m_callbackCookie); + if (!previewCallbackHeap || previewCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, highResolutionCbBuffer.size[0]); + goto CLEAN; + } + + ret = m_setCallbackBufferInfo(&highResolutionCbBuffer, (char *)previewCallbackHeap->data); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setCallbackBufferInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN; + } + + if (m_parameters->isUseYuvReprocessing() == false) { + /* set src/dst rect */ + srcRect.x = shot_stream->output_crop_region[0]; + srcRect.y = shot_stream->output_crop_region[1]; + srcRect.w = shot_stream->output_crop_region[2]; + srcRect.h = shot_stream->output_crop_region[3]; + + ret = m_calcHighResolutionPreviewGSCRect(&srcRect, &dstRect); + ret = newFrame->setSrcRect(pipeId_gsc, &srcRect); + ret = newFrame->setDstRect(pipeId_gsc, &dstRect); + + CLOGV("DEBUG(%s[%d]):srcRect x : %d, y : %d, w : %d, h : %d", __FUNCTION__, __LINE__, srcRect.x, srcRect.y, srcRect.w, srcRect.h); + CLOGV("DEBUG(%s[%d]):dstRect x : %d, y : %d, w : %d, h : %d", __FUNCTION__, __LINE__, dstRect.x, dstRect.y, dstRect.w, dstRect.h); + + ret = m_setupEntity(pipeId_gsc, newFrame, &sccReprocessingBuffer, &highResolutionCbBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + /* push frame to GSC pipe */ + m_pictureFrameFactory->setOutputFrameQToPipe(dstGscReprocessingQ, pipeId_gsc); + m_pictureFrameFactory->pushFrameToPipe(&newFrame, pipeId_gsc); + + /* wait GSC for high resolution preview callback */ + CLOGI("INFO(%s[%d]):wait GSC output", __FUNCTION__, __LINE__); + while (retryCountGSC > 0) { + ret = dstGscReprocessingQ->waitAndPopProcessQ(&newFrame); + if (ret == TIMED_OUT) { + CLOGW("WRN(%s)(%d):wait and pop timeout, ret(%d)", __FUNCTION__, __LINE__, ret); + m_pictureFrameFactory->startThread(pipeId_gsc); + } else if (ret < 0) { + CLOGE("ERR(%s)(%d):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto CLEAN; + } else { + break; + } + retryCountGSC--; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + goto CLEAN; + } + CLOGI("INFO(%s[%d]):GSC output done", __FUNCTION__, __LINE__); + + /* put SCC buffer */ + if (m_parameters->isUsing3acForIspc() == true) { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + ret 
= m_putBuffers(m_sccBufferMgr, sccReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + } else { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_SCC_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + ret = m_putBuffers(m_sccReprocessingBufferMgr, sccReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + } + } + + CLOGV("DEBUG(%s[%d]):high resolution preview callback", __FUNCTION__, __LINE__); + if (m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME)) { + setBit(&m_callbackState, CALLBACK_STATE_PREVIEW_FRAME, false); + m_dataCb(CAMERA_MSG_PREVIEW_FRAME, previewCallbackHeap, 0, NULL, m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_PREVIEW_FRAME, false); + } + + previewCallbackHeap->release(previewCallbackHeap); + + /* put high resolution callback buffer */ + if (m_parameters->isUseYuvReprocessing() == true) + ret = m_putBuffers(m_sccReprocessingBufferMgr, highResolutionCbBuffer.index); + else + ret = m_putBuffers(m_highResolutionCallbackBufferMgr, highResolutionCbBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_putBuffers fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + } else { + CLOGD("DEBUG(%s[%d]): Preview callback message disabled, skip callback", __FUNCTION__, __LINE__); + /* put SCC buffer */ + if (m_parameters->isUseYuvReprocessing() == true) { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_MCSC0_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + ret = m_putBuffers(m_sccReprocessingBufferMgr, sccReprocessingBuffer.index); + } else if (m_parameters->isUsing3acForIspc() == true) { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + ret = m_putBuffers(m_sccBufferMgr, sccReprocessingBuffer.index); + } else { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_SCC_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + ret = m_putBuffers(m_sccReprocessingBufferMgr, sccReprocessingBuffer.index); + } + } + + if (newFrame != NULL) { + newFrame->frameUnlock(); + ret = m_removeFrameFromList(&m_postProcessList, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGD("DEBUG(%s[%d]): Reprocessing frame delete(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + if(m_flagThreadStop != true) { + if (m_highResolutionCallbackQ->getSizeOfProcessQ() > 0 || + m_parameters->getHighResolutionCallbackMode() == true) { + CLOGD("DEBUG(%s[%d]):highResolutionCallbackQ size(%d), highResolutionCallbackMode(%s), start again", + __FUNCTION__, __LINE__, + m_highResolutionCallbackQ->getSizeOfProcessQ(), + (m_parameters->getHighResolutionCallbackMode() == true)? 
"TRUE" : "FALSE"); + loop = true; + } + } + + CLOGI("INFO(%s[%d]):high resolution callback thread complete, loop(%d)", __FUNCTION__, __LINE__, loop); + + /* one shot */ + return loop; + +CLEAN: + if (sccReprocessingBuffer.index != -2) + ret = m_putBuffers(m_sccReprocessingBufferMgr, sccReprocessingBuffer.index); + if (highResolutionCbBuffer.index != -2) + m_putBuffers(m_highResolutionCallbackBufferMgr, highResolutionCbBuffer.index); + + if (newFrame != NULL) { + newFrame->printEntity(); + + newFrame->frameUnlock(); + ret = m_removeFrameFromList(&m_postProcessList, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + ret = m_deleteFrame(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_deleteFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + } + + if(m_flagThreadStop != true) { + if (m_highResolutionCallbackQ->getSizeOfProcessQ() > 0 || + m_parameters->getHighResolutionCallbackMode() == true) { + CLOGD("DEBUG(%s[%d]):highResolutionCallbackQ size(%d), highResolutionCallbackMode(%s), start again", + __FUNCTION__, __LINE__, + m_highResolutionCallbackQ->getSizeOfProcessQ(), + (m_parameters->getHighResolutionCallbackMode() == true)? "TRUE" : "FALSE"); + loop = true; + } + } + + CLOGI("INFO(%s[%d]):high resolution callback thread fail, loop(%d)", __FUNCTION__, __LINE__, loop); + + /* one shot */ + return loop; +} + +status_t ExynosCamera::m_doPrviewToRecordingFunc( + int32_t pipeId, + ExynosCameraBuffer previewBuf, + ExynosCameraBuffer recordingBuf, + nsecs_t timeStamp) +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + CLOGV("DEBUG(%s[%d]):--IN-- (previewBuf.index=%d, recordingBuf.index=%d)", + __FUNCTION__, __LINE__, previewBuf.index, recordingBuf.index); + + status_t ret = NO_ERROR; + ExynosRect srcRect, dstRect; + ExynosCameraFrame *newFrame = NULL; + struct camera2_node_output node; + + newFrame = m_previewFrameFactory->createNewFrameVideoOnly(); + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return UNKNOWN_ERROR; + } + + /* TODO: HACK: Will be removed, this is driver's job */ + m_convertingStreamToShotExt(&previewBuf, &node); + setMetaDmSensorTimeStamp((struct camera2_shot_ext*)previewBuf.addr[previewBuf.planeCount-1], timeStamp); + + /* csc and scaling */ + ret = m_calcRecordingGSCRect(&srcRect, &dstRect); + ret = newFrame->setSrcRect(pipeId, srcRect); + ret = newFrame->setDstRect(pipeId, dstRect); + + ret = m_setupEntity(pipeId, newFrame, &previewBuf, &recordingBuf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, ret); + ret = INVALID_OPERATION; + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + goto func_exit; + } + m_recordingListLock.lock(); + m_recordingProcessList.push_back(newFrame); + m_recordingListLock.unlock(); + m_previewFrameFactory->setOutputFrameQToPipe(m_recordingQ, pipeId); + + m_recordingStopLock.lock(); + if (m_getRecordingEnabled() == false) { + m_recordingStopLock.unlock(); + CLOGD("DEBUG(%s[%d]): m_getRecordingEnabled is false, skip frame(%d) previewBuf(%d) recordingBuf(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), previewBuf.index, recordingBuf.index); + + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + if (recordingBuf.index >= 0){ + m_putBuffers(m_recordingBufferMgr, recordingBuf.index); + } + goto func_exit; + } + 
m_previewFrameFactory->pushFrameToPipe(&newFrame, pipeId); + m_recordingStopLock.unlock(); + +func_exit: + + CLOGV("DEBUG(%s[%d]):--OUT--", __FUNCTION__, __LINE__); + return ret; + +} + +bool ExynosCamera::m_vraThreadFunc(void) +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + status_t ret = NO_ERROR; + int gscPipeId = PIPE_GSC_VRA; + int vraPipeId = PIPE_VRA; + + ExynosCameraFrame *gscFrame = NULL; + ExynosCameraFrame *gscDoneFrame = NULL; + ExynosCameraFrame *vraFrame = NULL; + ExynosCameraFrame *vraDoneFrame = NULL; + ExynosCameraBuffer srcBuf; + ExynosCameraBuffer dstBuf; + int vraWidth = 0, vraHeight = 0; + int dstBufIndex = -2; + int frameCount = -1; + + int waitCount = 0; + + struct camera2_stream *streamMeta = NULL; + uint32_t *mcscOutputCrop = NULL; + + ExynosRect srcRect, dstRect; + int32_t previewFormat = m_parameters->getHwPreviewFormat(); + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + /* Pop frame from MCSC output Q */ + CLOGV("INFO(%s[%d]):wait MCSC output", __FUNCTION__, __LINE__); + + ret = m_vraThreadQ->waitAndPopProcessQ(&gscFrame); + + if (m_flagThreadStop == true) + goto func_exit; + + if (ret != NO_ERROR) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s[%d]):wait timeout", __FUNCTION__, __LINE__); + } else { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + } + goto func_exit; + } + + if (gscFrame == NULL) { + CLOGE("ERR(%s[%d]):gscFrame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + frameCount = gscFrame->getFrameCount(); + CLOGV("INFO(%s[%d]):Get GSC frame for VRA, frameCount(%d)", __FUNCTION__, __LINE__, frameCount); + + m_vraRunningCount++; + + /* Get scaler source buffer */ + ret = gscFrame->getSrcBuffer(gscPipeId, &srcBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto func_exit; + } + + /* Get scaler destination buffer */ + do { + waitCount++; + + if (m_vraBufferMgr->getNumOfAvailableBuffer() > 0) + m_vraBufferMgr->getBuffer(&dstBufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuf); + + if (dstBufIndex < 0) { + usleep(WAITING_TIME); + + if (waitCount % 20 == 0) { + CLOGW("WRN(%s[%d]):retry VRA getBuffer(%d)", __FUNCTION__, __LINE__, dstBufIndex); + m_scpBufferMgr->dump(); + } + } else { + break; + } + /* this will retry until 300msec */ + } while (waitCount < (TOTAL_WAITING_TIME / WAITING_TIME) && previewEnabled() == false); + + /* Get size from metadata */ + streamMeta = (struct camera2_stream*)srcBuf.addr[srcBuf.planeCount-1]; + if (streamMeta == NULL) { + CLOGE("ERR(%s[%d]):srcBuf.addr is NULL, srcBuf.addr(0x%x)",__FUNCTION__, __LINE__, srcBuf.addr[srcBuf.planeCount-1]); + goto func_exit; + } + + /* Set size to GSC frame */ + mcscOutputCrop = streamMeta->output_crop_region; + + srcRect.x = 0; + srcRect.y = 0; + srcRect.w = mcscOutputCrop[2]; + srcRect.h = mcscOutputCrop[3]; + srcRect.fullW = mcscOutputCrop[2]; + srcRect.fullH = mcscOutputCrop[3]; + srcRect.colorFormat = previewFormat; + + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = vraWidth; + dstRect.h = vraHeight; + dstRect.fullW = vraWidth; + dstRect.fullH = vraHeight; + dstRect.colorFormat = m_parameters->getHwVraInputFormat(); + + ret = gscFrame->setSrcRect(gscPipeId, srcRect); + ret = gscFrame->setDstRect(gscPipeId, dstRect); + + ret = m_setupEntity(gscPipeId, gscFrame, &srcBuf, &dstBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", 
__FUNCTION__, __LINE__, gscPipeId, ret); + } + + m_previewFrameFactory->setOutputFrameQToPipe(m_vraGscDoneQ, gscPipeId); + m_previewFrameFactory->pushFrameToPipe(&gscFrame, gscPipeId); + + /* Wait and Pop frame from GSC output Q */ + CLOGV("INFO(%s[%d]):wait GSC output", __FUNCTION__, __LINE__); + + waitCount = 0; + do { + ret = m_vraGscDoneQ->waitAndPopProcessQ(&gscDoneFrame); + waitCount++; + + if (m_flagThreadStop == true) { + if (m_vraRunningCount > 0) + m_vraRunningCount--; + return false; + } + } while (ret == TIMED_OUT && waitCount < 10); + + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):GSC wait and pop error, ret(%d)", __FUNCTION__, __LINE__, ret); + + if (gscDoneFrame == NULL) { + CLOGE("ERR(%s[%d]):gscFrame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + CLOGV("INFO(%s[%d]):Get frame from GSC Pipe, frameCount(%d)", __FUNCTION__, __LINE__, frameCount); + + vraFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(vraPipeId, frameCount); + + if (vraFrame != NULL) { + /* Set perframe size of VRA */ + camera2_node_group node_group_info; + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + + node_group_info.leader.request = 1; + node_group_info.leader.input.cropRegion[0] = 0; + node_group_info.leader.input.cropRegion[1] = 0; + node_group_info.leader.input.cropRegion[2] = vraWidth; + node_group_info.leader.input.cropRegion[3] = vraHeight; + node_group_info.leader.output.cropRegion[0] = 0; + node_group_info.leader.output.cropRegion[1] = 0; + node_group_info.leader.output.cropRegion[2] = node_group_info.leader.input.cropRegion[2]; + node_group_info.leader.output.cropRegion[3] = node_group_info.leader.input.cropRegion[3]; + + vraFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_VRA); + + /* Copy metadata frame to dst buffer */ + struct camera2_shot_ext shot_ext; + gscDoneFrame->getMetaData(&shot_ext); + memcpy(dstBuf.addr[dstBuf.planeCount-1], &shot_ext, sizeof(struct camera2_shot_ext)); + + /* Set FD bypass from parameters */ + shot_ext.fd_bypass = m_parameters->getFdEnable(); + switch (getCameraId()) { + case CAMERA_ID_FRONT: + /* HACK: Calibrate FD orientation */ + shot_ext.shot.uctl.scalerUd.orientation = (m_parameters->getDeviceOrientation() + FRONT_ROTATION + 180) % 360; + break; + case CAMERA_ID_BACK: + default: + shot_ext.shot.uctl.scalerUd.orientation = m_parameters->getFdOrientation(); + break; + } + + vraFrame->setMetaData(&shot_ext); + + if (shot_ext.fd_bypass == false) { + ret = m_setupEntity(vraPipeId, vraFrame, &dstBuf, &dstBuf); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, vraPipeId, ret); + + m_previewFrameFactory->setOutputFrameQToPipe(m_vraPipeDoneQ, vraPipeId); + m_previewFrameFactory->pushFrameToPipe(&vraFrame, vraPipeId); + + /* Wait and Pop frame from VRA output Q */ + CLOGV("INFO(%s[%d]):wait VRA output", __FUNCTION__, __LINE__); + + waitCount = 0; + do { + ret = m_vraPipeDoneQ->waitAndPopProcessQ(&vraDoneFrame); + waitCount++; + + if (m_flagThreadStop == true) { + if (m_vraRunningCount > 0) + m_vraRunningCount--; + return false; + } + } while (ret == TIMED_OUT && waitCount < 10); + + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):VRA wait and pop error, ret(%d)", __FUNCTION__, __LINE__, ret); + + if (vraDoneFrame == NULL) { + CLOGE("ERR(%s[%d]):vraFrame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + CLOGV("INFO(%s[%d]):Get frame from VRA Pipe, frameCount(%d)", __FUNCTION__, __LINE__, frameCount); + + /* Face detection callback */ + struct camera2_shot_ext 
fd_shot; + vraDoneFrame->getDynamicMeta(&fd_shot); + + ExynosCameraFrame *fdFrame = m_frameMgr->createFrame(m_parameters, frameCount); + if (fdFrame != NULL) { + fdFrame->storeDynamicMeta(&fd_shot); + m_facedetectQ->pushProcessQ(&fdFrame); + } + } + } + +func_exit: + /* Put VRA buffer */ + if (dstBuf.index > -1) { + ret = m_vraBufferMgr->putBuffer(dstBuf.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):Put VRA buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + /* Delete frame */ + if (gscFrame != NULL) { + gscFrame->decRef(); + m_frameMgr->deleteFrame(gscFrame);; + } else if (gscDoneFrame != NULL) { + gscDoneFrame->decRef(); + m_frameMgr->deleteFrame(gscDoneFrame);; + } + + if (vraFrame != NULL) { + vraFrame->decRef(); + m_frameMgr->deleteFrame(vraFrame);; + } else if (vraDoneFrame != NULL) { + vraDoneFrame->decRef(); + m_frameMgr->deleteFrame(vraDoneFrame);; + } + + if (m_vraRunningCount > 0) + m_vraRunningCount--; + + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + return false; + } else { + return true; + } +} + +status_t ExynosCamera::m_releaseRecordingBuffer(int bufIndex) +{ + status_t ret = NO_ERROR; + + if (bufIndex < 0 || bufIndex >= (int)m_recordingBufferCount) { + CLOGE("ERR(%s):Out of Index! (Max: %d, Index: %d)", __FUNCTION__, m_recordingBufferCount, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + m_recordingTimeStamp[bufIndex] = 0L; + m_recordingBufAvailable[bufIndex] = true; + ret = m_putBuffers(m_recordingBufferMgr, bufIndex); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + +func_exit: + + return ret; +} + +status_t ExynosCamera::m_calcPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + return m_parameters->calcPreviewGSCRect(srcRect, dstRect); +} + +status_t ExynosCamera::m_calcHighResolutionPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + return m_parameters->calcHighResolutionPreviewGSCRect(srcRect, dstRect); +} + +status_t ExynosCamera::m_calcRecordingGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + return m_parameters->calcRecordingGSCRect(srcRect, dstRect); +} + +status_t ExynosCamera::m_calcPictureRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + return m_parameters->calcPictureRect(srcRect, dstRect); +} + +status_t ExynosCamera::m_calcPictureRect(int originW, int originH, ExynosRect *srcRect, ExynosRect *dstRect) +{ + return m_parameters->calcPictureRect(originW, originH, srcRect, dstRect); +} + +status_t ExynosCamera::m_searchFrameFromList(List *list, uint32_t frameCount, ExynosCameraFrame **frame) +{ + Mutex::Autolock lock(m_searchframeLock); + int ret = 0; + ExynosCameraFrame *curFrame = NULL; + List::iterator r; + + if (list->empty()) { + CLOGD("DEBUG(%s[%d]):list is empty", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + r = list->begin()++; + + do { + curFrame = *r; + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is empty", __FUNCTION__); + return INVALID_OPERATION; + } + + if (frameCount == curFrame->getFrameCount()) { + CLOGV("DEBUG(%s):frame count match: expected(%d)", __FUNCTION__, frameCount); + *frame = curFrame; + return NO_ERROR; + } + r++; + } while (r != list->end()); + + CLOGV("DEBUG(%s[%d]):Cannot find match frame, frameCount(%d)", __FUNCTION__, __LINE__, frameCount); + + return NO_ERROR; +} + +status_t ExynosCamera::m_removeFrameFromList(List *list, ExynosCameraFrame *frame) +{ + Mutex::Autolock lock(m_searchframeLock); + int ret = 
0; + ExynosCameraFrame *curFrame = NULL; + int frameCount = 0; + int curFrameCount = 0; + List::iterator r; + + if (frame == NULL) { + CLOGE("ERR(%s):frame is NULL", __FUNCTION__); + return BAD_VALUE; + } + + if (list->empty()) { + CLOGD("DEBUG(%s):list is empty", __FUNCTION__); + return NO_ERROR; + } + + frameCount = frame->getFrameCount(); + r = list->begin()++; + + do { + curFrame = *r; + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is empty", __FUNCTION__); + return INVALID_OPERATION; + } + + curFrameCount = curFrame->getFrameCount(); + if (frameCount == curFrameCount) { + CLOGV("DEBUG(%s):frame count match: expected(%d), current(%d)", __FUNCTION__, frameCount, curFrameCount); + list->erase(r); + return NO_ERROR; + } + CLOGW("WARN(%s):frame count mismatch: expected(%d), current(%d)", __FUNCTION__, frameCount, curFrameCount); + /* removed message */ + /* curFrame->printEntity(); */ + r++; + } while (r != list->end()); + + CLOGE("ERR(%s):Cannot find match frame!!!", __FUNCTION__); + + return INVALID_OPERATION; +} + +status_t ExynosCamera::m_deleteFrame(ExynosCameraFrame **frame) +{ + status_t ret = NO_ERROR; + + /* put lock using this frame */ + Mutex::Autolock lock(m_searchframeLock); + + if (*frame == NULL) { + CLOGE("ERR(%s[%d]):frame == NULL. so, fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if ((*frame)->getFrameLockState() == false) { + if ((*frame)->isComplete() == true) { + CLOGD("DEBUG(%s[%d]): Reprocessing frame delete(%d)", __FUNCTION__, __LINE__, (*frame)->getFrameCount()); + + (*frame)->decRef(); + m_frameMgr->deleteFrame(*frame); + } + } + + return NO_ERROR; +} + +status_t ExynosCamera::m_clearList(List *list) +{ + Mutex::Autolock lock(m_searchframeLock); + int ret = 0; + ExynosCameraFrame *curFrame = NULL; + List::iterator r; + + CLOGD("DEBUG(%s):remaining frame(%zd), we remove them all", __FUNCTION__, list->size()); + + while (!list->empty()) { + r = list->begin()++; + curFrame = *r; + if (curFrame != NULL) { + CLOGV("DEBUG(%s):remove frame count %d", __FUNCTION__, curFrame->getFrameCount() ); + curFrame->decRef(); + m_frameMgr->deleteFrame(curFrame); + curFrame = NULL; + } + list->erase(r); + } + CLOGD("DEBUG(%s):EXIT ", __FUNCTION__); + + return NO_ERROR; +} + +status_t ExynosCamera::m_clearList(frame_queue_t *queue) +{ + Mutex::Autolock lock(m_searchframeLock); + int ret = 0; + ExynosCameraFrame *curFrame = NULL; + + CLOGD("DEBUG(%s):remaining frame(%d), we remove them all", __FUNCTION__, queue->getSizeOfProcessQ()); + + while (0 < queue->getSizeOfProcessQ()) { + queue->popProcessQ(&curFrame); + if (curFrame != NULL) { + CLOGV("DEBUG(%s):remove frame count %d", __FUNCTION__, curFrame->getFrameCount() ); + curFrame->decRef(); + m_frameMgr->deleteFrame(curFrame); + curFrame = NULL; + } + } + CLOGD("DEBUG(%s):EXIT ", __FUNCTION__); + + return NO_ERROR; +} + +status_t ExynosCamera::m_clearFrameQ(frame_queue_t *frameQ, uint32_t pipeId, uint32_t dstPipeId, uint32_t direction) { + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraBuffer deleteSccBuffer; + ExynosCameraBufferManager *bufferMgr = NULL; + int ret = NO_ERROR; + + if (frameQ == NULL) { + CLOGE("ERR(%s[%d]):frameQ is NULL.", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]): IN... 
frameQSize(%d)", __FUNCTION__, __LINE__, frameQ->getSizeOfProcessQ()); + + while (0 < frameQ->getSizeOfProcessQ()) { + newFrame = NULL; + ret = frameQ->popProcessQ(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + continue; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + continue; + } + + if (direction == SRC_BUFFER_DIRECTION) { + ret = newFrame->getSrcBuffer(pipeId, &deleteSccBuffer); + } else { + if(m_previewFrameFactory == NULL) { + return INVALID_OPERATION; + } + ret = newFrame->getDstBuffer(pipeId, &deleteSccBuffer, m_previewFrameFactory->getNodeType(dstPipeId)); + } + + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + continue; + } + + ret = m_getBufferManager(pipeId, &bufferMgr, direction); + if (ret < 0) + CLOGE("ERR(%s[%d]):getBufferManager(SRC) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + + /* put SCC buffer */ + CLOGD("DEBUG(%s)(%d):m_putBuffer by clearjpegthread(dstSccRe), index(%d)", __FUNCTION__, __LINE__, deleteSccBuffer.index); + ret = m_putBuffers(bufferMgr, deleteSccBuffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]):bufferMgr->putBuffers() fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + } + + return ret; +} + +status_t ExynosCamera::m_printFrameList(List *list) +{ + int ret = 0; + ExynosCameraFrame *curFrame = NULL; + List::iterator r; + + CLOGD("++++++++++++++++++++++++++++++++++++++++++++++++++++++++++"); + CLOGD("\t remaining frame count(%zd)", list->size()); + + r = list->begin()++; + + do { + curFrame = *r; + if (curFrame != NULL) { + CLOGI("\t hal frame count %d", curFrame->getFrameCount() ); + curFrame->printEntity(); + } + + r++; + } while (r != list->end()); + CLOGD("----------------------------------------------------------------------------"); + + return NO_ERROR; +} + +status_t ExynosCamera::m_createIonAllocator(ExynosCameraIonAllocator **allocator) +{ + status_t ret = NO_ERROR; + int retry = 0; + do { + retry++; + CLOGI("INFO(%s[%d]):try(%d) to create IonAllocator", __FUNCTION__, __LINE__, retry); + *allocator = new ExynosCameraIonAllocator(); + ret = (*allocator)->init(false); + if (ret < 0) + CLOGE("ERR(%s[%d]):create IonAllocator fail (retryCount=%d)", __FUNCTION__, __LINE__, retry); + else { + CLOGD("DEBUG(%s[%d]):m_createIonAllocator success (allocator=%p)", __FUNCTION__, __LINE__, *allocator); + break; + } + } while (ret < 0 && retry < 3); + + if (ret < 0 && retry >=3) { + CLOGE("ERR(%s[%d]):create IonAllocator fail (retryCount=%d)", __FUNCTION__, __LINE__, retry); + ret = INVALID_OPERATION; + } + + return ret; +} + +status_t ExynosCamera::m_createInternalBufferManager(ExynosCameraBufferManager **bufferManager, const char *name) +{ + return m_createBufferManager(bufferManager, name, BUFFER_MANAGER_ION_TYPE); +} + +status_t ExynosCamera::m_createBufferManager( + ExynosCameraBufferManager **bufferManager, + const char *name, + buffer_manager_type type) +{ + status_t ret = NO_ERROR; + + if (m_ionAllocator == NULL) { + ret = m_createIonAllocator(&m_ionAllocator); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_createIonAllocator fail", __FUNCTION__, __LINE__); + else + CLOGD("DEBUG(%s[%d]):m_createIonAllocator success", __FUNCTION__, __LINE__); + } + + *bufferManager = ExynosCameraBufferManager::createBufferManager(type); + (*bufferManager)->create(name, m_cameraId, m_ionAllocator); + + CLOGD("DEBUG(%s):BufferManager(%s) 
created", __FUNCTION__, name); + + return ret; +} + +status_t ExynosCamera::m_setPreviewCallbackBuffer(void) +{ + int ret = 0; + int previewW = 0, previewH = 0; + int previewFormat = 0; + m_parameters->getPreviewSize(&previewW, &previewH); + previewFormat = m_parameters->getPreviewFormat(); + + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + + int planeCount = getYuvPlaneCount(previewFormat); + int bufferCount = 1; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + + if (m_previewCallbackBufferMgr == NULL) { + CLOGE("ERR(%s[%d]): m_previewCallbackBufferMgr is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (m_previewCallbackBufferMgr->isAllocated() == true) { + if (m_parameters->getRestartPreview() == true) { + CLOGD("DEBUG(%s[%d]): preview size is changed, realloc buffer", __FUNCTION__, __LINE__); + m_previewCallbackBufferMgr->deinit(); + } else { + return NO_ERROR; + } + } + + ret = getYuvPlaneSize(previewFormat, planeSize, previewW, previewH); + if (ret < 0) { + CLOGE("ERR(%s[%d]): BAD value, format(%x), size(%dx%d)", + __FUNCTION__, __LINE__, previewFormat, previewW, previewH); + return ret; + } + + ret = m_allocBuffers(m_previewCallbackBufferMgr, planeCount, planeSize, bytesPerLine, bufferCount, bufferCount, type, false, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_previewCallbackBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, bufferCount); + return ret; + } + + return NO_ERROR; +} + +bool ExynosCamera::m_startPictureBufferThreadFunc(void) +{ + int ret = 0; + + ret = m_setPictureBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setPictureBuffer failed", __FUNCTION__, __LINE__); + + /* TODO: Need release buffers and error exit */ + + return false; + } + + return false; +} + +status_t ExynosCamera::m_putBuffers(ExynosCameraBufferManager *bufManager, int bufIndex) +{ + if (bufManager != NULL) + bufManager->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + + return NO_ERROR; +} + +status_t ExynosCamera::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int reqBufCount, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + ret = m_allocBuffers( + bufManager, + planeCount, + planeSize, + bytePerLine, + reqBufCount, + reqBufCount, + EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE, + BUFFER_MANAGER_ALLOCATION_ATONCE, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_allocBuffers(reqBufCount=%d) fail", + __FUNCTION__, __LINE__, reqBufCount); + } + + return ret; +} + +status_t ExynosCamera::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + ret = m_allocBuffers( + bufManager, + planeCount, + planeSize, + bytePerLine, + minBufCount, + maxBufCount, + type, + BUFFER_MANAGER_ALLOCATION_ONDEMAND, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_allocBuffers(minBufCount=%d, maxBufCount=%d, type=%d) fail", + __FUNCTION__, __LINE__, minBufCount, maxBufCount, type); + } + + return ret; +} + +status_t ExynosCamera::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + 
exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + CLOGI("INFO(%s[%d]):setInfo(planeCount=%d, minBufCount=%d, maxBufCount=%d, type=%d, allocMode=%d)", + __FUNCTION__, __LINE__, planeCount, minBufCount, maxBufCount, (int)type, (int)allocMode); + + ret = bufManager->setInfo( + planeCount, + planeSize, + bytePerLine, + 0, + minBufCount, + maxBufCount, + type, + allocMode, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setInfo fail", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = bufManager->alloc(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):alloc fail", __FUNCTION__, __LINE__); + goto func_exit; + } + +func_exit: + + return ret; +} + +status_t ExynosCamera::m_checkThreadState(int *threadState, int *countRenew) +{ + int ret = NO_ERROR; + + if ((*threadState == ERROR_POLLING_DETECTED) || (*countRenew > ERROR_DQ_BLOCKED_COUNT)) { + CLOGW("WRN(%s[%d]:SCP DQ Timeout! State:[%d], Duration:%d msec", __FUNCTION__, __LINE__, *threadState, (*countRenew)*(MONITOR_THREAD_INTERVAL/1000)); + ret = false; + } else { + CLOGV("[%s] (%d) (%d)", __FUNCTION__, __LINE__, *threadState); + ret = NO_ERROR; + } + + return ret; +} + +status_t ExynosCamera::m_checkThreadInterval(uint32_t pipeId, uint32_t pipeInterval, int *threadState) +{ + uint64_t *threadInterval; + int ret = NO_ERROR; + + m_previewFrameFactory->getThreadInterval(&threadInterval, pipeId); + if (*threadInterval > pipeInterval) { + CLOGW("WRN(%s[%d]:Pipe(%d) Thread Interval [%lld msec], State:[%d]", __FUNCTION__, __LINE__, pipeId, (*threadInterval)/1000, *threadState); + ret = false; + } else { + CLOGV("Thread IntervalTime [%lld]", *threadInterval); + CLOGV("Thread Renew state [%d]", *threadState); + ret = NO_ERROR; + } + + return ret; +} + +#ifdef MONITOR_LOG_SYNC +uint32_t ExynosCamera::m_getSyncLogId(void) +{ + return ++cameraSyncLogId; +} +#endif + +status_t ExynosCamera::dump(__unused int fd) const +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +void ExynosCamera::dump() +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + m_printExynosCameraInfo(__FUNCTION__); + + if (m_previewFrameFactory != NULL) + m_previewFrameFactory->dump(); + + if (m_bayerBufferMgr != NULL) + m_bayerBufferMgr->dump(); + if (m_3aaBufferMgr != NULL) + m_3aaBufferMgr->dump(); + if (m_ispBufferMgr != NULL) + m_ispBufferMgr->dump(); + if (m_hwDisBufferMgr != NULL) + m_hwDisBufferMgr->dump(); + if (m_scpBufferMgr != NULL) + m_scpBufferMgr->dump(); + if (m_vraBufferMgr != NULL) + m_vraBufferMgr->dump(); + + if (m_ispReprocessingBufferMgr != NULL) + m_ispReprocessingBufferMgr->dump(); + if (m_sccReprocessingBufferMgr != NULL) + m_sccReprocessingBufferMgr->dump(); + if (m_sccBufferMgr != NULL) + m_sccBufferMgr->dump(); + if (m_gscBufferMgr != NULL) + m_gscBufferMgr->dump(); + if (m_jpegBufferMgr != NULL) + m_jpegBufferMgr->dump(); + if (m_thumbnailBufferMgr != NULL) + m_thumbnailBufferMgr->dump(); + + return; +} + +uint32_t ExynosCamera::m_getBayerPipeId(void) +{ + uint32_t pipeId = 0; + + if (m_parameters->getUsePureBayerReprocessing() == true) { + pipeId = PIPE_FLITE; + } else { + pipeId = PIPE_3AA; + } +#ifdef DEBUG_RAWDUMP + pipeId = PIPE_FLITE; +#endif + return pipeId; +} + +void ExynosCamera::m_debugFpsCheck(__unused uint32_t pipeId) +{ +#ifdef FPS_CHECK + uint32_t id = pipeId % DEBUG_MAX_PIPE_NUM; + + m_debugFpsCount[id]++; + if (m_debugFpsCount[id] == 1) { + m_debugFpsTimer[id].start(); + } + if (m_debugFpsCount[id] == 
31) { + m_debugFpsTimer[id].stop(); + long long durationTime = m_debugFpsTimer[id].durationMsecs(); + CLOGI("INFO(%s[%d]): FPS_CHECK(id:%d), duration %lld / 30 = %lld ms. %lld fps", + __FUNCTION__, __LINE__, pipeId, durationTime, durationTime / 30, 1000 / (durationTime / 30)); + m_debugFpsCount[id] = 0; + } +#endif +} + +status_t ExynosCamera::m_convertingStreamToShotExt(ExynosCameraBuffer *buffer, struct camera2_node_output *outputInfo) +{ +/* TODO: HACK: Will be removed, this is driver's job */ + status_t ret = NO_ERROR; + int bayerFrameCount = 0; + camera2_shot_ext *shot_ext = NULL; + camera2_stream *shot_stream = NULL; + + shot_stream = (struct camera2_stream *)buffer->addr[buffer->planeCount-1]; + bayerFrameCount = shot_stream->fcount; + outputInfo->cropRegion[0] = shot_stream->output_crop_region[0]; + outputInfo->cropRegion[1] = shot_stream->output_crop_region[1]; + outputInfo->cropRegion[2] = shot_stream->output_crop_region[2]; + outputInfo->cropRegion[3] = shot_stream->output_crop_region[3]; + + memset(buffer->addr[buffer->planeCount-1], 0x0, sizeof(struct camera2_shot_ext)); + + shot_ext = (struct camera2_shot_ext *)buffer->addr[buffer->planeCount-1]; + shot_ext->shot.dm.request.frameCount = bayerFrameCount; + + return ret; +} + +status_t ExynosCamera::m_checkBufferAvailable(uint32_t pipeId, ExynosCameraBufferManager *bufferMgr) +{ + status_t ret = TIMED_OUT; + int retry = 0; + + do { + ret = -1; + retry++; + if (bufferMgr->getNumOfAvailableBuffer() > 0) { + ret = OK; + } else { + /* wait available ISP buffer */ + usleep(WAITING_TIME); + } + if (retry % 10 == 0) + CLOGW("WRAN(%s[%d]):retry(%d) setupEntity for pipeId(%d)", __FUNCTION__, __LINE__, retry, pipeId); + } while(ret < 0 && retry < (TOTAL_WAITING_TIME/WAITING_TIME) && m_stopBurstShot == false); + + return ret; +} + +status_t ExynosCamera::m_boostDynamicCapture(void) +{ + status_t ret = NO_ERROR; +#if 0 /* TODO: need to implementation for bayer */ + uint32_t pipeId = (isOwnScc(getCameraId()) == true) ? 
PIPE_SCC : PIPE_ISPC; + uint32_t size = m_processList.size(); + + ExynosCameraFrame *curFrame = NULL; + List::iterator r; + camera2_node_group node_group_info_isp; + + if (m_processList.empty()) { + CLOGD("DEBUG(%s[%d]):m_processList is empty", __FUNCTION__, __LINE__); + return NO_ERROR; + } + CLOGD("DEBUG(%s[%d]):m_processList size(%d)", __FUNCTION__, __LINE__, m_processList.size()); + r = m_processList.end(); + + for (unsigned int i = 0; i < 3; i++) { + r--; + if (r == m_processList.begin()) + break; + + } + + curFrame = *r; + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is empty", __FUNCTION__); + return INVALID_OPERATION; + } + + if (curFrame->getRequest(pipeId) == true) { + CLOGD("DEBUG(%s[%d]): Boosting dynamic capture is not need", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + CLOGI("INFO(%s[%d]): boosting dynamic capture (frameCount: %d)", __FUNCTION__, __LINE__, curFrame->getFrameCount()); + /* For ISP */ + curFrame->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + m_updateBoostDynamicCaptureSize(&node_group_info_isp); + curFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + + curFrame->setRequest(pipeId, true); + curFrame->setNumRequestPipe(curFrame->getNumRequestPipe() + 1); + + ret = curFrame->setEntityState(pipeId, ENTITY_STATE_REWORK); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState fail, pipeId(%d), state(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, ENTITY_STATE_REWORK, ret); + return ret; + } + + m_previewFrameFactory->pushFrameToPipe(&curFrame, pipeId); + m_dynamicSccCount++; + CLOGV("DEBUG(%s[%d]): dynamicSccCount inc(%d) frameCount(%d)", __FUNCTION__, __LINE__, m_dynamicSccCount, curFrame->getFrameCount()); +#endif + + return ret; +} + +void ExynosCamera::m_updateBoostDynamicCaptureSize(__unused camera2_node_group *node_group_info) +{ +#if 0 /* TODO: need to implementation for bayer */ + ExynosRect sensorSize; + ExynosRect bayerCropSize; + + node_group_info->capture[PERFRAME_BACK_SCC_POS].request = 1; + + m_parameters->getPreviewBayerCropSize(&sensorSize, &bayerCropSize); + + node_group_info->leader.input.cropRegion[0] = bayerCropSize.x; + node_group_info->leader.input.cropRegion[1] = bayerCropSize.y; + node_group_info->leader.input.cropRegion[2] = bayerCropSize.w; + node_group_info->leader.input.cropRegion[3] = bayerCropSize.h; + node_group_info->leader.output.cropRegion[0] = 0; + node_group_info->leader.output.cropRegion[1] = 0; + node_group_info->leader.output.cropRegion[2] = node_group_info->leader.input.cropRegion[2]; + node_group_info->leader.output.cropRegion[3] = node_group_info->leader.input.cropRegion[3]; + + /* Capture 0 : SCC - [scaling] */ + node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[0] = node_group_info->leader.output.cropRegion[0]; + node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[1] = node_group_info->leader.output.cropRegion[1]; + node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[2] = node_group_info->leader.output.cropRegion[2]; + node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[3] = node_group_info->leader.output.cropRegion[3]; + + node_group_info->capture[PERFRAME_BACK_SCC_POS].output.cropRegion[0] = node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[0]; + node_group_info->capture[PERFRAME_BACK_SCC_POS].output.cropRegion[1] = node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[1]; + node_group_info->capture[PERFRAME_BACK_SCC_POS].output.cropRegion[2] = 
node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[2]; + node_group_info->capture[PERFRAME_BACK_SCC_POS].output.cropRegion[3] = node_group_info->capture[PERFRAME_BACK_SCC_POS].input.cropRegion[3]; + + /* Capture 1 : SCP - [scaling] */ + node_group_info->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[0] = node_group_info->leader.output.cropRegion[0]; + node_group_info->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[1] = node_group_info->leader.output.cropRegion[1]; + node_group_info->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[2] = node_group_info->leader.output.cropRegion[2]; + node_group_info->capture[PERFRAME_BACK_SCP_POS].input.cropRegion[3] = node_group_info->leader.output.cropRegion[3]; + +#endif + return; +} + +void ExynosCamera::m_checkFpsAndUpdatePipeWaitTime(void) +{ + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + frame_queue_t *inputFrameQ = NULL; + + m_parameters->getPreviewFpsRange(&curMinFps, &curMaxFps); + + if (m_curMinFps != curMinFps) { + CLOGD("DEBUG(%s[%d]):(%d)(%d)", __FUNCTION__, __LINE__, curMinFps, curMaxFps); + + enum pipeline pipe = (m_parameters->isOwnScc(getCameraId()) == true) ? PIPE_SCC : PIPE_ISPC; + + m_previewFrameFactory->getInputFrameQToPipe(&inputFrameQ, pipe); + + /* 100ms * (30 / 15 fps) = 200ms */ + /* 100ms * (30 / 30 fps) = 100ms */ + /* 100ms * (30 / 10 fps) = 300ms */ + if (inputFrameQ != NULL && curMinFps != 0) + inputFrameQ->setWaitTime(((100000000 / curMinFps) * 30)); + } + + m_curMinFps = curMinFps; + + return; +} + +void ExynosCamera::m_printExynosCameraInfo(const char *funcName) +{ + int w = 0; + int h = 0; + ExynosRect srcRect, dstRect; + + CLOGD("DEBUG(%s[%d]):===================================================", __FUNCTION__, __LINE__); + CLOGD("DEBUG(%s[%d]):============= ExynosCameraInfo call by %s", __FUNCTION__, __LINE__, funcName); + CLOGD("DEBUG(%s[%d]):===================================================", __FUNCTION__, __LINE__); + + CLOGD("DEBUG(%s[%d]):============= Scenario ============================", __FUNCTION__, __LINE__); + CLOGD("DEBUG(%s[%d]):= getCameraId : %d", __FUNCTION__, __LINE__, m_parameters->getCameraId()); + CLOGD("DEBUG(%s[%d]):= getDualMode : %d", __FUNCTION__, __LINE__, m_parameters->getDualMode()); + CLOGD("DEBUG(%s[%d]):= getScalableSensorMode : %d", __FUNCTION__, __LINE__, m_parameters->getScalableSensorMode()); + CLOGD("DEBUG(%s[%d]):= getRecordingHint : %d", __FUNCTION__, __LINE__, m_parameters->getRecordingHint()); + CLOGD("DEBUG(%s[%d]):= getEffectRecordingHint : %d", __FUNCTION__, __LINE__, m_parameters->getEffectRecordingHint()); + CLOGD("DEBUG(%s[%d]):= getDualRecordingHint : %d", __FUNCTION__, __LINE__, m_parameters->getDualRecordingHint()); + CLOGD("DEBUG(%s[%d]):= getAdaptiveCSCRecording : %d", __FUNCTION__, __LINE__, m_parameters->getAdaptiveCSCRecording()); + CLOGD("DEBUG(%s[%d]):= doCscRecording : %d", __FUNCTION__, __LINE__, m_parameters->doCscRecording()); + CLOGD("DEBUG(%s[%d]):= needGSCForCapture : %d", __FUNCTION__, __LINE__, m_parameters->needGSCForCapture(getCameraId())); + CLOGD("DEBUG(%s[%d]):= getShotMode : %d", __FUNCTION__, __LINE__, m_parameters->getShotMode()); + CLOGD("DEBUG(%s[%d]):= getTpuEnabledMode : %d", __FUNCTION__, __LINE__, m_parameters->getTpuEnabledMode()); + CLOGD("DEBUG(%s[%d]):= getHWVdisMode : %d", __FUNCTION__, __LINE__, m_parameters->getHWVdisMode()); + CLOGD("DEBUG(%s[%d]):= get3dnrMode : %d", __FUNCTION__, __LINE__, m_parameters->get3dnrMode()); + + CLOGD("DEBUG(%s[%d]):============= Internal setting ====================", 
__FUNCTION__, __LINE__); + CLOGD("DEBUG(%s[%d]):= isFlite3aaOtf : %d", __FUNCTION__, __LINE__, m_parameters->isFlite3aaOtf()); + CLOGD("DEBUG(%s[%d]):= is3aaIspOtf : %d", __FUNCTION__, __LINE__, m_parameters->is3aaIspOtf()); + CLOGD("DEBUG(%s[%d]):= isReprocessing : %d", __FUNCTION__, __LINE__, m_parameters->isReprocessing()); + CLOGD("DEBUG(%s[%d]):= isReprocessing3aaIspOTF : %d", __FUNCTION__, __LINE__, m_parameters->isReprocessing3aaIspOTF()); + CLOGD("DEBUG(%s[%d]):= getUsePureBayerReprocessing : %d", __FUNCTION__, __LINE__, m_parameters->getUsePureBayerReprocessing()); + + int reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + switch(reprocessingBayerMode) { + case REPROCESSING_BAYER_MODE_NONE: + CLOGD("DEBUG(%s[%d]):= getReprocessingBayerMode : REPROCESSING_BAYER_MODE_NONE", __FUNCTION__, __LINE__); + break; + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON: + CLOGD("DEBUG(%s[%d]):= getReprocessingBayerMode : REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON", __FUNCTION__, __LINE__); + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON: + CLOGD("DEBUG(%s[%d]):= getReprocessingBayerMode : REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON", __FUNCTION__, __LINE__); + break; + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC: + CLOGD("DEBUG(%s[%d]):= getReprocessingBayerMode : REPROCESSING_BAYER_MODE_PURE_DYNAMIC", __FUNCTION__, __LINE__); + break; + case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC: + CLOGD("DEBUG(%s[%d]):= getReprocessingBayerMode : REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC", __FUNCTION__, __LINE__); + break; + default: + CLOGD("DEBUG(%s[%d]):= getReprocessingBayerMode : unexpected mode %d", __FUNCTION__, __LINE__, reprocessingBayerMode); + break; + } + + CLOGD("DEBUG(%s[%d]):= isSccCapture : %d", __FUNCTION__, __LINE__, m_parameters->isSccCapture()); + + CLOGD("DEBUG(%s[%d]):============= size setting =======================", __FUNCTION__, __LINE__); + m_parameters->getMaxSensorSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getMaxSensorSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + m_parameters->getHwSensorSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getHwSensorSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + m_parameters->getBnsSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getBnsSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + m_parameters->getPreviewBayerCropSize(&srcRect, &dstRect); + CLOGD("DEBUG(%s[%d]):= getPreviewBayerCropSize : (%d, %d, %d, %d) -> (%d, %d, %d, %d)", __FUNCTION__, __LINE__, + srcRect.x, srcRect.y, srcRect.w, srcRect.h, + dstRect.x, dstRect.y, dstRect.w, dstRect.h); + + m_parameters->getPreviewBdsSize(&dstRect); + CLOGD("DEBUG(%s[%d]):= getPreviewBdsSize : (%d, %d, %d, %d)", __FUNCTION__, __LINE__, + dstRect.x, dstRect.y, dstRect.w, dstRect.h); + + m_parameters->getHwPreviewSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getHwPreviewSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + m_parameters->getPreviewSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getPreviewSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + m_parameters->getPictureBayerCropSize(&srcRect, &dstRect); + CLOGD("DEBUG(%s[%d]):= getPictureBayerCropSize : (%d, %d, %d, %d) -> (%d, %d, %d, %d)", __FUNCTION__, __LINE__, + srcRect.x, srcRect.y, srcRect.w, srcRect.h, + dstRect.x, dstRect.y, dstRect.w, dstRect.h); + + m_parameters->getPictureBdsSize(&dstRect); + CLOGD("DEBUG(%s[%d]):= getPictureBdsSize : (%d, %d, %d, %d)", __FUNCTION__, __LINE__, + dstRect.x, dstRect.y, dstRect.w, dstRect.h); + + m_parameters->getHwPictureSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getHwPictureSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + 
m_parameters->getPictureSize(&w, &h); + CLOGD("DEBUG(%s[%d]):= getPictureSize : %d x %d", __FUNCTION__, __LINE__, w, h); + + CLOGD("DEBUG(%s[%d]):===================================================", __FUNCTION__, __LINE__); +} + +status_t ExynosCamera::m_copyMetaFrameToFrame(ExynosCameraFrame *srcframe, ExynosCameraFrame *dstframe, bool useDm, bool useUdm) +{ + Mutex::Autolock lock(m_metaCopyLock); + + memset(m_tempshot, 0x00, sizeof(struct camera2_shot_ext)); + if(useDm) { + srcframe->getDynamicMeta(m_tempshot); + dstframe->storeDynamicMeta(m_tempshot); + } + + if(useUdm) { + srcframe->getUserDynamicMeta(m_tempshot); + dstframe->storeUserDynamicMeta(m_tempshot); + } + + return NO_ERROR; +} +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCamera.h b/libcamera/34xx/hal1/ExynosCamera.h new file mode 100644 index 0000000..0556047 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCamera.h @@ -0,0 +1,597 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_HW_IMPLEMENTATION_H +#define EXYNOS_CAMERA_HW_IMPLEMENTATION_H + +#include "ExynosCameraConfig.h" +#include "ExynosCameraDefine.h" +#include "ExynosCamera1Parameters.h" +#include "ExynosCameraFrameFactory.h" +#include "ExynosCameraFrameFactoryPreview.h" +#include "ExynosCameraFrameFactory3aaIspM2M.h" +#include "ExynosCameraFrameFactory3aaIspM2MTpu.h" +#include "ExynosCameraFrameFactory3aaIspOtf.h" +#include "ExynosCameraFrameFactory3aaIspOtfTpu.h" +#include "ExynosCameraFrameReprocessingFactory.h" +#include "ExynosCameraFrameReprocessingFactoryNV21.h" +#include "ExynosCameraFrameFactoryVision.h" +#include "ExynosCameraFrameFactoryFront.h" + +#ifdef BURST_CAPTURE +#define BURST_SAVE_PATH_PHONE "/data/media/0" +#define BURST_SAVE_PATH_EXT "/mnt/extSdCard" +#define BURST_CAPTURE_FILEPATH_SIZE 100 +#endif + +#ifdef BURST_CAPTURE +#include +#include + +#include +#include +#endif + +#ifdef TOUCH_AE +#define AE_RESULT 0xF351 +#endif + +namespace android { + +typedef ExynosCameraList framefactory_queue_t; + +class ExynosCamera { +public: + ExynosCamera() {}; + ExynosCamera(int cameraId, camera_device_t *dev); + virtual ~ExynosCamera(); + void initialize(); + status_t setPreviewWindow(preview_stream_ops *w); + void setCallbacks( + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void *user); + + void enableMsgType(int32_t msgType); + void disableMsgType(int32_t msgType); + bool msgTypeEnabled(int32_t msgType); + + status_t startPreview(); + void stopPreview(); + bool previewEnabled(); + + status_t storeMetaDataInBuffers(bool enable); + + status_t startRecording(); + void stopRecording(); + bool recordingEnabled(); + void releaseRecordingFrame(const void *opaque); + + status_t autoFocus(); + status_t cancelAutoFocus(); + + status_t takePicture(); + status_t cancelPicture(); + + status_t setParameters(const CameraParameters& params); + 
CameraParameters getParameters() const; + status_t sendCommand(int32_t command, int32_t arg1, int32_t arg2); + + int getMaxNumDetectedFaces(void); + bool startFaceDetection(void); + bool stopFaceDetection(void); + + bool m_startFaceDetection(bool toggle); + int m_calibratePosition(int w, int new_w, int pos); + status_t m_doFdCallbackFunc(ExynosCameraFrame *frame); + void release(); + + status_t dump(int fd) const; + void dump(void); + + int getCameraId() const; + int getShotBufferIdex() const; + status_t generateFrame(int32_t frameCount, ExynosCameraFrame **newFrame); + status_t generateFrameSccScp(uint32_t pipeId, uint32_t *frameCount, ExynosCameraFrame **newFrame); + + status_t generateFrameReprocessing(ExynosCameraFrame **newFrame); + + /* vision */ + status_t generateFrameVision(int32_t frameCount, ExynosCameraFrame **newFrame); + + void vendorSpecificConstructor(int cameraId, camera_device_t *dev); + void vendorSpecificDestructor(void); + +private: + void m_createThreads(void); + + status_t m_startPreviewInternal(void); + status_t m_stopPreviewInternal(void); + + status_t m_restartPreviewInternal(void); + + status_t m_startPictureInternal(void); + status_t m_stopPictureInternal(void); + + status_t m_startRecordingInternal(void); + status_t m_stopRecordingInternal(void); + + status_t m_searchFrameFromList(List *list, uint32_t frameCount, ExynosCameraFrame **frame); + status_t m_removeFrameFromList(List *list, ExynosCameraFrame *frame); + status_t m_deleteFrame(ExynosCameraFrame **frame); + + status_t m_clearList(List *list); + status_t m_clearList(frame_queue_t *queue); + status_t m_clearFrameQ(frame_queue_t *frameQ, uint32_t pipeId, uint32_t dstPipeId, uint32_t direction); + + status_t m_printFrameList(List *list); + + status_t m_createIonAllocator(ExynosCameraIonAllocator **allocator); + status_t m_createInternalBufferManager(ExynosCameraBufferManager **bufferManager, const char *name); + status_t m_createBufferManager( + ExynosCameraBufferManager **bufferManager, + const char *name, + buffer_manager_type type = BUFFER_MANAGER_ION_TYPE); + + status_t m_setFrameManager(); + + status_t m_setConfigInform(); + status_t m_setBuffers(void); + status_t m_setReprocessingBuffer(void); + status_t m_setPreviewCallbackBuffer(void); + status_t m_setPictureBuffer(void); + status_t m_releaseBuffers(void); + + status_t m_putBuffers(ExynosCameraBufferManager *bufManager, int bufIndex); + status_t m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int reqBufCount, + bool createMetaPlane, + bool needMmap = false); + status_t m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + bool createMetaPlane, + bool needMmap = false); + status_t m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + bool needMmap = false); + + status_t m_handlePreviewFrame(ExynosCameraFrame *frame); + status_t m_handlePreviewFrameFront(ExynosCameraFrame *frame); + status_t m_handlePreviewFrameFrontDual(ExynosCameraFrame *frame); + + status_t m_setupEntity( + uint32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer *srcBuf = NULL, + ExynosCameraBuffer *dstBuf = NULL); + 
status_t m_setSrcBuffer( + uint32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer *buffer); + status_t m_setDstBuffer( + uint32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer *buffer); + + status_t m_getBufferManager(uint32_t pipeId, ExynosCameraBufferManager **bufMgr, uint32_t direction); + + status_t m_calcPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t m_calcHighResolutionPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t m_calcRecordingGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t m_calcPictureRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t m_calcPictureRect(int originW, int originH, ExynosRect *srcRect, ExynosRect *dstRect); + + status_t m_setCallbackBufferInfo(ExynosCameraBuffer *callbackBuf, char *baseAddr); + + status_t m_doPreviewToCallbackFunc( + int32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer previewBuf, + ExynosCameraBuffer callbackBuf); + status_t m_doCallbackToPreviewFunc( + int32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer callbackBuf, + ExynosCameraBuffer previewBuf); + status_t m_doPrviewToRecordingFunc( + int32_t pipeId, + ExynosCameraBuffer previewBuf, + ExynosCameraBuffer recordingBuf, + nsecs_t timeStamp); + status_t m_doZoomPrviewWithCSC(int32_t pipeId, int32_t gscPipe, ExynosCameraFrame *frame); + status_t m_releaseRecordingBuffer(int bufIndex); + + camera_memory_t *m_getJpegCallbackHeap(ExynosCameraBuffer callbackBuf, int seriesShotNumber); + + void m_debugFpsCheck(uint32_t pipeId); + + uint32_t m_getBayerPipeId(void); + + status_t m_convertingStreamToShotExt(ExynosCameraBuffer *buffer, struct camera2_node_output *outputInfo); + + status_t m_getBayerBuffer(uint32_t pipeId, ExynosCameraBuffer *buffer, camera2_shot_ext *updateDmShot = NULL); + status_t m_checkBufferAvailable(uint32_t pipeId, ExynosCameraBufferManager *bufferMgr); + + status_t m_boostDynamicCapture(void); + void m_updateBoostDynamicCaptureSize(camera2_node_group *node_group_info); + void m_checkFpsAndUpdatePipeWaitTime(void); + void m_printExynosCameraInfo(const char *funcName); + + status_t m_setupFrameFactory(void); + status_t m_initFrameFactory(void); + status_t m_deinitFrameFactory(void); + void m_checkEntranceLux(struct camera2_shot_ext *meta_shot_ext); + status_t m_copyMetaFrameToFrame(ExynosCameraFrame *srcframe, ExynosCameraFrame *dstframe, bool useDm, bool useUdm); + +public: + + ExynosCameraFrameFactory *m_frameFactory[FRAME_FACTORY_TYPE_MAX]; + + ExynosCamera1Parameters *m_parameters; + ExynosCameraFrameFactory *m_previewFrameFactory; + ExynosCameraGrallocAllocator *m_grAllocator; + ExynosCameraIonAllocator *m_ionAllocator; + ExynosCameraMHBAllocator *m_mhbAllocator; + + ExynosCameraFrameFactory *m_pictureFrameFactory; + + ExynosCameraFrameFactory *m_reprocessingFrameFactory; + mutable Mutex m_frameLock; + mutable Mutex m_searchframeLock; + + preview_stream_ops *m_previewWindow; + bool m_previewEnabled; + bool m_pictureEnabled; + bool m_recordingEnabled; + bool m_zslPictureEnabled; + bool m_use_companion; + bool m_checkFirstFrameLux; + ExynosCameraActivityControl *m_exynosCameraActivityControl; + + /* vision */ + ExynosCameraFrameFactory *m_visionFrameFactory; + +private: + camera_device_t *m_dev; + typedef ExynosCameraThread mainCameraThread; + + uint32_t m_cameraId; + uint32_t m_cameraSensorId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + camera_notify_callback m_notifyCb; + camera_data_callback m_dataCb; + camera_data_timestamp_callback m_dataCbTimestamp; + 
camera_request_memory m_getMemoryCb; + void *m_callbackCookie; + + List m_processList; + List m_postProcessList; + List m_recordingProcessList; + frame_queue_t *m_pipeFrameDoneQ; + + framefactory_queue_t *m_frameFactoryQ; + sp m_framefactoryThread; + bool m_frameFactoryInitThreadFunc(void); + + /* vision */ + frame_queue_t *m_pipeFrameVisionDoneQ; + + sp m_mainThread; + bool m_mainThreadFunc(void); + + frame_queue_t *m_previewQ; + frame_queue_t *m_previewCallbackGscFrameDoneQ; + sp m_previewThread; + frame_queue_t *m_mainSetupQ[MAX_NUM_PIPES]; + sp m_mainSetupQThread[MAX_NUM_PIPES]; + bool m_previewThreadFunc(void); + + frame_queue_t *m_vraThreadQ; + frame_queue_t *m_vraGscDoneQ; + frame_queue_t *m_vraPipeDoneQ; + sp m_vraThread; + bool m_vraThreadFunc(void); + + sp m_setBuffersThread; + bool m_setBuffersThreadFunc(void); + + sp m_startPictureInternalThread; + bool m_startPictureInternalThreadFunc(void); + + sp m_startPictureBufferThread; + bool m_startPictureBufferThreadFunc(void); + + sp m_autoFocusThread; + bool m_autoFocusThreadFunc(void); + bool m_autoFocusResetNotify(int focusMode); + mutable Mutex m_autoFocusLock; + mutable Mutex m_captureLock; + mutable Mutex m_recordingListLock; + mutable Mutex m_recordingStopLock; + bool m_exitAutoFocusThread; + bool m_autoFocusRunning; + int m_autoFocusType; + + typedef ExynosCameraList worker_queue_t; + worker_queue_t m_autoFocusContinousQ; + sp m_autoFocusContinousThread; + bool m_autoFocusContinousThreadFunc(void); + + ExynosCameraBufferManager *m_bayerBufferMgr; + ExynosCameraBufferManager *m_3aaBufferMgr; + ExynosCameraBufferManager *m_ispBufferMgr; + ExynosCameraBufferManager *m_hwDisBufferMgr; + ExynosCameraBufferManager *m_sccBufferMgr; + ExynosCameraBufferManager *m_scpBufferMgr; + ExynosCameraBufferManager *m_vraBufferMgr; + + uint32_t m_fliteFrameCount; + uint32_t m_3aa_ispFrameCount; + uint32_t m_ispFrameCount; + uint32_t m_sccFrameCount; + uint32_t m_scpFrameCount; + + int m_frameSkipCount; + + ExynosCameraFrameManager *m_frameMgr; + + bool m_isSuccessedBufferAllocation; + + uint32_t m_vraRunningCount; + /* for Recording */ + bool m_doCscRecording; + int m_recordingBufferCount; + frame_queue_t *m_recordingQ; + nsecs_t m_lastRecordingTimeStamp; + nsecs_t m_recordingStartTimeStamp; + + ExynosCameraBufferManager *m_recordingBufferMgr; + camera_memory_t *m_recordingCallbackHeap; + + bool m_recordingBufAvailable[MAX_BUFFERS]; + nsecs_t m_recordingTimeStamp[MAX_BUFFERS]; + sp m_recordingThread; + bool m_recordingThreadFunc(void); + + mutable Mutex m_recordingStateLock; + bool m_getRecordingEnabled(void); + void m_setRecordingEnabled(bool enable); + + ExynosCameraBufferManager *m_previewCallbackBufferMgr; + ExynosCameraBufferManager *m_highResolutionCallbackBufferMgr; + + /* Pre picture Thread */ + sp m_prePictureThread; + bool m_reprocessingPrePictureInternal(void); + bool m_prePictureInternal(bool* pIsProcessed); + bool m_prePictureThreadFunc(void); + + sp m_pictureThread; + bool m_pictureThreadFunc(void); + + sp m_postPictureThread; + bool m_postPictureThreadFunc(void); + + sp m_jpegCallbackThread; + bool m_jpegCallbackThreadFunc(void); + void m_clearJpegCallbackThread(bool callFromJpeg); + + bool m_releasebuffersForRealloc(void); + bool m_CheckBurstJpegSavingPath(char *dir); + + /* Reprocessing Buffer Managers */ + ExynosCameraBufferManager *m_ispReprocessingBufferMgr; + + ExynosCameraBufferManager *m_sccReprocessingBufferMgr; + + ExynosCameraBufferManager *m_thumbnailBufferMgr; + + /* TODO: will be removed when SCC scaling for picture 
size */ + ExynosCameraBufferManager *m_gscBufferMgr; + + ExynosCameraBufferManager *m_jpegBufferMgr; + + ExynosCameraCounter m_takePictureCounter; + ExynosCameraCounter m_reprocessingCounter; + ExynosCameraCounter m_pictureCounter; + ExynosCameraCounter m_jpegCounter; + + /* Reprocessing Q */ + frame_queue_t *dstIspReprocessingQ; + frame_queue_t *dstSccReprocessingQ; + frame_queue_t *dstGscReprocessingQ; + +#ifdef RAWDUMP_CAPTURE + frame_queue_t *m_RawCaptureDumpQ; + sp m_RawCaptureDumpThread; + bool m_RawCaptureDumpThreadFunc(void); +#endif + frame_queue_t *dstJpegReprocessingQ; + + frame_queue_t *m_postPictureQ; + jpeg_callback_queue_t *m_jpegCallbackQ; + postview_callback_queue_t *m_postviewCallbackQ; + thumbnail_callback_queue_t *m_thumbnailCallbackQ; + + bool m_flagStartFaceDetection; + bool m_flagLLSStart; + bool m_flagLightCondition; + camera_face_t m_faces[NUM_OF_DETECTED_FACES]; + camera_frame_metadata_t m_frameMetadata; + camera_memory_t *m_fdCallbackHeap; + + bool m_faceDetected; + int m_fdThreshold; + + frame_queue_t *m_facedetectQ; + sp m_facedetectThread; + bool m_facedetectThreadFunc(void); + + ExynosCameraScalableSensor m_scalableSensorMgr; + + /* Watch Dog Thread */ + sp m_monitorThread; + bool m_monitorThreadFunc(void); +#ifdef MONITOR_LOG_SYNC + static uint32_t cameraSyncLogId; + int m_syncLogDuration; + uint32_t m_getSyncLogId(void); +#endif + bool m_disablePreviewCB; + bool m_flagThreadStop; + status_t m_checkThreadState(int *threadState, int *countRenew); + status_t m_checkThreadInterval(uint32_t pipeId, uint32_t pipeInterval, int *threadState); + unsigned int m_callbackState; + unsigned int m_callbackStateOld; + int m_callbackMonitorCount; + bool m_isNeedAllocPictureBuffer; + +#ifdef FPS_CHECK + /* TODO: */ +#define DEBUG_MAX_PIPE_NUM 10 + int32_t m_debugFpsCount[DEBUG_MAX_PIPE_NUM]; + ExynosCameraDurationTimer m_debugFpsTimer[DEBUG_MAX_PIPE_NUM]; +#endif + + ExynosCameraFrameSelector *m_captureSelector; + ExynosCameraFrameSelector *m_sccCaptureSelector; + + sp m_jpegSaveThread[JPEG_SAVE_THREAD_MAX_COUNT]; + bool m_jpegSaveThreadFunc(void); + + jpeg_callback_queue_t *m_jpegSaveQ[JPEG_SAVE_THREAD_MAX_COUNT]; + +#ifdef BURST_CAPTURE + bool m_isCancelBurstCapture; + int m_burstCaptureCallbackCount; + mutable Mutex m_burstCaptureCallbackCountLock; + mutable Mutex m_burstCaptureSaveLock; + ExynosCameraDurationTimer m_burstSaveTimer; + long long m_burstSaveTimerTime; + int m_burstDuration; + bool m_burstInitFirst; + bool m_burstRealloc; + char m_burstSavePath[BURST_CAPTURE_FILEPATH_SIZE]; + int m_burstShutterLocation; +#endif + +#ifdef USE_PREVIEW_DURATION_CONTROL + ExynosCameraDurationTimer PreviewDurationTimer; + uint64_t PreviewDurationTime; +#endif + +#ifdef PREVIEW_DURATION_DEBUG + ExynosCameraDurationTimer PreviewDurationDebugTimer; +#endif + + bool m_stopBurstShot; + bool m_burst[JPEG_SAVE_THREAD_MAX_COUNT]; + bool m_running[JPEG_SAVE_THREAD_MAX_COUNT]; + + bool m_isZSLCaptureOn; + + /* high resolution preview callback */ + sp m_highResolutionCallbackThread; + bool m_highResolutionCallbackThreadFunc(void); + + frame_queue_t *m_highResolutionCallbackQ; + bool m_highResolutionCallbackRunning; + bool m_skipReprocessing; + int m_skipCount; + bool m_resetPreview; + + uint32_t m_displayPreviewToggle; + + bool m_hdrEnabled; + unsigned int m_hdrSkipedFcount; + bool m_isFirstStart; + uint32_t m_dynamicSccCount; + bool m_isTryStopFlash; + uint32_t m_curMinFps; + + /* vision */ + status_t m_startVisionInternal(void); + status_t m_stopVisionInternal(void); + + status_t 
m_setVisionBuffers(void); + status_t m_setVisionCallbackBuffer(void); + + sp m_visionThread; + bool m_visionThreadFunc(void); + int m_visionFps; + int m_visionAe; + + /* shutter callback */ + sp m_shutterCallbackThread; + bool m_shutterCallbackThreadFunc(void); + + int m_previewBufferCount; + struct ExynosConfigInfo *m_exynosconfig; +#if 1 + uint32_t m_hackForAlignment; +#endif + uint32_t m_recordingFrameSkipCount; + + /* CTS2.0 */ + uint32_t m_oldPreviewW; + uint32_t m_oldPreviewH; + + bool m_mainThreadQSetup3AA_ISP(); + bool m_mainThreadQSetupISP(); + bool m_mainThreadQSetupFLITE(); + bool m_mainThreadQSetup3AC(); + bool m_mainThreadQSetupSCP(); + bool m_mainThreadQSetup3AA(); + + status_t m_startCompanion(void); + status_t m_stopCompanion(void); + status_t m_waitCompanionThreadEnd(void); + + frame_queue_t *m_zoomPreviwWithCscQ; + +#ifdef FIRST_PREVIEW_TIME_CHECK + ExynosCameraDurationTimer m_firstPreviewTimer; + bool m_flagFirstPreviewTimerOn; +#endif + mutable Mutex m_metaCopyLock; + struct camera2_shot_ext *m_tempshot; + struct camera2_shot_ext *m_fdmeta_shot; + struct camera2_shot_ext *m_meta_shot; +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/34xx/hal1/ExynosCamera1Parameters.cpp b/libcamera/34xx/hal1/ExynosCamera1Parameters.cpp new file mode 100644 index 0000000..a168046 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCamera1Parameters.cpp @@ -0,0 +1,6360 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera1Parameters" +#include + +#include "ExynosCamera1Parameters.h" + +namespace android { + +ExynosCamera1Parameters::ExynosCamera1Parameters(int cameraId, __unused bool flagCompanion, int halVersion) +{ + m_cameraId = cameraId; + m_halVersion = halVersion; + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? "ParametersBack" : "ParametersFront"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + m_staticInfo = createExynosCamera1SensorInfo(cameraId); + m_useSizeTable = (m_staticInfo->sizeTableSupport) ? USE_CAMERA_SIZE_TABLE : false; + m_useAdaptiveCSCRecording = (cameraId == CAMERA_ID_BACK) ? USE_ADAPTIVE_CSC_RECORDING : false; + + m_exynosconfig = NULL; + m_activityControl = new ExynosCameraActivityControl(m_cameraId); + + memset(&m_cameraInfo, 0, sizeof(struct exynos_camera_info)); + memset(&m_exifInfo, 0, sizeof(m_exifInfo)); + + m_initMetadata(); + + m_setExifFixedAttribute(); + + m_exynosconfig = new ExynosConfigInfo(); + + mDebugInfo.num_of_appmarker = 1; /* Default : APP4 */ + mDebugInfo.idx[0][0] = APP_MARKER_4; /* matching the app marker 4 */ + + mDebugInfo.debugSize[APP_MARKER_4] = sizeof(struct camera2_udm); + + mDebugInfo.debugData[APP_MARKER_4] = new char[mDebugInfo.debugSize[APP_MARKER_4]]; + memset((void *)mDebugInfo.debugData[APP_MARKER_4], 0, mDebugInfo.debugSize[APP_MARKER_4]); + memset((void *)m_exynosconfig, 0x00, sizeof(struct ExynosConfigInfo)); + + // CAUTION!! 
: Initial values must be prior to setDefaultParameter() function. + // Initial Values : START + m_IsThumbnailCallbackOn = false; + m_fastFpsMode = 0; + m_previewRunning = false; + m_previewSizeChanged = false; + m_pictureRunning = false; + m_recordingRunning = false; + m_flagRestartPreviewChecked = false; + m_flagRestartPreview = false; + m_reallocBuffer = false; + m_setFocusmodeSetting = false; + m_flagMeteringRegionChanged = false; + m_flagCheckDualMode = false; + m_flagHWVDisMode = false; + m_flagVideoStabilization = false; + m_flag3dnrMode = false; + + m_flagCheckRecordingHint = false; + m_zoomWithScaler = false; + + m_useDynamicBayer = (cameraId == CAMERA_ID_BACK) ? USE_DYNAMIC_BAYER : USE_DYNAMIC_BAYER_FRONT; + m_useDynamicBayerVideoSnapShot = + (cameraId == CAMERA_ID_BACK) ? USE_DYNAMIC_BAYER_VIDEO_SNAP_SHOT : USE_DYNAMIC_BAYER_VIDEO_SNAP_SHOT_FRONT; + m_useDynamicScc = (cameraId == CAMERA_ID_BACK) ? USE_DYNAMIC_SCC_REAR : USE_DYNAMIC_SCC_FRONT; + m_useFastenAeStable = (cameraId == CAMERA_ID_BACK) ? USE_FASTEN_AE_STABLE : false; + + /* we cannot know now, whether recording mode or not */ + /* + if (getRecordingHint() == true || getDualRecordingHint() == true) + m_usePureBayerReprocessing = (cameraId == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_RECORDING; + else + */ + m_usePureBayerReprocessing = (cameraId == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING : USE_PURE_BAYER_REPROCESSING_FRONT; + + m_enabledMsgType = 0; + + m_previewBufferCount = NUM_PREVIEW_BUFFERS; + + m_dvfsLock = false; + +#ifdef USE_BINNING_MODE + m_binningProperty = checkProperty(false); +#endif +#ifdef USE_PREVIEW_CROP_FOR_ROATAION + m_rotationProperty = checkRotationProperty(); +#endif + m_zoom_activated = false; + m_exposureTimeCapture = 0; + m_isFactoryMode = false; + + // Initial Values : END + setDefaultCameraInfo(); + setDefaultParameter(); +} + +ExynosCamera1Parameters::~ExynosCamera1Parameters() +{ + if (m_staticInfo != NULL) { + delete m_staticInfo; + m_staticInfo = NULL; + } + + if (m_activityControl != NULL) { + delete m_activityControl; + m_activityControl = NULL; + } + + for(int i = 0; i < mDebugInfo.num_of_appmarker; i++) { + if(mDebugInfo.debugData[mDebugInfo.idx[i][0]]) + delete mDebugInfo.debugData[mDebugInfo.idx[i][0]]; + mDebugInfo.debugData[mDebugInfo.idx[i][0]] = NULL; + mDebugInfo.debugSize[mDebugInfo.idx[i][0]] = 0; + } + + if (m_exynosconfig != NULL) { + memset((void *)m_exynosconfig, 0x00, sizeof(struct ExynosConfigInfo)); + delete m_exynosconfig; + m_exynosconfig = NULL; + } + + if (m_exifInfo.maker_note) { + delete m_exifInfo.maker_note; + m_exifInfo.maker_note = NULL; + } + + if (m_exifInfo.user_comment) { + delete m_exifInfo.user_comment; + m_exifInfo.user_comment = NULL; + } +} + +int ExynosCamera1Parameters::getCameraId(void) +{ + return m_cameraId; +} + +CameraParameters ExynosCamera1Parameters::getParameters() const +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + return m_params; +} + +void ExynosCamera1Parameters::setDefaultCameraInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + m_setHwSensorSize(m_staticInfo->maxSensorW, m_staticInfo->maxSensorH); + m_setHwPreviewSize(m_staticInfo->maxPreviewW, m_staticInfo->maxPreviewH); + m_setHwPictureSize(m_staticInfo->maxPictureW, m_staticInfo->maxPictureH); + + /* Initalize BNS scale ratio, step:500, ex)1500->x1.5 scale down */ + m_setBnsScaleRatio(1000); + /* Initalize Binning scale ratio */ + m_setBinningScaleRatio(1000); + /* Set Default VideoSize to FHD */ + 
m_setVideoSize(1920,1080); +} + +status_t ExynosCamera1Parameters::checkRecordingHint(const CameraParameters& params) +{ + /* recording hint */ + bool recordingHint = false; + const char *newRecordingHint = params.get(CameraParameters::KEY_RECORDING_HINT); + + if (newRecordingHint != NULL) { + CLOGD("DEBUG(%s):newRecordingHint : %s", "setParameters", newRecordingHint); + + recordingHint = (strcmp(newRecordingHint, "true") == 0) ? true : false; + + m_setRecordingHint(recordingHint); + + m_params.set(CameraParameters::KEY_RECORDING_HINT, newRecordingHint); + + } else { + /* to confirm that recordingHint value is checked up (whatever value is) */ + m_setRecordingHint(m_cameraInfo.recordingHint); + + recordingHint = getRecordingHint(); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setRecordingHint(bool hint) +{ + m_cameraInfo.recordingHint = hint; + + if (hint) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_ON); + } else if (!hint && !getDualRecordingHint() && !getEffectRecordingHint()) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_OFF); + } + + /* RecordingHint is confirmed */ + m_flagCheckRecordingHint = true; +} + +bool ExynosCamera1Parameters::getRecordingHint(void) +{ + /* + * Before setParameters, we cannot know recordingHint is valid or not + * So, check and make assert for fast debugging + */ + if (m_flagCheckRecordingHint == false) + android_printAssert(NULL, LOG_TAG, "Cannot call getRecordingHint befor setRecordingHint, assert!!!!"); + + return m_cameraInfo.recordingHint; +} + + +status_t ExynosCamera1Parameters::checkDualMode(const CameraParameters& params) +{ + /* dual_mode */ + bool flagDualMode = false; + int newDualMode = params.getInt("dual_mode"); + + if (newDualMode == 1) { + CLOGD("DEBUG(%s):newDualMode : %d", "setParameters", newDualMode); + flagDualMode = true; + } + + m_setDualMode(flagDualMode); + m_params.set("dual_mode", newDualMode); + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setDualMode(bool dual) +{ + m_cameraInfo.dualMode = dual; + /* dualMode is confirmed */ + m_flagCheckDualMode = true; +} + +bool ExynosCamera1Parameters::getDualMode(void) +{ + /* + * Before setParameters, we cannot know dualMode is valid or not + * So, check and make assert for fast debugging + */ + if (m_flagCheckDualMode == false) + android_printAssert(NULL, LOG_TAG, "Cannot call getDualMode befor checkDualMode, assert!!!!"); + + return m_cameraInfo.dualMode; +} + +status_t ExynosCamera1Parameters::checkDualRecordingHint(const CameraParameters& params) +{ + /* dual recording hint */ + bool flagDualRecordingHint = false; + int newDualRecordingHint = params.getInt("dualrecording-hint"); + + if (newDualRecordingHint == 1) { + CLOGD("DEBUG(%s):newDualRecordingHint : %d", "setParameters", newDualRecordingHint); + flagDualRecordingHint = true; + } + + m_setDualRecordingHint(flagDualRecordingHint); + m_params.set("dualrecording-hint", newDualRecordingHint); + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setDualRecordingHint(bool hint) +{ + m_cameraInfo.dualRecordingHint = hint; + + if (hint) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_ON); + } else if (!hint && !getRecordingHint() && !getEffectRecordingHint()) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_OFF); + } +} + +bool ExynosCamera1Parameters::getDualRecordingHint(void) +{ + return m_cameraInfo.dualRecordingHint; +} + +status_t ExynosCamera1Parameters::checkEffectHint(const CameraParameters& params) +{ + /* effect hint */ + bool flagEffectHint = false; + int newEffectHint = 
params.getInt("effect_hint"); + + if (newEffectHint < 0) + return NO_ERROR; + + if (newEffectHint == 1) { + CLOGD("DEBUG(%s[%d]):newEffectHint : %d", "setParameters", __LINE__, newEffectHint); + flagEffectHint = true; + } + + m_setEffectHint(newEffectHint); + m_params.set("effect_hint", newEffectHint); + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setEffectHint(bool hint) +{ + m_cameraInfo.effectHint = hint; +} + +bool ExynosCamera1Parameters::getEffectHint(void) +{ + return m_cameraInfo.effectHint; +} + +bool ExynosCamera1Parameters::getEffectRecordingHint(void) +{ + return m_cameraInfo.effectRecordingHint; +} + +status_t ExynosCamera1Parameters::checkPreviewFps(const CameraParameters& params) +{ + int ret = 0; + + ret = checkPreviewFpsRange(params); + if (ret == BAD_VALUE) { + CLOGE("ERR(%s): Inavalid value", "setParameters"); + return ret; + } else if (ret != NO_ERROR) { + ret = checkPreviewFrameRate(params); + } + + return ret; +} + +status_t ExynosCamera1Parameters::checkPreviewFpsRange(const CameraParameters& params) +{ + int newMinFps = 0; + int newMaxFps = 0; + int newFrameRate = params.getPreviewFrameRate(); + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + + params.getPreviewFpsRange(&newMinFps, &newMaxFps); + if (newMinFps <= 0 || newMaxFps <= 0 || newMinFps > newMaxFps) { + CLOGE("PreviewFpsRange is invalid, newMin(%d), newMax(%d)", newMinFps, newMaxFps); + return BAD_VALUE; + } + + ALOGI("INFO(%s):Original FpsRange[Min=%d, Max=%d]", __FUNCTION__, newMinFps, newMaxFps); + + if (m_adjustPreviewFpsRange(newMinFps, newMaxFps) != NO_ERROR) { + CLOGE("Fail to adjust preview fps range"); + return INVALID_OPERATION; + } + + newMinFps = newMinFps / 1000; + newMaxFps = newMaxFps / 1000; + if (FRAME_RATE_MAX < newMaxFps || newMaxFps < newMinFps) { + CLOGE("PreviewFpsRange is out of bound"); + return INVALID_OPERATION; + } + + getPreviewFpsRange(&curMinFps, &curMaxFps); + CLOGI("INFO(%s):curFpsRange[Min=%d, Max=%d], newFpsRange[Min=%d, Max=%d], [curFrameRate=%d]", + "checkPreviewFpsRange", curMinFps, curMaxFps, newMinFps, newMaxFps, m_params.getPreviewFrameRate()); + + if (curMinFps != (uint32_t)newMinFps || curMaxFps != (uint32_t)newMaxFps) { + m_setPreviewFpsRange((uint32_t)newMinFps, (uint32_t)newMaxFps); + + char newFpsRange[256]; + memset (newFpsRange, 0, 256); + snprintf(newFpsRange, 256, "%d,%d", newMinFps * 1000, newMaxFps * 1000); + + CLOGI("DEBUG(%s):set PreviewFpsRange(%s)", __FUNCTION__, newFpsRange); + CLOGI("DEBUG(%s):set PreviewFrameRate(curFps=%d->newFps=%d)", __FUNCTION__, m_params.getPreviewFrameRate(), newMaxFps); + m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, newFpsRange); + m_params.setPreviewFrameRate(newMaxFps); + } + + getPreviewFpsRange(&curMinFps, &curMaxFps); + m_activityControl->setFpsValue(curMinFps); + + /* For backword competivity */ + m_params.setPreviewFrameRate(newFrameRate); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::m_adjustPreviewFpsRange(int &newMinFps, int &newMaxFps) +{ + bool flagSpecialMode = false; + int curSceneMode = 0; + int curShotMode = 0; + + if (getDualMode() == true) { + flagSpecialMode = true; + + /* when dual mode, fps is limited by 24fps */ + if (24000 < newMaxFps) + newMaxFps = 24000; + + /* set fixed fps. 
*/ + newMinFps = newMaxFps; + ALOGD("DEBUG(%s[%d]):dualMode(true), newMaxFps=%d", __FUNCTION__, __LINE__, newMaxFps); + } + + if (getDualRecordingHint() == true) { + flagSpecialMode = true; + + /* when dual recording mode, fps is limited by 24fps */ + if (24000 < newMaxFps) + newMaxFps = 24000; + + /* set fixed fps. */ + newMinFps = newMaxFps; + ALOGD("DEBUG(%s[%d]):dualRecordingHint(true), newMaxFps=%d", __FUNCTION__, __LINE__, newMaxFps); + } + + if (getEffectHint() == true) { + flagSpecialMode = true; +#if 0 /* Don't use to set fixed fps in the hal side. */ + /* when effect mode, fps is limited by 24fps */ + if (24000 < newMaxFps) + newMaxFps = 24000; + + /* set fixed fps due to GPU preformance. */ + newMinFps = newMaxFps; +#endif + ALOGD("DEBUG(%s[%d]):effectHint(true), newMaxFps=%d", __FUNCTION__, __LINE__, newMaxFps); + } + + if (getRecordingHint() == true) { + flagSpecialMode = true; +#if 0 /* Don't use to set fixed fps in the hal side. */ +#ifdef USE_VARIABLE_FPS_OF_FRONT_RECORDING + if (getCameraId() == CAMERA_ID_FRONT && getSamsungCamera() == true) { + /* Supported the variable frame rate for Image Quality Performacne */ + ALOGD("DEBUG(%s[%d]):RecordingHint(true),newMinFps=%d,newMaxFps=%d", __FUNCTION__, __LINE__, newMinFps, newMaxFps); + } else +#endif + { + /* set fixed fps. */ + newMinFps = newMaxFps; + } + ALOGD("DEBUG(%s[%d]):RecordingHint(true), newMaxFps=%d", __FUNCTION__, __LINE__, newMaxFps); +#endif + ALOGD("DEBUG(%s[%d]):RecordingHint(true),newMinFps=%d,newMaxFps=%d", __FUNCTION__, __LINE__, newMinFps, newMaxFps); + } + + if (flagSpecialMode == true) { + CLOGD("DEBUG(%s[%d]):special mode enabled, newMaxFps=%d", __FUNCTION__, __LINE__, newMaxFps); + goto done; + } + + curSceneMode = getSceneMode(); + switch (curSceneMode) { + case SCENE_MODE_ACTION: + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps; + } else { + newMinFps = 30000; + newMaxFps = 30000; + } + break; + case SCENE_MODE_PORTRAIT: + case SCENE_MODE_LANDSCAPE: + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps / 2; + } else { + newMinFps = 15000; + newMaxFps = 30000; + } + break; + case SCENE_MODE_NIGHT: + /* for Front MMS mode FPS */ + if (getCameraId() == CAMERA_ID_FRONT && getRecordingHint() == true) + break; + + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps / 4; + } else { + newMinFps = 8000; + newMaxFps = 30000; + } + break; + case SCENE_MODE_NIGHT_PORTRAIT: + case SCENE_MODE_THEATRE: + case SCENE_MODE_BEACH: + case SCENE_MODE_SNOW: + case SCENE_MODE_SUNSET: + case SCENE_MODE_STEADYPHOTO: + case SCENE_MODE_FIREWORKS: + case SCENE_MODE_SPORTS: + case SCENE_MODE_PARTY: + case SCENE_MODE_CANDLELIGHT: + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps / 2; + } else { + newMinFps = 15000; + newMaxFps = 30000; + } + break; + default: + break; + } + + curShotMode = getShotMode(); + switch (curShotMode) { + case SHOT_MODE_DRAMA: + case SHOT_MODE_3DTOUR: + case SHOT_MODE_3D_PANORAMA: + case SHOT_MODE_LIGHT_TRACE: + newMinFps = 30000; + newMaxFps = 30000; + break; + case SHOT_MODE_ANIMATED_SCENE: + newMinFps = 15000; + newMaxFps = 15000; + break; +#ifdef USE_LIMITATION_FOR_THIRD_PARTY + case THIRD_PARTY_BLACKBOX_MODE: + ALOGI("INFO(%s): limit the maximum 30 fps range in THIRD_PARTY_BLACKBOX_MODE(%d,%d)", __FUNCTION__, newMinFps, newMaxFps); + if (newMinFps > 30000) { + newMinFps = 30000; + } + if (newMaxFps > 30000) { + newMaxFps = 30000; + } + break; + case THIRD_PARTY_VTCALL_MODE: + ALOGI("INFO(%s): limit the maximum 15 fps range in 
THIRD_PARTY_VTCALL_MODE(%d,%d)", __FUNCTION__, newMinFps, newMaxFps); + if (newMinFps > 15000) { + newMinFps = 15000; + } + if (newMaxFps > 15000) { + newMaxFps = 15000; + } + break; + case THIRD_PARTY_HANGOUT_MODE: + ALOGI("INFO(%s): change fps range 15000,15000 in THIRD_PARTY_HANGOUT_MODE", __FUNCTION__); + newMinFps = 15000; + newMaxFps = 15000; + break; +#endif + default: + break; + } + +done: + if (newMinFps != newMaxFps) { + if (m_getSupportedVariableFpsList(newMinFps, newMaxFps, &newMinFps, &newMaxFps) == false) + newMinFps = newMaxFps / 2; + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::updatePreviewFpsRange(void) +{ + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + int newMinFps = 0; + int newMaxFps = 0; + + getPreviewFpsRange(&curMinFps, &curMaxFps); + newMinFps = curMinFps * 1000; + newMaxFps = curMaxFps * 1000; + + if (m_adjustPreviewFpsRange(newMinFps, newMaxFps) != NO_ERROR) { + CLOGE("Fils to adjust preview fps range"); + return; + } + + newMinFps = newMinFps / 1000; + newMaxFps = newMaxFps / 1000; + + if (curMinFps != (uint32_t)newMinFps || curMaxFps != (uint32_t)newMaxFps) { + m_setPreviewFpsRange((uint32_t)newMinFps, (uint32_t)newMaxFps); + } +} + +status_t ExynosCamera1Parameters::checkPreviewFrameRate(const CameraParameters& params) +{ + int newFrameRate = params.getPreviewFrameRate(); + int curFrameRate = m_params.getPreviewFrameRate(); + int newMinFps = 0; + int newMaxFps = 0; + int tempFps = 0; + + if (newFrameRate < 0) { + return BAD_VALUE; + } + CLOGD("DEBUG(%s):curFrameRate=%d, newFrameRate=%d", __FUNCTION__, curFrameRate, newFrameRate); + if (newFrameRate != curFrameRate) { + tempFps = newFrameRate * 1000; + + if (m_getSupportedVariableFpsList(tempFps / 2, tempFps, &newMinFps, &newMaxFps) == false) { + newMinFps = tempFps / 2; + newMaxFps = tempFps; + } + + char newFpsRange[256]; + memset (newFpsRange, 0, 256); + snprintf(newFpsRange, 256, "%d,%d", newMinFps, newMaxFps); + m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, newFpsRange); + + if (checkPreviewFpsRange(m_params) == true) { + m_params.setPreviewFrameRate(newFrameRate); + CLOGD("DEBUG(%s):setPreviewFrameRate(newFrameRate=%d)", __FUNCTION__, newFrameRate); + } + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setPreviewFpsRange(uint32_t min, uint32_t max) +{ + setMetaCtlAeTargetFpsRange(&m_metadata, min, max); + setMetaCtlSensorFrameDuration(&m_metadata, (uint64_t)((1000 * 1000 * 1000) / (uint64_t)max)); + + ALOGI("INFO(%s):fps min(%d) max(%d)", __FUNCTION__, min, max); +} + +void ExynosCamera1Parameters::getPreviewFpsRange(uint32_t *min, uint32_t *max) +{ + /* ex) min = 15 , max = 30 */ + getMetaCtlAeTargetFpsRange(&m_metadata, min, max); +} + +bool ExynosCamera1Parameters::m_getSupportedVariableFpsList(int min, int max, int *newMin, int *newMax) +{ + int (*sizeList)[2]; + + if (getCameraId() == CAMERA_ID_BACK) { + /* Try to find exactly same in REAR LIST*/ + sizeList = m_staticInfo->rearFPSList; + for (int i = 0; i < m_staticInfo->rearFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find exactly same in HIDDEN REAR LIST*/ + sizeList = m_staticInfo->hiddenRearFPSList; + for (int i = 0; i < m_staticInfo->hiddenRearFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find similar fps in REAR LIST*/ + sizeList = m_staticInfo->rearFPSList; + 
for (int i = 0; i < m_staticInfo->rearFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW("WARN(%s):calibrate new fps(%d/%d -> %d/%d)", __FUNCTION__, min, max, *newMin, *newMax); + + return true; + } + } + /* Try to find similar fps in HIDDEN REAR LIST*/ + sizeList = m_staticInfo->hiddenRearFPSList; + for (int i = 0; i < m_staticInfo->hiddenRearFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW("WARN(%s):calibrate new fps(%d/%d -> %d/%d)", __FUNCTION__, min, max, *newMin, *newMax); + + return true; + } + } + } else { + /* Try to find exactly same in FRONT LIST*/ + sizeList = m_staticInfo->frontFPSList; + for (int i = 0; i < m_staticInfo->frontFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find exactly same in HIDDEN FRONT LIST*/ + sizeList = m_staticInfo->hiddenFrontFPSList; + for (int i = 0; i < m_staticInfo->hiddenFrontFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find similar fps in FRONT LIST*/ + sizeList = m_staticInfo->frontFPSList; + for (int i = 0; i < m_staticInfo->frontFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW("WARN(%s):calibrate new fps(%d/%d -> %d/%d)", __FUNCTION__, min, max, *newMin, *newMax); + + return true; + } + } + /* Try to find similar fps in HIDDEN FRONT LIST*/ + sizeList = m_staticInfo->hiddenFrontFPSList; + for (int i = 0; i < m_staticInfo->hiddenFrontFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW("WARN(%s):calibrate new fps(%d/%d -> %d/%d)", __FUNCTION__, min, max, *newMin, *newMax); + + return true; + } + } + } + + return false; +} + +status_t ExynosCamera1Parameters::checkVideoSize(const CameraParameters& params) +{ + /* Video size */ + int newVideoW = 0; + int newVideoH = 0; + + params.getVideoSize(&newVideoW, &newVideoH); + + if (0 < newVideoW && 0 < newVideoH && + m_isSupportedVideoSize(newVideoW, newVideoH) == false) { + return BAD_VALUE; + } + + CLOGI("INFO(%s):newVideo Size (%dx%d), ratioId(%d)", + "setParameters", newVideoW, newVideoH, m_cameraInfo.videoSizeRatioId); + m_setVideoSize(newVideoW, newVideoH); + m_params.setVideoSize(newVideoW, newVideoH); + + return NO_ERROR; +} + +bool ExynosCamera1Parameters::m_isSupportedVideoSize(const int width, + const int height) +{ + int maxWidth = 0; + int maxHeight = 0; + int (*sizeList)[SIZE_OF_RESOLUTION]; + + getMaxVideoSize(&maxWidth, &maxHeight); + + if (maxWidth < width || maxHeight < height) { + CLOGE("ERR(%s):invalid video Size(maxSize(%d/%d) size(%d/%d)", + __FUNCTION__, maxWidth, maxHeight, width, height); + return false; + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->rearVideoList; + for (int i = 0; i < m_staticInfo->rearVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == 
height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->frontVideoList; + for (int i = 0; i < m_staticInfo->frontVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearVideoList; + for (int i = 0; i < m_staticInfo->hiddenRearVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->hiddenFrontVideoList; + for (int i = 0; i < m_staticInfo->hiddenFrontVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + CLOGE("ERR(%s):Invalid video size(%dx%d)", __FUNCTION__, width, height); + + return false; +} + +bool ExynosCamera1Parameters::m_isUHDRecordingMode(void) +{ + bool isUHDRecording = false; + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + + if (((videoW == 3840 && videoH == 2160) || (videoW == 2560 && videoH == 1440)) && getRecordingHint() == true) + isUHDRecording = true; + +#if 0 + /* we need to make WQHD SCP(LCD size), when FHD recording for clear rendering */ + int hwPreviewW = 0, hwPreviewH = 0; + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + /* regard align margin(ex:1920x1088), check size more than 1920x1088 */ + /* if (1920 < hwPreviewW && 1080 < hwPreviewH) */ + if ((ALIGN_UP(1920, CAMERA_MAGIC_ALIGN) < hwPreviewW) && + (ALIGN_UP(1080, CAMERA_MAGIC_ALIGN) < hwPreviewH) && + (getRecordingHint() == true)) { + isUHDRecording = true; + } +#endif + + return isUHDRecording; +} + +void ExynosCamera1Parameters::m_setVideoSize(int w, int h) +{ + m_cameraInfo.videoW = w; + m_cameraInfo.videoH = h; +} + +bool ExynosCamera1Parameters::getUHDRecordingMode(void) +{ + return m_isUHDRecordingMode(); +} + +bool ExynosCamera1Parameters::getFaceDetectionMode(bool flagCheckingRecording) +{ + bool ret = true; + + /* turn off when dual mode back camera */ + if (getDualMode() == true && + getCameraId() == CAMERA_ID_BACK) { + ret = false; + } + + /* turn off when vt mode */ + if (getVtMode() != 0) + ret = false; + + /* when stopRecording, ignore recording hint */ + if (flagCheckingRecording == true) { + /* when recording mode*/ + if (getRecordingHint() == true) { + ret = false; + } + } + + return ret; +} + +void ExynosCamera1Parameters::getVideoSize(int *w, int *h) +{ + *w = m_cameraInfo.videoW; + *h = m_cameraInfo.videoH; +} + +void ExynosCamera1Parameters::getMaxVideoSize(int *w, int *h) +{ + *w = m_staticInfo->maxVideoW; + *h = m_staticInfo->maxVideoH; +} + +int ExynosCamera1Parameters::getVideoFormat(void) +{ + if (getAdaptiveCSCRecording() == true) { + return V4L2_PIX_FMT_NV21M; + } else { + return V4L2_PIX_FMT_NV12M; + } +} + +bool ExynosCamera1Parameters::getReallocBuffer() { + Mutex::Autolock lock(m_reallocLock); + return m_reallocBuffer; +} + +bool ExynosCamera1Parameters::setReallocBuffer(bool enable) { + Mutex::Autolock lock(m_reallocLock); + m_reallocBuffer = enable; + return m_reallocBuffer; +} + +status_t ExynosCamera1Parameters::checkFastFpsMode(const CameraParameters& 
params) +{ +#ifdef TEST_GED_HIGH_SPEED_RECORDING + int fastFpsMode = getFastFpsMode(); +#else + int fastFpsMode = params.getInt("fast-fps-mode"); +#endif + int tempShotMode = params.getInt("shot-mode"); + int prevFpsMode = getFastFpsMode(); + + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + uint32_t newMinFps = curMinFps; + uint32_t newMaxFps = curMaxFps; + + bool recordingHint = getRecordingHint(); + bool isShotModeAnimated = false; + bool flagHighSpeed = false; + int newVideoW = 0; + int newVideoH = 0; + + params.getVideoSize(&newVideoW, &newVideoH); + getPreviewFpsRange(&curMinFps, &curMaxFps); + + // Workaround : Should be removed later once application fixes this. + if( (curMinFps == 60 && curMaxFps == 60) && (newVideoW == 1920 && newVideoH == 1080) ) { + fastFpsMode = 1; + } + + CLOGD("DEBUG(%s):fast-fps-mode : %d", "setParameters", fastFpsMode); + +#if (!USE_HIGHSPEED_RECORDING) + fastFpsMode = -1; + CLOGD("DEBUG(%s):fast-fps-mode not supported. set to (%d).", "setParameters", fastFpsMode); +#endif + + CLOGI("INFO(%s):curFpsRange[Min=%d, Max=%d], [curFrameRate=%d]", + "checkPreviewFpsRange", curMinFps, curMaxFps, m_params.getPreviewFrameRate()); + + + if (fastFpsMode <= 0 || fastFpsMode > 3) { + m_setHighSpeedRecording(false); + setConfigMode(CONFIG_MODE::NORMAL); + if( fastFpsMode != prevFpsMode) { + setFastFpsMode(fastFpsMode); + m_params.set("fast-fps-mode", fastFpsMode); + setReallocBuffer(true); + m_setRestartPreviewChecked(true); + } + return NO_ERROR; + } else { + if( fastFpsMode == prevFpsMode ) { + CLOGE("INFO(%s):mode is not changed fastFpsMode(%d) prevFpsMode(%d)", "checkFastFpsMode", fastFpsMode, prevFpsMode); + return NO_ERROR; + } + } + + if (tempShotMode == SHOT_MODE_ANIMATED_SCENE) { + if (curMinFps == 15 && curMaxFps == 15) + isShotModeAnimated = true; + } + + if ((recordingHint == true) && !(isShotModeAnimated)) { + + CLOGD("DEBUG(%s):Set High Speed Recording", "setParameters"); + + switch(fastFpsMode) { + case 1: + newMinFps = 60; + newMaxFps = 60; + setConfigMode(CONFIG_MODE::HIGHSPEED_60); + break; + case 2: + newMinFps = 120; + newMaxFps = 120; + setConfigMode(CONFIG_MODE::HIGHSPEED_120); + break; + case 3: + newMinFps = 240; + newMaxFps = 240; + setConfigMode(CONFIG_MODE::HIGHSPEED_240); + break; + } + setFastFpsMode(fastFpsMode); + m_params.set("fast-fps-mode", fastFpsMode); + + CLOGI("INFO(%s):fastFpsMode(%d) prevFpsMode(%d)", "checkFastFpsMode", fastFpsMode, prevFpsMode); + setReallocBuffer(true); + m_setRestartPreviewChecked(true); + + flagHighSpeed = m_adjustHighSpeedRecording(curMinFps, curMaxFps, newMinFps, newMaxFps); + m_setHighSpeedRecording(flagHighSpeed); + m_setPreviewFpsRange(newMinFps, newMaxFps); + + CLOGI("INFO(%s):m_setPreviewFpsRange(newFpsRange[Min=%d, Max=%d])", "checkFastFpsMode", newMinFps, newMaxFps); +#ifdef TEST_GED_HIGH_SPEED_RECORDING + m_params.setPreviewFrameRate(newMaxFps); + CLOGD("DEBUG(%s):setPreviewFrameRate (newMaxFps=%d)", "checkFastFpsMode", newMaxFps); +#endif + updateHwSensorSize(); + } + + return NO_ERROR; +}; + +void ExynosCamera1Parameters::setFastFpsMode(int fpsMode) +{ + m_fastFpsMode = fpsMode; +} + +int ExynosCamera1Parameters::getFastFpsMode(void) +{ + return m_fastFpsMode; +} + +void ExynosCamera1Parameters::m_setHighSpeedRecording(bool highSpeed) +{ + m_cameraInfo.highSpeedRecording = highSpeed; +} + +bool ExynosCamera1Parameters::getHighSpeedRecording(void) +{ + return m_cameraInfo.highSpeedRecording; +} + +bool ExynosCamera1Parameters::m_adjustHighSpeedRecording(int curMinFps, int curMaxFps, __unused int 
newMinFps, int newMaxFps) +{ + bool flagHighSpeedRecording = false; + bool restartPreview = false; + + /* setting high speed */ + if (30 < newMaxFps) { + flagHighSpeedRecording = true; + /* 30 -> 60/120 */ + if (curMaxFps <= 30) + restartPreview = true; + /* 60 -> 120 */ + else if (curMaxFps <= 60 && 120 <= newMaxFps) + restartPreview = true; + /* 120 -> 60 */ + else if (curMaxFps <= 120 && newMaxFps <= 60) + restartPreview = true; + /* variable 60 -> fixed 60 */ + else if (curMinFps < 60 && newMaxFps <= 60) + restartPreview = true; + /* variable 120 -> fixed 120 */ + else if (curMinFps < 120 && newMaxFps <= 120) + restartPreview = true; + } else if (newMaxFps <= 30) { + flagHighSpeedRecording = false; + if (30 < curMaxFps) + restartPreview = true; + } + + if (restartPreview == true && + getPreviewRunning() == true) { + CLOGD("DEBUG(%s[%d]):setRestartPreviewChecked true", __FUNCTION__, __LINE__); + m_setRestartPreviewChecked(true); + } + + return flagHighSpeedRecording; +} + +void ExynosCamera1Parameters::m_setRestartPreviewChecked(bool restart) +{ + CLOGD("DEBUG(%s):setRestartPreviewChecked(during SetParameters) %s", __FUNCTION__, restart ? "true" : "false"); + Mutex::Autolock lock(m_parameterLock); + + m_flagRestartPreviewChecked = restart; +} + +bool ExynosCamera1Parameters::m_getRestartPreviewChecked(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_flagRestartPreviewChecked; +} + +bool ExynosCamera1Parameters::getPreviewSizeChanged(void) +{ + return m_previewSizeChanged; +} + +void ExynosCamera1Parameters::m_setRestartPreview(bool restart) +{ + CLOGD("DEBUG(%s):setRestartPreview %s", __FUNCTION__, restart ? "true" : "false"); + Mutex::Autolock lock(m_parameterLock); + + m_flagRestartPreview = restart; + +} + +void ExynosCamera1Parameters::setPreviewRunning(bool enable) +{ + Mutex::Autolock lock(m_parameterLock); + + m_previewRunning = enable; + m_flagRestartPreviewChecked = false; + m_flagRestartPreview = false; + m_previewSizeChanged = false; +} + +void ExynosCamera1Parameters::setPictureRunning(bool enable) +{ + Mutex::Autolock lock(m_parameterLock); + + m_pictureRunning = enable; +} + +void ExynosCamera1Parameters::setRecordingRunning(bool enable) +{ + Mutex::Autolock lock(m_parameterLock); + + m_recordingRunning = enable; +} + +bool ExynosCamera1Parameters::getPreviewRunning(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_previewRunning; +} + +bool ExynosCamera1Parameters::getPictureRunning(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_pictureRunning; +} + +bool ExynosCamera1Parameters::getRecordingRunning(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_recordingRunning; +} + +bool ExynosCamera1Parameters::getRestartPreview(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_flagRestartPreview; +} + +status_t ExynosCamera1Parameters::checkVideoStabilization(const CameraParameters& params) +{ + /* video stablization */ + const char *newVideoStabilization = params.get(CameraParameters::KEY_VIDEO_STABILIZATION); + bool currVideoStabilization = m_flagVideoStabilization; + bool isVideoStabilization = false; + + if (newVideoStabilization != NULL) { + CLOGD("DEBUG(%s):newVideoStabilization %s", "setParameters", newVideoStabilization); + + if (!strcmp(newVideoStabilization, "true")) + isVideoStabilization = true; + + if (currVideoStabilization != isVideoStabilization) { + m_flagVideoStabilization = isVideoStabilization; + m_setVideoStabilization(m_flagVideoStabilization); + 
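+            /* updateTpuParameters() later re-applies this flag together with the HW VDIS and 3DNR state */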
m_params.set(CameraParameters::KEY_VIDEO_STABILIZATION, newVideoStabilization); + } + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setVideoStabilization(bool stabilization) +{ + m_cameraInfo.videoStabilization = stabilization; +} + +bool ExynosCamera1Parameters::getVideoStabilization(void) +{ + return m_cameraInfo.videoStabilization; +} + +bool ExynosCamera1Parameters::updateTpuParameters(void) +{ + status_t ret = NO_ERROR; + + /* 1. update data video stabilization state to actual*/ + CLOGD("%s(%d) video stabilization old(%d) new(%d)", __FUNCTION__, __LINE__, m_cameraInfo.videoStabilization, m_flagVideoStabilization); + m_setVideoStabilization(m_flagVideoStabilization); + + bool hwVdisMode = this->getHWVdisMode(); + + if (setDisEnable(hwVdisMode) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDisEnable(%d) fail", __FUNCTION__, __LINE__, hwVdisMode); + } + + /* 2. update data 3DNR state to actual*/ + CLOGD("%s(%d) 3DNR old(%d) new(%d)", __FUNCTION__, __LINE__, m_cameraInfo.is3dnrMode, m_flag3dnrMode); + m_set3dnrMode(m_flag3dnrMode); + if (setDnrEnable(m_flag3dnrMode) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDnrEnable(%d) fail", __FUNCTION__, __LINE__, m_flag3dnrMode); + } + + return true; +} + +status_t ExynosCamera1Parameters::checkPreviewSize(const CameraParameters& params) +{ + /* preview size */ + int previewW = 0; + int previewH = 0; + int newPreviewW = 0; + int newPreviewH = 0; + int newCalHwPreviewW = 0; + int newCalHwPreviewH = 0; + + int curPreviewW = 0; + int curPreviewH = 0; + int curHwPreviewW = 0; + int curHwPreviewH = 0; + + params.getPreviewSize(&previewW, &previewH); + getPreviewSize(&curPreviewW, &curPreviewH); + getHwPreviewSize(&curHwPreviewW, &curHwPreviewH); + m_isHighResolutionMode(params); + + newPreviewW = previewW; + newPreviewH = previewH; + if (m_adjustPreviewSize(previewW, previewH, &newPreviewW, &newPreviewH, &newCalHwPreviewW, &newCalHwPreviewH) != OK) { + ALOGE("ERR(%s): adjustPreviewSize fail, newPreviewSize(%dx%d)", "Parameters", newPreviewW, newPreviewH); + return BAD_VALUE; + } + + if (m_isSupportedPreviewSize(newPreviewW, newPreviewH) == false) { + ALOGE("ERR(%s): new preview size is invalid(%dx%d)", "Parameters", newPreviewW, newPreviewH); + return BAD_VALUE; + } + + ALOGI("INFO(%s):Cur Preview size(%dx%d)", "setParameters", curPreviewW, curPreviewH); + ALOGI("INFO(%s):Cur HwPreview size(%dx%d)", "setParameters", curHwPreviewW, curHwPreviewH); + ALOGI("INFO(%s):param.preview size(%dx%d)", "setParameters", previewW, previewH); + ALOGI("INFO(%s):Adjust Preview size(%dx%d), ratioId(%d)", "setParameters", newPreviewW, newPreviewH, m_cameraInfo.previewSizeRatioId); + ALOGI("INFO(%s):Calibrated HwPreview size(%dx%d)", "setParameters", newCalHwPreviewW, newCalHwPreviewH); + + if (curPreviewW != newPreviewW || curPreviewH != newPreviewH || + curHwPreviewW != newCalHwPreviewW || curHwPreviewH != newCalHwPreviewH || + getHighResolutionCallbackMode() == true) { + m_setPreviewSize(newPreviewW, newPreviewH); + m_setHwPreviewSize(newCalHwPreviewW, newCalHwPreviewH); + + if (getHighResolutionCallbackMode() == true) { + m_previewSizeChanged = false; + } else { + ALOGD("DEBUG(%s):setRestartPreviewChecked true", __FUNCTION__); + m_setRestartPreviewChecked(true); + m_previewSizeChanged = true; + } + } else { + m_previewSizeChanged = false; + } + + updateBinningScaleRatio(); + updateBnsScaleRatio(); + + m_params.setPreviewSize(newPreviewW, newPreviewH); + + return NO_ERROR; +} + +bool ExynosCamera1Parameters::m_isSupportedPreviewSize(const int width, + const int 
height) +{ + int maxWidth, maxHeight = 0; + int (*sizeList)[SIZE_OF_RESOLUTION]; + + if (getHighResolutionCallbackMode() == true) { + CLOGD("DEBUG(%s): Burst panorama mode start", __FUNCTION__); +#if defined(PANORAMA_RATIO) + m_cameraInfo.previewSizeRatioId = PANORAMA_RATIO; +#else + m_cameraInfo.previewSizeRatioId = SIZE_RATIO_16_9; +#endif + return true; + } + + getMaxPreviewSize(&maxWidth, &maxHeight); + + if (maxWidth*maxHeight < width*height) { + CLOGE("ERR(%s):invalid PreviewSize(maxSize(%d/%d) size(%d/%d)", + __FUNCTION__, maxWidth, maxHeight, width, height); + return false; + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->rearPreviewList; + for (int i = 0; i < m_staticInfo->rearPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->frontPreviewList; + for (int i = 0; i < m_staticInfo->frontPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPreviewList; + for (int i = 0; i < m_staticInfo->hiddenRearPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->hiddenFrontPreviewList; + for (int i = 0; i < m_staticInfo->hiddenFrontPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + CLOGE("ERR(%s):Invalid preview size(%dx%d)", __FUNCTION__, width, height); + + return false; +} + +status_t ExynosCamera1Parameters::m_getPreviewSizeList(int *sizeList) +{ + int *tempSizeList = NULL; + + if (getHalVersion() == IS_HAL_VER_3_2) { + /* CAMERA2_API use Video Scenario LUT as a default */ + if (m_staticInfo->videoSizeLut == NULL) { + ALOGE("ERR(%s[%d]):videoSizeLut is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else if (m_staticInfo->videoSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + ALOGE("ERR(%s[%d]):unsupported video ratioId(%d)", + __FUNCTION__, __LINE__, m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } +#if defined(ENABLE_FULL_FRAME) + tempSizeList = m_staticInfo->videoSizeLut[m_cameraInfo.previewSizeRatioId]; +#else + tempSizeList = m_staticInfo->previewSizeLut[m_cameraInfo.previewSizeRatioId]; +#endif + } else { + if (getDualMode() == true) { + if (getDualRecordingHint() == true + && m_staticInfo->dualVideoSizeLut != NULL + && m_cameraInfo.previewSizeRatioId < m_staticInfo->videoSizeLutMax) { + tempSizeList = m_staticInfo->dualVideoSizeLut[m_cameraInfo.previewSizeRatioId]; + } else if (m_staticInfo->dualPreviewSizeLut != NULL + && m_cameraInfo.previewSizeRatioId < m_staticInfo->previewSizeLutMax) { + tempSizeList = m_staticInfo->dualPreviewSizeLut[m_cameraInfo.previewSizeRatioId]; + } else { /* Use Preview LUT as a default */ + if (m_staticInfo->previewSizeLut == NULL) { + ALOGE("ERR(%s[%d]):previewSizeLut is NULL", __FUNCTION__, __LINE__); + return 
INVALID_OPERATION; + } else if (m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + ALOGE("ERR(%s[%d]):unsupported preview ratioId(%d)", + __FUNCTION__, __LINE__, m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->previewSizeLut[m_cameraInfo.previewSizeRatioId]; + } + } else { /* getDualMode() == false */ + if (getRecordingHint() == true) { + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + if (getHighSpeedRecording() == true) { + int fpsmode = 0; + + fpsmode = getConfigMode(); + tempSizeList = getHighSpeedSizeTable(fpsmode); + } +#ifdef USE_BNS_RECORDING + else if (m_staticInfo->videoSizeBnsLut != NULL + && videoW == 1920 && videoH == 1080) { /* Use BNS Recording only for FHD(16:9) */ + if (m_staticInfo->videoSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + ALOGE("ERR(%s[%d]):unsupported video ratioId(%d)", + __FUNCTION__, __LINE__, m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->videoSizeBnsLut[m_cameraInfo.previewSizeRatioId]; + } +#endif + else { /* Normal Recording Mode */ + if (m_staticInfo->videoSizeLut == NULL) { + ALOGE("ERR(%s[%d]):videoSizeLut is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else if (m_staticInfo->videoSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + ALOGE("ERR(%s[%d]):unsupported video ratioId(%d)", + __FUNCTION__, __LINE__, m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->videoSizeLut[m_cameraInfo.previewSizeRatioId]; + } + } +#ifdef USE_BINNING_MODE + else if (getBinningMode() == true) { + /* + * VT mode + * 1: 3G vtmode (176x144, Fixed 7fps) + * 2: LTE or WIFI vtmode (640x480, Fixed 15fps) + */ + int index = 0; + if (m_staticInfo->vtcallSizeLut == NULL + || m_staticInfo->vtcallSizeLutMax == 0) { + ALOGE("ERR(%s[%d]):vtcallSizeLut is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + for (index = 0; index < m_staticInfo->vtcallSizeLutMax; index++) { + if (m_staticInfo->vtcallSizeLut[index][0] == m_cameraInfo.previewSizeRatioId) + break; + } + + if (m_staticInfo->vtcallSizeLutMax <= index) + index = 0; + + tempSizeList = m_staticInfo->vtcallSizeLut[index]; + } +#endif + else { /* Use Preview LUT */ + if (m_staticInfo->previewSizeLut == NULL) { + ALOGE("ERR(%s[%d]):previewSizeLut is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else if (m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + ALOGE("ERR(%s[%d]):unsupported preview ratioId(%d)", + __FUNCTION__, __LINE__, m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->previewSizeLut[m_cameraInfo.previewSizeRatioId]; + } + } + } + + if (tempSizeList == NULL) { + ALOGE("ERR(%s[%d]):fail to get LUT", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + for (int i = 0; i < SIZE_LUT_INDEX_END; i++) + sizeList[i] = tempSizeList[i]; + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_getSWVdisPreviewSize(int w, int h, int *newW, int *newH) +{ + if (w < 0 || h < 0) { + return; + } + + if (w == 1920 && h == 1080) { + *newW = 2304; + *newH = 1296; + } + else if (w == 1280 && h == 720) { + *newW = 1536; + *newH = 864; + } + else { + *newW = ALIGN_UP((w * 6) / 5, CAMERA_ISP_ALIGN); + *newH = ALIGN_UP((h * 6) / 5, CAMERA_ISP_ALIGN); + } +} + +bool ExynosCamera1Parameters::m_isHighResolutionCallbackSize(const int width, const int height) +{ +#if defined(USE_LOW_RESOLUTION_PANORAMA) + m_setHighResolutionCallbackMode(false); + return false; 
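+    /*
+     * With USE_LOW_RESOLUTION_PANORAMA the high-resolution callback path is
+     * disabled unconditionally; otherwise (the #else branch below) the
+     * requested size is compared against highResolutionCallbackW/H from the
+     * static sensor info.
+     */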
+#else + bool highResolutionCallbackMode; + + if (width == m_staticInfo->highResolutionCallbackW && height == m_staticInfo->highResolutionCallbackH) + highResolutionCallbackMode = true; + else + highResolutionCallbackMode = false; + + CLOGD("DEBUG(%s):highResolutionCallSize:%s", "setParameters", + highResolutionCallbackMode == true? "on":"off"); + + m_setHighResolutionCallbackMode(highResolutionCallbackMode); + + return highResolutionCallbackMode; +#endif +} + +void ExynosCamera1Parameters::m_isHighResolutionMode(const CameraParameters& params) +{ +#if defined(USE_LOW_RESOLUTION_PANORAMA) + m_setHighResolutionCallbackMode(false); +#else + bool highResolutionCallbackMode; + int shotmode = params.getInt("shot-mode"); + + if ((getRecordingHint() == false) && (shotmode == SHOT_MODE_PANORAMA)) + highResolutionCallbackMode = true; + else + highResolutionCallbackMode = false; + + ALOGD("DEBUG(%s):highResolutionMode:%s", "setParameters", + highResolutionCallbackMode == true? "on":"off"); + + m_setHighResolutionCallbackMode(highResolutionCallbackMode); +#endif +} + +void ExynosCamera1Parameters::m_setHighResolutionCallbackMode(bool enable) +{ + m_cameraInfo.highResolutionCallbackMode = enable; +} + +bool ExynosCamera1Parameters::getHighResolutionCallbackMode(void) +{ + return m_cameraInfo.highResolutionCallbackMode; +} + +status_t ExynosCamera1Parameters::checkPreviewFormat(const CameraParameters& params) +{ + const char *strNewPreviewFormat = params.getPreviewFormat(); + const char *strCurPreviewFormat = m_params.getPreviewFormat(); + int curHwPreviewFormat = getHwPreviewFormat(); + int newPreviewFormat = 0; + int hwPreviewFormat = 0; + + CLOGD("DEBUG(%s):newPreviewFormat: %s", "setParameters", strNewPreviewFormat); + + if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGB565)) + newPreviewFormat = V4L2_PIX_FMT_RGB565; + else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGBA8888)) + newPreviewFormat = V4L2_PIX_FMT_RGB32; + else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) + newPreviewFormat = V4L2_PIX_FMT_NV21; + else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) + newPreviewFormat = V4L2_PIX_FMT_YVU420; + else if (!strcmp(strNewPreviewFormat, "yuv420sp_custom")) + newPreviewFormat = V4L2_PIX_FMT_NV12T; + else if (!strcmp(strNewPreviewFormat, "yuv422i")) + newPreviewFormat = V4L2_PIX_FMT_YUYV; + else if (!strcmp(strNewPreviewFormat, "yuv422p")) + newPreviewFormat = V4L2_PIX_FMT_YUV422P; + else + newPreviewFormat = V4L2_PIX_FMT_NV21; /* for 3rd party */ + + if (m_adjustPreviewFormat(newPreviewFormat, hwPreviewFormat) != NO_ERROR) { + return BAD_VALUE; + } + + m_setPreviewFormat(newPreviewFormat); + m_params.setPreviewFormat(strNewPreviewFormat); + if (curHwPreviewFormat != hwPreviewFormat) { + m_setHwPreviewFormat(hwPreviewFormat); + CLOGI("INFO(%s[%d]): preview format changed cur(%s) -> new(%s)", "Parameters", __LINE__, strCurPreviewFormat, strNewPreviewFormat); + + if (getPreviewRunning() == true) { + CLOGD("DEBUG(%s[%d]):setRestartPreviewChecked true", __FUNCTION__, __LINE__); + m_setRestartPreviewChecked(true); + } + } + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::m_adjustPreviewFormat(__unused int &previewFormat, int &hwPreviewFormat) +{ +#if 1 + /* HACK : V4L2_PIX_FMT_NV21M is set to FIMC-IS * + * and Gralloc. V4L2_PIX_FMT_YVU420 is just * + * color format for callback frame. 
*/ + hwPreviewFormat = V4L2_PIX_FMT_NV21M; +#else + if (previewFormat == V4L2_PIX_FMT_NV21) + hwPreviewFormat = V4L2_PIX_FMT_NV21M; + else if (previewFormat == V4L2_PIX_FMT_YVU420) + hwPreviewFormat = V4L2_PIX_FMT_YVU420M; +#endif + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_getCropRegion(int *x, int *y, int *w, int *h) +{ + getMetaCtlCropRegion(&m_metadata, x, y, w, h); +} + +void ExynosCamera1Parameters::m_setPreviewSize(int w, int h) +{ + m_cameraInfo.previewW = w; + m_cameraInfo.previewH = h; +} + +void ExynosCamera1Parameters::getPreviewSize(int *w, int *h) +{ + *w = m_cameraInfo.previewW; + *h = m_cameraInfo.previewH; +} + +void ExynosCamera1Parameters::getYuvSize(int *width, int *height, const int index) +{ + *width = m_cameraInfo.yuvWidth[index]; + *height = m_cameraInfo.yuvHeight[index]; +} + +void ExynosCamera1Parameters::getMaxSensorSize(int *w, int *h) +{ + *w = m_staticInfo->maxSensorW; + *h = m_staticInfo->maxSensorH; +} + +void ExynosCamera1Parameters::getSensorMargin(int *w, int *h) +{ + *w = m_staticInfo->sensorMarginW; + *h = m_staticInfo->sensorMarginH; +} + +void ExynosCamera1Parameters::m_adjustSensorMargin(int *sensorMarginW, int *sensorMarginH) +{ + float bnsRatio = 1.00f; + float binningRatio = 1.00f; + float sensorMarginRatio = 1.00f; + + bnsRatio = (float)getBnsScaleRatio() / 1000.00f; + binningRatio = (float)getBinningScaleRatio() / 1000.00f; + sensorMarginRatio = bnsRatio * binningRatio; + if ((int)sensorMarginRatio < 1) { + ALOGW("WARN(%s[%d]):Invalid sensor margin ratio(%f), bnsRatio(%f), binningRatio(%f)", + __FUNCTION__, __LINE__, sensorMarginRatio, bnsRatio, binningRatio); + sensorMarginRatio = 1.00f; + } + + if (getHalVersion() != IS_HAL_VER_3_2) { + *sensorMarginW = ALIGN_DOWN((int)(*sensorMarginW / sensorMarginRatio), 2); + *sensorMarginH = ALIGN_DOWN((int)(*sensorMarginH / sensorMarginRatio), 2); + } else { + int leftMargin = 0, rightMargin = 0, topMargin = 0, bottomMargin = 0; + + rightMargin = ALIGN_DOWN((int)(m_staticInfo->sensorMarginBase[WIDTH_BASE] / sensorMarginRatio), 2); + leftMargin = m_staticInfo->sensorMarginBase[LEFT_BASE] + rightMargin; + bottomMargin = ALIGN_DOWN((int)(m_staticInfo->sensorMarginBase[HEIGHT_BASE] / sensorMarginRatio), 2); + topMargin = m_staticInfo->sensorMarginBase[TOP_BASE] + bottomMargin; + + *sensorMarginW = leftMargin + rightMargin; + *sensorMarginH = topMargin + bottomMargin; + } +} + +void ExynosCamera1Parameters::getMaxPreviewSize(int *w, int *h) +{ + *w = m_staticInfo->maxPreviewW; + *h = m_staticInfo->maxPreviewH; +} + +void ExynosCamera1Parameters::m_setPreviewFormat(int fmt) +{ + m_cameraInfo.previewFormat = fmt; +} + +int ExynosCamera1Parameters::getPreviewFormat(void) +{ + return m_cameraInfo.previewFormat; +} + +void ExynosCamera1Parameters::m_setHwPreviewSize(int w, int h) +{ + m_cameraInfo.hwPreviewW = w; + m_cameraInfo.hwPreviewH = h; +} + +void ExynosCamera1Parameters::getHwPreviewSize(int *w, int *h) +{ + if (m_cameraInfo.scalableSensorMode != true) { + *w = m_cameraInfo.hwPreviewW; + *h = m_cameraInfo.hwPreviewH; + } else { + int newSensorW = 0; + int newSensorH = 0; + m_getScalableSensorSize(&newSensorW, &newSensorH); + + *w = newSensorW; + *h = newSensorH; +/* + * Should not use those value + * *w = 1024; + * *h = 768; + * *w = 1440; + * *h = 1080; + */ + *w = m_cameraInfo.hwPreviewW; + *h = m_cameraInfo.hwPreviewH; + } +} + +void ExynosCamera1Parameters::setHwPreviewStride(int stride) +{ + m_cameraInfo.previewStride = stride; +} + +int ExynosCamera1Parameters::getHwPreviewStride(void) 
+{ + return m_cameraInfo.previewStride; +} + +void ExynosCamera1Parameters::m_setHwPreviewFormat(int fmt) +{ + m_cameraInfo.hwPreviewFormat = fmt; +} + +int ExynosCamera1Parameters::getHwPreviewFormat(void) +{ + return m_cameraInfo.hwPreviewFormat; +} + +void ExynosCamera1Parameters::updateHwSensorSize(void) +{ + int curHwSensorW = 0; + int curHwSensorH = 0; + int newHwSensorW = 0; + int newHwSensorH = 0; + int maxHwSensorW = 0; + int maxHwSensorH = 0; + + getHwSensorSize(&newHwSensorW, &newHwSensorH); + getMaxSensorSize(&maxHwSensorW, &maxHwSensorH); + + if (newHwSensorW > maxHwSensorW || newHwSensorH > maxHwSensorH) { + CLOGE("ERR(%s):Invalid sensor size (maxSize(%d/%d) size(%d/%d)", + __FUNCTION__, maxHwSensorW, maxHwSensorH, newHwSensorW, newHwSensorH); + } + + if (getHighSpeedRecording() == true) { +#if 0 + int sizeList[SIZE_LUT_INDEX_END]; + m_getHighSpeedRecordingSize(sizeList); + newHwSensorW = sizeList[SENSOR_W]; + newHwSensorH = sizeList[SENSOR_H]; +#endif + } else if (getScalableSensorMode() == true) { + m_getScalableSensorSize(&newHwSensorW, &newHwSensorH); + } else { + getBnsSize(&newHwSensorW, &newHwSensorH); + } + + getHwSensorSize(&curHwSensorW, &curHwSensorH); + CLOGI("INFO(%s):curHwSensor size(%dx%d) newHwSensor size(%dx%d)", __FUNCTION__, curHwSensorW, curHwSensorH, newHwSensorW, newHwSensorH); + if (curHwSensorW != newHwSensorW || curHwSensorH != newHwSensorH) { + m_setHwSensorSize(newHwSensorW, newHwSensorH); + CLOGI("INFO(%s):newHwSensor size(%dx%d)", __FUNCTION__, newHwSensorW, newHwSensorH); + } +} + +void ExynosCamera1Parameters::m_setHwSensorSize(int w, int h) +{ + m_cameraInfo.hwSensorW = w; + m_cameraInfo.hwSensorH = h; +} + +void ExynosCamera1Parameters::getHwSensorSize(int *w, int *h) +{ + ALOGV("INFO(%s[%d]) getScalableSensorMode()(%d)", __FUNCTION__, __LINE__, getScalableSensorMode()); + int width = 0; + int height = 0; + int sizeList[SIZE_LUT_INDEX_END]; + + if (m_cameraInfo.scalableSensorMode != true) { + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == true + && m_staticInfo->previewSizeLut != NULL + && m_cameraInfo.previewSizeRatioId < m_staticInfo->previewSizeLutMax + && m_getPreviewSizeList(sizeList) == NO_ERROR) { + + width = sizeList[SENSOR_W]; + height = sizeList[SENSOR_H]; + + } else { + width = m_cameraInfo.hwSensorW; + height = m_cameraInfo.hwSensorH; + } + } else { + m_getScalableSensorSize(&width, &height); + } + + *w = width; + *h = height; +} + +void ExynosCamera1Parameters::updateBnsScaleRatio(void) +{ + int ret = 0; + uint32_t bnsRatio = DEFAULT_BNS_RATIO * 1000; + int curPreviewW = 0, curPreviewH = 0; + + if (m_staticInfo->bnsSupport == false) + return; + + getPreviewSize(&curPreviewW, &curPreviewH); + if (getDualMode() == true) { +#if defined(USE_BNS_DUAL_PREVIEW) || defined(USE_BNS_DUAL_RECORDING) + bnsRatio = 2000; +#endif + } else if ((getRecordingHint() == true) +/* || (curPreviewW == curPreviewH)*/) { +#ifdef USE_BNS_RECORDING + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + + if ((getHighSpeedRecording() == true)) { + bnsRatio = 1000; + } else if (videoW == 1920 && videoH == 1080) { + bnsRatio = 1500; + ALOGI("INFO(%s[%d]):bnsRatio(%d), videoSize (%d, %d)", + __FUNCTION__, __LINE__, bnsRatio, videoW, videoH); + } else +#endif + { + bnsRatio = 1000; + } + if (bnsRatio != getBnsScaleRatio()) { + CLOGI("INFO(%s[%d]): restart set due to changing bnsRatio(%d/%d)", + __FUNCTION__, __LINE__, bnsRatio, getBnsScaleRatio()); + m_setRestartPreview(true); + } + } +#ifdef USE_BINNING_MODE + else if 
(getBinningMode() == true) { + bnsRatio = 1000; + } +#endif + + if (bnsRatio != getBnsScaleRatio()) + ret = m_setBnsScaleRatio(bnsRatio); + + if (ret < 0) + CLOGE("ERR(%s[%d]): Cannot update BNS scale ratio(%d)", __FUNCTION__, __LINE__, bnsRatio); +} + +status_t ExynosCamera1Parameters::m_setBnsScaleRatio(int ratio) +{ +#define MIN_BNS_RATIO 1000 +#define MAX_BNS_RATIO 8000 + + if (m_staticInfo->bnsSupport == false) { + CLOGD("DEBUG(%s[%d]): This camera does not support BNS", __FUNCTION__, __LINE__); + ratio = MIN_BNS_RATIO; + } + + if (ratio < MIN_BNS_RATIO || ratio > MAX_BNS_RATIO) { + CLOGE("ERR(%s[%d]): Out of bound, ratio(%d), min:max(%d:%d)", __FUNCTION__, __LINE__, ratio, MAX_BNS_RATIO, MAX_BNS_RATIO); + return BAD_VALUE; + } + + CLOGD("DEBUG(%s[%d]): update BNS ratio(%d -> %d)", __FUNCTION__, __LINE__, m_cameraInfo.bnsScaleRatio, ratio); + + m_cameraInfo.bnsScaleRatio = ratio; + + /* When BNS scale ratio is changed, reset BNS size to MAX sensor size */ + getMaxSensorSize(&m_cameraInfo.bnsW, &m_cameraInfo.bnsH); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::m_addHiddenResolutionList(String8 &string8Buf, __unused struct ExynosSensorInfoBase *sensorInfo, + int w, int h, enum MODE mode, int cameraId) + +{ + status_t ret = NO_ERROR; + bool found = false; + + int (*sizeList)[SIZE_OF_RESOLUTION]; + int listSize = 0; + + switch (mode) { + case MODE_PREVIEW: + if (cameraId == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPreviewList; + listSize = m_staticInfo->hiddenRearPreviewListMax; + } else { + sizeList = m_staticInfo->hiddenFrontPreviewList; + listSize = m_staticInfo->hiddenFrontPreviewListMax; + } + break; + case MODE_PICTURE: + if (cameraId == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPictureList; + listSize = m_staticInfo->hiddenRearPictureListMax; + } else { + sizeList = m_staticInfo->hiddenFrontPictureList; + listSize = m_staticInfo->hiddenFrontPictureListMax; + } + break; + case MODE_VIDEO: + if (cameraId == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearVideoList; + listSize = m_staticInfo->hiddenRearVideoListMax; + } else { + sizeList = m_staticInfo->hiddenFrontVideoList; + listSize = m_staticInfo->hiddenFrontVideoListMax; + } + break; + default: + CLOGE("ERR(%s[%d]): invalid mode(%d)", __FUNCTION__, __LINE__, mode); + return BAD_VALUE; + break; + } + + for (int i = 0; i < listSize; i++) { + if (w == sizeList[i][0] && h == sizeList[i][1]) { + found = true; + break; + } + } + + if (found == true) { + String8 uhdTempStr; + char strBuf[32]; + + snprintf(strBuf, sizeof(strBuf), "%dx%d,", w, h); + + /* append on head of string8Buf */ + uhdTempStr.setTo(strBuf); + uhdTempStr.append(string8Buf); + string8Buf.setTo(uhdTempStr); + } else { + ret = INVALID_OPERATION; + } + + return ret; +} + +uint32_t ExynosCamera1Parameters::getBnsScaleRatio(void) +{ + return m_cameraInfo.bnsScaleRatio; +} + +void ExynosCamera1Parameters::setBnsSize(int w, int h) +{ + m_cameraInfo.bnsW = w; + m_cameraInfo.bnsH = h; + + updateHwSensorSize(); + +#if 0 + int zoom = getZoomLevel(); + int previewW = 0, previewH = 0; + getPreviewSize(&previewW, &previewH); + if (m_setParamCropRegion(zoom, w, h, previewW, previewH) != NO_ERROR) + CLOGE("ERR(%s):m_setParamCropRegion() fail", __FUNCTION__); +#else + ExynosRect srcRect, dstRect; + getPreviewBayerCropSize(&srcRect, &dstRect); +#endif +} + +void ExynosCamera1Parameters::getBnsSize(int *w, int *h) +{ + *w = m_cameraInfo.bnsW; + *h = m_cameraInfo.bnsH; +} + +void ExynosCamera1Parameters::updateBinningScaleRatio(void) +{ + 
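+    /*
+     * The binning ratio is expressed in thousandths (1000 == no binning).
+     * High speed recording uses 2000 (2:1) for the 60 fps mode and 4000 (4:1)
+     * for the 120/240 fps modes; USE_BINNING_MODE (VT call) also selects 2000.
+     */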
int ret = 0; + uint32_t binningRatio = DEFAULT_BINNING_RATIO * 1000; + + if ((getRecordingHint() == true) + && (getHighSpeedRecording() == true)) { + int fpsmode = 0; + fpsmode = getFastFpsMode(); + switch (fpsmode) { + case 1: /* 60 fps */ + binningRatio = 2000; + break; + case 2: /* 120 fps */ + case 3: /* 240 fps */ + binningRatio = 4000; + break; + default: + ALOGE("ERR(%s[%d]): Invalide FastFpsMode(%d)", __FUNCTION__, __LINE__, fpsmode); + } + } +#ifdef USE_BINNING_MODE + else if (getBinningMode() == true) { + binningRatio = 2000; + } +#endif + + if (binningRatio != getBinningScaleRatio()) { + ALOGI("INFO(%s[%d]):New sensor binning ratio(%d)", __FUNCTION__, __LINE__, binningRatio); + ret = m_setBinningScaleRatio(binningRatio); + } + + if (ret < 0) + ALOGE("ERR(%s[%d]): Cannot update BNS scale ratio(%d)", __FUNCTION__, __LINE__, binningRatio); +} + +status_t ExynosCamera1Parameters::m_setBinningScaleRatio(int ratio) +{ +#define MIN_BINNING_RATIO 1000 +#define MAX_BINNING_RATIO 6000 + + if (ratio < MIN_BINNING_RATIO || ratio > MAX_BINNING_RATIO) { + ALOGE("ERR(%s[%d]): Out of bound, ratio(%d), min:max(%d:%d)", + __FUNCTION__, __LINE__, ratio, MAX_BINNING_RATIO, MAX_BINNING_RATIO); + return BAD_VALUE; + } + + m_cameraInfo.binningScaleRatio = ratio; + + return NO_ERROR; +} + +uint32_t ExynosCamera1Parameters::getBinningScaleRatio(void) +{ + return m_cameraInfo.binningScaleRatio; +} + +status_t ExynosCamera1Parameters::checkPictureSize(const CameraParameters& params) +{ + int curPictureW = 0; + int curPictureH = 0; + int newPictureW = 0; + int newPictureH = 0; + int curHwPictureW = 0; + int curHwPictureH = 0; + int newHwPictureW = 0; + int newHwPictureH = 0; + int right_ratio = 177; + + params.getPictureSize(&newPictureW, &newPictureH); + + if (newPictureW < 0 || newPictureH < 0) { + return BAD_VALUE; + } + + if (m_adjustPictureSize(&newPictureW, &newPictureH, &newHwPictureW, &newHwPictureH) != NO_ERROR) { + return BAD_VALUE; + } + + if (m_isSupportedPictureSize(newPictureW, newPictureH) == false) { + int maxHwPictureW =0; + int maxHwPictureH = 0; + + CLOGE("ERR(%s):Invalid picture size(%dx%d)", __FUNCTION__, newPictureW, newPictureH); + + /* prevent wrong size setting */ + getMaxPictureSize(&maxHwPictureW, &maxHwPictureH); + m_setPictureSize(maxHwPictureW, maxHwPictureH); + m_setHwPictureSize(maxHwPictureW, maxHwPictureH); + m_params.setPictureSize(maxHwPictureW, maxHwPictureH); + CLOGE("ERR(%s):changed picture size to MAX(%dx%d)", __FUNCTION__, maxHwPictureW, maxHwPictureH); + +#ifdef FIXED_SENSOR_SIZE + updateHwSensorSize(); +#endif + return INVALID_OPERATION; + } + CLOGI("INFO(%s):newPicture Size (%dx%d), ratioId(%d)", + "setParameters", newPictureW, newPictureH, m_cameraInfo.pictureSizeRatioId); + + if ((int)(m_staticInfo->maxSensorW * 100 / m_staticInfo->maxSensorH) == right_ratio) { + setHorizontalViewAngle(newPictureW, newPictureH); + } + m_params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle()); + + getPictureSize(&curPictureW, &curPictureH); + getHwPictureSize(&curHwPictureW, &curHwPictureH); + + if (curPictureW != newPictureW || curPictureH != newPictureH || + curHwPictureW != newHwPictureW || curHwPictureH != newHwPictureH) { + + CLOGI("INFO(%s[%d]): Picture size changed: cur(%dx%d) -> new(%dx%d)", + "setParameters", __LINE__, curPictureW, curPictureH, newPictureW, newPictureH); + CLOGI("INFO(%s[%d]): HwPicture size changed: cur(%dx%d) -> new(%dx%d)", + "setParameters", __LINE__, curHwPictureW, curHwPictureH, newHwPictureW, newHwPictureH); + + 
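+        /* commit both the user-visible picture size and the HW (sensor crop) picture size */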
m_setPictureSize(newPictureW, newPictureH); + m_setHwPictureSize(newHwPictureW, newHwPictureH); + m_params.setPictureSize(newPictureW, newPictureH); + +#ifdef FIXED_SENSOR_SIZE + updateHwSensorSize(); +#endif + } + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::m_adjustPictureSize(int *newPictureW, int *newPictureH, + int *newHwPictureW, int *newHwPictureH) +{ + int ret = 0; + int newX = 0, newY = 0, newW = 0, newH = 0; + float zoomRatio = getZoomRatio(0) / 1000; + + if ((getRecordingHint() == true && getHighSpeedRecording() == true) +#ifdef USE_BINNING_MODE + || getBinningMode() +#endif + ) + { + int sizeList[SIZE_LUT_INDEX_END]; + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + *newPictureW = sizeList[TARGET_W]; + *newPictureH = sizeList[TARGET_H]; + *newHwPictureW = *newPictureW; + *newHwPictureH = *newPictureH; + + return NO_ERROR; + } else { + ALOGE("ERR(%s):m_getPreviewSizeList() fail", __FUNCTION__); + return BAD_VALUE; + } + } + + getMaxPictureSize(newHwPictureW, newHwPictureH); + + //if (getCameraId() == CAMERA_ID_BACK) { + // libcamera: 75xx: Fix size issue in preview and capture // Vijayakumar S N + if (isReprocessing() == true + && isUseYuvReprocessing() == false) { + ret = getCropRectAlign(*newHwPictureW, *newHwPictureH, + *newPictureW, *newPictureH, + &newX, &newY, &newW, &newH, + CAMERA_ISP_ALIGN, 2, 0, zoomRatio); + if (ret < 0) { + CLOGE("ERR(%s):getCropRectAlign(%d, %d, %d, %d) fail", + __FUNCTION__, *newHwPictureW, *newHwPictureH, *newPictureW, *newPictureH); + return BAD_VALUE; + } + *newHwPictureW = newW; + *newHwPictureH = newH; + +#ifdef FIXED_SENSOR_SIZE + /* + * sensor crop size: + * sensor crop is only used at 16:9 aspect ratio in picture size. + */ + if (getSamsungCamera() == true) { + if (((float)*newPictureW / (float)*newPictureH) == ((float)16 / (float)9)) { + CLOGD("(%s): Use sensor crop (ratio: %f)", + __FUNCTION__, ((float)*newPictureW / (float)*newPictureH)); + m_setHwSensorSize(newW, newH); + } + } +#endif + // libcamera: 75xx: Fix size issue in preview and capture // Vijayakumar S N + } else { + /* + * 15.05.29 + * setFormat size set by hwPictureSize. + * 3ac's per-frame size use getPreviewBayerCropSize. + * if hwPictureSize(== m_getPreviewSizeList) is smaller than getPreviewBayerCropSize, + * MMU fault is happen. + * so, just comment out. + * (originally, this code is for less memory. + * but, reserved is already allocated on system + * non-reverved memory is not much different size.) 
+ */ + /* + int sizeList[SIZE_LUT_INDEX_END]; + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + if (*newHwPictureW > sizeList[BCROP_W] || *newHwPictureH > sizeList[BCROP_H]) { + *newHwPictureW = sizeList[BCROP_W]; + *newHwPictureH = sizeList[BCROP_H]; + } + } + */ + } + + return NO_ERROR; +} + +bool ExynosCamera1Parameters::m_isSupportedPictureSize(const int width, + const int height) +{ + int maxWidth, maxHeight = 0; + int (*sizeList)[SIZE_OF_RESOLUTION]; + + getMaxPictureSize(&maxWidth, &maxHeight); + + if (maxWidth < width || maxHeight < height) { + CLOGE("ERR(%s):invalid picture Size(maxSize(%d/%d) size(%d/%d)", + __FUNCTION__, maxWidth, maxHeight, width, height); + return false; + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->rearPictureList; + for (int i = 0; i < m_staticInfo->rearPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->frontPictureList; + for (int i = 0; i < m_staticInfo->frontPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPictureList; + for (int i = 0; i < m_staticInfo->hiddenRearPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->hiddenFrontPictureList; + for (int i = 0; i < m_staticInfo->hiddenFrontPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + CLOGE("ERR(%s):Invalid picture size(%dx%d)", __FUNCTION__, width, height); + + return false; +} + +void ExynosCamera1Parameters::m_setPictureSize(int w, int h) +{ + m_cameraInfo.pictureW = w; + m_cameraInfo.pictureH = h; +} + +void ExynosCamera1Parameters::getPictureSize(int *w, int *h) +{ + *w = m_cameraInfo.pictureW; + *h = m_cameraInfo.pictureH; +} + +void ExynosCamera1Parameters::getMaxPictureSize(int *w, int *h) +{ + *w = m_staticInfo->maxPictureW; + *h = m_staticInfo->maxPictureH; +} + +void ExynosCamera1Parameters::m_setHwPictureSize(int w, int h) +{ + m_cameraInfo.hwPictureW = w; + m_cameraInfo.hwPictureH = h; +} + +void ExynosCamera1Parameters::getHwPictureSize(int *w, int *h) +{ + *w = m_cameraInfo.hwPictureW; + *h = m_cameraInfo.hwPictureH; +} + +void ExynosCamera1Parameters::m_setHwBayerCropRegion(int w, int h, int x, int y) +{ + Mutex::Autolock lock(m_parameterLock); + + m_cameraInfo.hwBayerCropW = w; + m_cameraInfo.hwBayerCropH = h; + m_cameraInfo.hwBayerCropX = x; + m_cameraInfo.hwBayerCropY = y; +} + +void ExynosCamera1Parameters::getHwVraInputSize(int *w, int *h) +{ +#if defined(MAX_VRA_INPUT_SIZE_WIDTH) && defined(MAX_VRA_INPUT_SIZE_HEIGHT) + int vraWidth = MAX_VRA_INPUT_WIDTH; + int vraHeight = MAX_VRA_INPUT_HEIGHT; +#else + int vraWidth = 640; + int vraHeight = 480; +#endif + float vraRatio = ROUND_OFF(((float)vraWidth / (float)vraHeight), 2); + + switch (m_cameraInfo.previewSizeRatioId) { 
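+    /*
+     * Fit the VRA (face detection) input inside the vraWidth x vraHeight base
+     * while keeping the preview aspect ratio: derived widths are aligned to
+     * CAMERA_16PX_ALIGN and heights to 2, e.g. with a 640x480 base,
+     * 16:9 -> 640x360 and 4:3 -> 640x480.
+     */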
+ case SIZE_RATIO_16_9: + *w = vraWidth; + *h = ALIGN_UP((vraWidth / 16) * 9, 2); + break; + case SIZE_RATIO_4_3: + *w = ALIGN_UP((vraHeight / 3) * 4, CAMERA_16PX_ALIGN); + *h = vraHeight; + break; + case SIZE_RATIO_1_1: + *w = vraHeight; + *h = vraHeight; + break; + case SIZE_RATIO_3_2: + if (vraRatio == 1.33f) { /* 4:3 */ + *w = vraWidth; + *h = ALIGN_UP((vraWidth / 3) * 2, 2); + } else if (vraRatio == 1.77f) { /* 16:9 */ + *w = ALIGN_UP((vraHeight / 2) * 3, CAMERA_16PX_ALIGN); + *h = vraHeight; + } else { + *w = vraWidth; + *h = vraHeight; + } + break; + case SIZE_RATIO_5_4: + *w = ALIGN_UP((vraHeight / 4) * 5, CAMERA_16PX_ALIGN); + *h = vraHeight; + break; + case SIZE_RATIO_5_3: + if (vraRatio == 1.33f) { /* 4:3 */ + *w = vraWidth; + *h = ALIGN_UP((vraWidth / 5) * 3, 2); + } else if (vraRatio == 1.77f) { /* 16:9 */ + *w = ALIGN_UP((vraHeight / 3) * 5, CAMERA_16PX_ALIGN); + *h = vraHeight; + } else { + *w = vraWidth; + *h = vraHeight; + } + break; + case SIZE_RATIO_11_9: + *w = ALIGN_UP((vraHeight / 9) * 11, CAMERA_16PX_ALIGN); + *h = vraHeight; + break; + default: + CLOGW("WARN(%s[%d]):Invalid size ratio(%d)", + __FUNCTION__, __LINE__, m_cameraInfo.previewSizeRatioId); + + *w = vraWidth; + *h = vraHeight; + break; + } +} + +int ExynosCamera1Parameters::getHwVraInputFormat(void) +{ +#if defined(CAMERA_VRA_INPUT_FORMAT) + return CAMERA_VRA_INPUT_FORMAT; +#else + return V4L2_PIX_FMT_NV21; +#endif +} + +void ExynosCamera1Parameters::getHwBayerCropRegion(int *w, int *h, int *x, int *y) +{ + Mutex::Autolock lock(m_parameterLock); + + *w = m_cameraInfo.hwBayerCropW; + *h = m_cameraInfo.hwBayerCropH; + *x = m_cameraInfo.hwBayerCropX; + *y = m_cameraInfo.hwBayerCropY; +} + +void ExynosCamera1Parameters::m_setPictureFormat(int fmt) +{ + m_cameraInfo.pictureFormat = fmt; +} + +int ExynosCamera1Parameters::getPictureFormat(void) +{ + return m_cameraInfo.pictureFormat; +} + +void ExynosCamera1Parameters::m_setHwPictureFormat(int fmt) +{ + m_cameraInfo.hwPictureFormat = fmt; +} + +int ExynosCamera1Parameters::getHwPictureFormat(void) +{ + return m_cameraInfo.hwPictureFormat; +} + +status_t ExynosCamera1Parameters::checkJpegQuality(const CameraParameters& params) +{ + int newJpegQuality = params.getInt(CameraParameters::KEY_JPEG_QUALITY); + int curJpegQuality = getJpegQuality(); + + CLOGD("DEBUG(%s):newJpegQuality %d", "setParameters", newJpegQuality); + + if (newJpegQuality < 1 || newJpegQuality > 100) { + CLOGE("ERR(%s): Invalid Jpeg Quality (Min: %d, Max: %d, Value: %d)", __FUNCTION__, 1, 100, newJpegQuality); + return BAD_VALUE; + } + + if (curJpegQuality != newJpegQuality) { + m_setJpegQuality(newJpegQuality); + m_params.set(CameraParameters::KEY_JPEG_QUALITY, newJpegQuality); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setJpegQuality(int quality) +{ + m_cameraInfo.jpegQuality = quality; +} + +int ExynosCamera1Parameters::getJpegQuality(void) +{ + return m_cameraInfo.jpegQuality; +} + +status_t ExynosCamera1Parameters::checkThumbnailSize(const CameraParameters& params) +{ + int curThumbnailW = 0; + int curThumbnailH = 0; + int maxThumbnailW = 0; + int maxThumbnailH = 0; + int newJpegThumbnailW = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH); + int newJpegThumbnailH = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT); + + CLOGD("DEBUG(%s):newJpegThumbnailW X newJpegThumbnailH: %d X %d", "setParameters", newJpegThumbnailW, newJpegThumbnailH); + + getMaxThumbnailSize(&maxThumbnailW, &maxThumbnailH); + + if (newJpegThumbnailW < 0 || newJpegThumbnailH < 0 || + 
newJpegThumbnailW > maxThumbnailW || newJpegThumbnailH > maxThumbnailH) { + CLOGE("ERR(%s): Invalid Thumbnail Size (maxSize(%d/%d) size(%d/%d)", __FUNCTION__, maxThumbnailW, maxThumbnailH, newJpegThumbnailW, newJpegThumbnailH); + return BAD_VALUE; + } + + getThumbnailSize(&curThumbnailW, &curThumbnailH); + + if (curThumbnailW != newJpegThumbnailW || curThumbnailH != newJpegThumbnailH) { + m_setThumbnailSize(newJpegThumbnailW, newJpegThumbnailH); + m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, newJpegThumbnailW); + m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, newJpegThumbnailH); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setThumbnailSize(int w, int h) +{ + m_cameraInfo.thumbnailW = w; + m_cameraInfo.thumbnailH = h; +} + +void ExynosCamera1Parameters::getThumbnailSize(int *w, int *h) +{ + *w = m_cameraInfo.thumbnailW; + *h = m_cameraInfo.thumbnailH; +} + +void ExynosCamera1Parameters::getMaxThumbnailSize(int *w, int *h) +{ + *w = m_staticInfo->maxThumbnailW; + *h = m_staticInfo->maxThumbnailH; +} + +status_t ExynosCamera1Parameters::checkThumbnailQuality(const CameraParameters& params) +{ + int newJpegThumbnailQuality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY); + int curThumbnailQuality = getThumbnailQuality(); + + CLOGD("DEBUG(%s):newJpegThumbnailQuality %d", "setParameters", newJpegThumbnailQuality); + + if (newJpegThumbnailQuality < 0 || newJpegThumbnailQuality > 100) { + CLOGE("ERR(%s): Invalid Thumbnail Quality (Min: %d, Max: %d, Value: %d)", __FUNCTION__, 0, 100, newJpegThumbnailQuality); + return BAD_VALUE; + } + + if (curThumbnailQuality != newJpegThumbnailQuality) { + m_setThumbnailQuality(newJpegThumbnailQuality); + m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, newJpegThumbnailQuality); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setThumbnailQuality(int quality) +{ + m_cameraInfo.thumbnailQuality = quality; +} + +int ExynosCamera1Parameters::getThumbnailQuality(void) +{ + return m_cameraInfo.thumbnailQuality; +} + +status_t ExynosCamera1Parameters::check3dnrMode(const CameraParameters& params) +{ + bool new3dnrMode = false; + bool cur3dnrMode = false; + const char *str3dnrMode = params.get("3dnr"); + + if (str3dnrMode == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):new3dnrMode %s", "setParameters", str3dnrMode); + + if (!strcmp(str3dnrMode, "true")) + new3dnrMode = true; + + if (m_flag3dnrMode != new3dnrMode) { + m_flag3dnrMode = new3dnrMode; + m_params.set("3dnr", str3dnrMode); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_set3dnrMode(bool toggle) +{ + m_cameraInfo.is3dnrMode = toggle; +} + +bool ExynosCamera1Parameters::get3dnrMode(void) +{ + return m_cameraInfo.is3dnrMode; +} + +status_t ExynosCamera1Parameters::checkDrcMode(const CameraParameters& params) +{ + bool newDrcMode = false; + bool curDrcMode = false; + const char *strDrcMode = params.get("drc"); + + if (strDrcMode == NULL) { +#ifdef USE_FRONT_PREVIEW_DRC + if (getCameraId() == CAMERA_ID_FRONT && m_staticInfo->drcSupport == true) { + newDrcMode = !getRecordingHint(); + m_setDrcMode(newDrcMode); + ALOGD("DEBUG(%s):Force DRC %s for front", "setParameters", + (newDrcMode == true)? 
"ON" : "OFF"); + } +#endif + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newDrcMode %s", "setParameters", strDrcMode); + + if (!strcmp(strDrcMode, "true")) + newDrcMode = true; + + curDrcMode = getDrcMode(); + + if (curDrcMode != newDrcMode) { + m_setDrcMode(newDrcMode); + m_params.set("drc", strDrcMode); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setDrcMode(bool toggle) +{ + m_cameraInfo.isDrcMode = toggle; + if (setDrcEnable(toggle) < 0) { + CLOGE("ERR(%s[%d]): set DRC fail, toggle(%d)", __FUNCTION__, __LINE__, toggle); + } +} + +bool ExynosCamera1Parameters::getDrcMode(void) +{ + return m_cameraInfo.isDrcMode; +} + +status_t ExynosCamera1Parameters::checkOdcMode(const CameraParameters& params) +{ + bool newOdcMode = false; + bool curOdcMode = false; + const char *strOdcMode = params.get("odc"); + + if (strOdcMode == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newOdcMode %s", "setParameters", strOdcMode); + + if (!strcmp(strOdcMode, "true")) + newOdcMode = true; + + curOdcMode = getOdcMode(); + + if (curOdcMode != newOdcMode) { + m_setOdcMode(newOdcMode); + m_params.set("odc", strOdcMode); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setOdcMode(bool toggle) +{ + m_cameraInfo.isOdcMode = toggle; +} + +bool ExynosCamera1Parameters::getOdcMode(void) +{ + return m_cameraInfo.isOdcMode; +} + +bool ExynosCamera1Parameters::getTpuEnabledMode(void) +{ + if (getHWVdisMode() == true) + return true; + + if (get3dnrMode() == true) + return true; + + if (getOdcMode() == true) + return true; + + return false; +} + +status_t ExynosCamera1Parameters::checkZoomLevel(const CameraParameters& params) +{ + int newZoom = params.getInt(CameraParameters::KEY_ZOOM); + int curZoom = 0; + + CLOGD("DEBUG(%s):newZoom %d", "setParameters", newZoom); + + /* cannot support DZoom -> set Zoom Level 0 */ + if (getZoomSupported() == false) { + if (newZoom != ZOOM_LEVEL_0) { + CLOGE("ERR(%s):Invalid value (Zoom Should be %d, Value: %d)", __FUNCTION__, ZOOM_LEVEL_0, newZoom); + return BAD_VALUE; + } + + if (setZoomLevel(ZOOM_LEVEL_0) != NO_ERROR) + return BAD_VALUE; + + return NO_ERROR; + } else { + if (newZoom < ZOOM_LEVEL_0 || getMaxZoomLevel() <= newZoom) { + CLOGE("ERR(%s):Invalid value (Min: %d, Max: %d, Value: %d)", __FUNCTION__, ZOOM_LEVEL_0, getMaxZoomLevel(), newZoom); + return BAD_VALUE; + } + + if (setZoomLevel(newZoom) != NO_ERROR) { + return BAD_VALUE; + } + m_params.set(CameraParameters::KEY_ZOOM, newZoom); + + m_flagMeteringRegionChanged = true; + + return NO_ERROR; + } + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::setZoomLevel(int zoom) +{ + int srcW = 0; + int srcH = 0; + int dstW = 0; + int dstH = 0; +#ifdef USE_FW_ZOOMRATIO + float zoomRatio = 1.00f; +#endif + + m_cameraInfo.zoom = zoom; + + getHwSensorSize(&srcW, &srcH); + getHwPreviewSize(&dstW, &dstH); + +#if 0 + if (m_setParamCropRegion(zoom, srcW, srcH, dstW, dstH) != NO_ERROR) { + return BAD_VALUE; + } +#else + ExynosRect srcRect, dstRect; + getPreviewBayerCropSize(&srcRect, &dstRect); +#endif +#ifdef USE_FW_ZOOMRATIO + zoomRatio = getZoomRatio(zoom) / 1000; + setMetaCtlZoom(&m_metadata, zoomRatio); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::m_setParamCropRegion( + int zoom, + int srcW, int srcH, + int dstW, int dstH) +{ + int newX = 0, newY = 0, newW = 0, newH = 0; + float zoomRatio = getZoomRatio(zoom) / 1000; + + if (getCropRectAlign(srcW, srcH, + dstW, dstH, + &newX, &newY, + &newW, &newH, + CAMERA_MAGIC_ALIGN, 2, + zoom, zoomRatio) != NO_ERROR) { + 
CLOGE("ERR(%s):getCropRectAlign(%d, %d, %d, %d) fail", + __func__, srcW, srcH, dstW, dstH); + return BAD_VALUE; + } + + newX = ALIGN_UP(newX, 2); + newY = ALIGN_UP(newY, 2); + newW = srcW - (newX * 2); + newH = srcH - (newY * 2); + + CLOGI("DEBUG(%s):size0(%d, %d, %d, %d)", + __FUNCTION__, srcW, srcH, dstW, dstH); + CLOGI("DEBUG(%s):size(%d, %d, %d, %d), level(%d)", + __FUNCTION__, newX, newY, newW, newH, zoom); + + m_setHwBayerCropRegion(newW, newH, newX, newY); + + return NO_ERROR; +} + +int ExynosCamera1Parameters::getZoomLevel(void) +{ + return m_cameraInfo.zoom; +} + +status_t ExynosCamera1Parameters::checkRotation(const CameraParameters& params) +{ + int newRotation = params.getInt(CameraParameters::KEY_ROTATION); + int curRotation = 0; + + if (newRotation < 0) { + CLOGE("ERR(%s): Invalide Rotation value(%d)", __FUNCTION__, newRotation); + return NO_ERROR; + } + + CLOGD("DEBUG(%s):set orientation:%d", "setParameters", newRotation); + + curRotation = getRotation(); + + if (curRotation != newRotation) { + m_setRotation(newRotation); + m_params.set(CameraParameters::KEY_ROTATION, newRotation); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setRotation(int rotation) +{ + m_cameraInfo.rotation = rotation; +} + +int ExynosCamera1Parameters::getRotation(void) +{ + return m_cameraInfo.rotation; +} + +status_t ExynosCamera1Parameters::checkAutoExposureLock(const CameraParameters& params) +{ + bool newAutoExposureLock = false; + bool curAutoExposureLock = false; + const char *strAutoExposureLock = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK); + if (strAutoExposureLock == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newAutoExposureLock %s", "setParameters", strAutoExposureLock); + + if (!strcmp(strAutoExposureLock, "true")) + newAutoExposureLock = true; + + curAutoExposureLock = getAutoExposureLock(); + + if (curAutoExposureLock != newAutoExposureLock) { + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + m_flashMgr->setAeLock(newAutoExposureLock); + m_setAutoExposureLock(newAutoExposureLock); + m_params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, strAutoExposureLock); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setAutoExposureLock(bool lock) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + m_cameraInfo.autoExposureLock = lock; + setMetaCtlAeLock(&m_metadata, lock); + } +} + +bool ExynosCamera1Parameters::getAutoExposureLock(void) +{ + return m_cameraInfo.autoExposureLock; +} + +status_t ExynosCamera1Parameters::checkExposureCompensation(const CameraParameters& params) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return NO_ERROR; + } + + int minExposureCompensation = params.getInt(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION); + int maxExposureCompensation = params.getInt(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION); + int newExposureCompensation = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION); + int curExposureCompensation = getExposureCompensation(); + + CLOGD("DEBUG(%s):newExposureCompensation %d", "setParameters", newExposureCompensation); + + if ((newExposureCompensation < minExposureCompensation) || + (newExposureCompensation > maxExposureCompensation)) { + CLOGE("ERR(%s): Invalide Exposurecompensation (Min: %d, Max: %d, Value: %d)", __FUNCTION__, + minExposureCompensation, maxExposureCompensation, newExposureCompensation); + return BAD_VALUE; + } + + if (curExposureCompensation != newExposureCompensation) { + m_setExposureCompensation(newExposureCompensation); + 
m_params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, newExposureCompensation); + } + + return NO_ERROR; +} + +int32_t ExynosCamera1Parameters::getLongExposureShotCount(void) +{ + bool getResult; + int32_t count = 0; +#ifdef CAMERA_ADD_BAYER_ENABLE + if (m_exposureTimeCapture <= CAMERA_EXPOSURE_TIME_MAX) +#endif + { + return 0; + } + + if (m_exposureTimeCapture % CAMERA_EXPOSURE_TIME_MAX) { + /* find the smallest count (>= 2) that divides the exposure time evenly and keeps each sub-exposure within CAMERA_EXPOSURE_TIME_MAX */ + count = 2; + getResult = false; + while (!getResult) { + if (m_exposureTimeCapture % count) { + count++; + continue; + } + if (CAMERA_EXPOSURE_TIME_MAX < (m_exposureTimeCapture / count)) { + count++; + continue; + } + getResult = true; + } + return count - 1; + } else { + return m_exposureTimeCapture / CAMERA_EXPOSURE_TIME_MAX - 1; + } +} + +status_t ExynosCamera1Parameters::checkMeteringAreas(const CameraParameters& params) +{ + int ret = NO_ERROR; + const char *newMeteringAreas = params.get(CameraParameters::KEY_METERING_AREAS); + const char *curMeteringAreas = m_params.get(CameraParameters::KEY_METERING_AREAS); + const char meteringAreas[20] = "(0,0,0,0,0)"; + bool nullCheckflag = false; + + int newMeteringAreasSize = 0; + bool isMeteringAreasSame = false; + uint32_t maxNumMeteringAreas = getMaxNumMeteringAreas(); + + if (newMeteringAreas == NULL || (newMeteringAreas != NULL && !strcmp(newMeteringAreas, "(0,0,0,0,0)"))) { + if(getMeteringMode() == METERING_MODE_SPOT) { + newMeteringAreas = meteringAreas; + nullCheckflag = true; + } else { + setMetaCtlAeRegion(&m_metadata, 0, 0, 0, 0, 0); + return NO_ERROR; + } + } + + if(getSamsungCamera()) { + maxNumMeteringAreas = 1; + } + + if (maxNumMeteringAreas <= 0) { + CLOGD("DEBUG(%s): metering area is not supported", "Parameters"); + return NO_ERROR; + } + + ALOGD("DEBUG(%s):newMeteringAreas: %s ,maxNumMeteringAreas(%d)", "setParameters", newMeteringAreas, maxNumMeteringAreas); + + newMeteringAreasSize = strlen(newMeteringAreas); + if (curMeteringAreas != NULL) { + isMeteringAreasSame = !strncmp(newMeteringAreas, curMeteringAreas, newMeteringAreasSize); + } + + if (curMeteringAreas == NULL || isMeteringAreasSame == false || m_flagMeteringRegionChanged == true) { + /* ex : (-10,-10,0,0,300),(0,0,10,10,700) */ + ExynosRect2 *rect2s = new ExynosRect2[maxNumMeteringAreas]; + int *weights = new int[maxNumMeteringAreas]; + uint32_t validMeteringAreas = bracketsStr2Ints((char *)newMeteringAreas, maxNumMeteringAreas, rect2s, weights, 1); + + if (0 < validMeteringAreas && validMeteringAreas <= maxNumMeteringAreas) { + m_setMeteringAreas((uint32_t)validMeteringAreas, rect2s, weights); + if(!nullCheckflag) { + m_params.set(CameraParameters::KEY_METERING_AREAS, newMeteringAreas); + } + } else { + CLOGE("ERR(%s):MeteringAreas value is invalid", __FUNCTION__); + ret = UNKNOWN_ERROR; + } + + m_flagMeteringRegionChanged = false; + delete [] rect2s; + delete [] weights; + } + + return ret; +} + +void ExynosCamera1Parameters::m_setMeteringAreas(uint32_t num, ExynosRect *rects, int *weights) +{ + ExynosRect2 *rect2s = new ExynosRect2[num]; + + for (uint32_t i = 0; i < num; i++) + convertingRectToRect2(&rects[i], &rect2s[i]); + + m_setMeteringAreas(num, rect2s, weights); + + delete [] rect2s; +} + +void ExynosCamera1Parameters::getMeteringAreas(__unused ExynosRect *rects) +{ + /* TODO */ +} + +void ExynosCamera1Parameters::getMeteringAreas(__unused ExynosRect2 *rect2s) +{ + /* TODO */ +} + +void ExynosCamera1Parameters::m_setMeteringMode(int meteringMode) +{ + uint32_t x = 0; + uint32_t y = 0; + uint32_t w = 0; + uint32_t h = 0; + uint32_t weight = 0; + int hwSensorW = 0;
+ int hwSensorH = 0; + enum aa_aemode aeMode; + + if (getAutoExposureLock() == true) { + CLOGD("DEBUG(%s):autoExposure is Locked", __FUNCTION__); + return; + } + + m_cameraInfo.meteringMode = meteringMode; + + getHwSensorSize(&hwSensorW, &hwSensorH); + + switch (meteringMode) { + case METERING_MODE_AVERAGE: + aeMode = AA_AEMODE_AVERAGE; + x = 0; + y = 0; + w = hwSensorW; + h = hwSensorH; + weight = 1000; + break; + case METERING_MODE_MATRIX: + aeMode = AA_AEMODE_MATRIX; + x = 0; + y = 0; + w = hwSensorW; + h = hwSensorH; + weight = 1000; + break; + case METERING_MODE_SPOT: + /* In spot mode, the default region is a 100x100 rectangle at the center */ + aeMode = AA_AEMODE_SPOT; + x = hwSensorW / 2 - 50; + y = hwSensorH / 2 - 50; + w = hwSensorW / 2 + 50; + h = hwSensorH / 2 + 50; + weight = 50; + break; +#ifdef TOUCH_AE + case METERING_MODE_MATRIX_TOUCH: + aeMode = AA_AEMODE_MATRIX_TOUCH; + break; + case METERING_MODE_SPOT_TOUCH: + aeMode = AA_AEMODE_SPOT_TOUCH; + break; + case METERING_MODE_CENTER_TOUCH: + aeMode = AA_AEMODE_CENTER_TOUCH; + break; + case METERING_MODE_AVERAGE_TOUCH: + aeMode = AA_AEMODE_AVERAGE_TOUCH; + break; +#endif + case METERING_MODE_CENTER: + default: + aeMode = AA_AEMODE_CENTER; + x = 0; + y = 0; + w = 0; + h = 0; + weight = 1000; + break; + } + + setMetaCtlAeMode(&m_metadata, aeMode); + + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + m_flashMgr->setFlashExposure(aeMode); +} + +int ExynosCamera1Parameters::getMeteringMode(void) +{ + return m_cameraInfo.meteringMode; +} + +int ExynosCamera1Parameters::getSupportedMeteringMode(void) +{ + return m_staticInfo->meteringList; +} + +status_t ExynosCamera1Parameters::checkAntibanding(const CameraParameters& params) +{ + int newAntibanding = -1; + int curAntibanding = -1; + + const char *strKeyAntibanding = params.get(CameraParameters::KEY_ANTIBANDING); + const char *strNewAntibanding = m_adjustAntibanding(strKeyAntibanding); + + if (strNewAntibanding == NULL) { + return NO_ERROR; + } + CLOGD("DEBUG(%s):strNewAntibanding %s", "setParameters", strNewAntibanding); + + if (!strcmp(strNewAntibanding, CameraParameters::ANTIBANDING_AUTO)) + newAntibanding = AA_AE_ANTIBANDING_AUTO; + else if (!strcmp(strNewAntibanding, CameraParameters::ANTIBANDING_50HZ)) + newAntibanding = AA_AE_ANTIBANDING_AUTO_50HZ; + else if (!strcmp(strNewAntibanding, CameraParameters::ANTIBANDING_60HZ)) + newAntibanding = AA_AE_ANTIBANDING_AUTO_60HZ; + else if (!strcmp(strNewAntibanding, CameraParameters::ANTIBANDING_OFF)) + newAntibanding = AA_AE_ANTIBANDING_OFF; + else { + CLOGE("ERR(%s):Invalid antibanding value(%s)", __FUNCTION__, strNewAntibanding); + return BAD_VALUE; + } + + curAntibanding = getAntibanding(); + + if (curAntibanding != newAntibanding) { + m_setAntibanding(newAntibanding); + } + + if (strKeyAntibanding != NULL) { + m_params.set(CameraParameters::KEY_ANTIBANDING, strKeyAntibanding); + } + + return NO_ERROR; +} + +const char *ExynosCamera1Parameters::m_adjustAntibanding(const char *strAntibanding) +{ + const char *strAdjustedAntibanding = NULL; + + strAdjustedAntibanding = strAntibanding; + +#if 0 /* fixed the flicker issue when highspeed recording(60fps or 120fps) */ + /* when in high speed recording mode, turn off the antibanding */ + if (getHighSpeedRecording()) + strAdjustedAntibanding = CameraParameters::ANTIBANDING_OFF; +#endif + return strAdjustedAntibanding; +} + + +void ExynosCamera1Parameters::m_setAntibanding(int value) +{ + setMetaCtlAntibandingMode(&m_metadata, (enum aa_ae_antibanding_mode)value); +} + +int
ExynosCamera1Parameters::getAntibanding(void) +{ + enum aa_ae_antibanding_mode antibanding; + getMetaCtlAntibandingMode(&m_metadata, &antibanding); + return (int)antibanding; +} + +int ExynosCamera1Parameters::getSupportedAntibanding(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->antiBandingList; + } +} + +int ExynosCamera1Parameters::getSceneMode(void) +{ + return m_cameraInfo.sceneMode; +} + +int ExynosCamera1Parameters::getSupportedSceneModes(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->sceneModeList; + } +} + +void ExynosCamera1Parameters::setFocusModeLock(bool enable) { + int curFocusMode = getFocusMode(); + + ALOGD("DEBUG(%s):FocusModeLock (%s)", __FUNCTION__, enable? "true" : "false"); + + if(enable) { + m_activityControl->stopAutoFocus(); + } else { + m_setFocusMode(curFocusMode); + } +} + +void ExynosCamera1Parameters::setFocusModeSetting(bool enable) +{ + m_setFocusmodeSetting = enable; +} + +int ExynosCamera1Parameters::getFocusModeSetting(void) +{ + return m_setFocusmodeSetting; +} + +int ExynosCamera1Parameters::getFocusMode(void) +{ + return m_cameraInfo.focusMode; +} + +int ExynosCamera1Parameters::getSupportedFocusModes(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->focusModeList; + } +} + +status_t ExynosCamera1Parameters::checkFlashMode(const CameraParameters& params) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return NO_ERROR; + } else { + int newFlashMode = -1; + int curFlashMode = -1; + const char *strFlashMode = params.get(CameraParameters::KEY_FLASH_MODE); + + if (strFlashMode == NULL) { + return NO_ERROR; + } + + const char *strNewFlashMode = m_adjustFlashMode(strFlashMode); + + if (strNewFlashMode == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewFlashMode %s", "setParameters", strNewFlashMode); + + if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_OFF)) + newFlashMode = FLASH_MODE_OFF; + else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_AUTO)) + newFlashMode = FLASH_MODE_AUTO; + else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_ON)) + newFlashMode = FLASH_MODE_ON; + else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_RED_EYE)) + newFlashMode = FLASH_MODE_RED_EYE; + else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_TORCH)) + newFlashMode = FLASH_MODE_TORCH; + else { + CLOGE("ERR(%s):unmatched flash_mode(%s), turn off flash", __FUNCTION__, strNewFlashMode); + newFlashMode = FLASH_MODE_OFF; + strNewFlashMode = CameraParameters::FLASH_MODE_OFF; + return BAD_VALUE; + } + +#ifndef UNSUPPORT_FLASH + if (!(newFlashMode & getSupportedFlashModes())) { + CLOGE("ERR(%s[%d]): Flash mode(%s) is not supported!", __FUNCTION__, __LINE__, strNewFlashMode); + return BAD_VALUE; + } +#endif + + curFlashMode = getFlashMode(); + + if (curFlashMode != newFlashMode) { + m_setFlashMode(newFlashMode); + m_params.set(CameraParameters::KEY_FLASH_MODE, strNewFlashMode); + } + + return NO_ERROR; + } +} + +const char *ExynosCamera1Parameters::m_adjustFlashMode(const char *flashMode) +{ + int sceneMode = getSceneMode(); + const char *newFlashMode = NULL; + + /* TODO: vendor specific adjust */ + + newFlashMode = flashMode; + + return newFlashMode; +} + +void ExynosCamera1Parameters::m_setFlashMode(int flashMode) +{ + m_cameraInfo.flashMode = flashMode; + + /* TODO: Notify flash activity */ + m_activityControl->setFlashMode(flashMode); +} + +int
ExynosCamera1Parameters::getFlashMode(void) +{ + return m_cameraInfo.flashMode; +} + +int ExynosCamera1Parameters::getSupportedFlashModes(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->flashModeList; + } +} + +status_t ExynosCamera1Parameters::checkWhiteBalanceMode(const CameraParameters& params) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + int newWhiteBalance = -1; + int curWhiteBalance = -1; + const char *strWhiteBalance = params.get(CameraParameters::KEY_WHITE_BALANCE); + const char *strNewWhiteBalance = m_adjustWhiteBalanceMode(strWhiteBalance); + + if (strNewWhiteBalance == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newWhiteBalance %s", "setParameters", strNewWhiteBalance); + + if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_AUTO)) + newWhiteBalance = WHITE_BALANCE_AUTO; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_INCANDESCENT)) + newWhiteBalance = WHITE_BALANCE_INCANDESCENT; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_FLUORESCENT)) + newWhiteBalance = WHITE_BALANCE_FLUORESCENT; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT)) + newWhiteBalance = WHITE_BALANCE_WARM_FLUORESCENT; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_DAYLIGHT)) + newWhiteBalance = WHITE_BALANCE_DAYLIGHT; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT)) + newWhiteBalance = WHITE_BALANCE_CLOUDY_DAYLIGHT; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_TWILIGHT)) + newWhiteBalance = WHITE_BALANCE_TWILIGHT; + else if (!strcmp(strNewWhiteBalance, CameraParameters::WHITE_BALANCE_SHADE)) + newWhiteBalance = WHITE_BALANCE_SHADE; + else { + CLOGE("ERR(%s):Invalid white balance(%s)", __FUNCTION__, strNewWhiteBalance); + return BAD_VALUE; + } + + if (!(newWhiteBalance & getSupportedWhiteBalance())) { + CLOGE("ERR(%s[%d]): white balance mode(%s) is not supported", __FUNCTION__, __LINE__, strNewWhiteBalance); + return BAD_VALUE; + } + + curWhiteBalance = getWhiteBalanceMode(); + + if (getSceneMode() == SCENE_MODE_AUTO) { + enum aa_awbmode cur_awbMode; + getMetaCtlAwbMode(&m_metadata, &cur_awbMode); + + if (m_setWhiteBalanceMode(newWhiteBalance) != NO_ERROR) + return BAD_VALUE; + + m_params.set(CameraParameters::KEY_WHITE_BALANCE, strNewWhiteBalance); + } + } + return NO_ERROR; +} + +const char *ExynosCamera1Parameters::m_adjustWhiteBalanceMode(const char *whiteBalance) +{ + int sceneMode = getSceneMode(); + const char *newWhiteBalance = NULL; + + /* TODO: vendor specific adjust */ + + /* TN feature can change whiteBalance even in non SCENE_MODE_AUTO */ + + newWhiteBalance = whiteBalance; + + return newWhiteBalance; +} + +status_t ExynosCamera1Parameters::m_setWhiteBalanceMode(int whiteBalance) +{ + enum aa_awbmode awbMode; + + switch (whiteBalance) { + case WHITE_BALANCE_AUTO: + awbMode = AA_AWBMODE_WB_AUTO; + break; + case WHITE_BALANCE_INCANDESCENT: + awbMode = AA_AWBMODE_WB_INCANDESCENT; + break; + case WHITE_BALANCE_FLUORESCENT: + awbMode = AA_AWBMODE_WB_FLUORESCENT; + break; + case WHITE_BALANCE_DAYLIGHT: + awbMode = AA_AWBMODE_WB_DAYLIGHT; + break; + case WHITE_BALANCE_CLOUDY_DAYLIGHT: + awbMode = AA_AWBMODE_WB_CLOUDY_DAYLIGHT; + break; + case WHITE_BALANCE_WARM_FLUORESCENT: + awbMode = AA_AWBMODE_WB_WARM_FLUORESCENT; + break; + case WHITE_BALANCE_TWILIGHT: + awbMode = AA_AWBMODE_WB_TWILIGHT; + break; + case WHITE_BALANCE_SHADE: + awbMode = AA_AWBMODE_WB_SHADE;
+ break; + default: + CLOGE("ERR(%s):Unsupported value(%d)", __FUNCTION__, whiteBalance); + return BAD_VALUE; + } + + m_cameraInfo.whiteBalanceMode = whiteBalance; + setMetaCtlAwbMode(&m_metadata, awbMode); + + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + m_flashMgr->setFlashWhiteBalance(awbMode); + + return NO_ERROR; +} + +int ExynosCamera1Parameters::m_convertMetaCtlAwbMode(struct camera2_shot_ext *shot_ext) +{ + int awbMode = WHITE_BALANCE_AUTO; + + switch (shot_ext->shot.ctl.aa.awbMode) { + case AA_AWBMODE_WB_AUTO: + awbMode = WHITE_BALANCE_AUTO; + break; + case AA_AWBMODE_WB_INCANDESCENT: + awbMode = WHITE_BALANCE_INCANDESCENT; + break; + case AA_AWBMODE_WB_FLUORESCENT: + awbMode = WHITE_BALANCE_FLUORESCENT; + break; + case AA_AWBMODE_WB_DAYLIGHT: + awbMode = WHITE_BALANCE_DAYLIGHT; + break; + case AA_AWBMODE_WB_CLOUDY_DAYLIGHT: + awbMode = WHITE_BALANCE_CLOUDY_DAYLIGHT; + break; + case AA_AWBMODE_WB_WARM_FLUORESCENT: + awbMode = WHITE_BALANCE_WARM_FLUORESCENT; + break; + case AA_AWBMODE_WB_TWILIGHT: + awbMode = WHITE_BALANCE_TWILIGHT; + break; + case AA_AWBMODE_WB_SHADE: + awbMode = WHITE_BALANCE_SHADE; + break; + default: + ALOGE("ERR(%s):Unsupported awbMode(%d)", __FUNCTION__, shot_ext->shot.ctl.aa.awbMode); + return BAD_VALUE; + } + + return awbMode; +} + +int ExynosCamera1Parameters::getWhiteBalanceMode(void) +{ + return m_cameraInfo.whiteBalanceMode; +} + +int ExynosCamera1Parameters::getSupportedWhiteBalance(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->whiteBalanceList; + } +} + +int ExynosCamera1Parameters::getSupportedISO(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->isoValues; + } +} + +status_t ExynosCamera1Parameters::checkAutoWhiteBalanceLock(const CameraParameters& params) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + bool newAutoWhiteBalanceLock = false; + bool curAutoWhiteBalanceLock = false; + const char *strNewAutoWhiteBalanceLock = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK); + + if (strNewAutoWhiteBalanceLock == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewAutoWhiteBalanceLock %s", "setParameters", strNewAutoWhiteBalanceLock); + + if (!strcmp(strNewAutoWhiteBalanceLock, "true")) + newAutoWhiteBalanceLock = true; + + curAutoWhiteBalanceLock = getAutoWhiteBalanceLock(); + + if (curAutoWhiteBalanceLock != newAutoWhiteBalanceLock) { + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + m_flashMgr->setAwbLock(newAutoWhiteBalanceLock); + m_setAutoWhiteBalanceLock(newAutoWhiteBalanceLock); + m_params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, strNewAutoWhiteBalanceLock); + } + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setAutoWhiteBalanceLock(bool value) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + m_cameraInfo.autoWhiteBalanceLock = value; + setMetaCtlAwbLock(&m_metadata, value); + } +} + +bool ExynosCamera1Parameters::getAutoWhiteBalanceLock(void) +{ + return m_cameraInfo.autoWhiteBalanceLock; +} + +void ExynosCamera1Parameters::m_setFocusAreas(uint32_t numValid, ExynosRect *rects, int *weights) +{ + ExynosRect2 *rect2s = new ExynosRect2[numValid]; + + for (uint32_t i = 0; i < numValid; i++) + convertingRectToRect2(&rects[i], &rect2s[i]); + + m_setFocusAreas(numValid, rect2s, weights); + + delete [] rect2s; +} + +void ExynosCamera1Parameters::getFocusAreas(int *validFocusArea, ExynosRect2 *rect2s, int *weights) +{ + *validFocusArea = 
m_cameraInfo.numValidFocusArea; + + if (*validFocusArea != 0) { + /* Currently only supported 1 region */ + getMetaCtlAfRegion(&m_metadata, &rect2s->x1, &rect2s->y1, + &rect2s->x2, &rect2s->y2, weights); + } +} + +status_t ExynosCamera1Parameters::checkColorEffectMode(const CameraParameters& params) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return NO_ERROR; + } else { + int newEffectMode = EFFECT_NONE; + int curEffectMode = EFFECT_NONE; + const char *strNewEffectMode = params.get(CameraParameters::KEY_EFFECT); + + if (strNewEffectMode == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewEffectMode %s", "setParameters", strNewEffectMode); + + if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_NONE)) { + newEffectMode = EFFECT_NONE; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_MONO)) { + newEffectMode = EFFECT_MONO; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_NEGATIVE)) { + newEffectMode = EFFECT_NEGATIVE; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_SOLARIZE)) { + newEffectMode = EFFECT_SOLARIZE; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_SEPIA)) { + newEffectMode = EFFECT_SEPIA; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_POSTERIZE)) { + newEffectMode = EFFECT_POSTERIZE; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_WHITEBOARD)) { + newEffectMode = EFFECT_WHITEBOARD; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_BLACKBOARD)) { + newEffectMode = EFFECT_BLACKBOARD; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_AQUA)) { + newEffectMode = EFFECT_AQUA; + } else if (!strcmp(strNewEffectMode, CameraParameters::EFFECT_POINT_BLUE)) { + newEffectMode = EFFECT_BLUE; + } else if (!strcmp(strNewEffectMode, "point-red-yellow")) { + newEffectMode = EFFECT_RED_YELLOW; + } else if (!strcmp(strNewEffectMode, "vintage-cold")) { + newEffectMode = EFFECT_COLD_VINTAGE; + } else if (!strcmp(strNewEffectMode, "beauty" )) { + newEffectMode = EFFECT_BEAUTY_FACE; + } else { + CLOGE("ERR(%s):Invalid effect(%s)", __FUNCTION__, strNewEffectMode); + return BAD_VALUE; + } + + if (!isSupportedColorEffects(newEffectMode)) { + CLOGE("ERR(%s[%d]): Effect mode(%s) is not supported!", __FUNCTION__, __LINE__, strNewEffectMode); + return BAD_VALUE; + } + + curEffectMode = getColorEffectMode(); + + if (curEffectMode != newEffectMode) { + m_setColorEffectMode(newEffectMode); + m_params.set(CameraParameters::KEY_EFFECT, strNewEffectMode); + + m_frameSkipCounter.setCount(EFFECT_SKIP_FRAME); + } + return NO_ERROR; + } +} + +void ExynosCamera1Parameters::m_setColorEffectMode(int effect) +{ + aa_effect_mode_t newEffect; + + switch(effect) { + case EFFECT_NONE: + newEffect = AA_EFFECT_OFF; + break; + case EFFECT_MONO: + newEffect = AA_EFFECT_MONO; + break; + case EFFECT_NEGATIVE: + newEffect = AA_EFFECT_NEGATIVE; + break; + case EFFECT_SOLARIZE: + newEffect = AA_EFFECT_SOLARIZE; + break; + case EFFECT_SEPIA: + newEffect = AA_EFFECT_SEPIA; + break; + case EFFECT_POSTERIZE: + newEffect = AA_EFFECT_POSTERIZE; + break; + case EFFECT_WHITEBOARD: + newEffect = AA_EFFECT_WHITEBOARD; + break; + case EFFECT_BLACKBOARD: + newEffect = AA_EFFECT_BLACKBOARD; + break; + case EFFECT_AQUA: + newEffect = AA_EFFECT_AQUA; + break; + case EFFECT_RED_YELLOW: + newEffect = AA_EFFECT_RED_YELLOW_POINT; + break; + case EFFECT_BLUE: + newEffect = AA_EFFECT_BLUE_POINT; + break; + case EFFECT_WARM_VINTAGE: + newEffect = AA_EFFECT_WARM_VINTAGE; + break; + case EFFECT_COLD_VINTAGE: + newEffect = 
AA_EFFECT_COLD_VINTAGE; + break; + case EFFECT_BEAUTY_FACE: + newEffect = AA_EFFECT_BEAUTY_FACE; + break; + default: + newEffect = AA_EFFECT_OFF; + CLOGE("ERR(%s[%d]):Color Effect mode(%d) is not supported", __FUNCTION__, __LINE__, effect); + break; + } + setMetaCtlAaEffect(&m_metadata, newEffect); +} + +int ExynosCamera1Parameters::getColorEffectMode(void) +{ + aa_effect_mode_t curEffect; + int effect; + + getMetaCtlAaEffect(&m_metadata, &curEffect); + + switch(curEffect) { + case AA_EFFECT_OFF: + effect = EFFECT_NONE; + break; + case AA_EFFECT_MONO: + effect = EFFECT_MONO; + break; + case AA_EFFECT_NEGATIVE: + effect = EFFECT_NEGATIVE; + break; + case AA_EFFECT_SOLARIZE: + effect = EFFECT_SOLARIZE; + break; + case AA_EFFECT_SEPIA: + effect = EFFECT_SEPIA; + break; + case AA_EFFECT_POSTERIZE: + effect = EFFECT_POSTERIZE; + break; + case AA_EFFECT_WHITEBOARD: + effect = EFFECT_WHITEBOARD; + break; + case AA_EFFECT_BLACKBOARD: + effect = EFFECT_BLACKBOARD; + break; + case AA_EFFECT_AQUA: + effect = EFFECT_AQUA; + break; + case AA_EFFECT_RED_YELLOW_POINT: + effect = EFFECT_RED_YELLOW; + break; + case AA_EFFECT_BLUE_POINT: + effect = EFFECT_BLUE; + break; + case AA_EFFECT_WARM_VINTAGE: + effect = EFFECT_WARM_VINTAGE; + break; + case AA_EFFECT_COLD_VINTAGE: + effect = EFFECT_COLD_VINTAGE; + break; + case AA_EFFECT_BEAUTY_FACE: + effect = EFFECT_BEAUTY_FACE; + break; + default: + effect = 0; + CLOGE("ERR(%s[%d]):Color Effect mode(%d) is invalid value", __FUNCTION__, __LINE__, curEffect); + break; + } + + return effect; +} + +int ExynosCamera1Parameters::getSupportedColorEffects(void) +{ + return m_staticInfo->effectList; +} + +bool ExynosCamera1Parameters::isSupportedColorEffects(int effectMode) +{ + int ret = false; + + if (effectMode & getSupportedColorEffects()) { + return true; + } + + if (effectMode & m_staticInfo->hiddenEffectList) { + return true; + } + + return ret; +} + +status_t ExynosCamera1Parameters::checkGpsAltitude(const CameraParameters& params) +{ + double newAltitude = 0; + double curAltitude = 0; + const char *strNewGpsAltitude = params.get(CameraParameters::KEY_GPS_ALTITUDE); + + if (strNewGpsAltitude == NULL) { + m_params.remove(CameraParameters::KEY_GPS_ALTITUDE); + m_setGpsAltitude(0); + return NO_ERROR; + } + + CLOGV("DEBUG(%s):strNewGpsAltitude %s", "setParameters", strNewGpsAltitude); + + newAltitude = atof(strNewGpsAltitude); + curAltitude = getGpsAltitude(); + + if (curAltitude != newAltitude) { + m_setGpsAltitude(newAltitude); + m_params.set(CameraParameters::KEY_GPS_ALTITUDE, strNewGpsAltitude); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setGpsAltitude(double altitude) +{ + m_cameraInfo.gpsAltitude = altitude; +} + +double ExynosCamera1Parameters::getGpsAltitude(void) +{ + return m_cameraInfo.gpsAltitude; +} + +status_t ExynosCamera1Parameters::checkGpsLatitude(const CameraParameters& params) +{ + double newLatitude = 0; + double curLatitude = 0; + const char *strNewGpsLatitude = params.get(CameraParameters::KEY_GPS_LATITUDE); + + if (strNewGpsLatitude == NULL) { + m_params.remove(CameraParameters::KEY_GPS_LATITUDE); + m_setGpsLatitude(0); + return NO_ERROR; + } + + CLOGV("DEBUG(%s):strNewGpsLatitude %s", "setParameters", strNewGpsLatitude); + + newLatitude = atof(strNewGpsLatitude); + curLatitude = getGpsLatitude(); + + if (curLatitude != newLatitude) { + m_setGpsLatitude(newLatitude); + m_params.set(CameraParameters::KEY_GPS_LATITUDE, strNewGpsLatitude); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setGpsLatitude(double 
latitude) +{ + m_cameraInfo.gpsLatitude = latitude; +} + +double ExynosCamera1Parameters::getGpsLatitude(void) +{ + return m_cameraInfo.gpsLatitude; +} + +status_t ExynosCamera1Parameters::checkGpsLongitude(const CameraParameters& params) +{ + double newLongitude = 0; + double curLongitude = 0; + const char *strNewGpsLongitude = params.get(CameraParameters::KEY_GPS_LONGITUDE); + + if (strNewGpsLongitude == NULL) { + m_params.remove(CameraParameters::KEY_GPS_LONGITUDE); + m_setGpsLongitude(0); + return NO_ERROR; + } + + CLOGV("DEBUG(%s):strNewGpsLongitude %s", "setParameters", strNewGpsLongitude); + + newLongitude = atof(strNewGpsLongitude); + curLongitude = getGpsLongitude(); + + if (curLongitude != newLongitude) { + m_setGpsLongitude(newLongitude); + m_params.set(CameraParameters::KEY_GPS_LONGITUDE, strNewGpsLongitude); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setGpsLongitude(double longitude) +{ + m_cameraInfo.gpsLongitude = longitude; +} + +double ExynosCamera1Parameters::getGpsLongitude(void) +{ + return m_cameraInfo.gpsLongitude; +} + +status_t ExynosCamera1Parameters::checkGpsProcessingMethod(const CameraParameters& params) +{ + // gps processing method + const char *strNewGpsProcessingMethod = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD); + const char *strCurGpsProcessingMethod = NULL; + bool changeMethod = false; + + if (strNewGpsProcessingMethod == NULL) { + m_params.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD); + m_setGpsProcessingMethod(NULL); + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewGpsProcessingMethod %s", "setParameters", strNewGpsProcessingMethod); + + strCurGpsProcessingMethod = getGpsProcessingMethod(); + + if (strCurGpsProcessingMethod != NULL) { + int newLen = strlen(strNewGpsProcessingMethod); + int curLen = strlen(strCurGpsProcessingMethod); + + if (newLen != curLen) + changeMethod = true; + else + changeMethod = strncmp(strNewGpsProcessingMethod, strCurGpsProcessingMethod, newLen); + } + + if (changeMethod == true) { + m_setGpsProcessingMethod(strNewGpsProcessingMethod); + m_params.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, strNewGpsProcessingMethod); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setGpsProcessingMethod(const char *gpsProcessingMethod) +{ + memset(m_exifInfo.gps_processing_method, 0, sizeof(m_exifInfo.gps_processing_method)); + if (gpsProcessingMethod == NULL) + return; + + size_t len = strlen(gpsProcessingMethod); + + if (len > sizeof(m_exifInfo.gps_processing_method)) { + len = sizeof(m_exifInfo.gps_processing_method); + } + memcpy(m_exifInfo.gps_processing_method, gpsProcessingMethod, len); +} + +const char *ExynosCamera1Parameters::getGpsProcessingMethod(void) +{ + return (const char *)m_exifInfo.gps_processing_method; +} + +void ExynosCamera1Parameters::m_setExifFixedAttribute(void) +{ + char property[PROPERTY_VALUE_MAX]; + + memset(&m_exifInfo, 0, sizeof(m_exifInfo)); + + /* 2 0th IFD TIFF Tags */ + /* 3 Maker */ + property_get("ro.product.manufacturer", property, EXIF_DEF_MAKER); + strncpy((char *)m_exifInfo.maker, property, + sizeof(m_exifInfo.maker) - 1); + m_exifInfo.maker[sizeof(EXIF_DEF_MAKER) - 1] = '\0'; + + /* 3 Model */ + property_get("ro.product.model", property, EXIF_DEF_MODEL); + strncpy((char *)m_exifInfo.model, property, + sizeof(m_exifInfo.model) - 1); + m_exifInfo.model[sizeof(m_exifInfo.model) - 1] = '\0'; + /* 3 Software */ + property_get("ro.build.PDA", property, EXIF_DEF_SOFTWARE); + strncpy((char *)m_exifInfo.software, property, + 
sizeof(m_exifInfo.software) - 1); + m_exifInfo.software[sizeof(m_exifInfo.software) - 1] = '\0'; + + /* 3 YCbCr Positioning */ + m_exifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING; + + /* 2 0th IFD Exif Private Tags */ + /* 3 Exposure Program */ + m_exifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM; + /* 3 Exif Version */ + memcpy(m_exifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(m_exifInfo.exif_version)); + + if (getHalVersion() == IS_HAL_VER_3_2) { + /* 3 Aperture */ + m_exifInfo.aperture.num = (int) m_staticInfo->aperture * COMMON_DENOMINATOR; + m_exifInfo.aperture.den = COMMON_DENOMINATOR; + /* 3 F Number */ + m_exifInfo.fnumber.num = m_staticInfo->fNumber * COMMON_DENOMINATOR; + m_exifInfo.fnumber.den = COMMON_DENOMINATOR; + /* 3 Maximum lens aperture */ + m_exifInfo.max_aperture.num = m_staticInfo->aperture * COMMON_DENOMINATOR; + m_exifInfo.max_aperture.den = COMMON_DENOMINATOR; + /* 3 Lens Focal Length */ + m_exifInfo.focal_length.num = m_staticInfo->focalLength * COMMON_DENOMINATOR; + m_exifInfo.focal_length.den = COMMON_DENOMINATOR; + } else { + m_exifInfo.aperture.num = m_staticInfo->apertureNum; + m_exifInfo.aperture.den = m_staticInfo->apertureDen; + /* 3 F Number */ + m_exifInfo.fnumber.num = m_staticInfo->fNumberNum; + m_exifInfo.fnumber.den = m_staticInfo->fNumberDen; + /* 3 Maximum lens aperture */ + m_exifInfo.max_aperture.num = m_staticInfo->apertureNum; + m_exifInfo.max_aperture.den = m_staticInfo->apertureDen; + /* 3 Lens Focal Length */ + m_exifInfo.focal_length.num = m_staticInfo->focalLengthNum; + m_exifInfo.focal_length.den = m_staticInfo->focalLengthDen; + } + + /* 3 Maker note */ + if (m_exifInfo.maker_note) + delete[] m_exifInfo.maker_note; + + m_exifInfo.maker_note_size = 98; + m_exifInfo.maker_note = new unsigned char[m_exifInfo.maker_note_size]; + memset((void *)m_exifInfo.maker_note, 0, m_exifInfo.maker_note_size); + /* 3 User Comments */ + if (m_exifInfo.user_comment) + delete[] m_exifInfo.user_comment; + + m_exifInfo.user_comment_size = sizeof("user comment"); + m_exifInfo.user_comment = new unsigned char[m_exifInfo.user_comment_size + 8]; + memset((void *)m_exifInfo.user_comment, 0, m_exifInfo.user_comment_size + 8); + + /* 3 Color Space information */ + m_exifInfo.color_space = EXIF_DEF_COLOR_SPACE; + /* 3 interoperability */ + m_exifInfo.interoperability_index = EXIF_DEF_INTEROPERABILITY; + /* 3 Exposure Mode */ + m_exifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE; + + /* 2 0th IFD GPS Info Tags */ + unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 }; + memcpy(m_exifInfo.gps_version_id, gps_version, sizeof(gps_version)); + + /* 2 1st IFD TIFF Tags */ + m_exifInfo.compression_scheme = EXIF_DEF_COMPRESSION; + m_exifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM; + m_exifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN; + m_exifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM; + m_exifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN; + m_exifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT; +} + +void ExynosCamera1Parameters::setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *pictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + m_setExifChangedAttribute(exifInfo, pictureRect, thumbnailRect, shot); + } else { + m_setExifChangedAttribute(exifInfo, pictureRect, thumbnailRect, &(shot->dm), &(shot->udm)); + } +} + +debug_attribute_t *ExynosCamera1Parameters::getDebugAttribute(void) +{ + return &mDebugInfo; +} + +status_t
ExynosCamera1Parameters::getFixedExifInfo(exif_attribute_t *exifInfo) +{ + if (exifInfo == NULL) { + CLOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(exifInfo, &m_exifInfo, sizeof(exif_attribute_t)); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::checkGpsTimeStamp(const CameraParameters& params) +{ + long newGpsTimeStamp = -1; + long curGpsTimeStamp = -1; + const char *strNewGpsTimeStamp = params.get(CameraParameters::KEY_GPS_TIMESTAMP); + + if (strNewGpsTimeStamp == NULL) { + m_params.remove(CameraParameters::KEY_GPS_TIMESTAMP); + m_setGpsTimeStamp(0); + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewGpsTimeStamp %s", "setParameters", strNewGpsTimeStamp); + + newGpsTimeStamp = atol(strNewGpsTimeStamp); + + curGpsTimeStamp = getGpsTimeStamp(); + + if (curGpsTimeStamp != newGpsTimeStamp) { + m_setGpsTimeStamp(newGpsTimeStamp); + m_params.set(CameraParameters::KEY_GPS_TIMESTAMP, strNewGpsTimeStamp); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setGpsTimeStamp(long timeStamp) +{ + m_cameraInfo.gpsTimeStamp = timeStamp; +} + +long ExynosCamera1Parameters::getGpsTimeStamp(void) +{ + return m_cameraInfo.gpsTimeStamp; +} + +status_t ExynosCamera1Parameters::checkBrightness(const CameraParameters& params) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return NO_ERROR; + } else { + int maxBrightness = params.getInt("brightness-max"); + int minBrightness = params.getInt("brightness-min"); + int newBrightness = params.getInt("brightness"); + int curBrightness = -1; + int curEffectMode = EFFECT_NONE; + + CLOGD("DEBUG(%s):newBrightness %d", "setParameters", newBrightness); + + if (newBrightness < minBrightness || newBrightness > maxBrightness) { + CLOGE("ERR(%s): Invalid Brightness (Min: %d, Max: %d, Value: %d)", __FUNCTION__, minBrightness, maxBrightness, newBrightness); + return BAD_VALUE; + } + + curEffectMode = getColorEffectMode(); + if(curEffectMode == EFFECT_BEAUTY_FACE) { + return NO_ERROR; + } + + curBrightness = getBrightness(); + + if (curBrightness != newBrightness) { + m_setBrightness(newBrightness); + m_params.set("brightness", newBrightness); + } + return NO_ERROR; + } +} + +/* F/W's middle value is 3, and step is -2, -1, 0, 1, 2 */ +void ExynosCamera1Parameters::m_setBrightness(int brightness) +{ + setMetaCtlBrightness(&m_metadata, brightness + IS_BRIGHTNESS_DEFAULT + FW_CUSTOM_OFFSET); +} + +int ExynosCamera1Parameters::getBrightness(void) +{ + int32_t brightness = 0; + + getMetaCtlBrightness(&m_metadata, &brightness); + return brightness - IS_BRIGHTNESS_DEFAULT - FW_CUSTOM_OFFSET; +} + +status_t ExynosCamera1Parameters::checkSaturation(const CameraParameters& params) +{ + int maxSaturation = params.getInt("saturation-max"); + int minSaturation = params.getInt("saturation-min"); + int newSaturation = params.getInt("saturation"); + int curSaturation = -1; + + CLOGD("DEBUG(%s):newSaturation %d", "setParameters", newSaturation); + + if (newSaturation < minSaturation || newSaturation > maxSaturation) { + CLOGE("ERR(%s): Invalid Saturation (Min: %d, Max: %d, Value: %d)", __FUNCTION__, minSaturation, maxSaturation, newSaturation); + return BAD_VALUE; + } + + curSaturation = getSaturation(); +#ifdef CAMERA_GED_FEATURE + if(getSceneMode() == SCENE_MODE_AUTO) { + if (curSaturation != newSaturation) { + m_setSaturation(newSaturation); + m_params.set("saturation", newSaturation); + } + } else { + m_params.set("saturation", "auto"); + } +#else + if (curSaturation != newSaturation) { + m_setSaturation(newSaturation); 
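+ /* mirror the value just applied to the capture metadata into the cached m_params */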
+ m_params.set("saturation", newSaturation); + } +#endif + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setSaturation(int saturation) +{ + setMetaCtlSaturation(&m_metadata, saturation + IS_SATURATION_DEFAULT + FW_CUSTOM_OFFSET); +} + +int ExynosCamera1Parameters::getSaturation(void) +{ + int32_t saturation = 0; + + getMetaCtlSaturation(&m_metadata, &saturation); + return saturation - IS_SATURATION_DEFAULT - FW_CUSTOM_OFFSET; +} + +status_t ExynosCamera1Parameters::checkSharpness(const CameraParameters& params) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return NO_ERROR; + } else { + int maxSharpness = params.getInt("sharpness-max"); + int minSharpness = params.getInt("sharpness-min"); + int newSharpness = params.getInt("sharpness"); + int curSharpness = -1; + int curEffectMode = EFFECT_NONE; + + CLOGD("DEBUG(%s):newSharpness %d", "setParameters", newSharpness); + + if (newSharpness < minSharpness || newSharpness > maxSharpness) { + CLOGE("ERR(%s): Invalid Sharpness (Min: %d, Max: %d, Value: %d)", __FUNCTION__, minSharpness, maxSharpness, newSharpness); + return BAD_VALUE; + } + + curEffectMode = getColorEffectMode(); + if(curEffectMode == EFFECT_BEAUTY_FACE) { + return NO_ERROR; + } + curSharpness = getSharpness(); + + if (curSharpness != newSharpness) { + m_setSharpness(newSharpness); + m_params.set("sharpness", newSharpness); + } + return NO_ERROR; + } +} + +void ExynosCamera1Parameters::m_setSharpness(int sharpness) +{ +#ifdef USE_NEW_NOISE_REDUCTION_ALGORITHM + enum processing_mode default_edge_mode = PROCESSING_MODE_FAST; + enum processing_mode default_noise_mode = PROCESSING_MODE_FAST; + int default_edge_strength = 5; + int default_noise_strength = 5; +#else + enum processing_mode default_edge_mode = PROCESSING_MODE_OFF; + enum processing_mode default_noise_mode = PROCESSING_MODE_OFF; + int default_edge_strength = 0; + int default_noise_strength = 0; +#endif + + int newSharpness = sharpness + IS_SHARPNESS_DEFAULT; + enum processing_mode edge_mode = default_edge_mode; + enum processing_mode noise_mode = default_noise_mode; + int edge_strength = default_edge_strength; + int noise_strength = default_noise_strength; + + switch (newSharpness) { + case IS_SHARPNESS_MINUS_2: + edge_mode = default_edge_mode; + noise_mode = default_noise_mode; + edge_strength = default_edge_strength; + noise_strength = 10; + break; + case IS_SHARPNESS_MINUS_1: + edge_mode = default_edge_mode; + noise_mode = default_noise_mode; + edge_strength = default_edge_strength; + noise_strength = (10 + default_noise_strength + 1) / 2; + break; + case IS_SHARPNESS_DEFAULT: + edge_mode = default_edge_mode; + noise_mode = default_noise_mode; + edge_strength = default_edge_strength; + noise_strength = default_noise_strength; + break; + case IS_SHARPNESS_PLUS_1: + edge_mode = default_edge_mode; + noise_mode = default_noise_mode; + edge_strength = (10 + default_edge_strength + 1) / 2; + noise_strength = default_noise_strength; + break; + case IS_SHARPNESS_PLUS_2: + edge_mode = default_edge_mode; + noise_mode = default_noise_mode; + edge_strength = 10; + noise_strength = default_noise_strength; + break; + default: + break; + } + + CLOGD("DEBUG(%s):newSharpness %d edge_mode(%d),st(%d), noise(%d),st(%d)", + __FUNCTION__, newSharpness, edge_mode, edge_strength, noise_mode, noise_strength); + + setMetaCtlSharpness(&m_metadata, edge_mode, edge_strength, noise_mode, noise_strength); +} + +int ExynosCamera1Parameters::getSharpness(void) +{ +#ifdef USE_NEW_NOISE_REDUCTION_ALGORITHM + enum processing_mode 
default_edge_mode = PROCESSING_MODE_FAST; + enum processing_mode default_noise_mode = PROCESSING_MODE_FAST; + int default_edge_sharpness = 5; + int default_noise_sharpness = 5; +#else + enum processing_mode default_edge_mode = PROCESSING_MODE_OFF; + enum processing_mode default_noise_mode = PROCESSING_MODE_OFF; + int default_edge_sharpness = 0; + int default_noise_sharpness = 0; +#endif + + int32_t edge_sharpness = default_edge_sharpness; + int32_t noise_sharpness = default_noise_sharpness; + int32_t sharpness = 0; + enum processing_mode edge_mode = default_edge_mode; + enum processing_mode noise_mode = default_noise_mode; + + getMetaCtlSharpness(&m_metadata, &edge_mode, &edge_sharpness, &noise_mode, &noise_sharpness); + + if(noise_sharpness == 10 && edge_sharpness == default_edge_sharpness) { + sharpness = IS_SHARPNESS_MINUS_2; + } else if(noise_sharpness == (10 + default_noise_sharpness + 1) / 2 + && edge_sharpness == default_edge_sharpness) { + sharpness = IS_SHARPNESS_MINUS_1; + } else if(noise_sharpness == default_noise_sharpness + && edge_sharpness == default_edge_sharpness) { + sharpness = IS_SHARPNESS_DEFAULT; + } else if(noise_sharpness == default_noise_sharpness + && edge_sharpness == (10 + default_edge_sharpness + 1) / 2) { + sharpness = IS_SHARPNESS_PLUS_1; + } else if(noise_sharpness == default_noise_sharpness + && edge_sharpness == 10) { + sharpness = IS_SHARPNESS_PLUS_2; + } else { + sharpness = IS_SHARPNESS_DEFAULT; + } + + return sharpness - IS_SHARPNESS_DEFAULT; +} + +status_t ExynosCamera1Parameters::checkHue(const CameraParameters& params) +{ + int maxHue = params.getInt("hue-max"); + int minHue = params.getInt("hue-min"); + int newHue = params.getInt("hue"); + int curHue = -1; + + CLOGD("DEBUG(%s):newHue %d", "setParameters", newHue); + + if (newHue < minHue || newHue > maxHue) { + CLOGE("ERR(%s): Invalid Hue (Min: %d, Max: %d, Value: %d)", __FUNCTION__, minHue, maxHue, newHue); + return BAD_VALUE; + } + + curHue = getHue(); + + if (curHue != newHue) { + m_setHue(newHue); + m_params.set("hue", newHue); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setHue(int hue) +{ + setMetaCtlHue(&m_metadata, hue + IS_HUE_DEFAULT + FW_CUSTOM_OFFSET); +} + +int ExynosCamera1Parameters::getHue(void) +{ + int32_t hue = 0; + + getMetaCtlHue(&m_metadata, &hue); + return hue - IS_HUE_DEFAULT - FW_CUSTOM_OFFSET; +} + +status_t ExynosCamera1Parameters::checkIso(const CameraParameters& params) +{ + uint32_t newISO = 0; + uint32_t curISO = 0; + const char *strNewISO = params.get("iso"); + + if (strNewISO == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewISO %s", "setParameters", strNewISO); + + if (!strcmp(strNewISO, "auto")) { + newISO = 0; + } else { + newISO = (uint32_t)atoi(strNewISO); + if (newISO == 0) { + CLOGE("ERR(%s):Invalid iso value(%s)", __FUNCTION__, strNewISO); + return BAD_VALUE; + } + } + + curISO = getIso(); +#ifdef CAMERA_GED_FEATURE + if(getSceneMode() == SCENE_MODE_AUTO) { + if (curISO != newISO) { + m_setIso(newISO); + m_params.set("iso", strNewISO); + } + } else { + m_params.set("iso", "auto"); + } +#else + if (curISO != newISO) { + m_setIso(newISO); + m_params.set("iso", strNewISO); + } +#endif + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setIso(uint32_t iso) +{ + enum aa_isomode mode = AA_ISOMODE_AUTO; + + if (iso == 0 ) + mode = AA_ISOMODE_AUTO; + else + mode = AA_ISOMODE_MANUAL; + + setMetaCtlIso(&m_metadata, mode, iso); +} + +uint32_t ExynosCamera1Parameters::getIso(void) +{ + enum aa_isomode mode = AA_ISOMODE_AUTO; + uint32_t 
iso = 0; + + getMetaCtlIso(&m_metadata, &mode, &iso); + + return iso; +} + +uint64_t ExynosCamera1Parameters::getCaptureExposureTime(void) +{ + return m_exposureTimeCapture; +} + +status_t ExynosCamera1Parameters::checkContrast(const CameraParameters& params) +{ + uint32_t newContrast = 0; + uint32_t curContrast = 0; + const char *strNewContrast = params.get("contrast"); + + if (strNewContrast == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewContrast %s", "setParameters", strNewContrast); + + if (!strcmp(strNewContrast, "auto")) + newContrast = IS_CONTRAST_DEFAULT; + else if (!strcmp(strNewContrast, "-2")) + newContrast = IS_CONTRAST_MINUS_2; + else if (!strcmp(strNewContrast, "-1")) + newContrast = IS_CONTRAST_MINUS_1; + else if (!strcmp(strNewContrast, "0")) + newContrast = IS_CONTRAST_DEFAULT; + else if (!strcmp(strNewContrast, "1")) + newContrast = IS_CONTRAST_PLUS_1; + else if (!strcmp(strNewContrast, "2")) + newContrast = IS_CONTRAST_PLUS_2; + else { + CLOGE("ERR(%s):Invalid contrast value(%s)", __FUNCTION__, strNewContrast); + return BAD_VALUE; + } + + curContrast = getContrast(); + + if (curContrast != newContrast) { + m_setContrast(newContrast); + m_params.set("contrast", strNewContrast); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setContrast(uint32_t contrast) +{ + setMetaCtlContrast(&m_metadata, contrast); +} + +uint32_t ExynosCamera1Parameters::getContrast(void) +{ + uint32_t contrast = 0; + getMetaCtlContrast(&m_metadata, &contrast); + return contrast; +} + +status_t ExynosCamera1Parameters::checkHdrMode(const CameraParameters& params) +{ + int newHDR = params.getInt("hdr-mode"); + bool curHDR = -1; + + if (newHDR < 0) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newHDR %d", "setParameters", newHDR); + + curHDR = getHdrMode(); + + if (curHDR != (bool)newHDR) { + m_setHdrMode((bool)newHDR); + m_params.set("hdr-mode", newHDR); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setHdrMode(bool hdr) +{ + m_cameraInfo.hdrMode = hdr; + +#ifdef CAMERA_GED_FEATURE + if (hdr == true) + m_setShotMode(SHOT_MODE_RICH_TONE); + else + m_setShotMode(SHOT_MODE_NORMAL); +#endif + + m_activityControl->setHdrMode(hdr); +} + +bool ExynosCamera1Parameters::getHdrMode(void) +{ + return m_cameraInfo.hdrMode; +} + +int ExynosCamera1Parameters::getPreviewBufferCount(void) +{ + CLOGV("DEBUG(%s):getPreviewBufferCount %d", "setParameters", m_previewBufferCount); + + return m_previewBufferCount; +} + +void ExynosCamera1Parameters::setPreviewBufferCount(int previewBufferCount) +{ + m_previewBufferCount = previewBufferCount; + + CLOGV("DEBUG(%s):setPreviewBufferCount %d", "setParameters", m_previewBufferCount); + + return; +} + +status_t ExynosCamera1Parameters::checkAntiShake(const CameraParameters& params) +{ + int newAntiShake = params.getInt("anti-shake"); + bool curAntiShake = false; + bool toggle = false; + int curShotMode = getShotMode(); + + if (curShotMode != SHOT_MODE_AUTO) + return NO_ERROR; + + if (newAntiShake < 0) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newAntiShake %d", "setParameters", newAntiShake); + + if (newAntiShake == 1) + toggle = true; + + curAntiShake = getAntiShake(); + + if (curAntiShake != toggle) { + m_setAntiShake(toggle); + m_params.set("anti-shake", newAntiShake); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setAntiShake(bool toggle) +{ + enum aa_mode mode = AA_CONTROL_AUTO; + enum aa_scene_mode sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + + if (toggle == true) { + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode 
= AA_SCENE_MODE_ANTISHAKE; + } + + setMetaCtlSceneMode(&m_metadata, mode, sceneMode); + m_cameraInfo.antiShake = toggle; +} + +bool ExynosCamera1Parameters::getAntiShake(void) +{ + return m_cameraInfo.antiShake; +} + +status_t ExynosCamera1Parameters::checkScalableSensorMode(const CameraParameters& params) +{ + bool needScaleMode = false; + bool curScaleMode = false; + int newScaleMode = params.getInt("scale_mode"); + + if (newScaleMode < 0) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newScaleMode %d", "setParameters", newScaleMode); + + if (isScalableSensorSupported() == true) { + needScaleMode = m_adjustScalableSensorMode(newScaleMode); + curScaleMode = getScalableSensorMode(); + + if (curScaleMode != needScaleMode) { + setScalableSensorMode(needScaleMode); + m_params.set("scale_mode", newScaleMode); + } + +// updateHwSensorSize(); + } + + return NO_ERROR; +} + +bool ExynosCamera1Parameters::isScalableSensorSupported(void) +{ + return m_staticInfo->scalableSensorSupport; +} + +bool ExynosCamera1Parameters::m_adjustScalableSensorMode(const int scaleMode) +{ + bool adjustedScaleMode = false; + int pictureW = 0; + int pictureH = 0; + float pictureRatio = 0; + uint32_t minFps = 0; + uint32_t maxFps = 0; + + /* If scale_mode is 1 or dual camera, scalable sensor turn on */ + if (scaleMode == 1) + adjustedScaleMode = true; + + if (getDualMode() == true) + adjustedScaleMode = true; + + /* + * scalable sensor only support 24 fps for 4:3 - picture size + * scalable sensor only support 15, 24, 30 fps for 16:9 - picture size + */ + getPreviewFpsRange(&minFps, &maxFps); + getPictureSize(&pictureW, &pictureH); + + pictureRatio = ROUND_OFF(((float)pictureW / (float)pictureH), 2); + + if (pictureRatio == 1.33f) { /* 4:3 */ + if (maxFps != 24) + adjustedScaleMode = false; + } else if (pictureRatio == 1.77f) { /* 16:9 */ + if ((maxFps != 15) && (maxFps != 24) && (maxFps != 30)) + adjustedScaleMode = false; + } else { + adjustedScaleMode = false; + } + + if (scaleMode == 1 && adjustedScaleMode == false) { + CLOGW("WARN(%s):pictureRatio(%f, %d, %d) fps(%d, %d) is not proper for scalable", + __FUNCTION__, pictureRatio, pictureW, pictureH, minFps, maxFps); + } + + return adjustedScaleMode; +} + +void ExynosCamera1Parameters::setScalableSensorMode(bool scaleMode) +{ + m_cameraInfo.scalableSensorMode = scaleMode; +} + +bool ExynosCamera1Parameters::getScalableSensorMode(void) +{ + return m_cameraInfo.scalableSensorMode; +} + +void ExynosCamera1Parameters::m_getScalableSensorSize(int *newSensorW, int *newSensorH) +{ + int previewW = 0; + int previewH = 0; + + *newSensorW = 1920; + *newSensorH = 1080; + + /* default scalable sensor size is 1920x1080(16:9) */ + getPreviewSize(&previewW, &previewH); + + /* when preview size is 1440x1080(4:3), return sensor size(1920x1440) */ + /* if (previewW == 1440 && previewH == 1080) { */ + if ((previewW * 3 / 4) == previewH) { + *newSensorW = 1920; + *newSensorH = 1440; + } +} + +bool ExynosCamera1Parameters::getZoomSupported(void) +{ + return m_staticInfo->zoomSupport; +} + +bool ExynosCamera1Parameters::getSmoothZoomSupported(void) +{ + return m_staticInfo->smoothZoomSupport; +} + +void ExynosCamera1Parameters::checkHorizontalViewAngle(void) +{ + m_params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle()); +} + +void ExynosCamera1Parameters::setHorizontalViewAngle(int pictureW, int pictureH) +{ + double pi_camera = 3.1415926f; + double distance; + double ratio; + double hViewAngle_half_rad = pi_camera / 180 * 
(double)m_staticInfo->horizontalViewAngle[SIZE_RATIO_16_9] / 2; + + distance = ((double)m_staticInfo->maxSensorW / (double)m_staticInfo->maxSensorH * 9 / 2) + / tan(hViewAngle_half_rad); + ratio = (double)pictureW / (double)pictureH; + + m_calculatedHorizontalViewAngle = (float)(atan(ratio * 9 / 2 / distance) * 2 * 180 / pi_camera); +} + +float ExynosCamera1Parameters::getHorizontalViewAngle(void) +{ + int right_ratio = 177; + + if ((int)(m_staticInfo->maxSensorW * 100 / m_staticInfo->maxSensorH) == right_ratio) { + return m_calculatedHorizontalViewAngle; + } else { + return m_staticInfo->horizontalViewAngle[m_cameraInfo.pictureSizeRatioId]; + } +} + +float ExynosCamera1Parameters::getVerticalViewAngle(void) +{ + return m_staticInfo->verticalViewAngle; +} + +void ExynosCamera1Parameters::getFnumber(int *num, int *den) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + *num = m_staticInfo->fNumber * COMMON_DENOMINATOR; + *den = COMMON_DENOMINATOR; + } else { + *num = m_staticInfo->fNumberNum; + *den = m_staticInfo->fNumberDen; + } +} + +void ExynosCamera1Parameters::getApertureValue(int *num, int *den) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + *num = m_staticInfo->aperture * COMMON_DENOMINATOR; + *den = COMMON_DENOMINATOR; + } else { + *num = m_staticInfo->apertureNum; + *den = m_staticInfo->apertureDen; + } +} + +int ExynosCamera1Parameters::getFocalLengthIn35mmFilm(void) +{ + return m_staticInfo->focalLengthIn35mmLength; +} + +void ExynosCamera1Parameters::getFocalLength(int *num, int *den) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + *num = m_staticInfo->focalLength * COMMON_DENOMINATOR; + *den = COMMON_DENOMINATOR; + } else { + *num = m_staticInfo->focalLengthNum; + *den = m_staticInfo->focalLengthDen; + } +} + +void ExynosCamera1Parameters::getFocusDistances(int *num, int *den) +{ + *num = m_staticInfo->focusDistanceNum; + *den = m_staticInfo->focusDistanceDen; +} + +int ExynosCamera1Parameters::getMinExposureCompensation(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->exposureCompensationRange[MIN]; + } else { + return m_staticInfo->minExposureCompensation; + } +} + +int ExynosCamera1Parameters::getMaxExposureCompensation(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->exposureCompensationRange[MAX]; + } else { + return m_staticInfo->maxExposureCompensation; + } +} + +float ExynosCamera1Parameters::getExposureCompensationStep(void) +{ + return m_staticInfo->exposureCompensationStep; +} + +int ExynosCamera1Parameters::getMaxNumDetectedFaces(void) +{ + return m_staticInfo->maxNumDetectedFaces; +} + +uint32_t ExynosCamera1Parameters::getMaxNumFocusAreas(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->max3aRegions[AF]; + } else { + return m_staticInfo->maxNumFocusAreas; + } +} + +uint32_t ExynosCamera1Parameters::getMaxNumMeteringAreas(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->max3aRegions[AE]; + } else { + return m_staticInfo->maxNumMeteringAreas; + } +} + +int ExynosCamera1Parameters::getMaxZoomLevel(void) +{ + int zoomLevel = 0; + int samsungCamera = getSamsungCamera(); + + if (samsungCamera || m_cameraId == CAMERA_ID_FRONT) { + zoomLevel = m_staticInfo->maxZoomLevel; + } else { + zoomLevel = m_staticInfo->maxBasicZoomLevel; + } + return zoomLevel; +} + +float ExynosCamera1Parameters::getMaxZoomRatio(void) +{ + return (float)m_staticInfo->maxZoomRatio; +} + +float ExynosCamera1Parameters::getZoomRatio(int zoomLevel) +{ + float zoomRatio = 1.00f; + if 
(getZoomSupported() == true) + zoomRatio = (float)m_staticInfo->zoomRatioList[zoomLevel]; + else + zoomRatio = 1000.00f; + + return zoomRatio; +} + +bool ExynosCamera1Parameters::getVideoSnapshotSupported(void) +{ + return m_staticInfo->videoSnapshotSupport; +} + +bool ExynosCamera1Parameters::getVideoStabilizationSupported(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + bool supported = false; + for (size_t i = 0; i < m_staticInfo->videoStabilizationModesLength; i++) { + if (m_staticInfo->videoStabilizationModes[i] + == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) { + supported = true; + break; + } + } + return supported; + } else { + return m_staticInfo->videoStabilizationSupport; + } +} + +bool ExynosCamera1Parameters::getAutoWhiteBalanceLockSupported(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return true; + } else { + return m_staticInfo->autoWhiteBalanceLockSupport; + } +} + +bool ExynosCamera1Parameters::getAutoExposureLockSupported(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return true; + } else { + return m_staticInfo->autoExposureLockSupport; + } +} + +void ExynosCamera1Parameters::enableMsgType(int32_t msgType) +{ + Mutex::Autolock lock(m_msgLock); + m_enabledMsgType |= msgType; +} + +void ExynosCamera1Parameters::disableMsgType(int32_t msgType) +{ + Mutex::Autolock lock(m_msgLock); + m_enabledMsgType &= ~msgType; +} + +bool ExynosCamera1Parameters::msgTypeEnabled(int32_t msgType) +{ + Mutex::Autolock lock(m_msgLock); + return (m_enabledMsgType & msgType); +} + +status_t ExynosCamera1Parameters::setFrameSkipCount(int count) +{ + m_frameSkipCounter.setCount(count); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::getFrameSkipCount(int *count) +{ + *count = m_frameSkipCounter.getCount(); + m_frameSkipCounter.decCount(); + + return NO_ERROR; +} + +int ExynosCamera1Parameters::getFrameSkipCount(void) +{ + return m_frameSkipCounter.getCount(); +} + +void ExynosCamera1Parameters::setIsFirstStartFlag(bool flag) +{ + m_flagFirstStart = flag; +} + +int ExynosCamera1Parameters::getIsFirstStartFlag(void) +{ + return m_flagFirstStart; +} + +ExynosCameraActivityControl *ExynosCamera1Parameters::getActivityControl(void) +{ + return m_activityControl; +} + +status_t ExynosCamera1Parameters::setAutoFocusMacroPosition(int autoFocusMacroPosition) +{ + int oldAutoFocusMacroPosition = m_cameraInfo.autoFocusMacroPosition; + m_cameraInfo.autoFocusMacroPosition = autoFocusMacroPosition; + + m_activityControl->setAutoFocusMacroPosition(oldAutoFocusMacroPosition, autoFocusMacroPosition); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::setDisEnable(bool enable) +{ + setMetaBypassDis(&m_metadata, enable == true ? 0 : 1); + return NO_ERROR; +} + +bool ExynosCamera1Parameters::getDisEnable(void) +{ + return m_metadata.dis_bypass; +} + +status_t ExynosCamera1Parameters::setDrcEnable(bool enable) +{ + setMetaBypassDrc(&m_metadata, enable == true ? 0 : 1); + return NO_ERROR; +} + +bool ExynosCamera1Parameters::getDrcEnable(void) +{ + return m_metadata.drc_bypass; +} + +status_t ExynosCamera1Parameters::setDnrEnable(bool enable) +{ + setMetaBypassDnr(&m_metadata, enable == true ? 0 : 1); + return NO_ERROR; +} + +bool ExynosCamera1Parameters::getDnrEnable(void) +{ + return m_metadata.dnr_bypass; +} + +status_t ExynosCamera1Parameters::setFdEnable(bool enable) +{ + setMetaBypassFd(&m_metadata, enable == true ? 
0 : 1); + return NO_ERROR; +} + +bool ExynosCamera1Parameters::getFdEnable(void) +{ + return m_metadata.fd_bypass; +} + +status_t ExynosCamera1Parameters::setFdMode(enum facedetect_mode mode) +{ + setMetaCtlFdMode(&m_metadata, mode); + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::getFdMeta(bool reprocessing, void *buf) +{ + if (buf == NULL) { + CLOGE("ERR: buf is NULL"); + return BAD_VALUE; + } + + struct camera2_shot_ext *meta_shot_ext = (struct camera2_shot_ext *)buf; + + /* disable face detection for reprocessing frame */ + if (reprocessing) { + meta_shot_ext->fd_bypass = 1; + meta_shot_ext->shot.ctl.stats.faceDetectMode = ::FACEDETECT_MODE_OFF; + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::setFlipHorizontal(int val) +{ + if (val < 0) { + CLOGE("ERR(%s[%d]): setFlipHorizontal ignored, invalid value(%d)", + __FUNCTION__, __LINE__, val); + return; + } + + m_cameraInfo.flipHorizontal = val; +} + +int ExynosCamera1Parameters::getFlipHorizontal(void) +{ +#if defined(USE_CAPTURE_FRAME_FRONT_WIDESELFIE) + return m_cameraInfo.flipHorizontal; +#else + if (m_cameraInfo.shotMode == SHOT_MODE_FRONT_PANORAMA) { + return 0; + } else { + return m_cameraInfo.flipHorizontal; + } +#endif +} + +void ExynosCamera1Parameters::setFlipVertical(int val) +{ + if (val < 0) { + CLOGE("ERR(%s[%d]): setFlipVertical ignored, invalid value(%d)", + __FUNCTION__, __LINE__, val); + return; + } + + m_cameraInfo.flipVertical = val; +} + +int ExynosCamera1Parameters::getFlipVertical(void) +{ + return m_cameraInfo.flipVertical; +} + +bool ExynosCamera1Parameters::getCallbackNeedCSC(void) +{ + bool ret = true; + + int previewW = 0, previewH = 0; + int hwPreviewW = 0, hwPreviewH = 0; + int previewFormat = getPreviewFormat(); + + getPreviewSize(&previewW, &previewH); + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + if ((previewW == hwPreviewW)&& + (previewH == hwPreviewH)&& + (previewFormat == V4L2_PIX_FMT_NV21)) { + ret = false; + } + return ret; +} + +bool ExynosCamera1Parameters::getCallbackNeedCopy2Rendering(void) +{ + bool ret = false; + int previewW = 0, previewH = 0; + + getPreviewSize(&previewW, &previewH); + if (previewW * previewH <= 1920*1080) { + ret = true; + } + return ret; +} + +#ifdef RAWDUMP_CAPTURE +void ExynosCamera1Parameters::setRawCaptureModeOn(bool enable) +{ + m_flagRawCaptureOn = enable; +} + +bool ExynosCamera1Parameters::getRawCaptureModeOn(void) +{ + return m_flagRawCaptureOn; +} +#endif + +bool ExynosCamera1Parameters::setDeviceOrientation(int orientation) +{ + if (orientation < 0 || orientation % 90 != 0) { + CLOGE("ERR(%s[%d]):Invalid orientation (%d)", + __FUNCTION__, __LINE__, orientation); + return false; + } + + m_cameraInfo.deviceOrientation = orientation; + + /* fd orientation need to be calibrated, according to f/w spec */ + int hwRotation = BACK_ROTATION; + +#if 0 + if (this->getCameraId() == CAMERA_ID_FRONT) + hwRotation = FRONT_ROTATION; +#endif + + int fdOrientation = (orientation + hwRotation) % 360; + + CLOGD("DEBUG(%s[%d]):orientation(%d), hwRotation(%d), fdOrientation(%d)", + __FUNCTION__, __LINE__, orientation, hwRotation, fdOrientation); + + return true; +} + +int ExynosCamera1Parameters::getDeviceOrientation(void) +{ + return m_cameraInfo.deviceOrientation; +} + +int ExynosCamera1Parameters::getFdOrientation(void) +{ + /* HACK: Calibrate FRONT FD orientation */ + if (getCameraId() == CAMERA_ID_FRONT) + return (m_cameraInfo.deviceOrientation + FRONT_ROTATION + 180) % 360; + else + return (m_cameraInfo.deviceOrientation + BACK_ROTATION) % 360; +} + +void 
ExynosCamera1Parameters::getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange) +{ + if (flagReprocessing == true) { + *setfile = m_setfileReprocessing; + *yuvRange = m_yuvRangeReprocessing; + } else { + *setfile = m_setfile; + *yuvRange = m_yuvRange; + } +} + +status_t ExynosCamera1Parameters::checkSetfileYuvRange(void) +{ + int oldSetFile = m_setfile; + int oldYUVRange = m_yuvRange; + + /* general */ + m_getSetfileYuvRange(false, &m_setfile, &m_yuvRange); + + /* reprocessing */ + m_getSetfileYuvRange(true, &m_setfileReprocessing, &m_yuvRangeReprocessing); + + CLOGD("DEBUG(%s[%d]):m_cameraId(%d) : general[setfile(%d) YUV range(%d)] : reprocessing[setfile(%d) YUV range(%d)]", + __FUNCTION__, __LINE__, + m_cameraId, + m_setfile, m_yuvRange, + m_setfileReprocessing, m_yuvRangeReprocessing); + + return NO_ERROR; +} + +void ExynosCamera1Parameters::setSetfileYuvRange(void) +{ + /* reprocessing */ + m_getSetfileYuvRange(true, &m_setfileReprocessing, &m_yuvRangeReprocessing); + + ALOGD("DEBUG(%s[%d]):m_cameraId(%d) : general[setfile(%d) YUV range(%d)] : reprocessing[setfile(%d) YUV range(%d)]", + __FUNCTION__, __LINE__, + m_cameraId, + m_setfile, m_yuvRange, + m_setfileReprocessing, m_yuvRangeReprocessing); + +} + +void ExynosCamera1Parameters::setUseDynamicBayer(bool enable) +{ + m_useDynamicBayer = enable; +} + +bool ExynosCamera1Parameters::getUseDynamicBayer(void) +{ + return m_useDynamicBayer; +} + +void ExynosCamera1Parameters::setUseDynamicBayerVideoSnapShot(bool enable) +{ + m_useDynamicBayerVideoSnapShot = enable; +} + +bool ExynosCamera1Parameters::getUseDynamicBayerVideoSnapShot(void) +{ + return m_useDynamicBayerVideoSnapShot; +} + +void ExynosCamera1Parameters::setUseDynamicScc(bool enable) +{ + m_useDynamicScc = enable; +} + +bool ExynosCamera1Parameters::getUseDynamicScc(void) +{ + bool dynamicScc = m_useDynamicScc; + bool reprocessing = isReprocessing(); + + if (getRecordingHint() == true && reprocessing == false) + dynamicScc = false; + + return dynamicScc; +} + +status_t ExynosCamera1Parameters::calcHighResolutionPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int previewW = 0, previewH = 0, previewFormat = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + previewFormat = getPreviewFormat(); + pictureFormat = getHwPictureFormat(); + + if (isOwnScc(m_cameraId) == true) + getPictureSize(&pictureW, &pictureH); + else + getHwPictureSize(&pictureW, &pictureH); + getPreviewSize(&previewW, &previewH); + + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = pictureW; + srcRect->h = pictureH; + srcRect->fullW = pictureW; + srcRect->fullH = pictureH; + srcRect->colorFormat = pictureFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = previewW; + dstRect->h = previewH; + dstRect->fullW = previewW; + dstRect->fullH = previewH; + dstRect->colorFormat = previewFormat; + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPictureRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0, hwPictureFormat = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + int previewW = 0, previewH = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio
= 1.0f; + + /* TODO: check state ready for start */ + pictureFormat = getHwPictureFormat(); + zoomLevel = getZoomLevel(); + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(hwSensorW, hwSensorH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + zoomRatio = getZoomRatio(0) / 1000; + ret = getCropRectAlign(cropW, cropH, + pictureW, pictureH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + ALIGN_UP(crop_crop_x, 2); + ALIGN_UP(crop_crop_y, 2); + +#if 0 + ALOGD("DEBUG(%s):hwSensorSize (%dx%d), previewSize (%dx%d)", + __FUNCTION__, hwSensorW, hwSensorH, previewW, previewH); + ALOGD("DEBUG(%s):hwPictureSize (%dx%d), pictureSize (%dx%d)", + __FUNCTION__, hwPictureW, hwPictureH, pictureW, pictureH); + ALOGD("DEBUG(%s):size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, cropX, cropY, cropW, cropH, zoomLevel); + ALOGD("DEBUG(%s):size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + ALOGD("DEBUG(%s):size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + __FUNCTION__, pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = crop_crop_x; + srcRect->y = crop_crop_y; + srcRect->w = crop_crop_w; + srcRect->h = crop_crop_h; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = pictureFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = pictureW; + dstRect->h = pictureH; + dstRect->fullW = pictureW; + dstRect->fullH = pictureH; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPictureRect(int originW, int originH, ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + float zoomRatio = getZoomRatio(0) / 1000; +#if 0 + int zoom = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; +#endif + /* TODO: check state ready for start */ + pictureFormat = getHwPictureFormat(); + getPictureSize(&pictureW, &pictureH); + + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(originW, originH, + pictureW, pictureH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + ALIGN_UP(crop_crop_x, 2); + ALIGN_UP(crop_crop_y, 2); + +#if 0 + CLOGD("DEBUG(%s):originSize (%dx%d) pictureSize (%dx%d)", + __FUNCTION__, originW, originH, pictureW, pictureH); + CLOGD("DEBUG(%s):size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoom); + CLOGD("DEBUG(%s):size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + __FUNCTION__, pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = crop_crop_x; + srcRect->y = crop_crop_y; + srcRect->w = crop_crop_w; + srcRect->h = crop_crop_h; + srcRect->fullW = originW; + srcRect->fullH = originH; + srcRect->colorFormat = pictureFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = pictureW; + dstRect->h = pictureH; + dstRect->fullW = pictureW; + dstRect->fullH = pictureH; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + + return NO_ERROR; +} + +status_t 
ExynosCamera1Parameters::getPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int hwBnsW = 0; + int hwBnsH = 0; + int hwBcropW = 0; + int hwBcropH = 0; + int zoomLevel = 0; + float zoomRatio = 1.00f; + int sizeList[SIZE_LUT_INDEX_END]; + int hwSensorMarginW = 0; + int hwSensorMarginH = 0; + float bnsRatio = 0; + + /* no matching ratio LUT exists, so use the equation */ + if (m_useSizeTable == false + || m_staticInfo->previewSizeLut == NULL + || m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId + || (getUsePureBayerReprocessing() == false && + m_cameraInfo.pictureSizeRatioId != m_cameraInfo.previewSizeRatioId) + || m_getPreviewSizeList(sizeList) != NO_ERROR) + return calcPreviewBayerCropSize(srcRect, dstRect); + + /* use LUT */ + hwBnsW = sizeList[BNS_W]; + hwBnsH = sizeList[BNS_H]; + hwBcropW = sizeList[BCROP_W]; + hwBcropH = sizeList[BCROP_H]; + + if (getRecordingHint() == true) { + if (m_cameraInfo.previewSizeRatioId != m_cameraInfo.videoSizeRatioId) { + ALOGV("WARN(%s):preview ratioId(%d) != videoRatioId(%d), use previewRatioId", + __FUNCTION__, m_cameraInfo.previewSizeRatioId, m_cameraInfo.videoSizeRatioId); + } + } + + int curBnsW = 0, curBnsH = 0; + getBnsSize(&curBnsW, &curBnsH); + if (SIZE_RATIO(curBnsW, curBnsH) != SIZE_RATIO(hwBnsW, hwBnsH)) + ALOGW("ERROR(%s[%d]): current BNS size(%dx%d) is NOT the same as the HW BNS size(%dx%d)", + __FUNCTION__, __LINE__, curBnsW, curBnsH, hwBnsW, hwBnsH); + + zoomLevel = getZoomLevel(); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + /* Skip calculating the crop size with the zoom level + * Condition 1 : High-speed camcorder D-zoom with the external scaler + * Condition 2 : HAL3 (the service calculates the crop size by itself) + */ + int fastFpsMode = getFastFpsMode(); + if ((fastFpsMode > CONFIG_MODE::HIGHSPEED_60 && + fastFpsMode < CONFIG_MODE::MAX && + getZoomPreviewWIthScaler() == true) || + getHalVersion() == IS_HAL_VER_3_2) { + ALOGV("DEBUG(%s[%d]):hwBnsSize (%dx%d), hwBcropSize(%dx%d), fastFpsMode(%d)", + __FUNCTION__, __LINE__, + hwBnsW, hwBnsH, + hwBcropW, hwBcropH, + fastFpsMode); + } else { +#if defined(SCALER_MAX_SCALE_UP_RATIO) + /* + * After the float division and the int cast, + * the zoomed size can shrink too much, + * so when zooming to the maximum, round the floating point value up (ceil). + */ + if (ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN) * SCALER_MAX_SCALE_UP_RATIO < hwBcropW || + ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2) * SCALER_MAX_SCALE_UP_RATIO < hwBcropH) { + hwBcropW = ALIGN_UP((int)ceil((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)ceil((float)hwBcropH / zoomRatio), 2); + } else +#endif + { + hwBcropW = ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2); + } + } + + /* Re-calculate the BNS size for removing Sensor Margin */ + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + m_adjustSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + + hwBnsW = hwBnsW - hwSensorMarginW; + hwBnsH = hwBnsH - hwSensorMarginH; + + /* src */ + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = hwBnsW; + srcRect->h = hwBnsH; + + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + int maxSensorW = 0, maxSensorH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + status_t ret = NO_ERROR; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + getMaxSensorSize(&maxSensorW, &maxSensorH); + + /* 1.
Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwBnsW / (float) maxSensorW; + scaleRatioY = (float) hwBnsH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwBnsW; + cropRegionH = hwBnsH; + } + + /* 2. Calculate the real crop region with considering the target ratio */ + if ((cropRegionW > hwBcropW) && (cropRegionH > hwBcropH)) { + dstRect->x = ALIGN_DOWN((cropRegionX + ((cropRegionW - hwBcropW) >> 1)), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + ((cropRegionH - hwBcropH) >> 1)), 2); + dstRect->w = hwBcropW; + dstRect->h = hwBcropH; + } else { + ret = getCropRectAlign(cropRegionW, cropRegionH, + hwBcropW, hwBcropH, + &(dstRect->x), &(dstRect->y), + &(dstRect->w), &(dstRect->h), + CAMERA_BCROP_ALIGN, 2, + 0, 0.0f); + dstRect->x = ALIGN_DOWN((cropRegionX + dstRect->x), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + dstRect->y), 2); + } + } else { + if (hwBnsW > hwBcropW) { + dstRect->x = ALIGN_UP(((hwBnsW - hwBcropW) >> 1), 2); + dstRect->w = hwBcropW; + } else { + dstRect->x = 0; + dstRect->w = hwBnsW; + } + + if (hwBnsH > hwBcropH) { + dstRect->y = ALIGN_UP(((hwBnsH - hwBcropH) >> 1), 2); + dstRect->h = hwBcropH; + } else { + dstRect->y = 0; + dstRect->h = hwBnsH; + } + } + + m_setHwBayerCropRegion(dstRect->w, dstRect->h, dstRect->x, dstRect->y); +#ifdef DEBUG_PERFRAME + ALOGD("DEBUG(%s):zoomLevel=%d", __FUNCTION__, zoomLevel); + ALOGD("DEBUG(%s):hwBnsSize (%dx%d), hwBcropSize (%d, %d)(%dx%d)", + __FUNCTION__, srcRect->w, srcRect->h, dstRect->x, dstRect->y, dstRect->w, dstRect->h); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::getPreviewBdsSize(ExynosRect *dstRect) +{ + status_t ret = NO_ERROR; + + ret = m_getPreviewBdsSize(dstRect); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):calcPreviewBDSSize() fail", __FUNCTION__, __LINE__); + return ret; + } + + if (this->getHWVdisMode() == true) { + int disW = ALIGN_UP((int)(dstRect->w * HW_VDIS_W_RATIO), 2); + int disH = ALIGN_UP((int)(dstRect->h * HW_VDIS_H_RATIO), 2); + + CLOGV("DEBUG(%s[%d]):HWVdis adjusted BDS Size (%d x %d) -> (%d x %d)", + __FUNCTION__, __LINE__, dstRect->w, dstRect->h, disW, disH); + + /* + * check H/W VDIS size(BDS dst size) is too big than bayerCropSize(BDS out size). + */ + ExynosRect bnsSize, bayerCropSize; + + if (getPreviewBayerCropSize(&bnsSize, &bayerCropSize) != NO_ERROR) { + CLOGE("ERR(%s[%d]):getPreviewBayerCropSize() fail", __FUNCTION__, __LINE__); + } else { + if (bayerCropSize.w < disW || bayerCropSize.h < disH) { + CLOGV("DEBUG(%s[%d]):bayerCropSize (%d x %d) is smaller than (%d x %d). 
so force bayerCropSize", + __FUNCTION__, __LINE__, bayerCropSize.w, bayerCropSize.h, disW, disH); + + disW = bayerCropSize.w; + disH = bayerCropSize.h; + } + } + + dstRect->w = disW; + dstRect->h = disH; + } + +#ifdef DEBUG_PERFRAME + CLOGD("DEBUG(%s):hwBdsSize (%dx%d)", __FUNCTION__, dstRect->w, dstRect->h); +#endif + + return ret; +} + +status_t ExynosCamera1Parameters::getPictureBdsSize(ExynosRect *dstRect) +{ + int hwBdsW = 0; + int hwBdsH = 0; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false + || m_staticInfo->pictureSizeLut == NULL + || m_staticInfo->pictureSizeLutMax <= m_cameraInfo.pictureSizeRatioId) { + ExynosRect rect; + return calcPictureBDSSize(&rect, dstRect); + } + + /* use LUT */ + hwBdsW = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BDS_W]; + hwBdsH = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BDS_H]; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = hwBdsW; + dstRect->h = hwBdsH; + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcNormalToTpuSize(int srcW, int srcH, int *dstW, int *dstH) +{ + status_t ret = NO_ERROR; + if (srcW < 0 || srcH < 0) { + CLOGE("ERR(%s[%d]):src size is invalid(%d x %d)", __FUNCTION__, __LINE__, srcW, srcH); + return INVALID_OPERATION; + } + + int disW = ALIGN_UP((int)(srcW * HW_VDIS_W_RATIO), 2); + int disH = ALIGN_UP((int)(srcH * HW_VDIS_H_RATIO), 2); + + *dstW = disW; + *dstH = disH; + CLOGD("DEBUG(%s[%d]):HWVdis adjusted BDS Size (%d x %d) -> (%d x %d)", __FUNCTION__, __LINE__, srcW, srcH, disW, disH); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcTpuToNormalSize(int srcW, int srcH, int *dstW, int *dstH) +{ + status_t ret = NO_ERROR; + if (srcW < 0 || srcH < 0) { + CLOGE("ERR(%s[%d]):src size is invalid(%d x %d)", __FUNCTION__, __LINE__, srcW, srcH); + return INVALID_OPERATION; + } + + int disW = ALIGN_DOWN((int)(srcW / HW_VDIS_W_RATIO), 2); + int disH = ALIGN_DOWN((int)(srcH / HW_VDIS_H_RATIO), 2); + + *dstW = disW; + *dstH = disH; + CLOGD("DEBUG(%s[%d]):HWVdis adjusted BDS Size (%d x %d) -> (%d x %d)", __FUNCTION__, __LINE__, srcW, srcH, disW, disH); + + return ret; +} + +void ExynosCamera1Parameters::setUsePureBayerReprocessing(bool enable) +{ + m_usePureBayerReprocessing = enable; +} + +bool ExynosCamera1Parameters::getUsePureBayerReprocessing(void) +{ + int oldMode = m_usePureBayerReprocessing; + + if (getRecordingHint() == true) { + if (getDualMode() == true) + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_DUAL_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_DUAL_RECORDING; + else + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_RECORDING; + } else { + if (getDualMode() == true) + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_DUAL : USE_PURE_BAYER_REPROCESSING_FRONT_ON_DUAL; + else + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? 
USE_PURE_BAYER_REPROCESSING : USE_PURE_BAYER_REPROCESSING_FRONT; + } + + if (oldMode != m_usePureBayerReprocessing) { + CLOGD("DEBUG(%s[%d]):bayer usage is changed (%d -> %d)", __FUNCTION__, __LINE__, oldMode, m_usePureBayerReprocessing); + } + + return m_usePureBayerReprocessing; +} + +bool ExynosCamera1Parameters::isUseYuvReprocessing(void) +{ + bool ret = false; + +#ifdef USE_YUV_REPROCESSING + ret = USE_YUV_REPROCESSING; +#endif + + return ret; +} + +bool ExynosCamera1Parameters::isUseYuvReprocessingForThumbnail(void) +{ + bool ret = false; + +#ifdef USE_YUV_REPROCESSING_FOR_THUMBNAIL + if (isUseYuvReprocessing() == true) + ret = USE_YUV_REPROCESSING_FOR_THUMBNAIL; +#endif + + return ret; +} + +int32_t ExynosCamera1Parameters::getReprocessingBayerMode(void) +{ + int32_t mode = REPROCESSING_BAYER_MODE_NONE; + bool useDynamicBayer = (getRecordingHint() == true || getDualRecordingHint() == true) ? + getUseDynamicBayerVideoSnapShot() : getUseDynamicBayer(); + + if (isReprocessing() == false) + return mode; + + if (useDynamicBayer == true) { + if (getUsePureBayerReprocessing() == true) + mode = REPROCESSING_BAYER_MODE_PURE_DYNAMIC; + else + mode = REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC; + } else { + if (getUsePureBayerReprocessing() == true) + mode = REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON; + else + mode = REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON; + } + + return mode; +} + +int ExynosCamera1Parameters::getBayerFormat(int pipeId) +{ + int bayerFormat = V4L2_PIX_FMT_SBGGR16; + + switch (pipeId) { + case PIPE_FLITE: + case PIPE_3AA: + case PIPE_FLITE_REPROCESSING: + case PIPE_3AA_REPROCESSING: + bayerFormat = CAMERA_BAYER_FORMAT; + break; + case PIPE_3AC: + case PIPE_3AP: + case PIPE_ISP: + case PIPE_3AC_REPROCESSING: + case PIPE_3AP_REPROCESSING: + case PIPE_ISP_REPROCESSING: + default: + CLOGW("WRN(%s[%d]):Invalid pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + break; + } + + return bayerFormat; +} + +void ExynosCamera1Parameters::setAdaptiveCSCRecording(bool enable) +{ + m_useAdaptiveCSCRecording = enable; +} + +bool ExynosCamera1Parameters::getAdaptiveCSCRecording(void) +{ + return m_useAdaptiveCSCRecording; +} + +int ExynosCamera1Parameters::getHalPixelFormat(void) +{ + int setfile = 0; + int yuvRange = 0; + int previewFormat = getHwPreviewFormat(); + int halFormat = 0; + + m_getSetfileYuvRange(false, &setfile, &yuvRange); + + halFormat = convertingHalPreviewFormat(previewFormat, yuvRange); + + return halFormat; +} + +#if (TARGET_ANDROID_VER_MAJ >= 4 && TARGET_ANDROID_VER_MIN >= 4) +int ExynosCamera1Parameters::convertingHalPreviewFormat(int previewFormat, int yuvRange) +{ + int halFormat = 0; + + switch (previewFormat) { + case V4L2_PIX_FMT_NV21: + CLOGD("DEBUG(%s[%d]): preview format NV21", __FUNCTION__, __LINE__); + halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; + break; + case V4L2_PIX_FMT_NV21M: + CLOGD("DEBUG(%s[%d]): preview format NV21M", __FUNCTION__, __LINE__); + if (yuvRange == YUV_FULL_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL; + } else if (yuvRange == YUV_LIMITED_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + } else { + CLOGW("WRN(%s[%d]): invalid yuvRange, force set to full range", __FUNCTION__, __LINE__); + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL; + } + break; + case V4L2_PIX_FMT_YVU420: + CLOGD("DEBUG(%s[%d]): preview format YVU420", __FUNCTION__, __LINE__); + halFormat = HAL_PIXEL_FORMAT_YV12; + break; + case V4L2_PIX_FMT_YVU420M: + CLOGD("DEBUG(%s[%d]): preview format YVU420M", __FUNCTION__, __LINE__); + halFormat = 
HAL_PIXEL_FORMAT_EXYNOS_YV12_M; + break; + default: + CLOGE("ERR(%s[%d]): unknown preview format(%d)", __FUNCTION__, __LINE__, previewFormat); + break; + } + + return halFormat; +} +#else +int ExynosCamera1Parameters::convertingHalPreviewFormat(int previewFormat, int yuvRange) +{ + int halFormat = 0; + + switch (previewFormat) { + case V4L2_PIX_FMT_NV21: + halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; + break; + case V4L2_PIX_FMT_NV21M: + if (yuvRange == YUV_FULL_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL; + } else if (yuvRange == YUV_LIMITED_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; + } else { + CLOGW("WRN(%s[%d]): invalid yuvRange, force set to full range", __FUNCTION__, __LINE__); + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL; + } + break; + case V4L2_PIX_FMT_YVU420: + halFormat = HAL_PIXEL_FORMAT_YV12; + break; + case V4L2_PIX_FMT_YVU420M: + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12; + break; + default: + CLOGE("ERR(%s[%d]): unknown preview format(%d)", __FUNCTION__, __LINE__, previewFormat); + break; + } + + return halFormat; +} +#endif + +void ExynosCamera1Parameters::setDvfsLock(bool lock) { + m_dvfsLock = lock; +} + +bool ExynosCamera1Parameters::getDvfsLock(void) { + return m_dvfsLock; +} +bool ExynosCamera1Parameters::setConfig(struct ExynosConfigInfo* config) +{ + memcpy(m_exynosconfig, config, sizeof(struct ExynosConfigInfo)); + setConfigMode(m_exynosconfig->mode); + return true; +} +struct ExynosConfigInfo* ExynosCamera1Parameters::getConfig() +{ + return m_exynosconfig; +} + +bool ExynosCamera1Parameters::setConfigMode(uint32_t mode) +{ + bool ret = false; + switch(mode){ + case CONFIG_MODE::NORMAL: + case CONFIG_MODE::HIGHSPEED_60: + case CONFIG_MODE::HIGHSPEED_120: + case CONFIG_MODE::HIGHSPEED_240: + m_exynosconfig->current = &m_exynosconfig->info[mode]; + m_exynosconfig->mode = mode; + ret = true; + break; + default: + CLOGE("ERR(%s[%d]): unknown config mode (%d)", __FUNCTION__, __LINE__, mode); + } + return ret; +} + +int ExynosCamera1Parameters::getConfigMode() +{ + int ret = -1; + switch(m_exynosconfig->mode){ + case CONFIG_MODE::NORMAL: + case CONFIG_MODE::HIGHSPEED_60: + case CONFIG_MODE::HIGHSPEED_120: + case CONFIG_MODE::HIGHSPEED_240: + ret = m_exynosconfig->mode; + break; + default: + CLOGE("ERR(%s[%d]): unknown config mode (%d)", __FUNCTION__, __LINE__, m_exynosconfig->mode); + } + + return ret; +} + +void ExynosCamera1Parameters::setZoomActiveOn(bool enable) +{ + m_zoom_activated = enable; +} + +bool ExynosCamera1Parameters::getZoomActiveOn(void) +{ + return m_zoom_activated; +} + +status_t ExynosCamera1Parameters::setMarkingOfExifFlash(int flag) +{ + m_firing_flash_marking = flag; + + return NO_ERROR; +} + +int ExynosCamera1Parameters::getMarkingOfExifFlash(void) +{ + return m_firing_flash_marking; +} + +bool ExynosCamera1Parameters::getSensorOTFSupported(void) +{ + return m_staticInfo->flite3aaOtfSupport; +} + +bool ExynosCamera1Parameters::isReprocessing(void) +{ + bool reprocessing = false; + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { +#if defined(MAIN_CAMERA_DUAL_REPROCESSING) && defined(MAIN_CAMERA_SINGLE_REPROCESSING) + reprocessing = (flagDual == true) ? 
MAIN_CAMERA_DUAL_REPROCESSING : MAIN_CAMERA_SINGLE_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_REPROCESSING/MAIN_CAMERA_SINGLE_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#if defined(FRONT_CAMERA_DUAL_REPROCESSING) && defined(FRONT_CAMERA_SINGLE_REPROCESSING) + reprocessing = (flagDual == true) ? FRONT_CAMERA_DUAL_REPROCESSING : FRONT_CAMERA_SINGLE_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_REPROCESSING/FRONT_CAMERA_SINGLE_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + + return reprocessing; +} + +bool ExynosCamera1Parameters::isSccCapture(void) +{ + bool sccCapture = false; + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { +#if defined(MAIN_CAMERA_DUAL_SCC_CAPTURE) && defined(MAIN_CAMERA_SINGLE_SCC_CAPTURE) + sccCapture = (flagDual == true) ? MAIN_CAMERA_DUAL_SCC_CAPTURE : MAIN_CAMERA_SINGLE_SCC_CAPTURE; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_SCC_CAPTURE/MAIN_CAMERA_SINGLE_SCC_CAPTURE is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#if defined(FRONT_CAMERA_DUAL_SCC_CAPTURE) && defined(FRONT_CAMERA_SINGLE_SCC_CAPTURE) + sccCapture = (flagDual == true) ? FRONT_CAMERA_DUAL_SCC_CAPTURE : FRONT_CAMERA_SINGLE_SCC_CAPTURE; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_SCC_CAPTURE/FRONT_CAMERA_SINGLE_SCC_CAPTURE is not defined", __FUNCTION__, __LINE__); +#endif + } + + return sccCapture; +} + +bool ExynosCamera1Parameters::isFlite3aaOtf(void) +{ + bool flagOtfInput = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + bool flagSensorOtf = getSensorOTFSupported(); + + if (flagSensorOtf == false) + return flagOtfInput; + + if (cameraId == CAMERA_ID_BACK) { + /* for 52xx scenario */ + flagOtfInput = true; + + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_FLITE_3AA_OTF + flagOtfInput = MAIN_CAMERA_DUAL_FLITE_3AA_OTF; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_FLITE_3AA_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_FLITE_3AA_OTF + flagOtfInput = MAIN_CAMERA_SINGLE_FLITE_3AA_OTF; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_SINGLE_FLITE_3AA_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_FLITE_3AA_OTF + flagOtfInput = FRONT_CAMERA_DUAL_FLITE_3AA_OTF; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_FLITE_3AA_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_FLITE_3AA_OTF + flagOtfInput = FRONT_CAMERA_SINGLE_FLITE_3AA_OTF; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_SINGLE_FLITE_3AA_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } + } + + return flagOtfInput; +} + +bool ExynosCamera1Parameters::is3aaIspOtf(void) +{ + bool ret = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_3AA_ISP_OTF + ret = MAIN_CAMERA_DUAL_3AA_ISP_OTF; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_3AA_ISP_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_3AA_ISP_OTF + ret = MAIN_CAMERA_SINGLE_3AA_ISP_OTF; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_SINGLE_3AA_ISP_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_3AA_ISP_OTF + ret = FRONT_CAMERA_DUAL_3AA_ISP_OTF; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_3AA_ISP_OTF is not defined", __FUNCTION__, __LINE__);
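+            /*
+             * Illustrative sketch (not part of the vendor code, names are hypothetical):
+             * every is*Otf()/is*OTF() helper in this file follows the same pattern of
+             * picking a compile-time BoardConfig macro per (cameraId, dual-mode)
+             * combination and logging a warning when the macro is missing. The
+             * selection itself collapses to:
+             *
+             *     static bool selectOtfFlag(bool isBackCamera, bool isDualMode,
+             *                               bool backDual, bool backSingle,
+             *                               bool frontDual, bool frontSingle)
+             *     {
+             *         if (isBackCamera)
+             *             return isDualMode ? backDual : backSingle;
+             *         return isDualMode ? frontDual : frontSingle;
+             *     }
+             *
+             * The explicit #ifdef ladder is kept here so that a missing BoardConfig
+             * define shows up in the log instead of silently falling back to false.
+             */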
+#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_3AA_ISP_OTF + ret = FRONT_CAMERA_SINGLE_3AA_ISP_OTF; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_SINGLE_3AA_ISP_OTF is not defined", __FUNCTION__, __LINE__); +#endif + } + } + + return ret; +} + +bool ExynosCamera1Parameters::isIspMcscOtf(void) +{ + bool ret = true; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_ISP_MCSC_OTF + ret = MAIN_CAMERA_DUAL_ISP_MCSC_OTF; +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_ISP_MCSC_OTF + ret = MAIN_CAMERA_SINGLE_ISP_MCSC_OTF; +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_ISP_MCSC_OTF + ret = FRONT_CAMERA_DUAL_ISP_MCSC_OTF; +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_ISP_MCSC_OTF + ret = FRONT_CAMERA_SINGLE_ISP_MCSC_OTF; +#endif + } + } + + return ret; +} + +bool ExynosCamera1Parameters::isMcscVraOtf(void) +{ + bool ret = true; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_MCSC_VRA_OTF + ret = MAIN_CAMERA_DUAL_MCSC_VRA_OTF; +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_MCSC_VRA_OTF + ret = MAIN_CAMERA_SINGLE_MCSC_VRA_OTF; +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_MCSC_VRA_OTF + ret = FRONT_CAMERA_DUAL_MCSC_VRA_OTF; +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_MCSC_VRA_OTF + ret = FRONT_CAMERA_SINGLE_MCSC_VRA_OTF; +#endif + } + } + + return ret; +} + +bool ExynosCamera1Parameters::isReprocessing3aaIspOTF(void) +{ + bool otf = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING + otf = MAIN_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING + otf = MAIN_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING + otf = FRONT_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING + otf = FRONT_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + } + + if (otf == true) { + bool flagDirtyBayer = false; + + int reprocessingBayerMode = this->getReprocessingBayerMode(); + switch(reprocessingBayerMode) { + case REPROCESSING_BAYER_MODE_NONE: + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON: + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC: + flagDirtyBayer = false; + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON: + case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC: + default: + flagDirtyBayer = true; + break; + } + + if (flagDirtyBayer == true) { + ALOGW("WRN(%s[%d]): otf == true. but, flagDirtyBayer == true. 
so force false on 3aa_isp otf", + __FUNCTION__, __LINE__); + + otf = false; + } + } + + return otf; +} + +bool ExynosCamera1Parameters::isReprocessingIspMcscOTF(void) +{ + bool otf = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (isUseYuvReprocessing() == false) { + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING + otf = MAIN_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING + otf = MAIN_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING + otf = FRONT_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING + otf = FRONT_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + } + } + + return otf; +} + +bool ExynosCamera1Parameters::isHWFCEnabled(void) +{ +#if defined(USE_JPEG_HWFC) + return USE_JPEG_HWFC; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::isHWFCOnDemand(void) +{ +#if defined(USE_JPEG_HWFC_ONDEMAND) + return USE_JPEG_HWFC_ONDEMAND; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::isUseThumbnailHWFC(void) +{ +#if defined(USE_THUMBNAIL_HWFC) + return USE_THUMBNAIL_HWFC; +#else + return false; +#endif +} + +void ExynosCamera1Parameters::setZoomPreviewWIthScaler(bool enable) +{ + m_zoomWithScaler = enable; +} + +bool ExynosCamera1Parameters::getZoomPreviewWIthScaler(void) +{ + return m_zoomWithScaler; +} + +bool ExynosCamera1Parameters::isUsing3acForIspc(void) +{ +#if (defined(USE_3AC_FOR_ISPC) && (USE_3AC_FOR_ISPC)) + return true; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::isOwnScc(int cameraId) +{ + bool ret = false; + + if (cameraId == CAMERA_ID_BACK) { +#ifdef MAIN_CAMERA_HAS_OWN_SCC + ret = MAIN_CAMERA_HAS_OWN_SCC; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_HAS_OWN_SCC is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef FRONT_CAMERA_HAS_OWN_SCC + ret = FRONT_CAMERA_HAS_OWN_SCC; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_HAS_OWN_SCC is not defined", __FUNCTION__, __LINE__); +#endif + } + + return ret; +} + +bool ExynosCamera1Parameters::isOwnMCSC(void) +{ + bool ret = false; + +#ifdef OWN_MCSC_HW + ret = OWN_MCSC_HW; +#endif + + return ret; +} + +bool ExynosCamera1Parameters::isCompanion(int cameraId) +{ + bool ret = false; + + if (cameraId == CAMERA_ID_BACK) { + ALOGI("INFO(%s[%d]): MAIN_CAMERA_USE_SAMSUNG_COMPANION is not defined", __FUNCTION__, __LINE__); + } else { + ALOGI("INFO(%s[%d]): FRONT_CAMERA_USE_SAMSUNG_COMPANION is not defined", __FUNCTION__, __LINE__); + } + + return ret; +} + +int ExynosCamera1Parameters::getHalVersion(void) +{ + return m_halVersion; +} + +void ExynosCamera1Parameters::setHalVersion(int halVersion) +{ + m_halVersion = halVersion; + m_activityControl->setHalVersion(m_halVersion); + + ALOGI("INFO(%s[%d]): m_halVersion(%d)", __FUNCTION__, __LINE__, m_halVersion); + + return; +} + +struct ExynosSensorInfoBase
*ExynosCamera1Parameters::getSensorStaticInfo() +{ + ALOGE("ERR(%s[%d]): halVersion(%d) does not support this function!!!!", __FUNCTION__, __LINE__, m_halVersion); + return NULL; +} + +bool ExynosCamera1Parameters::getSetFileCtlMode(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL + return true; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::getSetFileCtl3AA_ISP(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_3AA_ISP + return SET_SETFILE_BY_SET_CTRL_3AA_ISP; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::getSetFileCtl3AA(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_3AA + return SET_SETFILE_BY_SET_CTRL_3AA; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::getSetFileCtlISP(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_ISP + return SET_SETFILE_BY_SET_CTRL_ISP; +#else + return false; +#endif +} + +bool ExynosCamera1Parameters::getSetFileCtlSCP(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_SCP + return SET_SETFILE_BY_SET_CTRL_SCP; +#else + return false; +#endif +} + +int ExynosCamera1Parameters::getMaxNumCPUCluster(void) +{ +#ifdef CLUSTER_MAX_NUM + return CLUSTER_MAX_NUM; +#else + return 1; +#endif +} + +int ExynosCamera1Parameters::getNumOfReprocessingFactory(void) +{ +#ifdef NUM_OF_REPROCESSING_FACTORY + return NUM_OF_REPROCESSING_FACTORY; +#else + return 1; +#endif +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCamera1Parameters.h b/libcamera/34xx/hal1/ExynosCamera1Parameters.h new file mode 100644 index 0000000..5d52768 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCamera1Parameters.h @@ -0,0 +1,1017 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_1_PARAMETERS_H +#define EXYNOS_CAMERA_1_PARAMETERS_H + +#include "ExynosCameraConfig.h" +#include "ExynosCameraParameters.h" + +#include "ExynosCameraUtilsModule.h" +#include "ExynosCameraSensorInfo.h" +#include"ExynosCameraNode.h" + +#define FW_CUSTOM_OFFSET (1) + +namespace android { + +class ExynosCamera1Parameters : public ExynosCameraParameters { +public: + /* Constructor */ + ExynosCamera1Parameters(int cameraId, bool flagCompanion = false, int halVersion = IS_HAL_VER_1_0); + + /* Destructor */ + virtual ~ExynosCamera1Parameters(); + + /* Create the instance */ + bool create(int cameraId); + /* Destroy the instance */ + bool destroy(void); + /* Check if the instance was created */ + bool flagCreate(void); + + void setDefaultCameraInfo(void); + void setDefaultParameter(void); + +public: + status_t checkVisionMode(const CameraParameters& params); + status_t checkRecordingHint(const CameraParameters& params); + status_t checkDualMode(const CameraParameters& params); + status_t checkDualRecordingHint(const CameraParameters& params); + status_t checkEffectHint(const CameraParameters& params); + status_t checkEffectRecordingHint(const CameraParameters& params); + /* + * Check preview frame rate + * both previewFpsRange(new API) and previewFrameRate(Old API) + */ + status_t checkPreviewFps(const CameraParameters& params); + status_t checkPreviewFpsRange(const CameraParameters& params); + status_t checkPreviewFrameRate(const CameraParameters& params); + status_t checkVideoSize(const CameraParameters& params); + status_t checkFastFpsMode(const CameraParameters& params); + status_t checkVideoStabilization(const CameraParameters& params); + status_t checkSWVdisMode(const CameraParameters& params); + status_t checkHWVdisMode(const CameraParameters& params); + status_t checkPreviewSize(const CameraParameters& params); + status_t checkPreviewFormat(const CameraParameters& params); + status_t checkPictureSize(const CameraParameters& params); + status_t checkPictureFormat(const CameraParameters& params); + status_t checkJpegQuality(const CameraParameters& params); + status_t checkThumbnailSize(const CameraParameters& params); + status_t checkThumbnailQuality(const CameraParameters& params); + status_t check3dnrMode(const CameraParameters& params); + status_t checkDrcMode(const CameraParameters& params); + status_t checkOdcMode(const CameraParameters& params); + status_t checkZoomLevel(const CameraParameters& params); + status_t checkRotation(const CameraParameters& params); + status_t checkAutoExposureLock(const CameraParameters& params); + status_t checkExposureCompensation(const CameraParameters& params); + status_t checkMeteringAreas(const CameraParameters& params); + status_t checkMeteringMode(const CameraParameters& params); + status_t checkAntibanding(const CameraParameters& params); + status_t checkSceneMode(const CameraParameters& params); + status_t checkFocusMode(const CameraParameters& params); + status_t checkFlashMode(const CameraParameters& params); + status_t checkWhiteBalanceMode(const CameraParameters& params); + status_t checkAutoWhiteBalanceLock(const CameraParameters& params); + status_t checkFocusAreas(const CameraParameters& params); + status_t checkColorEffectMode(const CameraParameters& params); + status_t checkGpsAltitude(const CameraParameters& params); + status_t checkGpsLatitude(const CameraParameters& params); + status_t checkGpsLongitude(const CameraParameters& params); + status_t checkGpsProcessingMethod(const CameraParameters& params); + 
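+    /*
+     * Illustrative sketch only (the caller below is hypothetical and not part of
+     * this header): the HAL1 wrapper is expected to funnel each updated
+     * android::CameraParameters blob through these check*() validators before any
+     * state is committed, e.g.:
+     *
+     *     CameraParameters newParams;              // as received from the app
+     *     newParams.setPreviewSize(1920, 1080);
+     *     if (m_parameters->checkPreviewSize(newParams) != NO_ERROR ||
+     *         m_parameters->checkPreviewFormat(newParams) != NO_ERROR)
+     *         return BAD_VALUE;                    // reject the whole update
+     *
+     * Each check*() parses one parameter key (or key group), validates it against
+     * the static sensor info, and only then applies it through the matching
+     * private m_set*() helper declared further down in this class.
+     */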
status_t checkGpsTimeStamp(const CameraParameters& params); + status_t checkCityId(const CameraParameters& params); + status_t checkWeatherId(const CameraParameters& params); + status_t checkBrightness(const CameraParameters& params); + status_t checkSaturation(const CameraParameters& params); + status_t checkSharpness(const CameraParameters& params); + status_t checkHue(const CameraParameters& params); + status_t checkIso(const CameraParameters& params); + status_t checkContrast(const CameraParameters& params); + status_t checkHdrMode(const CameraParameters& params); + status_t checkWdrMode(const CameraParameters& params); + status_t checkShotMode(const CameraParameters& params); + status_t checkAntiShake(const CameraParameters& params); + status_t checkVtMode(const CameraParameters& params); + status_t checkVRMode(const CameraParameters& params); + status_t checkGamma(const CameraParameters& params); + status_t checkSlowAe(const CameraParameters& params); + status_t checkScalableSensorMode(const CameraParameters& params); + status_t checkImageUniqueId(const CameraParameters& params); + status_t checkSeriesShotMode(const CameraParameters& params); +#ifdef BURST_CAPTURE + status_t checkSeriesShotFilePath(const CameraParameters& params); +#endif + + status_t checkFactoryMode(const CameraParameters& params); + + status_t calcPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcHighResolutionPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcRecordingGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureRect(int originW, int originH, ExynosRect *srcRect, ExynosRect *dstRect); + + status_t getPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t getPreviewBdsSize(ExynosRect *dstRect); + status_t getPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t getPictureBdsSize(ExynosRect *dstRect); + + status_t calcPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPreviewBDSSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureBDSSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcNormalToTpuSize(int srcW, int srcH, int *dstW, int *dstH); + status_t calcTpuToNormalSize(int srcW, int srcH, int *dstW, int *dstH); + status_t calcPreviewDzoomCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + /* Sets the auto-exposure lock state. */ + void m_setAutoExposureLock(bool lock); + /* Sets the auto-white balance lock state. */ + void m_setAutoWhiteBalanceLock(bool value); + +private: + /* Sets the dimensions for preview pictures. */ + void m_setPreviewSize(int w, int h); + /* Sets the image format for preview pictures. */ + void m_setPreviewFormat(int colorFormat); + /* Sets the dimensions for pictures. */ + void m_setPictureSize(int w, int h); + /* Sets the image format for pictures. */ + void m_setPictureFormat(int colorFormat); + /* Sets video's width, height */ + void m_setVideoSize(int w, int h); + /* Sets video's color format */ + void m_setVideoFormat(int colorFormat); + + /* Sets the dimensions for Sesnor-related HW. */ + void m_setHwSensorSize(int w, int h); + /* Sets the dimensions for preview-related HW. */ + void m_setHwPreviewSize(int w, int h); + /* Sets the image format for preview-related HW. */ + void m_setHwPreviewFormat(int colorFormat); + /* Sets the dimensions for picture-related HW. 
*/ + void m_setHwPictureSize(int w, int h); + /* Sets the image format for picture-related HW. */ + void m_setHwPictureFormat(int colorFormat); + /* Sets HW Bayer Crop Size */ + void m_setHwBayerCropRegion(int w, int h, int x, int y); + /* Sets the antibanding. */ + void m_setAntibanding(int value); + /* Sets the current color effect setting. */ + void m_setColorEffectMode(int effect); + /* Sets the exposure compensation index. */ + void m_setExposureCompensation(int value); + /* Sets the flash mode. */ + void m_setFlashMode(int flashMode); + /* Sets focus areas. */ + void m_setFocusAreas(uint32_t numValid, ExynosRect* rects, int *weights); + /* Sets focus areas. (Using ExynosRect2) */ + void m_setFocusAreas(uint32_t numValid, ExynosRect2* rect2s, int *weights); + /* Sets the focus mode. */ + void m_setFocusMode(int focusMode); + /* Sets Jpeg quality of captured picture. */ + void m_setJpegQuality(int quality); + /* Sets the quality of the EXIF thumbnail in Jpeg picture. */ + void m_setThumbnailQuality(int quality); + /* Sets the dimensions for EXIF thumbnail in Jpeg picture. */ + void m_setThumbnailSize(int w, int h); + /* Sets metering areas. */ + void m_setMeteringAreas(uint32_t num, ExynosRect *rects, int *weights); + /* Sets metering areas.(Using ExynosRect2) */ + void m_setMeteringAreas(uint32_t num, ExynosRect2 *rect2s, int *weights); + /* Sets the frame rate range for preview. */ + void m_setPreviewFpsRange(uint32_t min, uint32_t max); + /* Sets the scene mode. */ + void m_setSceneMode(int sceneMode); + /* Enables and disables video stabilization. */ + void m_setVideoStabilization(bool stabilization); + /* Sets the white balance. */ + status_t m_setWhiteBalanceMode(int whiteBalance); + /* Sets Bayer Crop Region */ + status_t m_setParamCropRegion(int zoom, int srcW, int srcH, int dstW, int dstH); + /* Sets recording mode hint. */ + void m_setRecordingHint(bool hint); + /* Sets GPS altitude. */ + void m_setGpsAltitude(double altitude); + /* Sets GPS latitude coordinate. */ + void m_setGpsLatitude(double latitude); + /* Sets GPS longitude coordinate. */ + void m_setGpsLongitude(double longitude); + /* Sets GPS processing method. */ + void m_setGpsProcessingMethod(const char *gpsProcessingMethod); + /* Sets GPS timestamp. */ + void m_setGpsTimeStamp(long timeStamp); + /* Sets the rotation angle in degrees relative to the orientation of the camera. */ + void m_setRotation(int rotation); + +/* + * Additional API. + */ + /* Sets metering areas. 
*/ + void m_setMeteringMode(int meteringMode); + /* Sets brightness */ + void m_setBrightness(int brightness); + /* Sets ISO */ + void m_setIso(uint32_t iso); + /* Sets Contrast */ + void m_setContrast(uint32_t contrast); + /* Sets Saturation */ + void m_setSaturation(int saturation); + /* Sets Sharpness */ + void m_setSharpness(int sharpness); + /* Sets Hue */ + void m_setHue(int hue); + /* Sets WDR */ + void m_setHdrMode(bool hdr); + /* Sets WDR */ + void m_setWdrMode(bool wdr); + /* Sets anti shake */ + void m_setAntiShake(bool toggle); + /* Sets gamma */ + void m_setGamma(bool gamma); + /* Sets ODC */ + void m_setOdcMode(bool toggle); + /* Sets Slow AE */ + void m_setSlowAe(bool slowAe); + /* Sets 3DNR */ + void m_set3dnrMode(bool toggle); + /* Sets DRC */ + void m_setDrcMode(bool toggle); + +/* + * Vendor specific APIs + */ + /* Sets Intelligent mode */ + status_t m_setIntelligentMode(int intelligentMode); + void m_setVisionMode(bool vision); + void m_setVisionModeFps(int fps); + void m_setVisionModeAeTarget(int ae); + + void m_setSWVdisMode(bool swVdis); + void m_setSWVdisUIMode(bool swVdisUI); + + /* Sets VT mode */ + void m_setVtMode(int vtMode); + + /* Sets VR mode */ + void m_setVRMode(int vtMode); + + /* Sets Dual mode */ + void m_setDualMode(bool toggle); + /* Sets dual recording mode hint. */ + void m_setDualRecordingHint(bool hint); + /* Sets effect hint. */ + void m_setEffectHint(bool hint); + /* Sets effect recording mode hint. */ + void m_setEffectRecordingHint(bool hint); + + void m_setHighResolutionCallbackMode(bool enable); + void m_setHighSpeedRecording(bool highSpeed); + void m_setCityId(long long int cityId); + void m_setWeatherId(unsigned char cityId); + status_t m_setImageUniqueId(const char *uniqueId); + /* Sets camera angle */ + bool m_setAngle(int angle); + /* Sets Top-down mirror */ + bool m_setTopDownMirror(void); + /* Sets Left-right mirror */ + bool m_setLRMirror(void); + /* Sets Burst mode */ + void m_setSeriesShotCount(int seriesShotCount); + bool m_setAutoFocusMacroPosition(int autoFocusMacroPosition); + /* Sets Low Light A */ + bool m_setLLAMode(void); + + /* Sets object tracking */ + bool m_setObjectTracking(bool toggle); + /* Start or stop object tracking operation */ + bool m_setObjectTrackingStart(bool toggle); + /* Sets x, y position for object tracking operation */ + bool m_setObjectPosition(int x, int y); + /* Sets smart auto */ + bool m_setSmartAuto(bool toggle); + /* Sets beauty shot */ + bool m_setBeautyShot(bool toggle); + +/* + * Others + */ + void m_setRestartPreviewChecked(bool restart); + bool m_getRestartPreviewChecked(void); + void m_setRestartPreview(bool restart); + void m_setExifFixedAttribute(void); + int m_convertMetaCtlAwbMode(struct camera2_shot_ext *shot_ext); + +public: + + /* Returns the image format for FLITE/3AC/3AP bayer */ + int getBayerFormat(int pipeId); + /* Returns the dimensions setting for preview pictures. */ + void getPreviewSize(int *w, int *h); + /* Returns the image format for preview frames got from Camera.PreviewCallback. */ + int getPreviewFormat(void); + /* Returns the dimension setting for pictures. */ + void getPictureSize(int *w, int *h); + /* Returns the image format for pictures. */ + int getPictureFormat(void); + /* Gets video's width, height */ + void getVideoSize(int *w, int *h); + /* Gets video's color format */ + int getVideoFormat(void); + /* Gets the supported sensor sizes. */ + void getMaxSensorSize(int *w, int *h); + /* Gets the supported sensor margin. 
*/ + void getSensorMargin(int *w, int *h); + /* Gets the supported preview sizes. */ + void getMaxPreviewSize(int *w, int *h); + /* Gets the supported picture sizes. */ + void getMaxPictureSize(int *w, int *h); + /* Gets the supported video frame sizes that can be used by MediaRecorder. */ + void getMaxVideoSize(int *w, int *h); + /* Gets the supported jpeg thumbnail sizes. */ + bool getSupportedJpegThumbnailSizes(int *w, int *h); + + /* Returns the dimensions setting for preview-related HW. */ + void getHwSensorSize(int *w, int *h); + /* Returns the dimensions setting for preview-related HW. */ + void getHwPreviewSize(int *w, int *h); + /* Returns the image format for preview-related HW. */ + int getHwPreviewFormat(void); + /* Returns the dimension setting for picture-related HW. */ + void getHwPictureSize(int *w, int *h); + /* Returns the image format for picture-related HW. */ + int getHwPictureFormat(void); + /* Returns HW Bayer Crop Size */ + void getHwBayerCropRegion(int *w, int *h, int *x, int *y); + /* Returns VRA input Size */ + void getHwVraInputSize(int *w, int *h); + /* Returns VRA format */ + int getHwVraInputFormat(void); + + /* Gets the current antibanding setting. */ + int getAntibanding(void); + /* Gets the state of the auto-exposure lock. */ + bool getAutoExposureLock(void); + /* Gets the state of the auto-white balance lock. */ + bool getAutoWhiteBalanceLock(void); + /* Gets the current color effect setting. */ + int getColorEffectMode(void); + /* Gets the current exposure compensation index. */ + int getExposureCompensation(void); + /* Gets the current flash mode setting. */ + int getFlashMode(void); + /* Gets the current focus areas. */ + void getFocusAreas(int *validFocusArea, ExynosRect2 *rect2s, int *weights); + /* Gets the current focus mode setting. */ + int getFocusMode(void); + /* Returns the quality setting for the JPEG picture. */ + int getJpegQuality(void); + /* Returns the quality setting for the EXIF thumbnail in Jpeg picture. */ + int getThumbnailQuality(void); + /* Returns the dimensions for EXIF thumbnail in Jpeg picture. */ + void getThumbnailSize(int *w, int *h); + /* Returns the max size for EXIF thumbnail in Jpeg picture. */ + void getMaxThumbnailSize(int *w, int *h); + /* Gets the current metering areas. */ + void getMeteringAreas(ExynosRect *rects); + /* Gets the current metering areas.(Using ExynosRect2) */ + void getMeteringAreas(ExynosRect2 *rect2s); + /* Returns the current minimum and maximum preview fps. */ + void getPreviewFpsRange(uint32_t *min, uint32_t *max); + /* Gets scene mode */ + int getSceneMode(void); + /* Gets the current state of video stabilization. */ + bool getVideoStabilization(void); + /* Gets the current white balance setting. */ + int getWhiteBalanceMode(void); + /* Sets current zoom value. */ + status_t setZoomLevel(int value); + /* Gets current zoom value. */ + int getZoomLevel(void); + /* Set the current crop region info */ + status_t setCropRegion(int x, int y, int w, int h); + /* Returns the recording mode hint. */ + bool getRecordingHint(void); + /* Gets GPS altitude. */ + double getGpsAltitude(void); + /* Gets GPS latitude coordinate. */ + double getGpsLatitude(void); + /* Gets GPS longitude coordinate. */ + double getGpsLongitude(void); + /* Gets GPS processing method. */ + const char * getGpsProcessingMethod(void); + /* Gets GPS timestamp. */ + long getGpsTimeStamp(void); + /* Gets the rotation angle in degrees relative to the orientation of the camera. */ + int getRotation(void); + +/* + * Additional API. 
+ */ + + /* Gets metering */ + int getMeteringMode(void); + /* Gets metering List */ + int getSupportedMeteringMode(void); + /* Gets brightness */ + int getBrightness(void); + /* Gets ISO */ + uint32_t getIso(void); + /* Gets ExposureTime for capture */ + uint64_t getCaptureExposureTime(void); + int32_t getLongExposureShotCount(void); + + /* Gets Contrast */ + uint32_t getContrast(void); + /* Gets Saturation */ + int getSaturation(void); + /* Gets Sharpness */ + int getSharpness(void); + /* Gets Hue */ + int getHue(void); + /* Gets WDR */ + bool getHdrMode(void); + /* Gets WDR */ + bool getWdrMode(void); + /* Gets anti shake */ + bool getAntiShake(void); + /* Gets gamma */ + bool getGamma(void); + /* Gets ODC */ + bool getOdcMode(void); + /* Gets Slow AE */ + bool getSlowAe(void); + /* Gets Shot mode */ + int getShotMode(void); + /* Gets Preview Buffer Count */ + int getPreviewBufferCount(void); + /* Sets Preview Buffer Count */ + void setPreviewBufferCount(int previewBufferCount); + + /* Gets 3DNR */ + bool get3dnrMode(void); + /* Gets DRC */ + bool getDrcMode(void); + /* Gets TPU enable case or not */ + bool getTpuEnabledMode(void); + +/* + * Vendor specific APIs + */ + + /* Gets Intelligent mode */ + int getIntelligentMode(void); + bool getVisionMode(void); + int getVisionModeFps(void); + int getVisionModeAeTarget(void); + + bool isSWVdisMode(void); /* need to change name */ + bool isSWVdisModeWithParam(int nPreviewW, int nPreviewH); + bool getSWVdisMode(void); + bool getSWVdisUIMode(void); + + bool getHWVdisMode(void); + int getHWVdisFormat(void); + + /* Gets VT mode */ + int getVtMode(void); + + /* Gets VR mode */ + int getVRMode(void); + + /* Gets Dual mode */ + bool getDualMode(void); + /* Returns the dual recording mode hint. */ + bool getDualRecordingHint(void); + /* Returns the effect hint. */ + bool getEffectHint(void); + /* Returns the effect recording mode hint. */ + bool getEffectRecordingHint(void); + + void setFastFpsMode(int fpsMode); + int getFastFpsMode(void); + + bool getHighResolutionCallbackMode(void); + bool getSamsungCamera(void); + void setSamsungCamera(bool value); + bool getHighSpeedRecording(void); + bool getScalableSensorMode(void); + void setScalableSensorMode(bool scaleMode); + long long int getCityId(void); + unsigned char getWeatherId(void); + /* Gets ImageUniqueId */ + const char *getImageUniqueId(void); + /* Gets camera angle */ + int getAngle(void); + + void setFlipHorizontal(int val); + int getFlipHorizontal(void); + void setFlipVertical(int val); + int getFlipVertical(void); + + /* Gets Burst mode */ + int getSeriesShotCount(void); + /* Return callback need CSC */ + bool getCallbackNeedCSC(void); + /* Return callback need copy to rendering */ + bool getCallbackNeedCopy2Rendering(void); + + /* Gets Illumination */ + int getIllumination(void); + /* Gets Low Light Shot */ + int getLLS(struct camera2_shot_ext *shot); + /* Gets Low Light A */ + bool getLLAMode(void); + /* Sets the device orientation angle in degrees to camera FW for FD scanning property. */ + bool setDeviceOrientation(int orientation); + /* Gets the device orientation angle in degrees . */ + int getDeviceOrientation(void); + /* Gets the FD orientation angle in degrees . 
*/ + int getFdOrientation(void); + /* Gets object tracking */ + bool getObjectTracking(void); + /* Gets status of object tracking operation */ + int getObjectTrackingStatus(void); + /* Gets smart auto */ + bool getSmartAuto(void); + /* Gets the status of smart auto operation */ + int getSmartAutoStatus(void); + /* Gets beauty shot */ + bool getBeautyShot(void); + +/* + * Static info + */ + /* Gets the exposure compensation step. */ + float getExposureCompensationStep(void); + + /* Gets the focal length (in millimeters) of the camera. */ + void getFocalLength(int *num, int *den); + + /* Gets the distances from the camera to where an object appears to be in focus. */ + void getFocusDistances(int *num, int *den); + + /* Gets the minimum exposure compensation index. */ + int getMinExposureCompensation(void); + + /* Gets the maximum exposure compensation index. */ + int getMaxExposureCompensation(void); + + /* Gets the maximum number of detected faces supported. */ + int getMaxNumDetectedFaces(void); + + /* Gets the maximum number of focus areas supported. */ + uint32_t getMaxNumFocusAreas(void); + + /* Gets the maximum number of metering areas supported. */ + uint32_t getMaxNumMeteringAreas(void); + + /* Gets the maximum zoom value allowed for snapshot. */ + int getMaxZoomLevel(void); + + /* Gets the supported antibanding values. */ + int getSupportedAntibanding(void); + + /* Gets the supported color effects. */ + int getSupportedColorEffects(void); + + /* Gets the supported color effects & hidden color effect. */ + bool isSupportedColorEffects(int effectMode); + + /* Checks whether the target supports Flash. */ + int getSupportedFlashModes(void); + + /* Gets the supported focus modes. */ + int getSupportedFocusModes(void); + + /* Gets the supported preview fps range. */ + bool getMaxPreviewFpsRange(int *min, int *max); + + /* Gets the supported scene modes. */ + int getSupportedSceneModes(void); + + /* Gets the supported white balance. */ + int getSupportedWhiteBalance(void); + + /* Gets the supported ISO values. */ + int getSupportedISO(void); + + /* Gets max zoom ratio */ + float getMaxZoomRatio(void); + /* Gets zoom ratio */ + float getZoomRatio(int zoom); + + /* Returns true if auto-exposure locking is supported. */ + bool getAutoExposureLockSupported(void); + + /* Returns true if auto-white balance locking is supported. */ + bool getAutoWhiteBalanceLockSupported(void); + + /* Returns true if smooth zoom is supported. */ + bool getSmoothZoomSupported(void); + + /* Returns true if video snapshot is supported. */ + bool getVideoSnapshotSupported(void); + + /* Returns true if video stabilization is supported. */ + bool getVideoStabilizationSupported(void); + + /* Returns true if zoom is supported. */ + bool getZoomSupported(void); + + /* Gets the horizontal angle of view in degrees. */ + void checkHorizontalViewAngle(void); + float getHorizontalViewAngle(void); + + /* Sets the horizontal angle of view in degrees. */ + void setHorizontalViewAngle(int pictureW, int pictureH); + + /* Gets the vertical angle of view in degrees.
*/ + float getVerticalViewAngle(void); + + /* Gets Fnumber */ + void getFnumber(int *num, int *den); + + /* Gets Aperture value */ + void getApertureValue(int *num, int *den); + + /* Gets FocalLengthIn35mmFilm */ + int getFocalLengthIn35mmFilm(void); + + bool isScalableSensorSupported(void); + + status_t getFixedExifInfo(exif_attribute_t *exifInfo); + void setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot); + + debug_attribute_t *getDebugAttribute(void); + +#ifdef DEBUG_RAWDUMP + bool checkBayerDumpEnable(void); +#endif/* DEBUG_RAWDUMP */ +#ifdef USE_BINNING_MODE + int getBinningMode(void); +#endif /* USE_BINNING_MODE */ +#ifdef USE_PREVIEW_CROP_FOR_ROATAION + int getRotationProperty(void); +#endif + +public: + bool DvfsLock(); + bool DvfsUnLock(); + + void updatePreviewFpsRange(void); + void updateHwSensorSize(void); + void updateBinningScaleRatio(void); + void updateBnsScaleRatio(void); + + void setHwPreviewStride(int stride); + int getHwPreviewStride(void); + + status_t duplicateCtrlMetadata(void *buf); + + status_t setRequestDis(int value); + + status_t setDisEnable(bool enable); + status_t setDrcEnable(bool enable); + status_t setDnrEnable(bool enable); + status_t setFdEnable(bool enable); + + bool getDisEnable(void); + bool getDrcEnable(void); + bool getDnrEnable(void); + bool getFdEnable(void); + + status_t setFdMode(enum facedetect_mode mode); + status_t getFdMeta(bool reprocessing, void *buf); + bool getUHDRecordingMode(void); + bool getFaceDetectionMode(bool flagCheckingRecording = true); + void vendorSpecificConstructor(int); + +private: + int * getHighSpeedSizeTable(int fpsMode); + bool m_isSupportedPreviewSize(const int width, const int height); + bool m_isSupportedPictureSize(const int width, const int height); + bool m_isSupportedVideoSize(const int width, const int height); + bool m_isHighResolutionCallbackSize(const int width, const int height); + void m_isHighResolutionMode(const CameraParameters& params); + + bool m_getSupportedVariableFpsList(int min, int max, + int *newMin, int *newMax); + + status_t m_getPreviewSizeList(int *sizeList); + + void m_getSWVdisPreviewSize(int w, int h, int *newW, int *newH); + void m_getScalableSensorSize(int *newSensorW, int *newSensorH); + + void m_initMetadata(void); + + bool m_isUHDRecordingMode(void); + +/* + * Vendor specific adjust function + */ +private: + status_t m_adjustPreviewFpsRange(int &newMinFps, int &newMaxFps); + status_t m_getPreviewBdsSize(ExynosRect *dstRect); + status_t m_adjustPreviewSize(int previewW, int previewH, + int *newPreviewW, int *newPreviewH, + int *newCalPreviewW, int *newCalPreviewH); + status_t m_adjustPreviewFormat(int &previewFormat, int &hwPreviewFormatH); + status_t m_adjustPictureSize(int *newPictureW, int *newPictureH, + int *newHwPictureW, int *newHwPictureH); + bool m_adjustHighSpeedRecording(int curMinFps, int curMaxFps, int newMinFps, int newMaxFps); + const char * m_adjustAntibanding(const char *strAntibanding); + const char * m_adjustFocusMode(const char *focusMode); + const char * m_adjustFlashMode(const char *flashMode); + const char * m_adjustWhiteBalanceMode(const char *whiteBalance); + bool m_adjustScalableSensorMode(const int scaleMode); + void m_adjustAeMode(enum aa_aemode curAeMode, enum aa_aemode *newAeMode); + void m_adjustSensorMargin(int *sensorMarginW, int *sensorMarginH); + void m_getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange); + void m_getCropRegion(int *x, int *y, int *w, 
int *h); + + /* for initial 120fps start due to quick launch */ +/* + void set120FpsState(enum INIT_120FPS_STATE state); + void clear120FpsState(enum INIT_120FPS_STATE state); + bool flag120FpsStart(void); + bool setSensorFpsAfter120fps(void); + void setInitValueAfter120fps(bool isAfter); +*/ + + status_t m_setBinningScaleRatio(int ratio); + status_t m_setBnsScaleRatio(int ratio); + status_t m_addHiddenResolutionList(String8 &string8Buf, struct ExynosSensorInfoBase *sensorInfo, + int w, int h, enum MODE mode, int cameraId); +/* 1.0 Function */ + void m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_dm *dm, + camera2_udm *udm); +/* 3.2 Function */ + void m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot); +public: + status_t setParameters(const CameraParameters& params); + CameraParameters getParameters() const; + int getCameraId(void); + /* Gets the detected face areas. */ + int getDetectedFacesAreas(int num, int *id, + int *score, ExynosRect *face, + ExynosRect *leftEye, ExynosRect *rightEye, + ExynosRect *mouth); + /* Gets the detected face areas. (Using ExynosRect2) */ + int getDetectedFacesAreas(int num, int *id, + int *score, ExynosRect2 *face, + ExynosRect2 *leftEye, ExynosRect2 *rightEye, + ExynosRect2 *mouth); + + void enableMsgType(int32_t msgType); + void disableMsgType(int32_t msgType); + bool msgTypeEnabled(int32_t msgType); + + status_t setFrameSkipCount(int count); + status_t getFrameSkipCount(int *count); + int getFrameSkipCount(void); + + void setIsFirstStartFlag(bool flag); + int getIsFirstStartFlag(void); + + void setPreviewRunning(bool enable); + void setPictureRunning(bool enable); + void setRecordingRunning(bool enable); + bool getPreviewRunning(void); + bool getPictureRunning(void); + bool getRecordingRunning(void); + bool getRestartPreview(void); + bool getPreviewSizeChanged(void); + + ExynosCameraActivityControl *getActivityControl(void); + status_t setAutoFocusMacroPosition(int autoFocusMacroPosition); + + void getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange); + void setSetfileYuvRange(void); + status_t checkSetfileYuvRange(void); + + void setUseDynamicBayer(bool enable); + bool getUseDynamicBayer(void); + void setUseDynamicBayerVideoSnapShot(bool enable); + bool getUseDynamicBayerVideoSnapShot(void); + void setUseDynamicScc(bool enable); + bool getUseDynamicScc(void); + + void setUsePureBayerReprocessing(bool enable); + bool getUsePureBayerReprocessing(void); + + bool isUseYuvReprocessing(void); + bool isUseYuvReprocessingForThumbnail(void); + + int32_t getReprocessingBayerMode(void); + + void setAdaptiveCSCRecording(bool enable); + bool getAdaptiveCSCRecording(void); + bool doCscRecording(void); + + uint32_t getBinningScaleRatio(void); + uint32_t getBnsScaleRatio(void); + /* Sets the dimensions for Sensor-related BNS. */ + void setBnsSize(int w, int h); + /* Gets the dimensions for Sensor-related BNS. */ + void getBnsSize(int *w, int *h); + + /* + * This must be called before startPreview(); + * it updates the H/W settings at once.
+ */ + bool updateTpuParameters(void); + int getHalVersion(void); + void setHalVersion(int halVersion); + struct ExynosSensorInfoBase *getSensorStaticInfo(); + +#ifdef BURST_CAPTURE + int getSeriesShotSaveLocation(void); + void setSeriesShotSaveLocation(int ssaveLocation); + char *getSeriesShotFilePath(void); + int m_seriesShotSaveLocation; + char m_seriesShotFilePath[100]; +#endif + int getSeriesShotDuration(void); + int getSeriesShotMode(void); + void setSeriesShotMode(int sshotMode, int count = 0); + + int getHalPixelFormat(void); + int convertingHalPreviewFormat(int previewFormat, int yuvRange); + + void setDvfsLock(bool lock); + bool getDvfsLock(void); + + int getMaxNumCPUCluster(void); + int getNumOfReprocessingFactory(void); + + void setFocusModeSetting(bool enable); + int getFocusModeSetting(void); + bool getSensorOTFSupported(void); + bool isReprocessing(void); + bool isSccCapture(void); + bool isFlite3aaOtf(void); + bool is3aaIspOtf(void); + bool isIspMcscOtf(void); + bool isMcscVraOtf(void); + bool isReprocessing3aaIspOTF(void); + bool isReprocessingIspMcscOTF(void); + bool isHWFCEnabled(void); + bool isHWFCOnDemand(void); + bool isUseThumbnailHWFC(void); + + bool getSupportedZoomPreviewWIthScaler(void); + void setZoomPreviewWIthScaler(bool enable); + bool getZoomPreviewWIthScaler(void); + bool isUsing3acForIspc(void); + + bool getReallocBuffer(); + bool setReallocBuffer(bool enable); + + bool setConfig(struct ExynosConfigInfo* config); + struct ExynosConfigInfo* getConfig(); + + bool setConfigMode(uint32_t mode); + int getConfigMode(); + /* Sets Shot mode */ + void m_setShotMode(int shotMode); +#ifdef RAWDUMP_CAPTURE + void setRawCaptureModeOn(bool enable); + bool getRawCaptureModeOn(void); +#endif + void setZoomActiveOn(bool enable); + bool getZoomActiveOn(void); + void setFocusModeLock(bool enable); + status_t setMarkingOfExifFlash(int flag); + int getMarkingOfExifFlash(void); + bool increaseMaxBufferOfPreview(void); +//Added + int getHDRDelay(void) { return HDR_DELAY; } + int getReprocessingBayerHoldCount(void) { return REPROCESSING_BAYER_HOLD_COUNT; } + int getFastenAeFps(void) { return FASTEN_AE_FPS; } + int getPerFrameControlPipe(void) {return PERFRAME_CONTROL_PIPE; } + int getPerFrameControlReprocessingPipe(void) {return PERFRAME_CONTROL_REPROCESSING_PIPE; } + int getPerFrameInfo3AA(void) { return PERFRAME_INFO_3AA; }; + int getPerFrameInfoIsp(void) { return PERFRAME_INFO_ISP; }; + int getPerFrameInfoDis(void) { return PERFRAME_INFO_DIS; }; + int getPerFrameInfoReprocessingPure3AA(void) { return PERFRAME_INFO_PURE_REPROCESSING_3AA; } + int getPerFrameInfoReprocessingPureIsp(void) { return PERFRAME_INFO_PURE_REPROCESSING_ISP; } + int getScalerNodeNumPicture(void) { return PICTURE_GSC_NODE_NUM;} + int getScalerNodeNumPreview(void) { return PREVIEW_GSC_NODE_NUM;} + int getScalerNodeNumVideo(void) { return VIDEO_GSC_NODE_NUM;} + bool isOwnScc(int cameraId); + bool isOwnMCSC(void); + bool isCompanion(int cameraId); + bool needGSCForCapture(int camId) { return (camId == CAMERA_ID_BACK) ? 
USE_GSC_FOR_CAPTURE_BACK : USE_GSC_FOR_CAPTURE_FRONT; } + bool getSetFileCtlMode(void); + bool getSetFileCtl3AA_ISP(void); + bool getSetFileCtl3AA(void); + bool getSetFileCtlISP(void); + bool getSetFileCtlSCP(void); + void setIsThumbnailCallbackOn(bool enable); + bool getIsThumbnailCallbackOn(); + void getYuvSize(int *width, int *height, const int outputPortId); + +private: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + + CameraParameters m_params; + struct camera2_shot_ext m_metadata; + + struct exynos_camera_info m_cameraInfo; + struct ExynosSensorInfoBase *m_staticInfo; + + exif_attribute_t m_exifInfo; + debug_attribute_t mDebugInfo; + + int32_t m_enabledMsgType; + mutable Mutex m_msgLock; + float m_calculatedHorizontalViewAngle; + /* frame skips */ + ExynosCameraCounter m_frameSkipCounter; + + mutable Mutex m_parameterLock; + + ExynosCameraActivityControl *m_activityControl; + + /* Flags for camera status */ + bool m_previewRunning; + bool m_previewSizeChanged; + bool m_pictureRunning; + bool m_recordingRunning; + bool m_flagCheckDualMode; + bool m_IsThumbnailCallbackOn; + bool m_flagRestartPreviewChecked; + bool m_flagRestartPreview; + int m_fastFpsMode; + bool m_flagFirstStart; + bool m_flagMeteringRegionChanged; + bool m_flagHWVDisMode; + + bool m_flagVideoStabilization; + bool m_flag3dnrMode; + +#ifdef RAWDUMP_CAPTURE + bool m_flagRawCaptureOn; +#endif + bool m_flagCheckRecordingHint; + + int m_setfile; + int m_yuvRange; + int m_setfileReprocessing; + int m_yuvRangeReprocessing; +#ifdef USE_BINNING_MODE + int m_binningProperty; +#endif +#ifdef USE_PREVIEW_CROP_FOR_ROATAION + int m_rotationProperty; +#endif + bool m_useSizeTable; + bool m_useDynamicBayer; + bool m_useDynamicBayerVideoSnapShot; + bool m_useDynamicScc; + bool m_useFastenAeStable; + bool m_usePureBayerReprocessing; + bool m_useAdaptiveCSCRecording; + bool m_dvfsLock; + int m_previewBufferCount; + + bool m_reallocBuffer; + mutable Mutex m_reallocLock; + struct ExynosConfigInfo *m_exynosconfig; + + bool m_setFocusmodeSetting; + bool m_zoom_activated; + int m_firing_flash_marking; + int m_halVersion; + + uint64_t m_exposureTimeCapture; + bool m_zoomWithScaler; + bool m_isFactoryMode; +}; +}; /* namespace android */ +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactory.cpp new file mode 100644 index 0000000..d545b71 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory.cpp @@ -0,0 +1,943 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactory" +#include + +#include "ExynosCameraFrameFactory.h" + +namespace android { + +ExynosCameraFrameFactory::~ExynosCameraFrameFactory() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +status_t ExynosCameraFrameFactory::destroy(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + ret = m_pipes[i]->destroy(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_pipes[%d]->destroy() fail", __FUNCTION__, __LINE__, i); + if (m_shot_ext != NULL) { + delete m_shot_ext; + m_shot_ext = NULL; + } + return ret; + } + + SAFE_DELETE(m_pipes[i]); + + CLOGD("DEBUG(%s):Pipe(%d) destroyed", __FUNCTION__, i); + } + } + if (m_shot_ext != NULL) { + delete m_shot_ext; + m_shot_ext = NULL; + } + + m_setCreate(false); + + return ret; +} + +status_t ExynosCameraFrameFactory::setFrameManager(ExynosCameraFrameManager *manager) +{ + m_frameMgr = manager; + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::getFrameManager(ExynosCameraFrameManager **manager) +{ + *manager = m_frameMgr; + return NO_ERROR; +} + +bool ExynosCameraFrameFactory::isCreated(void) +{ + return m_getCreate(); +} + +status_t ExynosCameraFrameFactory::m_setCreate(bool create) +{ + Mutex::Autolock lock(m_createLock); + CLOGD("DEBUG(%s[%d]) setCreate old(%s) new(%s)", __FUNCTION__, __LINE__, (m_create)?"true":"false", (create)?"true":"false"); + m_create = create; + return NO_ERROR; +} + +bool ExynosCameraFrameFactory::m_getCreate() +{ + Mutex::Autolock lock(m_createLock); + return m_create; +} + +int ExynosCameraFrameFactory::m_getFliteNodenum() +{ + int fliteNodeNim = FIMC_IS_VIDEO_SS0_NUM; + + fliteNodeNim = (m_cameraId == CAMERA_ID_BACK)?MAIN_CAMERA_FLITE_NUM:FRONT_CAMERA_FLITE_NUM; + + return fliteNodeNim; +} + +int ExynosCameraFrameFactory::m_getSensorId(__unused unsigned int nodeNum, bool reprocessing) +{ + unsigned int reprocessingBit = 0; + unsigned int nodeNumBit = 0; + unsigned int sensorIdBit = 0; + unsigned int sensorId = getSensorId(m_cameraId); + + if (reprocessing == true) + reprocessingBit = (1 << REPROCESSING_SHIFT); + + /* + * hack + * nodeNum - FIMC_IS_VIDEO_BAS_NUM is proper. 
+ * but, historically, FIMC_IS_VIDEO_SS0_NUM - FIMC_IS_VIDEO_SS0_NUM is worked properly + */ + //nodeNumBit = ((nodeNum - FIMC_IS_VIDEO_BAS_NUM) << SSX_VINDEX_SHIFT); + nodeNumBit = ((FIMC_IS_VIDEO_SS0_NUM - FIMC_IS_VIDEO_SS0_NUM) << SSX_VINDEX_SHIFT); + + sensorIdBit = (sensorId << 0); + + return (reprocessingBit) | + (nodeNumBit) | + (sensorIdBit); +} + +int ExynosCameraFrameFactory::m_getSensorId(unsigned int nodeNum, bool flagOTFInterface, bool flagLeader, bool reprocessing) +{ + /* sub 100, and make index */ + nodeNum -= 100; + + unsigned int reprocessingBit = 0; + unsigned int otfInterfaceBit = 0; + unsigned int leaderBit = 0; + unsigned int sensorId = getSensorId(m_cameraId); + + if (reprocessing == true) + reprocessingBit = 1; + + if (flagLeader == true) + leaderBit = 1; + + if (flagOTFInterface == true) + otfInterfaceBit = 1; + + return ((reprocessingBit << INPUT_STREAM_SHIFT) & INPUT_STREAM_MASK) | + ((sensorId << INPUT_MODULE_SHIFT) & INPUT_MODULE_MASK) | + ((nodeNum << INPUT_VINDEX_SHIFT) & INPUT_VINDEX_MASK) | + ((otfInterfaceBit << INPUT_MEMORY_SHIFT) & INPUT_MEMORY_MASK) | + ((leaderBit << INPUT_LEADER_SHIFT) & INPUT_LEADER_MASK); +} + + +status_t ExynosCameraFrameFactory::m_initFrameMetadata(ExynosCameraFrame *frame) +{ + int ret = 0; + + if (m_shot_ext == NULL) { + CLOGE("ERR(%s[%d]): new struct camera2_shot_ext fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + memset(m_shot_ext, 0x0, sizeof(struct camera2_shot_ext)); + + m_shot_ext->shot.magicNumber = SHOT_MAGIC_NUMBER; + + /* TODO: These bypass values are enabled at per-frame control */ +#if 1 + m_bypassDRC = m_parameters->getDrcEnable(); + m_bypassDNR = m_parameters->getDnrEnable(); + m_bypassDIS = m_parameters->getDisEnable(); + m_bypassFD = m_parameters->getFdEnable(); +#endif + setMetaBypassDrc(m_shot_ext, m_bypassDRC); + setMetaBypassDnr(m_shot_ext, m_bypassDNR); + setMetaBypassDis(m_shot_ext, m_bypassDIS); + setMetaBypassFd(m_shot_ext, m_bypassFD); + + ret = frame->initMetaData(m_shot_ext); + if (ret < 0) + CLOGE("ERR(%s[%d]): initMetaData fail", __FUNCTION__, __LINE__); + + frame->setRequest(m_request3AP, + m_request3AC, + m_requestISP, + m_requestISPP, + m_requestISPC, + m_requestSCC, + m_requestDIS, + m_requestSCP); + + if (m_flagReprocessing == true) { + frame->setRequest(PIPE_MCSC0_REPROCESSING, m_requestSCP); + frame->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, m_requestJPEG); + frame->setRequest(PIPE_HWFC_JPEG_DST_REPROCESSING, m_requestJPEG); + frame->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, m_requestThumbnail); + frame->setRequest(PIPE_HWFC_THUMB_DST_REPROCESSING, m_requestThumbnail); + } + + return ret; +} + +int ExynosCameraFrameFactory::setSrcNodeEmpty(int sensorId) +{ + return (sensorId & INPUT_STREAM_MASK) | + (sensorId & INPUT_MODULE_MASK) | + (0 & INPUT_VINDEX_MASK) | + (sensorId & INPUT_MEMORY_MASK) | + (sensorId & INPUT_LEADER_MASK); +} + +int ExynosCameraFrameFactory::setLeader(int sensorId, bool flagLeader) +{ + return (sensorId & INPUT_STREAM_MASK) | + (sensorId & INPUT_MODULE_MASK) | + (sensorId & INPUT_VINDEX_MASK) | + (sensorId & INPUT_MEMORY_MASK) | + ((flagLeader)?1:0 & INPUT_LEADER_MASK); +} + +ExynosCameraFrame *ExynosCameraFrameFactory::createNewFrameOnlyOnePipe(int pipeId, int frameCnt) +{ + Mutex::Autolock lock(m_frameLock); + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {}; + + if (frameCnt < 0) { + frameCnt = m_frameCount; + } + + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, frameCnt); + if (frame == NULL) + return 
NULL; + + /* set pipe to linkageList */ + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + + return frame; +} + +ExynosCameraFrame *ExynosCameraFrameFactory::createNewFrameVideoOnly(void) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {}; + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount); + if (frame == NULL) + return NULL; + + /* set GSC-Video pipe to linkageList */ + newEntity[INDEX(PIPE_GSC_VIDEO)] = new ExynosCameraFrameEntity(PIPE_GSC_VIDEO, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VIDEO)]); + + return frame; +} + +status_t ExynosCameraFrameFactory::m_initPipelines(ExynosCameraFrame *frame) +{ + ExynosCameraFrameEntity *curEntity = NULL; + ExynosCameraFrameEntity *childEntity = NULL; + frame_queue_t *frameQ = NULL; + int ret = 0; + + curEntity = frame->getFirstEntity(); + + while (curEntity != NULL) { + childEntity = curEntity->getNextEntity(); + if (childEntity != NULL) { + ret = getInputFrameQToPipe(&frameQ, childEntity->getPipeId()); + if (ret < 0 || frameQ == NULL) { + CLOGE("ERR(%s):getInputFrameQToPipe fail, ret(%d), frameQ(%p)", __FUNCTION__, ret, frameQ); + return ret; + } + + ret = setOutputFrameQToPipe(frameQ, curEntity->getPipeId()); + if (ret < 0) { + CLOGE("ERR(%s):setOutputFrameQToPipe fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + + if (childEntity->getPipeId() != PIPE_VRA) { + /* check Image Configuration Equality */ + ret = m_checkPipeInfo(curEntity->getPipeId(), childEntity->getPipeId()); + if (ret < 0) { + CLOGE("ERR(%s):checkPipeInfo fail, Pipe[%d], Pipe[%d]", __FUNCTION__, curEntity->getPipeId(), childEntity->getPipeId()); + return ret; + } + } + + curEntity = childEntity; + } else { + curEntity = frame->getNextEntity(); + } + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::pushFrameToPipe(ExynosCameraFrame **newFrame, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->pushFrame(newFrame); + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::setOutputFrameQToPipe(frame_queue_t *outputQ, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->setOutputFrameQ(outputQ); + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::getOutputFrameQToPipe(frame_queue_t **outputQ, uint32_t pipeId) +{ + CLOGV("DEBUG(%s[%d]):pipeId=%d", __FUNCTION__, __LINE__, pipeId); + m_pipes[INDEX(pipeId)]->getOutputFrameQ(outputQ); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::setFrameDoneQToPipe(frame_queue_t *frameDoneQ, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->setFrameDoneQ(frameDoneQ); + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::getFrameDoneQToPipe(frame_queue_t **frameDoneQ, uint32_t pipeId) +{ + CLOGV("DEBUG(%s[%d]):pipeId=%d", __FUNCTION__, __LINE__, pipeId); + m_pipes[INDEX(pipeId)]->getFrameDoneQ(frameDoneQ); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::getInputFrameQToPipe(frame_queue_t **inputFrameQ, uint32_t pipeId) +{ + CLOGV("DEBUG(%s[%d]):pipeId=%d", __FUNCTION__, __LINE__, pipeId); + + m_pipes[INDEX(pipeId)]->getInputFrameQ(inputFrameQ); + + if (inputFrameQ == NULL) + CLOGE("ERR(%s[%d])inputFrameQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::setBufferManagerToPipe(ExynosCameraBufferManager **bufferManager, uint32_t pipeId) +{ + if (m_pipes[INDEX(pipeId)] == NULL) { + CLOGE("ERR(%s[%d])m_pipes[INDEX(%d)] == NULL. 
pipeId(%d)", __FUNCTION__, __LINE__, INDEX(pipeId), pipeId); + return INVALID_OPERATION; + } + + return m_pipes[INDEX(pipeId)]->setBufferManager(bufferManager); +} + +status_t ExynosCameraFrameFactory::getThreadState(int **threadState, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->getThreadState(threadState); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::getThreadInterval(uint64_t **threadInterval, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->getThreadInterval(threadInterval); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::getThreadRenew(int **threadRenew, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->getThreadRenew(threadRenew); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::incThreadRenew(uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->incThreadRenew(); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::startThread(uint32_t pipeId) +{ + int ret = 0; + + CLOGI("INFO(%s[%d]):pipeId=%d", __FUNCTION__, __LINE__, pipeId); + + ret = m_pipes[INDEX(pipeId)]->startThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):start thread fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + /* TODO: exception handling */ + } + return ret; +} + +status_t ExynosCameraFrameFactory::stopThread(uint32_t pipeId) +{ + int ret = 0; + + CLOGI("INFO(%s[%d]):pipeId=%d", __FUNCTION__, __LINE__, pipeId); + + if (m_pipes[INDEX(pipeId)] == NULL) { + CLOGE("ERR(%s[%d]):m_pipes[INDEX(%d)] == NULL. so, fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(pipeId)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):stop thread fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + /* TODO: exception handling */ + } + return ret; +} + +status_t ExynosCameraFrameFactory::setStopFlag(void) +{ + CLOGE("ERR(%s[%d]):Must use the concrete class, don't use the superclass", __FUNCTION__, __LINE__); + return INVALID_OPERATION; +} + +status_t ExynosCameraFrameFactory::stopThreadAndWait(uint32_t pipeId, int sleep, int times) +{ + status_t status = NO_ERROR; + + CLOGI("INFO(%s[%d]):pipeId=%d", __FUNCTION__, __LINE__, pipeId); + status = m_pipes[INDEX(pipeId)]->stopThreadAndWait(sleep, times); + if (status < 0) { + CLOGE("ERR(%s[%d]):pipe(%d) stopThreadAndWait fail, ret(%d)", __FUNCTION__, __LINE__, pipeId, status); + /* TODO: exception handling */ + status = INVALID_OPERATION; + } + return status; +} + +status_t ExynosCameraFrameFactory::stopPipe(uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):Pipe:%d stopThread fail, ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(pipeId)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):Pipe:%d stop fail, ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::stopPipes(void) +{ + CLOGE("ERR(%s[%d]):Must use the concrete class, don't use the superclass", __FUNCTION__, __LINE__); + return INVALID_OPERATION; +} + +void ExynosCameraFrameFactory::dump() +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + m_pipes[i]->dump(); + } + } + + return; +} + +void ExynosCameraFrameFactory::setRequest(int pipeId, bool enable) +{ + switch (pipeId) { + case PIPE_FLITE: + case PIPE_FLITE_REPROCESSING: + m_requestFLITE = enable ?
1 : 0; + break; + case PIPE_3AC: + case PIPE_3AC_REPROCESSING: + m_request3AC = enable ? 1 : 0; + break; + case PIPE_3AP: + case PIPE_3AP_REPROCESSING: + m_request3AP = enable ? 1 : 0; + break; + case PIPE_ISPC: + case PIPE_ISPC_REPROCESSING: + m_requestISPC = enable ? 1 : 0; + break; + case PIPE_ISPP: + case PIPE_ISPP_REPROCESSING: + m_requestISPP = enable ? 1 : 0; + break; + case PIPE_MCSC0: + case PIPE_MCSC0_REPROCESSING: + m_requestSCP = enable ? 1 : 0; + break; + case PIPE_HWFC_JPEG_SRC_REPROCESSING: + case PIPE_HWFC_JPEG_DST_REPROCESSING: + m_requestJPEG = enable ? 1 : 0; + break; + case PIPE_HWFC_THUMB_SRC_REPROCESSING: + case PIPE_HWFC_THUMB_DST_REPROCESSING: + m_requestThumbnail = enable ? 1 : 0; + break; + default: + CLOGW("WRN(%s[%d]):Invalid pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + break; + } +} + +void ExynosCameraFrameFactory::setRequestFLITE(bool enable) +{ +#if 1 + m_requestFLITE = enable ? 1 : 0; +#else + /* If not FLite->3AA OTF, FLite must be on */ + if (m_flagFlite3aaOTF == true) { + m_requestFLITE = enable ? 1 : 0; + } else { + CLOGW("WRN(%s[%d]): isFlite3aaOtf (%d) == false). so Skip set m_requestFLITE(%d) as (%d)", + __FUNCTION__, __LINE__, m_cameraId, m_requestFLITE, enable); + } +#endif + +} + +void ExynosCameraFrameFactory::setRequest3AC(bool enable) +{ +#if 1 + m_request3AC = enable ? 1 : 0; +#else + /* From 74xx, Front will use reprocessing. so, we need to prepare BDS */ + if (isReprocessing(m_cameraId) == true) { + if (m_parameters->getUsePureBayerReprocessing() == true) { + m_request3AC = 0; + } else { + m_request3AC = enable ? 1 : 0; + } + } else { + m_request3AC = 0; + } +#endif +} + +void ExynosCameraFrameFactory::setRequestISPC(bool enable) +{ + m_requestISPC = enable ? 1 : 0; +} + +void ExynosCameraFrameFactory::setRequestISPP(bool enable) +{ + m_requestISPP = enable ? 1 : 0; +} + + +void ExynosCameraFrameFactory::setRequestSCC(bool enable) +{ + m_requestSCC = enable ? 1 : 0; +} + +void ExynosCameraFrameFactory::setRequestDIS(bool enable) +{ + m_requestDIS = enable ? 1 : 0; +} + +status_t ExynosCameraFrameFactory::setParam(struct v4l2_streamparm *streamParam, uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->setParam(*streamParam); + + return ret; +} + +status_t ExynosCameraFrameFactory::m_checkPipeInfo(uint32_t srcPipeId, uint32_t dstPipeId) +{ + int srcFullW, srcFullH, srcColorFormat; + int dstFullW, dstFullH, dstColorFormat; + int isDifferent = 0; + int ret = 0; + + ret = m_pipes[INDEX(srcPipeId)]->getPipeInfo(&srcFullW, &srcFullH, &srcColorFormat, SRC_PIPE); + if (ret < 0) { + CLOGE("ERR(%s):Source getPipeInfo fail", __FUNCTION__); + return ret; + } + ret = m_pipes[INDEX(dstPipeId)]->getPipeInfo(&dstFullW, &dstFullH, &dstColorFormat, DST_PIPE); + if (ret < 0) { + CLOGE("ERR(%s):Destination getPipeInfo fail", __FUNCTION__); + return ret; + } + + if (srcFullW != dstFullW || srcFullH != dstFullH || srcColorFormat != dstColorFormat) { + CLOGE("ERR(%s[%d]):Video Node Image Configuration is NOT matching. 
so, fail", __FUNCTION__, __LINE__); + + CLOGE("ERR(%s[%d]):fail info : srcPipeId(%d), srcFullW(%d), srcFullH(%d), srcColorFormat(%d)", + __FUNCTION__, __LINE__, srcPipeId, srcFullW, srcFullH, srcColorFormat); + + CLOGE("ERR(%s[%d]):fail info : dstPipeId(%d), dstFullW(%d), dstFullH(%d), dstColorFormat(%d)", + __FUNCTION__, __LINE__, dstPipeId, dstFullW, dstFullH, dstColorFormat); + + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory::dumpFimcIsInfo(uint32_t pipeId, bool bugOn) +{ + int ret = 0; + int pipeIdIsp = 0; + + if (m_pipes[INDEX(pipeId)] != NULL) + ret = m_pipes[INDEX(pipeId)]->dumpFimcIsInfo(bugOn); + else + ALOGE("ERR(%s): pipe is not ready (%d/%d)", __FUNCTION__, pipeId, bugOn); + + return ret; +} + +#ifdef MONITOR_LOG_SYNC +status_t ExynosCameraFrameFactory::syncLog(uint32_t pipeId, uint32_t syncId) +{ + int ret = 0; + int pipeIdIsp = 0; + + if (m_pipes[INDEX(pipeId)] != NULL) + ret = m_pipes[INDEX(pipeId)]->syncLog(syncId); + else + ALOGE("ERR(%s): pipe is not ready (%d/%d)", __FUNCTION__, pipeId, syncId); + + return ret; +} +#endif + +status_t ExynosCameraFrameFactory::setControl(int cid, int value, uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->setControl(cid, value); + + return ret; +} + +bool ExynosCameraFrameFactory::checkPipeThreadRunning(uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->isThreadRunning(); + + return ret; +} + +status_t ExynosCameraFrameFactory::getControl(int cid, int *value, uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->getControl(cid, value); + + return ret; +} + +status_t ExynosCameraFrameFactory::m_checkNodeSetting(int pipeId) +{ + status_t ret = NO_ERROR; + + for (int i = 0; i < MAX_NODE; i++) { + /* in case of wrong nodeNums set */ + if (m_deviceInfo[pipeId].nodeNum[i] != m_nodeNums[pipeId][i]) { + CLOGE("ERR(%s[%d]):m_deviceInfo[%d].nodeNum[%d](%d) != m_nodeNums[%d][%d](%d). so, fail", + __FUNCTION__, __LINE__, + pipeId, i, m_deviceInfo[pipeId].nodeNum[i], + pipeId, i, m_nodeNums[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of not set sensorId */ + if (0 < m_deviceInfo[pipeId].nodeNum[i] && m_sensorIds[pipeId][i] < 0) { + CLOGE("ERR(%s[%d]):0 < m_deviceInfo[%d].nodeNum[%d](%d) && m_sensorIds[%d][%d](%d) < 0. so, fail", + __FUNCTION__, __LINE__, + pipeId, i, m_deviceInfo[pipeId].nodeNum[i], + pipeId, i, m_sensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of strange set sensorId */ + if (m_deviceInfo[pipeId].nodeNum[i] < 0 && 0 < m_sensorIds[pipeId][i]) { + CLOGE("ERR(%s[%d]):m_deviceInfo[%d].nodeNum[%d](%d) < 0 && 0 < m_sensorIds[%d][%d](%d). so, fail", + __FUNCTION__, __LINE__, + pipeId, i, m_deviceInfo[pipeId].nodeNum[i], + pipeId, i, m_sensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of not set secondarySensorId */ + if (0 < m_deviceInfo[pipeId].secondaryNodeNum[i] && m_secondarySensorIds[pipeId][i] < 0) { + CLOGE("ERR(%s[%d]):0 < m_deviceInfo[%d].secondaryNodeNum[%d](%d) && m_secondarySensorIds[%d][%d](%d) < 0. so, fail", + __FUNCTION__, __LINE__, + pipeId, i, m_deviceInfo[pipeId].secondaryNodeNum[i], + pipeId, i, m_secondarySensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of strange set secondarySensorId */ + if (m_deviceInfo[pipeId].secondaryNodeNum[i] < 0 && 0 < m_secondarySensorIds[pipeId][i]) { + CLOGE("ERR(%s[%d]):m_deviceInfo[%d].secondaryNodeNum[%d](%d) < 0 && 0 < m_secondarySensorIds[%d][%d](%d). 
so, fail", + __FUNCTION__, __LINE__, + pipeId, i, m_deviceInfo[pipeId].secondaryNodeNum[i], + pipeId, i, m_secondarySensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + } + +err: + return ret; +} + +enum NODE_TYPE ExynosCameraFrameFactory::getNodeType(uint32_t pipeId) +{ + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected call.. pipe_id(%d), assert!!!!", __FUNCTION__, __LINE__, pipeId); + + return INVALID_NODE; +}; + +int ExynosCameraFrameFactory::m_initFlitePipe(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + struct ExynosConfigInfo *config = m_parameters->getConfig(); + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + + /* FLITE pipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + /* setParam for Frame rate : must after setInput on Flite */ + uint32_t min, max, frameRate; + struct v4l2_streamparm streamParam; + + memset(&streamParam, 0x0, sizeof(v4l2_streamparm)); + m_parameters->getPreviewFpsRange(&min, &max); + + if (m_parameters->getScalableSensorMode() == true) + frameRate = 24; + else + frameRate = max; + + streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + streamParam.parm.capture.timeperframe.numerator = 1; + streamParam.parm.capture.timeperframe.denominator = frameRate; + CLOGI("INFO(%s[%d]:set framerate (denominator=%d)", __FUNCTION__, __LINE__, frameRate); + ret = setParam(&streamParam, PIPE_FLITE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE setParam fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[0].bytesPerPlane[0] = ROUND_UP(pipeInfo[0].rectInfo.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[0].bytesPerPlane[0] = ROUND_UP(pipeInfo[0].rectInfo.fullW, 10) * 8 / 5; + } +#endif + + ret = m_pipes[INDEX(PIPE_FLITE)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* set BNS ratio */ + int bnsScaleRatio = 0; + int bnsSize = 0; + if( m_parameters->getHighSpeedRecording() +#ifdef USE_BINNING_MODE + || m_parameters->getBinningMode() +#endif + ) { + bnsScaleRatio = 1000; + 
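+ /* 1000 == x1.0 (the ratio is passed in 1/1000 units, see the log below), i.e. BNS down-scaling is effectively disabled for high-speed recording / binning mode */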
} else { + bnsScaleRatio = m_parameters->getBnsScaleRatio(); + } + ret = m_pipes[INDEX(PIPE_FLITE)]->setControl(V4L2_CID_IS_S_BNS, bnsScaleRatio); + if (ret < 0) { + CLOGE("ERR(%s[%d]): set BNS(%d) fail, ret(%d)", __FUNCTION__, __LINE__, bnsScaleRatio, ret); + } else { + ret = m_pipes[INDEX(PIPE_FLITE)]->getControl(V4L2_CID_IS_G_BNS_SIZE, &bnsSize); + if (ret < 0) { + CLOGE("ERR(%s[%d]): get BNS size fail, ret(%d)", __FUNCTION__, __LINE__, ret); + bnsSize = -1; + } + } + + int bnsWidth = 0; + int bnsHeight = 0; + if (bnsSize > 0) { + bnsHeight = bnsSize & 0xffff; + bnsWidth = bnsSize >> 16; + + CLOGI("INFO(%s[%d]): BNS scale down ratio(%.1f), size (%dx%d)", __FUNCTION__, __LINE__, (float)(bnsScaleRatio / 1000), bnsWidth, bnsHeight); + m_parameters->setBnsSize(bnsWidth, bnsHeight); + } + + return NO_ERROR; +} + +void ExynosCameraFrameFactory::m_initDeviceInfo(int pipeId) +{ + camera_device_info_t nullDeviceInfo; + + m_deviceInfo[pipeId] = nullDeviceInfo; + + for (int i = 0; i < MAX_NODE; i++) { + // set nodeNum + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + // set default sensorId + m_sensorIds[pipeId][i] = -1; + + // set second sensorId + m_secondarySensorIds[pipeId][i] = -1; + } +} + +void ExynosCameraFrameFactory::m_init(void) +{ + m_cameraId = 0; + memset(m_name, 0x00, sizeof(m_name)); + m_frameCount = 0; + + memset(m_nodeNums, -1, sizeof(m_nodeNums)); + memset(m_sensorIds, -1, sizeof(m_sensorIds)); + memset(m_secondarySensorIds, -1, sizeof(m_secondarySensorIds)); + + for (int i = 0; i < MAX_NUM_PIPES; i++) + m_pipes[i] = NULL; + + /* setting about request */ + m_requestFLITE = 1; + + m_request3AP = 1; + m_request3AC = 0; + m_requestISP = 1; + + m_requestISPC = 0; + m_requestISPP = 0; + m_requestSCC = 0; + + m_requestDIS = 0; + m_requestSCP = 1; + + m_requestVRA = 0; + + m_requestJPEG = 0; + m_requestThumbnail = 0; + + /* setting about bypass */ + m_bypassDRC = true; + m_bypassDIS = true; + m_bypassDNR = true; + m_bypassFD = true; + + m_setCreate(false); + + m_flagFlite3aaOTF = false; + m_flag3aaIspOTF = false; + m_flagIspTpuOTF = false; + m_flagIspMcscOTF = false; + m_flagTpuMcscOTF = false; + m_flagMcscVraOTF = false; + m_supportReprocessing = false; + m_flagReprocessing = false; + m_supportPureBayerReprocessing = false; + m_supportSCC = false; + m_supportMCSC = false; + m_shot_ext = new struct camera2_shot_ext; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory.h b/libcamera/34xx/hal1/ExynosCameraFrameFactory.h new file mode 100644 index 0000000..717e718 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory.h @@ -0,0 +1,218 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_FRAME_FACTORY_H +#define EXYNOS_CAMERA_FRAME_FACTORY_H + +#include "ExynosCameraFrame.h" + +#include "ExynosCameraPipe.h" +#include "ExynosCameraMCPipe.h" +#include "ExynosCameraPipeFlite.h" +#include "ExynosCameraPipeVRA.h" +#include "ExynosCameraPipeGSC.h" +#include "ExynosCameraPipeJpeg.h" +#include "ExynosCameraFrameManager.h" + +#include "ExynosCamera1Parameters.h" + +namespace android { + +#define SET_OUTPUT_DEVICE_BASIC_INFO(perframeInfo) \ + pipeInfo[nodeType].rectInfo = tempRect;\ + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;\ + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = perframeInfo;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(pipeId)].nodeNum[nodeType] - FIMC_IS_VIDEO_BAS_NUM); +#define SET_CAPTURE_DEVICE_BASIC_INFO() \ + pipeInfo[nodeType].rectInfo = tempRect;\ + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;\ + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE;\ + pipeInfo[leaderNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE;\ + pipeInfo[leaderNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(pipeId)].nodeNum[nodeType] - FIMC_IS_VIDEO_BAS_NUM);\ + +class ExynosCameraFrameFactory { +public: + ExynosCameraFrameFactory() + { + m_init(); + } + + ExynosCameraFrameFactory(int cameraId, ExynosCamera1Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? 
"FrameFactoryBack" : "FrameFactoryFront"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCameraFrameFactory(); + + virtual status_t create(bool active = true) = 0; + + virtual status_t setFrameManager(ExynosCameraFrameManager *manager); + virtual status_t getFrameManager(ExynosCameraFrameManager **manager); + virtual status_t destroy(void); + virtual bool isCreated(void); + + virtual ExynosCameraFrame *createNewFrameOnlyOnePipe(int pipeId, int frameCnt=-1); + virtual ExynosCameraFrame *createNewFrameVideoOnly(void); + virtual ExynosCameraFrame *createNewFrame(void) = 0; + + virtual status_t initPipes(void) = 0; + virtual status_t preparePipes(void) = 0; + virtual status_t startPipes(void) = 0; + virtual status_t stopPipes(void) = 0; + virtual status_t startInitialThreads(void) = 0; + + virtual status_t pushFrameToPipe(ExynosCameraFrame **newFrame, uint32_t pipeId); + virtual status_t setOutputFrameQToPipe(frame_queue_t *outputQ, uint32_t pipeId); + virtual status_t getOutputFrameQToPipe(frame_queue_t **outputQ, uint32_t pipeId); + virtual status_t setFrameDoneQToPipe(frame_queue_t *frameDoneQ, uint32_t pipeId); + virtual status_t getFrameDoneQToPipe(frame_queue_t **frameDoneQ, uint32_t pipeId); + virtual status_t getInputFrameQToPipe(frame_queue_t **inputFrameQ, uint32_t pipeId); + + virtual status_t setBufferManagerToPipe(ExynosCameraBufferManager **bufferManager, uint32_t pipeId); + + virtual status_t startThread(uint32_t pipeId); + virtual status_t stopThread(uint32_t pipeId); + virtual status_t setStopFlag(void); + virtual status_t stopThreadAndWait(uint32_t pipeId, int sleep = 5, int times = 40); + virtual status_t stopPipe(uint32_t pipeId); + + virtual status_t getThreadState(int **threadState, uint32_t pipeId); + virtual status_t getThreadInterval(uint64_t **threadInterval, uint32_t pipeId); + virtual status_t getThreadRenew(int **threadRenew, uint32_t pipeId); + virtual status_t incThreadRenew(uint32_t pipeId); + virtual void dump(void); + + virtual void setRequest(int pipeId, bool enable); + virtual void setRequestFLITE(bool enable); + virtual void setRequest3AC(bool enable); + virtual void setRequestISPC(bool enable); + virtual void setRequestISPP(bool enable); + virtual void setRequestSCC(bool enable); + virtual void setRequestDIS(bool enable); + + virtual status_t setParam(struct v4l2_streamparm *streamParam, uint32_t pipeId); + virtual status_t setControl(int cid, int value, uint32_t pipeId); + virtual status_t getControl(int cid, int *value, uint32_t pipeId); + + virtual bool checkPipeThreadRunning(uint32_t pipeId); + + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); + + /* only for debugging */ + virtual status_t dumpFimcIsInfo(uint32_t pipeId, bool bugOn); +#ifdef MONITOR_LOG_SYNC + virtual status_t syncLog(uint32_t pipeId, uint32_t syncId); +#endif + +protected: + virtual status_t m_initPipelines(ExynosCameraFrame *frame); + virtual status_t m_initFrameMetadata(ExynosCameraFrame *frame); + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame) = 0; + virtual status_t m_checkPipeInfo(uint32_t srcPipeId, uint32_t dstPipeId); + virtual status_t m_setCreate(bool create); + virtual bool m_getCreate(); + virtual int m_getFliteNodenum(); + + /* 54xx style*/ + virtual int m_getSensorId(unsigned int nodeNum, bool reprocessing); + + /* 74xx style*/ + virtual int m_getSensorId(unsigned int nodeNum, bool flagOTFInterface, bool flagLeader, bool reprocessing); + + virtual int setSrcNodeEmpty(int sensorId); + virtual int 
setLeader(int sensorId, bool flagLeader); + virtual status_t m_setupConfig(void) = 0; + virtual status_t m_checkNodeSetting(int pipeId); + virtual void m_initDeviceInfo(int pipeId); + + /* flite pipe setting */ + virtual status_t m_initFlitePipe(void); + +private: + void m_init(void); + +protected: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + + ExynosCameraPipe *m_pipes[MAX_NUM_PIPES]; + + int32_t m_nodeNums[MAX_NUM_PIPES][MAX_NODE]; + int32_t m_sensorIds[MAX_NUM_PIPES][MAX_NODE]; + int32_t m_secondarySensorIds[MAX_NUM_PIPES][MAX_NODE]; + camera_device_info_t m_deviceInfo[MAX_NUM_PIPES]; + + ExynosCamera1Parameters *m_parameters; + + ExynosCameraFrameManager *m_frameMgr; + + uint32_t m_frameCount; + Mutex m_frameLock; + + ExynosCameraActivityControl *m_activityControl; + + uint32_t m_requestFLITE; + uint32_t m_request3AP; + uint32_t m_request3AC; + uint32_t m_requestISP; + uint32_t m_requestISPC; + uint32_t m_requestISPP; + uint32_t m_requestSCC; + uint32_t m_requestDIS; + uint32_t m_requestSCP; + uint32_t m_requestVRA; + uint32_t m_requestJPEG; + uint32_t m_requestThumbnail; + + bool m_bypassDRC; + bool m_bypassDIS; + bool m_bypassDNR; + bool m_bypassFD; + + Mutex m_createLock; + + bool m_flagFlite3aaOTF; + bool m_flag3aaIspOTF; + bool m_flagIspTpuOTF; + bool m_flagIspMcscOTF; + bool m_flagTpuMcscOTF; + bool m_flagMcscVraOTF; + bool m_supportReprocessing; + bool m_flagReprocessing; + bool m_supportPureBayerReprocessing; + bool m_supportSCC; + bool m_supportMCSC; + +private: + bool m_create; + struct camera2_shot_ext *m_shot_ext; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.cpp new file mode 100644 index 0000000..c543089 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.cpp @@ -0,0 +1,784 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactory3aaIspM2M" +#include + +#include "ExynosCameraFrameFactory3aaIspM2M.h" + +namespace android { + +ExynosCameraFrameFactory3aaIspM2M::~ExynosCameraFrameFactory3aaIspM2M() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +enum NODE_TYPE ExynosCameraFrameFactory3aaIspM2M::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = CAPTURE_NODE_2; + break; + case PIPE_ISP: + nodeType = OUTPUT_NODE; + break; + case PIPE_ISPP: + nodeType = CAPTURE_NODE_3; + break; + case PIPE_DIS: + nodeType = OTF_NODE_1; + break; + case PIPE_ISPC: + case PIPE_SCC: + case PIPE_JPEG: + nodeType = CAPTURE_NODE_4; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_5; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + +status_t ExynosCameraFrameFactory3aaIspM2M::m_setDeviceInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + bool flagDirtyBayer = false; + + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) + flagDirtyBayer = true; + + int pipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + + int t3aaNums[MAX_NODE]; + int ispNums[MAX_NODE]; + +#ifdef RAWDUMP_CAPTURE + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_31C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; +#else + if (m_parameters->getDualMode() == true) { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_31C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; + } else { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_30S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_30C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_30P_NUM; + } +#endif + + ispNums[OUTPUT_NODE] = FIMC_IS_VIDEO_I0S_NUM; + ispNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_I0C_NUM; + ispNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_I0P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA)); + m_initDeviceInfo(INDEX(PIPE_ISP)); + m_initDeviceInfo(INDEX(PIPE_DIS)); + + /******* + * 3AA + ******/ + pipeId = INDEX(PIPE_3AA); + + // 3AS + nodeType = getNodeType(PIPE_3AA); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeNums[INDEX(PIPE_FLITE)][getNodeType(PIPE_FLITE)], m_flagFlite3aaOTF, true, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA; + + // 3AC + nodeType = getNodeType(PIPE_3AC); + if (flagDirtyBayer == true) { + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } else { + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + 
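+ /* flagDirtyBayer == false: 3AC is registered only as a secondary node on the 3AA OTF interface (flagOTFInterface == true below), not as a separate capture video node */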
m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } + + // 3AP + nodeType = getNodeType(PIPE_3AP); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + /******* + * ISP + ******/ + pipeId = INDEX(PIPE_ISP); + + // ISPS + nodeType = getNodeType(PIPE_ISP); + m_deviceInfo[pipeId].nodeNum[nodeType] = ispNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP; + + // ISPP + nodeType = getNodeType(PIPE_ISPP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPP; + + // SCP + nodeType = getNodeType(PIPE_SCP); + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_SCP_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "SCP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[INDEX(PIPE_ISP)].secondaryNodeNum[getNodeType(PIPE_ISPP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_SCP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + /******* + * DIS + ******/ + pipeId = INDEX(PIPE_DIS); + + // DIS + nodeType = getNodeType(PIPE_DIS); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = FIMC_IS_VIDEO_TPU_NUM; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "DIS_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[INDEX(PIPE_ISP)].secondaryNodeNum[getNodeType(PIPE_ISPP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_DIS; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspM2M::m_initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; 
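+ /* sensorIds / secondarySensorIds carry the per-node ids packed by m_getSensorId() (stream / module / video-node index / memory / leader bit fields) */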
+ int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + int perFramePos = 0; + int stride = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* When high speed recording mode, hw sensor size is fixed. + * So, maxPreview size cannot exceed hw sensor size + */ + if (m_parameters->getHighSpeedRecording()) { + maxPreviewW = hwSensorW; + maxPreviewH = hwSensorH; + } + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): HWPictureSize(%dx%d)", __FUNCTION__, __LINE__, hwPictureW, hwPictureH); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + /* 3AS */ + if (m_flagFlite3aaOTF == true) { + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + } else { + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 8 / 5; + } +#endif + } + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + 
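/* Per-frame node group for the 3AA leader: perframeSupportNodeNum reserves slots
+     * for the capture nodes registered below (3AC/3AP), and each perFrameVideoID is
+     * the driver-relative video index, i.e. the absolute node number minus
+     * FIMC_IS_VIDEO_BAS_NUM. */
+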
pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[t3apNodeType].rectInfo = tempRect; + pipeInfo[t3apNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3apNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3apNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISPS */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_ISP)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_ISP].pipeId[i]); 
+ if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_ISP].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_ISP)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_ISP)][i]; + } + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[ispsNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[ispsNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[ispsNodeType].rectInfo = tempRect; + pipeInfo[ispsNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[ispsNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[ispsNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* SCP */ + int scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + tempRect.fullW = stride; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = hwPreviewW; +#endif + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[scpNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[scpNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_ISP)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_DIS)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_DIS].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", 
__FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_DIS].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_DIS)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_DIS)][i]; + } + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + + pipeInfo[disNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 16); + + pipeInfo[disNodeType].rectInfo = tempRect; + pipeInfo[disNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[disNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[disNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[PIPE_DIS].secondaryNodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + + ret = m_pipes[INDEX(PIPE_DIS)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspM2M::m_initPipesFastenAeStable(int32_t numFrames, + int hwSensorW, int hwSensorH, + int hwPreviewW, int hwPreviewH) +{ + status_t ret = NO_ERROR; + + /* TODO 1. setup pipes for 120FPS */ + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + uint32_t frameRate = 0; + struct v4l2_streamparm streamParam; + int perFramePos = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + } + + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3asNodeType].bufInfo.count = numFrames; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = 
PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + /* 3AP pipe */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + //perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3apNodeType].rectInfo = tempRect; + pipeInfo[t3apNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3apNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3apNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[t3apNodeType].rectInfo.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[t3apNodeType].rectInfo.fullW * 3 / 2, 16); + } +#endif + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISP pipe */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_ISP)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_ISP].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, 
__LINE__, i, m_deviceInfo[PIPE_ISP].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_ISP)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_ISP)][i]; + } + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[ispsNodeType].rectInfo = tempRect; + pipeInfo[ispsNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[ispsNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[ispsNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + int isppNodeType = getNodeType(PIPE_SCP); + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[2].rectInfo.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[2].rectInfo.fullW * 3 / 2, 16); + } +#endif + + int stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + + pipeInfo[isppNodeType].rectInfo = tempRect; + pipeInfo[isppNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[isppNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[isppNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[isppNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + /* per frame info */ + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = hwPreviewW; +#endif + + + ret = m_pipes[INDEX(PIPE_ISP)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_DIS)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_DIS].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, 
m_deviceInfo[PIPE_DIS].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_DIS)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_DIS)][i]; + } + + /* SRC */ + /* per frame info */ + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[PIPE_DIS].secondaryNodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + ret = m_pipes[INDEX(PIPE_DIS)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +void ExynosCameraFrameFactory3aaIspM2M::m_init(void) +{ +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.h b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.h new file mode 100644 index 0000000..339f6e0 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2M.h @@ -0,0 +1,67 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_M2M_H +#define EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_M2M_H + +#include "ExynosCameraFrameFactoryPreview.h" + +namespace android { + +class ExynosCameraFrameFactory3aaIspM2M : public ExynosCameraFrameFactoryPreview { +public: + ExynosCameraFrameFactory3aaIspM2M() + { + m_init(); + } + + ExynosCameraFrameFactory3aaIspM2M(int cameraId, ExynosCamera1Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = "ExynosCameraFrameFactory3aaIspM2M"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCameraFrameFactory3aaIspM2M(); + + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); + +protected: + /* setting node number on every pipe */ + virtual status_t m_setDeviceInfo(void); + + /* pipe setting */ + virtual status_t m_initPipes(void); + + /* pipe setting for fastAE */ + virtual status_t m_initPipesFastenAeStable(int32_t numFrames, + int hwSensorW, int hwSensorH, + int hwPreviewW, int hwPreviewH); +private: + void m_init(void); + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.cpp new file mode 100644 index 0000000..3992f43 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.cpp @@ -0,0 +1,840 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactory3aaIspM2MTpu" +#include + +#include "ExynosCameraFrameFactory3aaIspM2MTpu.h" + +namespace android { + +ExynosCameraFrameFactory3aaIspM2MTpu::~ExynosCameraFrameFactory3aaIspM2MTpu() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +enum NODE_TYPE ExynosCameraFrameFactory3aaIspM2MTpu::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = CAPTURE_NODE_2; + break; + case PIPE_ISP: + nodeType = OUTPUT_NODE; + break; + case PIPE_ISPP: + nodeType = CAPTURE_NODE_3; + break; + case PIPE_DIS: + nodeType = OUTPUT_NODE; + break; + case PIPE_ISPC: + case PIPE_SCC: + case PIPE_JPEG: + nodeType = CAPTURE_NODE_4; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_5; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + +status_t ExynosCameraFrameFactory3aaIspM2MTpu::m_setDeviceInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + bool flagDirtyBayer = false; + + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) + flagDirtyBayer = true; + + int pipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + + int t3aaNums[MAX_NODE]; + int ispNums[MAX_NODE]; + + if (m_parameters->getDualMode() == true) { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_31C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; + } else { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_30S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_30C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_30P_NUM; + } + + ispNums[OUTPUT_NODE] = FIMC_IS_VIDEO_I0S_NUM; + ispNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_I0C_NUM; + ispNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_I0P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA)); + m_initDeviceInfo(INDEX(PIPE_ISP)); + m_initDeviceInfo(INDEX(PIPE_DIS)); + + /******* + * 3AA + ******/ + pipeId = INDEX(PIPE_3AA); + + // 3AS + nodeType = getNodeType(PIPE_3AA); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeNums[INDEX(PIPE_FLITE)][getNodeType(PIPE_FLITE)], m_flagFlite3aaOTF, true, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA; + + // 3AC + nodeType = getNodeType(PIPE_3AC); + if (flagDirtyBayer == true) { + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + 
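/* Dirty-bayer reprocessing (reprocessing supported, but not pure-bayer): 3AC is
+     * opened as a real capture video node here; in the else branch below it is only
+     * registered as a secondary node on the 3AA pipe. */
+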
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } else { + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } + + // 3AP + nodeType = getNodeType(PIPE_3AP); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + /******* + * ISP + ******/ + pipeId = INDEX(PIPE_ISP); + + // ISPS + nodeType = getNodeType(PIPE_ISP); + m_deviceInfo[pipeId].nodeNum[nodeType] = ispNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP; + + // ISPP + nodeType = getNodeType(PIPE_ISPP); + m_deviceInfo[pipeId].nodeNum[nodeType] = ispNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + /******* + * DIS + ******/ + pipeId = INDEX(PIPE_DIS); + + // DIS + nodeType = getNodeType(PIPE_DIS); + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_TPU_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "DIS_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[getNodeType(PIPE_ISPP)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_DIS; + + // SCP + nodeType = getNodeType(PIPE_SCP); + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_SCP_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "SCP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_DIS)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_SCP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, 
pipeId); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspM2MTpu::m_initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + int perFramePos = 0; + int stride = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* When high speed recording mode, hw sensor size is fixed. + * So, maxPreview size cannot exceed hw sensor size + */ + if (m_parameters->getHighSpeedRecording()) { + maxPreviewW = hwSensorW; + maxPreviewH = hwSensorH; + } + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): HWPictureSize(%dx%d)", __FUNCTION__, __LINE__, hwPictureW, hwPictureH); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + if (m_flagFlite3aaOTF == true) { + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + } else { + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 8 / 5; + } +#endif + } + + 
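/* Common 3AS leader settings: the input rect chosen above (a 32x64 dummy when
+     * FLITE->3AA runs OTF, otherwise the full sensor size) is queued on a V4L2
+     * multi-planar OUTPUT buffer, and the per-frame node group is filled in below. */
+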
pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[t3apNodeType].rectInfo = tempRect; + pipeInfo[t3apNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3apNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3apNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISPS */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_ISP)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_ISP].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_ISP].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_ISP)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_ISP)][i]; + } + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[ispsNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[ispsNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[ispsNodeType].rectInfo = tempRect; + pipeInfo[ispsNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[ispsNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[ispsNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPP */ + enum NODE_TYPE isppNodeType = getNodeType(PIPE_ISPP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = bdsSize.w; +#endif + + pipeInfo[isppNodeType].rectInfo = tempRect; + pipeInfo[isppNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[isppNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[isppNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers;; + + /* per frame info */ + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_ISP)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + + pipeInfo[disNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 16); + + pipeInfo[disNodeType].rectInfo = tempRect; + pipeInfo[disNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[disNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[disNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[PIPE_DIS].nodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* SCP */ + int scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_DIS)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + tempRect.fullW = stride; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[scpNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[scpNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + /* per frame info */ + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = hwPreviewW; +#endif + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_DIS)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_DIS].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_DIS].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_DIS)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_DIS)][i]; + } + + ret = m_pipes[INDEX(PIPE_DIS)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspM2MTpu::m_initPipesFastenAeStable(int32_t numFrames, + int hwSensorW, int hwSensorH, + int hwPreviewW, int hwPreviewH) +{ + status_t ret = NO_ERROR; + + /* TODO 1. 
setup pipes for 120FPS */ + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + uint32_t frameRate = 0; + struct v4l2_streamparm streamParam; + int perFramePos = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + } + + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3asNodeType].bufInfo.count = numFrames; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + /* 3AP pipe */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + //perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3apNodeType].rectInfo = tempRect; + pipeInfo[t3apNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3apNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3apNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[t3apNodeType].rectInfo.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3apNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[t3apNodeType].rectInfo.fullW * 3 / 2, 16); + } +#endif + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISP pipe */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_ISP)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_ISP].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_ISP].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_ISP)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_ISP)][i]; + } + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[ispsNodeType].rectInfo = tempRect; + pipeInfo[ispsNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[ispsNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[ispsNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + int isppNodeType = getNodeType(PIPE_ISPP); + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_ISP)].nodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[2].rectInfo.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[2].rectInfo.fullW * 3 / 2, 16); + } +#endif + + int stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + + pipeInfo[isppNodeType].rectInfo = tempRect; + pipeInfo[isppNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[isppNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + if (m_parameters->getShotMode() == SHOT_MODE_BEAUTY_FACE) { + pipeInfo[isppNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[isppNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + /* per frame info */ + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = hwPreviewW; +#endif + + + ret = m_pipes[INDEX(PIPE_ISP)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_DIS)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_DIS].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_DIS].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_DIS)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_DIS)][i]; + } + + /* SRC */ + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = hwVdisformat; + + pipeInfo[disNodeType].rectInfo = tempRect; + pipeInfo[disNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[disNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[disNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + pipeInfo[disNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[0].rectInfo.fullW, 16); + + /* per frame info */ + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[PIPE_DIS].nodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 
SCP */ + int scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_DIS)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + tempRect.fullW = stride; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[scpNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[scpNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + /* per frame info */ + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_DIS)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +void ExynosCameraFrameFactory3aaIspM2MTpu::m_init(void) +{ +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.h b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.h new file mode 100644 index 0000000..baeab96 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspM2MTpu.h @@ -0,0 +1,68 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/
+
+#ifndef EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_M2M_TPU_H
+#define EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_M2M_TPU_H
+
+#include "ExynosCameraFrameFactoryPreview.h"
+
+namespace android {
+
+class ExynosCameraFrameFactory3aaIspM2MTpu : public ExynosCameraFrameFactoryPreview {
+public:
+    ExynosCameraFrameFactory3aaIspM2MTpu()
+    {
+        m_init();
+    }
+
+    ExynosCameraFrameFactory3aaIspM2MTpu(int cameraId, ExynosCamera1Parameters *param)
+    {
+        m_init();
+
+        m_cameraId = cameraId;
+        m_parameters = param;
+        m_activityControl = m_parameters->getActivityControl();
+
+        const char *myName = "ExynosCameraFrameFactory3aaIspM2MTpu";
+        strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1);
+    }
+
+public:
+    virtual ~ExynosCameraFrameFactory3aaIspM2MTpu();
+
+    virtual enum NODE_TYPE getNodeType(uint32_t pipeId);
+
+protected:
+    /* setting node number on every pipe */
+    virtual status_t m_setDeviceInfo(void);
+
+    /* pipe setting */
+    virtual status_t m_initPipes(void);
+
+    /* pipe setting for fastAE */
+    virtual status_t m_initPipesFastenAeStable(int32_t numFrames,
+                                               int hwSensorW, int hwSensorH,
+                                               int hwPreviewW, int hwPreviewH);
+
+private:
+    void m_init(void);
+
+};
+
+}; /* namespace android */
+
+#endif
diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.cpp
new file mode 100644
index 0000000..0b7a493
--- /dev/null
+++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.cpp
@@ -0,0 +1,716 @@
+/*
+**
+** Copyright 2014, Samsung Electronics Co. LTD
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactory3aaIspOtf" +#include + +#include "ExynosCameraFrameFactory3aaIspOtf.h" + +namespace android { + +ExynosCameraFrameFactory3aaIspOtf::~ExynosCameraFrameFactory3aaIspOtf() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +enum NODE_TYPE ExynosCameraFrameFactory3aaIspOtf::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = OTF_NODE_1; + break; + case PIPE_ISP: + nodeType = OTF_NODE_2; + break; + case PIPE_ISPP: + nodeType = OTF_NODE_3; + break; + case PIPE_DIS: + nodeType = OTF_NODE_4; + break; + case PIPE_MCSC: + nodeType = OTF_NODE_5; + break; + case PIPE_ISPC: + case PIPE_SCC: + case PIPE_JPEG: + nodeType = CAPTURE_NODE_6; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_7; + break; + case PIPE_VRA: + nodeType = OUTPUT_NODE; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + +status_t ExynosCameraFrameFactory3aaIspOtf::m_setDeviceInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + bool flagDirtyBayer = false; + + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) + flagDirtyBayer = true; + + int pipeId = -1; + int previousPipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + + int t3aaNums[MAX_NODE]; + int ispNums[MAX_NODE]; + + if (m_parameters->getDualMode() == true) { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_31C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; + } else { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_30S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_30C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_30P_NUM; + } + + ispNums[OUTPUT_NODE] = FIMC_IS_VIDEO_I0S_NUM; + ispNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_I0C_NUM; + ispNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_I0P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA)); + m_initDeviceInfo(INDEX(PIPE_ISP)); + m_initDeviceInfo(INDEX(PIPE_DIS)); + + /******* + * 3AA + ******/ + pipeId = INDEX(PIPE_3AA); + + // 3AS + nodeType = getNodeType(PIPE_3AA); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeNums[INDEX(PIPE_FLITE)][getNodeType(PIPE_FLITE)], m_flagFlite3aaOTF, true, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA; + + // 3AC + nodeType = getNodeType(PIPE_3AC); + if (flagDirtyBayer == true || m_parameters->isUsing3acForIspc() == true) { + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } else { + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + 
m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } + + // 3AP + nodeType = getNodeType(PIPE_3AP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP; + + // ISPS + nodeType = getNodeType(PIPE_ISP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP; + + // ISPP + nodeType = getNodeType(PIPE_ISPP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPP; + + // DIS + if (m_parameters->getHWVdisMode()) { + nodeType = getNodeType(PIPE_DIS); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = FIMC_IS_VIDEO_TPU_NUM; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "DIS_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISPP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_DIS; + } + + if (m_supportMCSC == true) { + // MCSC + nodeType = getNodeType(PIPE_MCSC); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = FIMC_IS_VIDEO_M0S_NUM; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "MCSC_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISPP)], m_flagIspMcscOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC; + + // MCSC0 + nodeType = getNodeType(PIPE_MCSC0); + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_M0P_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "MCSC_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_MCSC)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC0; + } else { + // SCP + nodeType = getNodeType(PIPE_SCP); + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_SCP_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "SCP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISPP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_SCP; + } + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", 
__FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + // VRA + previousPipeId = pipeId; + nodeNums = m_nodeNums[INDEX(PIPE_VRA)]; + nodeNums[OUTPUT_NODE] = FIMC_IS_VIDEO_VRA_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + controlId = m_sensorIds[INDEX(PIPE_VRA)]; + controlId[OUTPUT_NODE] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(PIPE_SCP)], m_flagMcscVraOTF, true, true); + controlId[CAPTURE_NODE_1] = -1; + controlId[CAPTURE_NODE_2] = -1; + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspOtf::m_initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + int perFramePos = 0; + int stride = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* When high speed recording mode, hw sensor size is fixed. 
+ * So, maxPreview size cannot exceed hw sensor size + */ + if (m_parameters->getHighSpeedRecording()) { + maxPreviewW = hwSensorW; + maxPreviewH = hwSensorH; + } + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): HWPictureSize(%dx%d)", __FUNCTION__, __LINE__, hwPictureW, hwPictureH); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + if (m_flagFlite3aaOTF == true) { + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + } else { + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 8 / 5; + } +#endif + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + } + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if ((m_supportReprocessing == true && m_supportPureBayerReprocessing == false) || m_parameters->isUsing3acForIspc() == true) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + if (m_parameters->isUsing3acForIspc() == true) { + tempRect.colorFormat = SCC_OUTPUT_COLOR_FMT; + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + } else { + tempRect.colorFormat = bayerFormat; + } + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* ISPS */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPP */ + enum NODE_TYPE isppNodeType = getNodeType(PIPE_ISPP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[PIPE_3AA].secondaryNodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + if (m_parameters->getHWVdisMode()) { + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + } + + if (m_supportMCSC == true) { + /* MCSC */ + enum NODE_TYPE mcscNodeType = getNodeType(PIPE_MCSC); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_MCSC; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[mcscNodeType] - FIMC_IS_VIDEO_BAS_NUM); + } + + /* SCP & MCSC0 */ + enum NODE_TYPE scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + tempRect.fullW = stride; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; +#ifdef USE_BUFFER_WITH_STRIDE +/* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = tempRect.fullW; +#endif + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[scpNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[scpNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_flagMcscVraOTF == false) { + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + int vraWidth = 0, vraHeight = 0; + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + /* VRA pipe */ + tempRect.fullW = vraWidth; + tempRect.fullH = vraHeight; + tempRect.colorFormat = m_parameters->getHwVraInputFormat(); + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = config->current->bufInfo.num_vra_buffers; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_VRA; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (FIMC_IS_VIDEO_VRA_NUM - FIMC_IS_VIDEO_BAS_NUM); + + ret = m_pipes[INDEX(PIPE_VRA)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_VRA)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):VRA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspOtf::m_initPipesFastenAeStable(int32_t numFrames, + int hwSensorW, int hwSensorH, + int hwPreviewW, int hwPreviewH) +{ + status_t ret = NO_ERROR; + + /* TODO 1. 
setup pipes for 120FPS */ + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + uint32_t frameRate = 0; + struct v4l2_streamparm streamParam; + int perFramePos = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3asNodeType].bufInfo.count = numFrames; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if ((m_supportReprocessing == true && m_supportPureBayerReprocessing == false) || m_parameters->isUsing3acForIspc() == true) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + if (m_parameters->isUsing3acForIspc() == true) { + tempRect.colorFormat = SCC_OUTPUT_COLOR_FMT; + } else { + tempRect.colorFormat = bayerFormat; + } + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* ISP pipe */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPP */ + enum NODE_TYPE isppNodeType = getNodeType(PIPE_ISPP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[PIPE_3AA].secondaryNodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + if (m_parameters->getHWVdisMode()) { + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + /* per frame info */ + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[PIPE_3AA].secondaryNodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + } + + if (m_supportMCSC == true) { + /* MCSC */ + enum NODE_TYPE mcscNodeType = getNodeType(PIPE_MCSC); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_MCSC; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[mcscNodeType] - FIMC_IS_VIDEO_BAS_NUM); + } + + /* SCP & MCSC0 */ + enum NODE_TYPE scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[scpNodeType].bufInfo.count = numFrames; + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_flagMcscVraOTF == false) { + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + int vraWidth = 0, vraHeight = 0; + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + /* VRA pipe */ + tempRect.fullW = vraWidth; + tempRect.fullH = vraHeight; + tempRect.colorFormat = m_parameters->getHwVraInputFormat(); + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_VRA; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (FIMC_IS_VIDEO_VRA_NUM - FIMC_IS_VIDEO_BAS_NUM); + + ret = m_pipes[INDEX(PIPE_VRA)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_VRA)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):VRA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + return NO_ERROR; +} + +void ExynosCameraFrameFactory3aaIspOtf::m_init(void) +{ +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.h b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.h new file mode 100644 index 0000000..2a6f3f8 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtf.h @@ -0,0 +1,68 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_OTF_H
+#define EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_OTF_H
+
+#include "ExynosCameraFrameFactoryPreview.h"
+
+namespace android {
+
+class ExynosCameraFrameFactory3aaIspOtf : public ExynosCameraFrameFactoryPreview {
+public:
+    ExynosCameraFrameFactory3aaIspOtf()
+    {
+        m_init();
+    }
+
+    ExynosCameraFrameFactory3aaIspOtf(int cameraId, ExynosCamera1Parameters *param)
+    {
+        m_init();
+
+        m_cameraId = cameraId;
+        m_parameters = param;
+        m_activityControl = m_parameters->getActivityControl();
+
+        const char *myName = "ExynosCameraFrameFactory3aaIspOtf";
+        strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1);
+    }
+
+public:
+    virtual ~ExynosCameraFrameFactory3aaIspOtf();
+
+    virtual enum NODE_TYPE getNodeType(uint32_t pipeId);
+
+protected:
+    /* setting node number on every pipe */
+    virtual status_t m_setDeviceInfo(void);
+
+    /* pipe setting */
+    virtual status_t m_initPipes(void);
+
+    /* pipe setting for fastAE */
+    virtual status_t m_initPipesFastenAeStable(int32_t numFrames,
+                                               int hwSensorW, int hwSensorH,
+                                               int hwPreviewW, int hwPreviewH);
+
+private:
+    void m_init(void);
+
+};
+
+}; /* namespace android */
+
+#endif
diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.cpp
new file mode 100644
index 0000000..31ea0db
--- /dev/null
+++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.cpp
@@ -0,0 +1,674 @@
+/*
+**
+** Copyright 2014, Samsung Electronics Co. LTD
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactory3aaIspOtfTpu" +#include + +#include "ExynosCameraFrameFactory3aaIspOtfTpu.h" + +namespace android { + +ExynosCameraFrameFactory3aaIspOtfTpu::~ExynosCameraFrameFactory3aaIspOtfTpu() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +enum NODE_TYPE ExynosCameraFrameFactory3aaIspOtfTpu::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = OTF_NODE_1; + break; + case PIPE_ISP: + nodeType = OTF_NODE_2; + break; + case PIPE_ISPP: + nodeType = CAPTURE_NODE_4; + break; + case PIPE_DIS: + nodeType = OUTPUT_NODE; + break; + case PIPE_ISPC: + case PIPE_SCC: + case PIPE_JPEG: + nodeType = CAPTURE_NODE_5; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_6; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + +status_t ExynosCameraFrameFactory3aaIspOtfTpu::m_setDeviceInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + bool flagDirtyBayer = false; + + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) + flagDirtyBayer = true; + + int pipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + + int t3aaNums[MAX_NODE]; + int ispNums[MAX_NODE]; + + if (m_parameters->getDualMode() == true) { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_31C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; + } else { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_30S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_30C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_30P_NUM; + } + + ispNums[OUTPUT_NODE] = FIMC_IS_VIDEO_I0S_NUM; + ispNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_I0C_NUM; + ispNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_I0P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA)); + m_initDeviceInfo(INDEX(PIPE_ISP)); + m_initDeviceInfo(INDEX(PIPE_DIS)); + + /******* + * 3AA + ******/ + pipeId = INDEX(PIPE_3AA); + + // 3AS + nodeType = getNodeType(PIPE_3AA); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeNums[INDEX(PIPE_FLITE)][getNodeType(PIPE_FLITE)], m_flagFlite3aaOTF, true, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA; + + // 3AC + nodeType = getNodeType(PIPE_3AC); + if (flagDirtyBayer == true) { + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } else { + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } + + // 3AP + 
nodeType = getNodeType(PIPE_3AP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP; + + // ISPS + nodeType = getNodeType(PIPE_ISP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP; + + // ISPP + nodeType = getNodeType(PIPE_ISPP); + m_deviceInfo[pipeId].nodeNum[nodeType] = ispNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISP)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + /******* + * DIS + ******/ + pipeId = INDEX(PIPE_DIS); + + // DIS + nodeType = getNodeType(PIPE_DIS); + + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_TPU_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "DIS_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[getNodeType(PIPE_ISPP)], false, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_DIS; + + // SCP + nodeType = getNodeType(PIPE_SCP); + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_SCP_NUM; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "SCP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_DIS)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_SCP; + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspOtfTpu::m_initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = 
m_parameters->getHwPictureFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + int perFramePos = 0; + int stride = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* When high speed recording mode, hw sensor size is fixed. + * So, maxPreview size cannot exceed hw sensor size + */ + if (m_parameters->getHighSpeedRecording()) { + maxPreviewW = hwSensorW; + maxPreviewH = hwSensorH; + } + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): HWPictureSize(%dx%d)", __FUNCTION__, __LINE__, hwPictureW, hwPictureH); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + if (m_flagFlite3aaOTF == true) { + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + } else { + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 8 / 5; + } +#endif + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + } + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = 
(m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* ISPS */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPP */ + enum NODE_TYPE isppNodeType = getNodeType(PIPE_ISPP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[PIPE_3AA].nodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[isppNodeType].bytesPerPlane[0] = bdsSize.w; +#endif + + pipeInfo[isppNodeType].rectInfo = tempRect; + pipeInfo[isppNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[isppNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[isppNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_DIS)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_DIS].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_DIS].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_DIS)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_DIS)][i]; + } + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + pipeInfo[disNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 16); + + pipeInfo[disNodeType].rectInfo = tempRect; + pipeInfo[disNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[disNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[disNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_DIS)].nodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* SCP */ + enum NODE_TYPE scpNodeType = getNodeType(PIPE_SCP); + + stride = m_parameters->getHwPreviewStride(); + CLOGV("INFO(%s[%d]):stride=%d", __FUNCTION__, __LINE__, stride); + tempRect.fullW = stride; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = hwPreviewW; +#endif + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[scpNodeType].bufInfo.count = 
m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[scpNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_DIS)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_DIS)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactory3aaIspOtfTpu::m_initPipesFastenAeStable(int32_t numFrames, + int hwSensorW, int hwSensorH, + int hwPreviewW, int hwPreviewH) +{ + status_t ret = NO_ERROR; + + /* TODO 1. setup pipes for 120FPS */ + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + uint32_t frameRate = 0; + struct v4l2_streamparm streamParam; + int perFramePos = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3asNodeType].bufInfo.count = numFrames; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) { + enum NODE_TYPE 
t3acNodeType = getNodeType(PIPE_3AC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = numFrames; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* ISP pipe */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPP */ + enum NODE_TYPE isppNodeType = getNodeType(PIPE_ISPP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = bdsSize.w; +#endif + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers;; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[PIPE_3AA].secondaryNodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + /* SRC */ + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = hwVdisformat; + pipeInfo[disNodeType].bytesPerPlane[0] = ROUND_UP(pipeInfo[0].rectInfo.fullW, 16); + + pipeInfo[disNodeType].rectInfo = tempRect; + pipeInfo[disNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[disNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[disNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + /* per frame info */ + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[PIPE_3AA].nodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* SCP */ + enum NODE_TYPE scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[scpNodeType].bufInfo.count = numFrames; + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +void ExynosCameraFrameFactory3aaIspOtfTpu::m_init(void) +{ +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.h b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.h new file mode 100644 index 0000000..3ebe7bc --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactory3aaIspOtfTpu.h @@ -0,0 +1,68 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_OTF_TPU_H +#define EXYNOS_CAMERA_FRAME_FACTORY_3AA_ISP_OTF_TPU_H + +#include "ExynosCameraFrameFactoryPreview.h" + +namespace android { + +class ExynosCameraFrameFactory3aaIspOtfTpu : public ExynosCameraFrameFactoryPreview { +public: + ExynosCameraFrameFactory3aaIspOtfTpu() + { + m_init(); + } + + ExynosCameraFrameFactory3aaIspOtfTpu(int cameraId, ExynosCamera1Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = "ExynosCameraFrameFactory3aaIspOtfTpu"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCameraFrameFactory3aaIspOtfTpu(); + + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); + +protected: + /* setting node number on every pipe */ + virtual status_t m_setDeviceInfo(void); + + /* pipe setting */ + virtual status_t m_initPipes(void); + + /* pipe setting for fastAE */ + virtual status_t m_initPipesFastenAeStable(int32_t numFrames, + int hwSensorW, int hwSensorH, + int hwPreviewW, int hwPreviewH); + +private: + void m_init(void); + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.cpp new file mode 100644 index 0000000..38b1cd6 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.cpp @@ -0,0 +1,1059 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactoryFront" +#include + +#include "ExynosCameraFrameFactoryFront.h" + +namespace android { + +ExynosCameraFrameFactoryFront::~ExynosCameraFrameFactoryFront() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +status_t ExynosCameraFrameFactoryFront::create(__unused bool active) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_setupConfig(); + + int ret = 0; + int32_t nodeNums[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) + nodeNums[i] = -1; + + m_pipes[INDEX(PIPE_FLITE)] = (ExynosCameraPipe*)new ExynosCameraPipeFlite(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_FLITE)]); + m_pipes[INDEX(PIPE_FLITE)]->setPipeId(PIPE_FLITE); + m_pipes[INDEX(PIPE_FLITE)]->setPipeName("PIPE_FLITE"); + + m_pipes[INDEX(PIPE_3AA)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, false, &m_deviceInfo[INDEX(PIPE_3AA)]); + m_pipes[INDEX(PIPE_3AA)]->setPipeId(PIPE_3AA); + m_pipes[INDEX(PIPE_3AA)]->setPipeName("PIPE_3AA"); + + m_pipes[INDEX(PIPE_ISP)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, false, &m_deviceInfo[INDEX(PIPE_ISP)]); + m_pipes[INDEX(PIPE_ISP)]->setPipeId(PIPE_ISP); + m_pipes[INDEX(PIPE_ISP)]->setPipeName("PIPE_ISP"); + + m_pipes[INDEX(PIPE_GSC)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, true, m_nodeNums[INDEX(PIPE_GSC)]); + m_pipes[INDEX(PIPE_GSC)]->setPipeId(PIPE_GSC); + m_pipes[INDEX(PIPE_GSC)]->setPipeName("PIPE_GSC"); + + m_pipes[INDEX(PIPE_GSC_VIDEO)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_GSC_VIDEO)]); + m_pipes[INDEX(PIPE_GSC_VIDEO)]->setPipeId(PIPE_GSC_VIDEO); + m_pipes[INDEX(PIPE_GSC_VIDEO)]->setPipeName("PIPE_GSC_VIDEO"); + + if (m_supportReprocessing == false) { + m_pipes[INDEX(PIPE_GSC_PICTURE)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, true, m_nodeNums[INDEX(PIPE_GSC_PICTURE)]); + m_pipes[INDEX(PIPE_GSC_PICTURE)]->setPipeId(PIPE_GSC_PICTURE); + m_pipes[INDEX(PIPE_GSC_PICTURE)]->setPipeName("PIPE_GSC_PICTURE"); + + m_pipes[INDEX(PIPE_JPEG)] = (ExynosCameraPipe*)new ExynosCameraPipeJpeg(m_cameraId, m_parameters, true, m_nodeNums[INDEX(PIPE_JPEG)]); + m_pipes[INDEX(PIPE_JPEG)]->setPipeId(PIPE_JPEG); + m_pipes[INDEX(PIPE_JPEG)]->setPipeName("PIPE_JPEG"); + } + + /* flite pipe initialize */ + ret = m_pipes[INDEX(PIPE_FLITE)]->create(m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_FLITE)); + + /* ISP pipe initialize */ + ret = m_pipes[INDEX(PIPE_ISP)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_ISP)); + + /* 3AA pipe initialize */ + ret = m_pipes[INDEX(PIPE_3AA)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_3AA)); + + /* GSC_PREVIEW pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):GSC create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* 
TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_GSC)); + + ret = m_pipes[INDEX(PIPE_GSC_VIDEO)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_GSC_VIDEO create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_GSC_VIDEO)); + + if (m_supportReprocessing == false) { + /* GSC_PICTURE pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_PICTURE)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):GSC_PICTURE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_GSC_PICTURE)); + + /* JPEG pipe initialize */ + ret = m_pipes[INDEX(PIPE_JPEG)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):JPEG create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_JPEG)); + } + + /* EOS */ + ret = m_pipes[INDEX(PIPE_3AA)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", __FUNCTION__, __LINE__, PIPE_3AA, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryFront::m_fillNodeGroupInfo(ExynosCameraFrame *frame) +{ + camera2_node_group node_group_info_3aa, node_group_info_isp, node_group_info_dis; + int zoom = m_parameters->getZoomLevel(); + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + ExynosRect bnsSize; /* == bayerCropInputSize */ + ExynosRect bayerCropSize; + ExynosRect bdsSize; + int perFramePos = 0; + bool tpu = false; + bool dual = true; + + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_dis, 0x0, sizeof(camera2_node_group)); + + /* should add this request value in FrameFactory */ + /* 3AA */ + node_group_info_3aa.leader.request = 1; + + /* 3AC */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_3AC); + + /* 3AP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_3AP); + + /* should add this request value in FrameFactory */ + /* ISP */ + node_group_info_isp.leader.request = 1; + + /* SCC */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCC_POS : PERFRAME_FRONT_SCC_POS; + + if (m_supportSCC == true) + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_SCC); + else + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_ISPC); + + memcpy(&node_group_info_dis, &node_group_info_isp, sizeof (camera2_node_group)); + + if (m_requestISPC == true) { + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPC_POS : PERFRAME_FRONT_ISPC_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_ISPC); + } + + ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + m_cameraId, + &node_group_info_3aa, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + tpu); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_dis, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + if (m_requestISPC == true) { + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + for(int i=0 ; i < 4 ; i++) { + node_group_info_3aa.capture[PERFRAME_FRONT_ISPC_POS].input.cropRegion[i] = node_group_info_3aa.capture[perFramePos].input.cropRegion[i]; + node_group_info_3aa.capture[PERFRAME_FRONT_ISPC_POS].output.cropRegion[i] = node_group_info_3aa.capture[perFramePos].input.cropRegion[i]; + } + } + + frame->storeNodeGroupInfo(&node_group_info_3aa, PERFRAME_INFO_3AA, zoom); + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, zoom); + frame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, zoom); + + return NO_ERROR; +} + +ExynosCameraFrame *ExynosCameraFrameFactoryFront::createNewFrame(void) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {0}; + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount, FRAME_TYPE_PREVIEW); + + int requestEntityCount = 0; + bool dzoomScaler = false; + dzoomScaler = m_parameters->getZoomPreviewWIthScaler(); + + ret = m_initFrameMetadata(frame); + if (ret < 0) + CLOGE("(%s[%d]): frame(%d) metadata initialize fail", __FUNCTION__, __LINE__, m_frameCount); + + if (m_requestFLITE) { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + requestEntityCount++; + } + + /* set 3AA_ISP pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_3AA)]); + requestEntityCount++; + + if (m_supportReprocessing == false) { + /* set GSC-Picture pipe to linkageList */ + newEntity[INDEX(PIPE_GSC_PICTURE)] = new ExynosCameraFrameEntity(PIPE_GSC_PICTURE, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_PICTURE)]); + } + + /* set GSC pipe to linkageList */ + newEntity[INDEX(PIPE_GSC)] = new ExynosCameraFrameEntity(PIPE_GSC, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC)]); + if (dzoomScaler) { + requestEntityCount++; + } + + newEntity[INDEX(PIPE_GSC_VIDEO)] = new ExynosCameraFrameEntity(PIPE_GSC_VIDEO, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VIDEO)]); + + if (m_supportReprocessing == false) { + /* set JPEG pipe to linkageList */ + newEntity[INDEX(PIPE_JPEG)] = new ExynosCameraFrameEntity(PIPE_JPEG, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_JPEG)]); + } + + ret = m_initPipelines(frame); + if (ret < 0) { + CLOGE("ERR(%s):m_initPipelines fail, ret(%d)", __FUNCTION__, ret); + } + + /* TODO: make it dynamic */ + 
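/* Note: only the entities counted through requestEntityCount above (FLITE when m_requestFLITE is set, 3AA, and GSC when dzoomScaler is enabled) are reported as request pipes for this frame; the remaining GSC/JPEG entities are linked without being counted. */ +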
frame->setNumRequestPipe(requestEntityCount); + + m_fillNodeGroupInfo(frame); + + m_frameCount++; + return frame; +} + +status_t ExynosCameraFrameFactoryFront::initPipes(void) +{ + CLOGI("INFO(%s[%d]) IN", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + bool hwVdis = m_parameters->getTpuEnabledMode(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + int perFramePos = 0; + int stride = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + m_parameters->getPreviewBdsSize(&bdsSize); + + /* When high speed recording mode, hw sensor size is fixed. + * So, maxPreview size cannot exceed hw sensor size + */ + if (m_parameters->getHighSpeedRecording()) { + maxPreviewW = hwSensorW; + maxPreviewH = hwSensorH; + } + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): HWPictureSize(%dx%d)", __FUNCTION__, __LINE__, hwPictureW, hwPictureH); + + + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + + /* FLITE pipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + /* setParam for Frame rate : must after setInput on Flite */ + uint32_t min, max, frameRate; + struct v4l2_streamparm streamParam; + + memset(&streamParam, 0x0, sizeof(v4l2_streamparm)); + m_parameters->getPreviewFpsRange(&min, &max); + + if (m_parameters->getScalableSensorMode() == true) + frameRate = 24; + else + frameRate = max; + + streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + streamParam.parm.capture.timeperframe.numerator = 1; + streamParam.parm.capture.timeperframe.denominator = frameRate; + CLOGI("INFO(%s[%d]:set framerate (denominator=%d)", __FUNCTION__, __LINE__, frameRate); + ret = setParam(&streamParam, PIPE_FLITE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE setParam fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return 
INVALID_OPERATION; + } + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + tempRect.colorFormat = bayerFormat; + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[0].bytesPerPlane[0] = ROUND_UP(pipeInfo[0].rectInfo.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[0].bytesPerPlane[0] = ROUND_UP(pipeInfo[0].rectInfo.fullW, 10) * 8 / 5; + } +#endif + + ret = m_pipes[INDEX(PIPE_FLITE)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* set BNS ratio */ + + int bnsScaleRatio = 0; + int bnsSize = 0; + if( m_parameters->getHighSpeedRecording() +#ifdef USE_BINNING_MODE + || m_parameters->getBinningMode() +#endif + ) { + bnsScaleRatio = 1000; + } else { + bnsScaleRatio = m_parameters->getBnsScaleRatio(); + } + ret = m_pipes[INDEX(PIPE_FLITE)]->setControl(V4L2_CID_IS_S_BNS, bnsScaleRatio); + if (ret < 0) { + CLOGE("ERR(%s[%d]): set BNS(%d) fail, ret(%d)", __FUNCTION__, __LINE__, bnsScaleRatio, ret); + } else { + ret = m_pipes[INDEX(PIPE_FLITE)]->getControl(V4L2_CID_IS_G_BNS_SIZE, &bnsSize); + if (ret < 0) { + CLOGE("ERR(%s[%d]): get BNS size fail, ret(%d)", __FUNCTION__, __LINE__, ret); + bnsSize = -1; + } + } + + int bnsWidth = 0; + int bnsHeight = 0; + if (bnsSize > 0) { + bnsHeight = bnsSize & 0xffff; + bnsWidth = bnsSize >> 16; + + CLOGI("INFO(%s[%d]): BNS scale down ratio(%.1f), size (%dx%d)", __FUNCTION__, __LINE__, (float)(bnsScaleRatio / 1000), bnsWidth, bnsHeight); + m_parameters->setBnsSize(bnsWidth, bnsHeight); + } + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_deviceInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPipeId(%d, %d) fail, ret(%d)", __FUNCTION__, __LINE__, i, m_deviceInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + if (m_flagFlite3aaOTF == true) { + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + } else { + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 8 / 5; + } +#endif + + 
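/* Non-OTF (M2M) case: the 3AA output node takes the full-size bayer buffers (presumably fed from FLITE), so the count follows num_bayer_buffers instead of the num_3aa_buffers used in the OTF branch above. */ +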
pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + } + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); +#if 1 + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + pipeInfo[t3apNodeType].rectInfo = tempRect; + pipeInfo[t3apNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3apNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; +#endif + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* ISPS */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = bayerFormat; +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[ispsNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 2, 16); + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[ispsNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW * 3 / 2, 16); + } +#endif + + pipeInfo[ispsNodeType].rectInfo = tempRect; + pipeInfo[ispsNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[ispsNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[ispsNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_deviceInfo[INDEX(PIPE_3AA)].secondaryNodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPC */ + enum NODE_TYPE ispcNodeType = getNodeType(PIPE_ISPC); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPC_POS : PERFRAME_FRONT_ISPC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_deviceInfo[PIPE_3AA].nodeNum[ispcNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + tempRect.fullW = bdsSize.w; + tempRect.fullH = bdsSize.h; + tempRect.colorFormat = hwVdisformat; + +#ifdef USE_BUFFER_WITH_STRIDE + /* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[ispcNodeType].bytesPerPlane[0] = bdsSize.w; +#endif + + pipeInfo[ispcNodeType].rectInfo = tempRect; + pipeInfo[ispcNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[ispcNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[ispcNodeType].bufInfo.count = config->current->bufInfo.num_hwdis_buffers; + + pipeInfo[ispcNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispcNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]) OUT", __FUNCTION__, __LINE__); + m_frameCount = 0; + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryFront::preparePipes(void) +{ + int ret = 0; + + /* NOTE: Prepare for 3AA is moved after ISP stream on */ + + if (m_requestFLITE) { + ret = m_pipes[INDEX(PIPE_FLITE)]->prepare(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_supportSCC == true) { + enum pipeline pipe = (m_supportSCC == true) ? PIPE_SCC : PIPE_ISPC; + + ret = m_pipes[INDEX(pipe)]->prepare(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):%s prepare fail, ret(%d)", __FUNCTION__, __LINE__, m_pipes[INDEX(pipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryFront::startPipes(void) +{ + int ret = 0; + + if (m_supportSCC == true) { + enum pipeline pipe = (m_supportSCC == true) ? 
PIPE_SCC : PIPE_ISPC; + + ret = m_pipes[INDEX(pipe)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):%s start fail, ret(%d)", __FUNCTION__, __LINE__, m_pipes[INDEX(pipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_3AA)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_flagFlite3aaOTF == true) { + /* Here is doing 3AA prepare(qbuf) */ + ret = m_pipes[INDEX(PIPE_3AA)]->prepare(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(true); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE sensorStream on fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]):Starting Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryFront::startInitialThreads(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d]):start pre-ordered initial pipe thread", __FUNCTION__, __LINE__); + + if (m_requestFLITE) { + ret = startThread(PIPE_FLITE); + if (ret < 0) + return ret; + } + + ret = startThread(PIPE_3AA); + if (ret < 0) + return ret; + + if (m_flag3aaIspOTF == false) { + ret = startThread(PIPE_ISP); + if (ret < 0) + return ret; + } + + if (m_parameters->getTpuEnabledMode() == true) { + ret = startThread(PIPE_DIS); + if (ret < 0) + return ret; + } + + if(m_supportSCC) { + enum pipeline pipe = (m_supportSCC == true) ? PIPE_SCC : PIPE_ISPC; + + ret = startThread(pipe); + if (ret < 0) + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryFront::setStopFlag(void) +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + + ret = m_pipes[INDEX(PIPE_FLITE)]->setStopFlag(); + + if (m_pipes[INDEX(PIPE_3AA)]->flagStart() == true) + ret = m_pipes[INDEX(PIPE_3AA)]->setStopFlag(); + + if (m_pipes[INDEX(PIPE_ISP)]->flagStart() == true) + ret = m_pipes[INDEX(PIPE_ISP)]->setStopFlag(); + + if (m_supportSCC == true) { + enum pipeline pipe = (m_supportSCC == true) ? PIPE_SCC : PIPE_ISPC; + + ret = m_pipes[INDEX(pipe)]->setStopFlag(); + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryFront::stopPipes(void) +{ + int ret = 0; + if (m_supportSCC == true) { + enum pipeline pipe = (m_supportSCC == true) ? 
PIPE_SCC : PIPE_ISPC; + + ret = m_pipes[INDEX(pipe)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):%s stopThread fail, ret(%d)", __FUNCTION__, __LINE__, m_pipes[INDEX(pipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_3AA)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_3AA)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* stream off for ISP */ + if (m_pipes[INDEX(PIPE_ISP)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_ISP)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_requestFLITE) { + ret = m_pipes[INDEX(PIPE_FLITE)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_GSC)]->isThreadRunning() == true) { + ret = stopThread(INDEX(PIPE_GSC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_GSC stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + } + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE sensorStream off fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* 3AA force done */ + ret = m_pipes[INDEX(PIPE_3AA)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_3AA force done fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + /* stream off for 3AA */ + ret = m_pipes[INDEX(PIPE_3AA)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISP force done */ + if (m_pipes[INDEX(PIPE_ISP)]->flagStart() == true) { + ret = m_pipes[INDEX(PIPE_ISP)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_ISP force done fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + /* stream off for ISP */ + ret = m_pipes[INDEX(PIPE_ISP)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_supportSCC == true) { + enum pipeline pipe = (m_supportSCC == true) ? 
PIPE_SCC : PIPE_ISPC; + + ret = m_pipes[INDEX(pipe)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):%s stop fail, ret(%d)", __FUNCTION__, __LINE__, m_pipes[INDEX(pipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = stopThreadAndWait(INDEX(PIPE_GSC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_GSC stopThreadAndWait fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + CLOGI("INFO(%s[%d]):Stopping Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +void ExynosCameraFrameFactoryFront::m_init(void) +{ + m_supportReprocessing = false; + m_flagFlite3aaOTF = false; + m_supportSCC = false; + m_supportPureBayerReprocessing = false; + m_flagReprocessing = false; + m_requestISP = 0; +} + +status_t ExynosCameraFrameFactoryFront::m_setupConfig() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + int32_t *secondaryControlId = NULL; + int32_t *prevNode = NULL; + + enum NODE_TYPE nodeType = INVALID_NODE; + int pipeId = -1; + + int t3aaNums[MAX_NODE]; + int ispNums[MAX_NODE]; + + m_flagFlite3aaOTF = (m_cameraId == CAMERA_ID_BACK)?MAIN_CAMERA_DUAL_FLITE_3AA_OTF:FRONT_CAMERA_DUAL_FLITE_3AA_OTF; + m_flag3aaIspOTF = (m_cameraId == CAMERA_ID_BACK)?MAIN_CAMERA_DUAL_3AA_ISP_OTF:FRONT_CAMERA_DUAL_3AA_ISP_OTF; + m_supportReprocessing = m_parameters->isReprocessing(); + m_supportSCC = m_parameters->isOwnScc(m_cameraId); + m_supportPureBayerReprocessing = (m_cameraId == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_DUAL : USE_PURE_BAYER_REPROCESSING_FRONT_ON_DUAL; + + m_flagReprocessing = false; + + if (m_supportReprocessing == false) { + if (m_supportSCC == true) + m_requestSCC = 1; + else + m_requestISPC = 1; + } + + if (m_flag3aaIspOTF == true) + m_request3AP = 0; + else + m_request3AP = 1; + + nodeNums = m_nodeNums[INDEX(PIPE_FLITE)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = (m_cameraId == CAMERA_ID_BACK) ? MAIN_CAMERA_FLITE_NUM : FRONT_CAMERA_FLITE_NUM; + nodeNums[CAPTURE_NODE_2] = -1; + controlId = m_sensorIds[INDEX(PIPE_FLITE)]; + controlId[CAPTURE_NODE_1] = m_getSensorId(nodeNums[CAPTURE_NODE_1], m_flagReprocessing); + prevNode = nodeNums; + +#if 1 + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_30S_NUM; + t3aaNums[CAPTURE_NODE_1] = -1; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_30P_NUM; +#else + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = -1; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; +#endif + ispNums[OUTPUT_NODE] = FIMC_IS_VIDEO_I1S_NUM; + ispNums[CAPTURE_NODE_1] = -1; + ispNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_I1C_NUM; + + /* 1. 3AAS */ + pipeId = INDEX(PIPE_3AA); + nodeType = getNodeType(PIPE_3AA); + m_deviceInfo[pipeId].nodeNum[nodeType] = t3aaNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeNums[INDEX(PIPE_FLITE)][getNodeType(PIPE_FLITE)], m_flagFlite3aaOTF, true, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA; + + + /* 2. 
3AAP */ + nodeType = getNodeType(PIPE_3AP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = -1; + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP; + + /* 3. ISPS */ + nodeType = getNodeType(PIPE_ISP); + m_deviceInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[OUTPUT_NODE]; + strncpy(m_deviceInfo[pipeId].secondaryNodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP; + + // ISPC + nodeType = getNodeType(PIPE_ISPC); + m_deviceInfo[pipeId].nodeNum[nodeType] = ispNums[CAPTURE_NODE_2]; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = -1; + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP)], true, false, m_flagReprocessing); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPC; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC)]; + nodeNums[OUTPUT_NODE] = PREVIEW_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC_VIDEO)]; + nodeNums[OUTPUT_NODE] = VIDEO_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC_PICTURE)]; + nodeNums[OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_JPEG)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = m_deviceInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_checkNodeSetting(%d) fail", __FUNCTION__, __LINE__, pipeId); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +enum NODE_TYPE ExynosCameraFrameFactoryFront::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_ISPC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = CAPTURE_NODE_2; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_3; + break; + case PIPE_ISP: + nodeType = OTF_NODE_1; + break; + case PIPE_ISPP: + nodeType = CAPTURE_NODE_5; + break; + case PIPE_JPEG: + nodeType = CAPTURE_NODE_6; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_7; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.h b/libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.h new file mode 100644 index 0000000..b82062b --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactoryFront.h @@ -0,0 +1,70 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_FACTORY_FRONT_H +#define EXYNOS_CAMERA_FRAME_FACTORY_FRONT_H + +#include "ExynosCameraFrameFactory.h" + +namespace android { + +class ExynosCameraFrameFactoryFront : public ExynosCameraFrameFactory { +public: + ExynosCameraFrameFactoryFront() + { + m_init(); + } + + ExynosCameraFrameFactoryFront(int cameraId, ExynosCamera1Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? "FrameFactoryBackDualPreview" : "FrameFactoryFrontDualPreview"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCameraFrameFactoryFront(); + + virtual status_t create(bool active = true); + + virtual ExynosCameraFrame *createNewFrame(void); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + + virtual status_t startPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t setStopFlag(void); + virtual status_t stopPipes(void); + enum NODE_TYPE getNodeType(uint32_t pipeId); + +protected: + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + virtual status_t m_setupConfig(void); + +private: + void m_init(void); +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.cpp new file mode 100644 index 0000000..209d85a --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.cpp @@ -0,0 +1,853 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactoryPreview" +#include + +#include "ExynosCameraFrameFactoryPreview.h" + +namespace android { + +ExynosCameraFrameFactoryPreview::~ExynosCameraFrameFactoryPreview() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +status_t ExynosCameraFrameFactoryPreview::create(__unused bool active) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_setupConfig(); + + int ret = 0; + int leaderPipe = PIPE_3AA; + int32_t nodeNums[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) + nodeNums[i] = -1; + + m_pipes[INDEX(PIPE_FLITE)] = (ExynosCameraPipe*)new ExynosCameraPipeFlite(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_FLITE)]); + m_pipes[INDEX(PIPE_FLITE)]->setPipeId(PIPE_FLITE); + m_pipes[INDEX(PIPE_FLITE)]->setPipeName("PIPE_FLITE"); + + m_pipes[INDEX(PIPE_3AA)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, false, &m_deviceInfo[INDEX(PIPE_3AA)]); + m_pipes[INDEX(PIPE_3AA)]->setPipeId(PIPE_3AA); + m_pipes[INDEX(PIPE_3AA)]->setPipeName("PIPE_3AA"); + + m_pipes[INDEX(PIPE_ISP)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, false, &m_deviceInfo[INDEX(PIPE_ISP)]); + m_pipes[INDEX(PIPE_ISP)]->setPipeId(PIPE_ISP); + m_pipes[INDEX(PIPE_ISP)]->setPipeName("PIPE_ISP"); + + if (m_parameters->getHWVdisMode()) { + m_pipes[INDEX(PIPE_DIS)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, false, &m_deviceInfo[INDEX(PIPE_DIS)]); + m_pipes[INDEX(PIPE_DIS)]->setPipeId(PIPE_DIS); + m_pipes[INDEX(PIPE_DIS)]->setPipeName("PIPE_DIS"); + } + + if (m_flagMcscVraOTF == false) { + m_pipes[INDEX(PIPE_VRA)] = (ExynosCameraPipe*)new ExynosCameraPipeVRA(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_VRA)]); + m_pipes[INDEX(PIPE_VRA)]->setPipeId(PIPE_VRA); + m_pipes[INDEX(PIPE_VRA)]->setPipeName("PIPE_VRA"); + } + + m_pipes[INDEX(PIPE_GSC)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, true, m_nodeNums[INDEX(PIPE_GSC)]); + m_pipes[INDEX(PIPE_GSC)]->setPipeId(PIPE_GSC); + m_pipes[INDEX(PIPE_GSC)]->setPipeName("PIPE_GSC"); + + m_pipes[INDEX(PIPE_GSC_VIDEO)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_GSC_VIDEO)]); + m_pipes[INDEX(PIPE_GSC_VIDEO)]->setPipeId(PIPE_GSC_VIDEO); + m_pipes[INDEX(PIPE_GSC_VIDEO)]->setPipeName("PIPE_GSC_VIDEO"); + + if (m_supportReprocessing == false) { + m_pipes[INDEX(PIPE_GSC_PICTURE)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, true, m_nodeNums[INDEX(PIPE_GSC_PICTURE)]); + m_pipes[INDEX(PIPE_GSC_PICTURE)]->setPipeId(PIPE_GSC_PICTURE); + m_pipes[INDEX(PIPE_GSC_PICTURE)]->setPipeName("PIPE_GSC_PICTURE"); + + m_pipes[INDEX(PIPE_JPEG)] = (ExynosCameraPipe*)new ExynosCameraPipeJpeg(m_cameraId, m_parameters, true, m_nodeNums[INDEX(PIPE_JPEG)]); + m_pipes[INDEX(PIPE_JPEG)]->setPipeId(PIPE_JPEG); + m_pipes[INDEX(PIPE_JPEG)]->setPipeName("PIPE_JPEG"); + } + + /* flite pipe initialize */ + ret = m_pipes[INDEX(PIPE_FLITE)]->create(m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_FLITE)); + + /* ISP pipe initialize */ + ret = m_pipes[INDEX(PIPE_ISP)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception 
handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_ISP)); + + /* 3AA pipe initialize */ + ret = m_pipes[INDEX(PIPE_3AA)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_3AA)); + + /* DIS pipe initialize */ + if (m_parameters->getHWVdisMode()) { + ret = m_pipes[INDEX(PIPE_DIS)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_DIS)); + } + + /* VRA pipe initialize */ + if (m_flagMcscVraOTF == false) { + /* EOS */ + ret = m_pipes[INDEX(leaderPipe)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", __FUNCTION__, __LINE__, leaderPipe, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* Change leaderPipe to VRA, Create new instance */ + leaderPipe = PIPE_VRA; + + ret = m_pipes[INDEX(PIPE_VRA)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):VRA create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_VRA)); + } + + /* GSC_PREVIEW pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):GSC create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_GSC)); + + ret = m_pipes[INDEX(PIPE_GSC_VIDEO)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_GSC_VIDEO create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_GSC_VIDEO)); + + if (m_supportReprocessing == false) { + /* GSC_PICTURE pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_PICTURE)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):GSC_PICTURE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_GSC_PICTURE)); + + /* JPEG pipe initialize */ + ret = m_pipes[INDEX(PIPE_JPEG)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):JPEG create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_JPEG)); + } + + /* EOS */ + ret = m_pipes[INDEX(leaderPipe)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", __FUNCTION__, __LINE__, leaderPipe, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryPreview::m_fillNodeGroupInfo(ExynosCameraFrame *frame) +{ + camera2_node_group node_group_info_3aa, node_group_info_isp, node_group_info_dis; + int zoom = m_parameters->getZoomLevel(); + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + ExynosRect bnsSize; /* == bayerCropInputSize */ + ExynosRect bayerCropSize; + ExynosRect bdsSize; + int perFramePos = 0; + bool tpu = false; + bool dis = 
false; + + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + tpu = m_parameters->getTpuEnabledMode(); + dis = m_parameters->getHWVdisMode(); + + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_dis, 0x0, sizeof(camera2_node_group)); + + /* should add this request value in FrameFactory */ + /* 3AA */ + node_group_info_3aa.leader.request = 1; + + /* 3AC */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_3AC); + + /* 3AP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_3AP); + + /* should add this request value in FrameFactory */ + /* ISP */ + node_group_info_isp.leader.request = 1; + + /* SCC */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCC_POS : PERFRAME_FRONT_SCC_POS; + + if (m_supportSCC == true) + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_SCC); + else + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_ISPC); + + /* DIS */ + memcpy(&node_group_info_dis, &node_group_info_isp, sizeof (camera2_node_group)); + + if (tpu == true) { + /* ISPP */ + if (m_requestISPP == true) { + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_ISPP); + } + + /* SCP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + node_group_info_dis.capture[perFramePos].request = frame->getRequest(PIPE_SCP); + } else { + /* SCP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + if (m_flag3aaIspOTF == true) + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_SCP); + else + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_SCP); + } + + ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + m_cameraId, + &node_group_info_3aa, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + dis); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_dis, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + dis); + + frame->storeNodeGroupInfo(&node_group_info_3aa, PERFRAME_INFO_3AA, zoom); + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, zoom); + frame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, zoom); + + return NO_ERROR; +} + +ExynosCameraFrame *ExynosCameraFrameFactoryPreview::createNewFrame(void) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {0}; + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount, FRAME_TYPE_PREVIEW); + if (frame == NULL) + return NULL; + + int requestEntityCount = 0; + + ret = m_initFrameMetadata(frame); + if (ret < 0) + CLOGE("(%s[%d]): frame(%d) metadata initialize fail", __FUNCTION__, __LINE__, m_frameCount); + + if (m_flagFlite3aaOTF == true) { + if (m_requestFLITE) { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + requestEntityCount++; + } + + /* set 3AA_ISP pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_3AA)]); + requestEntityCount++; + + if (m_requestDIS == true) { + if (m_flag3aaIspOTF == true) { + /* set DIS pipe to linkageList */ + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_DIS)], INDEX(PIPE_ISPP)); + requestEntityCount++; + } else { + /* set ISP pipe to linkageList */ + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)], INDEX(PIPE_3AP)); + requestEntityCount++; + + /* set DIS pipe to linkageList */ + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_ISP)], newEntity[INDEX(PIPE_DIS)], INDEX(PIPE_ISPP)); + requestEntityCount++; + } + } else { + if (m_flag3aaIspOTF == true) { + /* skip ISP pipe to linkageList */ + } else { + /* set ISP pipe to linkageList */ + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)], INDEX(PIPE_3AP)); + requestEntityCount++; + } + } + } else { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + + /* 
set 3AA pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_FLITE)], newEntity[INDEX(PIPE_3AA)]); + + /* set ISP pipe to linkageList */ + if (m_requestISP == true) { + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)]); + } + + /* set DIS pipe to linkageList */ + if (m_requestDIS == true) { + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_ISP)], newEntity[INDEX(PIPE_DIS)]); + } + + /* flite, 3aa, isp, dis as one. */ + requestEntityCount++; + } + + if (m_flagMcscVraOTF == false) { + newEntity[INDEX(PIPE_VRA)] = new ExynosCameraFrameEntity(PIPE_VRA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_VRA)]); + requestEntityCount++; + } + + if (m_supportReprocessing == false) { + /* set GSC-Picture pipe to linkageList */ + newEntity[INDEX(PIPE_GSC_PICTURE)] = new ExynosCameraFrameEntity(PIPE_GSC_PICTURE, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_PICTURE)]); + } + + /* set GSC pipe to linkageList */ + newEntity[INDEX(PIPE_GSC)] = new ExynosCameraFrameEntity(PIPE_GSC, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC)]); + + newEntity[INDEX(PIPE_GSC_VIDEO)] = new ExynosCameraFrameEntity(PIPE_GSC_VIDEO, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VIDEO)]); + + /* PIPE_VRA's internal pipe entity */ + newEntity[INDEX(PIPE_GSC_VRA)] = new ExynosCameraFrameEntity(PIPE_GSC_VRA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VRA)]); + + if (m_supportReprocessing == false) { + /* set JPEG pipe to linkageList */ + newEntity[INDEX(PIPE_JPEG)] = new ExynosCameraFrameEntity(PIPE_JPEG, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_JPEG)]); + } + + ret = m_initPipelines(frame); + if (ret < 0) { + CLOGE("ERR(%s):m_initPipelines fail, ret(%d)", __FUNCTION__, ret); + } + + /* TODO: make it dynamic */ + frame->setNumRequestPipe(requestEntityCount); + + m_fillNodeGroupInfo(frame); + + m_frameCount++; + + return frame; +} + +status_t ExynosCameraFrameFactoryPreview::initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + + ret = m_initFlitePipe(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_initFlitePipe() fail", __FUNCTION__, __LINE__); + return ret; + } + + ret = m_setDeviceInfo(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setDeviceInfo() fail", __FUNCTION__, __LINE__); + return ret; + } + + ret = m_initPipes(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_initPipes() fail", __FUNCTION__, __LINE__); + return ret; + } + + m_frameCount = 0; + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryPreview::preparePipes(void) +{ + int ret = 0; + + /* NOTE: Prepare for 3AA is moved after ISP stream on */ + + if (m_requestFLITE) { + ret = m_pipes[INDEX(PIPE_FLITE)]->prepare(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } 
+ } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryPreview::startPipes(void) +{ + int ret = 0; + + if (m_flagMcscVraOTF == false) { + ret = m_pipes[INDEX(PIPE_VRA)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):VRA start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_parameters->getTpuEnabledMode() == true) { + ret = m_pipes[INDEX(PIPE_DIS)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_3AA)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_flagFlite3aaOTF == true) { + /* Here is doing 3AA prepare(qbuf) */ + ret = m_pipes[INDEX(PIPE_3AA)]->prepare(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(true); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE sensorStream on fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]):Starting Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryPreview::startInitialThreads(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d]):start pre-ordered initial pipe thread", __FUNCTION__, __LINE__); + + if (m_requestFLITE) { + ret = startThread(PIPE_FLITE); + if (ret < 0) + return ret; + } + + ret = startThread(PIPE_3AA); + if (ret < 0) + return ret; + + if (m_parameters->is3aaIspOtf() == false) { + ret = startThread(PIPE_ISP); + if (ret < 0) + return ret; + } + + if (m_parameters->getTpuEnabledMode() == true) { + ret = startThread(PIPE_DIS); + if (ret < 0) + return ret; + } + + if (m_parameters->isMcscVraOtf() == false) { + ret = startThread(PIPE_VRA); + if (ret < 0) + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryPreview::setStopFlag(void) +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + + ret |= m_pipes[INDEX(PIPE_FLITE)]->setStopFlag(); + + if (m_pipes[INDEX(PIPE_3AA)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_3AA)]->setStopFlag(); + + if (m_pipes[INDEX(PIPE_ISP)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_ISP)]->setStopFlag(); + + if (m_parameters->getHWVdisMode() == true + && m_pipes[INDEX(PIPE_DIS)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_DIS)]->setStopFlag(); + + if (m_flagMcscVraOTF == false + && m_pipes[INDEX(PIPE_VRA)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_VRA)]->setStopFlag(); + + return ret; +} + +status_t ExynosCameraFrameFactoryPreview::stopPipes(void) +{ + int ret = 0; + + if (m_pipes[INDEX(PIPE_VRA)] != NULL + && m_pipes[INDEX(PIPE_VRA)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_VRA)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):VRA stopThread 
fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_DIS)] != NULL + && m_pipes[INDEX(PIPE_DIS)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_DIS)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_3AA)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_3AA)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* stream off for ISP */ + if (m_pipes[INDEX(PIPE_ISP)] != NULL + && m_pipes[INDEX(PIPE_ISP)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_ISP)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_requestFLITE) { + ret = m_pipes[INDEX(PIPE_FLITE)]->stopThread(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_GSC)]->isThreadRunning() == true) { + ret = stopThread(INDEX(PIPE_GSC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_GSC stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE sensorStream off fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* 3AA force done */ + ret = m_pipes[INDEX(PIPE_3AA)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_3AA force done fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + /* stream off for 3AA */ + ret = m_pipes[INDEX(PIPE_3AA)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISP force done */ + if (m_pipes[INDEX(PIPE_ISP)]->flagStart() == true) { + ret = m_pipes[INDEX(PIPE_ISP)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_ISP force done fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + /* stream off for ISP */ + ret = m_pipes[INDEX(PIPE_ISP)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):ISP stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_parameters->getHWVdisMode()) { + if (m_pipes[INDEX(PIPE_DIS)]->flagStart() == true) { + /* DIS force done */ + ret = m_pipes[INDEX(PIPE_DIS)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_DIS force done fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + ret = m_pipes[INDEX(PIPE_DIS)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):DIS stop fail, ret(%d)", __FUNCTION__, __LINE__, 
ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + if (m_flagMcscVraOTF == false) { + if (m_pipes[INDEX(PIPE_VRA)]->flagStart() == true) { + /* VRA force done */ + ret = m_pipes[INDEX(PIPE_VRA)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_VRA force done fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + ret = m_pipes[INDEX(PIPE_VRA)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):VRA stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + ret = stopThreadAndWait(INDEX(PIPE_GSC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):PIPE_GSC stopThreadAndWait fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + CLOGI("INFO(%s[%d]):Stopping Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +void ExynosCameraFrameFactoryPreview::m_init(void) +{ + m_supportReprocessing = false; + m_flagFlite3aaOTF = false; + m_flagIspMcscOTF = false; + m_flagMcscVraOTF = false; + m_supportSCC = false; + m_supportMCSC = false; + m_supportPureBayerReprocessing = false; + m_flagReprocessing = false; +} + +status_t ExynosCameraFrameFactoryPreview::m_setupConfig() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + int32_t *secondaryControlId = NULL; + int32_t *prevNode = NULL; + + m_flagFlite3aaOTF = m_parameters->isFlite3aaOtf(); + m_flag3aaIspOTF = m_parameters->is3aaIspOtf(); + m_flagIspMcscOTF = m_parameters->isIspMcscOtf(); + m_flagMcscVraOTF = m_parameters->isMcscVraOtf(); + m_supportReprocessing = m_parameters->isReprocessing(); + m_supportSCC = m_parameters->isOwnScc(m_cameraId); + m_supportMCSC = m_parameters->isOwnMCSC(); + + if (m_parameters->getRecordingHint() == true) { + m_supportPureBayerReprocessing = (m_cameraId == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_RECORDING; + } else { + m_supportPureBayerReprocessing = (m_cameraId == CAMERA_ID_BACK) ? 
USE_PURE_BAYER_REPROCESSING : USE_PURE_BAYER_REPROCESSING_FRONT; + } + + m_flagReprocessing = false; + + if (m_supportReprocessing == false) { + if (m_supportSCC == true) + m_requestSCC = 1; + else + m_requestISPC = 1; + } + + if (m_flag3aaIspOTF == true) { + m_request3AP = 0; + m_requestISP = 0; + } else { + m_request3AP = 1; + m_requestISP = 1; + } + + if (m_flagMcscVraOTF == true) + m_requestVRA = 0; + else + m_requestVRA = 1; + + nodeNums = m_nodeNums[INDEX(PIPE_FLITE)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = m_getFliteNodenum(); + nodeNums[CAPTURE_NODE_2] = -1; + controlId = m_sensorIds[INDEX(PIPE_FLITE)]; + controlId[CAPTURE_NODE_1] = m_getSensorId(nodeNums[CAPTURE_NODE_1], m_flagReprocessing); + + ret = m_setDeviceInfo(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setDeviceInfo() fail", __FUNCTION__, __LINE__); + return ret; + } + + nodeNums = m_nodeNums[INDEX(PIPE_GSC)]; + nodeNums[OUTPUT_NODE] = PREVIEW_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC_VIDEO)]; + nodeNums[OUTPUT_NODE] = VIDEO_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC_PICTURE)]; + nodeNums[OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_JPEG)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + return NO_ERROR; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.h b/libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.h new file mode 100644 index 0000000..7fd6982 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactoryPreview.h @@ -0,0 +1,78 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_FACTORY_PREVIEW_H +#define EXYNOS_CAMERA_FRAME_FACTORY_PREVIEW_H + +#include "ExynosCameraFrameFactory.h" + +#include "ExynosCameraFrame.h" + +namespace android { + +class ExynosCameraFrameFactoryPreview : public ExynosCameraFrameFactory { +public: + ExynosCameraFrameFactoryPreview() + { + m_init(); + } + + ExynosCameraFrameFactoryPreview(int cameraId, ExynosCamera1Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? 
"FrameFactoryBackPreview" : "FrameFactoryFrontPreview"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCameraFrameFactoryPreview(); + + virtual status_t create(bool active = true); + + virtual ExynosCameraFrame *createNewFrame(void); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + + virtual status_t startPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t setStopFlag(void); + virtual status_t stopPipes(void); + +protected: + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + virtual status_t m_setupConfig(void); + + /* setting node number on every pipe */ + virtual status_t m_setDeviceInfo(void) = 0; + + /* pipe setting */ + virtual status_t m_initPipes(void) = 0; + +private: + void m_init(void); + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.cpp b/libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.cpp new file mode 100644 index 0000000..9b83c7e --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.cpp @@ -0,0 +1,317 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameFactoryVision" +#include + +#include "ExynosCameraFrameFactoryVision.h" + +namespace android { + +ExynosCameraFrameFactoryVision::~ExynosCameraFrameFactoryVision() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); +} + +status_t ExynosCameraFrameFactoryVision::create(__unused bool active) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_setupConfig(); + + int ret = 0; + int32_t nodeNums[MAX_NODE] = {-1, -1, -1}; + + m_pipes[INDEX(PIPE_FLITE)] = (ExynosCameraPipe*)new ExynosCameraPipeFlite(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_FLITE)]); + m_pipes[INDEX(PIPE_FLITE)]->setPipeId(PIPE_FLITE); + m_pipes[INDEX(PIPE_FLITE)]->setPipeName("PIPE_FLITE"); + + /* flite pipe initialize */ + ret = m_pipes[INDEX(PIPE_FLITE)]->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + CLOGD("DEBUG(%s):Pipe(%d) created", __FUNCTION__, INDEX(PIPE_FLITE)); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryVision::destroy(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + ret = m_pipes[i]->destroy(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_pipes[%d]->destroy() fail", __FUNCTION__, __LINE__, i); + return ret; + } + + SAFE_DELETE(m_pipes[i]); + + CLOGD("DEBUG(%s):Pipe(%d) destroyed", __FUNCTION__, i); + } + } + + m_setCreate(false); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryVision::m_fillNodeGroupInfo(__unused ExynosCameraFrame *frame) +{ + /* Do nothing 
*/ + return NO_ERROR; +} + +ExynosCameraFrame *ExynosCameraFrameFactoryVision::createNewFrame(void) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES]; + + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount); + + int requestEntityCount = 0; + + CLOGV("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_initFrameMetadata(frame); + if (ret < 0) + CLOGE("(%s[%d]): frame(%d) metadata initialize fail", __FUNCTION__, __LINE__, m_frameCount); + + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + requestEntityCount++; + + ret = m_initPipelines(frame); + if (ret < 0) { + CLOGE("ERR(%s):m_initPipelines fail, ret(%d)", __FUNCTION__, ret); + } + + m_fillNodeGroupInfo(frame); + + /* TODO: make it dynamic */ + frame->setNumRequestPipe(requestEntityCount); + + m_frameCount++; + + return frame; +} + +status_t ExynosCameraFrameFactoryVision::initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[3]; + int32_t nodeNums[MAX_NODE] = {-1, -1, -1}; + int32_t sensorIds[MAX_NODE] = {-1, -1, -1}; + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = V4L2_PIX_FMT_SBGGR12; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + CLOGI("INFO(%s[%d]): MaxSensorSize(%dx%d), HwSensorSize(%dx%d)", __FUNCTION__, __LINE__, maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): MaxPixtureSize(%dx%d), HwPixtureSize(%dx%d)", __FUNCTION__, __LINE__, maxPictureW, maxPictureH, hwPictureW, hwPictureH); + + memset(pipeInfo, 0, (sizeof(camera_pipe_info_t) * 3)); + + /* FLITE pipe */ +#if 0 + tempRect.fullW = maxSensorW + 16; + tempRect.fullH = maxSensorH + 10; + tempRect.colorFormat = bayerFormat; +#else + tempRect.fullW = VISION_WIDTH; + tempRect.fullH = VISION_HEIGHT; + tempRect.colorFormat = V4L2_PIX_FMT_SGRBG8; +#endif + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = FRONT_NUM_BAYER_BUFFERS; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_FLITE)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_frameCount = 0; + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryVision::preparePipes(void) +{ + int ret 
= 0; + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_pipes[INDEX(PIPE_FLITE)]->prepare(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryVision::startPipes(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_pipes[INDEX(PIPE_FLITE)]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(true); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE sensorStream on fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]):Starting Front [FLITE] Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryVision::startInitialThreads(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d]):start pre-ordered initial pipe thread", __FUNCTION__, __LINE__); + + ret = startThread(PIPE_FLITE); + if (ret < 0) + return ret; + + return NO_ERROR; +} + +status_t ExynosCameraFrameFactoryVision::stopPipes(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE sensorStream fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->stop(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):FLITE stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]):Stopping Front [FLITE] Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +void ExynosCameraFrameFactoryVision::setRequest3AC(__unused bool enable) +{ + /* Do nothing */ + + return; +} + +void ExynosCameraFrameFactoryVision::m_init(void) +{ + memset(m_nodeNums, -1, sizeof(m_nodeNums)); + memset(m_sensorIds, -1, sizeof(m_sensorIds)); + + /* This seems all need to set 0 */ + m_requestFLITE = 0; + m_request3AP = 0; + m_request3AC = 0; + m_requestISP = 1; + m_requestSCC = 0; + m_requestDIS = 0; + m_requestSCP = 1; + + m_supportReprocessing = false; + m_flagFlite3aaOTF = false; + m_supportSCC = false; + m_supportPureBayerReprocessing = false; + m_flagReprocessing = false; + +} + +status_t ExynosCameraFrameFactoryVision::m_setupConfig(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + int32_t *prevNode = NULL; + bool enableVision = true; + + nodeNums = m_nodeNums[INDEX(PIPE_FLITE)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = FRONT_CAMERA_FLITE_NUM; + nodeNums[CAPTURE_NODE_2] = -1; + controlId = m_sensorIds[INDEX(PIPE_FLITE)]; + controlId[CAPTURE_NODE_1] = m_getSensorId(nodeNums[CAPTURE_NODE_1], enableVision); + prevNode = nodeNums; + + return NO_ERROR; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.h b/libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.h new file mode 100644 index 0000000..01b0651 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameFactoryVision.h @@ -0,0 +1,67 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_FACTORY_VISION_H +#define EXYNOS_CAMERA_FRAME_FACTORY_VISION_H + +#include "ExynosCameraFrameFactory.h" + +namespace android { + +class ExynosCameraFrameFactoryVision : public ExynosCameraFrameFactory { +public: + ExynosCameraFrameFactoryVision() + { + m_init(); + + strncpy(m_name, "FrameFactoryVision", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + ExynosCameraFrameFactoryVision(int cameraId, ExynosCamera1Parameters *param) : ExynosCameraFrameFactory(cameraId, param) + { + m_init(); + + strncpy(m_name, "FrameFactoryVision", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + virtual ~ExynosCameraFrameFactoryVision(); + + virtual status_t create(bool active = true); + virtual status_t destroy(void); + + virtual ExynosCameraFrame *createNewFrame(void); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + + virtual status_t startPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t stopPipes(void); + + virtual void setRequest3AC(bool enable); +protected: + status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + virtual status_t m_setupConfig(void); + +private: + void m_init(void); + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.cpp b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.cpp new file mode 100644 index 0000000..72bb0ef --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.cpp @@ -0,0 +1,1214 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameReprocessingFactory" +#include + +#include "ExynosCameraFrameReprocessingFactory.h" + +namespace android { + +ExynosCameraFrameReprocessingFactory::~ExynosCameraFrameReprocessingFactory() +{ + status_t ret = NO_ERROR; + + ret = destroy(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); + + m_setCreate(false); +} + +status_t ExynosCameraFrameReprocessingFactory::create(__unused bool active) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + uint32_t leaderPipe = PIPE_3AA_REPROCESSING; + + m_setupConfig(); + m_constructReprocessingPipes(); + + /* 3AA_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]):%s(%d) created", __FUNCTION__, __LINE__, + m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->getPipeName(), PIPE_3AA_REPROCESSING); + + /* ISP_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]):%s(%d) created", __FUNCTION__, __LINE__, + m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->getPipeName(), PIPE_ISP_REPROCESSING); + + /* MCSC_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]):%s(%d) created", __FUNCTION__, __LINE__, + m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->getPipeName(), PIPE_MCSC_REPROCESSING); + + /* GSC_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):GSC create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]):%s(%d) created", __FUNCTION__, __LINE__, + m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->getPipeName(), PIPE_GSC_REPROCESSING); + + /* GSC_REPROCESSING3 pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING3)]->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):GSC3 create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]):%s(%d) created", __FUNCTION__, __LINE__, + m_pipes[INDEX(PIPE_GSC_REPROCESSING3)]->getPipeName(), PIPE_GSC_REPROCESSING3); + + if (m_flagHWFCEnabled == false + || m_parameters->isHWFCOnDemand() == true) { + /* JPEG_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_JPEG_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):JPEG create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]):%s(%d) created", __FUNCTION__, __LINE__, + m_pipes[INDEX(PIPE_JPEG_REPROCESSING)]->getPipeName(), PIPE_JPEG_REPROCESSING); + } + + /* EOS */ + if (m_parameters->isUseYuvReprocessing() == true) + leaderPipe = PIPE_MCSC_REPROCESSING; + else + leaderPipe = PIPE_3AA_REPROCESSING; + + ret = m_pipes[INDEX(leaderPipe)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):%s 
V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", + __FUNCTION__, __LINE__, m_pipes[INDEX(leaderPipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int pipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + enum NODE_TYPE leaderNodeType = OUTPUT_NODE; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int hwSensorW = 0, hwSensorH = 0; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int maxThumbnailW = 0, maxThumbnailH = 0; + int bayerFormat = m_parameters->getBayerFormat(PIPE_3AA_REPROCESSING); + int previewFormat = m_parameters->getPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + int perFramePos = 0; + + memset(&nullPipeInfo, 0, sizeof(camera_pipe_info_t)); + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + m_parameters->getPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getMaxThumbnailSize(&maxThumbnailW, &maxThumbnailH); + + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): MaxPixtureSize(%dx%d), HwPixtureSize(%dx%d)", __FUNCTION__, __LINE__, maxPictureW, maxPictureH, hwPictureW, hwPictureH); + CLOGI("INFO(%s[%d]): PreviewSize(%dx%d), PictureSize(%dx%d)", __FUNCTION__, __LINE__, previewW, previewH, pictureW, pictureH); + CLOGI("INFO(%s[%d]): MaxThumbnailSize(%dx%d)", __FUNCTION__, __LINE__, maxThumbnailW, maxThumbnailH); + + + if (m_parameters->isUseYuvReprocessing() == false) { + /* + * 3AA for Reprocessing + */ + if (m_supportPureBayerReprocessing == true) { + pipeId = PIPE_3AA_REPROCESSING; + + /* 3AS */ + nodeType = getNodeType(PIPE_3AA_REPROCESSING); + bayerFormat = m_parameters->getBayerFormat(PIPE_3AA_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + + /* Set output node default info */ + SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_PURE_REPROCESSING_3AA); + +#if 0 + /* 3AC */ + nodeType = getNodeType(PIPE_3AC_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_3AC_POS; + bayerFormat = m_parameters->getBayerFormat(PIPE_3AC_REPROCESSING); + + /* set 
v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); +#endif + + /* 3AP */ + nodeType = getNodeType(PIPE_3AP_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_3AP_POS; + bayerFormat = m_parameters->getBayerFormat(PIPE_3AP_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* setup pipe info to 3AA pipe */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + } else { + /* + * 3A video node is opened for dirty bayer. + * So, we have to do setinput to 3A video node. + */ + pipeId = PIPE_3AA_REPROCESSING; + + /* setup pipe info to 3AA pipe */ + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA setupPipe for dirty bayer reprocessing fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + + + /* + * ISP for Reprocessing + */ + + /* ISP */ + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = PIPE_ISP_REPROCESSING; + nodeType = getNodeType(PIPE_ISP_REPROCESSING); + bayerFormat = m_parameters->getBayerFormat(PIPE_ISP_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = m_supportPureBayerReprocessing ? config->current->bufInfo.num_picture_buffers : config->current->bufInfo.num_bayer_buffers; + + /* Set output node default info */ + int ispPerframeInfoIndex = m_supportPureBayerReprocessing ? 
PERFRAME_INFO_PURE_REPROCESSING_ISP : PERFRAME_INFO_DIRTY_REPROCESSING_ISP; + SET_OUTPUT_DEVICE_BASIC_INFO(ispPerframeInfoIndex); + } + + /* ISPC */ + nodeType = getNodeType(PIPE_ISPC_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_ISPC_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* ISPP */ + nodeType = getNodeType(PIPE_ISPP_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_ISPP_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* setup pipe info to ISP pipe */ + if (m_flagIspTpuOTF == false && m_flagIspMcscOTF == false) { + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + } + + + /* + * MCSC for Reprocessing + */ + + /* MCSC */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + pipeId = PIPE_MCSC_REPROCESSING; + nodeType = getNodeType(PIPE_MCSC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set output node default info */ + int mcscPerframeInfoIndex = !(0) ? PERFRAME_INFO_YUV_REPROCESSING_MCSC : (m_supportPureBayerReprocessing ? 
PERFRAME_INFO_PURE_REPROCESSING_MCSC : PERFRAME_INFO_DIRTY_REPROCESSING_MCSC); + SET_OUTPUT_DEVICE_BASIC_INFO(mcscPerframeInfoIndex); + } + + /* MCSC0 */ + nodeType = getNodeType(PIPE_MCSC0_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC0_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* 34xx hasn't multi-port */ +#if 0 + /* MCSC3 */ + nodeType = getNodeType(PIPE_MCSC3_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC3_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* MCSC4 */ + nodeType = getNodeType(PIPE_MCSC4_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC4_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); +#endif + + if (m_flagHWFCEnabled == true) { + /* JPEG Src */ + nodeType = getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* Thumbnail Src */ + nodeType = getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* JPEG Dst */ + nodeType = getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* Thumbnail Dst */ + nodeType = getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + 
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + } + + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::preparePipes(void) +{ +#if 0 + status_t ret = NO_ERROR; + + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->prepare(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } +#endif + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::startPipes(void) +{ + status_t ret = NO_ERROR; + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + /* MCSC Reprocessing */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):MCSC start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_parameters->isUseYuvReprocessing() == false) { + /* ISP Reprocessing */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* 3AA Reprocessing */ + if (m_supportPureBayerReprocessing == true) { + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP start fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + CLOGI("INFO(%s[%d]):Starting Reprocessing [SCC>ISP] Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::stopPipes(void) +{ + status_t ret = NO_ERROR; + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters->isUseYuvReprocessing() == false) { + /* 3AA Reprocessing Thread stop */ + if (m_supportPureBayerReprocessing == true) { + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* ISP Reprocessing Thread stop */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + /* MCSC Reprocessing Thread stop */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):MCSC stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* GSC Reprocessing Thread stop */ + ret = 
m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):GSC stopThread fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_parameters->isUseYuvReprocessing() == false) { + /* 3AA Reprocessing stop */ + if (m_supportPureBayerReprocessing == true) { + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* ISP Reprocessing stop */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + /* MCSC Reprocessing stop */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):MCSC stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* GSC Reprocessing stop */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):GSC stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* GSC3 Reprocessing stop */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING3)]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):GSC3 stop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI("INFO(%s[%d]):Stopping Reprocessing [3AA>MCSC] Success!", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::startInitialThreads(void) +{ + status_t ret = NO_ERROR; + + CLOGI("INFO(%s[%d]):start pre-ordered initial pipe thread", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::setStopFlag(void) +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->setStopFlag(); + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->setStopFlag(); + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->setStopFlag(); + + return NO_ERROR; +} + +ExynosCameraFrame * ExynosCameraFrameReprocessingFactory::createNewFrame(void) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {0}; + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount, FRAME_TYPE_REPROCESSING); + + int requestEntityCount = 0; + int pipeId = -1; + int parentPipeId = PIPE_3AA_REPROCESSING; + int curShotMode = 0; + int curSeriesShotMode = 0; + if (m_parameters != NULL) { + curShotMode = m_parameters->getShotMode(); + curSeriesShotMode = m_parameters->getSeriesShotMode(); + } + + ret = m_initFrameMetadata(frame); + if (ret != NO_ERROR) + CLOGE("(%s[%d]):frame(%d) metadata initialize fail", __FUNCTION__, __LINE__, m_frameCount); + + if (m_parameters->isUseYuvReprocessing() == false) { + /* set 3AA pipe to linkageList */ + if (m_supportPureBayerReprocessing == true) { + pipeId = PIPE_3AA_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + parentPipeId = pipeId; + } 
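+ /* With pure-bayer reprocessing the 3AA entity above heads the frame; for dirty-bayer input the ISP entity added below becomes the head of the linkage list (see m_setupConfig()). */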
+ + /* set ISP pipe to linkageList */ + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = PIPE_ISP_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + if (m_supportPureBayerReprocessing == true) + frame->addChildEntity(newEntity[INDEX(parentPipeId)], newEntity[INDEX(pipeId)], INDEX(PIPE_3AP_REPROCESSING)); + else + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + parentPipeId = pipeId; + } + + requestEntityCount++; + } + + /* set MCSC pipe to linkageList */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + pipeId = PIPE_MCSC_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + if (m_parameters->isUseYuvReprocessing() == true) + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + else + frame->addChildEntity(newEntity[INDEX(parentPipeId)], newEntity[INDEX(pipeId)], INDEX(PIPE_ISPC_REPROCESSING)); + requestEntityCount++; + } + + /* set GSC pipe to linkageList */ + pipeId = PIPE_GSC_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + if (m_parameters->needGSCForCapture(m_cameraId) == true) + requestEntityCount++; + + /* set GSC pipe to linkageList */ + pipeId = PIPE_GSC_REPROCESSING2; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + if (m_parameters->getIsThumbnailCallbackOn() == true + ) + requestEntityCount++; + + /* set JPEG pipe to linkageList */ + pipeId = PIPE_JPEG_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + if (curShotMode != SHOT_MODE_RICH_TONE + && curSeriesShotMode != SERIES_SHOT_MODE_LLS + && curSeriesShotMode != SERIES_SHOT_MODE_SIS + && m_parameters->getShotMode() != SHOT_MODE_FRONT_PANORAMA + && m_parameters->getHighResolutionCallbackMode() == false + && (m_flagHWFCEnabled == false || m_parameters->isHWFCOnDemand() == true)) + requestEntityCount++; + + ret = m_initPipelines(frame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_initPipelines fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + frame->setNumRequestPipe(requestEntityCount); + + m_fillNodeGroupInfo(frame); + + m_frameCount++; + + return frame; +} + +status_t ExynosCameraFrameReprocessingFactory::m_setupConfig(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int pipeId = INDEX(PIPE_3AA_REPROCESSING); + int node3aa = -1, node3ac = -1, node3ap = -1; + int nodeIsp = -1, nodeIspc = -1, nodeIspp = -1; + int nodeTpu = -1; + int nodeMcsc = -1, nodeMcscp2 = -1, nodeMcscp3 = -1, nodeMcscp4 = -1; + int previousPipeId = PIPE_FLITE; + enum NODE_TYPE nodeType = INVALID_NODE; + bool flagStreamLeader = false; + + m_flagFlite3aaOTF = m_parameters->isFlite3aaOtf(); + m_flag3aaIspOTF = m_parameters->isReprocessing3aaIspOTF(); + m_flagIspTpuOTF = false; + m_flagIspMcscOTF = m_parameters->isReprocessingIspMcscOTF(); + m_flagTpuMcscOTF = false; + + m_supportReprocessing = m_parameters->isReprocessing(); + m_supportPureBayerReprocessing = m_parameters->getUsePureBayerReprocessing(); + + m_request3AP = !(m_flag3aaIspOTF); + if (m_flagHWFCEnabled == true) { + 
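+ /* When HWFC is enabled, the JPEG and thumbnail outputs are requested by default (see the PIPE_HWFC_* node setup below). */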
m_requestJPEG = true; + m_requestThumbnail = true; + } + + node3aa = FIMC_IS_VIDEO_30S_NUM; + node3ac = FIMC_IS_VIDEO_30C_NUM; + node3ap = FIMC_IS_VIDEO_30P_NUM; + nodeIsp = FIMC_IS_VIDEO_I0S_NUM; + nodeIspc = FIMC_IS_VIDEO_I0C_NUM; + nodeIspp = FIMC_IS_VIDEO_I0P_NUM; + nodeMcsc = FIMC_IS_VIDEO_M1S_NUM; + nodeMcscp2 = FIMC_IS_VIDEO_M0P_NUM; + nodeMcscp3 = FIMC_IS_VIDEO_M1P_NUM; + nodeMcscp4 = FIMC_IS_VIDEO_M2P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA_REPROCESSING)); + m_initDeviceInfo(INDEX(PIPE_ISP_REPROCESSING)); + m_initDeviceInfo(INDEX(PIPE_MCSC_REPROCESSING)); + + + if (m_parameters->isUseYuvReprocessing() == false) { + /* + * 3AA for Reprocessing + */ + pipeId = INDEX(PIPE_3AA_REPROCESSING); + previousPipeId = PIPE_FLITE; + + /* 3AS */ + + /* + * If dirty bayer is used for reprocessing, the ISP video node is leader in the reprocessing stream. + */ + if (m_supportPureBayerReprocessing == false && m_flag3aaIspOTF == false) + flagStreamLeader = false; + else + flagStreamLeader = true; + + nodeType = getNodeType(PIPE_3AA_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AA_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = node3aa; + m_deviceInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flagFlite3aaOTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_getFliteNodenum(), false, flagStreamLeader, m_flagReprocessing); + + /* Other nodes is not stream leader */ + flagStreamLeader = false; + +#if 0 + /* 3AC */ + nodeType = getNodeType(PIPE_3AC_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AC_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = node3ac; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_3AA_CAPTURE_OPT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); +#endif + + /* 3AP */ + nodeType = getNodeType(PIPE_3AP_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_3AP_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = node3ap; + m_deviceInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flag3aaIspOTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_3AA_CAPTURE_MAIN", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AA_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* + * ISP for Reprocessing + */ + previousPipeId = pipeId; + pipeId = m_flag3aaIspOTF ? INDEX(PIPE_3AA_REPROCESSING) : INDEX(PIPE_ISP_REPROCESSING); + + /* + * If dirty bayer is used for reprocessing, the ISP video node is leader in the reprocessing stream. 
+ */ + if (m_supportPureBayerReprocessing == false && m_flag3aaIspOTF == false) + flagStreamLeader = true; + + /* ISPS */ + nodeType = getNodeType(PIPE_ISP_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISP_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeIsp; + m_deviceInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flag3aaIspOTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(PIPE_3AP_REPROCESSING)], m_flag3aaIspOTF, flagStreamLeader, m_flagReprocessing); + + flagStreamLeader = false; + + /* ISPC */ + nodeType = getNodeType(PIPE_ISPC_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPC_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeIspc; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_ISP_CAPTURE_M2M", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* ISPP */ + nodeType = getNodeType(PIPE_ISPP_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_ISPP_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeIspp; + m_deviceInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flagIspMcscOTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_ISP_CAPTURE_OTF", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISP_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + } + + /* + * MCSC for Reprocessing + */ + previousPipeId = pipeId; + + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) + pipeId = INDEX(PIPE_MCSC_REPROCESSING); + + /* MCSC */ + nodeType = getNodeType(PIPE_MCSC_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcsc; + m_deviceInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flagIspMcscOTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + if (m_parameters->isUseYuvReprocessing() == true) + m_sensorIds[pipeId][nodeType] = m_getSensorId(node3ac, m_flagIspMcscOTF, true, m_flagReprocessing); + else + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[previousPipeId].nodeNum[getNodeType(PIPE_ISPP_REPROCESSING)], m_flagIspMcscOTF, flagStreamLeader, m_flagReprocessing); + + /* MCSC0 */ + nodeType = getNodeType(PIPE_MCSC0_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC0_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp2; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* 34xx hasn't multi-port */ +#if 0 + /* MCSC3 */ + nodeType = getNodeType(PIPE_MCSC3_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC3_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp3; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], 
"REPROCESSING_MCSC_CAPTURE_MAIN", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* MCSC4 */ + nodeType = getNodeType(PIPE_MCSC4_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_MCSC4_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = nodeMcscp4; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_THUMBNAIL", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); +#endif + + if (m_flagHWFCEnabled == true) { + /* JPEG Src */ + nodeType = getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_JPEG_SRC_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_JPEG_NUM; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "HWFC_JPEG_SRC", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* Thumbnail Src */ + nodeType = getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_THUMB_SRC_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_THUMB_NUM; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "HWFC_THUMBNAIL_SRC", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* JPEG Dst */ + nodeType = getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_JPEG_DST_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_JPEG_NUM; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "HWFC_JPEG_DST", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* Thumbnail Dst */ + nodeType = getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING); + m_deviceInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_THUMB_DST_REPROCESSING; + m_deviceInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_THUMB_NUM; + m_deviceInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_deviceInfo[pipeId].nodeName[nodeType], "HWFC_THUMBNAIL_DST", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + } + + /* GSC for Reprocessing */ + m_nodeNums[INDEX(PIPE_GSC_REPROCESSING)][OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + + /* GSC3 for Reprocessing */ + m_nodeNums[INDEX(PIPE_GSC_REPROCESSING3)][OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + + /* JPEG for Reprocessing */ + m_nodeNums[INDEX(PIPE_JPEG_REPROCESSING)][OUTPUT_NODE] = -1; + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::m_constructReprocessingPipes(void) +{ + 
CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int pipeId = -1; + + /* 3AA for Reprocessing */ + pipeId = PIPE_3AA_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, m_flagReprocessing, &m_deviceInfo[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_3AA_REPROCESSING"); + + /* ISP for Reprocessing */ + pipeId = PIPE_ISP_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, m_flagReprocessing, &m_deviceInfo[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_ISP_REPROCESSING"); + + /* MCSC for Reprocessing */ + pipeId = PIPE_MCSC_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, m_flagReprocessing, &m_deviceInfo[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_MCSC_REPROCESSING"); + + /* GSC for Reprocessing */ + pipeId = PIPE_GSC_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, m_flagReprocessing, m_nodeNums[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_GSC_REPROCESSING"); + + /* GSC3 for Reprocessing */ + pipeId = PIPE_GSC_REPROCESSING3; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, m_flagReprocessing, m_nodeNums[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_GSC_REPROCESSING3"); + + if (m_flagHWFCEnabled == false + || m_parameters->isHWFCOnDemand() == true) { + /* JPEG for Reprocessing */ + pipeId = PIPE_JPEG_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraPipeJpeg(m_cameraId, m_parameters, m_flagReprocessing, m_nodeNums[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_JPEG_REPROCESSING"); + } + + CLOGI("INFO(%s[%d]):pipe ids for reprocessing", __FUNCTION__, __LINE__); + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + CLOGI("INFO(%s[%d]):-> m_pipes[%d] : PipeId(%d)", __FUNCTION__, __LINE__ , i, m_pipes[i]->getPipeId()); + } + } + + return NO_ERROR; +} + +status_t ExynosCameraFrameReprocessingFactory::m_fillNodeGroupInfo(ExynosCameraFrame *frame) +{ + camera2_node_group node_group_info_3aa; + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_mcsc; + camera2_node_group *node_group_info_temp = NULL; + + int zoom = m_parameters->getZoomLevel(); + int pipeId = -1; + uint32_t perframePosition = 0; + + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_mcsc, 0x0, sizeof(camera2_node_group)); + + if (m_parameters->isUseYuvReprocessing() == false) { + /* 3AA for Reprocessing */ + if (m_supportPureBayerReprocessing == true) { + pipeId = INDEX(PIPE_3AA_REPROCESSING); + node_group_info_temp = &node_group_info_3aa; + node_group_info_temp->leader.request = 1; + if (m_request3AC == true) { + node_group_info_temp->capture[perframePosition].request = m_request3AC; + node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AC_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + + node_group_info_temp->capture[perframePosition].request = m_request3AP; + 
node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_3AP_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + + /* ISP for Reprocessing */ + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = INDEX(PIPE_ISP_REPROCESSING); + perframePosition = 0; + node_group_info_temp = &node_group_info_isp; + node_group_info_temp->leader.request = 1; + } + + if (m_flagIspMcscOTF == false) { + node_group_info_temp->capture[perframePosition].request = m_requestISPC; + node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISPC_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + + if (m_requestISPP == true) { + node_group_info_temp->capture[perframePosition].request = m_requestISPP; + node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_ISPP_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + } + + /* MCSC for Reprocessing */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + pipeId = INDEX(PIPE_MCSC_REPROCESSING); + node_group_info_temp = &node_group_info_mcsc; + node_group_info_temp->leader.request = 1; + } + + perframePosition = PERFRAME_REPROCESSING_SCC_POS; + node_group_info_temp->capture[perframePosition].request = m_requestSCP; + node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC0_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + + /* 34xx hasn't multi-port */ +#if 0 + node_group_info_temp->capture[perframePosition].request = m_requestMCSC3; + node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC3_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + + node_group_info_temp->capture[perframePosition].request = m_requestMCSC4; + node_group_info_temp->capture[perframePosition].vid = m_deviceInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC4_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; +#endif + + if (m_parameters->isUseYuvReprocessing() == false) { + updateNodeGroupInfo( + PIPE_3AA_REPROCESSING, + m_parameters, + &node_group_info_3aa); + frame->storeNodeGroupInfo(&node_group_info_3aa, PERFRAME_INFO_PURE_REPROCESSING_3AA, zoom); + + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + updateNodeGroupInfo( + PIPE_ISP_REPROCESSING, + m_parameters, + &node_group_info_isp); + if (m_supportPureBayerReprocessing == true) + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_PURE_REPROCESSING_ISP, zoom); + else + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_DIRTY_REPROCESSING_ISP, zoom); + } + } else { + frame->storeNodeGroupInfo(&node_group_info_mcsc, PERFRAME_INFO_YUV_REPROCESSING_MCSC, zoom); + } + + return NO_ERROR; +} + +void ExynosCameraFrameReprocessingFactory::m_init(void) +{ + m_flagReprocessing = true; + m_flagHWFCEnabled = m_parameters->isHWFCEnabled(); +} + +enum NODE_TYPE ExynosCameraFrameReprocessingFactory::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + switch (pipeId) { + case PIPE_FLITE_REPROCESSING: + nodeType = CAPTURE_NODE; + break; + case PIPE_3AA_REPROCESSING: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AP_REPROCESSING: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_ISP_REPROCESSING: + nodeType = OTF_NODE_1; + break; + case PIPE_ISPC_REPROCESSING: + nodeType = CAPTURE_NODE_2; + break; + case 
PIPE_ISPP_REPROCESSING: + case PIPE_SCC_REPROCESSING: + nodeType = CAPTURE_NODE_3; + break; + case PIPE_MCSC_REPROCESSING: + if (0) + nodeType = OTF_NODE_2; + else + nodeType = OUTPUT_NODE; + break; + case PIPE_MCSC0_REPROCESSING: + nodeType = CAPTURE_NODE_4; + break; + case PIPE_HWFC_JPEG_DST_REPROCESSING: + nodeType = CAPTURE_NODE_5; + break; + case PIPE_HWFC_JPEG_SRC_REPROCESSING: + nodeType = CAPTURE_NODE_6; + break; + case PIPE_HWFC_THUMB_SRC_REPROCESSING: + nodeType = CAPTURE_NODE_7; + break; + case PIPE_HWFC_THUMB_DST_REPROCESSING: + nodeType = CAPTURE_NODE_8; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +}; + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.h b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.h new file mode 100644 index 0000000..8f89da3 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactory.h @@ -0,0 +1,69 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_REPROCESSING_FACTORY_H +#define EXYNOS_CAMERA_FRAME_REPROCESSING_FACTORY_H + +#include "ExynosCameraFrameFactory.h" + +namespace android { + +class ExynosCameraFrameReprocessingFactory : public ExynosCameraFrameFactory { +public: + ExynosCameraFrameReprocessingFactory() + { + m_init(); + } + + ExynosCameraFrameReprocessingFactory(int cameraId, ExynosCamera1Parameters *param) : ExynosCameraFrameFactory(cameraId, param) + { + m_init(); + + strncpy(m_name, "ReprocessingFactory", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + virtual ~ExynosCameraFrameReprocessingFactory(); + + virtual status_t create(bool active = true); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + + virtual status_t startPipes(void); + virtual status_t stopPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t setStopFlag(void); + + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); + + virtual ExynosCameraFrame *createNewFrame(void); + +protected: + virtual status_t m_setupConfig(void); + virtual status_t m_constructReprocessingPipes(void); + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + +private: + void m_init(void); + +protected: + bool m_flagHWFCEnabled; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.cpp b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.cpp new file mode 100644 index 0000000..3e610bc --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.cpp @@ -0,0 +1,409 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameReprocessingFactoryNV21" +#include + +#include "ExynosCameraFrameReprocessingFactoryNV21.h" + +namespace android { + +ExynosCameraFrameReprocessingFactoryNV21::~ExynosCameraFrameReprocessingFactoryNV21() +{ + status_t ret = NO_ERROR; + + ret = destroy(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):destroy fail", __FUNCTION__, __LINE__); + + m_setCreate(false); +} + +status_t ExynosCameraFrameReprocessingFactoryNV21::initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int pipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + enum NODE_TYPE leaderNodeType = OUTPUT_NODE; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int hwSensorW = 0, hwSensorH = 0; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int maxThumbnailW = 0, maxThumbnailH = 0; + int bayerFormat = m_parameters->getBayerFormat(PIPE_3AA_REPROCESSING); + int previewFormat = m_parameters->getPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + int perFramePos = 0; + + memset(&nullPipeInfo, 0, sizeof(camera_pipe_info_t)); + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + m_parameters->getPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getMaxThumbnailSize(&maxThumbnailW, &maxThumbnailH); + + CLOGI("INFO(%s[%d]): MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", __FUNCTION__, __LINE__, maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI("INFO(%s[%d]): MaxPixtureSize(%dx%d), HwPixtureSize(%dx%d)", __FUNCTION__, __LINE__, maxPictureW, maxPictureH, hwPictureW, hwPictureH); + CLOGI("INFO(%s[%d]): PreviewSize(%dx%d), PictureSize(%dx%d)", __FUNCTION__, __LINE__, previewW, previewH, pictureW, pictureH); + CLOGI("INFO(%s[%d]): MaxThumbnailSize(%dx%d)", __FUNCTION__, __LINE__, maxThumbnailW, maxThumbnailH); + + + if (m_parameters->isUseYuvReprocessing() == false) { + /* + * 3AA for Reprocessing + */ + if (m_supportPureBayerReprocessing == true) { + pipeId = PIPE_3AA_REPROCESSING; + + /* 3AS */ + nodeType = getNodeType(PIPE_3AA_REPROCESSING); + bayerFormat = m_parameters->getBayerFormat(PIPE_3AA_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = 
hwSensorW; + tempRect.fullH = hwSensorH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + + /* Set output node default info */ + SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_PURE_REPROCESSING_3AA); + +#if 0 + /* 3AC */ + nodeType = getNodeType(PIPE_3AC_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_3AC_POS; + bayerFormat = m_parameters->getBayerFormat(PIPE_3AC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); +#endif + + /* 3AP */ + nodeType = getNodeType(PIPE_3AP_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_3AP_POS; + bayerFormat = m_parameters->getBayerFormat(PIPE_3AP_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* setup pipe info to 3AA pipe */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + } else { + /* + * 3A video node is opened for dirty bayer. + * So, we have to do setinput to 3A video node. + */ + pipeId = PIPE_3AA_REPROCESSING; + + /* setup pipe info to 3AA pipe */ + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):3AA setupPipe for dirty bayer reprocessing fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + + + /* + * ISP for Reprocessing + */ + + /* ISP */ + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = PIPE_ISP_REPROCESSING; + nodeType = getNodeType(PIPE_ISP_REPROCESSING); + bayerFormat = m_parameters->getBayerFormat(PIPE_ISP_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = m_supportPureBayerReprocessing ? config->current->bufInfo.num_picture_buffers : config->current->bufInfo.num_bayer_buffers; + + /* Set output node default info */ + int ispPerframeInfoIndex = m_supportPureBayerReprocessing ? 
PERFRAME_INFO_PURE_REPROCESSING_ISP : PERFRAME_INFO_DIRTY_REPROCESSING_ISP; + SET_OUTPUT_DEVICE_BASIC_INFO(ispPerframeInfoIndex); + } + + /* ISPC */ + nodeType = getNodeType(PIPE_ISPC_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_ISPC_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* ISPP */ + nodeType = getNodeType(PIPE_ISPP_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_ISPP_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* setup pipe info to ISP pipe */ + if (m_flagIspTpuOTF == false && m_flagIspMcscOTF == false) { + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + } + + + /* + * MCSC for Reprocessing + */ + + /* MCSC */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + pipeId = PIPE_MCSC_REPROCESSING; + nodeType = getNodeType(PIPE_MCSC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set output node default info */ + int mcscPerframeInfoIndex = !(0) ? PERFRAME_INFO_YUV_REPROCESSING_MCSC : (m_supportPureBayerReprocessing ? 
PERFRAME_INFO_PURE_REPROCESSING_MCSC : PERFRAME_INFO_DIRTY_REPROCESSING_MCSC); + SET_OUTPUT_DEVICE_BASIC_INFO(mcscPerframeInfoIndex); + } + + /* MCSC0 */ + nodeType = getNodeType(PIPE_MCSC0_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC0_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = V4L2_PIX_FMT_NV21; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* 34xx hasn't multi-port */ +#if 0 + /* MCSC3 */ + nodeType = getNodeType(PIPE_MCSC3_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC3_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* MCSC4 */ + nodeType = getNodeType(PIPE_MCSC4_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC4_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); +#endif + + if (m_flagHWFCEnabled == true) { + /* JPEG Src */ + nodeType = getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* Thumbnail Src */ + nodeType = getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* JPEG Dst */ + nodeType = getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* Thumbnail Dst */ + nodeType = getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + 
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + } + + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ISP setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.h b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.h new file mode 100644 index 0000000..808bc84 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraFrameReprocessingFactoryNV21.h @@ -0,0 +1,52 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_REPROCESSING_FACTORY_NV21_H +#define EXYNOS_CAMERA_FRAME_REPROCESSING_FACTORY_NV21_H + +#include "ExynosCameraFrameReprocessingFactory.h" + +namespace android { + +class ExynosCameraFrameReprocessingFactoryNV21 : public ExynosCameraFrameReprocessingFactory { +public: + ExynosCameraFrameReprocessingFactoryNV21() + { + m_init(); + } + + ExynosCameraFrameReprocessingFactoryNV21(int cameraId, ExynosCamera1Parameters *param) : ExynosCameraFrameReprocessingFactory(cameraId, param) + { + m_init(); + + strncpy(m_name, "ReprocessingFactoryNV21", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + virtual ~ExynosCameraFrameReprocessingFactoryNV21(); + + virtual status_t initPipes(void); + +protected: + +private: + void m_init(void) {}; + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal1/ExynosCameraSizeControl.cpp b/libcamera/34xx/hal1/ExynosCameraSizeControl.cpp new file mode 100644 index 0000000..03577d1 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraSizeControl.cpp @@ -0,0 +1,96 @@ +/* + ** + ** Copyright 2015, Samsung Electronics Co. LTD + ** + ** Licensed under the Apache License, Version 2.0 (the "License"); + ** you may not use this file except in compliance with the License. + ** You may obtain a copy of the License at + ** + ** http://www.apache.org/licenses/LICENSE-2.0 + ** + ** Unless required by applicable law or agreed to in writing, software + ** distributed under the License is distributed on an "AS IS" BASIS, + ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ** See the License for the specific language governing permissions and + ** limitations under the License. 
+ */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraSizeControl" +#include + +#include "ExynosCameraSizeControl.h" + +namespace android { + +void updateNodeGroupInfo( + __unused int pipeId, + __unused ExynosCamera1Parameters *params, + __unused camera2_node_group *node_group_info) +{ + ALOGE("ERR(%s[%d]):This is invalid function call in 34xx. Use ExynosCameraUtilsModule's APIs", __FUNCTION__, __LINE__); +} + +void setLeaderSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + int cropX, int cropY, + int width, int height) +{ + node_group_info->leader.input.cropRegion[0] = cropX; + node_group_info->leader.input.cropRegion[1] = cropY; + node_group_info->leader.input.cropRegion[2] = width; + node_group_info->leader.input.cropRegion[3] = height; + + node_group_info->leader.output.cropRegion[0] = 0; + node_group_info->leader.output.cropRegion[1] = 0; + node_group_info->leader.output.cropRegion[2] = width; + node_group_info->leader.output.cropRegion[3] = height; +} + +void setCaptureSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int width, int height) +{ + node_group_info->capture[perframePosition].input.cropRegion[0] = 0; + node_group_info->capture[perframePosition].input.cropRegion[1] = 0; + node_group_info->capture[perframePosition].input.cropRegion[2] = width; + node_group_info->capture[perframePosition].input.cropRegion[3] = height; + + node_group_info->capture[perframePosition].output.cropRegion[0] = 0; + node_group_info->capture[perframePosition].output.cropRegion[1] = 0; + node_group_info->capture[perframePosition].output.cropRegion[2] = width; + node_group_info->capture[perframePosition].output.cropRegion[3] = height; +} + +void setCaptureCropNScaleSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int cropX, int cropY, + int cropWidth, int cropHeight, + int targetWidth, int targetHeight) +{ + node_group_info->capture[perframePosition].input.cropRegion[0] = cropX; + node_group_info->capture[perframePosition].input.cropRegion[1] = cropY; + node_group_info->capture[perframePosition].input.cropRegion[2] = cropWidth; + node_group_info->capture[perframePosition].input.cropRegion[3] = cropHeight; + + node_group_info->capture[perframePosition].output.cropRegion[0] = 0; + node_group_info->capture[perframePosition].output.cropRegion[1] = 0; + node_group_info->capture[perframePosition].output.cropRegion[2] = targetWidth; + node_group_info->capture[perframePosition].output.cropRegion[3] = targetHeight; +} + +bool useSizeControlApi(void) +{ + bool use = false; +#ifdef USE_SIZE_CONTROL_API + use = USE_SIZE_CONTROL_API; +#else + use = false; + ALOGV("INFO(%s[%d]):Use Legacy Utils Module API", __FUNCTION__, __LINE__); +#endif + return use; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/ExynosCameraSizeControl.h b/libcamera/34xx/hal1/ExynosCameraSizeControl.h new file mode 100644 index 0000000..425ffb1 --- /dev/null +++ b/libcamera/34xx/hal1/ExynosCameraSizeControl.h @@ -0,0 +1,65 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_SIZE_CONTROL_H +#define EXYNOS_CAMERA_SIZE_CONTROL_H + +#include +#include +#include + +#include "ExynosCamera1Parameters.h" + +#include "ExynosCameraConfig.h" + +#include "ExynosRect.h" +#include "fimc-is-metadata.h" +#include "ExynosCameraUtils.h" + +/* #define DEBUG_PERFRAME_SIZE */ + +namespace android { + +void updateNodeGroupInfo( + int pipeId, + ExynosCamera1Parameters *params, + camera2_node_group *node_group_info); + +/* Helper function */ +void setLeaderSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + int cropX, int cropY, + int width, int height); + +void setCaptureSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int width, int height); + +void setCaptureCropNScaleSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int cropX, int cropY, + int cropWidth, int cropHeight, + int targetWidth, int targetHeight); + +bool useSizeControlApi(void); + +}; /* namespace android */ + +#endif + diff --git a/libcamera/34xx/hal1/Ged/ExynosCamera1ParametersVendor.cpp b/libcamera/34xx/hal1/Ged/ExynosCamera1ParametersVendor.cpp new file mode 100644 index 0000000..ce1d83d --- /dev/null +++ b/libcamera/34xx/hal1/Ged/ExynosCamera1ParametersVendor.cpp @@ -0,0 +1,4212 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera1Parameters" +#include + +#include "ExynosCamera1Parameters.h" + +namespace android { + +void ExynosCamera1Parameters::vendorSpecificConstructor(__unused int cameraId) +{ + mDebugInfo.debugSize[APP_MARKER_4] = sizeof(struct camera2_udm); + + m_zoom_activated = false; +} + +status_t ExynosCamera1Parameters::setParameters(const CameraParameters& params) +{ + status_t ret = NO_ERROR; + +#ifdef TEST_GED_HIGH_SPEED_RECORDING + int minFpsRange = 0, maxFpsRange = 0; + int frameRate = 0; + + params.getPreviewFpsRange(&minFpsRange, &maxFpsRange); + frameRate = params.getPreviewFrameRate(); + CLOGD("DEBUG(%s[%d]):getFastFpsMode=%d, maxFpsRange=%d, frameRate=%d", + __FUNCTION__, __LINE__, getFastFpsMode(), maxFpsRange, frameRate); + if (frameRate == 60) { + setFastFpsMode(1); + } else if (frameRate == 120) { + setFastFpsMode(2); + } else { + setFastFpsMode(0); + } + + CLOGD("DEBUG(%s[%d]):getFastFpsMode=%d", __FUNCTION__, __LINE__, getFastFpsMode()); +#endif + + /* Return OK means that the vision mode is enabled */ + if (checkVisionMode(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkVisionMode fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (getVisionMode() == true) { + CLOGD("DEBUG(%s[%d]): Vision mode enabled", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + if (checkRecordingHint(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkRecordingHint fail", __FUNCTION__, __LINE__); + + if (checkDualMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkDualMode fail", __FUNCTION__, __LINE__); + + if (checkDualRecordingHint(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkDualRecordingHint fail", __FUNCTION__, __LINE__); + + if (checkEffectHint(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkEffectHint fail", __FUNCTION__, __LINE__); + + if (checkEffectRecordingHint(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkEffectRecordingHint fail", __FUNCTION__, __LINE__); + + if (checkPreviewFps(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkPreviewFps fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (getRecordingRunning() == false) { + if (checkVideoSize(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkVideoSize fail", __FUNCTION__, __LINE__); + } + + if (getCameraId() == CAMERA_ID_BACK) { + if (checkFastFpsMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkFastFpsMode fail", __FUNCTION__, __LINE__); + } + + if (checkVideoStabilization(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkVideoStabilization fail", __FUNCTION__, __LINE__); + + if (checkSWVdisMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSWVdisMode fail", __FUNCTION__, __LINE__); + + bool swVdisUIMode = false; + m_setSWVdisUIMode(swVdisUIMode); + + if (checkVtMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkVtMode fail", __FUNCTION__, __LINE__); + + if (checkHWVdisMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkHWVdisMode fail", __FUNCTION__, __LINE__); + + if (check3dnrMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): check3dnrMode fail", __FUNCTION__, __LINE__); + + if (checkDrcMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkDrcMode fail", __FUNCTION__, __LINE__); + + if (checkOdcMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkOdcMode fail", __FUNCTION__, __LINE__); + + if (checkPreviewSize(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkPreviewSize fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkPreviewFormat(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkPreviewFormat fail", __FUNCTION__, __LINE__); + + if 
(checkPictureSize(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkPictureSize fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkPictureFormat(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkPictureFormat fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkJpegQuality(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkJpegQuality fail", __FUNCTION__, __LINE__); + + if (checkThumbnailSize(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkThumbnailSize fail", __FUNCTION__, __LINE__); + + if (checkThumbnailQuality(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkThumbnailQuality fail", __FUNCTION__, __LINE__); + + if (checkZoomLevel(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkZoomLevel fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkRotation(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkRotation fail", __FUNCTION__, __LINE__); + + if (checkAutoExposureLock(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkAutoExposureLock fail", __FUNCTION__, __LINE__); + + ret = checkExposureCompensation(params); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkExposureCompensation fail", __FUNCTION__, __LINE__); + return ret; + } + + if (checkMeteringMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkMeteringMode fail", __FUNCTION__, __LINE__); + + if (checkMeteringAreas(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkMeteringAreas fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkAntibanding(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkAntibanding fail", __FUNCTION__, __LINE__); + + if (checkSceneMode(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkSceneMode fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkFocusMode(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkFocusMode fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkFlashMode(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkFlashMode fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkWhiteBalanceMode(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkWhiteBalanceMode fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkAutoWhiteBalanceLock(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkAutoWhiteBalanceLock fail", __FUNCTION__, __LINE__); + +/* W/B setting is available, but it is not used */ +#if 0 + if (checkWbLevel(params) != NO_ERROR) { + ALOGE("ERR(%s[%d]): checkWbLevel fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } +#endif + + if (checkFocusAreas(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkFocusAreas fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkColorEffectMode(params) != NO_ERROR) { + CLOGE("ERR(%s[%d]): checkColorEffectMode fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (checkGpsAltitude(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkGpsAltitude fail", __FUNCTION__, __LINE__); + + if (checkGpsLatitude(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkGpsLatitude fail", __FUNCTION__, __LINE__); + + if (checkGpsLongitude(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkGpsLongitude fail", __FUNCTION__, __LINE__); + + if (checkGpsProcessingMethod(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkGpsProcessingMethod fail", __FUNCTION__, __LINE__); + + if (checkGpsTimeStamp(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkGpsTimeStamp fail", __FUNCTION__, __LINE__); + +#if 0 + if (checkCityId(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkCityId fail", __FUNCTION__, __LINE__); + + if (checkWeatherId(params) != NO_ERROR) + 
CLOGE("ERR(%s[%d]): checkWeatherId fail", __FUNCTION__, __LINE__); +#endif + + if (checkBrightness(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkBrightness fail", __FUNCTION__, __LINE__); + + if (checkSaturation(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSaturation fail", __FUNCTION__, __LINE__); + + if (checkSharpness(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSharpness fail", __FUNCTION__, __LINE__); + + if (checkHue(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkHue fail", __FUNCTION__, __LINE__); + + if (checkIso(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkIso fail", __FUNCTION__, __LINE__); + + if (checkContrast(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkContrast fail", __FUNCTION__, __LINE__); + + if (checkHdrMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkHdrMode fail", __FUNCTION__, __LINE__); + + if (checkShotMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkShotMode fail", __FUNCTION__, __LINE__); + + if (checkAntiShake(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkAntiShake fail", __FUNCTION__, __LINE__); + + if (checkVtMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkVtMode fail", __FUNCTION__, __LINE__); + + if (checkGamma(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkGamma fail", __FUNCTION__, __LINE__); + + if (checkSlowAe(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSlowAe fail", __FUNCTION__, __LINE__); + + if (checkScalableSensorMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkScalableSensorMode fail", __FUNCTION__, __LINE__); + + if (checkImageUniqueId(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkImageUniqueId fail", __FUNCTION__, __LINE__); + + if (checkSeriesShotMode(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSeriesShotMode fail", __FUNCTION__, __LINE__); + +#ifdef BURST_CAPTURE + if (checkSeriesShotFilePath(params) != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSeriesShotFilePath fail", __FUNCTION__, __LINE__); +#endif + + if (m_getRestartPreviewChecked() == true) { + CLOGD("DEBUG(%s[%d]):Need restart preview", __FUNCTION__, __LINE__); + m_setRestartPreview(m_flagRestartPreviewChecked); + } + + if (checkSetfileYuvRange() != NO_ERROR) + CLOGE("ERR(%s[%d]): checkSetfileYuvRange fail", __FUNCTION__, __LINE__); + + checkHorizontalViewAngle(); + + return ret; +} + +void ExynosCamera1Parameters::setDefaultParameter(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + CameraParameters p; + String8 tempStr; + char strBuf[256]; + + m_cameraInfo.autoFocusMacroPosition = ExynosCameraActivityAutofocus::AUTOFOCUS_MACRO_POSITION_BASE; + + /* Preview Size */ + getMaxPreviewSize(&m_cameraInfo.previewW, &m_cameraInfo.previewH); + m_setHwPreviewSize(m_cameraInfo.previewW, m_cameraInfo.previewH); + + tempStr.setTo(""); + if (getResolutionList(tempStr, m_staticInfo, &m_cameraInfo.previewW, &m_cameraInfo.previewH, MODE_PREVIEW, m_cameraId) != NO_ERROR) { + CLOGE("ERR(%s):getResolutionList(MODE_PREVIEW) fail", __FUNCTION__); + + m_cameraInfo.previewW = 640; + m_cameraInfo.previewH = 480; + tempStr = String8::format("%dx%d", m_cameraInfo.previewW, m_cameraInfo.previewH); + } + + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, tempStr.string()); + CLOGD("DEBUG(%s): Default preview size is %dx%d", __FUNCTION__, m_cameraInfo.previewW, m_cameraInfo.previewH); + p.setPreviewSize(m_cameraInfo.previewW, m_cameraInfo.previewH); + + /* Preview Format */ + tempStr.setTo(""); + tempStr = String8::format("%s,%s", CameraParameters::PIXEL_FORMAT_YUV420SP, CameraParameters::PIXEL_FORMAT_YUV420P); + 
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, tempStr); + p.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420SP); + + + /* Video Size */ + getMaxVideoSize(&m_cameraInfo.maxVideoW, &m_cameraInfo.maxVideoH); + + tempStr.setTo(""); + if (getResolutionList(tempStr, m_staticInfo, &m_cameraInfo.maxVideoW, &m_cameraInfo.maxVideoH, MODE_VIDEO, m_cameraId) != NO_ERROR) { + CLOGE("ERR(%s):getResolutionList(MODE_VIDEO) fail", __FUNCTION__); + + m_cameraInfo.videoW = 640; + m_cameraInfo.videoH = 480; + tempStr = String8::format("%dx%d", m_cameraInfo.maxVideoW, m_cameraInfo.maxVideoH); + } +#ifdef CAMERA_GED_FEATURE + else { +#ifdef USE_WQHD_RECORDING + if (m_addHiddenResolutionList(tempStr, m_staticInfo, 2560, 1440, MODE_VIDEO, m_cameraId) != NO_ERROR) { + CLOGW("WARN(%s):getResolutionList(MODE_VIDEO) fail", __FUNCTION__); + } +#endif +#ifdef USE_UHD_RECORDING + if (m_addHiddenResolutionList(tempStr, m_staticInfo, 3840, 2160, MODE_VIDEO, m_cameraId) != NO_ERROR) { + CLOGW("WARN(%s):getResolutionList(MODE_VIDEO) fail", __FUNCTION__); + } +#endif + } +#endif + + CLOGD("DEBUG(%s): KEY_SUPPORTED_VIDEO_SIZES %s", __FUNCTION__, tempStr.string()); + + p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, tempStr.string()); + + CLOGD("DEBUG(%s): Max video size is %dx%d", __FUNCTION__, m_cameraInfo.maxVideoW, m_cameraInfo.maxVideoH); + CLOGD("DEBUG(%s): Default video size is %dx%d", __FUNCTION__, m_cameraInfo.videoW, m_cameraInfo.videoH); + p.setVideoSize(m_cameraInfo.videoW, m_cameraInfo.videoH); + + /* Video Format */ + if (getAdaptiveCSCRecording() == true) { + CLOGI("INFO(%s[%d]):video_frame_foramt == YUV420SP_NV21", __FUNCTION__, __LINE__); + p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV420SP_NV21); + } else { + CLOGI("INFO(%s[%d]):video_frame_foramt == YUV420SP", __FUNCTION__, __LINE__); + p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV420SP); + } + + /* Preferred preview size for Video */ + tempStr.setTo(""); + tempStr = String8::format("%dx%d", m_cameraInfo.previewW, m_cameraInfo.previewH); + p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, tempStr.string()); + + /* Picture Size */ + getMaxPictureSize(&m_cameraInfo.pictureW, &m_cameraInfo.pictureH); + + tempStr.setTo(""); + if (getResolutionList(tempStr, m_staticInfo, &m_cameraInfo.pictureW, &m_cameraInfo.pictureH, MODE_PICTURE, m_cameraId) != NO_ERROR) { + CLOGE("ERR(%s):m_getResolutionList(MODE_PICTURE) fail", __FUNCTION__); + + m_cameraInfo.pictureW = 640; + m_cameraInfo.pictureW = 480; + tempStr = String8::format("%dx%d", m_cameraInfo.pictureW, m_cameraInfo.pictureH); + } + + p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, tempStr.string()); + CLOGD("DEBUG(%s): Default picture size is %dx%d", __FUNCTION__, m_cameraInfo.pictureW, m_cameraInfo.pictureH); + p.setPictureSize(m_cameraInfo.pictureW, m_cameraInfo.pictureH); + + /* Picture Format */ + p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, CameraParameters::PIXEL_FORMAT_JPEG); + p.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG); + + /* Jpeg Quality */ + p.set(CameraParameters::KEY_JPEG_QUALITY, "96"); /* maximum quality */ + + /* Thumbnail Size */ + getMaxThumbnailSize(&m_cameraInfo.thumbnailW, &m_cameraInfo.thumbnailH); + + tempStr.setTo(""); + if (getResolutionList(tempStr, m_staticInfo, &m_cameraInfo.thumbnailW, &m_cameraInfo.thumbnailH, MODE_THUMBNAIL, m_cameraId) != NO_ERROR) { + tempStr = String8::format("%dx%d", m_cameraInfo.thumbnailW, m_cameraInfo.thumbnailH); + } + /* 0x0 is 
no thumbnail mode */ + tempStr.append(",0x0"); + p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, tempStr.string()); + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, m_cameraInfo.thumbnailW); + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, m_cameraInfo.thumbnailH); + + /* Thumbnail Quality */ + p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "100"); + + /* Exposure */ + p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, getMinExposureCompensation()); + p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, getMaxExposureCompensation()); + p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, 0); + p.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, getExposureCompensationStep()); + + /* Auto Exposure Lock supported */ + if (getAutoExposureLockSupported() == true) + p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true"); + else + p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false"); + + /* Face Detection */ + p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, getMaxNumDetectedFaces()); + p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, 0); + + /* Video Sanptshot Supported */ + if (getVideoSnapshotSupported() == true) + p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "true"); + else + p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "false"); + + /* Video Stabilization Supported */ + if (getVideoStabilizationSupported() == true) + p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "true"); + else + p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "false"); + + /* Focus Mode */ + int focusMode = getSupportedFocusModes(); + tempStr.setTo(""); + if (focusMode & FOCUS_MODE_AUTO) { + tempStr.append(CameraParameters::FOCUS_MODE_AUTO); + } else if (focusMode & FOCUS_MODE_FIXED){ + tempStr.append(CameraParameters::FOCUS_MODE_FIXED); + } + if (focusMode & FOCUS_MODE_INFINITY) { + tempStr.append(","); + tempStr.append(CameraParameters::FOCUS_MODE_INFINITY); + } + if (focusMode & FOCUS_MODE_MACRO) { + tempStr.append(","); + tempStr.append(CameraParameters::FOCUS_MODE_MACRO); + } + if (focusMode & FOCUS_MODE_EDOF) { + tempStr.append(","); + tempStr.append(CameraParameters::FOCUS_MODE_EDOF); + } + if (focusMode & FOCUS_MODE_CONTINUOUS_VIDEO) { + tempStr.append(","); + tempStr.append(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO); + } + if (focusMode & FOCUS_MODE_CONTINUOUS_PICTURE) { + tempStr.append(","); + tempStr.append(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE); + } + if (focusMode & FOCUS_MODE_CONTINUOUS_PICTURE_MACRO) { + tempStr.append(","); + tempStr.append("continuous-picture-macro"); + } + + p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, + tempStr.string()); + + if (focusMode & FOCUS_MODE_AUTO) + p.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_AUTO); + else if (focusMode & FOCUS_MODE_CONTINUOUS_PICTURE) + p.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE); + else if (focusMode & FOCUS_MODE_CONTINUOUS_VIDEO) + p.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO); + else if (focusMode & FOCUS_MODE_FIXED) + p.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_FIXED); + else + p.set(CameraParameters::KEY_FOCUS_MODE, + CameraParameters::FOCUS_MODE_INFINITY); + +/*TODO: This values will be changed */ +#define BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR "0.10,1.20,Infinity" +#define FRONT_CAMERA_FOCUS_DISTANCES_STR "0.20,0.25,Infinity" + +#define BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR 
"0.10,0.20,Infinity" +#define BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR "0.10,1.20,Infinity" + + /* Focus Distances */ + if (getCameraId() == CAMERA_ID_BACK) + p.set(CameraParameters::KEY_FOCUS_DISTANCES, + BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR); + else + p.set(CameraParameters::KEY_FOCUS_DISTANCES, + FRONT_CAMERA_FOCUS_DISTANCES_STR); + + p.set(CameraParameters::FOCUS_DISTANCE_INFINITY, "Infinity"); + + /* Max number of Focus Areas */ + p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, 0); + if (focusMode & FOCUS_MODE_TOUCH) { + p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, 1); + p.set(CameraParameters::KEY_FOCUS_AREAS, "(0,0,0,0,0)"); + } + + /* Flash */ + int flashMode = getSupportedFlashModes(); + tempStr.setTo(""); + if (flashMode & FLASH_MODE_OFF) { + tempStr.append(CameraParameters::FLASH_MODE_OFF); + } + if (flashMode & FLASH_MODE_AUTO) { + tempStr.append(","); + tempStr.append(CameraParameters::FLASH_MODE_AUTO); + } + if (flashMode & FLASH_MODE_ON) { + tempStr.append(","); + tempStr.append(CameraParameters::FLASH_MODE_ON); + } + if (flashMode & FLASH_MODE_RED_EYE) { + tempStr.append(","); + tempStr.append(CameraParameters::FLASH_MODE_RED_EYE); + } + if (flashMode & FLASH_MODE_TORCH) { + tempStr.append(","); + tempStr.append(CameraParameters::FLASH_MODE_TORCH); + } + + p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, tempStr.string()); + p.set(CameraParameters::KEY_FLASH_MODE, CameraParameters::FLASH_MODE_OFF); + + /* scene mode */ + int sceneMode = getSupportedSceneModes(); + tempStr.setTo(""); + if (sceneMode & SCENE_MODE_AUTO) { + tempStr.append(CameraParameters::SCENE_MODE_AUTO); + } + if (sceneMode & SCENE_MODE_ACTION) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_ACTION); + } + if (sceneMode & SCENE_MODE_PORTRAIT) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_PORTRAIT); + } + if (sceneMode & SCENE_MODE_LANDSCAPE) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_LANDSCAPE); + } + if (sceneMode & SCENE_MODE_NIGHT) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_NIGHT); + } + if (sceneMode & SCENE_MODE_NIGHT_PORTRAIT) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_NIGHT_PORTRAIT); + } + if (sceneMode & SCENE_MODE_THEATRE) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_THEATRE); + } + if (sceneMode & SCENE_MODE_BEACH) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_BEACH); + } + if (sceneMode & SCENE_MODE_SNOW) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_SNOW); + } + if (sceneMode & SCENE_MODE_SUNSET) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_SUNSET); + } + if (sceneMode & SCENE_MODE_STEADYPHOTO) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_STEADYPHOTO); + } + if (sceneMode & SCENE_MODE_FIREWORKS) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_FIREWORKS); + } + if (sceneMode & SCENE_MODE_SPORTS) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_SPORTS); + } + if (sceneMode & SCENE_MODE_PARTY) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_PARTY); + } + if (sceneMode & SCENE_MODE_CANDLELIGHT) { + tempStr.append(","); + tempStr.append(CameraParameters::SCENE_MODE_CANDLELIGHT); + } + + p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, + tempStr.string()); + p.set(CameraParameters::KEY_SCENE_MODE, + CameraParameters::SCENE_MODE_AUTO); + + if (getHalVersion() != 
IS_HAL_VER_3_2) { + /* effect */ + int effect = getSupportedColorEffects(); + tempStr.setTo(""); + if (effect & EFFECT_NONE) { + tempStr.append(CameraParameters::EFFECT_NONE); + } + if (effect & EFFECT_MONO) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_MONO); + } + if (effect & EFFECT_NEGATIVE) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_NEGATIVE); + } + if (effect & EFFECT_SOLARIZE) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_SOLARIZE); + } + if (effect & EFFECT_SEPIA) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_SEPIA); + } + if (effect & EFFECT_POSTERIZE) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_POSTERIZE); + } + if (effect & EFFECT_WHITEBOARD) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_WHITEBOARD); + } + if (effect & EFFECT_BLACKBOARD) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_BLACKBOARD); + } + if (effect & EFFECT_AQUA) { + tempStr.append(","); + tempStr.append(CameraParameters::EFFECT_AQUA); + } + + p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, tempStr.string()); + p.set(CameraParameters::KEY_EFFECT, CameraParameters::EFFECT_NONE); + } + + /* white balance */ + int whiteBalance = getSupportedWhiteBalance(); + tempStr.setTo(""); + if (whiteBalance & WHITE_BALANCE_AUTO) { + tempStr.append(CameraParameters::WHITE_BALANCE_AUTO); + } + if (whiteBalance & WHITE_BALANCE_INCANDESCENT) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_INCANDESCENT); + } + if (whiteBalance & WHITE_BALANCE_FLUORESCENT) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_FLUORESCENT); + } + if (whiteBalance & WHITE_BALANCE_WARM_FLUORESCENT) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT); + } + if (whiteBalance & WHITE_BALANCE_DAYLIGHT) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_DAYLIGHT); + } + if (whiteBalance & WHITE_BALANCE_CLOUDY_DAYLIGHT) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT); + } + if (whiteBalance & WHITE_BALANCE_TWILIGHT) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_TWILIGHT); + } + if (whiteBalance & WHITE_BALANCE_SHADE) { + tempStr.append(","); + tempStr.append(CameraParameters::WHITE_BALANCE_SHADE); + } + + p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, + tempStr.string()); + p.set(CameraParameters::KEY_WHITE_BALANCE, CameraParameters::WHITE_BALANCE_AUTO); + + /* Auto Whitebalance Lock supported */ + if (getAutoWhiteBalanceLockSupported() == true) + p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true"); + else + p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false"); + + /* anti banding */ + tempStr.setTo(""); + int antiBanding = getSupportedAntibanding(); + + if (antiBanding & ANTIBANDING_AUTO) { + tempStr.append(CameraParameters::ANTIBANDING_AUTO); + } + if (antiBanding & ANTIBANDING_50HZ) { + tempStr.append(","); + tempStr.append(CameraParameters::ANTIBANDING_50HZ); + } + if (antiBanding & ANTIBANDING_60HZ) { + tempStr.append(","); + tempStr.append(CameraParameters::ANTIBANDING_60HZ); + } + if (antiBanding & ANTIBANDING_OFF) { + tempStr.append(","); + tempStr.append(CameraParameters::ANTIBANDING_OFF); + } + + p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, + tempStr.string()); + + p.set(CameraParameters::KEY_ANTIBANDING, CameraParameters::ANTIBANDING_AUTO); + + /* rotation */ + 
p.set(CameraParameters::KEY_ROTATION, 0); + + /* view angle */ + p.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle()); + p.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, getVerticalViewAngle()); + + /* metering */ + p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, getMaxNumMeteringAreas()); + p.set(CameraParameters::KEY_METERING_AREAS, ""); + + /* zoom */ + if (getZoomSupported() == true) { + int maxZoom = getMaxZoomLevel(); + ALOGI("INFO(%s):getMaxZoomLevel(%d)", __FUNCTION__, maxZoom); + + if (0 < maxZoom) { + p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "true"); + + if (getSmoothZoomSupported() == true) + p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true"); + else + p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + + p.set(CameraParameters::KEY_MAX_ZOOM, maxZoom - 1); + p.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + + int max_zoom_ratio = (int)getMaxZoomRatio(); + tempStr.setTo(""); + if (getZoomRatioList(tempStr, maxZoom, max_zoom_ratio, m_staticInfo->zoomRatioList) == NO_ERROR) + p.set(CameraParameters::KEY_ZOOM_RATIOS, tempStr.string()); + else + p.set(CameraParameters::KEY_ZOOM_RATIOS, "100"); + + p.set("constant-growth-rate-zoom-supported", "true"); + + ALOGV("INFO(%s):zoomRatioList=%s", "setDefaultParameter", tempStr.string()); + } else { + p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); + p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + p.set(CameraParameters::KEY_MAX_ZOOM, ZOOM_LEVEL_0); + p.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + } + } else { + p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); + p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + p.set(CameraParameters::KEY_MAX_ZOOM, ZOOM_LEVEL_0); + p.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + } + + /* fps */ + uint32_t minFpsRange = 15; + uint32_t maxFpsRange = 30; + + getPreviewFpsRange(&minFpsRange, &maxFpsRange); +#ifdef TEST_GED_HIGH_SPEED_RECORDING + maxFpsRange = 120; +#endif + CLOGI("INFO(%s[%d]):minFpsRange=%d, maxFpsRange=%d", "getPreviewFpsRange", __LINE__, (int)minFpsRange, (int)maxFpsRange); + int minFps = (minFpsRange == 0) ? 0 : (int)minFpsRange; + int maxFps = (maxFpsRange == 0) ? 0 : (int)maxFpsRange; + + tempStr.setTo(""); + snprintf(strBuf, 256, "%d", minFps); + tempStr.append(strBuf); + + for (int i = minFps + 1; i <= maxFps; i++) { + snprintf(strBuf, 256, ",%d", i); + tempStr.append(strBuf); + } + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, tempStr.string()); + + minFpsRange = minFpsRange * 1000; + maxFpsRange = maxFpsRange * 1000; + + tempStr.setTo(""); + getSupportedFpsList(tempStr, minFpsRange, maxFpsRange, m_cameraId, m_staticInfo); + CLOGI("INFO(%s):supportedFpsList=%s", "setDefaultParameter", tempStr.string()); + p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, tempStr.string()); + /* p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000),(30000,30000)"); */ + + /* limit 30 fps on default setting. 
*/ + if (30 < maxFps) + maxFps = 30; + p.setPreviewFrameRate(maxFps); + + if (30000 < maxFpsRange) + maxFpsRange = 30000; + snprintf(strBuf, 256, "%d,%d", maxFpsRange/2, maxFpsRange); + p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, strBuf); + + /* focal length */ + int num = 0; + int den = 0; + int precision = 0; + getFocalLength(&num, &den); + + switch (den) { + default: + case 1000: + precision = 3; + break; + case 100: + precision = 2; + break; + case 10: + precision = 1; + break; + case 1: + precision = 0; + break; + } + snprintf(strBuf, 256, "%.*f", precision, ((float)num / (float)den)); + p.set(CameraParameters::KEY_FOCAL_LENGTH, strBuf); + + /* Additional params. */ + p.set("contrast", "auto"); + p.set("iso", "auto"); + + // Set supported ISO values + int isoValues = getSupportedISO(); + tempStr.setTo(""); + if (isoValues & ISO_AUTO) { + tempStr.append(CameraParameters::ISO_AUTO); + } + if (isoValues & ISO_100) { + tempStr.append(","); + tempStr.append(CameraParameters::ISO_100); + } + if (isoValues & ISO_200) { + tempStr.append(","); + tempStr.append(CameraParameters::ISO_200); + } + if (isoValues & ISO_400) { + tempStr.append(","); + tempStr.append(CameraParameters::ISO_400); + } + if (isoValues & ISO_800) { + tempStr.append(","); + tempStr.append(CameraParameters::ISO_800); + } + + p.set("iso-values", + tempStr.string()); + + p.set("wdr", 0); + p.set("hdr-mode", 0); + p.set("metering", "center"); + + // Set Supported Metering Mode + int meteringModes = getSupportedMeteringMode(); + tempStr.setTo(""); + if (meteringModes & METERING_MODE_MATRIX) { + tempStr.append(CameraParameters::METERING_MATRIX); + } + if (meteringModes & METERING_MODE_CENTER) { + tempStr.append(","); + tempStr.append(CameraParameters::METERING_CENTER); + } + if (meteringModes & METERING_MODE_SPOT) { + tempStr.append(","); + tempStr.append(CameraParameters::METERING_SPOT); + } + + p.set("brightness", 0); + p.set("brightness-max", 2); + p.set("brightness-min", -2); + + p.set("saturation", 0); + p.set("saturation-max", 2); + p.set("saturation-min", -2); + + p.set("sharpness", 0); + p.set("sharpness-max", 2); + p.set("sharpness-min", -2); + + p.set("hue", 0); + p.set("hue-max", 2); + p.set("hue-min", -2); + + /* For Series shot */ + p.set("burst-capture", 0); + p.set("best-capture", 0); + + /* fnumber */ + getFnumber(&num, &den); + p.set("fnumber-value-numerator", num); + p.set("fnumber-value-denominator", den); + + /* max aperture value */ + getApertureValue(&num, &den); + p.set("maxaperture-value-numerator", num); + p.set("maxaperture-value-denominator", den); + + /* focal length */ + getFocalLength(&num, &den); + p.set("focallength-value-numerator", num); + p.set("focallength-value-denominator", den); + + /* focal length in 35mm film */ + int focalLengthIn35mmFilm = 0; + focalLengthIn35mmFilm = getFocalLengthIn35mmFilm(); + p.set("focallength-35mm-value", focalLengthIn35mmFilm); + +#if defined(TEST_APP_HIGH_SPEED_RECORDING) + p.set("fast-fps-mode", 0); +#endif + p.set(CameraParameters::KEY_DYNAMIC_RANGE_CONTROL, "off"); + + p.set("imageuniqueid-value", 0); + + p.set("drc", "false"); + p.set("3dnr", "false"); + p.set("odc", "false"); + + p.set("effectrecording-hint", 0); + + m_params = p; + + /* make sure m_secCamera has all the settings we do. applications + * aren't required to call setParameters themselves (only if they + * want to change something. 
+ */ + ret = setParameters(p); + if (ret < 0) + CLOGE("ERR(%s[%d]):setParameters failed", __FUNCTION__, __LINE__); +} + +status_t ExynosCamera1Parameters::checkVisionMode(const CameraParameters& params) +{ + status_t ret; + + /* Check vision mode */ + int intelligent_mode = params.getInt("intelligent-mode"); + CLOGD("DEBUG(%s):intelligent_mode : %d", "setParameters", intelligent_mode); + + ret = m_setIntelligentMode(intelligent_mode); + if (ret != NO_ERROR) { + CLOGE("ERR(%s): Invalid Intelligent mode", "setParameters"); + return ret; + } + m_params.set("intelligent-mode", intelligent_mode); + + CLOGD("DEBUG(%s):intelligent_mode(%d) getVisionMode(%d)", "setParameters", intelligent_mode, getVisionMode()); + + /* Smart stay needs to skip more frames */ + int skipCompensation = m_frameSkipCounter.getCompensation(); + if (intelligent_mode == 1) { + m_frameSkipCounter.setCompensation(skipCompensation + SMART_STAY_SKIP_COMPENSATION); + } else { + m_frameSkipCounter.setCompensation(skipCompensation); + } + + if (getVisionMode() == true) { + /* preset for each vision mode */ + switch (intelligent_mode) { + case 2: + m_setVisionModeFps(10); + break; + case 3: + m_setVisionModeFps(5); + break; + default: + m_setVisionModeFps(10); + break; + } + +/* Vision mode custom frame rate will be enabled when application is ready */ +#if 0 + /* If user wants to set custom fps, vision mode sets max fps as the frame rate */ + int minFps = -1; + int maxFps = -1; + params.getPreviewFpsRange(&minFps, &maxFps); + + if (minFps > 0 && maxFps > 0) { + CLOGD("DEBUG(%s): set user frame rate (%d)", __FUNCTION__, maxFps / 1000); + m_setVisionModeFps(maxFps / 1000); + } +#endif + + /* smart-screen-exposure */ + int newVisionAeTarget = params.getInt("smart-screen-exposure"); + if (0 < newVisionAeTarget) { + CLOGD("DEBUG(%s):newVisionAeTarget : %d", "setParameters", newVisionAeTarget); + m_setVisionModeAeTarget(newVisionAeTarget); + m_params.set("smart-screen-exposure", newVisionAeTarget); + } + + return OK; + } else { + return NO_ERROR; + } +} + +status_t ExynosCamera1Parameters::m_setIntelligentMode(int intelligentMode) +{ + status_t ret = NO_ERROR; + bool visionMode = false; + + m_cameraInfo.intelligentMode = intelligentMode; + + if (intelligentMode > 1) { + if (m_staticInfo->visionModeSupport == true) { + visionMode = true; + } else { + CLOGE("ERR(%s): tried to set vision mode(not supported)", "setParameters"); + ret = BAD_VALUE; + } + } else if (getVisionMode()) { + CLOGE("ERR(%s[%d]):vision mode cannot be changed before stopPreview", __FUNCTION__, __LINE__); + visionMode = true; + } + + m_setVisionMode(visionMode); + + return ret; + } + +int ExynosCamera1Parameters::getIntelligentMode(void) +{ + return m_cameraInfo.intelligentMode; +} + +void ExynosCamera1Parameters::m_setVisionMode(bool vision) +{ + m_cameraInfo.visionMode = vision; +} + +bool ExynosCamera1Parameters::getVisionMode(void) +{ + return m_cameraInfo.visionMode; +} + +void ExynosCamera1Parameters::m_setVisionModeFps(int fps) +{ + m_cameraInfo.visionModeFps = fps; +} + +int ExynosCamera1Parameters::getVisionModeFps(void) +{ + return m_cameraInfo.visionModeFps; +} + +void ExynosCamera1Parameters::m_setVisionModeAeTarget(int ae) +{ + m_cameraInfo.visionModeAeTarget = ae; +} + +int ExynosCamera1Parameters::getVisionModeAeTarget(void) +{ + return m_cameraInfo.visionModeAeTarget; +} + +status_t ExynosCamera1Parameters::checkEffectRecordingHint(const CameraParameters& params) +{ + /* Effect recording hint */ + bool flagEffectRecordingHint = false; + int 
newEffectRecordingHint = params.getInt("effectrecording-hint"); + int curEffectRecordingHint = m_params.getInt("effectrecording-hint"); + + if (newEffectRecordingHint < 0) { + CLOGV("DEBUG(%s):Invalid newEffectRecordingHint", "setParameters"); + return NO_ERROR; + } + + if (newEffectRecordingHint != curEffectRecordingHint) { + CLOGD("DEBUG(%s):newEffectRecordingHint : %d", "setParameters", newEffectRecordingHint); + if (newEffectRecordingHint == 1) + flagEffectRecordingHint = true; + m_setEffectRecordingHint(flagEffectRecordingHint); + m_params.set("effectrecording-hint", newEffectRecordingHint); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setEffectRecordingHint(bool hint) +{ + ExynosCameraActivityAutofocus *autoFocusMgr = m_activityControl->getAutoFocusMgr(); + ExynosCameraActivityFlash *flashMgr = m_activityControl->getFlashMgr(); + + m_cameraInfo.effectRecordingHint = hint; + + if (hint) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_ON); + } else if (!hint && !getRecordingHint() && !getDualRecordingHint()) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_OFF); + } + autoFocusMgr->setRecordingHint(hint); + flashMgr->setRecordingHint(hint); +} + + +status_t ExynosCamera1Parameters::checkSWVdisMode(const CameraParameters& params) +{ + const char *newSwVdis = params.get("sw-vdis"); + bool currSwVdis = getSWVdisMode(); + if (newSwVdis != NULL) { + CLOGD("DEBUG(%s):newSwVdis %s", "setParameters", newSwVdis); + bool swVdisMode = true; + + if (!strcmp(newSwVdis, "off")) + swVdisMode = false; + + m_setSWVdisMode(swVdisMode); + m_params.set("sw-vdis", newSwVdis); + } + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::checkHWVdisMode(__unused const CameraParameters& params) +{ + status_t ret = NO_ERROR; + + bool hwVdisMode = this->getHWVdisMode(); + + CLOGD("DEBUG(%s):newHwVdis %d", "setParameters", hwVdisMode); + + ret = setDisEnable(hwVdisMode); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDisEnable(%d) fail", __FUNCTION__, __LINE__, hwVdisMode); + } else { + if (m_flagHWVDisMode != hwVdisMode) { + m_flagHWVDisMode = hwVdisMode; + } + } + + return ret; +} + +bool ExynosCamera1Parameters::isSWVdisMode(void) +{ + bool swVDIS_mode = false; + bool use3DNR_dmaout = false; + + int nPreviewW, nPreviewH; + getPreviewSize(&nPreviewW, &nPreviewH); + + if ((getRecordingHint() == true) && + (getHighSpeedRecording() == false) && + (use3DNR_dmaout == false) && + (getSWVdisUIMode() == true) && + ((nPreviewW == 1920 && nPreviewH == 1080) || (nPreviewW == 1280 && nPreviewH == 720))) + { + swVDIS_mode = true; + } + + return swVDIS_mode; +} + +bool ExynosCamera1Parameters::isSWVdisModeWithParam(int nPreviewW, int nPreviewH) +{ + bool swVDIS_mode = false; + bool use3DNR_dmaout = false; + + if ((getRecordingHint() == true) && + (getHighSpeedRecording() == false) && + (use3DNR_dmaout == false) && + (getSWVdisUIMode() == true) && + ((nPreviewW == 1920 && nPreviewH == 1080) || (nPreviewW == 1280 && nPreviewH == 720))) + { + swVDIS_mode = true; + } + + return swVDIS_mode; +} + +bool ExynosCamera1Parameters::getHWVdisMode(void) +{ + bool ret = this->getVideoStabilization(); + + /* + * Only true case, + * we will test whether support or not. 
+ */ + if (ret == true) { + switch (getCameraId()) { + case CAMERA_ID_BACK: +#ifdef SUPPORT_BACK_HW_VDIS + ret = SUPPORT_BACK_HW_VDIS; +#else + ret = false; +#endif + break; + case CAMERA_ID_FRONT: +#ifdef SUPPORT_FRONT_HW_VDIS + ret = SUPPORT_FRONT_HW_VDIS; +#else + ret = false; +#endif + break; + default: + ret = false; + break; + } + } + + return ret; +} + +int ExynosCamera1Parameters::getHWVdisFormat(void) +{ + return V4L2_PIX_FMT_YUYV; +} + +void ExynosCamera1Parameters::m_setSWVdisMode(bool swVdis) +{ + m_cameraInfo.swVdisMode = swVdis; +} + +bool ExynosCamera1Parameters::getSWVdisMode(void) +{ + return m_cameraInfo.swVdisMode; +} + +void ExynosCamera1Parameters::m_setSWVdisUIMode(bool swVdisUI) +{ + m_cameraInfo.swVdisUIMode = swVdisUI; +} + +bool ExynosCamera1Parameters::getSWVdisUIMode(void) +{ + return m_cameraInfo.swVdisUIMode; +} + +status_t ExynosCamera1Parameters::m_adjustPreviewSize(__unused int previewW, __unused int previewH, + int *newPreviewW, int *newPreviewH, + int *newCalHwPreviewW, int *newCalHwPreviewH) +{ + /* hack : when app give 1446, we calibrate to 1440 */ + if (*newPreviewW == 1446 && *newPreviewH == 1080) { + CLOGW("WARN(%s):Invalid previewSize(%d/%d). so, calibrate to (1440/%d)", __FUNCTION__, *newPreviewW, *newPreviewH, *newPreviewH); + *newPreviewW = 1440; + } + + if (getRecordingHint() == true && getHighSpeedRecording() == true) { + int sizeList[SIZE_LUT_INDEX_END]; + + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + /* On high-speed recording, scaling-up by SCC/SCP occurs the IS-ISP performance degradation. + The scaling-up might be done by GSC for recording */ + *newPreviewW = (sizeList[BDS_W] < sizeList[TARGET_W])? sizeList[BDS_W] : sizeList[TARGET_W]; + *newPreviewH = (sizeList[BDS_H] < sizeList[TARGET_H])? 
sizeList[BDS_H] : sizeList[TARGET_H]; + } else { + ALOGE("ERR(%s):m_getPreviewSizeList() fail", __FUNCTION__); + } + } + + /* calibrate H/W aligned size*/ + if (getRecordingHint() == true) { + int videoW = 0, videoH = 0; + ExynosRect bdsRect; + + getVideoSize(&videoW, &videoH); + + if ((videoW <= *newPreviewW) && (videoH <= *newPreviewH)) { + { +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + if ((videoW <= 1920 || videoH <= 1080) && + (1920 < *newPreviewW || 1080 < *newPreviewH)) { + + float videoRatio = ROUND_OFF(((float)videoW / (float)videoH), 2); + + if (videoRatio == 1.33f) { /* 4:3 */ + *newCalHwPreviewW = 1440; + *newCalHwPreviewH = 1080; + } else if (videoRatio == 1.77f) { /* 16:9 */ + *newCalHwPreviewW = 1920; + *newCalHwPreviewH = 1080; + } else if (videoRatio == 1.00f) { /* 1:1 */ + *newCalHwPreviewW = 1088; + *newCalHwPreviewH = 1088; + } else { + *newCalHwPreviewW = *newPreviewW; + *newCalHwPreviewH = *newPreviewH; + } + + if (*newCalHwPreviewW != *newPreviewW || + *newCalHwPreviewH != *newPreviewH) { + CLOGW("WARN(%s[%d]):Limit hw preview size until %d x %d when videoSize(%d x %d)", + __FUNCTION__, __LINE__, *newCalHwPreviewW, *newCalHwPreviewH, videoW, videoH); + } + } else +#endif + { + *newCalHwPreviewW = *newPreviewW; + *newCalHwPreviewH = *newPreviewH; + } + } + } else { + /* video size > preview size : Use BDS size for SCP output size */ + { + ALOGV("DEBUG(%s[%d]):preview(%dx%d) is smaller than video(%dx%d)", + __FUNCTION__, __LINE__, *newPreviewW, *newPreviewH, videoW, videoH); + + /* If the video ratio is differ with preview ratio, + the default ratio is set into preview ratio */ + if (SIZE_RATIO(*newPreviewW, *newPreviewH) != SIZE_RATIO(videoW, videoH)) + ALOGW("WARN(%s): preview ratio(%dx%d) is not matched with video ratio(%dx%d)", __FUNCTION__, + *newPreviewW, *newPreviewH, videoW, videoH); + + if (m_isSupportedPreviewSize(*newPreviewW, *newPreviewH) == false) { + ALOGE("ERR(%s): new preview size is invalid(%dx%d)", + __FUNCTION__, *newPreviewW, *newPreviewH); + return BAD_VALUE; + } + + /* + * This call is to get real preview size. + * so, HW dis size must not be added. 
+ */ + m_getPreviewBdsSize(&bdsRect); + + if ((bdsRect.w <= videoW) && (bdsRect.h <= videoH)) { + *newCalHwPreviewW = bdsRect.w; + *newCalHwPreviewH = bdsRect.h; + } + else { + *newCalHwPreviewW = videoW; + *newCalHwPreviewH = videoH; + + if (SIZE_RATIO(*newPreviewW, *newPreviewH) != SIZE_RATIO(videoW, videoH)) { + int cropX, cropY, cropW, cropH; + + getCropRectAlign(videoW, videoH, + *newPreviewW, *newPreviewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + 0, 1); + + *newCalHwPreviewW = cropW; + *newCalHwPreviewH = cropH; + } + } + } + } + } else if (getHighResolutionCallbackMode() == true) { + if(CAMERA_LCD_SIZE == LCD_SIZE_1280_720) { + *newCalHwPreviewW = 1280; + *newCalHwPreviewH = 720; + } else { + *newCalHwPreviewW = 1920; + *newCalHwPreviewH = 1080; + } + } else { + *newCalHwPreviewW = *newPreviewW; + *newCalHwPreviewH = *newPreviewH; + } + +#if defined(SCALER_MAX_SCALE_DOWN_RATIO) + { + ExynosRect bdsRect; + + if (getPreviewBdsSize(&bdsRect) == NO_ERROR) { + int minW = bdsRect.w / SCALER_MAX_SCALE_DOWN_RATIO; + int minH = bdsRect.h / SCALER_MAX_SCALE_DOWN_RATIO; + int adjustW = 1; + int adjustH = 1; + + if ((*newCalHwPreviewW < minW) || (*newCalHwPreviewH < minH)) { + if (*newCalHwPreviewW < minW) { + adjustW = ROUND_UP(minW / *newCalHwPreviewW, 2); + CLOGW("WARN(%s): newCalHwPreviewW=%d, minW=%d, adjustW=%d", __FUNCTION__, + *newCalHwPreviewW, minW, adjustW); + } + if (*newCalHwPreviewH < minH) { + adjustH = ROUND_UP(minH / *newCalHwPreviewH, 2); + CLOGW("WARN(%s): newCalHwPreviewH=%d, minH=%d, adjustH=%d", __FUNCTION__, + *newCalHwPreviewH, minH, adjustH); + } + adjustW = (adjustW > adjustH) ? adjustW : adjustH; + *newCalHwPreviewW *= adjustW; + *newCalHwPreviewH *= adjustW; + } + } + } +#endif + + if (getHalVersion() == IS_HAL_VER_3_2) { +#if defined(ENABLE_FULL_FRAME) + ExynosRect bdsRect; + getPreviewBdsSize(&bdsRect); + *newCalHwPreviewW = bdsRect.w; + *newCalHwPreviewH = bdsRect.h; +#else + /* 1. try to get exact ratio */ + if (m_isSupportedPreviewSize(*newPreviewW, *newPreviewH) == false) { + ALOGE("ERR(%s): new preview size is invalid(%dx%d)", "Parameters", newPreviewW, newPreviewH); + } + + /* 2. get bds size to set size to scp node due to internal scp buffer */ + int sizeList[SIZE_LUT_INDEX_END]; + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + *newCalHwPreviewW = sizeList[BDS_W]; + *newCalHwPreviewH = sizeList[BDS_H]; + } else { + ExynosRect bdsRect; + getPreviewBdsSize(&bdsRect); + *newCalHwPreviewW = bdsRect.w; + *newCalHwPreviewH = bdsRect.h; + } +#endif + } + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::setCropRegion(int x, int y, int w, int h) +{ + status_t ret = NO_ERROR; + + ret = setMetaCtlCropRegion(&m_metadata, x, y, w, h); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setMetaCtlCropRegion(%d, %d, %d, %d)", + __FUNCTION__, __LINE__, x, y, w, h); + } + + return ret; +} + +void ExynosCamera1Parameters::m_adjustAeMode(enum aa_aemode curAeMode, enum aa_aemode *newAeMode) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + int curMeteringMode = getMeteringMode(); + if (curAeMode == AA_AEMODE_OFF) { + switch(curMeteringMode) { + case METERING_MODE_AVERAGE: + *newAeMode = AA_AEMODE_AVERAGE; + break; + case METERING_MODE_CENTER: + *newAeMode = AA_AEMODE_CENTER; + break; + case METERING_MODE_MATRIX: + *newAeMode = AA_AEMODE_MATRIX; + break; + case METERING_MODE_SPOT: + *newAeMode = AA_AEMODE_SPOT; + break; + default: + *newAeMode = curAeMode; + break; + } + } + } +} + +/* TODO: Who explane this offset value? 
*/ +/* F/W's middle value is 5, and step is -4, -3, -2, -1, 0, 1, 2, 3, 4 */ +void ExynosCamera1Parameters::m_setExposureCompensation(int32_t value) +{ + setMetaCtlExposureCompensation(&m_metadata, value); +#if defined(USE_SUBDIVIDED_EV) + setMetaCtlExposureCompensationStep(&m_metadata, m_staticInfo->exposureCompensationStep); +#endif +} + +int32_t ExynosCamera1Parameters::getExposureCompensation(void) +{ + int32_t expCompensation; + getMetaCtlExposureCompensation(&m_metadata, &expCompensation); + return expCompensation; +} + +void ExynosCamera1Parameters::m_setMeteringAreas(uint32_t num, ExynosRect2 *rect2s, int *weights) +{ + uint32_t maxNumMeteringAreas = getMaxNumMeteringAreas(); + + if (maxNumMeteringAreas == 0) { + CLOGV("DEBUG(%s):maxNumMeteringAreas is 0. so, ignored", __FUNCTION__); + return; + } + + if (maxNumMeteringAreas < num) + num = maxNumMeteringAreas; + + if (getAutoExposureLock() == true) { + CLOGD("DEBUG(%s):autoExposure is Locked", __FUNCTION__); + return; + } + + if (num == 1) { +#ifdef CAMERA_GED_FEATURE + int meteringMode = getMeteringMode(); + + if (isRectNull(&rect2s[0]) == true) { + switch (meteringMode) { + case METERING_MODE_SPOT: + /* + * Even if SPOT metering mode, area must set valid values, + * but areas was invalid values, we change mode to CENTER. + */ + m_setMeteringMode(METERING_MODE_CENTER); + m_cameraInfo.isTouchMetering = false; + break; + case METERING_MODE_AVERAGE: + case METERING_MODE_CENTER: + case METERING_MODE_MATRIX: + default: + /* adjust metering setting */ + break; + } + } else { + switch (meteringMode) { + case METERING_MODE_CENTER: + /* + * SPOT metering mode in GED camera App was not set METERING_MODE_SPOT, + * but set metering areas only. + */ + m_setMeteringMode(METERING_MODE_SPOT); + m_cameraInfo.isTouchMetering = true; + break; + case METERING_MODE_AVERAGE: + case METERING_MODE_MATRIX: + case METERING_MODE_SPOT: + default: + /* adjust metering setting */ + break; + } + } +#endif + } else { + if (num > 1 && isRectEqual(&rect2s[0], &rect2s[1]) == false) { + /* if MATRIX mode support, mode set METERING_MODE_MATRIX */ + m_setMeteringMode(METERING_MODE_AVERAGE); + m_cameraInfo.isTouchMetering = false; + } else { + m_setMeteringMode(METERING_MODE_AVERAGE); + m_cameraInfo.isTouchMetering = false; + } + } + + ExynosRect cropRegionRect; + ExynosRect2 newRect2; + + getHwBayerCropRegion(&cropRegionRect.w, &cropRegionRect.h, &cropRegionRect.x, &cropRegionRect.y); + + for (uint32_t i = 0; i < num; i++) { + bool isChangeMeteringArea = false; +#ifdef CAMERA_GED_FEATURE + if (isRectNull(&rect2s[i]) == false) + isChangeMeteringArea = true; + else + isChangeMeteringArea = false; +#else + if ((isRectNull(&rect2s[i]) == false) ||((isRectNull(&rect2s[i]) == true) && (getMeteringMode() == METERING_MODE_SPOT))) + isChangeMeteringArea = true; +#ifdef TOUCH_AE + else if((getMeteringMode() == METERING_MODE_SPOT_TOUCH) || (getMeteringMode() == METERING_MODE_MATRIX_TOUCH) + || (getMeteringMode() == METERING_MODE_CENTER_TOUCH) || (getMeteringMode() == METERING_MODE_AVERAGE_TOUCH)) + isChangeMeteringArea = true; +#endif + else + isChangeMeteringArea = false; +#endif + if (isChangeMeteringArea == true) { + CLOGD("DEBUG(%s) (%d %d %d %d) %d", __FUNCTION__, rect2s->x1, rect2s->y1, rect2s->x2, rect2s->y2, getMeteringMode()); + newRect2 = convertingAndroidArea2HWAreaBcropOut(&rect2s[i], &cropRegionRect); + setMetaCtlAeRegion(&m_metadata, newRect2.x1, newRect2.y1, + newRect2.x2, newRect2.y2, weights[i]); + } + } +} + +status_t 
ExynosCamera1Parameters::checkPictureFormat(const CameraParameters& params) +{ + int curPictureFormat = 0; + int newPictureFormat = 0; + int newHwPictureFormat = 0; + const char *strNewPictureFormat = params.getPictureFormat(); + const char *strCurPictureFormat = m_params.getPictureFormat(); + + if (strNewPictureFormat == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newPictureFormat %s", "setParameters", strNewPictureFormat); + + if (!strcmp(strNewPictureFormat, CameraParameters::PIXEL_FORMAT_JPEG)) { + newPictureFormat = V4L2_PIX_FMT_JPEG; + newHwPictureFormat = SCC_OUTPUT_COLOR_FMT; + } else { + CLOGE("ERR(%s[%d]): Picture format(%s) is not supported!", + __FUNCTION__, __LINE__, strNewPictureFormat); + return BAD_VALUE; + } + + curPictureFormat = getPictureFormat(); + + if (newPictureFormat != curPictureFormat) { + CLOGI("INFO(%s[%d]): Picture format changed, cur(%s) -> new(%s)", + "Parameters", __LINE__, strCurPictureFormat, strNewPictureFormat); + m_setPictureFormat(newPictureFormat); + m_setHwPictureFormat(newHwPictureFormat); + m_params.setPictureFormat(strNewPictureFormat); + } + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::checkMeteringMode(const CameraParameters& params) +{ + const char *strNewMeteringMode = params.get("metering"); + int newMeteringMode = -1; + int curMeteringMode = -1; + if (strNewMeteringMode == NULL) { + return NO_ERROR; + } + + ALOGD("DEBUG(%s):strNewMeteringMode %s", "setParameters", strNewMeteringMode); + + if (!strcmp(strNewMeteringMode, "average")) + newMeteringMode = METERING_MODE_AVERAGE; + else if (!strcmp(strNewMeteringMode, "center")) + newMeteringMode = METERING_MODE_CENTER; + else if (!strcmp(strNewMeteringMode, "matrix")) + newMeteringMode = METERING_MODE_MATRIX; + else if (!strcmp(strNewMeteringMode, "spot")) + newMeteringMode = METERING_MODE_SPOT; +#ifdef TOUCH_AE + else if (!strcmp(strNewMeteringMode, "weighted-center")) + newMeteringMode = METERING_MODE_CENTER_TOUCH; + else if (!strcmp(strNewMeteringMode, "weighted-matrix")) + newMeteringMode = METERING_MODE_MATRIX_TOUCH; + else if (!strcmp(strNewMeteringMode, "weighted-spot")) + newMeteringMode = METERING_MODE_SPOT_TOUCH; + else if (!strcmp(strNewMeteringMode, "weighted-average")) + newMeteringMode = METERING_MODE_AVERAGE_TOUCH; +#endif + else { + ALOGE("ERR(%s):Invalid metering newMetering(%s)", __FUNCTION__, strNewMeteringMode); + return UNKNOWN_ERROR; + } + + curMeteringMode = getMeteringMode(); + + m_setMeteringMode(newMeteringMode); + m_params.set("metering", strNewMeteringMode); + + if (curMeteringMode != newMeteringMode) { + ALOGI("INFO(%s): Metering Area is changed (%d -> %d)", __FUNCTION__, curMeteringMode, newMeteringMode); + m_flagMeteringRegionChanged = true; + } + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::checkSceneMode(const CameraParameters& params) +{ + int newSceneMode = -1; + int curSceneMode = -1; + const char *strNewSceneMode = params.get(CameraParameters::KEY_SCENE_MODE); + + if (strNewSceneMode == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewSceneMode %s", "setParameters", strNewSceneMode); + + if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_AUTO)) { + newSceneMode = SCENE_MODE_AUTO; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_ACTION)) { + newSceneMode = SCENE_MODE_ACTION; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PORTRAIT)) { + newSceneMode = SCENE_MODE_PORTRAIT; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_LANDSCAPE)) { + newSceneMode = 
SCENE_MODE_LANDSCAPE; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT)) { + newSceneMode = SCENE_MODE_NIGHT; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT_PORTRAIT)) { + newSceneMode = SCENE_MODE_NIGHT_PORTRAIT; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_THEATRE)) { + newSceneMode = SCENE_MODE_THEATRE; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_BEACH)) { + newSceneMode = SCENE_MODE_BEACH; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SNOW)) { + newSceneMode = SCENE_MODE_SNOW; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SUNSET)) { + newSceneMode = SCENE_MODE_SUNSET; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_STEADYPHOTO)) { + newSceneMode = SCENE_MODE_STEADYPHOTO; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_FIREWORKS)) { + newSceneMode = SCENE_MODE_FIREWORKS; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SPORTS)) { + newSceneMode = SCENE_MODE_SPORTS; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PARTY)) { + newSceneMode = SCENE_MODE_PARTY; + } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_CANDLELIGHT)) { + newSceneMode = SCENE_MODE_CANDLELIGHT; + } else { + CLOGE("ERR(%s):unmatched scene_mode(%s)", "Parameters", strNewSceneMode); + return BAD_VALUE; + } + + curSceneMode = getSceneMode(); + + if (curSceneMode != newSceneMode) { + m_setSceneMode(newSceneMode); + m_params.set(CameraParameters::KEY_SCENE_MODE, strNewSceneMode); + updatePreviewFpsRange(); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setSceneMode(int value) +{ + enum aa_mode mode = AA_CONTROL_AUTO; + enum aa_scene_mode sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + + switch (value) { + case SCENE_MODE_PORTRAIT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_PORTRAIT; + break; + case SCENE_MODE_LANDSCAPE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_LANDSCAPE; + break; + case SCENE_MODE_NIGHT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_NIGHT; + break; + case SCENE_MODE_BEACH: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_BEACH; + break; + case SCENE_MODE_SNOW: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SNOW; + break; + case SCENE_MODE_SUNSET: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SUNSET; + break; + case SCENE_MODE_FIREWORKS: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_FIREWORKS; + break; + case SCENE_MODE_SPORTS: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SPORTS; + break; + case SCENE_MODE_PARTY: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_PARTY; + break; + case SCENE_MODE_CANDLELIGHT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_CANDLELIGHT; + break; + case SCENE_MODE_STEADYPHOTO: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_STEADYPHOTO; + break; + case SCENE_MODE_ACTION: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_ACTION; + break; + case SCENE_MODE_NIGHT_PORTRAIT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_NIGHT_PORTRAIT; + break; + case SCENE_MODE_THEATRE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_THEATRE; + break; + case SCENE_MODE_AQUA: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_AQUA; + break; + case SCENE_MODE_AUTO: + default: + mode = AA_CONTROL_AUTO; + sceneMode = 
AA_SCENE_MODE_FACE_PRIORITY; + break; + } + + m_cameraInfo.sceneMode = value; + setMetaCtlSceneMode(&m_metadata, mode, sceneMode); + m_cameraInfo.whiteBalanceMode = m_convertMetaCtlAwbMode(&m_metadata); +} + +status_t ExynosCamera1Parameters::checkFocusMode(const CameraParameters& params) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return NO_ERROR; + } else { + int newFocusMode = -1; + int curFocusMode = -1; + const char *strFocusMode = params.get(CameraParameters::KEY_FOCUS_MODE); + const char *strNewFocusMode = m_adjustFocusMode(strFocusMode); + + if (strNewFocusMode == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewFocusMode %s", "setParameters", strNewFocusMode); + + if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_AUTO)) { + newFocusMode = FOCUS_MODE_AUTO; + m_params.set(CameraParameters::KEY_FOCUS_DISTANCES, + BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR); + } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_INFINITY)) { + newFocusMode = FOCUS_MODE_INFINITY; + m_params.set(CameraParameters::KEY_FOCUS_DISTANCES, + BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR); + } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_MACRO)) { + newFocusMode = FOCUS_MODE_MACRO; + m_params.set(CameraParameters::KEY_FOCUS_DISTANCES, + BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR); + } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_FIXED)) { + newFocusMode = FOCUS_MODE_FIXED; + } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_EDOF)) { + newFocusMode = FOCUS_MODE_EDOF; + } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) { + newFocusMode = FOCUS_MODE_CONTINUOUS_VIDEO; + } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) { + newFocusMode = FOCUS_MODE_CONTINUOUS_PICTURE; + } else if (!strcmp(strNewFocusMode, "face-priority")) { + newFocusMode = FOCUS_MODE_CONTINUOUS_PICTURE; + } else if (!strcmp(strNewFocusMode, "continuous-picture-macro")) { + newFocusMode = FOCUS_MODE_CONTINUOUS_PICTURE_MACRO; + } else { + CLOGE("ERR(%s):unmatched focus_mode(%s)", __FUNCTION__, strNewFocusMode); + return BAD_VALUE; + } + + if (!(newFocusMode & getSupportedFocusModes())){ + CLOGE("ERR(%s[%d]): Focus mode(%s) is not supported!", __FUNCTION__, __LINE__, strNewFocusMode); + return BAD_VALUE; + } + + m_setFocusMode(newFocusMode); + m_params.set(CameraParameters::KEY_FOCUS_MODE, strNewFocusMode); + + return NO_ERROR; + } +} + +const char *ExynosCamera1Parameters::m_adjustFocusMode(const char *focusMode) +{ + int sceneMode = getSceneMode(); + const char *newFocusMode = NULL; + + /* TODO: vendor specific adjust */ + + newFocusMode = focusMode; + + return newFocusMode; +} + +void ExynosCamera1Parameters::m_setFocusMode(int focusMode) +{ + m_cameraInfo.focusMode = focusMode; + + if(getZoomActiveOn()) { + ALOGD("DEBUG(%s):zoom moving..", "setParameters"); + return; + } + + /* TODO: Notify auto focus activity */ + if(getPreviewRunning() == true) { + ALOGD("set Focus Mode(%s[%d]) !!!!", __FUNCTION__, __LINE__); + m_activityControl->setAutoFocusMode(focusMode); + } else { + m_setFocusmodeSetting = true; + } +} + +status_t ExynosCamera1Parameters::checkFocusAreas(const CameraParameters& params) +{ + int ret = NO_ERROR; + const char *newFocusAreas = params.get(CameraParameters::KEY_FOCUS_AREAS); + int curFocusMode = getFocusMode(); + uint32_t maxNumFocusAreas = getMaxNumFocusAreas(); + + if (newFocusAreas == NULL) { + ExynosRect2 nullRect2[1]; + nullRect2[0].x1 = 0; + nullRect2[0].y1 = 0; + nullRect2[0].x2 = 0; 
+ nullRect2[0].y2 = 0; + + if (m_cameraInfo.numValidFocusArea != 0) + m_setFocusAreas(0, nullRect2, NULL); + + return NO_ERROR; + } + + CLOGD("DEBUG(%s):newFocusAreas %s", "setParameters", newFocusAreas); + + /* In CameraParameters.h */ + /* + * Focus area only has effect if the cur focus mode is FOCUS_MODE_AUTO, + * FOCUS_MODE_MACRO, FOCUS_MODE_CONTINUOUS_VIDEO, or + * FOCUS_MODE_CONTINUOUS_PICTURE. + */ + if (curFocusMode & FOCUS_MODE_AUTO + || curFocusMode & FOCUS_MODE_MACRO + || curFocusMode & FOCUS_MODE_CONTINUOUS_VIDEO + || curFocusMode & FOCUS_MODE_CONTINUOUS_PICTURE + || curFocusMode & FOCUS_MODE_CONTINUOUS_PICTURE_MACRO) { + + /* ex : (-10,-10,0,0,300),(0,0,10,10,700) */ + ExynosRect2 *rect2s = new ExynosRect2[maxNumFocusAreas]; + int *weights = new int[maxNumFocusAreas]; + + uint32_t validFocusedAreas = bracketsStr2Ints((char *)newFocusAreas, maxNumFocusAreas, rect2s, weights, 1); + + /* Check duplicate area */ + if (validFocusedAreas > 1) { + for (uint32_t k = 0; k < validFocusedAreas; k++) { + if (rect2s[k].x1 == rect2s[k+1].x1 && + rect2s[k].y1 == rect2s[k+1].y1 && + rect2s[k].x2 == rect2s[k+1].x2 && + rect2s[k].y2 == rect2s[k+1].y2) + validFocusedAreas = 0; + } + } + + if (0 < validFocusedAreas) { + /* CameraParameters.h */ + /* + * A special case of single focus area (0,0,0,0,0) means driver to decide + * the focus area. For example, the driver may use more signals to decide + * focus areas and change them dynamically. Apps can set (0,0,0,0,0) if they + * want the driver to decide focus areas. + */ + m_setFocusAreas(validFocusedAreas, rect2s, weights); + m_params.set(CameraParameters::KEY_FOCUS_AREAS, newFocusAreas); + } else { + CLOGE("ERR(%s):FocusAreas value is invalid", __FUNCTION__); + ret = UNKNOWN_ERROR; + } + + delete [] rect2s; + delete [] weights; + } + + return ret; +} + +void ExynosCamera1Parameters::m_setFocusAreas(uint32_t numValid, ExynosRect2 *rect2s, int *weights) +{ + uint32_t maxNumFocusAreas = getMaxNumFocusAreas(); + if (maxNumFocusAreas < numValid) + numValid = maxNumFocusAreas; + + if ((numValid == 1 || numValid == 0) && (isRectNull(&rect2s[0]) == true)) { + /* m_setFocusMode(FOCUS_MODE_AUTO); */ + ExynosRect2 newRect2(0,0,0,0); + m_activityControl->setAutoFcousArea(newRect2, 1000); + + m_activityControl->touchAFMode = false; + m_activityControl->touchAFModeForFlash = false; + } else { + ExynosRect cropRegionRect; + ExynosRect2 newRect2; + + getHwBayerCropRegion(&cropRegionRect.w, &cropRegionRect.h, &cropRegionRect.x, &cropRegionRect.y); + + for (uint32_t i = 0; i < numValid; i++) { + newRect2 = convertingAndroidArea2HWAreaBcropOut(&rect2s[i], &cropRegionRect); + /*setMetaCtlAfRegion(&m_metadata, rect2s[i].x1, rect2s[i].y1, + rect2s[i].x2, rect2s[i].y2, weights[i]);*/ + m_activityControl->setAutoFcousArea(newRect2, weights[i]); + } + m_activityControl->touchAFMode = true; + m_activityControl->touchAFModeForFlash = true; + } + + m_cameraInfo.numValidFocusArea = numValid; +} + +void ExynosCamera1Parameters::m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *pictureRect, + ExynosRect *thumbnailRect, + __unused camera2_dm *dm, + camera2_udm *udm) +{ + /* 2 0th IFD TIFF Tags */ + /* 3 Width */ + exifInfo->width = pictureRect->w; + /* 3 Height */ + exifInfo->height = pictureRect->h; + + /* 3 Orientation */ + switch (m_cameraInfo.rotation) { + case 90: + exifInfo->orientation = EXIF_ORIENTATION_90; + break; + case 180: + exifInfo->orientation = EXIF_ORIENTATION_180; + break; + case 270: + exifInfo->orientation = EXIF_ORIENTATION_270; + break; 
+ case 0: + default: + exifInfo->orientation = EXIF_ORIENTATION_UP; + break; + } + + /* 3 Maker note */ + /* back-up udm info for exif's maker note */ + memcpy((void *)mDebugInfo.debugData[APP_MARKER_4], (void *)udm, mDebugInfo.debugSize[APP_MARKER_4]); + exifInfo->maker_note_size = 0; + + /* 3 Date time */ + struct timeval rawtime; + struct tm timeinfo; + gettimeofday(&rawtime, NULL); + localtime_r((time_t *)&rawtime.tv_sec, &timeinfo); + strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", &timeinfo); + snprintf((char *)exifInfo->sec_time, 5, "%04d", (int)(rawtime.tv_usec/1000)); + + /* 2 0th IFD Exif Private Tags */ + bool flagSLSIAlgorithm = true; + /* + * vendorSpecific2[100] : exposure + * vendorSpecific2[101] : iso(gain) + * vendorSpecific2[102] /256 : Bv + * vendorSpecific2[103] : Tv + */ + + /* 3 ISO Speed Rating */ + exifInfo->iso_speed_rating = udm->internal.vendorSpecific2[101]; + + /* 3 Exposure Time */ + exifInfo->exposure_time.num = 1; + + if (udm->ae.vendorSpecific[0] == 0xAEAEAEAE) + exifInfo->exposure_time.den = (uint32_t)udm->ae.vendorSpecific[64]; + else + exifInfo->exposure_time.den = (uint32_t)udm->internal.vendorSpecific2[100]; + + /* 3 Shutter Speed */ + exifInfo->shutter_speed.num = (uint32_t)(ROUND_OFF_HALF(((double)(udm->internal.vendorSpecific2[103] / 256.f) * EXIF_DEF_APEX_DEN), 0)); + exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; + + if (getHalVersion() == IS_HAL_VER_3_2) { + /* 3 Aperture */ + exifInfo->aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->aperture.den = COMMON_DENOMINATOR; + + /* 3 Max Aperture */ + exifInfo->max_aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->max_aperture.den = COMMON_DENOMINATOR; + } else { + /* 3 Aperture */ + exifInfo->aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * m_staticInfo->apertureDen; + exifInfo->aperture.den = m_staticInfo->apertureDen; + + /* 3 Max Aperture */ + exifInfo->max_aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * m_staticInfo->apertureDen; + exifInfo->max_aperture.den = m_staticInfo->apertureDen; + } + + /* 3 Brightness */ + int temp = udm->internal.vendorSpecific2[102]; + if ((int)udm->ae.vendorSpecific[102] < 0) + temp = -temp; + + exifInfo->brightness.num = (int32_t)(ROUND_OFF_HALF((double)((temp * EXIF_DEF_APEX_DEN) / 256.f), 0)); + if ((int)udm->ae.vendorSpecific[102] < 0) + exifInfo->brightness.num = -exifInfo->brightness.num; + + exifInfo->brightness.den = EXIF_DEF_APEX_DEN; + + CLOGD("DEBUG(%s):udm->internal.vendorSpecific2[100](%d)", __FUNCTION__, udm->internal.vendorSpecific2[100]); + CLOGD("DEBUG(%s):udm->internal.vendorSpecific2[101](%d)", __FUNCTION__, udm->internal.vendorSpecific2[101]); + CLOGD("DEBUG(%s):udm->internal.vendorSpecific2[102](%d)", __FUNCTION__, udm->internal.vendorSpecific2[102]); + CLOGD("DEBUG(%s):udm->internal.vendorSpecific2[103](%d)", __FUNCTION__, udm->internal.vendorSpecific2[103]); + + CLOGD("DEBUG(%s):iso_speed_rating(%d)", __FUNCTION__, exifInfo->iso_speed_rating); + CLOGD("DEBUG(%s):exposure_time(%d/%d)", __FUNCTION__, exifInfo->exposure_time.num, exifInfo->exposure_time.den); + CLOGD("DEBUG(%s):shutter_speed(%d/%d)", __FUNCTION__, exifInfo->shutter_speed.num, exifInfo->shutter_speed.den); + CLOGD("DEBUG(%s):aperture (%d/%d)", __FUNCTION__, exifInfo->aperture.num, 
exifInfo->aperture.den); + CLOGD("DEBUG(%s):brightness (%d/%d)", __FUNCTION__, exifInfo->brightness.num, exifInfo->brightness.den); + + /* 3 Exposure Bias */ + exifInfo->exposure_bias.num = (int32_t)getExposureCompensation() * (m_staticInfo->exposureCompensationStep * 10); + exifInfo->exposure_bias.den = 10; + + /* 3 Metering Mode */ + switch (m_cameraInfo.meteringMode) { + case METERING_MODE_CENTER: + exifInfo->metering_mode = EXIF_METERING_CENTER; + break; + case METERING_MODE_MATRIX: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + case METERING_MODE_SPOT: + exifInfo->metering_mode = EXIF_METERING_SPOT; + break; + case METERING_MODE_AVERAGE: + default: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + } + + /* 3 Flash */ + if (m_cameraInfo.flashMode == FLASH_MODE_OFF) { + exifInfo->flash = 0; + } else if (m_cameraInfo.flashMode == FLASH_MODE_TORCH) { + exifInfo->flash = 1; + } else { + exifInfo->flash = getMarkingOfExifFlash(); + } + + /* 3 White Balance */ + switch (m_cameraInfo.whiteBalanceMode) { + case WHITE_BALANCE_AUTO: + exifInfo->white_balance = EXIF_WB_AUTO; + break; + case WHITE_BALANCE_INCANDESCENT: + exifInfo->white_balance = EXIF_WB_INCANDESCENT; + break; + case WHITE_BALANCE_FLUORESCENT: + exifInfo->white_balance = EXIF_WB_FLUORESCENT; + break; + case WHITE_BALANCE_DAYLIGHT: + exifInfo->white_balance = EXIF_WB_DAYLIGHT; + break; + case WHITE_BALANCE_CLOUDY_DAYLIGHT: + exifInfo->white_balance = EXIF_WB_CLOUDY; + break; + default: + exifInfo->white_balance = EXIF_WB_MANUAL; + break; + } + + /* 3 Focal Length in 35mm length */ + exifInfo->focal_length_in_35mm_length = m_staticInfo->focalLengthIn35mmLength; + + /* 3 Scene Capture Type */ + switch (m_cameraInfo.sceneMode) { + case SCENE_MODE_PORTRAIT: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + case SCENE_MODE_LANDSCAPE: + exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; + break; + case SCENE_MODE_NIGHT: + exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; + break; + default: + exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; + break; + } + + /* 3 Image Unique ID */ + struct v4l2_ext_controls ctrls; + struct v4l2_ext_control ctrl; + int uniqueId = 0; + char uniqueIdBuf[32] = {0,}; + + memset(&ctrls, 0, sizeof(struct v4l2_ext_controls)); + memset(&ctrl, 0, sizeof(struct v4l2_ext_control)); + + ctrls.ctrl_class = V4L2_CTRL_CLASS_CAMERA; + ctrls.count = 1; + ctrls.controls = &ctrl; + ctrl.id = V4L2_CID_CAM_SENSOR_FW_VER; + ctrl.string = uniqueIdBuf; + + /* 2 0th IFD GPS Info Tags */ + if (m_cameraInfo.gpsLatitude != 0 && m_cameraInfo.gpsLongitude != 0) { + if (m_cameraInfo.gpsLatitude > 0) + strncpy((char *)exifInfo->gps_latitude_ref, "N", 2); + else + strncpy((char *)exifInfo->gps_latitude_ref, "S", 2); + + if (m_cameraInfo.gpsLongitude > 0) + strncpy((char *)exifInfo->gps_longitude_ref, "E", 2); + else + strncpy((char *)exifInfo->gps_longitude_ref, "W", 2); + + if (m_cameraInfo.gpsAltitude > 0) + exifInfo->gps_altitude_ref = 0; + else + exifInfo->gps_altitude_ref = 1; + + double latitude = fabs(m_cameraInfo.gpsLatitude); + double longitude = fabs(m_cameraInfo.gpsLongitude); + double altitude = fabs(m_cameraInfo.gpsAltitude); + + exifInfo->gps_latitude[0].num = (uint32_t)latitude; + exifInfo->gps_latitude[0].den = 1; + exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60); + exifInfo->gps_latitude[1].den = 1; + exifInfo->gps_latitude[2].num = (uint32_t)(round((((latitude - exifInfo->gps_latitude[0].num) * 60) + - 
exifInfo->gps_latitude[1].num) * 60)); + exifInfo->gps_latitude[2].den = 1; + + exifInfo->gps_longitude[0].num = (uint32_t)longitude; + exifInfo->gps_longitude[0].den = 1; + exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60); + exifInfo->gps_longitude[1].den = 1; + exifInfo->gps_longitude[2].num = (uint32_t)(round((((longitude - exifInfo->gps_longitude[0].num) * 60) + - exifInfo->gps_longitude[1].num) * 60)); + exifInfo->gps_longitude[2].den = 1; + + exifInfo->gps_altitude.num = (uint32_t)altitude; + exifInfo->gps_altitude.den = 1; + + struct tm tm_data; + gmtime_r(&m_cameraInfo.gpsTimeStamp, &tm_data); + exifInfo->gps_timestamp[0].num = tm_data.tm_hour; + exifInfo->gps_timestamp[0].den = 1; + exifInfo->gps_timestamp[1].num = tm_data.tm_min; + exifInfo->gps_timestamp[1].den = 1; + exifInfo->gps_timestamp[2].num = tm_data.tm_sec; + exifInfo->gps_timestamp[2].den = 1; + snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), + "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); + + exifInfo->enableGps = true; + } else { + exifInfo->enableGps = false; + } + + /* 2 1th IFD TIFF Tags */ + exifInfo->widthThumb = thumbnailRect->w; + exifInfo->heightThumb = thumbnailRect->h; + + setMarkingOfExifFlash(0); +} + +/* for CAMERA2_API_SUPPORT */ +void ExynosCamera1Parameters::m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *pictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot) +{ + /* JPEG Picture Size */ + exifInfo->width = pictureRect->w; + exifInfo->height = pictureRect->h; + + /* Orientation */ + switch (shot->ctl.jpeg.orientation) { + case 90: + exifInfo->orientation = EXIF_ORIENTATION_90; + break; + case 180: + exifInfo->orientation = EXIF_ORIENTATION_180; + break; + case 270: + exifInfo->orientation = EXIF_ORIENTATION_270; + break; + case 0: + default: + exifInfo->orientation = EXIF_ORIENTATION_UP; + break; + } + + /* Maker Note Size */ + /* back-up udm info for exif's maker note */ + memcpy((void *)mDebugInfo.debugData[4], (void *)&shot->udm, mDebugInfo.debugSize[APP_MARKER_4]); + + /* TODO */ +#if 0 + if (getSeriesShotCount() && getShotMode() != SHOT_MODE_BEST_PHOTO) { + unsigned char l_makernote[98] = { 0x07, 0x00, 0x01, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x30, 0x31, 0x30, 0x30, 0x02, 0x00, 0x04, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x00, 0x20, 0x01, 0x00, 0x40, 0x00, 0x04, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, + 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x05, 0x00, 0x01, 0x00, 0x00, 0x00, 0x5A, 0x00, + 0x00, 0x00, 0x50, 0x00, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; + long long int mCityId = getCityId(); + l_makernote[46] = getWeatherId(); + memcpy(l_makernote + 90, &mCityId, 8); + exifInfo->maker_note_size = 98; + memcpy(exifInfo->maker_note, l_makernote, sizeof(l_makernote)); + } else { + exifInfo->maker_note_size = 0; + } +#else + exifInfo->maker_note_size = 0; +#endif + + /* Date Time */ + struct timeval rawtime; + struct tm timeinfo; + gettimeofday(&rawtime, NULL); + localtime_r((time_t *)&rawtime.tv_sec, &timeinfo); + strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", &timeinfo); + sprintf((char *)exifInfo->sec_time, "%d", (int)(rawtime.tv_usec/1000)); + + /* Exif Private Tags */ + bool flagSLSIAlgorithm = true; + /* + * 
vendorSpecific2[0] : info + * vendorSpecific2[100] : 0:sirc 1:cml + * vendorSpecific2[101] : cml exposure + * vendorSpecific2[102] : cml iso(gain) + * vendorSpecific2[103] : cml Bv + */ + + /* ISO Speed Rating */ + exifInfo->iso_speed_rating = shot->dm.aa.vendor_isoValue; + + /* Exposure Time */ + exifInfo->exposure_time.num = 1; + /* HACK : Sometimes, F/W does NOT send the exposureTime */ + if (shot->dm.sensor.exposureTime != 0) + exifInfo->exposure_time.den = (uint32_t) 1e9 / shot->dm.sensor.exposureTime; + else + exifInfo->exposure_time.num = 0; + + /* Shutter Speed */ + exifInfo->shutter_speed.num = (uint32_t) (ROUND_OFF_HALF(((double) (shot->udm.internal.vendorSpecific2[104] / 256.f) * EXIF_DEF_APEX_DEN), 0)); + exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; + + /* Aperture */ + exifInfo->aperture.num = APEX_FNUM_TO_APERTURE((double) (exifInfo->fnumber.num) / (double) (exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->aperture.den = COMMON_DENOMINATOR; + + /* Max Aperture */ + exifInfo->max_aperture.num = APEX_FNUM_TO_APERTURE((double) (exifInfo->fnumber.num) / (double) (exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->max_aperture.den = COMMON_DENOMINATOR; + + /* Brightness */ + int temp = shot->udm.internal.vendorSpecific2[103]; + if ((int) shot->udm.ae.vendorSpecific[103] < 0) + temp = -temp; + exifInfo->brightness.num = (int32_t) (ROUND_OFF_HALF((double)((temp * EXIF_DEF_APEX_DEN)/256.f), 0)); + if ((int) shot->udm.ae.vendorSpecific[103] < 0) + exifInfo->brightness.num = -exifInfo->brightness.num; + exifInfo->brightness.den = EXIF_DEF_APEX_DEN; + + ALOGD("DEBUG(%s):udm->internal.vendorSpecific2[101](%d)", __FUNCTION__, shot->udm.internal.vendorSpecific2[101]); + ALOGD("DEBUG(%s):udm->internal.vendorSpecific2[102](%d)", __FUNCTION__, shot->udm.internal.vendorSpecific2[102]); + ALOGD("DEBUG(%s):udm->internal.vendorSpecific2[103](%d)", __FUNCTION__, shot->udm.internal.vendorSpecific2[103]); + ALOGD("DEBUG(%s):udm->internal.vendorSpecific2[104](%d)", __FUNCTION__, shot->udm.internal.vendorSpecific2[104]); + + ALOGD("DEBUG(%s):iso_speed_rating(%d)", __FUNCTION__, exifInfo->iso_speed_rating); + ALOGD("DEBUG(%s):exposure_time(%d/%d)", __FUNCTION__, exifInfo->exposure_time.num, exifInfo->exposure_time.den); + ALOGD("DEBUG(%s):shutter_speed(%d/%d)", __FUNCTION__, exifInfo->shutter_speed.num, exifInfo->shutter_speed.den); + ALOGD("DEBUG(%s):aperture (%d/%d)", __FUNCTION__, exifInfo->aperture.num, exifInfo->aperture.den); + ALOGD("DEBUG(%s):brightness (%d/%d)", __FUNCTION__, exifInfo->brightness.num, exifInfo->brightness.den); + + /* Exposure Bias */ +#if defined(USE_SUBDIVIDED_EV) + exifInfo->exposure_bias.num = shot->ctl.aa.aeExpCompensation * (m_staticInfo->exposureCompensationStep * 10); +#else + exifInfo->exposure_bias.num = + (shot->ctl.aa.aeExpCompensation) * (m_staticInfo->exposureCompensationStep * 10); +#endif + exifInfo->exposure_bias.den = 10; + + /* Metering Mode */ + { + switch (shot->ctl.aa.aeMode) { + case AA_AEMODE_CENTER: + exifInfo->metering_mode = EXIF_METERING_CENTER; + break; + case AA_AEMODE_MATRIX: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + case AA_AEMODE_SPOT: + exifInfo->metering_mode = EXIF_METERING_SPOT; + break; + default: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + } + } + + /* Flash Mode */ + if (shot->ctl.flash.flashMode == CAM2_FLASH_MODE_OFF) { + exifInfo->flash = 0; + } else if (shot->ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) { + exifInfo->flash = 1; + } else { + exifInfo->flash = 
getMarkingOfExifFlash(); + } + + /* White Balance */ + if (shot->ctl.aa.awbMode == AA_AWBMODE_WB_AUTO) + exifInfo->white_balance = EXIF_WB_AUTO; + else + exifInfo->white_balance = EXIF_WB_MANUAL; + + /* Focal Length in 35mm length */ + exifInfo->focal_length_in_35mm_length = getFocalLengthIn35mmFilm(); + + /* Scene Capture Type */ + switch (shot->ctl.aa.sceneMode) { + case AA_SCENE_MODE_PORTRAIT: + case AA_SCENE_MODE_FACE_PRIORITY: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + case AA_SCENE_MODE_LANDSCAPE: + exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; + break; + case AA_SCENE_MODE_NIGHT: + exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; + break; + default: + exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; + break; + } + + switch (this->getShotMode()) { + case SHOT_MODE_BEAUTY_FACE: + case SHOT_MODE_BEST_FACE: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + default: + break; + } + + /* Image Unique ID */ + /* GPS Coordinates */ + double gpsLatitude = shot->ctl.jpeg.gpsCoordinates[0]; + double gpsLongitude = shot->ctl.jpeg.gpsCoordinates[1]; + double gpsAltitude = shot->ctl.jpeg.gpsCoordinates[2]; + if (gpsLatitude != 0 && gpsLongitude != 0) { + if (gpsLatitude > 0) + strncpy((char *) exifInfo->gps_latitude_ref, "N", 2); + else + strncpy((char *) exifInfo->gps_latitude_ref, "s", 2); + + if (gpsLongitude > 0) + strncpy((char *) exifInfo->gps_longitude_ref, "E", 2); + else + strncpy((char *) exifInfo->gps_longitude_ref, "W", 2); + + if (gpsAltitude > 0) + exifInfo->gps_altitude_ref = 0; + else + exifInfo->gps_altitude_ref = 1; + + gpsLatitude = fabs(gpsLatitude); + gpsLongitude = fabs(gpsLongitude); + gpsAltitude = fabs(gpsAltitude); + + exifInfo->gps_latitude[0].num = (uint32_t) gpsLatitude; + exifInfo->gps_latitude[0].den = 1; + exifInfo->gps_latitude[1].num = (uint32_t)((gpsLatitude - exifInfo->gps_latitude[0].num) * 60); + exifInfo->gps_latitude[1].den = 1; + exifInfo->gps_latitude[2].num = (uint32_t)(round((((gpsLatitude - exifInfo->gps_latitude[0].num) * 60) + - exifInfo->gps_latitude[1].num) * 60)); + exifInfo->gps_latitude[2].den = 1; + + exifInfo->gps_longitude[0].num = (uint32_t)gpsLongitude; + exifInfo->gps_longitude[0].den = 1; + exifInfo->gps_longitude[1].num = (uint32_t)((gpsLongitude - exifInfo->gps_longitude[0].num) * 60); + exifInfo->gps_longitude[1].den = 1; + exifInfo->gps_longitude[2].num = (uint32_t)(round((((gpsLongitude - exifInfo->gps_longitude[0].num) * 60) + - exifInfo->gps_longitude[1].num) * 60)); + exifInfo->gps_longitude[2].den = 1; + + exifInfo->gps_altitude.num = (uint32_t)gpsAltitude; + exifInfo->gps_altitude.den = 1; + + struct tm tm_data; + long gpsTimestamp = (long) shot->ctl.jpeg.gpsTimestamp; + gmtime_r(&gpsTimestamp, &tm_data); + exifInfo->gps_timestamp[0].num = tm_data.tm_hour; + exifInfo->gps_timestamp[0].den = 1; + exifInfo->gps_timestamp[1].num = tm_data.tm_min; + exifInfo->gps_timestamp[1].den = 1; + exifInfo->gps_timestamp[2].num = tm_data.tm_sec; + exifInfo->gps_timestamp[2].den = 1; + snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), + "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); + + exifInfo->enableGps = true; + } else { + exifInfo->enableGps = false; + } + + /* Thumbnail Size */ + exifInfo->widthThumb = thumbnailRect->w; + exifInfo->heightThumb = thumbnailRect->h; + + setMarkingOfExifFlash(0); +} + +#ifdef USE_BINNING_MODE +status_t ExynosCamera1Parameters::needBinningMode(void) +{ + char cameraModeProperty[PROPERTY_VALUE_MAX]; + int ret 
+ int ret = 0; + + if (m_staticInfo->vtcallSizeLutMax == 0 || m_staticInfo->vtcallSizeLut == NULL) { + ALOGV("(%s):vtCallSizeLut is NULL, can't support the binning mode", __FUNCTION__); + return ret; + } + + /* For VT Call with DualCamera Scenario */ + if (getDualMode()) { + ALOGV("(%s):DualMode can't support the binning mode.(%d,%d)", __FUNCTION__, getCameraId(), getDualMode()); + return ret; + } + + if (getVtMode() > 0 && getVtMode() < 3) { + ret = 1; + } else { + property_get("sys.cameramode.blackbox", cameraModeProperty, "0"); + if (strcmp(cameraModeProperty, "1") == 0) { + ret = 1; + } else { + property_get("sys.hangouts.fps", cameraModeProperty, "0"); + int newHangOutFPS = atoi(cameraModeProperty); + if (newHangOutFPS > 0) { + ret = 1; + } else { + property_get("sys.cameramode.vtcall", cameraModeProperty, "0"); + if (strcmp(cameraModeProperty, "1") == 0) { + ret = 1; + } + } + } + } + return ret; +} +#endif + +status_t ExynosCamera1Parameters::checkShotMode(const CameraParameters& params) +{ + int newShotMode = params.getInt("shot-mode"); + int curShotMode = -1; + char cameraModeProperty[PROPERTY_VALUE_MAX]; + +#ifdef USE_LIMITATION_FOR_THIRD_PARTY + property_get("sys.cameramode.blackbox", cameraModeProperty, "0"); + if (strcmp(cameraModeProperty, "1") == 0) { + newShotMode = THIRD_PARTY_BLACKBOX_MODE; + } else { + property_get("sys.hangouts.fps", cameraModeProperty, "0"); + int newHangOutFPS = atoi(cameraModeProperty); + if (newHangOutFPS > 0) { + newShotMode = THIRD_PARTY_HANGOUT_MODE; + } else { + int vtMode = params.getInt("vtmode"); + if ((vtMode <= 0) || (vtMode > 2)) { + property_get("sys.cameramode.vtcall", cameraModeProperty, "0"); + if (strcmp(cameraModeProperty, "1") == 0) { + newShotMode = THIRD_PARTY_VTCALL_MODE; + } + } + } + } +#endif + if (newShotMode < 0) { + return NO_ERROR; + } + + ALOGD("DEBUG(%s):newShotMode %d", "setParameters", newShotMode); + + curShotMode = getShotMode(); + + if ((getRecordingHint() == true) + && (newShotMode != SHOT_MODE_SEQUENCE) + ) { + m_setShotMode(SHOT_MODE_NORMAL); + m_params.set("shot-mode", SHOT_MODE_NORMAL); + } else if (curShotMode != newShotMode) { + m_setShotMode(newShotMode); + m_params.set("shot-mode", newShotMode); + + updatePreviewFpsRange(); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setShotMode(int shotMode) +{ + enum aa_mode mode = AA_CONTROL_AUTO; + enum aa_scene_mode sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + bool changeSceneMode = true; + + switch (shotMode) { + case SHOT_MODE_DRAMA: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_DRAMA; + break; + case SHOT_MODE_3D_PANORAMA: + case SHOT_MODE_PANORAMA: + case SHOT_MODE_FRONT_PANORAMA: + case SHOT_MODE_INTERACTIVE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_PANORAMA; + break; + case SHOT_MODE_NIGHT: + case SHOT_MODE_NIGHT_SCENE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_LLS; + break; + case SHOT_MODE_ANIMATED_SCENE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_ANIMATED; + break; + case SHOT_MODE_SPORTS: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SPORTS; + break; + case SHOT_MODE_GOLF: + case SHOT_MODE_SEQUENCE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_GOLF; + break; + case SHOT_MODE_NORMAL: + case SHOT_MODE_AUTO: + case SHOT_MODE_BEAUTY_FACE: + case SHOT_MODE_BEST_PHOTO: + case SHOT_MODE_BEST_FACE: + case SHOT_MODE_ERASER: + case SHOT_MODE_RICH_TONE: + case SHOT_MODE_STORY: + case SHOT_MODE_SELFIE_ALARM: + case SHOT_MODE_FASTMOTION: + mode =
AA_CONTROL_AUTO; + sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + break; + case SHOT_MODE_AQUA: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_AQUA; + break; + case SHOT_MODE_AUTO_PORTRAIT: + case SHOT_MODE_PET: +#ifdef USE_LIMITATION_FOR_THIRD_PARTY + case THIRD_PARTY_BLACKBOX_MODE: + case THIRD_PARTY_VTCALL_MODE: +#endif + default: + changeSceneMode = false; + break; + } + + m_cameraInfo.shotMode = shotMode; + if (changeSceneMode == true) + setMetaCtlSceneMode(&m_metadata, mode, sceneMode); +} + +int ExynosCamera1Parameters::getShotMode(void) +{ + return m_cameraInfo.shotMode; +} + +status_t ExynosCamera1Parameters::checkVtMode(const CameraParameters& params) +{ + int newVTMode = params.getInt("vtmode"); + int curVTMode = -1; + + CLOGD("DEBUG(%s):newVTMode %d", "setParameters", newVTMode); + + /* + * VT mode + * 1: 3G vtmode (176x144, Fixed 7fps) + * 2: LTE or WIFI vtmode (640x480, Fixed 15fps) + */ + if (newVTMode < 0 || newVTMode > 2) { + newVTMode = 0; + } + + curVTMode = getVtMode(); + + if (curVTMode != newVTMode) { + m_setVtMode(newVTMode); + m_params.set("vtmode", newVTMode); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setVtMode(int vtMode) +{ + m_cameraInfo.vtMode = vtMode; +} + +int ExynosCamera1Parameters::getVtMode(void) +{ + return m_cameraInfo.vtMode; +} + +status_t ExynosCamera1Parameters::checkGamma(const CameraParameters& params) +{ + bool newGamma = false; + bool curGamma = false; + const char *strNewGamma = params.get("video_recording_gamma"); + + if (strNewGamma == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewGamma %s", "setParameters", strNewGamma); + + if (!strcmp(strNewGamma, "off")) { + newGamma = false; + } else if (!strcmp(strNewGamma, "on")) { + newGamma = true; + } else { + CLOGE("ERR(%s):unmatched gamma(%s)", __FUNCTION__, strNewGamma); + return BAD_VALUE; + } + + curGamma = getGamma(); + + if (curGamma != newGamma) { + m_setGamma(newGamma); + m_params.set("video_recording_gamma", strNewGamma); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setGamma(bool gamma) +{ + m_cameraInfo.gamma = gamma; +} + +bool ExynosCamera1Parameters::getGamma(void) +{ + return m_cameraInfo.gamma; +} + +status_t ExynosCamera1Parameters::checkSlowAe(const CameraParameters& params) +{ + bool newSlowAe = false; + bool curSlowAe = false; + const char *strNewSlowAe = params.get("slow_ae"); + + if (strNewSlowAe == NULL) { + return NO_ERROR; + } + + CLOGD("DEBUG(%s):strNewSlowAe %s", "setParameters", strNewSlowAe); + + if (!strcmp(strNewSlowAe, "off")) + newSlowAe = false; + else if (!strcmp(strNewSlowAe, "on")) + newSlowAe = true; + else { + CLOGE("ERR(%s):unmatched slow_ae(%s)", __FUNCTION__, strNewSlowAe); + return BAD_VALUE; + } + + curSlowAe = getSlowAe(); + + if (curSlowAe != newSlowAe) { + m_setSlowAe(newSlowAe); + m_params.set("slow_ae", strNewSlowAe); + } + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_setSlowAe(bool slowAe) +{ + m_cameraInfo.slowAe = slowAe; +} + +bool ExynosCamera1Parameters::getSlowAe(void) +{ + return m_cameraInfo.slowAe; +} + +status_t ExynosCamera1Parameters::checkImageUniqueId(__unused const CameraParameters& params) +{ + const char *strCurImageUniqueId = m_params.get("imageuniqueid-value"); + const char *strNewImageUniqueId = NULL; + + if(strCurImageUniqueId == NULL || strcmp(strCurImageUniqueId, "") == 0 || strcmp(strCurImageUniqueId, "0") == 0) { + strNewImageUniqueId = getImageUniqueId(); + + if (strNewImageUniqueId != NULL && strcmp(strNewImageUniqueId, "") != 0) { + 
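/*
 * The checkShotMode()/checkVtMode()/checkGamma()/checkSlowAe() parsers above all
 * follow the same pattern: read a Samsung vendor key from the incoming
 * CameraParameters, validate it, cache it in m_cameraInfo, and mirror it back into
 * m_params. A rough caller-side sketch (the key strings come from the parsers
 * above; the surrounding setup is hypothetical):
 *
 *   android::CameraParameters p;
 *   p.set("vtmode", "2");                  // LTE/Wi-Fi VT call: 640x480, fixed 15 fps
 *   p.set("shot-mode", "0");               // values map to the SHOT_MODE_* enum
 *   p.set("video_recording_gamma", "on");
 *   p.set("slow_ae", "off");
 *   // The framework's setParameters(p.flatten()) call eventually lands in
 *   // ExynosCamera1Parameters::setParameters(), which runs these checkXxx() methods.
 */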
ALOGD("DEBUG(%s):newImageUniqueId %s ", "setParameters", strNewImageUniqueId ); + m_params.set("imageuniqueid-value", strNewImageUniqueId); + } + } + return NO_ERROR; +} + +const char *ExynosCamera1Parameters::getImageUniqueId(void) +{ + return NULL; +} + +#ifdef BURST_CAPTURE +status_t ExynosCamera1Parameters::checkSeriesShotFilePath(const CameraParameters& params) +{ + const char *seriesShotFilePath = params.get("capture-burst-filepath"); + + if (seriesShotFilePath != NULL) { + snprintf(m_seriesShotFilePath, sizeof(m_seriesShotFilePath), "%s", seriesShotFilePath); + CLOGD("DEBUG(%s): seriesShotFilePath %s", "setParameters", seriesShotFilePath); + m_params.set("capture-burst-filepath", seriesShotFilePath); + } else { + CLOGD("DEBUG(%s): seriesShotFilePath NULL", "setParameters"); + memset(m_seriesShotFilePath, 0, 100); + } + + return NO_ERROR; +} +#endif + +status_t ExynosCamera1Parameters::checkSeriesShotMode(const CameraParameters& params) +{ + int burstCount = params.getInt("burst-capture"); + int bestCount = params.getInt("best-capture"); + + CLOGD("DEBUG(%s): burstCount(%d), bestCount(%d)", "setParameters", burstCount, bestCount); + + if (burstCount < 0 || bestCount < 0) { + CLOGE("ERR(%s[%d]): Invalid burst-capture count(%d), best-capture count(%d)", __FUNCTION__, __LINE__, burstCount, bestCount); + return BAD_VALUE; + } + + /* TODO: select shot count */ + if (bestCount > burstCount) { + m_setSeriesShotCount(bestCount); + m_params.set("burst-capture", 0); + m_params.set("best-capture", bestCount); + } else { + m_setSeriesShotCount(burstCount); + m_params.set("burst-capture", burstCount); + m_params.set("best-capture", 0); + } + return NO_ERROR; +} +#ifdef BURST_CAPTURE +int ExynosCamera1Parameters::getSeriesShotSaveLocation(void) +{ + int seriesShotSaveLocation = m_seriesShotSaveLocation; + int shotMode = getShotMode(); + + /* GED's series shot work as callback */ + seriesShotSaveLocation = BURST_SAVE_CALLBACK; + + return seriesShotSaveLocation; +} + +void ExynosCamera1Parameters::setSeriesShotSaveLocation(int ssaveLocation) +{ + m_seriesShotSaveLocation = ssaveLocation; +} + +char *ExynosCamera1Parameters::getSeriesShotFilePath(void) +{ + return m_seriesShotFilePath; +} +#endif + +int ExynosCamera1Parameters::getSeriesShotDuration(void) +{ + return NORMAL_BURST_DURATION; +} + +int ExynosCamera1Parameters::getSeriesShotMode(void) +{ + return m_cameraInfo.seriesShotMode; +} + +void ExynosCamera1Parameters::setSeriesShotMode(int sshotMode, int count) +{ + int sshotCount = 0; + int shotMode = getShotMode(); + if (sshotMode == SERIES_SHOT_MODE_BURST) { + if (shotMode == SHOT_MODE_BEST_PHOTO) { + sshotMode = SERIES_SHOT_MODE_BEST_PHOTO; + sshotCount = 8; + } else if (shotMode == SHOT_MODE_BEST_FACE) { + sshotMode = SERIES_SHOT_MODE_BEST_FACE; + sshotCount = 5; + } else if (shotMode == SHOT_MODE_ERASER) { + sshotMode = SERIES_SHOT_MODE_ERASER; + sshotCount = 5; + } else { + sshotMode = SERIES_SHOT_MODE_BURST; + sshotCount = MAX_SERIES_SHOT_COUNT; + } + } else if (sshotMode == SERIES_SHOT_MODE_LLS || + sshotMode == SERIES_SHOT_MODE_SIS) { + if(count > 0) { + sshotCount = count; + } else { + sshotCount = 5; + } + } + + CLOGD("DEBUG(%s[%d]: set shotmode(%d), shotCount(%d)", __FUNCTION__, __LINE__, sshotMode, sshotCount); + + m_cameraInfo.seriesShotMode = sshotMode; + m_setSeriesShotCount(sshotCount); +} + +void ExynosCamera1Parameters::m_setSeriesShotCount(int seriesShotCount) +{ + m_cameraInfo.seriesShotCount = seriesShotCount; +} + +int ExynosCamera1Parameters::getSeriesShotCount(void) +{ 
+ return m_cameraInfo.seriesShotCount; +} + +bool ExynosCamera1Parameters::getSamsungCamera(void) +{ + return false; +} + +void ExynosCamera1Parameters::m_initMetadata(void) +{ + memset(&m_metadata, 0x00, sizeof(struct camera2_shot_ext)); + struct camera2_shot *shot = &m_metadata.shot; + + // 1. ctl + // request + shot->ctl.request.id = 0; + shot->ctl.request.metadataMode = METADATA_MODE_FULL; + shot->ctl.request.frameCount = 0; + + // lens + shot->ctl.lens.focusDistance = -1.0f; + if (getHalVersion() == IS_HAL_VER_3_2) { + shot->ctl.lens.aperture = m_staticInfo->aperture; + shot->ctl.lens.focalLength = m_staticInfo->focalLength; + } else { + shot->ctl.lens.aperture = (float)m_staticInfo->apertureNum / (float)m_staticInfo->apertureDen; + shot->ctl.lens.focalLength = (float)m_staticInfo->focalLengthNum / (float)m_staticInfo->focalLengthDen; + } + shot->ctl.lens.filterDensity = 0.0f; + shot->ctl.lens.opticalStabilizationMode = ::OPTICAL_STABILIZATION_MODE_OFF; + + int minFps = (m_staticInfo->minFps == 0) ? 0 : (m_staticInfo->maxFps / 2); + int maxFps = (m_staticInfo->maxFps == 0) ? 0 : m_staticInfo->maxFps; + + /* The min fps can not be '0'. Therefore it is set up default value '15'. */ + if (minFps == 0) { + CLOGW("WRN(%s): Invalid min fps value(%d)", __FUNCTION__, minFps); + minFps = 15; + } + + /* The initial fps can not be '0' and bigger than '30'. Therefore it is set up default value '30'. */ + if (maxFps == 0 || 30 < maxFps) { + CLOGW("WRN(%s): Invalid max fps value(%d)", __FUNCTION__, maxFps); + maxFps = 30; + } + + /* sensor */ + shot->ctl.sensor.exposureTime = 0; + shot->ctl.sensor.frameDuration = (1000 * 1000 * 1000) / maxFps; + shot->ctl.sensor.sensitivity = 0; + + /* flash */ + shot->ctl.flash.flashMode = ::CAM2_FLASH_MODE_OFF; + shot->ctl.flash.firingPower = 0; + shot->ctl.flash.firingTime = 0; + + /* hotpixel */ + shot->ctl.hotpixel.mode = (enum processing_mode)0; + + /* demosaic */ + shot->ctl.demosaic.mode = (enum demosaic_processing_mode)0; + + /* noise */ +#ifdef USE_NEW_NOISE_REDUCTION_ALGORITHM + shot->ctl.noise.mode = ::PROCESSING_MODE_FAST; +#else + shot->ctl.noise.mode = ::PROCESSING_MODE_OFF; +#endif + shot->ctl.noise.strength = 5; + + /* shading */ + shot->ctl.shading.mode = (enum processing_mode)0; + + /* color */ + shot->ctl.color.mode = ::COLORCORRECTION_MODE_FAST; + static const float colorTransform_hal3[9] = { + 1.0f, 0.f, 0.f, + 0.f, 1.f, 0.f, + 0.f, 0.f, 1.f + }; + static const struct rational colorTransform[9] = { + {1, 0}, {0, 0}, {0, 0}, + {0, 0}, {1, 0}, {0, 0}, + {0, 0}, {0, 0}, {1, 0} + }; + + if (getHalVersion() == IS_HAL_VER_3_2) { + for (size_t i = 0; i < sizeof(colorTransform)/sizeof(colorTransform[0]); i++) { + shot->ctl.color.transform[i].num = colorTransform_hal3[i] * COMMON_DENOMINATOR; + shot->ctl.color.transform[i].den = COMMON_DENOMINATOR; + } + } else { + memcpy(shot->ctl.color.transform, colorTransform, sizeof(shot->ctl.color.transform)); + } + + /* tonemap */ + shot->ctl.tonemap.mode = ::TONEMAP_MODE_FAST; + static const float tonemapCurve[4] = { + 0.f, 0.f, + 1.f, 1.f + }; + + int tonemapCurveSize = sizeof(tonemapCurve); + int sizeOfCurve = sizeof(shot->ctl.tonemap.curveRed) / sizeof(shot->ctl.tonemap.curveRed[0]); + + for (int i = 0; i < sizeOfCurve; i ++) { + memcpy(&(shot->ctl.tonemap.curveRed[i]), tonemapCurve, tonemapCurveSize); + memcpy(&(shot->ctl.tonemap.curveGreen[i]), tonemapCurve, tonemapCurveSize); + memcpy(&(shot->ctl.tonemap.curveBlue[i]), tonemapCurve, tonemapCurveSize); + } + + /* edge */ +#ifdef 
USE_NEW_NOISE_REDUCTION_ALGORITHM + shot->ctl.edge.mode = ::PROCESSING_MODE_FAST; +#else + shot->ctl.edge.mode = ::PROCESSING_MODE_OFF; +#endif + shot->ctl.edge.strength = 5; + + /* scaler + * Max Picture Size == Max Sensor Size - Sensor Margin + */ + if (m_setParamCropRegion(0, + m_staticInfo->maxPictureW, m_staticInfo->maxPictureH, + m_staticInfo->maxPreviewW, m_staticInfo->maxPreviewH + ) != NO_ERROR) { + ALOGE("ERR(%s):m_setZoom() fail", __FUNCTION__); + } + + /* jpeg */ + shot->ctl.jpeg.quality = 96; + shot->ctl.jpeg.thumbnailSize[0] = m_staticInfo->maxThumbnailW; + shot->ctl.jpeg.thumbnailSize[1] = m_staticInfo->maxThumbnailH; + shot->ctl.jpeg.thumbnailQuality = 100; + shot->ctl.jpeg.gpsCoordinates[0] = 0; + shot->ctl.jpeg.gpsCoordinates[1] = 0; + shot->ctl.jpeg.gpsCoordinates[2] = 0; + memset(&shot->ctl.jpeg.gpsProcessingMethod, 0x0, + sizeof(shot->ctl.jpeg.gpsProcessingMethod)); + shot->ctl.jpeg.gpsTimestamp = 0L; + shot->ctl.jpeg.orientation = 0L; + + /* stats */ + shot->ctl.stats.faceDetectMode = ::FACEDETECT_MODE_OFF; + shot->ctl.stats.histogramMode = ::STATS_MODE_OFF; + shot->ctl.stats.sharpnessMapMode = ::STATS_MODE_OFF; + + /* aa */ + shot->ctl.aa.captureIntent = ::AA_CAPTURE_INTENT_CUSTOM; + shot->ctl.aa.mode = ::AA_CONTROL_AUTO; + shot->ctl.aa.effectMode = ::AA_EFFECT_OFF; + shot->ctl.aa.sceneMode = ::AA_SCENE_MODE_FACE_PRIORITY; + shot->ctl.aa.videoStabilizationMode = + (enum aa_videostabilization_mode) VIDEO_STABILIZATION_MODE_OFF ; + + /* default metering is center */ + shot->ctl.aa.aeMode = ::AA_AEMODE_CENTER; + shot->ctl.aa.aeRegions[0] = 0; + shot->ctl.aa.aeRegions[1] = 0; + shot->ctl.aa.aeRegions[2] = 0; + shot->ctl.aa.aeRegions[3] = 0; + shot->ctl.aa.aeRegions[4] = 1000; + shot->ctl.aa.aeLock = ::AA_AE_LOCK_OFF; +#if defined(USE_SUBDIVIDED_EV) + shot->ctl.aa.aeExpCompensation = 0; /* 21 is middle */ +#else + shot->ctl.aa.aeExpCompensation = 5; /* 5 is middle */ +#endif + shot->ctl.aa.aeTargetFpsRange[0] = minFps; + shot->ctl.aa.aeTargetFpsRange[1] = maxFps; + + shot->ctl.aa.aeAntibandingMode = ::AA_AE_ANTIBANDING_AUTO; + shot->ctl.aa.vendor_aeflashMode = ::AA_FLASHMODE_OFF; + shot->ctl.aa.awbMode = ::AA_AWBMODE_WB_AUTO; + shot->ctl.aa.awbLock = ::AA_AWB_LOCK_OFF; + shot->ctl.aa.afMode = ::AA_AFMODE_OFF; + shot->ctl.aa.afRegions[0] = 0; + shot->ctl.aa.afRegions[1] = 0; + shot->ctl.aa.afRegions[2] = 0; + shot->ctl.aa.afRegions[3] = 0; + shot->ctl.aa.afRegions[4] = 1000; + shot->ctl.aa.afTrigger = (enum aa_af_trigger) AA_AF_TRIGGER_IDLE; + shot->ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot->ctl.aa.vendor_isoValue = 0; + + /* 2. dm */ + + /* 3. utrl */ +#ifdef USE_FW_ZOOMRATIO + m_metadata.shot.uctl.zoomRatio = 1.00f; +#else + m_metadata.shot.uctl.zoomRatio = 0; +#endif + + /* 4. udm */ + + /* 5. 
magicNumber */ + shot->magicNumber = SHOT_MAGIC_NUMBER; + + setMetaSetfile(&m_metadata, 0x0); + + /* user request */ + m_metadata.drc_bypass = 1; + m_metadata.dis_bypass = 1; + m_metadata.dnr_bypass = 1; + m_metadata.fd_bypass = 1; +} + +status_t ExynosCamera1Parameters::duplicateCtrlMetadata(void *buf) +{ + if (buf == NULL) { + CLOGE("ERR: buf is NULL"); + return BAD_VALUE; + } + + struct camera2_shot_ext *meta_shot_ext = (struct camera2_shot_ext *)buf; + memcpy(&meta_shot_ext->shot.ctl, &m_metadata.shot.ctl, sizeof(struct camera2_ctl)); + +#ifdef USE_FW_ZOOMRATIO + if(getCameraId() == CAMERA_ID_BACK) { + meta_shot_ext->shot.uctl.zoomRatio = m_metadata.shot.uctl.zoomRatio; + } +#endif + + return NO_ERROR; +} + +void ExynosCamera1Parameters::m_getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange) +{ + uint32_t currentSetfile = 0; + uint32_t stateReg = 0; + int flagYUVRange = YUV_FULL_RANGE; + + unsigned int minFps = 0; + unsigned int maxFps = 0; + getPreviewFpsRange(&minFps, &maxFps); + + if (getRecordingHint() == true) { + stateReg |= STATE_REG_RECORDINGHINT; + } + + if (m_isUHDRecordingMode() == true) + stateReg |= STATE_REG_UHD_RECORDING; + + if (getDualMode() == true) { + stateReg |= STATE_REG_DUAL_MODE; + if (getDualRecordingHint() == true) + stateReg |= STATE_REG_DUAL_RECORDINGHINT; + } else { + if (getRecordingHint() == true) + stateReg |= STATE_REG_RECORDINGHINT; + } + + if (flagReprocessing == true) + stateReg |= STATE_REG_FLAG_REPROCESSING; + + if ((stateReg & STATE_REG_RECORDINGHINT)|| + (stateReg & STATE_REG_UHD_RECORDING)|| + (stateReg & STATE_REG_DUAL_RECORDINGHINT)) { + if (flagReprocessing == false) { + flagYUVRange = YUV_LIMITED_RANGE; + } + } + + if (m_cameraId == CAMERA_ID_FRONT) { + int vtMode = getVtMode(); + + if (0 < vtMode) { + switch (vtMode) { + case 1: + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT1; + break; + case 2: + default: + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT2; + break; + } + } else if (getIntelligentMode() == 1) { + currentSetfile = ISS_SUB_SCENARIO_FRONT_SMART_STAY; + } else if (getShotMode() == SHOT_MODE_FRONT_PANORAMA) { + currentSetfile = ISS_SUB_SCENARIO_FRONT_PANORAMA; + } else { + switch(stateReg) { + case STATE_STILL_PREVIEW: + case STATE_UHD_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW; + break; + + case STATE_VIDEO: + case STATE_UHD_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_VIDEO; + break; + + case STATE_DUAL_STILL_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_DUAL_STILL; + break; + + case STATE_DUAL_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_DUAL_VIDEO; + break; + + default: + ALOGD("(%s)can't define senario of setfile.(0x%4x)",__func__, stateReg); + break; + } + } + } else { + switch(stateReg) { + case STATE_STILL_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW; + break; + + case STATE_STILL_CAPTURE: + case STATE_VIDEO_CAPTURE: + case STATE_DUAL_STILL_CAPTURE: + case STATE_DUAL_VIDEO_CAPTURE: + case STATE_UHD_PREVIEW_CAPTURE: + case STATE_UHD_VIDEO_CAPTURE: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE; + break; + + case STATE_VIDEO: + if (30 < minFps && 30 < maxFps) { + if (300 == minFps && 300 == maxFps) { + currentSetfile = ISS_SUB_SCENARIO_WVGA_300FPS; + } else if (60 == minFps && 60 == maxFps) { + currentSetfile = ISS_SUB_SCENARIO_FHD_60FPS; + } else { + currentSetfile = ISS_SUB_SCENARIO_VIDEO_HIGH_SPEED; + } + } else { + currentSetfile = ISS_SUB_SCENARIO_VIDEO; + } + break; + + case STATE_DUAL_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_DUAL_VIDEO; + break; + + case 
STATE_DUAL_STILL_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_DUAL_STILL; + break; + + case STATE_UHD_PREVIEW: + case STATE_UHD_VIDEO: +#if 1 /* HACK: enable when FW ready */ + currentSetfile = ISS_SUB_SCENARIO_UHD_30FPS; +#else + currentSetfile = ISS_SUB_SCENARIO_VIDEO; +#endif + break; + + default: + ALOGD("(%s)can't define senario of setfile.(0x%4x)",__func__, stateReg); + break; + } + } +#if 0 + ALOGD("(%s)[%d] : ===============================================================================",__func__, __LINE__); + ALOGD("(%s)[%d] : CurrentState(0x%4x)",__func__, __LINE__, stateReg); + ALOGD("(%s)[%d] : getRTHdr()(%d)",__func__, __LINE__, getRTHdr()); + ALOGD("(%s)[%d] : getRecordingHint()(%d)",__func__, __LINE__, getRecordingHint()); + ALOGD("(%s)[%d] : m_isUHDRecordingMode()(%d)",__func__, __LINE__, m_isUHDRecordingMode()); + ALOGD("(%s)[%d] : getDualMode()(%d)",__func__, __LINE__, getDualMode()); + ALOGD("(%s)[%d] : getDualRecordingHint()(%d)",__func__, __LINE__, getDualRecordingHint()); + ALOGD("(%s)[%d] : flagReprocessing(%d)",__func__, __LINE__, flagReprocessing); + ALOGD("(%s)[%d] : ===============================================================================",__func__, __LINE__); + ALOGD("(%s)[%d] : currentSetfile(%d)",__func__, __LINE__, currentSetfile); + ALOGD("(%s)[%d] : flagYUVRange(%d)",__func__, __LINE__, flagYUVRange); + ALOGD("(%s)[%d] : ===============================================================================",__func__, __LINE__); +#else + ALOGD("(%s)[%d] : CurrentState (0x%4x), currentSetfile(%d)",__func__, __LINE__, stateReg, currentSetfile); +#endif + +done: + *setfile = currentSetfile; + *yuvRange = flagYUVRange; +} + +int *ExynosCamera1Parameters::getHighSpeedSizeTable(int fpsMode) { + return getHighSpeedSizeTable(fpsMode); +} + +status_t ExynosCamera1Parameters::calcPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int previewW = 0, previewH = 0, previewFormat = 0; + int hwPreviewW = 0, hwPreviewH = 0, hwPreviewFormat = 0; + previewFormat = getPreviewFormat(); + hwPreviewFormat = getHwPreviewFormat(); + + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + getPreviewSize(&previewW, &previewH); + + ret = getCropRectAlign(hwPreviewW, hwPreviewH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + 0, 1); + + srcRect->x = cropX; + srcRect->y = cropY; + srcRect->w = cropW; + srcRect->h = cropH; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = hwPreviewFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = previewW; + dstRect->h = previewH; + dstRect->fullW = previewW; + dstRect->fullH = previewH; + dstRect->colorFormat = previewFormat; + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcRecordingGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + + int hwPreviewW = 0, hwPreviewH = 0, hwPreviewFormat = 0; + int videoW = 0, videoH = 0, videoFormat = 0; + float zoomRatio = getZoomRatio(0) / 1000; + + hwPreviewFormat = getHwPreviewFormat(); + videoFormat = getVideoFormat(); + + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + getVideoSize(&videoW, &videoH); + + if (SIZE_RATIO(hwPreviewW, hwPreviewH) == SIZE_RATIO(videoW, videoH)) { + cropW = hwPreviewW; + cropH = hwPreviewH; + } else { + ret = getCropRectAlign(hwPreviewW, hwPreviewH, + videoW, videoH, + 
&cropX, &cropY, + &cropW, &cropH, + 2, 2, + 0, zoomRatio); + } + + srcRect->x = cropX; + srcRect->y = cropY; + srcRect->w = cropW; + srcRect->h = cropH; + srcRect->fullW = hwPreviewW; + srcRect->fullH = hwPreviewH; + srcRect->colorFormat = hwPreviewFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = videoW; + dstRect->h = videoH; + dstRect->fullW = videoW; + dstRect->fullH = videoH; + dstRect->colorFormat = videoFormat; + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0; + int pictureW = 0, pictureH = 0; + int previewW = 0, previewH = 0; + int hwSensorMarginW = 0, hwSensorMarginH = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; +#if 0 + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + int pictureFormat = 0, hwPictureFormat = 0; +#endif + int zoomLevel = 0; + int maxZoomRatio = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = getZoomRatio(0) / 1000; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ +#if 0 + pictureFormat = getHwPictureFormat(); +#endif + zoomLevel = getZoomLevel(); + maxZoomRatio = getMaxZoomRatio() / 1000; + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + m_adjustSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + hwSensorW -= hwSensorMarginW; + hwSensorH -= hwSensorMarginH; + + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + int maxSensorW = 0, maxSensorH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + getMaxSensorSize(&maxSensorW, &maxSensorH); + + /* 1. Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwSensorW / (float) maxSensorW; + scaleRatioY = (float) hwSensorH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwSensorW; + cropRegionH = hwSensorH; + } + + /* 2. 
Calculate the real crop region with considering the target ratio */ + ret = getCropRectAlign(cropRegionW, cropRegionH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_BCROP_ALIGN, 2, + 0, 0.0f); + + cropX = ALIGN_DOWN((cropRegionX + cropX), 2); + cropY = ALIGN_DOWN((cropRegionY + cropY), 2); + } else { + ret = getCropRectAlign(hwSensorW, hwSensorH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_BCROP_ALIGN, 2, + zoomLevel, zoomRatio); + + cropX = ALIGN_DOWN(cropX, 2); + cropY = ALIGN_DOWN(cropY, 2); + cropW = ALIGN_UP(hwSensorW - (cropX * 2), CAMERA_BCROP_ALIGN); + cropH = hwSensorH - (cropY * 2); + } + + if (getUsePureBayerReprocessing() == false) { + int pictureCropX = 0, pictureCropY = 0; + int pictureCropW = 0, pictureCropH = 0; + + zoomLevel = 0; + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + ret = getCropRectAlign(cropW, cropH, + pictureW, pictureH, + &pictureCropX, &pictureCropY, + &pictureCropW, &pictureCropH, + CAMERA_BCROP_ALIGN, 2, + zoomLevel, zoomRatio); + + pictureCropX = ALIGN_DOWN(pictureCropX, 2); + pictureCropY = ALIGN_DOWN(pictureCropY, 2); + pictureCropW = cropW - (pictureCropX * 2); + pictureCropH = cropH - (pictureCropY * 2); + + if (pictureCropW < pictureW / maxZoomRatio || pictureCropH < pictureH / maxZoomRatio) { + ALOGW("WRN(%s[%d]): zoom ratio is upto x%d, crop(%dx%d), picture(%dx%d)", __FUNCTION__, __LINE__, maxZoomRatio, cropW, cropH, pictureW, pictureH); + float src_ratio = 1.0f; + float dst_ratio = 1.0f; + /* ex : 1024 / 768 */ + src_ratio = ROUND_OFF_HALF(((float)cropW / (float)cropH), 2); + /* ex : 352 / 288 */ + dst_ratio = ROUND_OFF_HALF(((float)pictureW / (float)pictureH), 2); + + if (dst_ratio <= src_ratio) { + /* shrink w */ + cropX = ALIGN_DOWN(((int)(hwSensorW - ((pictureH / maxZoomRatio) * src_ratio)) >> 1), 2); + cropY = ALIGN_DOWN(((hwSensorH - (pictureH / maxZoomRatio)) >> 1), 2); + } else { + /* shrink h */ + cropX = ALIGN_DOWN(((hwSensorW - (pictureW / maxZoomRatio)) >> 1), 2); + cropY = ALIGN_DOWN(((int)(hwSensorH - ((pictureW / maxZoomRatio) / src_ratio)) >> 1), 2); + } + cropW = ALIGN_UP(hwSensorW - (cropX * 2), CAMERA_BCROP_ALIGN); + cropH = hwSensorH - (cropY * 2); + } + } + +#if 0 + ALOGD("DEBUG(%s):hwSensorSize (%dx%d), previewSize (%dx%d)", + __FUNCTION__, hwSensorW, hwSensorH, previewW, previewH); + ALOGD("DEBUG(%s):hwPictureSize (%dx%d), pictureSize (%dx%d)", + __FUNCTION__, hwPictureW, hwPictureH, pictureW, pictureH); + ALOGD("DEBUG(%s):size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, cropX, cropY, cropW, cropH, zoomLevel); + ALOGD("DEBUG(%s):size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + ALOGD("DEBUG(%s):size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + __FUNCTION__, pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = hwSensorW; + srcRect->h = hwSensorH; + srcRect->fullW = hwSensorW; + srcRect->fullH = hwSensorH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = cropX; + dstRect->y = cropY; + dstRect->w = cropW; + dstRect->h = cropH; + dstRect->fullW = cropW; + dstRect->fullH = cropH; + dstRect->colorFormat = bayerFormat; + + m_setHwBayerCropRegion(dstRect->w, dstRect->h, dstRect->x, dstRect->y); + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPreviewDzoomCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int previewW = 0, previewH = 0; + int cropX = 0, cropY = 0; 
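/*
 * In the HAL 3.2 path above, the application's crop region (given in max-sensor /
 * active-array coordinates) is mapped into the bcrop input space by a plain linear
 * rescale before getCropRectAlign() fits the target aspect ratio. A worked example
 * with assumed sizes (the numbers are illustrative, not taken from this BSP):
 *
 *   maxSensor = 4128x3096, hwSensor after margin removal = 2064x1548
 *   scaleRatioX = 2064 / 4128 = 0.5, scaleRatioY = 1548 / 3096 = 0.5
 *
 *   requested crop region (x, y, w, h) = (412, 310, 3304, 2478)
 *   scaled crop region                 = (206, 155, 1652, 1239)
 *
 * The fitted offsets are then aligned down to even values (ALIGN_DOWN(..., 2)), and
 * a degenerate scaled region (w or h < 1) falls back to the full hwSensor size.
 */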
+ int cropW = 0, cropH = 0; + + int zoomLevel = 0; + int maxZoomRatio = 0; + float zoomRatio = getZoomRatio(0) / 1000; + + /* TODO: check state ready for start */ + zoomLevel = getZoomLevel(); + maxZoomRatio = getMaxZoomRatio() / 1000; + getHwPreviewSize(&previewW, &previewH); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + ret = getCropRectAlign(srcRect->w, srcRect->h, + previewW, previewH, + &srcRect->x, &srcRect->y, + &srcRect->w, &srcRect->h, + 2, 2, + zoomLevel, zoomRatio); + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = previewW; + dstRect->h = previewH; + + ALOGV("INFO(%s[%d]):SRC cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d ratio = %f", __FUNCTION__, __LINE__, srcRect->x, srcRect->y, srcRect->w, srcRect->h, zoomLevel, zoomRatio); + ALOGV("INFO(%s[%d]):DST cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d ratio = %f", __FUNCTION__, __LINE__, dstRect->x, dstRect->y, dstRect->w, dstRect->h, zoomLevel, zoomRatio); + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::getPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int hwBnsW = 0; + int hwBnsH = 0; + int hwBcropW = 0; + int hwBcropH = 0; + int zoomLevel = 0; + float zoomRatio = 1.00f; + int hwSensorMarginW = 0; + int hwSensorMarginH = 0; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false + || m_staticInfo->pictureSizeLut == NULL + || m_staticInfo->pictureSizeLutMax <= m_cameraInfo.pictureSizeRatioId + || m_cameraInfo.pictureSizeRatioId != m_cameraInfo.previewSizeRatioId) + return calcPictureBayerCropSize(srcRect, dstRect); + + /* use LUT */ + hwBnsW = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BNS_W]; + hwBnsH = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BNS_H]; + hwBcropW = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BCROP_W]; + hwBcropH = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BCROP_H]; + + if (getHalVersion() != IS_HAL_VER_3_2) { + zoomLevel = getZoomLevel(); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + +#if defined(SCALER_MAX_SCALE_UP_RATIO) + /* + * After dividing float & casting int, + * zoomed size can be smaller too much. + * so, when zoom until max, ceil up about floating point. + */ + if (ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN) * SCALER_MAX_SCALE_UP_RATIO < hwBcropW || + ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2) * SCALER_MAX_SCALE_UP_RATIO < hwBcropH) { + hwBcropW = ALIGN_UP((int)ceil((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)ceil((float)hwBcropH / zoomRatio), 2); + } else +#endif + { + hwBcropW = ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2); + } + } + + /* Re-calculate the BNS size for removing Sensor Margin. + On Capture Stream(3AA_M2M_Input), the BNS is not used. 
+ So, the BNS ratio is not needed to be considered for sensor margin */ + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + hwBnsW = hwBnsW - hwSensorMarginW; + hwBnsH = hwBnsH - hwSensorMarginH; + + /* src */ + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = hwBnsW; + srcRect->h = hwBnsH; + + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + int maxSensorW = 0, maxSensorH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + status_t ret = NO_ERROR; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + getMaxSensorSize(&maxSensorW, &maxSensorH); + + /* 1. Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwBnsW / (float) maxSensorW; + scaleRatioY = (float) hwBnsH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwBnsW; + cropRegionH = hwBnsH; + } + + /* 2. Calculate the real crop region with considering the target ratio */ + if ((cropRegionW > hwBcropW) && (cropRegionH > hwBcropH)) { + dstRect->x = ALIGN_DOWN((cropRegionX + ((cropRegionW - hwBcropW) >> 1)), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + ((cropRegionH - hwBcropH) >> 1)), 2); + dstRect->w = hwBcropW; + dstRect->h = hwBcropH; + } else { + ret = getCropRectAlign(cropRegionW, cropRegionH, + hwBcropW, hwBcropH, + &(dstRect->x), &(dstRect->y), + &(dstRect->w), &(dstRect->h), + CAMERA_MAGIC_ALIGN, 2, + 0, 0.0f); + dstRect->x = ALIGN_DOWN((cropRegionX + dstRect->x), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + dstRect->y), 2); + } + } else { + /* dst */ + if (hwBnsW > hwBcropW) { + dstRect->x = ALIGN_UP(((hwBnsW - hwBcropW) >> 1), 2); + dstRect->w = hwBcropW; + } else { + dstRect->x = 0; + dstRect->w = hwBnsW; + } + + if (hwBnsH > hwBcropH) { + dstRect->y = ALIGN_UP(((hwBnsH - hwBcropH) >> 1), 2); + dstRect->h = hwBcropH; + } else { + dstRect->y = 0; + dstRect->h = hwBnsH; + } + } + +#if DEBUG + ALOGD("DEBUG(%s):zoomRatio=%f", __FUNCTION__, zoomRatio); + ALOGD("DEBUG(%s):hwBnsSize (%dx%d), hwBcropSize (%d, %d)(%dx%d)", + __FUNCTION__, srcRect->w, srcRect->h, dstRect->x, dstRect->y, dstRect->w, dstRect->h); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int maxSensorW = 0, maxSensorH = 0; + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0, hwPictureFormat = 0; + int hwSensorCropX = 0, hwSensorCropY = 0; + int hwSensorCropW = 0, hwSensorCropH = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + int previewW = 0, previewH = 0; + int hwSensorMarginW = 0, hwSensorMarginH = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + float zoomRatio = 1.0f; + int maxZoomRatio = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ + pictureFormat = getHwPictureFormat(); + zoomLevel = getZoomLevel(); + maxZoomRatio = getMaxZoomRatio() / 1000; + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getMaxSensorSize(&maxSensorW, &maxSensorH); + 
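/*
 * Digital zoom in getPictureBayerCropSize() above shrinks the LUT bcrop window by
 * the zoom ratio and re-aligns it. A rough numeric sketch (the sizes and the value
 * of CAMERA_BCROP_ALIGN are assumptions, not taken from this BSP):
 *
 *   hwBcrop = 4128x3096, zoomRatio = 4.0
 *   width : 4128 / 4.0 = 1032 -> ALIGN_UP(1032, CAMERA_BCROP_ALIGN)
 *   height: 3096 / 4.0 =  774 -> ALIGN_UP(774, 2) = 774
 *
 * The ceil()-based branch guarded by SCALER_MAX_SCALE_UP_RATIO only applies when
 * integer truncation would leave a window the scaler cannot upscale back to the
 * original bcrop size, i.e. when shrunkenW * SCALER_MAX_SCALE_UP_RATIO < hwBcropW.
 */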
getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + hwSensorW -= hwSensorMarginW; + hwSensorH -= hwSensorMarginH; + + if (getUsePureBayerReprocessing() == true) { + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + + /* 1. Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwSensorW / (float) maxSensorW; + scaleRatioY = (float) hwSensorH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwSensorW; + cropRegionH = hwSensorH; + } + + ret = getCropRectAlign(cropRegionW, cropRegionH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + 0, 0.0f); + + cropX = ALIGN_DOWN((cropRegionX + cropX), 2); + cropY = ALIGN_DOWN((cropRegionY + cropY), 2); + } else { + ret = getCropRectAlign(hwSensorW, hwSensorH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + cropX = ALIGN_DOWN(cropX, 2); + cropY = ALIGN_DOWN(cropY, 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + } + + if (cropW < pictureW / maxZoomRatio || cropH < pictureH / maxZoomRatio) { + ALOGW("WRN(%s[%d]): zoom ratio is upto x%d, crop(%dx%d), picture(%dx%d)", __FUNCTION__, __LINE__, maxZoomRatio, cropW, cropH, pictureW, pictureH); + cropX = ALIGN_DOWN(((hwSensorW - (pictureW / maxZoomRatio)) >> 1), 2); + cropY = ALIGN_DOWN(((hwSensorH - (pictureH / maxZoomRatio)) >> 1), 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + } + } else { + zoomLevel = 0; + zoomRatio = getZoomRatio(zoomLevel) / 1000; + getHwBayerCropRegion(&hwSensorCropW, &hwSensorCropH, &hwSensorCropX, &hwSensorCropY); + + ret = getCropRectAlign(hwSensorCropW, hwSensorCropH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + cropX = ALIGN_DOWN(cropX, 2); + cropY = ALIGN_DOWN(cropY, 2); + cropW = hwSensorCropW - (cropX * 2); + cropH = hwSensorCropH - (cropY * 2); + + if (cropW < pictureW / maxZoomRatio || cropH < pictureH / maxZoomRatio) { + ALOGW("WRN(%s[%d]): zoom ratio is upto x%d, crop(%dx%d), picture(%dx%d)", __FUNCTION__, __LINE__, maxZoomRatio, cropW, cropH, pictureW, pictureH); + cropX = ALIGN_DOWN(((hwSensorCropW - (pictureW / maxZoomRatio)) >> 1), 2); + cropY = ALIGN_DOWN(((hwSensorCropH - (pictureH / maxZoomRatio)) >> 1), 2); + cropW = hwSensorCropW - (cropX * 2); + cropH = hwSensorCropH - (cropY * 2); + } + } + +#if 1 + ALOGD("DEBUG(%s):maxSensorSize (%dx%d), hwSensorSize (%dx%d), previewSize (%dx%d)", + __FUNCTION__, maxSensorW, maxSensorH, hwSensorW, hwSensorH, previewW, previewH); + ALOGD("DEBUG(%s):hwPictureSize (%dx%d), pictureSize (%dx%d)", + __FUNCTION__, hwPictureW, hwPictureH, pictureW, pictureH); + ALOGD("DEBUG(%s):size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, cropX, cropY, cropW, cropH, zoomLevel); + ALOGD("DEBUG(%s):size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, crop_crop_x, crop_crop_y, 
crop_crop_w, crop_crop_h, zoomLevel); + ALOGD("DEBUG(%s):size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + __FUNCTION__, pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = maxSensorW; + srcRect->h = maxSensorH; + srcRect->fullW = maxSensorW; + srcRect->fullH = maxSensorH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = cropX; + dstRect->y = cropY; + dstRect->w = cropW; + dstRect->h = cropH; + dstRect->fullW = cropW; + dstRect->fullH = cropH; + dstRect->colorFormat = bayerFormat; + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::m_getPreviewBdsSize(ExynosRect *dstRect) +{ + int hwBdsW = 0; + int hwBdsH = 0; + int sizeList[SIZE_LUT_INDEX_END]; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false + || m_staticInfo->previewSizeLut == NULL + || m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId + || m_getPreviewSizeList(sizeList) != NO_ERROR) { + ExynosRect rect; + return calcPreviewBDSSize(&rect, dstRect); + } + + /* use LUT */ + hwBdsW = sizeList[BDS_W]; + hwBdsH = sizeList[BDS_H]; + + if (getRecordingHint() == true) { + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + + if (m_cameraInfo.previewSizeRatioId != m_cameraInfo.videoSizeRatioId) + ALOGV("WARN(%s[%d]):preview ratioId(%d) != videoRatioId(%d), use previewRatioId", + __FUNCTION__, __LINE__, + m_cameraInfo.previewSizeRatioId, m_cameraInfo.videoSizeRatioId); + + if ((videoW == 3840 && videoH == 2160) || (videoW == 2560 && videoH == 1440)) { + hwBdsW = videoW; + hwBdsH = videoH; + } + } + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = hwBdsW; + dstRect->h = hwBdsH; + +#ifdef DEBUG_PERFRAME + ALOGD("DEBUG(%s):hwBdsSize (%dx%d)", __FUNCTION__, dstRect->w, dstRect->h); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPreviewBDSSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0; + int pictureW = 0, pictureH = 0; + int previewW = 0, previewH = 0; + ExynosRect bnsSize; + ExynosRect bayerCropSize; +#if 0 + int pictureFormat = 0, hwPictureFormat = 0; +#endif + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = 1.0f; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ +#if 0 + pictureFormat = getHwPictureFormat(); +#endif + zoomLevel = getZoomLevel(); + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(hwSensorW, hwSensorH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + zoomRatio = getZoomRatio(0) / 1000; + + ret = getCropRectAlign(cropW, cropH, + previewW, previewH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + cropX = ALIGN_UP(cropX, 2); + cropY = ALIGN_UP(cropY, 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + +// ALIGN_UP(crop_crop_x, 2); +// ALIGN_UP(crop_crop_y, 2); + +#if 0 + ALOGD("DEBUG(%s):hwSensorSize (%dx%d), previewSize (%dx%d)", + __FUNCTION__, hwSensorW, 
hwSensorH, previewW, previewH); + ALOGD("DEBUG(%s):hwPictureSize (%dx%d), pictureSize (%dx%d)", + __FUNCTION__, hwPictureW, hwPictureH, pictureW, pictureH); + ALOGD("DEBUG(%s):size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, cropX, cropY, cropW, cropH, zoomLevel); + ALOGD("DEBUG(%s):size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + ALOGD("DEBUG(%s):size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + __FUNCTION__, pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = cropX; + srcRect->y = cropY; + srcRect->w = cropW; + srcRect->h = cropH; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + /* For Front Single Scenario, BDS should not be used */ + if (m_cameraId == CAMERA_ID_FRONT && getDualMode() == false) { + getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + dstRect->w = bayerCropSize.w; + dstRect->h = bayerCropSize.h; + dstRect->fullW = bayerCropSize.w; + dstRect->fullH = bayerCropSize.h; + } else { + dstRect->w = previewW; + dstRect->h = previewH; + dstRect->fullW = previewW; + dstRect->fullH = previewH; + } + + if (dstRect->w > srcRect->w) + dstRect->w = srcRect->w; + if (dstRect->h > srcRect->h) + dstRect->h = srcRect->h; + + return NO_ERROR; +} + +status_t ExynosCamera1Parameters::calcPictureBDSSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int maxSensorW = 0, maxSensorH = 0; + int hwPictureW = 0, hwPictureH = 0; + int pictureW = 0, pictureH = 0; + int previewW = 0, previewH = 0; +#if 0 + int pictureFormat = 0, hwPictureFormat = 0; +#endif + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = 1.0f; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ +#if 0 + pictureFormat = getHwPictureFormat(); +#endif + zoomLevel = getZoomLevel(); + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getMaxSensorSize(&maxSensorW, &maxSensorH); + getPreviewSize(&previewW, &previewH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(maxSensorW, maxSensorH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + zoomRatio = getZoomRatio(0) / 1000; + ret = getCropRectAlign(cropW, cropH, + pictureW, pictureH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + cropX = ALIGN_UP(cropX, 2); + cropY = ALIGN_UP(cropY, 2); + cropW = maxSensorW - (cropX * 2); + cropH = maxSensorH - (cropY * 2); + +// ALIGN_UP(crop_crop_x, 2); +// ALIGN_UP(crop_crop_y, 2); + +#if 0 + ALOGD("DEBUG(%s):SensorSize (%dx%d), previewSize (%dx%d)", + __FUNCTION__, maxSensorW, maxSensorH, previewW, previewH); + ALOGD("DEBUG(%s):hwPictureSize (%dx%d), pictureSize (%dx%d)", + __FUNCTION__, hwPictureW, hwPictureH, pictureW, pictureH); + ALOGD("DEBUG(%s):size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, cropX, cropY, cropW, cropH, zoomLevel); + ALOGD("DEBUG(%s):size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + __FUNCTION__, crop_crop_x, 
crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + ALOGD("DEBUG(%s):size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + __FUNCTION__, pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = cropX; + srcRect->y = cropY; + srcRect->w = cropW; + srcRect->h = cropH; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = pictureW; + dstRect->h = pictureH; + dstRect->fullW = pictureW; + dstRect->fullH = pictureH; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + + if (dstRect->w > srcRect->w) + dstRect->w = srcRect->w; + if (dstRect->h > srcRect->h) + dstRect->h = srcRect->h; + + return NO_ERROR; +} + +bool ExynosCamera1Parameters::doCscRecording(void) +{ + bool ret = true; + int hwPreviewW = 0, hwPreviewH = 0; + int videoW = 0, videoH = 0; + + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + getVideoSize(&videoW, &videoH); + CLOGV("DEBUG(%s[%d]):hwPreviewSize = %d x %d", __FUNCTION__, __LINE__, hwPreviewW, hwPreviewH); + CLOGV("DEBUG(%s[%d]):videoSize = %d x %d", __FUNCTION__, __LINE__, videoW, videoH); + + if (((videoW == 3840 && videoH == 2160) || (videoW == 1920 && videoH == 1080) || (videoW == 2560 && videoH == 1440)) + && m_useAdaptiveCSCRecording == true + && videoW == hwPreviewW + && videoH == hwPreviewH) { + ret = false; + } + + return ret; +} + +bool ExynosCamera1Parameters::increaseMaxBufferOfPreview(void) +{ + if((getShotMode() == SHOT_MODE_BEAUTY_FACE)||(getShotMode() == SHOT_MODE_FRONT_PANORAMA) + ) { + return true; + } else { + return false; + } +} + +bool ExynosCamera1Parameters::getSupportedZoomPreviewWIthScaler(void) +{ + return true; +} + +#ifdef DEBUG_RAWDUMP +bool ExynosCamera1Parameters::checkBayerDumpEnable(void) +{ + char enableRawDump[PROPERTY_VALUE_MAX]; + property_get("ro.debug.rawdump", enableRawDump, "0"); + + if (strcmp(enableRawDump, "1") == 0) { + /*CLOGD("checkBayerDumpEnable : 1");*/ + return true; + } else { + /*CLOGD("checkBayerDumpEnable : 0");*/ + return false; + } +} +#endif /* DEBUG_RAWDUMP */ + +void ExynosCamera1Parameters::setIsThumbnailCallbackOn(bool enable) +{ + m_IsThumbnailCallbackOn = enable; +} + +bool ExynosCamera1Parameters::getIsThumbnailCallbackOn() +{ + return m_IsThumbnailCallbackOn; +} +}; /* namespace android */ diff --git a/libcamera/34xx/hal1/Ged/ExynosCameraVendor.cpp b/libcamera/34xx/hal1/Ged/ExynosCameraVendor.cpp new file mode 100644 index 0000000..69573b4 --- /dev/null +++ b/libcamera/34xx/hal1/Ged/ExynosCameraVendor.cpp @@ -0,0 +1,7870 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraGed" +#include + +#include "ExynosCamera.h" + +namespace android { + +void ExynosCamera::vendorSpecificConstructor(__unused int cameraId, __unused camera_device_t *dev) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + +#ifdef RAWDUMP_CAPTURE + /* RawCaptureDump Thread */ + m_RawCaptureDumpThread = new mainCameraThread(this, &ExynosCamera::m_RawCaptureDumpThreadFunc, "m_RawCaptureDumpThread"); + CLOGD("DEBUG(%s):RawCaptureDumpThread created", __FUNCTION__); +#endif + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return; +} + +void ExynosCamera::vendorSpecificDestructor(void) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return; +} + +status_t ExynosCamera::startRecording() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return INVALID_OPERATION; + } + } + + int ret = 0; + ExynosCameraActivityAutofocus *autoFocusMgr = m_exynosCameraActivityControl->getAutoFocusMgr(); + ExynosCameraActivityFlash *flashMgr = m_exynosCameraActivityControl->getFlashMgr(); + + if (m_getRecordingEnabled() == true) { + CLOGW("WARN(%s[%d]):m_recordingEnabled equals true", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + +#ifdef USE_FD_AE + if (m_parameters != NULL) { + if (m_parameters->getFaceDetectionMode() == false) { + m_startFaceDetection(false); + } else { + /* stay current fd mode */ + } + } else { + CLOGW("(%s[%d]):m_parameters is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } +#endif + + + /* Do start recording process */ + ret = m_startRecordingInternal(); + if (ret < 0) { + CLOGE("ERR"); + return ret; + } + + m_lastRecordingTimeStamp = 0; + m_recordingStartTimeStamp = 0; + m_recordingFrameSkipCount = 0; + + m_setRecordingEnabled(true); + m_parameters->setRecordingRunning(true); + + autoFocusMgr->setRecordingHint(true); + flashMgr->setRecordingHint(true); + +func_exit: + /* wait for initial preview skip */ + if (m_parameters != NULL) { + int retry = 0; + while (m_parameters->getFrameSkipCount() > 0 && retry < 3) { + retry++; + usleep(33000); + CLOGI("INFO(%s[%d]): -OUT- (frameSkipCount:%d) (retry:%d)", __FUNCTION__, __LINE__, m_frameSkipCount, retry); + } + } + + return NO_ERROR; +} + +void ExynosCamera::stopRecording() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return; + } + } + + int ret = 0; + ExynosCameraActivityAutofocus *autoFocusMgr = m_exynosCameraActivityControl->getAutoFocusMgr(); + ExynosCameraActivityFlash *flashMgr = m_exynosCameraActivityControl->getFlashMgr(); + m_skipCount = 0; + + if (m_getRecordingEnabled() == false) { + return; + } + m_setRecordingEnabled(false); + m_parameters->setRecordingRunning(false); + /* Do stop recording process */ + + ret = m_stopRecordingInternal(); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_stopRecordingInternal fail", __FUNCTION__, __LINE__); + +#ifdef USE_FD_AE + if (m_parameters != NULL) { + m_startFaceDetection(m_parameters->getFaceDetectionMode(false)); + } +#endif + + 
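/*
 * startRecording() above finishes by waiting out the initial preview frame skip so
 * the first recording frames are not taken from frames the preview path is still
 * discarding. The pattern is a simple bounded poll (mirrored from the code above;
 * the standalone form is illustrative):
 *
 *   int retry = 0;
 *   while (m_parameters->getFrameSkipCount() > 0 && retry < 3) {
 *       retry++;
 *       usleep(33000);   // roughly one frame interval at 30 fps
 *   }
 */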
autoFocusMgr->setRecordingHint(false); + flashMgr->setRecordingHint(false); + flashMgr->setNeedFlashOffDelay(false); +} + +status_t ExynosCamera::setParameters(const CameraParameters& params) +{ + status_t ret = NO_ERROR; + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if(m_parameters == NULL) + return INVALID_OPERATION; + +#ifdef SCALABLE_ON + m_parameters->setScalableSensorMode(true); +#else + m_parameters->setScalableSensorMode(false); +#endif + + ret = m_parameters->setParameters(params); +#if 1 + /* HACK Reset Preview Flag*/ + if (m_parameters->getRestartPreview() == true && m_previewEnabled == true) { + m_resetPreview = true; + ret = m_restartPreviewInternal(); + m_resetPreview = false; + CLOGI("INFO(%s[%d]) m_resetPreview(%d)", __FUNCTION__, __LINE__, m_resetPreview); + + if (ret < 0) + CLOGE("(%s[%d]): restart preview internal fail", __FUNCTION__, __LINE__); + } +#endif + return ret; + +} + +status_t ExynosCamera::m_doFdCallbackFunc(ExynosCameraFrame *frame) +{ + ExynosCameraDurationTimer m_fdcallbackTimer; + long long m_fdcallbackTimerTime; + + struct camera2_shot_ext *meta_shot_ext = NULL; + meta_shot_ext = new struct camera2_shot_ext; + if (meta_shot_ext == NULL) { + CLOGE("ERR(%s[%d]) meta_shot_ext is null", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + memset(meta_shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + if (frame->getDynamicMeta(meta_shot_ext) != NO_ERROR) { + CLOGE("ERR(%s[%d]) meta_shot_ext is null", __FUNCTION__, __LINE__); + if (meta_shot_ext) { + delete meta_shot_ext; + meta_shot_ext = NULL; + } + return INVALID_OPERATION; + } + + if (m_flagStartFaceDetection == true) { + int id[NUM_OF_DETECTED_FACES]; + int score[NUM_OF_DETECTED_FACES]; + ExynosRect2 detectedFace[NUM_OF_DETECTED_FACES]; + ExynosRect2 detectedLeftEye[NUM_OF_DETECTED_FACES]; + ExynosRect2 detectedRightEye[NUM_OF_DETECTED_FACES]; + ExynosRect2 detectedMouth[NUM_OF_DETECTED_FACES]; + int numOfDetectedFaces = 0; + int num = 0; + struct camera2_dm *dm = NULL; + int previewW, previewH; + + memset(&id, 0x00, sizeof(int) * NUM_OF_DETECTED_FACES); + memset(&score, 0x00, sizeof(int) * NUM_OF_DETECTED_FACES); + + m_parameters->getHwPreviewSize(&previewW, &previewH); + + camera2_node_group node_group_info; + frame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_3AA); + + dm = &(meta_shot_ext->shot.dm); + if (dm == NULL) { + CLOGE("ERR(%s[%d]) dm data is null", __FUNCTION__, __LINE__); + delete meta_shot_ext; + meta_shot_ext = NULL; + return INVALID_OPERATION; + } + + CLOGV("DEBUG(%s[%d]) faceDetectMode(%d)", __FUNCTION__, __LINE__, dm->stats.faceDetectMode); + CLOGV("[%d %d]", dm->stats.faceRectangles[0][0], dm->stats.faceRectangles[0][1]); + CLOGV("[%d %d]", dm->stats.faceRectangles[0][2], dm->stats.faceRectangles[0][3]); + + num = NUM_OF_DETECTED_FACES; + if (getMaxNumDetectedFaces() < num) + num = getMaxNumDetectedFaces(); + + switch (dm->stats.faceDetectMode) { + case FACEDETECT_MODE_SIMPLE: + case FACEDETECT_MODE_FULL: + break; + case FACEDETECT_MODE_OFF: + default: + num = 0; + break; + } + + for (int i = 0; i < num; i++) { + if (dm->stats.faceIds[i]) { + switch (dm->stats.faceDetectMode) { + case FACEDETECT_MODE_FULL: + id[i] = dm->stats.faceIds[i]; + score[i] = dm->stats.faceScores[i]; + + detectedLeftEye[i].x1 + = detectedLeftEye[i].y1 + = detectedLeftEye[i].x2 + = detectedLeftEye[i].y2 = -1; + + detectedRightEye[i].x1 + = detectedRightEye[i].y1 + = detectedRightEye[i].x2 + = detectedRightEye[i].y2 = -1; + + detectedMouth[i].x1 + = detectedMouth[i].y1 + = detectedMouth[i].x2 
+ = detectedMouth[i].y2 = -1; + case FACEDETECT_MODE_SIMPLE: + if (m_parameters->isMcscVraOtf() == false) { + int vraWidth = 0, vraHeight = 0; + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + detectedFace[i].x1 = m_calibratePosition(vraWidth, previewW, dm->stats.faceRectangles[i][0]); + detectedFace[i].y1 = m_calibratePosition(vraHeight, previewH, dm->stats.faceRectangles[i][1]); + detectedFace[i].x2 = m_calibratePosition(vraWidth, previewW, dm->stats.faceRectangles[i][2]); + detectedFace[i].y2 = m_calibratePosition(vraHeight, previewH, dm->stats.faceRectangles[i][3]); + } else if ((int)(node_group_info.leader.output.cropRegion[2]) < previewW + || (int)(node_group_info.leader.output.cropRegion[3]) < previewH) { + detectedFace[i].x1 = m_calibratePosition(node_group_info.leader.output.cropRegion[2], previewW, dm->stats.faceRectangles[i][0]); + detectedFace[i].y1 = m_calibratePosition(node_group_info.leader.output.cropRegion[3], previewH, dm->stats.faceRectangles[i][1]); + detectedFace[i].x2 = m_calibratePosition(node_group_info.leader.output.cropRegion[2], previewW, dm->stats.faceRectangles[i][2]); + detectedFace[i].y2 = m_calibratePosition(node_group_info.leader.output.cropRegion[3], previewH, dm->stats.faceRectangles[i][3]); + } else { + detectedFace[i].x1 = dm->stats.faceRectangles[i][0]; + detectedFace[i].y1 = dm->stats.faceRectangles[i][1]; + detectedFace[i].x2 = dm->stats.faceRectangles[i][2]; + detectedFace[i].y2 = dm->stats.faceRectangles[i][3]; + } + numOfDetectedFaces++; + break; + default: + break; + } + } + } + + if (0 < numOfDetectedFaces) { + /* + * camera.h + * width : -1000~1000 + * height : -1000~1000 + * if eye, mouth is not detectable : -2000, -2000. + */ + memset(m_faces, 0, sizeof(camera_face_t) * NUM_OF_DETECTED_FACES); + + int realNumOfDetectedFaces = 0; + + for (int i = 0; i < numOfDetectedFaces; i++) { + /* + * over 50s, we will catch + * if (score[i] < 50) + * continue; + */ + m_faces[realNumOfDetectedFaces].rect[0] = m_calibratePosition(previewW, 2000, detectedFace[i].x1) - 1000; + m_faces[realNumOfDetectedFaces].rect[1] = m_calibratePosition(previewH, 2000, detectedFace[i].y1) - 1000; + m_faces[realNumOfDetectedFaces].rect[2] = m_calibratePosition(previewW, 2000, detectedFace[i].x2) - 1000; + m_faces[realNumOfDetectedFaces].rect[3] = m_calibratePosition(previewH, 2000, detectedFace[i].y2) - 1000; + + m_faces[realNumOfDetectedFaces].id = id[i]; + m_faces[realNumOfDetectedFaces].score = score[i] > 100 ? 100 : score[i]; + + m_faces[realNumOfDetectedFaces].left_eye[0] = (detectedLeftEye[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedLeftEye[i].x1) - 1000; + m_faces[realNumOfDetectedFaces].left_eye[1] = (detectedLeftEye[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedLeftEye[i].y1) - 1000; + + m_faces[realNumOfDetectedFaces].right_eye[0] = (detectedRightEye[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedRightEye[i].x1) - 1000; + m_faces[realNumOfDetectedFaces].right_eye[1] = (detectedRightEye[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedRightEye[i].y1) - 1000; + + m_faces[realNumOfDetectedFaces].mouth[0] = (detectedMouth[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedMouth[i].x1) - 1000; + m_faces[realNumOfDetectedFaces].mouth[1] = (detectedMouth[i].y1 < 0) ? 
-2000 : m_calibratePosition(previewH, 2000, detectedMouth[i].y1) - 1000; + + CLOGV("face position(cal:%d,%d %dx%d)(org:%d,%d %dx%d), id(%d), score(%d)", + m_faces[realNumOfDetectedFaces].rect[0], m_faces[realNumOfDetectedFaces].rect[1], + m_faces[realNumOfDetectedFaces].rect[2], m_faces[realNumOfDetectedFaces].rect[3], + detectedFace[i].x1, detectedFace[i].y1, + detectedFace[i].x2, detectedFace[i].y2, + m_faces[realNumOfDetectedFaces].id, + m_faces[realNumOfDetectedFaces].score); + + CLOGV("DEBUG(%s[%d]): left eye(%d,%d), right eye(%d,%d), mouth(%dx%d), num of faces(%d)", + __FUNCTION__, __LINE__, + m_faces[realNumOfDetectedFaces].left_eye[0], + m_faces[realNumOfDetectedFaces].left_eye[1], + m_faces[realNumOfDetectedFaces].right_eye[0], + m_faces[realNumOfDetectedFaces].right_eye[1], + m_faces[realNumOfDetectedFaces].mouth[0], + m_faces[realNumOfDetectedFaces].mouth[1], + realNumOfDetectedFaces + ); + + realNumOfDetectedFaces++; + } + + m_frameMetadata.number_of_faces = realNumOfDetectedFaces; + m_frameMetadata.faces = m_faces; + + m_faceDetected = true; + m_fdThreshold = 0; + } else { + if (m_faceDetected == true && m_fdThreshold < NUM_OF_DETECTED_FACES_THRESHOLD) { + /* tolerate an unexpected face detection drop for a few frames */ + m_fdThreshold++; + } else { + if (0 < m_frameMetadata.number_of_faces) + memset(m_faces, 0, sizeof(camera_face_t) * NUM_OF_DETECTED_FACES); + + m_frameMetadata.number_of_faces = 0; + m_frameMetadata.faces = m_faces; + m_fdThreshold = 0; + m_faceDetected = false; + } + } + } else { + if (0 < m_frameMetadata.number_of_faces) + memset(m_faces, 0, sizeof(camera_face_t) * NUM_OF_DETECTED_FACES); + + m_frameMetadata.number_of_faces = 0; + m_frameMetadata.faces = m_faces; + + m_fdThreshold = 0; + + m_faceDetected = false; + } + +#ifdef TOUCH_AE + if(((m_parameters->getMeteringMode() >= METERING_MODE_CENTER_TOUCH) && (m_parameters->getMeteringMode() <= METERING_MODE_AVERAGE_TOUCH)) + && ((meta_shot_ext->shot.dm.aa.aeState == AE_STATE_CONVERGED) || (meta_shot_ext->shot.dm.aa.aeState == AE_STATE_FLASH_REQUIRED))) { + m_notifyCb(AE_RESULT, 0, 0, m_callbackCookie); + CLOGV("INFO(%s[%d]): AE_RESULT(%d)", __FUNCTION__, __LINE__, meta_shot_ext->shot.dm.aa.aeState); + } +#endif + + delete meta_shot_ext; + meta_shot_ext = NULL; + + m_fdcallbackTimer.start(); + + if (m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) && + (m_flagStartFaceDetection || m_flagLLSStart || m_flagLightCondition)) + { + setBit(&m_callbackState, CALLBACK_STATE_PREVIEW_META, false); + m_dataCb(CAMERA_MSG_PREVIEW_METADATA, m_fdCallbackHeap, 0, &m_frameMetadata, m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_PREVIEW_META, false); + } + m_fdcallbackTimer.stop(); + m_fdcallbackTimerTime = m_fdcallbackTimer.durationUsecs(); + + if((int)m_fdcallbackTimerTime / 1000 > 50) { + CLOGD("INFO(%s[%d]): FD callback duration time : (%d)msec", __FUNCTION__, __LINE__, (int)m_fdcallbackTimerTime / 1000); + } + + return NO_ERROR; +} + +status_t ExynosCamera::sendCommand(int32_t command, int32_t arg1, __unused int32_t arg2) +{ + ExynosCameraActivityUCTL *uctlMgr = NULL; + CLOGV("INFO(%s[%d])", __FUNCTION__, __LINE__); + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode is not supported", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return INVALID_OPERATION; + } + } else { + CLOGE("ERR(%s):m_parameters is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + /* TODO : implement based on the
command */ + switch(command) { + case CAMERA_CMD_START_FACE_DETECTION: + case CAMERA_CMD_STOP_FACE_DETECTION: + if (getMaxNumDetectedFaces() == 0) { + CLOGE("ERR(%s):getMaxNumDetectedFaces == 0", __FUNCTION__); + return BAD_VALUE; + } + + if (arg1 == CAMERA_FACE_DETECTION_SW) { + CLOGD("DEBUG(%s):only support HW face detection", __FUNCTION__); + return BAD_VALUE; + } + + if (command == CAMERA_CMD_START_FACE_DETECTION) { + CLOGD("DEBUG(%s):CAMERA_CMD_START_FACE_DETECTION is called!", __FUNCTION__); + if (m_flagStartFaceDetection == false + && startFaceDetection() == false) { + CLOGE("ERR(%s):startFaceDetection() fail", __FUNCTION__); + return BAD_VALUE; + } + } else { + CLOGD("DEBUG(%s):CAMERA_CMD_STOP_FACE_DETECTION is called!", __FUNCTION__); + if ( m_flagStartFaceDetection == true + && stopFaceDetection() == false) { + CLOGE("ERR(%s):stopFaceDetection() fail", __FUNCTION__); + return BAD_VALUE; + } + } + break; + case 1351: /*CAMERA_CMD_AUTO_LOW_LIGHT_SET */ + CLOGD("DEBUG(%s):CAMERA_CMD_AUTO_LOW_LIGHT_SET is called!%d", __FUNCTION__, arg1); + if(arg1) { + if( m_flagLLSStart != true) { + m_flagLLSStart = true; + } + } else { + m_flagLLSStart = false; + } + break; + case 1801: /* HAL_ENABLE_LIGHT_CONDITION*/ + CLOGD("DEBUG(%s):HAL_ENABLE_LIGHT_CONDITION is called!%d", __FUNCTION__, arg1); + if(arg1) { + m_flagLightCondition = true; + } else { + m_flagLightCondition = false; + } + break; + /* 1510: CAMERA_CMD_SET_FLIP */ + case 1510 : + CLOGD("DEBUG(%s):CAMERA_CMD_SET_FLIP is called!%d", __FUNCTION__, arg1); + m_parameters->setFlipHorizontal(arg1); + break; + /* 1521: CAMERA_CMD_DEVICE_ORIENTATION */ + case 1521: + CLOGD("DEBUG(%s):CAMERA_CMD_DEVICE_ORIENTATION is called!%d", __FUNCTION__, arg1); + m_parameters->setDeviceOrientation(arg1); + uctlMgr = m_exynosCameraActivityControl->getUCTLMgr(); + if (uctlMgr != NULL) + uctlMgr->setDeviceRotation(m_parameters->getFdOrientation()); + break; + /*1641: CAMERA_CMD_ADVANCED_MACRO_FOCUS*/ + case 1641: + CLOGD("DEBUG(%s):CAMERA_CMD_ADVANCED_MACRO_FOCUS is called!%d", __FUNCTION__, arg1); + m_parameters->setAutoFocusMacroPosition(ExynosCameraActivityAutofocus::AUTOFOCUS_MACRO_POSITION_CENTER); + break; + /*1642: CAMERA_CMD_FOCUS_LOCATION*/ + case 1642: + CLOGD("DEBUG(%s):CAMERA_CMD_FOCUS_LOCATION is called!%d", __FUNCTION__, arg1); + m_parameters->setAutoFocusMacroPosition(ExynosCameraActivityAutofocus::AUTOFOCUS_MACRO_POSITION_CENTER_UP); + break; + /*1661: CAMERA_CMD_START_ZOOM */ + case 1661: + CLOGD("DEBUG(%s):CAMERA_CMD_START_ZOOM is called!", __FUNCTION__); + m_parameters->setZoomActiveOn(true); + m_parameters->setFocusModeLock(true); + break; + /*1662: CAMERA_CMD_STOP_ZOOM */ + case 1662: + CLOGD("DEBUG(%s):CAMERA_CMD_STOP_ZOOM is called!", __FUNCTION__); + m_parameters->setZoomActiveOn(false); + m_parameters->setFocusModeLock(false); + break; + default: + CLOGV("DEBUG(%s):unexpected command(%d)", __FUNCTION__, command); + break; + } + + return NO_ERROR; +} + +status_t ExynosCamera::m_startRecordingInternal(void) +{ + int ret = 0; + + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int videoW = 0, videoH = 0; + int planeCount = 1; + int minBufferCount = 1; + int maxBufferCount = 1; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_SILENT; + int heapFd = 0; + + m_parameters->getVideoSize(&videoW, &videoH); + CLOGD("DEBUG(%s[%d]):videoSize = %d x %d",
__FUNCTION__, __LINE__, videoW, videoH); + + m_doCscRecording = true; + if (m_parameters->doCscRecording() == true) { + m_recordingBufferCount = m_exynosconfig->current->bufInfo.num_recording_buffers; + CLOGI("INFO(%s[%d]):do Recording CSC !!! m_recordingBufferCount(%d)", __FUNCTION__, __LINE__, m_recordingBufferCount); + } else { + m_doCscRecording = false; + m_recordingBufferCount = m_exynosconfig->current->bufInfo.num_preview_buffers; + CLOGI("INFO(%s[%d]):skip Recording CSC !!! m_recordingBufferCount(%d->%d)", + __FUNCTION__, __LINE__, m_exynosconfig->current->bufInfo.num_recording_buffers, m_recordingBufferCount); + } + + /* clear previous recording frame */ + CLOGD("DEBUG(%s[%d]):Recording m_recordingProcessList(%d) IN", + __FUNCTION__, __LINE__, m_recordingProcessList.size()); + m_recordingListLock.lock(); + ret = m_clearList(&m_recordingProcessList); + if (ret < 0) { + CLOGE("ERR(%s):m_clearList fail", __FUNCTION__); + } + m_recordingListLock.unlock(); + CLOGD("DEBUG(%s[%d]):Recording m_recordingProcessList(%d) OUT", + __FUNCTION__, __LINE__, m_recordingProcessList.size()); + + for (int32_t i = 0; i < m_recordingBufferCount; i++) { + m_recordingTimeStamp[i] = 0L; + m_recordingBufAvailable[i] = true; + } + + /* alloc recording Callback Heap */ + m_recordingCallbackHeap = m_getMemoryCb(-1, sizeof(struct addrs), m_recordingBufferCount, &heapFd); + if (!m_recordingCallbackHeap || m_recordingCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%zd) fail", __FUNCTION__, __LINE__, sizeof(struct addrs)); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_doCscRecording == true) { + /* alloc recording Image buffer */ + planeSize[0] = ROUND_UP(videoW, CAMERA_MAGIC_ALIGN) * ROUND_UP(videoH, CAMERA_MAGIC_ALIGN) + MFC_7X_BUFFER_OFFSET; + planeSize[1] = ROUND_UP(videoW, CAMERA_MAGIC_ALIGN) * ROUND_UP(videoH / 2, CAMERA_MAGIC_ALIGN) + MFC_7X_BUFFER_OFFSET; + planeCount = 2; + if( m_parameters->getHighSpeedRecording() == true) + minBufferCount = m_recordingBufferCount; + else + minBufferCount = 1; + + maxBufferCount = m_recordingBufferCount; + + ret = m_allocBuffers(m_recordingBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, false, true); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_recordingBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", + __FUNCTION__, __LINE__, minBufferCount, maxBufferCount); + } + } + + if (m_doCscRecording == true) { + int recPipeId = PIPE_GSC_VIDEO; + + m_previewFrameFactory->startThread(recPipeId); + + if (m_recordingQ->getSizeOfProcessQ() > 0) { + CLOGE("ERR(%s[%d]):m_startRecordingInternal recordingQ(%d)", __FUNCTION__, __LINE__, m_recordingQ->getSizeOfProcessQ()); + m_clearList(m_recordingQ); + } + + m_recordingThread->run(); + } + +func_exit: + + return ret; +} + +status_t ExynosCamera::m_stopRecordingInternal(void) +{ + int ret = 0; + if (m_doCscRecording == true) { + int recPipeId = PIPE_GSC_VIDEO; + + { + Mutex::Autolock _l(m_recordingStopLock); + m_previewFrameFactory->stopPipe(recPipeId); + } + + m_recordingQ->sendCmd(WAKE_UP); + + m_recordingThread->requestExitAndWait(); + m_recordingQ->release(); + + m_recordingBufferMgr->deinit(); + } else { + CLOGI("INFO(%s[%d]):reset m_recordingBufferCount(%d->%d)", + __FUNCTION__, __LINE__, m_recordingBufferCount, m_exynosconfig->current->bufInfo.num_recording_buffers); + m_recordingBufferCount = m_exynosconfig->current->bufInfo.num_recording_buffers; + } + + /* Checking all frame(buffer) released from Media recorder */ + int 
sleepUsecs = 33300; + int retryCount = 3; /* 33.3ms*3 = 100ms */ + bool allBufferReleased = true; + + for (int i = 0; i < retryCount; i++) { + allBufferReleased = true; + + for (int bufferIndex = 0; bufferIndex < m_recordingBufferCount; bufferIndex++) { + if (m_recordingBufAvailable[bufferIndex] == false) { + CLOGW("WRN(%s[%d]):Media recorder doesn't release frame(buffer), index(%d)", + __FUNCTION__, __LINE__, bufferIndex); + allBufferReleased = false; + } + } + + if(allBufferReleased == true) { + break; + } + + usleep(sleepUsecs); + } + + if (allBufferReleased == false) { + CLOGE("ERR(%s[%d]):Media recorder doesn't release frame(buffer) all!!", __FUNCTION__, __LINE__); + } + + if (m_recordingCallbackHeap != NULL) { + m_recordingCallbackHeap->release(m_recordingCallbackHeap); + m_recordingCallbackHeap = NULL; + } + + return NO_ERROR; +} + +bool ExynosCamera::m_shutterCallbackThreadFunc(void) +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + int loop = false; + + if (m_parameters->msgTypeEnabled(CAMERA_MSG_SHUTTER)) { + CLOGI("INFO(%s[%d]): CAMERA_MSG_SHUTTER callback S", __FUNCTION__, __LINE__); + + m_notifyCb(CAMERA_MSG_SHUTTER, 0, 0, m_callbackCookie); + + CLOGI("INFO(%s[%d]): CAMERA_MSG_SHUTTER callback E", __FUNCTION__, __LINE__); + } + + /* one shot */ + return loop; +} + +bool ExynosCamera::m_recordingThreadFunc(void) +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + int pipeId = PIPE_GSC_VIDEO; + nsecs_t timeStamp = 0; + + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + + CLOGV("INFO(%s[%d]):wait gsc done output", __FUNCTION__, __LINE__); + ret = m_recordingQ->waitAndPopProcessQ(&frame); + if (m_getRecordingEnabled() == false) { + CLOGI("INFO(%s[%d]):recording stopped", __FUNCTION__, __LINE__); + goto func_exit; + } + + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + CLOGV("INFO(%s[%d]):gsc done for recording callback", __FUNCTION__, __LINE__); + + ret = frame->getDstBuffer(pipeId, &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto func_exit; + } + + if (buffer.index < 0 || buffer.index >= (int)m_recordingBufferCount) { + CLOGE("ERR(%s[%d]):Out of Index! 
(Max: %d, Index: %d)", __FUNCTION__, __LINE__, m_recordingBufferCount, buffer.index); + goto func_exit; + } + + timeStamp = m_recordingTimeStamp[buffer.index]; + + if (m_recordingStartTimeStamp == 0) { + m_recordingStartTimeStamp = timeStamp; + CLOGI("INFO(%s[%d]):m_recordingStartTimeStamp=%lld", + __FUNCTION__, __LINE__, m_recordingStartTimeStamp); + } + + if ((0L < timeStamp) + && (m_lastRecordingTimeStamp < timeStamp) + && (m_recordingStartTimeStamp <= timeStamp)) { + if (m_getRecordingEnabled() == true + && m_parameters->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME)) { +#ifdef CHECK_MONOTONIC_TIMESTAMP + CLOGD("DEBUG(%s[%d]):m_dataCbTimestamp::recordingFrameIndex=%d, recordingTimeStamp=%lld", + __FUNCTION__, __LINE__, buffer.index, timeStamp); +#endif +#ifdef DEBUG + CLOGD("DEBUG(%s[%d]): - lastTimeStamp(%lld), systemTime(%lld), recordingStart(%lld)", + __FUNCTION__, __LINE__, + m_lastRecordingTimeStamp, + systemTime(SYSTEM_TIME_MONOTONIC), + m_recordingStartTimeStamp); +#endif + struct addrs *recordAddrs = NULL; + + recordAddrs = (struct addrs *)m_recordingCallbackHeap->data; + recordAddrs[buffer.index].type = kMetadataBufferTypeCameraSource; + recordAddrs[buffer.index].fdPlaneY = (unsigned int)buffer.fd[0]; + recordAddrs[buffer.index].fdPlaneCbcr = (unsigned int)buffer.fd[1]; + recordAddrs[buffer.index].bufIndex = buffer.index; + + m_recordingBufAvailable[buffer.index] = false; + + m_dataCbTimestamp( + timeStamp, + CAMERA_MSG_VIDEO_FRAME, + m_recordingCallbackHeap, + buffer.index, + m_callbackCookie); + m_lastRecordingTimeStamp = timeStamp; + } + } else { + CLOGW("WARN(%s[%d]):recordingFrameIndex=%d, timeStamp(%lld) invalid -" + " lastTimeStamp(%lld), systemTime(%lld), recordingStart(%lld)", + __FUNCTION__, __LINE__, buffer.index, timeStamp, + m_lastRecordingTimeStamp, + systemTime(SYSTEM_TIME_MONOTONIC), + m_recordingStartTimeStamp); + m_releaseRecordingBuffer(buffer.index); + } + +func_exit: + + m_recordingListLock.lock(); + if (frame != NULL) { + ret = m_removeFrameFromList(&m_recordingProcessList, frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + frame->decRef(); + m_frameMgr->deleteFrame(frame);; + frame = NULL; + } + m_recordingListLock.unlock(); + + return m_recordingEnabled; +} + +bool ExynosCamera::m_releasebuffersForRealloc() +{ + status_t ret = NO_ERROR; + /* skip to free and reallocate buffers : flite / 3aa / isp / ispReprocessing */ + CLOGD("DEBUG(%s[%d]):m_setBuffers free all buffers", __FUNCTION__, __LINE__); + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->deinit(); + } + if (m_3aaBufferMgr != NULL) { + m_3aaBufferMgr->deinit(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->deinit(); + } + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->deinit(); + } + if (m_vraBufferMgr != NULL) { + m_vraBufferMgr->deinit(); + } + + /* realloc callback buffers */ + if (m_scpBufferMgr != NULL) { + m_scpBufferMgr->deinit(); + m_scpBufferMgr->setBufferCount(0); + } + + if (m_sccBufferMgr != NULL) { + m_sccBufferMgr->deinit(); + } + + if (m_previewCallbackBufferMgr != NULL) { + m_previewCallbackBufferMgr->deinit(); + } + if (m_highResolutionCallbackBufferMgr != NULL) { + m_highResolutionCallbackBufferMgr->deinit(); + } + + m_parameters->setReallocBuffer(false); + + if (m_parameters->getRestartPreview() == true) { + ret = setPreviewWindow(m_previewWindow); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPreviewWindow fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + return true; +} + + 
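+/* + * Descriptive note on the allocator below: m_setBuffers() pre-allocates the preview-path + * buffers before streaming starts - FLITE bayer, 3AA, ISP (when 3AA-to-ISP runs M2M), + * HW-DIS (when TPU is enabled), VRA (when MCSC-to-VRA runs M2M), SCP preview and SCC + * buffers. Sizes come from ExynosCameraParameters and buffer counts from m_exynosconfig; + * bayer/ISP buffers may be placed in reserved ION memory when RESERVED_MEMORY_ENABLE is defined. + */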
+status_t ExynosCamera::m_setBuffers(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + CLOGI("INFO(%s[%d]):alloc buffer - camera ID: %d", + __FUNCTION__, __LINE__, m_cameraId); + int ret = 0; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int hwPreviewW, hwPreviewH; + int hwPictureW, hwPictureH; + + int ispBufferW, ispBufferH; + int previewMaxW, previewMaxH; + int pictureMaxW, pictureMaxH; + int sensorMaxW, sensorMaxH; + int sensorMarginW, sensorMarginH; + ExynosRect bdsRect; + + int planeCount = 1; + int minBufferCount = 1; + int maxBufferCount = 1; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ATONCE; + + if( m_parameters->getReallocBuffer() ) { + /* skip to free and reallocate buffers : flite / 3aa / isp / ispReprocessing */ + m_releasebuffersForRealloc(); + } + + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + CLOGI("(%s):HW Preview width x height = %dx%d", __FUNCTION__, hwPreviewW, hwPreviewH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + CLOGI("(%s):HW Picture width x height = %dx%d", __FUNCTION__, hwPictureW, hwPictureH); + m_parameters->getMaxPictureSize(&pictureMaxW, &pictureMaxH); + CLOGI("(%s):Picture MAX width x height = %dx%d", __FUNCTION__, pictureMaxW, pictureMaxH); + if( m_parameters->getHighSpeedRecording() ) { + m_parameters->getHwSensorSize(&sensorMaxW, &sensorMaxH); + CLOGI("(%s):HW Sensor(HighSpeed) MAX width x height = %dx%d", __FUNCTION__, sensorMaxW, sensorMaxH); + m_parameters->getHwPreviewSize(&previewMaxW, &previewMaxH); + CLOGI("(%s):HW Preview(HighSpeed) MAX width x height = %dx%d", __FUNCTION__, previewMaxW, previewMaxH); + } else { + m_parameters->getMaxSensorSize(&sensorMaxW, &sensorMaxH); + CLOGI("(%s):Sensor MAX width x height = %dx%d", __FUNCTION__, sensorMaxW, sensorMaxH); + m_parameters->getMaxPreviewSize(&previewMaxW, &previewMaxH); + CLOGI("(%s):Preview MAX width x height = %dx%d", __FUNCTION__, previewMaxW, previewMaxH); + } + +#if (SUPPORT_BACK_HW_VDIS || SUPPORT_FRONT_HW_VDIS) + /* + * we cannot expect TPU on or not, when open() api. 
+ * so extract memory TPU size + */ + int w = 0, h = 0; + ret = m_parameters->calcNormalToTpuSize(previewMaxW, previewMaxH, &w, &h); + if (ret < 0) { + CLOGE("ERR(%s[%d]):Hw vdis buffer calculation fail src(%d x %d) dst(%d x %d)",__FUNCTION__, __LINE__, previewMaxW, previewMaxH, w, h); + } + previewMaxW = w; + previewMaxH = h; + CLOGI("(%s): TPU based Preview MAX width x height = %dx%d", __FUNCTION__, previewMaxW, previewMaxH); +#endif + + m_parameters->getPreviewBdsSize(&bdsRect); + + /* FLITE */ +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bytesPerLine[0] = sensorMaxW * 2; + planeSize[0] = sensorMaxW * sensorMaxH * 2; + } else +#endif /* DEBUG_RAWDUMP */ + { + bytesPerLine[0] = ROUND_UP(sensorMaxW , 10) * 8 / 5; + planeSize[0] = bytesPerLine[0] * sensorMaxH; + } +#else + planeSize[0] = sensorMaxW * sensorMaxH * 2; +#endif + planeCount = 2; + + /* TODO : make the number of buffers the same */ + maxBufferCount = m_exynosconfig->current->bufInfo.num_bayer_buffers; +#ifdef RESERVED_MEMORY_ENABLE + if (getCameraId() == CAMERA_ID_BACK) { + type = EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE; + m_bayerBufferMgr->setContigBufCount(RESERVED_NUM_BAYER_BUFFERS); + } else { + if (m_parameters->getDualMode() == false) { + type = EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE; + m_bayerBufferMgr->setContigBufCount(FRONT_RESERVED_NUM_BAYER_BUFFERS); + } + } +#endif + +#ifndef DEBUG_RAWDUMP + if (m_parameters->isUsing3acForIspc() == false + || m_parameters->getDualMode() == true) +#endif + { + ret = m_allocBuffers(m_bayerBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, maxBufferCount, type, true, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):bayerBuffer m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + } + + type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + +#ifdef CAMERA_PACKED_BAYER_ENABLE + memset(&bytesPerLine, 0, sizeof(unsigned int) * EXYNOS_CAMERA_BUFFER_MAX_PLANES); +#endif + + /* for preview */ + planeSize[0] = 32 * 64 * 2; + planeCount = 2; + /* TODO : make the number of buffers the same */ + if (m_parameters->isFlite3aaOtf() == true) + maxBufferCount = m_exynosconfig->current->bufInfo.num_3aa_buffers; + else + maxBufferCount = m_exynosconfig->current->bufInfo.num_bayer_buffers; + + ret = m_allocBuffers(m_3aaBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, true, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_3aaBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + + if (m_parameters->isUsing3acForIspc() == true) { + if (m_parameters->isReprocessing() == true) { +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bytesPerLine[0] = previewMaxW * 2; + planeSize[0] = previewMaxW * previewMaxH * 2; + } else +#endif /* DEBUG_RAWDUMP */ + { + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + planeSize[0] = previewMaxW * previewMaxH * 2; + } else { + bytesPerLine[0] = ROUND_UP((previewMaxW * 3 / 2), 16); + planeSize[0] = bytesPerLine[0] * previewMaxH; + } + } +#else + /* planeSize[0] = width * height * 2; */ + planeSize[0] = previewMaxW * previewMaxH * 2; +#endif + } else { + /* Picture Max Size == Sensor Max Size - Sensor Margin */ +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bytesPerLine[0] = pictureMaxW * 2; + planeSize[0] = pictureMaxW * pictureMaxH * 2; + } else +#endif /* DEBUG_RAWDUMP */ + { + 
bytesPerLine[0] = ROUND_UP((pictureMaxW * 3 / 2), 16); + planeSize[0] = bytesPerLine[0] * pictureMaxH; + } +#else + /* planeSize[0] = width * height * 2; */ + planeSize[0] = pictureMaxW * pictureMaxH * 2; +#endif + } + } else { +#if defined (USE_ISP_BUFFER_SIZE_TO_BDS) + ispBufferW = bdsRect.w; + ispBufferH = bdsRect.h; +#else + ispBufferW = previewMaxW; + ispBufferH = previewMaxH; +#endif + +#ifdef CAMERA_PACKED_BAYER_ENABLE + bytesPerLine[0] = ROUND_UP((ispBufferW* 3 / 2), 16); + planeSize[0] = bytesPerLine[0] * ispBufferH; +#else + bytesPerLine[0] = ispBufferW * 2; + planeSize[0] = ispBufferW * ispBufferH * 2; +#endif + } + planeCount = 2; + /* TO DO : make num of buffers samely */ + if (m_parameters->isFlite3aaOtf() == true) { + maxBufferCount = m_exynosconfig->current->bufInfo.num_3aa_buffers; +#ifdef RESERVED_MEMORY_ENABLE + if (getCameraId() == CAMERA_ID_BACK) { + type = EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE; + if(m_parameters->getUHDRecordingMode() == true) { + m_ispBufferMgr->setContigBufCount(RESERVED_NUM_ISP_BUFFERS_ON_UHD); + } else { + m_ispBufferMgr->setContigBufCount(RESERVED_NUM_ISP_BUFFERS); + } + } else { + if (m_parameters->getDualMode() == false) { + type = EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE; + m_ispBufferMgr->setContigBufCount(FRONT_RESERVED_NUM_ISP_BUFFERS); + } + } +#endif + } else { + maxBufferCount = m_exynosconfig->current->bufInfo.num_bayer_buffers; +#ifdef RESERVED_MEMORY_ENABLE + if (m_parameters->getDualMode() == false) { + type = EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE; + m_ispBufferMgr->setContigBufCount(FRONT_RESERVED_NUM_ISP_BUFFERS); + } +#endif + } + + if (m_parameters->is3aaIspOtf() == false) { + ret = m_allocBuffers(m_ispBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, maxBufferCount, type, true, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_ispBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + } + + /* HW VDIS memory */ + if (m_parameters->getTpuEnabledMode() == true) { + maxBufferCount = m_exynosconfig->current->bufInfo.num_hwdis_buffers; + + /* DIS MEMORY */ + int disFormat = m_parameters->getHWVdisFormat(); + unsigned int bpp = 0; + unsigned int disPlanes = 0; + + getYuvFormatInfo(disFormat, &bpp, &disPlanes); + + switch (disFormat) { + case V4L2_PIX_FMT_YUYV: + planeSize[0] = bdsRect.w * bdsRect.h * 2; + break; + default: + CLOGE("ERR(%s[%d]):unexpected VDIS format(%d). 
so, fail", __FUNCTION__, __LINE__, disFormat); + return INVALID_OPERATION; + break; + } + + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + + ret = m_allocBuffers(m_hwDisBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, maxBufferCount, type, true, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_hwDisBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + + CLOGD("DEBUG(%s[%d]):m_allocBuffers(m_hwDisBufferMgr): %d x %d, planeCount(%d), maxBufferCount(%d)", + __FUNCTION__, __LINE__, bdsRect.w, bdsRect.h, planeCount, maxBufferCount); + } + + /* VRA buffers */ + if (m_parameters->isMcscVraOtf() == false) { + int vraWidth = 0, vraHeight = 0; + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + bytesPerLine[0] = ROUND_UP((vraWidth * 3 / 2), CAMERA_16PX_ALIGN); + planeSize[0] = bytesPerLine[0] * vraHeight; + planeCount = 2; + + maxBufferCount = m_exynosconfig->current->bufInfo.num_vra_buffers; + + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + + ret = m_allocBuffers(m_vraBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, maxBufferCount, type, true, true); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_vraBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + } + + /* SW VDIS memory */ + type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + + planeSize[0] = hwPreviewW * hwPreviewH; + planeSize[1] = hwPreviewW * hwPreviewH / 2; + planeCount = 3; + if(m_parameters->increaseMaxBufferOfPreview()){ + maxBufferCount = m_parameters->getPreviewBufferCount(); + } else { + maxBufferCount = m_exynosconfig->current->bufInfo.num_preview_buffers; + } + + bool needMmap = false; + if (m_previewWindow == NULL) + needMmap = true; + + ret = m_allocBuffers(m_scpBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, true, needMmap); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_scpBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + + int stride = m_scpBufferMgr->getBufStride(); + if (stride != hwPreviewW) { + CLOGI("INFO(%s[%d]):hwPreviewW(%d), stride(%d)", __FUNCTION__, __LINE__, hwPreviewW, stride); + if (stride == 0) { + /* If the SCP buffer manager is not instance of GrallocExynosCameraBufferManager + (In case of setPreviewWindow(null) is called), return value of setHwPreviewStride() + will be zero. If this value is passed as SCP width to firmware, firmware will + generate PABORT error. */ + CLOGW("WARN(%s[%d]):HACK: Invalid stride(%d). 
It will be replaced as hwPreviewW(%d) value.", + __FUNCTION__, __LINE__, stride, hwPreviewW); + stride = hwPreviewW; + } + } + + m_parameters->setHwPreviewStride(stride); + + if (m_parameters->isSccCapture() == true + || m_parameters->isUsing3acForIspc() == true) { + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + if (m_parameters->isUsing3acForIspc() == true) { + hwPictureW = sensorMaxW; + hwPictureH = sensorMaxH; + } + CLOGI("(%s):HW Picture width x height = %dx%d", __FUNCTION__, hwPictureW, hwPictureH); + if (SCC_OUTPUT_COLOR_FMT == V4L2_PIX_FMT_NV21M) { + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN); + planeSize[1] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) / 2; + planeCount = 3; + } else if (SCC_OUTPUT_COLOR_FMT == V4L2_PIX_FMT_NV21) { + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) * 3 / 2; + planeCount = 2; + } else { + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) * 2; + planeCount = 2; + } + /* TODO : make the number of buffers the same */ + if (m_parameters->isFlite3aaOtf() == true) + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + else + maxBufferCount = m_exynosconfig->current->bufInfo.num_bayer_buffers; + + if (m_parameters->isUsing3acForIspc() == true) { + allocMode = BUFFER_MANAGER_ALLOCATION_SILENT; + minBufferCount = 1; + } else { + allocMode = BUFFER_MANAGER_ALLOCATION_ATONCE; + minBufferCount = maxBufferCount; + } + type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + + ret = m_allocBuffers(m_sccBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, true, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_sccBufferMgr m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, maxBufferCount); + return ret; + } + } + + CLOGI("INFO(%s[%d]):alloc buffer done - camera ID: %d", + __FUNCTION__, __LINE__, m_cameraId); + + return NO_ERROR; +} + +status_t ExynosCamera::m_setReprocessingBuffer(void) +{ + int ret = 0; + int pictureMaxW, pictureMaxH; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int planeCount = 0; + int bufferCount = 0; + int minBufferCount = NUM_REPROCESSING_BUFFERS; + int maxBufferCount = NUM_PICTURE_BUFFERS; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + m_parameters->getMaxPictureSize(&pictureMaxW, &pictureMaxH); + CLOGI("(%s):HW Picture MAX width x height = %dx%d", __FUNCTION__, pictureMaxW, pictureMaxH); + + /* for reprocessing */ +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bytesPerLine[0] = pictureMaxW * 2; + planeSize[0] = pictureMaxW * pictureMaxH * 2; + } else +#endif /* DEBUG_RAWDUMP */ + { + bytesPerLine[0] = ROUND_UP((pictureMaxW * 3 / 2), 16); + planeSize[0] = bytesPerLine[0] * pictureMaxH; + } +#else + planeSize[0] = pictureMaxW * pictureMaxH * 2; +#endif + planeCount = 2; + bufferCount = NUM_REPROCESSING_BUFFERS; + + if (m_parameters->getHighResolutionCallbackMode() == true) { + /* ISP Reprocessing Buffer realloc for high resolution callback */ + minBufferCount = 2; + } + + ret = m_allocBuffers(m_ispReprocessingBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, true, false); + if (ret
< 0) { + CLOGE("ERR(%s[%d]):m_ispReprocessingBufferMgr m_allocBuffers(minBufferCount=%d/maxBufferCount=%d) fail", + __FUNCTION__, __LINE__, minBufferCount, maxBufferCount); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCamera::m_setPictureBuffer(void) +{ + int ret = 0; + unsigned int planeSize[3] = {0}; + unsigned int bytesPerLine[3] = {0}; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + int planeCount = 0; + int minBufferCount = 1; + int maxBufferCount = 1; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + m_parameters->getMaxPictureSize(&pictureW, &pictureH); + pictureFormat = m_parameters->getHwPictureFormat(); + if ((m_parameters->needGSCForCapture(getCameraId()) == true)) { + if (JPEG_INPUT_COLOR_FMT == V4L2_PIX_FMT_NV21M) { + planeSize[0] = pictureW * pictureH; + planeSize[1] = pictureW * pictureH / 2; + planeCount = 2; + } else if (JPEG_INPUT_COLOR_FMT == V4L2_PIX_FMT_NV21) { + planeSize[0] = pictureW * pictureH * 3 / 2; + planeCount = 1; + }else { + planeSize[0] = pictureW * pictureH * 2; + planeCount = 1; + } + + minBufferCount = 1; + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + + // Pre-allocate certain amount of buffers enough to fed into 3 JPEG save threads. + if (m_parameters->getSeriesShotCount() > 0) + minBufferCount = NUM_BURST_GSC_JPEG_INIT_BUFFER; + + ret = m_allocBuffers(m_gscBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, false, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_gscBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", + __FUNCTION__, __LINE__, minBufferCount, maxBufferCount); + return ret; + } + } + + if( m_hdrEnabled == false ) { + if (JPEG_INPUT_COLOR_FMT == V4L2_PIX_FMT_NV21M) { + planeSize[0] = pictureW * pictureH * 3 / 2; + planeCount = 2; + } else if (JPEG_INPUT_COLOR_FMT == V4L2_PIX_FMT_NV21) { + planeSize[0] = pictureW * pictureH * 3 / 2; + planeCount = 1; + } else { + planeSize[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH); + planeCount = 1; + } + minBufferCount = 1; + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + CLOGD("DEBUG(%s[%d]): jpegBuffer picture(%dx%d) size(%d)", __FUNCTION__, __LINE__, pictureW, pictureH, planeSize[0]); + + // Same with above GSC buffers + if (m_parameters->getSeriesShotCount() > 0) + minBufferCount = NUM_BURST_GSC_JPEG_INIT_BUFFER; + +#ifdef RESERVED_MEMORY_ENABLE + if (getCameraId() == CAMERA_ID_BACK) { + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_RESERVED_TYPE; + if (m_parameters->getUHDRecordingMode() == true) { + m_jpegBufferMgr->setContigBufCount(RESERVED_NUM_JPEG_BUFFERS_ON_UHD); + } else { + m_jpegBufferMgr->setContigBufCount(RESERVED_NUM_JPEG_BUFFERS); + + /* alloc at once */ + minBufferCount = NUM_BURST_GSC_JPEG_INIT_BUFFER; + } + } +#endif + + ret = m_allocBuffers(m_jpegBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, false, true); + if (ret < 0) + CLOGE("ERR(%s:%d):jpegSrcHeapBuffer m_allocBuffers(bufferCount=%d) fail", + __FUNCTION__, __LINE__, NUM_REPROCESSING_BUFFERS); + } + + return ret; +} + +status_t ExynosCamera::m_releaseBuffers(void) +{ + CLOGI("INFO(%s[%d]):release buffer", __FUNCTION__, __LINE__); + int ret = 0; + + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->deinit(); + } + if (m_3aaBufferMgr != NULL) { + 
m_3aaBufferMgr->deinit(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->deinit(); + } + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->deinit(); + } + if (m_scpBufferMgr != NULL) { + m_scpBufferMgr->deinit(); + } + if (m_vraBufferMgr != NULL) { + m_vraBufferMgr->deinit(); + } + if (m_ispReprocessingBufferMgr != NULL) { + m_ispReprocessingBufferMgr->deinit(); + } + if (m_sccReprocessingBufferMgr != NULL) { + m_sccReprocessingBufferMgr->deinit(); + } + if (m_sccBufferMgr != NULL) { + m_sccBufferMgr->deinit(); + } + if (m_gscBufferMgr != NULL) { + m_gscBufferMgr->deinit(); + } + if (m_jpegBufferMgr != NULL) { + m_jpegBufferMgr->deinit(); + } + if (m_thumbnailBufferMgr != NULL) { + m_thumbnailBufferMgr->deinit(); + } + if (m_recordingBufferMgr != NULL) { + m_recordingBufferMgr->deinit(); + } + if (m_previewCallbackBufferMgr != NULL) { + m_previewCallbackBufferMgr->deinit(); + } + if (m_highResolutionCallbackBufferMgr != NULL) { + m_highResolutionCallbackBufferMgr->deinit(); + } + + CLOGI("INFO(%s[%d]):free buffer done", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCamera::m_monitorThreadFunc(void) +{ + CLOGV("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int *threadState; + struct timeval dqTime; + uint64_t *timeInterval; + int *countRenew; + int camId = getCameraId(); + int ret = NO_ERROR; + int loopCount = 0; + + int dtpStatus = 0; + int pipeIdFlite = 0; + int pipeIdErrorCheck = 0; + + for (loopCount = 0; loopCount < MONITOR_THREAD_INTERVAL; loopCount += (MONITOR_THREAD_INTERVAL/20)) { + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]): m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + + return false; + } + + usleep(MONITOR_THREAD_INTERVAL/20); + } + + if (m_previewFrameFactory == NULL) { + CLOGW("WARN(%s[%d]): m_previewFrameFactory is NULL. Skip monitoring.", __FUNCTION__, __LINE__); + + return false; + } + + pipeIdFlite = PIPE_FLITE; + + if (m_parameters->is3aaIspOtf() == true) { + if (m_parameters->getTpuEnabledMode() == true) + pipeIdErrorCheck = PIPE_DIS; + else + pipeIdErrorCheck = PIPE_3AA; + } else { + pipeIdErrorCheck = PIPE_ISP; + } + +#ifdef MONITOR_LOG_SYNC + uint32_t pipeIdIsp = 0; + + /* define pipe for isp node cause of sync log sctrl */ + if (m_parameters->isFlite3aaOtf() == true) + pipeIdIsp = PIPE_3AA; + + /* If it is not front camera in dual and sensor pipe is running, do sync log */ + if (m_previewFrameFactory->checkPipeThreadRunning(pipeIdIsp) && + !(getCameraId() == CAMERA_ID_FRONT && m_parameters->getDualMode())){ + if (!(m_syncLogDuration % MONITOR_LOG_SYNC_INTERVAL)) { + uint32_t syncLogId = m_getSyncLogId(); + CLOGI("INFO(%s[%d]): @FIMC_IS_SYNC %d", __FUNCTION__, __LINE__, syncLogId); + m_previewFrameFactory->syncLog(pipeIdIsp, syncLogId); + } + m_syncLogDuration++; + } +#endif + + m_previewFrameFactory->getControl(V4L2_CID_IS_G_DTPSTATUS, &dtpStatus, pipeIdFlite); + if (dtpStatus == 1) { + CLOGD("DEBUG(%s[%d]):DTP Detected. dtpStatus(%d)", __FUNCTION__, __LINE__, dtpStatus); + dump(); + + /* in GED */ + m_notifyCb(CAMERA_MSG_ERROR, 100, 0, m_callbackCookie); + return false; + } + +#ifdef SENSOR_OVERFLOW_CHECK + m_previewFrameFactory->getControl(V4L2_CID_IS_G_DTPSTATUS, &dtpStatus, pipeIdFlite); + if (dtpStatus == 1) { + CLOGD("DEBUG(%s[%d]):DTP Detected. 
dtpStatus(%d)", __FUNCTION__, __LINE__, dtpStatus); + dump(); + + /* in GED */ + /* m_notifyCb(CAMERA_MSG_ERROR, 100, 0, m_callbackCookie); */ + /* specifically defined */ + /* m_notifyCb(CAMERA_MSG_ERROR, 1002, 0, m_callbackCookie); */ + /* or */ + android_printAssert(NULL, LOG_TAG, "killed by itself"); + + return false; + } +#endif + + m_previewFrameFactory->getThreadState(&threadState, pipeIdErrorCheck); + m_previewFrameFactory->getThreadRenew(&countRenew, pipeIdErrorCheck); + + if ((*threadState == ERROR_POLLING_DETECTED) || (*countRenew > ERROR_DQ_BLOCKED_COUNT)) { + CLOGD("DEBUG(%s[%d]):ESD Detected. threadState(%d) *countRenew(%d)", __FUNCTION__, __LINE__, *threadState, *countRenew); + dump(); + + /* in GED */ + /* skip error callback */ + /* m_notifyCb(CAMERA_MSG_ERROR, 100, 0, m_callbackCookie); */ + + return false; + } else { + CLOGV("[%s] (%d) (%d)", __FUNCTION__, __LINE__, *threadState); + } + +#if 0 + m_checkThreadState(threadState, countRenew)?:ret = false; + m_checkThreadInterval(PIPE_SCP, WARNING_SCP_THREAD_INTERVAL, threadState)?:ret = false; + + enum pipeline pipe; + + /* check PIPE_3AA thread state & interval */ + if (m_parameters->isFlite3aaOtf() == true) { + pipe = PIPE_3AA_ISP; + + m_previewFrameFactory->getThreadRenew(&countRenew, pipe); + m_checkThreadState(threadState, countRenew)?:ret = false; + + if (ret == false) { + dump(); + + /* in GED */ + m_notifyCb(CAMERA_MSG_ERROR, 100, 0, m_callbackCookie); + /* specifically defined */ + /* m_notifyCb(CAMERA_MSG_ERROR, 1001, 0, m_callbackCookie); */ + /* or */ + android_printAssert(NULL, LOG_TAG, "killed by itself"); + } + } else { + pipe = PIPE_3AA; + + m_previewFrameFactory->getThreadRenew(&countRenew, pipe); + m_checkThreadState(threadState, countRenew)?:ret = false; + + if (ret == false) { + dump(); + + /* in GED */ + m_notifyCb(CAMERA_MSG_ERROR, 100, 0, m_callbackCookie); + /* specifically defined */ + /* m_notifyCb(CAMERA_MSG_ERROR, 1001, 0, m_callbackCookie); */ + /* or */ + android_printAssert(NULL, LOG_TAG, "killed by itself"); + } + } + + m_checkThreadInterval(pipe, WARNING_3AA_THREAD_INTERVAL, threadState)?:ret = false; + + if (m_callbackState == 0) { + m_callbackStateOld = 0; + m_callbackState = 0; + m_callbackMonitorCount = 0; + } else { + if (m_callbackStateOld != m_callbackState) { + m_callbackStateOld = m_callbackState; + CLOGD("INFO(%s[%d]):callback state is updated (0x%x)", __FUNCTION__, __LINE__, m_callbackStateOld); + } else { + if ((m_callbackStateOld & m_callbackState) != 0) + CLOGE("ERR(%s[%d]):callback is blocked (0x%x), Duration:%d msec", __FUNCTION__, __LINE__, m_callbackState, m_callbackMonitorCount*(MONITOR_THREAD_INTERVAL/1000)); + } + } +#endif + + gettimeofday(&dqTime, NULL); + m_previewFrameFactory->getThreadInterval(&timeInterval, pipeIdErrorCheck); + + CLOGV("Thread IntervalTime [%lld]", *timeInterval); + CLOGV("Thread Renew Count [%d]", *countRenew); + + m_previewFrameFactory->incThreadRenew(pipeIdErrorCheck); + + return true; +} + +bool ExynosCamera::m_autoFocusResetNotify(int focusMode) +{ + /* show restart */ + CLOGD("DEBUG(%s):CAMERA_MSG_FOCUS(%d) mode(%d)", __func__, 4, focusMode); + m_notifyCb(CAMERA_MSG_FOCUS, 4, 0, m_callbackCookie); + + /* show focusing */ + CLOGD("DEBUG(%s):CAMERA_MSG_FOCUS(%d) mode(%d)", __func__, 3, focusMode); + m_notifyCb(CAMERA_MSG_FOCUS, 3, 0, m_callbackCookie); + + return true; +} + +bool ExynosCamera::m_autoFocusThreadFunc(void) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + bool afResult = false; + int focusMode = 0; + + /* block until 
we're told to start. we don't want to use + * a restartable thread and requestExitAndWait() in cancelAutoFocus() + * because it would cause deadlock between our callbacks and the + * caller of cancelAutoFocus() which both want to grab the same lock + * in CameraServices layer. + */ + + if (getCameraId() == CAMERA_ID_FRONT) { + if (m_parameters->msgTypeEnabled(CAMERA_MSG_FOCUS)) { + if (m_notifyCb != NULL) { + CLOGD("DEBUG(%s):Do not support autoFocus in front camera.", __FUNCTION__); + m_notifyCb(CAMERA_MSG_FOCUS, true, 0, m_callbackCookie); + } else { + CLOGD("DEBUG(%s):m_notifyCb is NULL!", __FUNCTION__); + } + } else { + CLOGD("DEBUG(%s):autoFocus msg disabled !!", __FUNCTION__); + } + return false; + } + + if (m_autoFocusType == AUTO_FOCUS_SERVICE) { + focusMode = m_parameters->getFocusMode(); + } else if (m_autoFocusType == AUTO_FOCUS_HAL) { + focusMode = FOCUS_MODE_AUTO; + + m_autoFocusResetNotify(focusMode); + } + + /* check early exit request */ + if (m_exitAutoFocusThread == true) { + CLOGD("DEBUG(%s):exiting on request", __FUNCTION__); + goto done; + } + + m_autoFocusLock.lock(); + m_autoFocusRunning = true; + + if (m_autoFocusRunning == true) { + afResult = m_exynosCameraActivityControl->autoFocus(focusMode, m_autoFocusType); + if (afResult == true) + CLOGV("DEBUG(%s):autoFocus Success!!", __FUNCTION__); + else + CLOGV("DEBUG(%s):autoFocus Fail !!", __FUNCTION__); + } else { + CLOGV("DEBUG(%s):autoFocus canceled !!", __FUNCTION__); + } + + /* + * CAMERA_MSG_FOCUS only takes a bool. true for + * finished and false for failure. + * If cancelAutofocus() called, no callback. + */ + if ((m_autoFocusRunning == true) && + m_parameters->msgTypeEnabled(CAMERA_MSG_FOCUS)) { + + if (m_notifyCb != NULL) { + int afFinalResult = (int)afResult; + + /* if inactive detected, tell it */ + if (focusMode == FOCUS_MODE_CONTINUOUS_PICTURE) { + if (m_exynosCameraActivityControl->getCAFResult() == 2) { + afFinalResult = 2; + } + } + + CLOGD("DEBUG(%s):CAMERA_MSG_FOCUS(%d) mode(%d)", __FUNCTION__, afFinalResult, focusMode); + m_notifyCb(CAMERA_MSG_FOCUS, afFinalResult, 0, m_callbackCookie); + } else { + CLOGD("DEBUG(%s):m_notifyCb is NULL mode(%d)", __FUNCTION__, focusMode); + } + } else { + CLOGV("DEBUG(%s):autoFocus canceled, no callback !!", __FUNCTION__); + } + + m_autoFocusRunning = false; + + CLOGV("DEBUG(%s):exiting with no error", __FUNCTION__); + +done: + m_autoFocusLock.unlock(); + + CLOGI("DEBUG(%s):end", __FUNCTION__); + + return false; +} + +bool ExynosCamera::m_autoFocusContinousThreadFunc(void) +{ + int ret = 0; + int index = 0; + uint32_t frameCnt = 0; + uint32_t count = 0; + + + ret = m_autoFocusContinousQ.waitAndPopProcessQ(&frameCnt); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + return false; + } + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return true; + } + + count = m_autoFocusContinousQ.getSizeOfProcessQ(); + if( count >= MAX_FOCUSCONTINUS_THREADQ_SIZE ) { + for( uint32_t i = 0 ; i < count ; i++) { + m_autoFocusContinousQ.popProcessQ(&frameCnt); + } + CLOGD("DEBUG(%s[%d]):m_autoFocusContinousQ skipped QSize(%d) frame(%d)", __FUNCTION__, __LINE__, count, frameCnt); + } + + /* Continuous Auto-focus */ + if (m_parameters->getFocusMode() == FOCUS_MODE_CONTINUOUS_PICTURE) { + int 
afstatus = 0; + static int afResult = 1; + int prev_afstatus = afResult; + afstatus = m_exynosCameraActivityControl->getCAFResult(); + afResult = afstatus; + + if (afstatus == 3 && (prev_afstatus == 0 || prev_afstatus == 1)) { + afResult = 4; + } + + if (m_parameters->msgTypeEnabled(CAMERA_MSG_FOCUS) + && (prev_afstatus != afstatus)) { + CLOGD("DEBUG(%s):CAMERA_MSG_FOCUS(%d) mode(%d)", + __FUNCTION__, afResult, m_parameters->getFocusMode()); + m_notifyCb(CAMERA_MSG_FOCUS, afResult, 0, m_callbackCookie); + } + } + + return true; +} + +status_t ExynosCamera::m_getBufferManager(uint32_t pipeId, ExynosCameraBufferManager **bufMgr, uint32_t direction) +{ + status_t ret = NO_ERROR; + ExynosCameraBufferManager **bufMgrList[2] = {NULL}; + *bufMgr = NULL; + int internalPipeId = pipeId; + + /* + * front / back is different up to scenario(3AA OTF/M2M, etc) + * so, we don't need to distinguish front / back camera. + * but. reprocessing must handle the separated operation + */ + if (pipeId < PIPE_FLITE_REPROCESSING) + internalPipeId = INDEX(pipeId); + + switch (internalPipeId) { + case PIPE_FLITE: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_bayerBufferMgr; + break; + case PIPE_3AA_ISP: + bufMgrList[0] = &m_3aaBufferMgr; + bufMgrList[1] = &m_ispBufferMgr; + break; + case PIPE_3AC: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_bayerBufferMgr; + break; + case PIPE_3AA: + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + bufMgrList[0] = &m_bayerBufferMgr; + bufMgrList[1] = &m_sccBufferMgr; + } else { + bufMgrList[0] = &m_3aaBufferMgr; + if (m_parameters->isUsing3acForIspc() == true) + bufMgrList[1] = &m_sccBufferMgr; + else + bufMgrList[1] = &m_ispBufferMgr; + } + break; + case PIPE_ISP: + bufMgrList[0] = &m_ispBufferMgr; + + if (m_parameters->getTpuEnabledMode() == true) + bufMgrList[1] = &m_hwDisBufferMgr; + else + bufMgrList[1] = &m_scpBufferMgr; + break; + case PIPE_DIS: + bufMgrList[0] = &m_ispBufferMgr; + bufMgrList[1] = &m_scpBufferMgr; + break; + case PIPE_ISPC: + case PIPE_SCC: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_sccBufferMgr; + break; + case PIPE_SCP: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_scpBufferMgr; + break; + case PIPE_VRA: + bufMgrList[0] = &m_vraBufferMgr; + bufMgrList[1] = NULL; + break; + case PIPE_GSC: + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) + bufMgrList[0] = &m_sccBufferMgr; + else + bufMgrList[0] = &m_scpBufferMgr; + bufMgrList[1] = &m_scpBufferMgr; + break; + case PIPE_GSC_PICTURE: + bufMgrList[0] = &m_sccBufferMgr; + bufMgrList[1] = &m_gscBufferMgr; + break; + case PIPE_3AA_REPROCESSING: + bufMgrList[0] = &m_bayerBufferMgr; + if (m_parameters->getDualMode() == false) + bufMgrList[1] = &m_ispReprocessingBufferMgr; + else + bufMgrList[1] = &m_sccReprocessingBufferMgr; + break; + case PIPE_ISP_REPROCESSING: + bufMgrList[0] = &m_ispReprocessingBufferMgr; + bufMgrList[1] = &m_sccReprocessingBufferMgr; + break; + case PIPE_MCSC_REPROCESSING: + bufMgrList[0] = &m_sccBufferMgr; + bufMgrList[1] = &m_sccReprocessingBufferMgr; + break; + case PIPE_ISPC_REPROCESSING: + case PIPE_SCC_REPROCESSING: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_sccReprocessingBufferMgr; + break; + case PIPE_GSC_REPROCESSING: + bufMgrList[0] = &m_sccReprocessingBufferMgr; + bufMgrList[1] = &m_gscBufferMgr; + break; + default: + CLOGE("ERR(%s[%d]): Unknown pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + bufMgrList[0] = NULL; + bufMgrList[1] = NULL; + ret = BAD_VALUE; + break; + } + + if (bufMgrList[direction] != NULL) + *bufMgr = 
*bufMgrList[direction]; + + return ret; +} + +#ifdef RAWDUMP_CAPTURE +bool ExynosCamera::m_RawCaptureDumpThreadFunc(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + int sensorMaxW, sensorMaxH; + int sensorMarginW, sensorMarginH; + bool bRet; + char filePath[70]; + ExynosCameraBufferManager *bufferMgr = NULL; + int bayerPipeId = 0; + ExynosCameraBuffer bayerBuffer; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrame *inListFrame = NULL; + unsigned int fliteFcount = 0; + + ret = m_RawCaptureDumpQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + // TODO: doing exception handling + goto CLEAN; + } + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + bayerPipeId = newFrame->getFirstEntity()->getPipeId(); + ret = newFrame->getDstBuffer(bayerPipeId, &bayerBuffer); + ret = m_getBufferManager(bayerPipeId, &bufferMgr, DST_BUFFER_DIRECTION); + + /* Rawdump capture is available in pure bayer only */ + if (m_parameters->getUsePureBayerReprocessing() == true) { + camera2_shot_ext *shot_ext = NULL; + shot_ext = (camera2_shot_ext *)(bayerBuffer.addr[1]); + if (shot_ext != NULL) + fliteFcount = shot_ext->shot.dm.request.frameCount; + else + ALOGE("ERR(%s[%d]):fliteReprocessingBuffer is null", __FUNCTION__, __LINE__); + } else { + camera2_stream *shot_stream = NULL; + shot_stream = (camera2_stream *)(bayerBuffer.addr[1]); + if (shot_stream != NULL) + fliteFcount = shot_stream->fcount; + else + ALOGE("ERR(%s[%d]):fliteReprocessingBuffer is null", __FUNCTION__, __LINE__); + } + + memset(filePath, 0, sizeof(filePath)); + snprintf(filePath, sizeof(filePath), "/data/media/0/RawCapture%d_%d.raw", + m_parameters->getCameraId(), fliteFcount); + /* Pure Bayer Buffer Size == MaxPictureSize + Sensor Margin == Max Sensor Size */ + m_parameters->getMaxSensorSize(&sensorMaxW, &sensorMaxH); + + CLOGD("INFO(%s[%d]):Raw Dump start (%s)", __FUNCTION__, __LINE__, filePath); + + bRet = dumpToFile((char *)filePath, + bayerBuffer.addr[0], + sensorMaxW * sensorMaxH * 2); + if (bRet != true) + ALOGE("ERR(%s[%d]):couldn't make a raw file", __FUNCTION__, __LINE__); + + ret = bufferMgr->putBuffer(bayerBuffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret < 0) { + ALOGE("ERR(%s[%d]):putIndex is %d", __FUNCTION__, __LINE__, bayerBuffer.index); + bufferMgr->printBufferState(); + bufferMgr->printBufferQState(); + } + +CLEAN: + + if (newFrame != NULL) { + newFrame->frameUnlock(); + + ret = m_searchFrameFromList(&m_processList, newFrame->getFrameCount(), &inListFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):searchFrameFromList fail", __FUNCTION__, __LINE__); + } else { + if (inListFrame == NULL) { + CLOGD("DEBUG(%s[%d]): Selected frame(%d) complete, Delete", + __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + } + newFrame = NULL; + } + } + + return ret; +} +#endif + +status_t ExynosCamera::m_getBayerBuffer(uint32_t pipeId, ExynosCameraBuffer *buffer, camera2_shot_ext *updateDmShot) +{ + status_t ret = NO_ERROR; + bool isSrc = false; + int retryCount = 30; /* 200ms x 30 */ + camera2_shot_ext *shot_ext = NULL; + camera2_stream *shot_stream = NULL; + ExynosCameraFrame *inListFrame = NULL; + ExynosCameraFrame *bayerFrame = NULL; + + m_captureSelector->setWaitTime(200000000); +#ifdef RAWDUMP_CAPTURE + for (int i = 0; i < 2; i++) { + bayerFrame =
m_captureSelector->selectFrames(m_reprocessingCounter.getCount(), pipeId, isSrc, retryCount); + + if(i == 0) { + m_RawCaptureDumpQ->pushProcessQ(&bayerFrame); + } else if (i == 1) { + m_parameters->setRawCaptureModeOn(false); + } + } +#else + bayerFrame = m_captureSelector->selectFrames(m_reprocessingCounter.getCount(), pipeId, isSrc, retryCount); +#endif + if (bayerFrame == NULL) { + CLOGE("ERR(%s[%d]):bayerFrame is NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto CLEAN; + } + + if (pipeId == PIPE_3AA) { + ret = bayerFrame->getDstBuffer(pipeId, buffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + goto CLEAN; + } + } else if (pipeId == PIPE_FLITE) { + ret = bayerFrame->getDstBuffer(pipeId, buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + goto CLEAN; + } + } + + if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON || + m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_PURE_DYNAMIC) { + shot_ext = (struct camera2_shot_ext *)buffer->addr[1]; + CLOGD("DEBUG(%s[%d]): Selected frame count(hal : %d / driver : %d)", __FUNCTION__, __LINE__, + bayerFrame->getFrameCount(), shot_ext->shot.dm.request.frameCount); + } else if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON || + m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) { + if (updateDmShot == NULL) { + CLOGE("ERR(%s[%d]): updateDmShot is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + retryCount = 12; /* 80ms * 12 */ + while(retryCount > 0) { + if(bayerFrame->getMetaDataEnable() == false) { + CLOGD("DEBUG(%s[%d]): Waiting for update jpeg metadata failed (%d), retryCount(%d)", __FUNCTION__, __LINE__, ret, retryCount); + } else { + break; + } + retryCount--; + usleep(DM_WAITING_TIME); + } + + /* update meta like pure bayer */ + bayerFrame->getUserDynamicMeta(updateDmShot); + bayerFrame->getDynamicMeta(updateDmShot); + + shot_stream = (struct camera2_stream *)buffer->addr[1]; + CLOGD("DEBUG(%s[%d]): Selected fcount(hal : %d / driver : %d)", __FUNCTION__, __LINE__, + bayerFrame->getFrameCount(), shot_stream->fcount); + } else { + CLOGE("ERR(%s[%d]): reprocessing is not valid pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId, ret); + goto CLEAN; + } + +CLEAN: + + if (bayerFrame != NULL) { + bayerFrame->frameUnlock(); + + ret = m_searchFrameFromList(&m_processList, bayerFrame->getFrameCount(), &inListFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):searchFrameFromList fail", __FUNCTION__, __LINE__); + } else { + CLOGD("DEBUG(%s[%d]): Selected frame(%d) complete, Delete", __FUNCTION__, __LINE__, bayerFrame->getFrameCount()); + bayerFrame->decRef(); + m_frameMgr->deleteFrame(bayerFrame); + bayerFrame = NULL; + } + } + + return ret; +} + +/* vision */ +status_t ExynosCamera::m_startVisionInternal(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + CLOGI("DEBUG(%s[%d]):IN", __FUNCTION__, __LINE__); + + CLOGI("DEBUG(%s[%d]):OUT", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCamera::m_stopVisionInternal(void) +{ + int ret = 0; + + CLOGI("DEBUG(%s[%d]):IN", __FUNCTION__, __LINE__); + + CLOGI("DEBUG(%s[%d]):OUT", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCamera::generateFrameVision(__unused int32_t frameCount, __unused ExynosCameraFrame **newFrame) +{ + 
Mutex::Autolock lock(m_frameLock); + + int ret = 0; + + return ret; +} + +status_t ExynosCamera::m_setVisionBuffers(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + CLOGI("INFO(%s[%d]):alloc buffer - camera ID: %d", + __FUNCTION__, __LINE__, m_cameraId); + + return NO_ERROR; +} + +status_t ExynosCamera::m_setVisionCallbackBuffer(void) +{ + return NO_ERROR; +} + + +bool ExynosCamera::m_visionThreadFunc(void) +{ + + return false; +} + +status_t ExynosCamera::m_startCompanion(void) +{ + return NO_ERROR; +} + +status_t ExynosCamera::m_stopCompanion(void) +{ + return NO_ERROR; +} + +status_t ExynosCamera::startPreview() +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + bool needRestartPreview = false; + if ((m_parameters== NULL) && (m_frameMgr == NULL)) { + CLOGE("INFO(%s[%d]) initialize HAL", __FUNCTION__, __LINE__); + needRestartPreview = true; + this->initialize(); + } + + int ret = 0; + int32_t skipFrameCount = INITIAL_SKIP_FRAME; + unsigned int fdCallbackSize = 0; + + m_hdrSkipedFcount = 0; + m_isTryStopFlash= false; + m_exitAutoFocusThread = false; + m_curMinFps = 0; + m_isNeedAllocPictureBuffer = false; + m_flagThreadStop= false; + m_frameSkipCount = 0; + +#ifdef FIRST_PREVIEW_TIME_CHECK + if (m_flagFirstPreviewTimerOn == false) { + m_firstPreviewTimer.start(); + m_flagFirstPreviewTimerOn = true; + + CLOGD("DEBUG(%s[%d]):m_firstPreviewTimer start", __FUNCTION__, __LINE__); + } +#endif + + if (m_previewEnabled == true) { + return INVALID_OPERATION; + } + + /* frame manager start */ + m_frameMgr->start(); + + fdCallbackSize = sizeof(camera_frame_metadata_t) * NUM_OF_DETECTED_FACES; + + if (m_getMemoryCb != NULL) { + m_fdCallbackHeap = m_getMemoryCb(-1, fdCallbackSize, 1, m_callbackCookie); + if (!m_fdCallbackHeap || m_fdCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, fdCallbackSize); + m_fdCallbackHeap = NULL; + goto err; + } + } + /* + * This is for updating parameter value at once. 
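+     * (all pending parameter changes are applied in one step by updateTpuParameters() below)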
+ * This must be just before making factory + */ + m_parameters->updateTpuParameters(); + + /* setup frameFactory with scenario */ + m_setupFrameFactory(); + + /* vision */ + CLOGI("INFO(%s[%d]): getVisionMode(%d)", __FUNCTION__, __LINE__, m_parameters->getVisionMode()); + if (m_parameters->getVisionMode() == true) { + ret = m_setVisionBuffers(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setVisionCallbackBuffer() fail", __FUNCTION__, __LINE__); + return ret; + } + + ret = m_setVisionCallbackBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setVisionCallbackBuffer() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (m_visionFrameFactory->isCreated() == false) { + ret = m_visionFrameFactory->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_visionFrameFactory->create() failed", __FUNCTION__, __LINE__); + goto err; + } + CLOGD("DEBUG(%s):FrameFactory(VisionFrameFactory) created", __FUNCTION__); + } + + m_parameters->setFrameSkipCount(INITIAL_SKIP_FRAME); + + ret = m_startVisionInternal(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_startVisionInternal() failed", __FUNCTION__, __LINE__); + goto err; + } + + m_visionThread->run(PRIORITY_DEFAULT); + return NO_ERROR; + } else { + m_parameters->setSeriesShotMode(SERIES_SHOT_MODE_NONE); + + if(m_parameters->increaseMaxBufferOfPreview()) { + m_parameters->setPreviewBufferCount(NUM_PREVIEW_BUFFERS + NUM_PREVIEW_SPARE_BUFFERS); + } else { + m_parameters->setPreviewBufferCount(NUM_PREVIEW_BUFFERS); + } + + if ((m_parameters->getRestartPreview() == true) || + m_previewBufferCount != m_parameters->getPreviewBufferCount() || + needRestartPreview == true) { + ret = setPreviewWindow(m_previewWindow); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPreviewWindow fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + m_previewBufferCount = m_parameters->getPreviewBufferCount(); + } + + CLOGI("INFO(%s[%d]):setBuffersThread is run", __FUNCTION__, __LINE__); + m_setBuffersThread->run(PRIORITY_DEFAULT); + + if (m_captureSelector == NULL) { + ExynosCameraBufferManager *bufMgr = NULL; +#ifdef DEBUG_RAWDUMP + bufMgr = m_bayerBufferMgr; +#else + if (m_parameters->isReprocessing() == true) { + if (m_parameters->isUseYuvReprocessing() == true + && m_parameters->isUsing3acForIspc() == true) + bufMgr = m_sccBufferMgr; + else + bufMgr = m_bayerBufferMgr; + } +#endif + m_captureSelector = new ExynosCameraFrameSelector(m_parameters, bufMgr, m_frameMgr); + + if (m_parameters->isReprocessing() == true) { + ret = m_captureSelector->setFrameHoldCount(REPROCESSING_BAYER_HOLD_COUNT); + if (ret < 0) + CLOGE("ERR(%s[%d]): setFrameHoldCount(%d) is fail", __FUNCTION__, __LINE__, REPROCESSING_BAYER_HOLD_COUNT); + } + } + + if (m_sccCaptureSelector == NULL) { + ExynosCameraBufferManager *bufMgr = NULL; + + if (m_parameters->isSccCapture() == true + || m_parameters->isUsing3acForIspc() == true) { + /* TODO: Dynamic select buffer manager for capture */ + bufMgr = m_sccBufferMgr; + } + + m_sccCaptureSelector = new ExynosCameraFrameSelector(m_parameters, bufMgr, m_frameMgr); + } + + if (m_captureSelector != NULL) + m_captureSelector->release(); + + if (m_sccCaptureSelector != NULL) + m_sccCaptureSelector->release(); + +#ifdef RAWDUMP_CAPTURE + ExynosCameraActivitySpecialCapture *m_sCapture = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + m_sCapture->resetRawCaptureFcount(); +#endif + if (m_previewFrameFactory->isCreated() == false) { + ret = m_previewFrameFactory->create(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_previewFrameFactory->create() failed", 
__FUNCTION__, __LINE__); + goto err; + } + CLOGD("DEBUG(%s):FrameFactory(previewFrameFactory) created", __FUNCTION__); + } + + if (m_parameters->getDualMode() == true) { + skipFrameCount = INITIAL_SKIP_FRAME + 2; + } + +#ifdef SET_FPS_SCENE /* This codes for 5260, Do not need other project */ + struct camera2_shot_ext *initMetaData = new struct camera2_shot_ext; + if (initMetaData != NULL) { + m_parameters->duplicateCtrlMetadata(initMetaData); + + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_MIN_TARGET_FPS, initMetaData->shot.ctl.aa.aeTargetFpsRange[0], PIPE_FLITE); + if (ret < 0) + CLOGE("ERR(%s[%d]):FLITE setControl fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_MAX_TARGET_FPS, initMetaData->shot.ctl.aa.aeTargetFpsRange[1], PIPE_FLITE); + if (ret < 0) + CLOGE("ERR(%s[%d]):FLITE setControl fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_SCENE_MODE, initMetaData->shot.ctl.aa.sceneMode, PIPE_FLITE); + if (ret < 0) + CLOGE("ERR(%s[%d]):FLITE setControl fail, ret(%d)", __FUNCTION__, __LINE__, ret); + delete initMetaData; + initMetaData = NULL; + } else { + CLOGE("ERR(%s[%d]):initMetaData is NULL", __FUNCTION__, __LINE__); + } +#elif SET_FPS_FRONTCAM + if (m_parameters->getCameraId() == CAMERA_ID_FRONT) { + struct camera2_shot_ext *initMetaData = new struct camera2_shot_ext; + if (initMetaData != NULL) { + m_parameters->duplicateCtrlMetadata(initMetaData); + CLOGD("(%s:[%d]) : setControl for Frame Range.", __FUNCTION__, __LINE__); + + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_MIN_TARGET_FPS, initMetaData->shot.ctl.aa.aeTargetFpsRange[0], PIPE_FLITE_FRONT); + if (ret < 0) + CLOGE("ERR(%s[%d]):FLITE setControl fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_MAX_TARGET_FPS, initMetaData->shot.ctl.aa.aeTargetFpsRange[1], PIPE_FLITE_FRONT); + if (ret < 0) + CLOGE("ERR(%s[%d]):FLITE setControl fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + delete initMetaData; + initMetaData = NULL; + } else { + CLOGE("ERR(%s[%d]):initMetaData is NULL", __FUNCTION__, __LINE__); + } + } +#endif + + m_parameters->setFrameSkipCount(skipFrameCount); + + m_setBuffersThread->join(); + + if (m_isSuccessedBufferAllocation == false) { + CLOGE("ERR(%s[%d]):m_setBuffersThread() failed", __FUNCTION__, __LINE__); + goto err; + } + + ret = m_startPreviewInternal(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_startPreviewInternal() failed", __FUNCTION__, __LINE__); + goto err; + } + + if (m_parameters->isReprocessing() == true) { +#ifdef START_PICTURE_THREAD +#if !defined(USE_SNAPSHOT_ON_UHD_RECORDING) + if (!m_parameters->getDualRecordingHint() && !m_parameters->getUHDRecordingMode()) +#endif + { + m_startPictureInternalThread->run(PRIORITY_DEFAULT); + } +#endif + } else { + m_pictureFrameFactory = m_previewFrameFactory; + CLOGD("DEBUG(%s[%d]):FrameFactory(pictureFrameFactory) created", __FUNCTION__, __LINE__); + + /* + * Make remained frameFactory here. + * in case of SCC capture, make here. 
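+             * The remaining factories are created asynchronously on m_framefactoryThread.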
+ */ + m_framefactoryThread->run(); + } + +#if defined(USE_UHD_RECORDING) && !defined(USE_SNAPSHOT_ON_UHD_RECORDING) + if (!m_parameters->getDualRecordingHint() && !m_parameters->getUHDRecordingMode()) +#endif + { + m_startPictureBufferThread->run(PRIORITY_DEFAULT); + } + + if (m_previewWindow != NULL) + m_previewWindow->set_timestamp(m_previewWindow, systemTime(SYSTEM_TIME_MONOTONIC)); + +#if defined(RAWDUMP_CAPTURE) || defined(DEBUG_RAWDUMP) + m_mainSetupQThread[INDEX(PIPE_FLITE)]->run(PRIORITY_URGENT_DISPLAY); +#endif + /* setup frame thread */ + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + CLOGD("DEBUG(%s[%d]):setupThread Thread start pipeId(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->run(PRIORITY_URGENT_DISPLAY); + } else { + switch (m_parameters->getReprocessingBayerMode()) { + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON: + m_mainSetupQ[INDEX(PIPE_FLITE)]->setup(NULL); + CLOGD("DEBUG(%s[%d]):setupThread Thread start pipeId(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->run(PRIORITY_URGENT_DISPLAY); + break; + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC: + CLOGD("DEBUG(%s[%d]):setupThread with List pipeId(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + m_mainSetupQ[INDEX(PIPE_FLITE)]->setup(m_mainSetupQThread[INDEX(PIPE_FLITE)]); + break; + default: + CLOGI("INFO(%s[%d]):setupThread not started pipeID(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + break; + } + CLOGD("DEBUG(%s[%d]):setupThread Thread start pipeId(%d)", __FUNCTION__, __LINE__, PIPE_3AA); + m_mainSetupQThread[INDEX(PIPE_3AA)]->run(PRIORITY_URGENT_DISPLAY); + } + + if (m_facedetectThread->isRunning() == false) + m_facedetectThread->run(); + + m_previewThread->run(PRIORITY_DISPLAY); + m_mainThread->run(PRIORITY_DEFAULT); + if(m_parameters->getCameraId() == CAMERA_ID_BACK) + m_autoFocusContinousThread->run(PRIORITY_DEFAULT); + + m_monitorThread->run(PRIORITY_DEFAULT); + + if ((m_parameters->getHighResolutionCallbackMode() == true) && + (m_highResolutionCallbackRunning == false)) { + CLOGD("DEBUG(%s[%d]):High resolution preview callback start", __FUNCTION__, __LINE__); + + m_highResolutionCallbackRunning = true; + if (skipFrameCount > 0) + m_skipReprocessing = true; + + if (m_parameters->isReprocessing() == true) { + if (m_parameters->isHWFCEnabled() == true) { + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_JPEG_DST_REPROCESSING, false); + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_THUMB_DST_REPROCESSING, false); + } + + m_startPictureInternalThread->run(PRIORITY_DEFAULT); + m_startPictureInternalThread->join(); + } + m_prePictureThread->run(PRIORITY_DEFAULT); + } + + /* FD-AE is always on */ +#ifdef USE_FD_AE + m_startFaceDetection(m_parameters->getFaceDetectionMode()); +#endif + } + +#ifdef BURST_CAPTURE + m_burstInitFirst = true; +#endif + return NO_ERROR; + +err: + + /* frame manager stop */ + m_frameMgr->stop(); + + m_setBuffersThread->join(); + + m_releaseBuffers(); + + return ret; +} + +void ExynosCamera::stopPreview() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + ExynosCameraActivityFlash *m_flashMgr = m_exynosCameraActivityControl->getFlashMgr(); + ExynosCameraActivityAutofocus *autoFocusMgr = m_exynosCameraActivityControl->getAutoFocusMgr(); + ExynosCameraFrame *frame = NULL; + + if (m_previewEnabled == false) { + CLOGD("DEBUG(%s[%d]): preview is not enabled", __FUNCTION__, __LINE__); + return; + } + + if (m_pictureEnabled == true) { + CLOGW("WARN(%s[%d]):m_pictureEnabled == 
true (picture is not finished)", __FUNCTION__, __LINE__); + int retry = 0; + do { + usleep(WAITING_TIME); + retry++; + } while(m_pictureEnabled == true && retry < (TOTAL_WAITING_TIME/WAITING_TIME)); + CLOGW("WARN(%s[%d]):wait (%d)msec (because, picture is not finished)", __FUNCTION__, __LINE__, WAITING_TIME * retry / 1000); + } + if (m_parameters->getVisionMode() == true) { + m_frameFactoryQ->release(); + m_visionThread->requestExitAndWait(); + ret = m_stopVisionInternal(); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_stopVisionInternal fail", __FUNCTION__, __LINE__); + } else { + m_startPictureInternalThread->join(); + + /* release about frameFactory */ + m_framefactoryThread->stop(); + m_frameFactoryQ->sendCmd(WAKE_UP); + m_framefactoryThread->requestExitAndWait(); + m_frameFactoryQ->release(); + + m_startPictureBufferThread->join(); + + m_autoFocusRunning = false; + m_exynosCameraActivityControl->cancelAutoFocus(); + + CLOGD("DEBUG(%s[%d]): (%d, %d)", __FUNCTION__, __LINE__, m_flashMgr->getNeedCaptureFlash(), m_pictureEnabled); + if (m_flashMgr->getNeedCaptureFlash() == true && m_pictureEnabled == true) { + CLOGD("DEBUG(%s[%d]): force flash off", __FUNCTION__, __LINE__); + m_exynosCameraActivityControl->cancelFlash(); + autoFocusMgr->stopAutofocus(); + m_isTryStopFlash = true; + /* m_exitAutoFocusThread = true; */ + } + + /* Wait the end of the autoFocus Thread in order to the autofocus and the pre-flash is completed.*/ + m_autoFocusLock.lock(); + m_exitAutoFocusThread = true; + m_autoFocusLock.unlock(); + + int flashMode = AA_FLASHMODE_OFF; + int waitingTime = FLASH_OFF_MAX_WATING_TIME / TOTAL_FLASH_WATING_COUNT; /* Max waiting time: 500ms, Count:10, Waiting time: 50ms */ + + flashMode = m_flashMgr->getFlashStatus(); + if ((flashMode == AA_FLASHMODE_ON_ALWAYS) || (m_flashMgr->getNeedFlashOffDelay() == true)) { + int i = 0; + CLOGD("DEBUG(%s[%d]): flash torch was enabled", __FUNCTION__, __LINE__); + + m_parameters->setFrameSkipCount(100); + do { + if (m_flashMgr->checkFlashOff() == false) { + usleep(waitingTime); + } else { + CLOGD("DEBUG(%s[%d]):turn off the flash torch.(%d)", __FUNCTION__, __LINE__, i); + + flashMode = m_flashMgr->getFlashStatus(); + if (flashMode == AA_FLASHMODE_OFF || flashMode == AA_FLASHMODE_CANCEL) + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_OFF); + usleep(waitingTime); + break; + } + } while(++i < TOTAL_FLASH_WATING_COUNT); + if (i >= TOTAL_FLASH_WATING_COUNT) { + CLOGD("DEBUG(%s[%d]):timeOut-flashMode(%d),checkFlashOff(%d)", + __FUNCTION__, __LINE__, flashMode, m_flashMgr->checkFlashOff()); + } + } else if (m_isTryStopFlash == true) { + usleep(waitingTime*3); /* 150ms */ + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_OFF); + } + + m_flashMgr->setNeedFlashOffDelay(false); + + m_previewFrameFactory->setStopFlag(); + if (m_parameters->isReprocessing() == true && m_reprocessingFrameFactory->isCreated() == true) + m_reprocessingFrameFactory->setStopFlag(); + m_flagThreadStop = true; + + m_takePictureCounter.clearCount(); + m_reprocessingCounter.clearCount(); + m_pictureCounter.clearCount(); + m_jpegCounter.clearCount(); + m_captureSelector->cancelPicture(); + + if ((m_parameters->getHighResolutionCallbackMode() == true) && + (m_highResolutionCallbackRunning == true)) { + m_skipReprocessing = false; + m_highResolutionCallbackRunning = false; + + if (m_parameters->isReprocessing() == true + && m_parameters->isHWFCEnabled() == true) { + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_JPEG_DST_REPROCESSING, true); + 
m_reprocessingFrameFactory->setRequest(PIPE_HWFC_THUMB_DST_REPROCESSING, true); + } + + CLOGD("DEBUG(%s[%d]):High resolution preview callback stop", __FUNCTION__, __LINE__); + + if (m_parameters->isReprocessing() == false) { + m_sccCaptureSelector->cancelPicture(); + m_sccCaptureSelector->wakeupQ(); + CLOGD("DEBUG(%s[%d]):High resolution m_sccCaptureSelector cancel", __FUNCTION__, __LINE__); + } + + m_prePictureThread->requestExitAndWait(); + m_highResolutionCallbackQ->release(); + } + + ret = m_stopPictureInternal(); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_stopPictureInternal fail", __FUNCTION__, __LINE__); + + m_exynosCameraActivityControl->stopAutoFocus(); + m_autoFocusThread->requestExitAndWait(); + + if (m_previewQ != NULL) { + m_previewQ->sendCmd(WAKE_UP); + } else { + CLOGI("INFO(%s[%d]): m_previewQ is NULL", __FUNCTION__, __LINE__); + } + + m_pipeFrameDoneQ->sendCmd(WAKE_UP); + m_mainThread->requestExitAndWait(); + m_monitorThread->requestExitAndWait(); + + if (m_parameters->isMcscVraOtf() == false) { + m_vraThreadQ->sendCmd(WAKE_UP); + m_vraGscDoneQ->sendCmd(WAKE_UP); + m_vraPipeDoneQ->sendCmd(WAKE_UP); + m_vraThread->requestExitAndWait(); + m_previewFrameFactory->stopThread(PIPE_VRA); + } + + m_shutterCallbackThread->requestExitAndWait(); + + m_previewThread->stop(); + if (m_previewQ != NULL) { + m_previewQ->sendCmd(WAKE_UP); + } + m_previewThread->requestExitAndWait(); + + if (m_parameters->isFlite3aaOtf() == true) { + m_mainSetupQThread[INDEX(PIPE_FLITE)]->stop(); + m_mainSetupQ[INDEX(PIPE_FLITE)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->requestExitAndWait(); + + if (m_mainSetupQThread[INDEX(PIPE_3AC)] != NULL) { + m_mainSetupQThread[INDEX(PIPE_3AC)]->stop(); + m_mainSetupQ[INDEX(PIPE_3AC)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_3AC)]->requestExitAndWait(); + } + + m_mainSetupQThread[INDEX(PIPE_3AA)]->stop(); + m_mainSetupQ[INDEX(PIPE_3AA)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_3AA)]->requestExitAndWait(); + + if (m_mainSetupQThread[INDEX(PIPE_ISP)] != NULL) { + m_mainSetupQThread[INDEX(PIPE_ISP)]->stop(); + m_mainSetupQ[INDEX(PIPE_ISP)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_ISP)]->requestExitAndWait(); + } + + /* Comment out, because it included ISP */ + /* m_mainSetupQThread[INDEX(PIPE_SCP)]->requestExitAndWait(); */ + } else { + if (m_mainSetupQThread[INDEX(PIPE_FLITE)] != NULL) { + m_mainSetupQThread[INDEX(PIPE_FLITE)]->stop(); + m_mainSetupQ[INDEX(PIPE_FLITE)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->requestExitAndWait(); + } + } + + m_autoFocusContinousThread->stop(); + m_autoFocusContinousQ.sendCmd(WAKE_UP); + m_autoFocusContinousThread->requestExitAndWait(); + m_autoFocusContinousQ.release(); + + m_facedetectThread->stop(); + m_facedetectQ->sendCmd(WAKE_UP); + m_facedetectThread->requestExitAndWait(); + while (m_facedetectQ->getSizeOfProcessQ()) { + m_facedetectQ->popProcessQ(&frame); + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + + for(int i = 0 ; i < MAX_NUM_PIPES ; i++ ) { + m_clearList(m_mainSetupQ[i]); + } + + ret = m_stopPreviewInternal(); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_stopPreviewInternal fail", __FUNCTION__, __LINE__); + + if (m_previewQ != NULL) + m_clearList(m_previewQ); + + if (m_vraThreadQ != NULL) + m_clearList(m_vraThreadQ); + + if (m_vraGscDoneQ != NULL) + m_clearList(m_vraGscDoneQ); + + if (m_vraPipeDoneQ != NULL) + m_clearList(m_vraPipeDoneQ); + + if (m_zoomPreviwWithCscQ != NULL) + m_zoomPreviwWithCscQ->release(); + + if 
(m_previewCallbackGscFrameDoneQ != NULL) + m_clearList(m_previewCallbackGscFrameDoneQ); + } + + /* skip to free and reallocate buffers : flite / 3aa / isp / ispReprocessing */ + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->resetBuffers(); + } + if (m_3aaBufferMgr != NULL) { + m_3aaBufferMgr->resetBuffers(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->resetBuffers(); + } + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->resetBuffers(); + } + if (m_vraBufferMgr != NULL) { + m_vraBufferMgr->resetBuffers(); + } + + /* realloc reprocessing buffer for change burst panorama <-> normal mode */ + if (m_ispReprocessingBufferMgr != NULL) { + m_ispReprocessingBufferMgr->resetBuffers(); + } + if (m_sccReprocessingBufferMgr != NULL) { + m_sccReprocessingBufferMgr->resetBuffers(); + } + if (m_thumbnailBufferMgr != NULL) { + m_thumbnailBufferMgr->resetBuffers(); + } + + /* realloc callback buffers */ + if (m_scpBufferMgr != NULL) { + m_scpBufferMgr->deinit(); + m_scpBufferMgr->setBufferCount(0); + } + + if (m_sccBufferMgr != NULL) { + // libcamera: 75xx: Fix size issue in preview and capture // Vijayakumar S N + if (m_parameters->isUsing3acForIspc() == true) { + m_sccBufferMgr->deinit(); + m_sccBufferMgr->setBufferCount(0); + } else { + m_sccBufferMgr->resetBuffers(); + } + } + if (m_gscBufferMgr != NULL) { + m_gscBufferMgr->resetBuffers(); + } + + if (m_jpegBufferMgr != NULL) { + m_jpegBufferMgr->deinit(); + } + + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->deinit(); + } + if (m_vraBufferMgr != NULL) { + m_vraBufferMgr->deinit(); + } + + if (m_recordingBufferMgr != NULL) { + m_recordingBufferMgr->deinit(); + } + if (m_previewCallbackBufferMgr != NULL) { + m_previewCallbackBufferMgr->deinit(); + } + if (m_highResolutionCallbackBufferMgr != NULL) { + m_highResolutionCallbackBufferMgr->deinit(); + } + if (m_captureSelector != NULL) { + m_captureSelector->release(); + } + if (m_sccCaptureSelector != NULL) { + m_sccCaptureSelector->release(); + } + +#if 0 + /* skip to free and reallocate buffers : flite / 3aa / isp / ispReprocessing */ + CLOGE(" m_setBuffers free all buffers"); + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->deinit(); + } + if (m_3aaBufferMgr != NULL) { + m_3aaBufferMgr->deinit(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->deinit(); + } + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->deinit(); + } +#endif + /* frame manager stop */ + m_frameMgr->stop(); + m_frameMgr->deleteAllFrame(); + + + m_reprocessingCounter.clearCount(); + m_pictureCounter.clearCount(); + + m_hdrSkipedFcount = 0; + m_dynamicSccCount = 0; + + /* HACK Reset Preview Flag*/ + m_resetPreview = false; + + m_isTryStopFlash= false; + m_exitAutoFocusThread = false; + m_isNeedAllocPictureBuffer = false; + + if (m_fdCallbackHeap != NULL) { + m_fdCallbackHeap->release(m_fdCallbackHeap); + m_fdCallbackHeap = NULL; + } + + m_burstInitFirst = false; + +#ifdef FORCE_RESET_MULTI_FRAME_FACTORY + /* + * HACK + * This is force-reset frameFactory adn companion + */ + m_deinitFrameFactory(); + m_initFrameFactory(); +#endif +} + +status_t ExynosCamera::setPreviewWindow(preview_stream_ops *w) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + int width, height; + int halPreviewFmt = 0; + bool flagRestart = false; + buffer_manager_type bufferType = BUFFER_MANAGER_ION_TYPE; + + if (m_parameters != NULL) { + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + /* android_printAssert(NULL, 
LOG_TAG, "Cannot support this operation"); */ + + return NO_ERROR; + } + } else { + CLOGW("(%s):m_parameters is NULL. Skipped", __FUNCTION__); + return INVALID_OPERATION; + } + + if (previewEnabled() == true) { + CLOGW("WRN(%s[%d]): preview is started, we forcely re-start preview", __FUNCTION__, __LINE__); + flagRestart = true; + m_disablePreviewCB = true; + stopPreview(); + } + + m_previewWindow = w; + + if (m_scpBufferMgr != NULL) { + CLOGD("DEBUG(%s[%d]): scp buffer manager need recreate", __FUNCTION__, __LINE__); + m_scpBufferMgr->deinit(); + + delete m_scpBufferMgr; + m_scpBufferMgr = NULL; + } + + if (w == NULL) { + bufferType = BUFFER_MANAGER_ION_TYPE; + CLOGW("WARN(%s[%d]):window NULL, create internal buffer for preview", __FUNCTION__, __LINE__); + } else { + halPreviewFmt = m_parameters->getHalPixelFormat(); + bufferType = BUFFER_MANAGER_GRALLOC_TYPE; + m_parameters->getHwPreviewSize(&width, &height); + if (m_grAllocator == NULL) + m_grAllocator = new ExynosCameraGrallocAllocator(); + +#ifdef RESERVED_MEMORY_FOR_GRALLOC_ENABLE + if (!(((m_parameters->getShotMode() == SHOT_MODE_BEAUTY_FACE) && (getCameraId() == CAMERA_ID_BACK)) + || m_parameters->getRecordingHint() == true)) { + ret = m_grAllocator->init(m_previewWindow, m_exynosconfig->current->bufInfo.num_preview_buffers, + m_exynosconfig->current->bufInfo.preview_buffer_margin, (GRALLOC_SET_USAGE_FOR_CAMERA | GRALLOC_USAGE_CAMERA_RESERVED)); + } else +#endif + { + ret = m_grAllocator->init(m_previewWindow, m_exynosconfig->current->bufInfo.num_preview_buffers, m_exynosconfig->current->bufInfo.preview_buffer_margin); + } + + if (ret < 0) { + CLOGE("ERR(%s[%d]):gralloc init fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + ret = m_grAllocator->setBuffersGeometry(width, height, halPreviewFmt); + if (ret < 0) { + CLOGE("ERR(%s[%d]):gralloc setBufferGeomety fail, size(%dx%d), fmt(%d), ret(%d)", + __FUNCTION__, __LINE__, width, height, halPreviewFmt, ret); + goto func_exit; + } + } + + m_createBufferManager(&m_scpBufferMgr, "SCP_BUF", bufferType); + + if (bufferType == BUFFER_MANAGER_GRALLOC_TYPE) + m_scpBufferMgr->setAllocator(m_grAllocator); + + if (flagRestart == true) { + startPreview(); + } + +func_exit: + m_disablePreviewCB = false; + + return ret; +} + +status_t ExynosCamera::m_startPreviewInternal(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + CLOGI("DEBUG(%s[%d]):IN", __FUNCTION__, __LINE__); + + uint32_t minBayerFrameNum = 0; + uint32_t min3AAFrameNum = 0; + int ret = 0; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer dstBuf; + int32_t reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + enum pipeline pipe; + int retrycount = 0; + + m_fliteFrameCount = 0; + m_3aa_ispFrameCount = 0; + m_ispFrameCount = 0; + m_sccFrameCount = 0; + m_scpFrameCount = 0; + m_displayPreviewToggle = 0; + + if (m_parameters->isFlite3aaOtf() == true) + minBayerFrameNum = m_exynosconfig->current->bufInfo.init_bayer_buffers; + else + minBayerFrameNum = m_exynosconfig->current->bufInfo.num_bayer_buffers + - m_exynosconfig->current->bufInfo.reprocessing_bayer_hold_count; + + /* + * with MCPipe, we need to putBuffer 3 buf. 
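+     * Therefore min3AAFrameNum is taken from pipeInfo.prepare[PIPE_3AA] below; the
+     * bayer-mode dependent selection is left commented out.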
+ */ + /* + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON) + min3AAFrameNum = minBayerFrameNum; + else + min3AAFrameNum = m_exynosconfig->current->pipeInfo.prepare[PIPE_3AA]; + */ + + min3AAFrameNum = m_exynosconfig->current->pipeInfo.prepare[PIPE_3AA]; + + ExynosCameraBufferManager *taaBufferManager[MAX_NODE]; + ExynosCameraBufferManager *ispBufferManager[MAX_NODE]; + ExynosCameraBufferManager *disBufferManager[MAX_NODE]; + ExynosCameraBufferManager *vraBufferManager[MAX_NODE]; + + for (int i = 0; i < MAX_NODE; i++) { + taaBufferManager[i] = NULL; + ispBufferManager[i] = NULL; + disBufferManager[i] = NULL; + vraBufferManager[i] = NULL; + } + +#ifdef FPS_CHECK + for (int i = 0; i < DEBUG_MAX_PIPE_NUM; i++) + m_debugFpsCount[i] = 0; +#endif + + switch (reprocessingBayerMode) { + case REPROCESSING_BAYER_MODE_NONE : /* Not using reprocessing */ + CLOGD("DEBUG(%s[%d]): Use REPROCESSING_BAYER_MODE_NONE", __FUNCTION__, __LINE__); +#ifdef DEBUG_RAWDUMP + m_previewFrameFactory->setRequestFLITE(true); +#else + m_previewFrameFactory->setRequestFLITE(false); +#endif + m_previewFrameFactory->setRequest3AC(false); + break; + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON : + CLOGD("DEBUG(%s[%d]): Use REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequestFLITE(true); + m_previewFrameFactory->setRequest3AC(false); + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON : + CLOGD("DEBUG(%s[%d]): Use REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequestFLITE(false); + m_previewFrameFactory->setRequest3AC(true); + break; + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC : + CLOGD("DEBUG(%s[%d]): Use REPROCESSING_BAYER_MODE_PURE_DYNAMIC", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequestFLITE(false); + m_previewFrameFactory->setRequest3AC(false); + break; + case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC : + CLOGD("DEBUG(%s[%d]): Use REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequestFLITE(false); + m_previewFrameFactory->setRequest3AC(false); + break; + default : + CLOGE("ERR(%s[%d]): Unknown dynamic bayer mode", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequest3AC(false); + break; + } + + if (m_parameters->getTpuEnabledMode() == true) { + m_previewFrameFactory->setRequestISPP(true); + m_previewFrameFactory->setRequestDIS(true); + + if (m_parameters->is3aaIspOtf() == true) { + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AC)] = m_bayerBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_ISPP)] = m_hwDisBufferMgr; + + disBufferManager[m_previewFrameFactory->getNodeType(PIPE_DIS)] = m_hwDisBufferMgr; + disBufferManager[m_previewFrameFactory->getNodeType(PIPE_SCP)] = m_scpBufferMgr; + } else { + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AC)] = m_bayerBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AP)] = m_ispBufferMgr; + + ispBufferManager[m_previewFrameFactory->getNodeType(PIPE_ISP)] = m_ispBufferMgr; + ispBufferManager[m_previewFrameFactory->getNodeType(PIPE_ISPP)] = m_hwDisBufferMgr; + + disBufferManager[m_previewFrameFactory->getNodeType(PIPE_DIS)] = m_hwDisBufferMgr; + disBufferManager[m_previewFrameFactory->getNodeType(PIPE_SCP)] = m_scpBufferMgr; + } + } else { + 
m_previewFrameFactory->setRequestISPP(false); + m_previewFrameFactory->setRequestDIS(false); + + if (m_parameters->is3aaIspOtf() == true) { + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + m_previewFrameFactory->setRequestFLITE(true); + m_previewFrameFactory->setRequestISPC(true); + + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AA)] = m_bayerBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_ISPC)] = m_sccBufferMgr; + } else { + if (m_parameters->isUsing3acForIspc() == true) { + if (m_parameters->getRecordingHint() == true) + m_previewFrameFactory->setRequest3AC(false); + else + m_previewFrameFactory->setRequest3AC(true); + } + + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + if (m_parameters->isUsing3acForIspc() == true) + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AC)] = m_sccBufferMgr; +#ifndef RAWDUMP_CAPTURE + else + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AC)] = m_bayerBufferMgr; +#endif + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_SCP)] = m_scpBufferMgr; + } + } else { + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AC)] = m_bayerBufferMgr; + taaBufferManager[m_previewFrameFactory->getNodeType(PIPE_3AP)] = m_ispBufferMgr; + + ispBufferManager[m_previewFrameFactory->getNodeType(PIPE_ISP)] = m_ispBufferMgr; + ispBufferManager[m_previewFrameFactory->getNodeType(PIPE_SCP)] = m_scpBufferMgr; + } + } + + if (m_parameters->isMcscVraOtf() == false) { + vraBufferManager[OUTPUT_NODE] = m_vraBufferMgr; + + ret = m_previewFrameFactory->setBufferManagerToPipe(vraBufferManager, PIPE_VRA); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_previewFrameFactory->setBufferManagerToPipe(vraBufferManager, %d) failed", + __FUNCTION__, __LINE__, PIPE_VRA); + return ret; + } + } + + for (int i = 0; i < MAX_NODE; i++) { + /* If even one buffer slot is valid. call setBufferManagerToPipe() */ + if (taaBufferManager[i] != NULL) { + ret = m_previewFrameFactory->setBufferManagerToPipe(taaBufferManager, PIPE_3AA); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_previewFrameFactory->setBufferManagerToPipe(taaBufferManager, %d) failed", + __FUNCTION__, __LINE__, PIPE_3AA); + return ret; + } + break; + } + } + + for (int i = 0; i < MAX_NODE; i++) { + /* If even one buffer slot is valid. call setBufferManagerToPipe() */ + if (ispBufferManager[i] != NULL) { + ret = m_previewFrameFactory->setBufferManagerToPipe(ispBufferManager, PIPE_ISP); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_previewFrameFactory->setBufferManagerToPipe(ispBufferManager, %d) failed", + __FUNCTION__, __LINE__, PIPE_ISP); + return ret; + } + break; + } + } + if (m_parameters->getHWVdisMode()) { + for (int i = 0; i < MAX_NODE; i++) { + /* If even one buffer slot is valid. 
call setBufferManagerToPipe() */ + if (disBufferManager[i] != NULL) { + ret = m_previewFrameFactory->setBufferManagerToPipe(disBufferManager, PIPE_DIS); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_previewFrameFactory->setBufferManagerToPipe(disBufferManager, %d) failed", + __FUNCTION__, __LINE__, PIPE_DIS); + return ret; + } + break; + } + } + } + + ret = m_previewFrameFactory->initPipes(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_previewFrameFactory->initPipes() failed", __FUNCTION__, __LINE__); + return ret; + } + + m_printExynosCameraInfo(__FUNCTION__); + + for (uint32_t i = 0; i < minBayerFrameNum; i++) { + retrycount = 0; + do { + ret = generateFrame(i, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + usleep(100); + } + if (++retrycount >= 10) { + return ret; + } + } while((ret < 0) && (retrycount < 10)); + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):new faame is NULL", __FUNCTION__, __LINE__); + return ret; + } + + m_fliteFrameCount++; + + if (m_parameters->isFlite3aaOtf() == true) { +#ifndef DEBUG_RAWDUMP + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON) +#endif + { + m_setupEntity(m_getBayerPipeId(), newFrame); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, m_getBayerPipeId()); + m_previewFrameFactory->pushFrameToPipe(&newFrame, m_getBayerPipeId()); + } + + if (i < min3AAFrameNum) { + m_setupEntity(PIPE_3AA, newFrame); + + if (m_parameters->is3aaIspOtf() == true) { + if (m_parameters->isMcscVraOtf() == true) + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + + if (m_parameters->getTpuEnabledMode() == true) { + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_DIS); + } + } else { + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + + if (m_parameters->getTpuEnabledMode() == true) { + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_DIS); + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, PIPE_ISP); + } else { + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_ISP); + } + } + + m_previewFrameFactory->pushFrameToPipe(&newFrame, PIPE_3AA); + m_3aa_ispFrameCount++; + } + } else { + m_setupEntity(m_getBayerPipeId(), newFrame); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, m_getBayerPipeId()); + m_previewFrameFactory->pushFrameToPipe(&newFrame, m_getBayerPipeId()); + + if (m_parameters->is3aaIspOtf() == true) { + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + } else { + m_setupEntity(PIPE_3AA, newFrame); + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_ISP); + m_previewFrameFactory->pushFrameToPipe(&newFrame, PIPE_3AA); + m_3aa_ispFrameCount++; + } + + } + + if (m_parameters->isMcscVraOtf() == false) { + m_previewFrameFactory->setFrameDoneQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_VRA); + } + +#if 0 + /* SCC */ + if(m_parameters->isSccCapture() == true) { + m_sccFrameCount++; + + if (isOwnScc(getCameraId()) == true) { + pipe = PIPE_SCC; + } else { + pipe = PIPE_ISPC; + } + + if(newFrame->getRequest(pipe)) { + m_setupEntity(pipe, newFrame); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipe); + m_previewFrameFactory->pushFrameToPipe(&newFrame, pipe); + } + } +#endif + /* SCP */ +/* Comment out, because it included ISP */ +/* + m_scpFrameCount++; + + 
m_setupEntity(PIPE_SCP, newFrame); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_SCP); + m_previewFrameFactory->pushFrameToPipe(&newFrame, PIPE_SCP); +*/ + } + +/* Comment out, because it included ISP */ +/* + for (uint32_t i = minFrameNum; i < INIT_SCP_BUFFERS; i++) { + ret = generateFrame(i, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + return ret; + } + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):new faame is NULL", __FUNCTION__, __LINE__); + return ret; + } + + m_scpFrameCount++; + + m_setupEntity(PIPE_SCP, newFrame); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_SCP); + m_previewFrameFactory->pushFrameToPipe(&newFrame, PIPE_SCP); + } +*/ + + /* prepare pipes */ + ret = m_previewFrameFactory->preparePipes(); + if (ret < 0) { + CLOGE("ERR(%s):preparePipe fail", __FUNCTION__); + return ret; + } + +#ifndef START_PICTURE_THREAD + if (m_parameters->isReprocessing() == true) { + m_startPictureInternal(); + } +#endif + +#ifdef RAWDUMP_CAPTURE + /* s_ctrl HAL version for selecting dvfs table */ + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_HAL_VERSION, IS_HAL_VER_3_2, PIPE_3AA); + ALOGD("WARN(%s): V4L2_CID_IS_HAL_VERSION_%d pipe(%d)", __FUNCTION__, IS_HAL_VER_3_2, PIPE_3AA); + if (ret < 0) + ALOGW("WARN(%s): V4L2_CID_IS_HAL_VERSION is fail", __FUNCTION__); +#endif + + /* stream on pipes */ + ret = m_previewFrameFactory->startPipes(); + if (ret < 0) { + m_previewFrameFactory->stopPipes(); + CLOGE("ERR(%s):startPipe fail", __FUNCTION__); + return ret; + } + + /* start all thread */ + ret = m_previewFrameFactory->startInitialThreads(); + if (ret < 0) { + CLOGE("ERR(%s):startInitialThreads fail", __FUNCTION__); + return ret; + } + + m_previewEnabled = true; + m_parameters->setPreviewRunning(m_previewEnabled); + + if (m_parameters->getFocusModeSetting() == true) { + CLOGD("set Focus Mode(%s[%d])", __FUNCTION__, __LINE__); + int focusmode = m_parameters->getFocusMode(); + m_exynosCameraActivityControl->setAutoFocusMode(focusmode); + m_parameters->setFocusModeSetting(false); + } + + CLOGI("DEBUG(%s[%d]):OUT", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCamera::m_stopPreviewInternal(void) +{ + int ret = 0; + + CLOGI("DEBUG(%s[%d]):IN", __FUNCTION__, __LINE__); + if (m_previewFrameFactory != NULL) { + ret = m_previewFrameFactory->stopPipes(); + if (ret < 0) { + CLOGE("ERR(%s):stopPipe fail", __FUNCTION__); + return ret; + } + } + + CLOGD("DEBUG(%s[%d]):clear process Frame list", __FUNCTION__, __LINE__); + ret = m_clearList(&m_processList); + if (ret < 0) { + CLOGE("ERR(%s):m_clearList fail", __FUNCTION__); + return ret; + } + + /* clear previous recording frame */ + CLOGD("DEBUG(%s[%d]):Recording m_recordingProcessList(%d) IN", + __FUNCTION__, __LINE__, m_recordingProcessList.size()); + m_recordingListLock.lock(); + ret = m_clearList(&m_recordingProcessList); + if (ret < 0) { + CLOGE("ERR(%s):m_clearList fail", __FUNCTION__); + } + m_recordingListLock.unlock(); + CLOGD("DEBUG(%s[%d]):Recording m_recordingProcessList(%d) OUT", + __FUNCTION__, __LINE__, m_recordingProcessList.size()); + + m_pipeFrameDoneQ->release(); + + m_fliteFrameCount = 0; + m_3aa_ispFrameCount = 0; + m_ispFrameCount = 0; + m_sccFrameCount = 0; + m_scpFrameCount = 0; + + m_previewEnabled = false; + m_parameters->setPreviewRunning(m_previewEnabled); + + CLOGI("DEBUG(%s[%d]):OUT", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCamera::m_restartPreviewInternal(void) +{ + 
CLOGI("INFO(%s[%d]): Internal restart preview", __FUNCTION__, __LINE__); + int ret = 0; + int err = 0; + + m_flagThreadStop = true; + + m_startPictureInternalThread->join(); + + /* release about frameFactory */ + m_framefactoryThread->stop(); + m_frameFactoryQ->sendCmd(WAKE_UP); + m_framefactoryThread->requestExitAndWait(); + m_frameFactoryQ->release(); + + m_startPictureBufferThread->join(); + + if (m_previewFrameFactory != NULL) + m_previewFrameFactory->setStopFlag(); + + m_mainThread->requestExitAndWait(); + + ret = m_stopPictureInternal(); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_stopPictureInternal fail", __FUNCTION__, __LINE__); + + m_previewThread->stop(); + if(m_previewQ != NULL) + m_previewQ->sendCmd(WAKE_UP); + m_previewThread->requestExitAndWait(); + + if (m_parameters->isMcscVraOtf() == false) { + m_vraThreadQ->sendCmd(WAKE_UP); + m_vraGscDoneQ->sendCmd(WAKE_UP); + m_vraPipeDoneQ->sendCmd(WAKE_UP); + m_vraThread->requestExitAndWait(); + m_previewFrameFactory->stopThread(PIPE_VRA); + } + + if (m_parameters->isFlite3aaOtf() == true) { + m_mainSetupQThread[INDEX(PIPE_FLITE)]->stop(); + m_mainSetupQ[INDEX(PIPE_FLITE)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->requestExitAndWait(); + + if (m_mainSetupQThread[INDEX(PIPE_3AC)] != NULL) { + m_mainSetupQThread[INDEX(PIPE_3AC)]->stop(); + m_mainSetupQ[INDEX(PIPE_3AC)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_3AC)]->requestExitAndWait(); + } + + m_mainSetupQThread[INDEX(PIPE_3AA)]->stop(); + m_mainSetupQ[INDEX(PIPE_3AA)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_3AA)]->requestExitAndWait(); + + if (m_mainSetupQThread[INDEX(PIPE_ISP)] != NULL) { + m_mainSetupQThread[INDEX(PIPE_ISP)]->stop(); + m_mainSetupQ[INDEX(PIPE_ISP)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_ISP)]->requestExitAndWait(); + } + + /* Comment out, because it included ISP */ + /* + m_mainSetupQThread[INDEX(PIPE_SCP)]->stop(); + m_mainSetupQ[INDEX(PIPE_SCP)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_SCP)]->requestExitAndWait(); + */ + + m_clearList(m_mainSetupQ[INDEX(PIPE_FLITE)]); + m_clearList(m_mainSetupQ[INDEX(PIPE_3AA)]); + m_clearList(m_mainSetupQ[INDEX(PIPE_ISP)]); + /* Comment out, because it included ISP */ + /* m_clearList(m_mainSetupQ[INDEX(PIPE_SCP)]); */ + + m_mainSetupQ[INDEX(PIPE_FLITE)]->release(); + m_mainSetupQ[INDEX(PIPE_3AA)]->release(); + m_mainSetupQ[INDEX(PIPE_ISP)]->release(); + /* Comment out, because it included ISP */ + /* m_mainSetupQ[INDEX(PIPE_SCP)]->release(); */ + } else { + if (m_mainSetupQThread[INDEX(PIPE_FLITE)] != NULL) { + m_mainSetupQThread[INDEX(PIPE_FLITE)]->stop(); + m_mainSetupQ[INDEX(PIPE_FLITE)]->sendCmd(WAKE_UP); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->requestExitAndWait(); + m_clearList(m_mainSetupQ[INDEX(PIPE_FLITE)]); + m_mainSetupQ[INDEX(PIPE_FLITE)]->release(); + } + } + + ret = m_stopPreviewInternal(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_stopPreviewInternal fail", __FUNCTION__, __LINE__); + err = ret; + } + + if (m_previewQ != NULL) + m_clearList(m_previewQ); + + if (m_vraThreadQ != NULL) + m_clearList(m_vraThreadQ); + + if (m_vraGscDoneQ != NULL) + m_clearList(m_vraGscDoneQ); + + if (m_vraPipeDoneQ != NULL) + m_clearList(m_vraPipeDoneQ); + + if (m_zoomPreviwWithCscQ != NULL) + m_zoomPreviwWithCscQ->release(); + + if (m_previewCallbackGscFrameDoneQ != NULL) + m_clearList(m_previewCallbackGscFrameDoneQ); + + /* skip to free and reallocate buffers */ + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->resetBuffers(); + } + if (m_3aaBufferMgr != NULL) { + 
m_3aaBufferMgr->resetBuffers(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->resetBuffers(); + } + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->resetBuffers(); + } + if (m_vraBufferMgr != NULL) { + m_vraBufferMgr->deinit(); + } + if (m_sccBufferMgr != NULL) { + // libcamera: 75xx: Fix size issue in preview and capture // Vijayakumar S N + if (m_parameters->isUsing3acForIspc() == true) { + m_sccBufferMgr->deinit(); + m_sccBufferMgr->setBufferCount(0); + } else { + m_sccBufferMgr->resetBuffers(); + } + } + + if (m_highResolutionCallbackBufferMgr != NULL) { + m_highResolutionCallbackBufferMgr->resetBuffers(); + } + + /* skip to free and reallocate buffers */ + if (m_ispReprocessingBufferMgr != NULL) { + m_ispReprocessingBufferMgr->resetBuffers(); + } + if (m_sccReprocessingBufferMgr != NULL) { + m_sccReprocessingBufferMgr->resetBuffers(); + } + if (m_thumbnailBufferMgr != NULL) { + m_thumbnailBufferMgr->resetBuffers(); + } + + if (m_gscBufferMgr != NULL) { + m_gscBufferMgr->resetBuffers(); + } + if (m_jpegBufferMgr != NULL) { + m_jpegBufferMgr->resetBuffers(); + } + if (m_recordingBufferMgr != NULL) { + m_recordingBufferMgr->resetBuffers(); + } + + /* realloc callback buffers */ + if (m_scpBufferMgr != NULL) { + m_scpBufferMgr->deinit(); + m_scpBufferMgr->setBufferCount(0); + } + if (m_previewCallbackBufferMgr != NULL) { + m_previewCallbackBufferMgr->deinit(); + } + + if (m_captureSelector != NULL) { + m_captureSelector->release(); + } + if (m_sccCaptureSelector != NULL) { + m_sccCaptureSelector->release(); + } + + if (m_parameters->getHighSpeedRecording() && m_parameters->getReallocBuffer()) { + CLOGD("DEBUG(%s): realloc buffer all buffer deinit ", __FUNCTION__); + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->deinit(); + } + if (m_3aaBufferMgr != NULL) { + m_3aaBufferMgr->deinit(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->deinit(); + } + + if (m_sccBufferMgr != NULL) { + m_sccBufferMgr->deinit(); + } +/* + if (m_highResolutionCallbackBufferMgr != NULL) { + m_highResolutionCallbackBufferMgr->deinit(); + } +*/ + if (m_gscBufferMgr != NULL) { + m_gscBufferMgr->deinit(); + } + if (m_jpegBufferMgr != NULL) { + m_jpegBufferMgr->deinit(); + } + if (m_recordingBufferMgr != NULL) { + m_recordingBufferMgr->deinit(); + } + } + + if (m_parameters->getTpuEnabledMode() == true) { + if (m_hwDisBufferMgr != NULL) { + m_hwDisBufferMgr->deinit(); + } + } + + m_flagThreadStop = false; + + ret = setPreviewWindow(m_previewWindow); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setPreviewWindow fail", __FUNCTION__, __LINE__); + err = ret; + } + + CLOGI("INFO(%s[%d]):setBuffersThread is run", __FUNCTION__, __LINE__); + m_setBuffersThread->run(PRIORITY_DEFAULT); + m_setBuffersThread->join(); + + if (m_isSuccessedBufferAllocation == false) { + CLOGE("ERR(%s[%d]):m_setBuffersThread() failed", __FUNCTION__, __LINE__); + err = INVALID_OPERATION; + } + +#ifdef START_PICTURE_THREAD + m_startPictureInternalThread->join(); +#endif + m_startPictureBufferThread->join(); + + if (m_parameters->isReprocessing() == true) { +#ifdef START_PICTURE_THREAD + m_startPictureInternalThread->run(PRIORITY_DEFAULT); +#endif + } else { + m_pictureFrameFactory = m_previewFrameFactory; + CLOGD("DEBUG(%s[%d]):FrameFactory(pictureFrameFactory) created", __FUNCTION__, __LINE__); + + /* + * Make remained frameFactory here. + * in case of SCC capture, make here. 
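+         * As in startPreview(), the remaining factories are created on m_framefactoryThread.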
+ */ + m_framefactoryThread->run(); + } + + ret = m_startPreviewInternal(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_startPreviewInternal fail", __FUNCTION__, __LINE__); + err = ret; + } + +#if defined(RAWDUMP_CAPTURE) || defined(DEBUG_RAWDUMP) + m_mainSetupQThread[INDEX(PIPE_FLITE)]->run(PRIORITY_URGENT_DISPLAY); + m_mainSetupQThread[INDEX(PIPE_3AA)]->run(PRIORITY_URGENT_DISPLAY); +#else + /* setup frame thread */ + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + CLOGD("DEBUG(%s[%d]):setupThread Thread start pipeId(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->run(PRIORITY_URGENT_DISPLAY); + } else { + switch (m_parameters->getReprocessingBayerMode()) { + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON: + m_mainSetupQ[INDEX(PIPE_FLITE)]->setup(NULL); + CLOGD("DEBUG(%s[%d]):setupThread Thread start pipeId(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + m_mainSetupQThread[INDEX(PIPE_FLITE)]->run(PRIORITY_URGENT_DISPLAY); + break; + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC: + CLOGD("DEBUG(%s[%d]):setupThread with List pipeId(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + m_mainSetupQ[INDEX(PIPE_FLITE)]->setup(m_mainSetupQThread[INDEX(PIPE_FLITE)]); + break; + default: + CLOGI("INFO(%s[%d]):setupThread not started pipeID(%d)", __FUNCTION__, __LINE__, PIPE_FLITE); + break; + } + CLOGD("DEBUG(%s[%d]):setupThread Thread start pipeId(%d)", __FUNCTION__, __LINE__, PIPE_3AA); + m_mainSetupQThread[INDEX(PIPE_3AA)]->run(PRIORITY_URGENT_DISPLAY); + } +#endif + + if (m_facedetectThread->isRunning() == false) + m_facedetectThread->run(); + + if (m_monitorThread->isRunning() == false) + m_monitorThread->run(PRIORITY_DEFAULT); + + if (m_parameters->isMcscVraOtf() == false) + m_previewFrameFactory->startThread(PIPE_VRA); + + m_previewThread->run(PRIORITY_DISPLAY); + + m_mainThread->run(PRIORITY_DEFAULT); + m_startPictureInternalThread->join(); + + return err; +} + +status_t ExynosCamera::m_stopPictureInternal(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_prePictureThread->join(); + m_pictureThread->join(); + m_postPictureThread->join(); + + m_jpegCallbackThread->join(); + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + m_jpegSaveThread[threadNum]->join(); + } + + if (m_zslPictureEnabled == true) { + int numOfReprocessingFactory = m_parameters->getNumOfReprocessingFactory(); + + for (int i = FRAME_FACTORY_TYPE_REPROCESSING; i < numOfReprocessingFactory + FRAME_FACTORY_TYPE_REPROCESSING; i++) { + ret = m_frameFactory[i]->stopPipes(); + if (ret < 0) { + CLOGE("ERR(%s):m_reprocessingFrameFactory0>stopPipe() fail", __FUNCTION__); + } + } + } + + if (m_parameters->getHighResolutionCallbackMode() == true) { + m_highResolutionCallbackThread->stop(); + if (m_highResolutionCallbackQ != NULL) + m_highResolutionCallbackQ->sendCmd(WAKE_UP); + m_highResolutionCallbackThread->requestExitAndWait(); + } + + /* Clear frames & buffers which remain in capture processingQ */ + m_clearFrameQ(dstSccReprocessingQ, PIPE_SCC, PIPE_SCC, DST_BUFFER_DIRECTION); + m_clearFrameQ(m_postPictureQ, PIPE_SCC, PIPE_SCC, DST_BUFFER_DIRECTION); + m_clearFrameQ(dstJpegReprocessingQ, PIPE_JPEG, PIPE_JPEG, SRC_BUFFER_DIRECTION); + + dstIspReprocessingQ->release(); + dstGscReprocessingQ->release(); + dstJpegReprocessingQ->release(); + + m_jpegCallbackQ->release(); + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + m_jpegSaveQ[threadNum]->release(); + } + + 
if (m_highResolutionCallbackQ->getSizeOfProcessQ() != 0){ + CLOGD("DEBUG(%s[%d]):m_highResolutionCallbackQ->getSizeOfProcessQ(%d). release the highResolutionCallbackQ.", + __FUNCTION__, __LINE__, m_highResolutionCallbackQ->getSizeOfProcessQ()); + m_highResolutionCallbackQ->release(); + } + + /* + * HACK : + * Just sleep for + * all picture-related thread(having m_postProcessList) is over. + * if not : + * m_clearList will delete frames. + * and then, the internal mutex of other thead's deleted frame + * will sleep forever (pThread's tech report) + * to remove this hack : + * stopPreview()'s burstPanorama-related sequence. + * stop all pipe -> wait all thread. -> clear all frameQ. + */ + usleep(5000); + + CLOGD("DEBUG(%s[%d]):clear postProcess(Picture) Frame list", __FUNCTION__, __LINE__); + + ret = m_clearList(&m_postProcessList); + if (ret < 0) { + CLOGE("ERR(%s):m_clearList fail", __FUNCTION__); + return ret; + } + + m_zslPictureEnabled = false; + + /* TODO: need timeout */ + return NO_ERROR; +} + +status_t ExynosCamera::m_handlePreviewFrame(ExynosCameraFrame *frame) +{ + int ret = 0; + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrame *fdFrame = NULL; + + ExynosCameraBuffer buffer; + ExynosCameraBuffer t3acBuffer; + int pipeID = 0; + /* to handle the high speed frame rate */ + bool skipPreview = false; + int ratio = 1; + uint32_t minFps = 0, maxFps = 0; + uint32_t dispFps = EXYNOS_CAMERA_PREVIEW_FPS_REFERENCE; + uint32_t fvalid = 0; + uint32_t fcount = 0; + uint32_t skipCount = 0; + struct camera2_stream *shot_stream = NULL; + ExynosCameraBuffer resultBuffer; + camera2_node_group node_group_info_isp; + int32_t reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + int ispDstBufferIndex = -1; + + entity = frame->getFrameDoneFirstEntity(); + if (entity == NULL) { + CLOGE("ERR(%s[%d]):current entity is NULL", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return true; + } + + pipeID = entity->getPipeId(); +#ifdef FPS_CHECK + m_debugFpsCheck(entity->getPipeId()); +#endif + /* TODO: remove hard coding */ + switch(INDEX(entity->getPipeId())) { + case PIPE_3AA_ISP: + m_debugFpsCheck(entity->getPipeId()); + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + ret = m_putBuffers(m_3aaBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]):3AA_ISP frameCount(%d) frame.Count(%d)", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)buffer.addr[1]), + frame->getFrameCount()); + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + ret = m_putBuffers(m_ispBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + break; + } + + frame->setMetaDataEnable(true); + + /* Face detection */ + if(!m_parameters->getHighSpeedRecording()) { + skipCount = m_parameters->getFrameSkipCount(); + if( m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) && + skipCount <= 0 && m_flagStartFaceDetection == true) { + fdFrame = m_frameMgr->createFrame(m_parameters, frame->getFrameCount()); + if (fdFrame != NULL) { + m_copyMetaFrameToFrame(frame, fdFrame, true, true); + 
m_facedetectQ->pushProcessQ(&fdFrame); + } + } + } + + /* ISP capture mode q/dq for vdis */ + if (m_parameters->getTpuEnabledMode() == true) { +#if 0 + /* case 1 : directly push on isp, tpu. */ + ret = m_pushFrameToPipeIspDIS(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_pushFrameToPipeIspDIS() fail", __FUNCTION__, __LINE__); + } +#else + /* case 2 : indirectly push on isp, tpu. */ + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(PIPE_ISP)]->pushProcessQ(&newFrame); +#endif + } + + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + break; + case PIPE_3AC: + case PIPE_FLITE: + m_debugFpsCheck(entity->getPipeId()); + + if (m_parameters->getHighSpeedRecording()) { + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + ret = m_putBuffers(m_bayerBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + break; + } + } else { + if (frame->getSccDrop() == true || frame->getIspcDrop() == true) { + CLOGE("ERR(%s[%d]):getSccDrop() == %d || getIspcDrop()== %d. so drop this frame(frameCount : %d)", + __FUNCTION__, __LINE__, frame->getSccDrop(), frame->getIspcDrop(), frame->getFrameCount()); + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + ret = m_putBuffers(m_bayerBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_putBuffers(%d) fail", __FUNCTION__, __LINE__, buffer.index); + break; + } + } else { + ret = m_captureSelector->manageFrameHoldList(frame, entity->getPipeId(), false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):manageFrameHoldList fail", __FUNCTION__, __LINE__); + return ret; + } +#ifdef DEBUG_RAWDUMP + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); +#endif + } + } + + /* TODO: Dynamic bayer capture, currently support only single shot */ + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_PURE_DYNAMIC + || reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) + m_previewFrameFactory->stopThread(entity->getPipeId()); + + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON + || reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON) { + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + } + break; + case PIPE_ISP: + /* + if (entity->getSrcBufState() == ENTITY_BUFFER_STATE_ERROR) + m_previewFrameFactory->dump(); + */ + /* 3AP buffer handling */ + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + ret = m_putBuffers(m_ispBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + break; + } + } + + /* Face detection */ + if (!m_parameters->getHighSpeedRecording() + && frame->getFrameState() != FRAME_STATE_SKIPPED) { + skipCount = m_parameters->getFrameSkipCount(); + if( m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_METADATA) && + skipCount <= 0 && m_flagStartFaceDetection == true) { + ret = 
m_doFdCallbackFunc(frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_doFdCallbackFunc fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + return ret; + } + } + } + + /* ISP capture mode q/dq for vdis */ + if (m_parameters->getTpuEnabledMode() == true) { + break; + } + case PIPE_3AA: + case PIPE_DIS: + /* The following switch allows both PIPE_3AA and PIPE_ISP to + * fall through to PIPE_SCP + */ + + switch(INDEX(entity->getPipeId())) { + case PIPE_3AA: + m_debugFpsCheck(entity->getPipeId()); + + /* + if (entity->getSrcBufState() == ENTITY_BUFFER_STATE_ERROR) + m_previewFrameFactory->dump(); + */ + + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + ret = m_putBuffers(m_3aaBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + } + + frame->setMetaDataEnable(true); + + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + /* 3AC buffer handling */ + t3acBuffer.index = -1; + + if (frame->getRequest(PIPE_3AC) == true) { + ret = frame->getDstBuffer(entity->getPipeId(), &t3acBuffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + } + } + + if (0 <= t3acBuffer.index) { + if (frame->getRequest(PIPE_3AC) == true) { + if (m_parameters->getHighSpeedRecording() == true) { + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_sccBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_bayerBufferMgr, t3acBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_putBuffers(m_bayerBufferMgr, %d) fail", __FUNCTION__, __LINE__, t3acBuffer.index); + break; + } + } else { + entity_buffer_state_t bufferstate = ENTITY_BUFFER_STATE_NOREQ; + ret = frame->getDstBufferState(entity->getPipeId(), &bufferstate, m_previewFrameFactory->getNodeType(PIPE_3AC)); + if (ret == NO_ERROR && bufferstate != ENTITY_BUFFER_STATE_ERROR) { + if (m_parameters->isUseYuvReprocessing() == false + && m_parameters->isUsing3acForIspc() == true) + ret = m_sccCaptureSelector->manageFrameHoldList(frame, entity->getPipeId(), false, m_previewFrameFactory->getNodeType(PIPE_3AC)); + else + ret = m_captureSelector->manageFrameHoldList(frame, entity->getPipeId(), false, m_previewFrameFactory->getNodeType(PIPE_3AC)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):manageFrameHoldList fail", __FUNCTION__, __LINE__); + return ret; + } + } else { + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_sccBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_bayerBufferMgr, t3acBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_putBuffers(m_bayerBufferMgr, %d) fail", __FUNCTION__, __LINE__, t3acBuffer.index); + break; + } + } + } + } else { + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON) { + CLOGW("WARN(%s[%d]):frame->getRequest(PIPE_3AC) == false. 
so, just m_putBuffers(t3acBuffer.index(%d)..., pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, t3acBuffer.index, entity->getPipeId(), ret); + } + + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_sccBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_bayerBufferMgr, t3acBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_putBuffers(m_bayerBufferMgr, %d) fail", __FUNCTION__, __LINE__, t3acBuffer.index); + break; + } + } + } + + /* Face detection for using 3AA-ISP OTF mode. */ + if (m_parameters->is3aaIspOtf() == true + && m_parameters->isMcscVraOtf() == true) { + skipCount = m_parameters->getFrameSkipCount(); + if (skipCount <= 0) { + /* Face detection */ + struct camera2_shot_ext shot; + camera2_node_group node_group_info; + frame->getDynamicMeta(&shot); + fdFrame = m_frameMgr->createFrame(m_parameters, frame->getFrameCount()); + if (fdFrame != NULL) { + fdFrame->storeDynamicMeta(&shot); + m_facedetectQ->pushProcessQ(&fdFrame); + + frame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_3AA); + fdFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_3AA); + } + + } + } + + CLOGV("DEBUG(%s[%d]):3AA_ISP frameCount(%d) frame.Count(%d)", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)buffer.addr[1]), + frame->getFrameCount()); + + break; + case PIPE_DIS: + m_debugFpsCheck(pipeID); + if (m_parameters->getTpuEnabledMode() == true) { + ret = frame->getSrcBuffer(PIPE_DIS, &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, PIPE_ISP, ret); + return ret; + } + + if (buffer.index >= 0) { + ret = m_putBuffers(m_hwDisBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_putBuffers(m_hwDisBufferMgr, %d) fail", __FUNCTION__, __LINE__, buffer.index); + } + } + } + + CLOGV("DEBUG(%s[%d]):DIS done HAL-frameCount(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + + break; + default: + CLOGE("ERR(%s[%d]):Its impossible to be here. ", + __FUNCTION__, __LINE__); + break; + } + + if ((INDEX(entity->getPipeId())) == PIPE_3AA && + m_parameters->is3aaIspOtf() == true && + m_parameters->getTpuEnabledMode() == false) { + /* Fall through to PIPE_SCP */ + } else if ((INDEX(entity->getPipeId())) == PIPE_DIS) { + /* Fall through to PIPE_SCP */ + } else { + /* Break out of the outer switch and reach entity_state_complete: */ + break; + } + + case PIPE_SCP: + if (entity->getDstBufState() == ENTITY_BUFFER_STATE_ERROR) { + // libcamera: 75xx: Change SCP cancelbuffer condition to SCP request true. 
// Siyoung Hur + if (frame->getRequest(PIPE_SCP) == true) { + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { +#ifdef USE_GRALLOC_REUSE_SUPPORT + ret = m_scpBufferMgr->cancelBuffer(buffer.index, true); +#else + ret = m_scpBufferMgr->cancelBuffer(buffer.index); +#endif + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + } + /* For debug */ + /* m_previewFrameFactory->dump(); */ + + /* Comment out, because it included ISP */ + /* + newFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(PIPE_SCP, frame->getFrameCount()); + newFrame->setDstBuffer(PIPE_SCP, buffer); + newFrame->setFrameState(FRAME_STATE_SKIPPED); + + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + */ + } + CLOGV("DEBUG(%s[%d]):SCP done HAL-frameCount(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } else if (entity->getDstBufState() == ENTITY_BUFFER_STATE_COMPLETE) { + m_debugFpsCheck(entity->getPipeId()); + + ret = m_doZoomPrviewWithCSC(entity->getPipeId(), PIPE_GSC, frame); + if (ret < 0) { + CLOGW("WARN(%s[%d]):m_doPrviewWithCSC failed", __FUNCTION__, __LINE__); + } + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + /* TO DO : skip frame for HDR */ + shot_stream = (struct camera2_stream *)buffer.addr[2]; + + if (shot_stream != NULL) { + getStreamFrameValid(shot_stream, &fvalid); + getStreamFrameCount(shot_stream, &fcount); + } else { + CLOGE("ERR(%s[%d]):shot_stream is NULL", __FUNCTION__, __LINE__); + fvalid = false; + fcount = 0; + } + + /* drop preview frame if lcd supported frame rate < scp frame rate */ + frame->getFpsRange(&minFps, &maxFps); + if (dispFps < maxFps) { + ratio = (int)((maxFps * 10 / dispFps) / 10); + m_displayPreviewToggle = (m_displayPreviewToggle + 1) % ratio; + skipPreview = (m_displayPreviewToggle == 0) ? true : false; +#ifdef DEBUG + CLOGE("DEBUG(%s[%d]):preview frame skip! 
frameCount(%d) (m_displayPreviewToggle=%d, maxFps=%d, dispFps=%d, ratio=%d, skipPreview=%d)", + __FUNCTION__, __LINE__, frame->getFrameCount(), m_displayPreviewToggle, maxFps, dispFps, ratio, (int)skipPreview); +#endif + } + + newFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(PIPE_SCP, frame->getFrameCount()); + if (newFrame == NULL) { +#ifdef USE_GRALLOC_REUSE_SUPPORT + ret = m_scpBufferMgr->cancelBuffer(buffer.index, true); +#else + ret = m_scpBufferMgr->cancelBuffer(buffer.index); +#endif + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + + goto entity_state_complete; + } + + newFrame->setDstBuffer(PIPE_SCP, buffer); + + m_parameters->getFrameSkipCount(&m_frameSkipCount); + if (m_frameSkipCount > 0) { + CLOGD("INFO(%s[%d]):Skip frame for frameSkipCount(%d) buffer.index(%d)", + __FUNCTION__, __LINE__, m_frameSkipCount, buffer.index); + newFrame->setFrameState(FRAME_STATE_SKIPPED); + if (buffer.index >= 0) { +#ifdef USE_GRALLOC_REUSE_SUPPORT + ret = m_scpBufferMgr->cancelBuffer(buffer.index, true); +#else + ret = m_scpBufferMgr->cancelBuffer(buffer.index); +#endif + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + } + } else { + if (m_skipReprocessing == true) + m_skipReprocessing = false; + nsecs_t timeStamp = (nsecs_t)frame->getTimeStamp(); + if (m_getRecordingEnabled() == true + && m_parameters->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME)) { + if (timeStamp <= 0L) { + CLOGE("WARN(%s[%d]):timeStamp(%lld) Skip", __FUNCTION__, __LINE__, timeStamp); + } else { + if (m_parameters->doCscRecording() == true) { + /* get Recording Image buffer */ + int bufIndex = -2; + ExynosCameraBuffer recordingBuffer; + ret = m_recordingBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &recordingBuffer); + if (ret < 0 || bufIndex < 0) { + if ((++m_recordingFrameSkipCount % 100) == 0) { + CLOGE("ERR(%s[%d]): Recording buffer is not available!! Recording Frames are Skipping(%d frames) (bufIndex=%d)", + __FUNCTION__, __LINE__, m_recordingFrameSkipCount, bufIndex); + m_recordingBufferMgr->printBufferQState(); + } + } else { + if (m_recordingFrameSkipCount != 0) { + CLOGE("ERR(%s[%d]): Recording buffer is not available!! 
Recording Frames are Skipped(%d frames) (bufIndex=%d) (recordingQ=%d)", + __FUNCTION__, __LINE__, m_recordingFrameSkipCount, bufIndex, m_recordingQ->getSizeOfProcessQ()); + m_recordingFrameSkipCount = 0; + m_recordingBufferMgr->printBufferQState(); + } + m_recordingTimeStamp[bufIndex] = timeStamp; + + ret = m_doPrviewToRecordingFunc(PIPE_GSC_VIDEO, buffer, recordingBuffer, timeStamp); + if (ret < 0) { + CLOGW("WARN(%s[%d]):recordingCallback Skip", __FUNCTION__, __LINE__); + } + } + } else { + m_recordingTimeStamp[buffer.index] = timeStamp; + + if (m_recordingStartTimeStamp == 0) { + m_recordingStartTimeStamp = timeStamp; + CLOGI("INFO(%s[%d]):m_recordingStartTimeStamp=%lld", + __FUNCTION__, __LINE__, m_recordingStartTimeStamp); + } + + if ((0L < timeStamp) + && (m_lastRecordingTimeStamp < timeStamp) + && (m_recordingStartTimeStamp <= timeStamp)) { + if (m_getRecordingEnabled() == true + && m_parameters->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME)) { +#ifdef CHECK_MONOTONIC_TIMESTAMP + CLOGD("DEBUG(%s[%d]):m_dataCbTimestamp::recordingFrameIndex=%d, recordingTimeStamp=%lld, fd[0]=%d", + __FUNCTION__, __LINE__, buffer.index, timeStamp, buffer.fd[0]); +#endif +#ifdef DEBUG + CLOGD("DEBUG(%s[%d]): - lastTimeStamp(%lld), systemTime(%lld), recordingStart(%lld)", + __FUNCTION__, __LINE__, + m_lastRecordingTimeStamp, + systemTime(SYSTEM_TIME_MONOTONIC), + m_recordingStartTimeStamp); +#endif + + if (m_recordingBufAvailable[buffer.index] == false) { + CLOGW("WARN(%s[%d]):recordingFrameIndex(%d) didn't release yet !!! drop the frame !!! " + " timeStamp(%lld) m_recordingBufAvailable(%d)", + __FUNCTION__, __LINE__, buffer.index, timeStamp, (int)m_recordingBufAvailable[buffer.index]); + } else { + struct addrs *recordAddrs = NULL; + + recordAddrs = (struct addrs *)m_recordingCallbackHeap->data; + recordAddrs[buffer.index].type = kMetadataBufferTypeCameraSource; + recordAddrs[buffer.index].fdPlaneY = (unsigned int)buffer.fd[0]; + recordAddrs[buffer.index].fdPlaneCbcr = (unsigned int)buffer.fd[1]; + recordAddrs[buffer.index].bufIndex = buffer.index; + + m_recordingBufAvailable[buffer.index] = false; + m_lastRecordingTimeStamp = timeStamp; + + if (m_getRecordingEnabled() == true + && m_parameters->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME)) { + m_dataCbTimestamp( + timeStamp, + CAMERA_MSG_VIDEO_FRAME, + m_recordingCallbackHeap, + buffer.index, + m_callbackCookie); + } + } + } + } else { + CLOGW("WARN(%s[%d]):recordingFrameIndex=%d, timeStamp(%lld) invalid -" + " lastTimeStamp(%lld), systemTime(%lld), recordingStart(%lld), m_recordingBufAvailable(%d)", + __FUNCTION__, __LINE__, buffer.index, timeStamp, + m_lastRecordingTimeStamp, + systemTime(SYSTEM_TIME_MONOTONIC), + m_recordingStartTimeStamp, + (int)m_recordingBufAvailable[buffer.index]); + m_recordingTimeStamp[buffer.index] = 0L; + } + } + } + } + + ExynosCameraBuffer callbackBuffer; + ExynosCameraFrame *callbackFrame = NULL; + struct camera2_shot_ext *shot_ext = new struct camera2_shot_ext; + + callbackFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(PIPE_SCP, frame->getFrameCount()); + if (callbackFrame == NULL) { +#ifdef USE_GRALLOC_REUSE_SUPPORT + ret = m_scpBufferMgr->cancelBuffer(buffer.index, true); +#else + ret = m_scpBufferMgr->cancelBuffer(buffer.index); +#endif + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + } + goto entity_state_complete; + } + + ret = frame->getDstBuffer(entity->getPipeId(), &callbackBuffer, 
m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + frame->getMetaData(shot_ext); + callbackFrame->storeDynamicMeta(shot_ext); + callbackFrame->setDstBuffer(PIPE_SCP, callbackBuffer); + + if (((m_parameters->getPreviewBufferCount() == NUM_PREVIEW_BUFFERS + NUM_PREVIEW_SPARE_BUFFERS && + m_previewQ->getSizeOfProcessQ() >= 2) || + (m_parameters->getPreviewBufferCount() == NUM_PREVIEW_BUFFERS && + m_previewQ->getSizeOfProcessQ() >= 1)) && + (m_previewThread->isRunning() == true)) { + + if ((m_getRecordingEnabled() == true) && (m_parameters->doCscRecording() == false)) { + CLOGW("WARN(%s[%d]):push frame to previewQ. PreviewQ(%d), PreviewBufferCount(%d)", + __FUNCTION__, + __LINE__, + m_previewQ->getSizeOfProcessQ(), + m_parameters->getPreviewBufferCount()); + m_previewQ->pushProcessQ(&callbackFrame); + } else { + CLOGW("WARN(%s[%d]):Frames are stacked in previewQ. Skip frame. PreviewQ(%d), PreviewBufferCount(%d)", + __FUNCTION__, __LINE__, + m_previewQ->getSizeOfProcessQ(), + m_parameters->getPreviewBufferCount()); + newFrame->setFrameState(FRAME_STATE_SKIPPED); + if (buffer.index >= 0) { + /* only apply in the Full OTF of Exynos75xx. */ +#ifdef USE_GRALLOC_REUSE_SUPPORT + ret = m_scpBufferMgr->cancelBuffer(buffer.index, true); +#else + ret = m_scpBufferMgr->cancelBuffer(buffer.index); +#endif + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + } + + callbackFrame->decRef(); + m_frameMgr->deleteFrame(callbackFrame); + callbackFrame = NULL; + } + + } else if((m_parameters->getFastFpsMode() > 1) && (m_parameters->getRecordingHint() == 1)) { + m_skipCount++; + CLOGV("INFO(%s[%d]):push frame to previewQ", __FUNCTION__, __LINE__); + m_previewQ->pushProcessQ(&callbackFrame); + } else { + if (m_getRecordingEnabled() == true) { + CLOGV("INFO(%s[%d]):push frame to previewQ", __FUNCTION__, __LINE__); + m_previewQ->pushProcessQ(&callbackFrame); + } else { + CLOGV("INFO(%s[%d]):push frame to previewQ", __FUNCTION__, __LINE__); + m_previewQ->pushProcessQ(&callbackFrame); + } + } + delete shot_ext; + shot_ext = NULL; + } + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + //m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + CLOGV("DEBUG(%s[%d]):SCP done HAL-frameCount(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } else { + CLOGV("DEBUG(%s[%d]):SCP droped - SCP buffer is not ready HAL-frameCount(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { +#ifdef USE_GRALLOC_REUSE_SUPPORT + ret = m_scpBufferMgr->cancelBuffer(buffer.index, true); +#else + ret = m_scpBufferMgr->cancelBuffer(buffer.index); +#endif + if (ret < 0) { + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + } + } + + /* For debug */ + /* m_previewFrameFactory->dump(); */ + + /* Comment out, because it included ISP */ + /* + newFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(PIPE_SCP, frame->getFrameCount()); + newFrame->setDstBuffer(PIPE_SCP, buffer); + newFrame->setFrameState(FRAME_STATE_SKIPPED); + + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + */ + } + break; + case PIPE_VRA: + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + 
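+            /*
+             * Illustrative note, not from the original sources: PIPE_VRA carries the
+             * face-detection output when MCSC->VRA runs M2M (the OTF case is handled
+             * in the PIPE_3AA branch above, gated on isMcscVraOtf()). When the
+             * destination buffer is valid and not in the error state, the dynamic
+             * metadata is copied into a companion frame and pushed to m_facedetectQ,
+             * the same hand-off pattern used by the other FD paths; the VRA buffer is
+             * then returned to m_vraBufferMgr as EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL.
+             */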
if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + if (entity->getDstBufState() != ENTITY_BUFFER_STATE_ERROR) { + /* Face detection callback */ + struct camera2_shot_ext fd_shot; + frame->getDynamicMeta(&fd_shot); + + ExynosCameraFrame *fdFrame = m_frameMgr->createFrame(m_parameters, frame->getFrameCount()); + if (fdFrame != NULL) { + fdFrame->storeDynamicMeta(&fd_shot); + m_facedetectQ->pushProcessQ(&fdFrame); + } + } + + ret = m_vraBufferMgr->putBuffer(buffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):Put VRA buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + break; + default: + break; + } + +entity_state_complete: + + ret = frame->setEntityState(entity->getPipeId(), ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState fail, pipeId(%d), state(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ENTITY_STATE_COMPLETE, ret); + return ret; + } + + if (frame->isComplete() == true) { + ret = m_removeFrameFromList(&m_processList, frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + frame->decRef(); + m_frameMgr->deleteFrame(frame); + } + + + return NO_ERROR; +} + +status_t ExynosCamera::m_handlePreviewFrameFrontDual(ExynosCameraFrame *frame) +{ + int ret = 0; + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrame *fdFrame = NULL; + + ExynosCameraBuffer buffer; + ExynosCameraBuffer t3acBuffer; + int pipeID = 0; + /* to handle the high speed frame rate */ + bool skipPreview = false; + int ratio = 1; + uint32_t minFps = 0, maxFps = 0; + uint32_t dispFps = EXYNOS_CAMERA_PREVIEW_FPS_REFERENCE; + uint32_t fvalid = 0; + uint32_t fcount = 0; + uint32_t skipCount = 0; + struct camera2_stream *shot_stream = NULL; + ExynosCameraBuffer resultBuffer; + camera2_node_group node_group_info_isp; + int32_t reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + int ispDstBufferIndex = -1; + + entity = frame->getFrameDoneFirstEntity(); + if (entity == NULL) { + CLOGE("ERR(%s[%d]):current entity is NULL frame(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + /* TODO: doing exception handling */ + return true; + } + + pipeID = entity->getPipeId(); + + /* TODO: remove hard coding */ + switch(INDEX(entity->getPipeId())) { + case PIPE_3AA_ISP: + break; + case PIPE_3AC: + case PIPE_FLITE: + m_debugFpsCheck(entity->getPipeId()); + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + m_setupEntity(PIPE_3AA, frame, &buffer, NULL); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_3AA); + m_previewFrameFactory->pushFrameToPipe(&frame, PIPE_3AA); + + if (m_parameters->isUsing3acForIspc() == true) { + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(entity->getPipeId())]->pushProcessQ(&newFrame); + } + break; + case PIPE_3AA: + m_debugFpsCheck(entity->getPipeId()); + + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (m_parameters->isUsing3acForIspc() == true) { + ret = 
m_sccCaptureSelector->manageFrameHoldList(frame, entity->getPipeId(), false, CAPTURE_NODE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):manageFrameHoldList fail", __FUNCTION__, __LINE__); + return ret; + } + } + + if (buffer.index >= 0) { + ret = m_putBuffers(m_bayerBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + } + + if (m_parameters->isUsing3acForIspc() == false ) { + newFrame = m_frameMgr->createFrame(m_parameters, 0); + m_mainSetupQ[INDEX(m_getBayerPipeId())]->pushProcessQ(&newFrame); + } + + CLOGV("DEBUG(%s[%d]):3AA_ISP frameCount(%d) frame.Count(%d)", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)buffer.addr[1]), + frame->getFrameCount()); + break; + default: + break; + } + +entity_state_complete: + + ret = frame->setEntityState(entity->getPipeId(), ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState fail, pipeId(%d), state(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ENTITY_STATE_COMPLETE, ret); + return ret; + } + + if (frame->isComplete() == true) { + ret = m_removeFrameFromList(&m_processList, frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + frame->decRef(); + m_frameMgr->deleteFrame(frame); + } + + return NO_ERROR; +} + +bool ExynosCamera::m_previewThreadFunc(void) +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + + int ret = 0; + bool loop = true; + int pipeId = 0; + int pipeIdCsc = 0; + int maxbuffers = 0; +#ifdef USE_PREVIEW_DURATION_CONTROL + uint32_t offset = 1000; /* 1ms */ + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + uint64_t frameDurationUs; +#endif + ExynosCameraBuffer buffer; + ExynosCameraFrame *frame = NULL; + nsecs_t timeStamp = 0; + int frameCount = -1; + frame_queue_t *previewQ; + +#ifdef USE_PREVIEW_DURATION_CONTROL + m_parameters->getPreviewFpsRange(&curMinFps, &curMaxFps); + + /* Check the Slow/Fast Motion Scenario - sensor : 120fps, preview : 60fps */ + if(((curMinFps == 120) && (curMaxFps == 120)) + || (curMinFps == 240) && (curMaxFps == 240)) { + CLOGV("(%s[%d]) : Change PreviewDuration from (%d,%d) to (60000, 60000)", __FUNCTION__, __LINE__, curMinFps, curMaxFps); + curMinFps = 60; + curMaxFps = 60; + } + + frameDurationUs = 1000000/curMaxFps; + if (frameDurationUs > offset) { + frameDurationUs = frameDurationUs - offset; /* add the offset value for timing issue */ + } + PreviewDurationTimer.start(); +#endif + + pipeId = PIPE_SCP; + pipeIdCsc = PIPE_GSC; + + previewQ = m_previewQ; + + CLOGV("INFO(%s[%d]):wait previewQ", __FUNCTION__, __LINE__); + ret = previewQ->waitAndPopProcessQ(&frame); + if (m_flagThreadStop == true) { + CLOGI("INFO(%s[%d]):m_flagThreadStop(%d)", __FUNCTION__, __LINE__, m_flagThreadStop); + + if (frame != NULL) { + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + + return false; + } + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + CLOGV("INFO(%s[%d]):get frame from previewQ", __FUNCTION__, __LINE__); + timeStamp = (nsecs_t)frame->getTimeStamp(); + frameCount = frame->getFrameCount(); + ret = frame->getDstBuffer(pipeId, &buffer); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* 
TODO: doing exception handling */ + goto func_exit; + } + + /* ------------- frome here "frame" cannot use ------------- */ + CLOGV("INFO(%s[%d]):push frame to previewReturnQ", __FUNCTION__, __LINE__); + if(m_parameters->increaseMaxBufferOfPreview()) { + maxbuffers = m_parameters->getPreviewBufferCount(); + } else { + maxbuffers = (int)m_exynosconfig->current->bufInfo.num_preview_buffers; + } + + if (buffer.index < 0 || buffer.index >= maxbuffers ) { + CLOGE("ERR(%s[%d]):Out of Index! (Max: %d, Index: %d)", __FUNCTION__, __LINE__, maxbuffers, buffer.index); + goto func_exit; + } + + CLOGV("INFO(%s[%d]):m_previewQ->getSizeOfProcessQ(%d) m_scpBufferMgr->getNumOfAvailableBuffer(%d)", __FUNCTION__, __LINE__, + previewQ->getSizeOfProcessQ(), m_scpBufferMgr->getNumOfAvailableBuffer()); + + /* Prevent displaying unprocessed beauty images in beauty shot. */ + if ((m_parameters->getShotMode() == SHOT_MODE_BEAUTY_FACE)) { + if (m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME) && + checkBit(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE)) { + CLOGV("INFO(%s[%d]):skip the preview callback and the preview display while compressed callback.", + __FUNCTION__, __LINE__); + ret = m_scpBufferMgr->cancelBuffer(buffer.index); + goto func_exit; + } + } + + if ((m_previewWindow == NULL) || + (m_getRecordingEnabled() == true) || + (m_parameters->getPreviewBufferCount() == NUM_PREVIEW_BUFFERS + NUM_PREVIEW_SPARE_BUFFERS && + m_scpBufferMgr->getNumOfAvailableAndNoneBuffer() > 4 && + previewQ->getSizeOfProcessQ() < 2) || + (m_parameters->getPreviewBufferCount() == NUM_PREVIEW_BUFFERS && + m_scpBufferMgr->getNumOfAvailableAndNoneBuffer() > 2 && + previewQ->getSizeOfProcessQ() < 1)) { + if (m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME) && + m_highResolutionCallbackRunning == false) { + ExynosCameraBuffer previewCbBuffer; + + ret = m_setPreviewCallbackBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setPreviewCallback Buffer fail", __FUNCTION__, __LINE__); + return ret; + } + + int bufIndex = -2; + m_previewCallbackBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &previewCbBuffer); + + ExynosCameraFrame *newFrame = NULL; + + newFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(pipeIdCsc); + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + m_scpBufferMgr->cancelBuffer(buffer.index); + m_previewCallbackBufferMgr->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + goto func_exit; + } + + m_copyMetaFrameToFrame(frame, newFrame, true, true); + + ret = m_doPreviewToCallbackFunc(pipeIdCsc, newFrame, buffer, previewCbBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_doPreviewToCallbackFunc fail", __FUNCTION__, __LINE__); + m_previewCallbackBufferMgr->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + m_scpBufferMgr->cancelBuffer(buffer.index); + goto func_exit; + } else { + if (m_parameters->getCallbackNeedCopy2Rendering() == true) { + ret = m_doCallbackToPreviewFunc(pipeIdCsc, frame, previewCbBuffer, buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_doCallbackToPreviewFunc fail", __FUNCTION__, __LINE__); + m_previewCallbackBufferMgr->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + m_scpBufferMgr->cancelBuffer(buffer.index); + goto func_exit; + } + } + } + + if (newFrame != NULL) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + m_previewCallbackBufferMgr->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + } + + if (m_previewWindow != NULL) { + if (timeStamp > 0L) { + 
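+            /*
+             * Illustrative note, not from the original sources: set_timestamp() hands
+             * the sensor-derived timestamp to the preview window before the buffer is
+             * queued for display further down (m_putBuffers on m_scpBufferMgr), so the
+             * consumer can use it for frame pacing. Only strictly positive values are
+             * forwarded; a zero or negative timestamp is treated as invalid and logged
+             * in the else branch together with the HAL and stream frame counts.
+             */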
m_previewWindow->set_timestamp(m_previewWindow, (int64_t)timeStamp); + } else { + uint32_t fcount = 0; + getStreamFrameCount((struct camera2_stream *)buffer.addr[2], &fcount); + CLOGW("WRN(%s[%d]): frameCount(%d)(%d), Invalid timeStamp(%lld)", + __FUNCTION__, __LINE__, + frameCount, + fcount, + timeStamp); + } + } + +#ifdef FIRST_PREVIEW_TIME_CHECK + if (m_flagFirstPreviewTimerOn == true) { + ExynosCameraActivityAutofocus *autoFocusMgr = m_exynosCameraActivityControl->getAutoFocusMgr(); + m_firstPreviewTimer.stop(); + m_flagFirstPreviewTimerOn = false; + + CLOGD("DEBUG(%s[%d]):m_firstPreviewTimer stop", __FUNCTION__, __LINE__); + + CLOGD("DEBUG(%s[%d]):============= First Preview time ==================", "m_printExynosCameraInfo", __LINE__); + CLOGD("DEBUG(%s[%d]):= startPreview ~ first frame : %d msec", "m_printExynosCameraInfo", __LINE__, (int)m_firstPreviewTimer.durationMsecs()); + CLOGD("DEBUG(%s[%d]):===================================================", "m_printExynosCameraInfo", __LINE__); + autoFocusMgr->displayAFInfo(); + } +#endif + + /* display the frame */ + ret = m_putBuffers(m_scpBufferMgr, buffer.index); + if (ret < 0) { + /* TODO: error handling */ + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + } else { + ALOGW("WARN(%s[%d]):Preview frame buffer is canceled." + "PreviewThread is blocked or too many buffers are in Service." + "PreviewBufferCount(%d), ScpBufferMgr(%d), PreviewQ(%d)", + __FUNCTION__, __LINE__, + m_parameters->getPreviewBufferCount(), + m_scpBufferMgr->getNumOfAvailableAndNoneBuffer(), + previewQ->getSizeOfProcessQ()); + ret = m_scpBufferMgr->cancelBuffer(buffer.index); + } + +func_exit: + + if (frame != NULL) { + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + +#ifdef USE_PREVIEW_DURATION_CONTROL + PreviewDurationTimer.stop(); + if ((m_getRecordingEnabled() == true) && (curMinFps == curMaxFps) && + (m_parameters->getShotMode() != SHOT_MODE_SEQUENCE)) { + PreviewDurationTime = PreviewDurationTimer.durationUsecs(); + + if ( frameDurationUs > PreviewDurationTime ) { + uint64_t delay = frameDurationUs - PreviewDurationTime; + CLOGV("(%s):Delay Time(%lld),fpsRange(%d,%d), Duration(%lld)", __FUNCTION__, + frameDurationUs - PreviewDurationTime, + curMinFps, + curMaxFps, + frameDurationUs); + usleep(delay); + } + } +#endif + return loop; +} + +status_t ExynosCamera::m_doPreviewToCallbackFunc( + int32_t pipeId, + ExynosCameraFrame *newFrame, + ExynosCameraBuffer previewBuf, + ExynosCameraBuffer callbackBuf) +{ + CLOGV("DEBUG(%s): converting preview to callback buffer", __FUNCTION__); + + int ret = 0; + status_t statusRet = NO_ERROR; + + int hwPreviewW = 0, hwPreviewH = 0; + int hwPreviewFormat = m_parameters->getHwPreviewFormat(); + bool useCSC = m_parameters->getCallbackNeedCSC(); + + ExynosCameraDurationTimer probeTimer; + int probeTimeMSEC; + uint32_t fcount = 0; + + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + ExynosRect srcRect, dstRect; + + camera_memory_t *previewCallbackHeap = NULL; + previewCallbackHeap = m_getMemoryCb(callbackBuf.fd[0], callbackBuf.size[0], 1, m_callbackCookie); + if (!previewCallbackHeap || previewCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, callbackBuf.size[0]); + statusRet = INVALID_OPERATION; + goto done; + } + + ret = m_setCallbackBufferInfo(&callbackBuf, (char *)previewCallbackHeap->data); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setCallbackBufferInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + statusRet = 
INVALID_OPERATION; + goto done; + } + + if (m_flagThreadStop == true || m_previewEnabled == false) { + CLOGE("ERR(%s[%d]): preview was stopped!", __FUNCTION__, __LINE__); + statusRet = INVALID_OPERATION; + goto done; + } + + ret = m_calcPreviewGSCRect(&srcRect, &dstRect); + + if (useCSC) { + ret = newFrame->setSrcRect(pipeId, &srcRect); + ret = newFrame->setDstRect(pipeId, &dstRect); + + ret = m_setupEntity(pipeId, newFrame, &previewBuf, &callbackBuf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, ret); + statusRet = INVALID_OPERATION; + goto done; + } + m_previewFrameFactory->pushFrameToPipe(&newFrame, pipeId); + m_previewFrameFactory->setOutputFrameQToPipe(m_previewCallbackGscFrameDoneQ, pipeId); + + CLOGV("INFO(%s[%d]):wait preview callback output", __FUNCTION__, __LINE__); + ret = m_previewCallbackGscFrameDoneQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + statusRet = INVALID_OPERATION; + goto done; + } + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + statusRet = INVALID_OPERATION; + goto done; + } + CLOGV("INFO(%s[%d]):preview callback done", __FUNCTION__, __LINE__); + +#if 0 + int remainedH = m_orgPreviewRect.h - dst_height; + + if (remainedH != 0) { + char *srcAddr = NULL; + char *dstAddr = NULL; + int planeDiver = 1; + + for (int plane = 0; plane < 2; plane++) { + planeDiver = (plane + 1) * 2 / 2; + + srcAddr = previewBuf.virt.extP[plane] + (ALIGN_UP(hwPreviewW, CAMERA_ISP_ALIGN) * dst_crop_height / planeDiver); + dstAddr = callbackBuf->virt.extP[plane] + (m_orgPreviewRect.w * dst_crop_height / planeDiver); + + for (int i = 0; i < remainedH; i++) { + memcpy(dstAddr, srcAddr, (m_orgPreviewRect.w / planeDiver)); + + srcAddr += (ALIGN_UP(hwPreviewW, CAMERA_ISP_ALIGN) / planeDiver); + dstAddr += (m_orgPreviewRect.w / planeDiver); + } + } + } +#endif + } else { /* neon memcpy */ + char *srcAddr = NULL; + char *dstAddr = NULL; + unsigned int size = 0; + int planeCount = getYuvPlaneCount(hwPreviewFormat); + if (planeCount <= 0) { + CLOGE("ERR(%s[%d]):getYuvPlaneCount(%d) fail", __FUNCTION__, __LINE__, hwPreviewFormat); + statusRet = INVALID_OPERATION; + goto done; + } + + /* TODO : have to consider all fmt(planes) and stride */ + for (int plane = 0; plane < planeCount; plane++) { + srcAddr = previewBuf.addr[plane]; + dstAddr = callbackBuf.addr[plane]; + + if (previewBuf.size[plane] != callbackBuf.size[plane]) + CLOGW("WARN(%s[%d]):Preview buffer(%d), Preview callback buffer(%d) size mismatch, plane(%d)", + __FUNCTION__, __LINE__, + previewBuf.size[plane], + callbackBuf.size[plane], + plane); + size = (previewBuf.size[plane] < callbackBuf.size[plane])? 
previewBuf.size[plane] : callbackBuf.size[plane]; + memcpy(dstAddr, srcAddr, size); + } + } + + probeTimer.start(); + if (m_parameters->msgTypeEnabled(CAMERA_MSG_PREVIEW_FRAME) && + !checkBit(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE) && + m_disablePreviewCB == false) { + setBit(&m_callbackState, CALLBACK_STATE_PREVIEW_FRAME, false); + m_dataCb(CAMERA_MSG_PREVIEW_FRAME, previewCallbackHeap, 0, NULL, m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_PREVIEW_FRAME, false); + } + probeTimer.stop(); + getStreamFrameCount((struct camera2_stream *)previewBuf.addr[2], &fcount); + probeTimeMSEC = (int)probeTimer.durationMsecs(); + + if (probeTimeMSEC > 33 && probeTimeMSEC <= 66) + CLOGV("(%s[%d]):(%d) duration time(%5d msec)", __FUNCTION__, __LINE__, fcount, (int)probeTimer.durationMsecs()); + else if(probeTimeMSEC > 66) + CLOGD("(%s[%d]):(%d) duration time(%5d msec)", __FUNCTION__, __LINE__, fcount, (int)probeTimer.durationMsecs()); + else + CLOGV("(%s[%d]):(%d) duration time(%5d msec)", __FUNCTION__, __LINE__, fcount, (int)probeTimer.durationMsecs()); + +done: + if (previewCallbackHeap != NULL) { + previewCallbackHeap->release(previewCallbackHeap); + } + + return statusRet; +} + +status_t ExynosCamera::m_doCallbackToPreviewFunc( + __unused int32_t pipeId, + __unused ExynosCameraFrame *newFrame, + ExynosCameraBuffer callbackBuf, + ExynosCameraBuffer previewBuf) +{ + CLOGV("DEBUG(%s): converting callback to preview buffer", __FUNCTION__); + + int ret = 0; + status_t statusRet = NO_ERROR; + + int hwPreviewW = 0, hwPreviewH = 0; + int hwPreviewFormat = m_parameters->getHwPreviewFormat(); + bool useCSC = m_parameters->getCallbackNeedCSC(); + + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + camera_memory_t *previewCallbackHeap = NULL; + previewCallbackHeap = m_getMemoryCb(callbackBuf.fd[0], callbackBuf.size[0], 1, m_callbackCookie); + if (!previewCallbackHeap || previewCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, callbackBuf.size[0]); + statusRet = INVALID_OPERATION; + goto done; + } + + ret = m_setCallbackBufferInfo(&callbackBuf, (char *)previewCallbackHeap->data); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setCallbackBufferInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + statusRet = INVALID_OPERATION; + goto done; + } + + if (useCSC) { +#if 0 + if (m_exynosPreviewCSC) { + csc_set_src_format(m_exynosPreviewCSC, + ALIGN_DOWN(m_orgPreviewRect.w, CAMERA_MAGIC_ALIGN), ALIGN_DOWN(m_orgPreviewRect.h, CAMERA_MAGIC_ALIGN), + 0, 0, ALIGN_DOWN(m_orgPreviewRect.w, CAMERA_MAGIC_ALIGN), ALIGN_DOWN(m_orgPreviewRect.h, CAMERA_MAGIC_ALIGN), + V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat), + 1); + + csc_set_dst_format(m_exynosPreviewCSC, + previewW, previewH, + 0, 0, previewW, previewH, + V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat), + 0); + + csc_set_src_buffer(m_exynosPreviewCSC, + (void **)callbackBuf->virt.extP, CSC_MEMORY_USERPTR); + + csc_set_dst_buffer(m_exynosPreviewCSC, + (void **)previewBuf.fd.extFd, CSC_MEMORY_TYPE); + + if (csc_convert_with_rotation(m_exynosPreviewCSC, 0, m_flip_horizontal, 0) != 0) + CLOGE("ERR(%s):csc_convert() from callback to lcd fail", __FUNCTION__); + } else { + CLOGE("ERR(%s):m_exynosPreviewCSC == NULL", __FUNCTION__); + } +#else + CLOGW("WRN(%s[%d]): doCallbackToPreview use CSC is not yet possible", __FUNCTION__, __LINE__); +#endif + } else { /* neon memcpy */ + char *srcAddr = NULL; + char *dstAddr = NULL; + int planeCount = getYuvPlaneCount(hwPreviewFormat); + if 
(planeCount <= 0) { + CLOGE("ERR(%s[%d]):getYuvPlaneCount(%d) fail", __FUNCTION__, __LINE__, hwPreviewFormat); + statusRet = INVALID_OPERATION; + goto done; + } + + /* TODO : have to consider all fmt(planes) and stride */ + for (int plane = 0; plane < planeCount; plane++) { + srcAddr = callbackBuf.addr[plane]; + dstAddr = previewBuf.addr[plane]; + memcpy(dstAddr, srcAddr, callbackBuf.size[plane]); + } + } + +done: + if (previewCallbackHeap != NULL) { + previewCallbackHeap->release(previewCallbackHeap); + } + + return statusRet; +} + +status_t ExynosCamera::m_handlePreviewFrameFront(ExynosCameraFrame *frame) +{ + int ret = 0; + uint32_t skipCount = 0; + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrame *fdFrame = NULL; + ExynosCameraBuffer buffer; + int pipeID = 0; + ExynosCameraBuffer resultBuffer; + camera2_node_group node_group_info_isp; + enum pipeline pipe; + + entity = frame->getFrameDoneEntity(); + if (entity == NULL) { + CLOGE("ERR(%s[%d]):current entity is NULL, frameCount(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + /* TODO: doing exception handling */ + return true; + } + + if (entity->getEntityState() == ENTITY_STATE_FRAME_SKIP) + goto entity_state_complete; + + pipeID = entity->getPipeId(); + + switch(entity->getPipeId()) { + case PIPE_ISP: + m_debugFpsCheck(entity->getPipeId()); + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + frame->setMetaDataEnable(true); + + ret = m_putBuffers(m_ispBufferMgr, buffer.index); + + ret = frame->getSrcBuffer(PIPE_3AA, &buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (m_parameters->isReprocessing() == true) { + ret = m_captureSelector->manageFrameHoldList(frame, pipeID, true); + if (ret < 0) { + CLOGE("ERR(%s[%d]):manageFrameHoldList fail", __FUNCTION__, __LINE__); + return ret; + } + } else { + /* TODO: This is unusual case, flite buffer and isp buffer */ + ret = m_putBuffers(m_bayerBufferMgr, buffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + } + } + + skipCount = m_parameters->getFrameSkipCount(); + if (skipCount <= 0 && m_flagStartFaceDetection == true) { + /* Face detection */ + struct camera2_shot_ext shot; + frame->getDynamicMeta(&shot); + fdFrame = m_frameMgr->createFrame(m_parameters, frame->getFrameCount()); + fdFrame->storeDynamicMeta(&shot); + m_facedetectQ->pushProcessQ(&fdFrame); + } + + ret = generateFrame(m_3aa_ispFrameCount, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + return ret; + } + + m_setupEntity(PIPE_FLITE, newFrame); + m_previewFrameFactory->pushFrameToPipe(&newFrame, PIPE_FLITE); + + m_setupEntity(PIPE_3AA, newFrame); + m_setupEntity(PIPE_ISP, newFrame); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_ISP); + + m_3aa_ispFrameCount++; + + /* HACK: When SCC pipe is stopped, generate frame for SCC */ + if ((m_sccBufferMgr->getNumOfAvailableBuffer() > 2)) { + CLOGW("WRN(%s[%d]): Too many available SCC buffers, generating frame for SCC", __FUNCTION__, __LINE__); + + pipe = (m_parameters->isOwnScc(getCameraId()) == true) ? 
PIPE_SCC : PIPE_ISPC; + + while (m_sccBufferMgr->getNumOfAvailableBuffer() > 0) { + ret = generateFrame(m_sccFrameCount, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + return ret; + } + + m_setupEntity(pipe, newFrame); + m_previewFrameFactory->pushFrameToPipe(&newFrame, pipe); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipe); + + m_sccFrameCount++; + } + } + + break; + case PIPE_ISPC: + case PIPE_SCC: + m_debugFpsCheck(entity->getPipeId()); + if (entity->getDstBufState() == ENTITY_BUFFER_STATE_COMPLETE) { + ret = m_sccCaptureSelector->manageFrameHoldList(frame, entity->getPipeId(), false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):manageFrameHoldList fail", __FUNCTION__, __LINE__); + return ret; + } + } + + pipe = (m_parameters->isOwnScc(getCameraId()) == true) ? PIPE_SCC : PIPE_ISPC; + + while (m_sccBufferMgr->getNumOfAvailableBuffer() > 0) { + ret = generateFrameSccScp(pipe, &m_sccFrameCount, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrameSccScp fail", __FUNCTION__, __LINE__); + return ret; + } + + m_setupEntity(pipe, newFrame); + m_previewFrameFactory->pushFrameToPipe(&newFrame, pipe); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipe); + + m_sccFrameCount++; + } + break; + case PIPE_SCP: + if (entity->getDstBufState() == ENTITY_BUFFER_STATE_ERROR) { + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + ret = m_scpBufferMgr->cancelBuffer(buffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + + } else if (entity->getDstBufState() == ENTITY_BUFFER_STATE_COMPLETE) { + m_debugFpsCheck(entity->getPipeId()); + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + m_parameters->getFrameSkipCount(&m_frameSkipCount); + if (m_frameSkipCount > 0) { + CLOGD("INFO(%s[%d]):frameSkipCount=%d", __FUNCTION__, __LINE__, m_frameSkipCount); + ret = m_scpBufferMgr->cancelBuffer(buffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + } else { + nsecs_t timeStamp = (nsecs_t)frame->getTimeStamp(); + if (m_getRecordingEnabled() == true + && m_parameters->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME)) { + if (timeStamp <= 0L) { + CLOGE("WARN(%s[%d]):timeStamp(%lld) Skip", __FUNCTION__, __LINE__, timeStamp); + } else { + /* get Recording Image buffer */ + int bufIndex = -2; + ExynosCameraBuffer recordingBuffer; + ret = m_recordingBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &recordingBuffer); + if (ret < 0 || bufIndex < 0) { + if ((++m_recordingFrameSkipCount % 100) == 0) { + CLOGE("ERR(%s[%d]): Recording buffer is not available!! Recording Frames are Skipping(%d frames) (bufIndex=%d)", + __FUNCTION__, __LINE__, m_recordingFrameSkipCount, bufIndex); + } + } else { + if (m_recordingFrameSkipCount != 0) { + CLOGE("ERR(%s[%d]): Recording buffer is not available!! 
Recording Frames are Skipped(%d frames) (bufIndex=%d)", + __FUNCTION__, __LINE__, m_recordingFrameSkipCount, bufIndex); + m_recordingFrameSkipCount = 0; + } + m_recordingTimeStamp[bufIndex] = timeStamp; + ret = m_doPrviewToRecordingFunc(PIPE_GSC_VIDEO, buffer, recordingBuffer, timeStamp); + if (ret < 0) { + CLOGW("WARN(%s[%d]):recordingCallback Skip", __FUNCTION__, __LINE__); + } + } + } + } + + ExynosCameraBuffer callbackBuffer; + ExynosCameraFrame *callbackFrame = NULL; + struct camera2_shot_ext *shot_ext = new struct camera2_shot_ext; + + callbackFrame = m_previewFrameFactory->createNewFrameOnlyOnePipe(PIPE_SCP); + ret = frame->getDstBuffer(PIPE_SCP, &callbackBuffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + frame->getMetaData(shot_ext); + callbackFrame->storeDynamicMeta(shot_ext); + callbackFrame->setDstBuffer(PIPE_SCP, callbackBuffer); + delete shot_ext; + + CLOGV("INFO(%s[%d]):push frame to front previewQ", __FUNCTION__, __LINE__); + m_previewQ->pushProcessQ(&callbackFrame); + + CLOGV("DEBUG(%s[%d]):SCP done HAL-frameCount(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } + } else { + CLOGV("DEBUG(%s[%d]):SCP droped - SCP buffer is not ready HAL-frameCount(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (buffer.index >= 0) { + ret = m_scpBufferMgr->cancelBuffer(buffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]):SCP buffer return fail", __FUNCTION__, __LINE__); + } + m_previewFrameFactory->dump(); + } + + ret = generateFrameSccScp(PIPE_SCP, &m_scpFrameCount, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrameSccScp fail", __FUNCTION__, __LINE__); + return ret; + } + + m_setupEntity(PIPE_SCP, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + break; + } + + /*check preview drop...*/ + ret = newFrame->getDstBuffer(PIPE_SCP, &resultBuffer, m_previewFrameFactory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + if (resultBuffer.index < 0) { + newFrame->setRequest(PIPE_SCP, false); + newFrame->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + node_group_info_isp.capture[PERFRAME_FRONT_SCP_POS].request = 0; + newFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + + m_previewFrameFactory->dump(); + + /* when preview buffer is not ready, we should drop preview to make preview buffer ready */ + CLOGW("WARN(%s[%d]):Front preview drop. Failed to get preview buffer. 
FrameSkipcount(%d)", + __FUNCTION__, __LINE__, FRAME_SKIP_COUNT_PREVIEW_FRONT); + m_parameters->setFrameSkipCount(FRAME_SKIP_COUNT_PREVIEW_FRONT); + } + + m_previewFrameFactory->pushFrameToPipe(&newFrame, PIPE_SCP); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, PIPE_SCP); + + m_scpFrameCount++; + break; + default: + break; + } + + if (ret < 0) { + CLOGE("ERR(%s[%d]):put Buffer fail", __FUNCTION__, __LINE__); + return ret; + } + +entity_state_complete: + + ret = frame->setEntityState(entity->getPipeId(), ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState fail, pipeId(%d), state(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ENTITY_STATE_COMPLETE, ret); + return ret; + } + + if (frame->isComplete() == true) { + ret = m_removeFrameFromList(&m_processList, frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + frame->decRef(); + m_frameMgr->deleteFrame(frame); + } + + return NO_ERROR; +} + +status_t ExynosCamera::takePicture() +{ + int ret = 0; + int takePictureCount = m_takePictureCounter.getCount(); + int seriesShotCount = 0; + int currentSeriesShotMode = 0; + ExynosCameraFrame *newFrame = NULL; + int32_t reprocessingBayerMode = 0; + + int retryCount = 0; + + if (m_previewEnabled == false) { + CLOGE("DEBUG(%s):preview is stopped, return error", __FUNCTION__); + return INVALID_OPERATION; + } + + if (m_parameters != NULL) { + seriesShotCount = m_parameters->getSeriesShotCount(); + currentSeriesShotMode = m_parameters->getSeriesShotMode(); + reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + if (m_parameters->getVisionMode() == true) { + CLOGW("WRN(%s[%d]): Vision mode does not support", __FUNCTION__, __LINE__); + android_printAssert(NULL, LOG_TAG, "Cannot support this operation"); + + return INVALID_OPERATION; + } + } else { + CLOGE("ERR(%s):m_parameters is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + /* wait autoFocus is over for turning on preFlash */ + m_autoFocusThread->join(); + + m_parameters->setMarkingOfExifFlash(0); + + /* HACK Reset Preview Flag*/ + while ((m_resetPreview == true) && (retryCount < 10)) { + usleep(200000); + retryCount ++; + CLOGI("INFO(%s[%d]) retryCount(%d) m_resetPreview(%d)", __FUNCTION__, __LINE__, retryCount, m_resetPreview); + } + + if (takePictureCount < 0) { + CLOGE("ERR(%s[%d]): takePicture is called too much. takePictureCount(%d) / seriesShotCount(%d) . 
so, fail", + __FUNCTION__, __LINE__, takePictureCount, seriesShotCount); + return INVALID_OPERATION; + } else if (takePictureCount == 0) { + if (seriesShotCount == 0) { + m_captureLock.lock(); + if (m_pictureEnabled == true) { + CLOGE("ERR(%s[%d]): take picture is inprogressing", __FUNCTION__, __LINE__); + /* return NO_ERROR; */ + m_captureLock.unlock(); + return INVALID_OPERATION; + } + m_captureLock.unlock(); + + /* general shot */ + seriesShotCount = 1; + } + m_takePictureCounter.setCount(seriesShotCount); + } + + CLOGI("INFO(%s[%d]): takePicture start m_takePictureCounter(%d), currentSeriesShotMode(%d) seriesShotCount(%d)", + __FUNCTION__, __LINE__, m_takePictureCounter.getCount(), currentSeriesShotMode, seriesShotCount); + + m_printExynosCameraInfo(__FUNCTION__); + + if(m_parameters->getShotMode() == SHOT_MODE_RICH_TONE) { + m_hdrEnabled = true; + } else { + m_hdrEnabled = false; + } + + if (m_parameters->isReprocessing() == true) { + if (m_parameters->getPictureFormat() == V4L2_PIX_FMT_NV21) { + m_reprocessingFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING_NV21]; + m_pictureFrameFactory = m_reprocessingFrameFactory; + } else { + m_reprocessingFrameFactory = m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING]; + m_pictureFrameFactory = m_reprocessingFrameFactory; + } + } + + /* TODO: Dynamic bayer capture, currently support only single shot */ + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_PURE_DYNAMIC) { + int pipeId = m_getBayerPipeId(); + + if (m_bayerBufferMgr->getNumOfAvailableBuffer() > 0) { + m_previewFrameFactory->setRequestFLITE(true); + ret = generateFrame(-1, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + return ret; + } + m_previewFrameFactory->setRequestFLITE(false); + + ret = m_setupEntity(pipeId, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + return ret; + } + + m_previewFrameFactory->pushFrameToPipe(&newFrame, pipeId); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId); + } + + m_previewFrameFactory->startThread(pipeId); + } else if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) { + /* Comment out, because it included 3AA, it always running */ + /* + int pipeId = m_getBayerPipeId(); + + if (m_bayerBufferMgr->getNumOfAvailableBuffer() > 0) { + m_previewFrameFactory->setRequest3AC(true); + ret = generateFrame(-1, &newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrame fail", __FUNCTION__, __LINE__); + return ret; + } + m_previewFrameFactory->setRequest3AC(false); + + ret = m_setupEntity(pipeId, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail", __FUNCTION__, __LINE__); + return ret; + } + + m_previewFrameFactory->pushFrameToPipe(&newFrame, pipeId); + m_previewFrameFactory->setOutputFrameQToPipe(m_pipeFrameDoneQ, pipeId); + } + + m_previewFrameFactory->startThread(pipeId); + */ + if (m_bayerBufferMgr->getNumOfAvailableBuffer() > 0) + m_previewFrameFactory->setRequest3AC(true); + } else if (m_parameters->getRecordingHint() == true + && m_parameters->isUsing3acForIspc() == true) { + if (m_sccBufferMgr->getNumOfAvailableBuffer() > 0) + m_previewFrameFactory->setRequest3AC(true); + } + + + if (m_takePictureCounter.getCount() == seriesShotCount) { + ExynosCameraActivitySpecialCapture *m_sCaptureMgr = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + ExynosCameraActivityFlash *m_flashMgr = m_exynosCameraActivityControl->getFlashMgr(); + + m_stopBurstShot = false; + + if 
(m_parameters->isReprocessing() == true) + m_captureSelector->setIsFirstFrame(true); + else + m_sccCaptureSelector->setIsFirstFrame(true); + +#if 0 + if (m_parameters->isReprocessing() == false || m_parameters->getSeriesShotCount() > 0 || + m_hdrEnabled == true) { + m_pictureFrameFactory = m_previewFrameFactory; + if (m_parameters->getUseDynamicScc() == true) { + if (isOwnScc(getCameraId()) == true) + m_previewFrameFactory->setRequestSCC(true); + else + m_previewFrameFactory->setRequestISPC(true); + + /* boosting dynamic SCC */ + if (m_hdrEnabled == false && + currentSeriesShotMode == SERIES_SHOT_MODE_NONE) { + ret = m_boostDynamicCapture(); + if (ret < 0) + CLOGW("WRN(%s[%d]): fail to boosting dynamic capture", __FUNCTION__, __LINE__); + } + + } + } else { + m_pictureFrameFactory = m_reprocessingFrameFactory; + } +#endif + + if (m_parameters->getScalableSensorMode()) { + m_parameters->setScalableSensorMode(false); + stopPreview(); + setPreviewWindow(m_previewWindow); + startPreview(); + m_parameters->setScalableSensorMode(true); + } + + CLOGI("INFO(%s[%d]): takePicture enabled, takePictureCount(%d)", + __FUNCTION__, __LINE__, m_takePictureCounter.getCount()); + m_pictureEnabled = true; + m_takePictureCounter.decCount(); + m_isNeedAllocPictureBuffer = true; + + m_startPictureBufferThread->join(); + + if (m_parameters->isReprocessing() == true) { + m_startPictureInternalThread->join(); + +#ifdef BURST_CAPTURE + if (seriesShotCount > 1) { + int allocCount = 0; + int addCount = 0; + CLOGD("DEBUG(%s[%d]): realloc buffer for burst shot", __FUNCTION__, __LINE__); + m_burstRealloc = false; + + if (m_parameters->isHWFCEnabled() == false) { + allocCount = m_sccReprocessingBufferMgr->getAllocatedBufferCount(); + addCount = (allocCount <= NUM_BURST_GSC_JPEG_INIT_BUFFER)?(NUM_BURST_GSC_JPEG_INIT_BUFFER-allocCount):0; + if (addCount > 0) + m_sccReprocessingBufferMgr->increase(addCount); + } + + allocCount = m_jpegBufferMgr->getAllocatedBufferCount(); + addCount = (allocCount <= NUM_BURST_GSC_JPEG_INIT_BUFFER)?(NUM_BURST_GSC_JPEG_INIT_BUFFER-allocCount):0; + if (addCount > 0) + m_jpegBufferMgr->increase(addCount); + + m_isNeedAllocPictureBuffer = true; + } +#endif + } + + CLOGD("DEBUG(%s[%d]): currentSeriesShotMode(%d), m_flashMgr->getNeedCaptureFlash(%d)", + __FUNCTION__, __LINE__, currentSeriesShotMode, m_flashMgr->getNeedCaptureFlash()); + +#ifdef RAWDUMP_CAPTURE + if(m_use_companion == true) { + CLOGD("DEBUG(%s[%d]): start set Raw Capture mode", __FUNCTION__, __LINE__); + m_sCaptureMgr->resetRawCaptureFcount(); + m_sCaptureMgr->setCaptureMode(ExynosCameraActivitySpecialCapture::SCAPTURE_MODE_RAW); + + m_parameters->setRawCaptureModeOn(true); + + enum aa_capture_intent captureIntent = AA_CAPTRUE_INTENT_STILL_CAPTURE_COMP_BYPASS; + + ret = m_previewFrameFactory->setControl(V4L2_CID_IS_INTENT, captureIntent, PIPE_3AA); + if (ret < 0) + CLOGE("ERR(%s[%d]):setControl(STILL_CAPTURE_RAW) fail. 
ret(%d) intent(%d)", + __FUNCTION__, __LINE__, ret, captureIntent); + m_sCaptureMgr->setCaptureStep(ExynosCameraActivitySpecialCapture::SCAPTURE_STEP_START); + } +#else + if (m_hdrEnabled == true) { + seriesShotCount = HDR_REPROCESSING_COUNT; + m_sCaptureMgr->setCaptureStep(ExynosCameraActivitySpecialCapture::SCAPTURE_STEP_START); + m_sCaptureMgr->resetHdrStartFcount(); + m_parameters->setFrameSkipCount(13); + } else if ((m_flashMgr->getNeedCaptureFlash() == true && currentSeriesShotMode == SERIES_SHOT_MODE_NONE)) { + m_parameters->setMarkingOfExifFlash(1); + + if (m_flashMgr->checkPreFlash() == false && m_isTryStopFlash == false) { + m_flashMgr->setCaptureStatus(true); + CLOGD("DEBUG(%s[%d]): checkPreFlash(false), Start auto focus internally", __FUNCTION__, __LINE__); + m_autoFocusType = AUTO_FOCUS_HAL; + m_flashMgr->setFlashTrigerPath(ExynosCameraActivityFlash::FLASH_TRIGGER_SHORT_BUTTON); + m_flashMgr->setFlashWaitCancel(false); + + /* execute autoFocus for preFlash */ + m_autoFocusThread->requestExitAndWait(); + m_autoFocusThread->run(PRIORITY_DEFAULT); + } + } +#endif /* RAWDUMP_CAPTURE */ +#ifndef RAWDUMP_CAPTURE + if (currentSeriesShotMode == SERIES_SHOT_MODE_NONE && m_flashMgr->getNeedCaptureFlash() == false) + m_isZSLCaptureOn = true; +#endif + m_parameters->setSetfileYuvRange(); + + m_reprocessingCounter.setCount(seriesShotCount); + if (m_prePictureThread->isRunning() == false) { + if (m_prePictureThread->run(PRIORITY_DEFAULT) != NO_ERROR) { + CLOGE("ERR(%s[%d]):couldn't run pre-picture thread", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + m_jpegCounter.setCount(seriesShotCount); + m_pictureCounter.setCount(seriesShotCount); + if (m_pictureThread->isRunning() == false) + ret = m_pictureThread->run(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):couldn't run picture thread, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + + /* HDR, LLS, SIS should make YUV callback data. so don't use jpeg thread */ + if (!(m_hdrEnabled == true || + currentSeriesShotMode == SERIES_SHOT_MODE_LLS || + currentSeriesShotMode == SERIES_SHOT_MODE_SIS || + m_parameters->getShotMode() == SHOT_MODE_FRONT_PANORAMA)) { + m_jpegCallbackThread->join(); + ret = m_jpegCallbackThread->run(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):couldn't run jpeg callback thread, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + } + } else { + /* HDR, LLS, SIS should make YUV callback data. so don't use jpeg thread */ + if (!(m_hdrEnabled == true || + currentSeriesShotMode == SERIES_SHOT_MODE_LLS || + currentSeriesShotMode == SERIES_SHOT_MODE_SIS || + m_parameters->getShotMode() == SHOT_MODE_FRONT_PANORAMA)) { + /* series shot : push buffer to callback thread. 
*/ + m_jpegCallbackThread->join(); + ret = m_jpegCallbackThread->run(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):couldn't run jpeg callback thread, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + } + CLOGI("INFO(%s[%d]): series shot takePicture, takePictureCount(%d)", + __FUNCTION__, __LINE__, m_takePictureCounter.getCount()); + m_takePictureCounter.decCount(); + + /* TODO : in case of no reprocessing, use a zsl scheme or increase the buffer count */ + if (m_parameters->isReprocessing() == false) + m_pictureEnabled = true; + } + + return NO_ERROR; +} + +bool ExynosCamera::m_reprocessingPrePictureInternal(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + bool loop = false; + int retry = 0; + int retryIsp = 0; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrame *doneFrame = NULL; + ExynosCameraFrameEntity *entity = NULL; + camera2_shot_ext *shot_ext = NULL; + camera2_stream *shot_stream = NULL; + uint32_t bayerFrameCount = 0; + struct camera2_node_output output_crop_info; + + ExynosCameraBufferManager *bufferMgr = NULL; + + int bayerPipeId = 0; + int prePictureDonePipeId = 0; + enum pipeline pipe; + ExynosCameraBuffer bayerBuffer; + ExynosCameraBuffer ispReprocessingBuffer; + ExynosCameraBuffer yuvReprocessingBuffer; + ExynosCameraBuffer thumbnailBuffer; + int bufferIndex = -2; + enum REPROCESSING_BAYER_MODE reprocessingBayerMode = (enum REPROCESSING_BAYER_MODE)(m_parameters->getReprocessingBayerMode()); + + camera2_shot_ext *updateDmShot = new struct camera2_shot_ext; + memset(updateDmShot, 0x0, sizeof(struct camera2_shot_ext)); + + bayerBuffer.index = -2; + ispReprocessingBuffer.index = -2; + yuvReprocessingBuffer.index = -2; + thumbnailBuffer.index = -2; + + int thumbnailW = 0, thumbnailH = 0; + + /* + * in case of pureBayer and 3aa_isp OTF, the buffer will go to ISP directly + */ + if (m_parameters->isUseYuvReprocessing() == false) { + if (m_parameters->getUsePureBayerReprocessing() == true) { + if (m_parameters->isReprocessing3aaIspOTF() == true) + prePictureDonePipeId = PIPE_3AA_REPROCESSING; + else + prePictureDonePipeId = PIPE_ISP_REPROCESSING; + } else { + prePictureDonePipeId = PIPE_ISP_REPROCESSING; + } + } else { + prePictureDonePipeId = PIPE_MCSC_REPROCESSING; + } + + if (m_parameters->getHighResolutionCallbackMode() == true) { + if (m_highResolutionCallbackRunning == true) { + /* will be removed */ + while (m_skipReprocessing == true) { + usleep(WAITING_TIME); + if (m_skipReprocessing == false) { + CLOGD("DEBUG(%s[%d]):stop skip frame for high resolution preview callback", __FUNCTION__, __LINE__); + break; + } + } + } else if (m_highResolutionCallbackRunning == false) { + CLOGW("WRN(%s[%d]): m_reprocessingThreadfunc stop for high resolution preview callback", __FUNCTION__, __LINE__); + loop = false; + goto CLEAN_FRAME; + } + } + + if (m_isZSLCaptureOn == true) { + CLOGD("INFO(%s[%d]):fast shutter callback!!", __FUNCTION__, __LINE__); + m_shutterCallbackThread->join(); + m_shutterCallbackThread->run(); + } + + /* Get Bayer buffer for reprocessing */ + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON + || reprocessingBayerMode == REPROCESSING_BAYER_MODE_PURE_DYNAMIC) { + ret = m_getBayerBuffer(m_getBayerPipeId(), &bayerBuffer); + } else if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON + || reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) { + ret = m_getBayerBuffer(m_getBayerPipeId(), &bayerBuffer, updateDmShot); + } else { + 
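/* unknown reprocessing bayer mode: log the bad value and abort this capture attempt */ +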
CLOGE("ERR(%s[%d]): bayer mode is not valid (%d)", + __FUNCTION__, __LINE__, reprocessingBayerMode); + goto CLEAN_FRAME; + } + + if (ret < 0) { + CLOGE("ERR(%s[%d]): getBayerBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + CLOGD("DEBUG(%s[%d]):bayerBuffer index %d", __FUNCTION__, __LINE__, bayerBuffer.index); + + if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) + m_captureSelector->clearList(m_getBayerPipeId(), false, m_previewFrameFactory->getNodeType(PIPE_3AC)); + + if (m_isZSLCaptureOn == false) { + m_shutterCallbackThread->join(); + m_shutterCallbackThread->run(); + } + + m_isZSLCaptureOn = false; + + if (m_parameters->isUseYuvReprocessingForThumbnail() == true) + m_parameters->getThumbnailSize(&thumbnailW, &thumbnailH); + + if (m_parameters->isHWFCEnabled() == true) { + if (m_hdrEnabled == true + || m_parameters->getHighResolutionCallbackMode() == true + || m_parameters->getPictureFormat() == V4L2_PIX_FMT_NV21 + || (m_parameters->isUseYuvReprocessingForThumbnail() == true + && thumbnailW > 0 && thumbnailH > 0)) { + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, false); + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, false); + } else { + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, true); + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, true); + } + } + + /* This is reprocessing path for Thumbnail */ + if (m_parameters->isUseYuvReprocessingForThumbnail() == true + && m_parameters->getPictureFormat() != V4L2_PIX_FMT_NV21 + && m_parameters->getHighResolutionCallbackMode() == false + && m_hdrEnabled == false + && thumbnailW > 0 && thumbnailH > 0) { + /* Generate reprocessing Frame */ + newFrame = NULL; + ret = generateFrameReprocessing(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrameReprocessing fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + /* TODO: HACK: Will be removed, this is driver's job */ + ret = m_convertingStreamToShotExt(&bayerBuffer, &output_crop_info); + if (ret < 0) { + CLOGE("ERR(%s[%d]): shot_stream to shot_ext converting fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + camera2_node_group node_group_info; + ExynosRect ratioCropSize; + + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + + setLeaderSizeToNodeGroupInfo(&node_group_info, + output_crop_info.cropRegion[0], + output_crop_info.cropRegion[1], + output_crop_info.cropRegion[2], + output_crop_info.cropRegion[3]); + + ret = getCropRectAlign( + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + thumbnailW, thumbnailH, + &ratioCropSize.x, &ratioCropSize.y, &ratioCropSize.w, &ratioCropSize.h, + CAMERA_MCSC_ALIGN, 2, 0, 1.0); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getCropRectAlign failed. 
MCSC in_crop %dx%d, MCSC out_size %dx%d", + __FUNCTION__, __LINE__, + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + thumbnailW, thumbnailH); + + ratioCropSize.x = 0; + ratioCropSize.y = 0; + ratioCropSize.w = node_group_info.leader.input.cropRegion[2]; + ratioCropSize.h = node_group_info.leader.input.cropRegion[3]; + } + + setCaptureCropNScaleSizeToNodeGroupInfo(&node_group_info, + PERFRAME_REPROCESSING_SCC_POS, + ratioCropSize.x, ratioCropSize.y, + ratioCropSize.w, ratioCropSize.h, + thumbnailW, thumbnailH); + + CLOGV("DEBUG(%s[%d]):leader input(%d %d %d %d), output(%d %d %d %d)", __FUNCTION__, __LINE__, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + + CLOGV("DEBUG(%s[%d]):capture input(%d %d %d %d), output(%d %d %d %d)", __FUNCTION__, __LINE__, + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3]); + + if (node_group_info.leader.output.cropRegion[2] < node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2]) { + CLOGI("INFO(%s[%d]:(%d -> %d))", __FUNCTION__, __LINE__, + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2], + node_group_info.leader.output.cropRegion[2]); + + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2] = node_group_info.leader.output.cropRegion[2]; + } + if (node_group_info.leader.output.cropRegion[3] < node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3]) { + CLOGI("INFO(%s[%d]:(%d -> %d))", __FUNCTION__, __LINE__, + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3], + node_group_info.leader.output.cropRegion[3]); + + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3] = node_group_info.leader.output.cropRegion[3]; + } + + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + + shot_ext = (struct camera2_shot_ext *)(bayerBuffer.addr[bayerBuffer.planeCount-1]); + + /* Meta setting */ + if (shot_ext != NULL) { + ret = newFrame->storeDynamicMeta(updateDmShot); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + ret = newFrame->storeUserDynamicMeta(updateDmShot); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeUserDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + + CLOGD("DEBUG(%s[%d]):meta_shot_ext->shot.dm.request.frameCount : %d", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount(shot_ext)); + } else { + CLOGE("DEBUG(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + } 
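+ /* Thumbnail pre-pass: start the reprocessing pipeline threads, attach the bayer buffer to the + * first entity, push the frame to the bayer pipe, wait for the MCSC output on dstIspReprocessingQ, + * then copy that output into a thumbnail buffer and return both buffers to their managers. */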
+ + ret = m_reprocessingFrameFactory->startInitialThreads(); + if (ret < 0) { + CLOGE("ERR(%s):startInitialThreads fail", __FUNCTION__); + goto CLEAN_FRAME; + } + + /* Get bayerPipeId at first entity */ + bayerPipeId = newFrame->getFirstEntity()->getPipeId(); + CLOGD("DEBUG(%s[%d]): bayer Pipe ID(%d)", __FUNCTION__, __LINE__, bayerPipeId); + + /* Check available buffer */ + ret = m_getBufferManager(bayerPipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getBufferManager fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + if (bufferMgr != NULL) { + ret = m_checkBufferAvailable(bayerPipeId, bufferMgr); + if (ret < 0) { + CLOGE("ERR(%s[%d]): Waiting buffer timeout, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + } + + ret = m_setupEntity(bayerPipeId, newFrame, &bayerBuffer, NULL); + if (ret < 0) { + CLOGE("ERR(%s[%d]:setupEntity fail, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + + m_reprocessingFrameFactory->setOutputFrameQToPipe(dstIspReprocessingQ, prePictureDonePipeId); + + while (dstIspReprocessingQ->getSizeOfProcessQ() > 0) { + dstIspReprocessingQ->popProcessQ(&doneFrame); + + doneFrame->decRef(); + m_frameMgr->deleteFrame(doneFrame); + doneFrame = NULL; + } + + newFrame->incRef(); + + /* push the newFrameReprocessing to pipe */ + m_reprocessingFrameFactory->pushFrameToPipe(&newFrame, bayerPipeId); + + /* wait ISP done */ + CLOGI("INFO(%s[%d]):wait ISP output", __FUNCTION__, __LINE__); + + doneFrame = NULL; + do { + ret = dstIspReprocessingQ->waitAndPopProcessQ(&doneFrame); + retryIsp++; + } while (ret == TIMED_OUT && retryIsp < 100 && m_flagThreadStop != true); + + if (ret < 0) { + CLOGW("WARN(%s[%d]):ISP wait and pop return, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + /* goto CLEAN; */ + } + + if (doneFrame == NULL) { + CLOGE("ERR(%s[%d]):doneFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN_FRAME; + } + + doneFrame->decRef(); + + ret = newFrame->setEntityState(bayerPipeId, ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState(ENTITY_STATE_PROCESSING) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, bayerPipeId, ret); + + if (updateDmShot != NULL) { + delete updateDmShot; + updateDmShot = NULL; + } + + return ret; + } + + CLOGI("INFO(%s[%d]):ISP output done", __FUNCTION__, __LINE__); + + newFrame->setMetaDataEnable(true); + + /* Copy thumbnail image to thumbnail buffer */ + ret = newFrame->getDstBuffer(bayerPipeId, &yuvReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_MCSC0_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + + ret = m_thumbnailBufferMgr->getBuffer(&bufferIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &thumbnailBuffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):get thumbnail Buffer fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + memcpy(thumbnailBuffer.addr[0], yuvReprocessingBuffer.addr[0], thumbnailBuffer.size[0]); + + /* Put buffers */ + ret = m_putBuffers(m_thumbnailBufferMgr, thumbnailBuffer.index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ThumbnailBuffer putBuffer fail, index(%d), ret(%d)", + __FUNCTION__, __LINE__, thumbnailBuffer.index, ret); + goto CLEAN_FRAME; + } + + /* Put reprocessing dst buffer */ + ret = m_getBufferManager(bayerPipeId, &bufferMgr, 
DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBufferManager fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + if (bufferMgr != NULL) { + ret = m_putBuffers(bufferMgr, yuvReprocessingBuffer.index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):DstBuffer putBuffer fail, index(%d), ret(%d)", + __FUNCTION__, __LINE__, yuvReprocessingBuffer.index, ret); + goto CLEAN_FRAME; + } + } + + /* Delete new frame */ + CLOGD("DEBUG(%s[%d]):Reprocessing frame for thumbnail delete(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + + /* Set JPEG request true */ + if (m_parameters->isHWFCEnabled() == true) { + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, true); + m_reprocessingFrameFactory->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, true); + } + } + + /* Generate reprocessing Frame */ + newFrame = NULL; + ret = generateFrameReprocessing(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):generateFrameReprocessing fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + +#ifndef RAWDUMP_CAPTURE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + int sensorMaxW, sensorMaxH; + int sensorMarginW, sensorMarginH; + bool bRet; + char filePath[70]; + + memset(filePath, 0, sizeof(filePath)); + snprintf(filePath, sizeof(filePath), "/data/media/0/RawCapture%d_%d.raw",m_cameraId, m_fliteFrameCount); + if (m_parameters->getUsePureBayerReprocessing() == true) + /* Pure Bayer Buffer Size == MaxPictureSize + Sensor Margin == Max Sensor Size */ + m_parameters->getMaxSensorSize(&sensorMaxW, &sensorMaxH); + else + m_parameters->getMaxPictureSize(&sensorMaxW, &sensorMaxH); + + bRet = dumpToFile((char *)filePath, + bayerBuffer.addr[0], + sensorMaxW * sensorMaxH * 2); + if (bRet != true) + CLOGE("couldn't make a raw file"); + } +#endif /* DEBUG_RAWDUMP */ +#endif + + if (m_parameters->getUsePureBayerReprocessing() == false) { + if (m_parameters->isUseYuvReprocessingForThumbnail() == false + || m_parameters->getPictureFormat() == V4L2_PIX_FMT_NV21 + || m_parameters->getHighResolutionCallbackMode() == true + || m_hdrEnabled == true + || thumbnailW <= 0 || thumbnailH <= 0) { + /* TODO: HACK: Will be removed, this is driver's job */ + ret = m_convertingStreamToShotExt(&bayerBuffer, &output_crop_info); + if (ret < 0) { + CLOGE("ERR(%s[%d]): shot_stream to shot_ext converting fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + } + + camera2_node_group node_group_info; + ExynosRect ratioCropSize; + int pictureW = 0, pictureH = 0; + + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + + if (m_parameters->isUseYuvReprocessing() == true) + newFrame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + else + newFrame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_DIRTY_REPROCESSING_ISP); + + m_parameters->getPictureSize(&pictureW, &pictureH); + + setLeaderSizeToNodeGroupInfo(&node_group_info, + output_crop_info.cropRegion[0], + output_crop_info.cropRegion[1], + output_crop_info.cropRegion[2], + output_crop_info.cropRegion[3]); + + if (m_parameters->isUseYuvReprocessing() == true) { + ret = getCropRectAlign( + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + pictureW, pictureH, + &ratioCropSize.x, &ratioCropSize.y, &ratioCropSize.w, &ratioCropSize.h, + CAMERA_MCSC_ALIGN, 2, 0, 1.0); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getCropRectAlign failed. 
MCSC in_crop %dx%d, MCSC out_size %dx%d", + __FUNCTION__, __LINE__, + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + pictureW, pictureH); + + ratioCropSize.x = 0; + ratioCropSize.y = 0; + ratioCropSize.w = node_group_info.leader.input.cropRegion[2]; + ratioCropSize.h = node_group_info.leader.input.cropRegion[3]; + } + + setCaptureCropNScaleSizeToNodeGroupInfo(&node_group_info, + PERFRAME_REPROCESSING_SCC_POS, + ratioCropSize.x, ratioCropSize.y, + ratioCropSize.w, ratioCropSize.h, + pictureW, pictureH); + } else { + setCaptureCropNScaleSizeToNodeGroupInfo(&node_group_info, + PERFRAME_REPROCESSING_SCC_POS, + output_crop_info.cropRegion[0], + output_crop_info.cropRegion[1], + output_crop_info.cropRegion[2], + output_crop_info.cropRegion[3], + output_crop_info.cropRegion[2], + output_crop_info.cropRegion[3]); + } + + CLOGV("DEBUG(%s[%d]):leader input(%d %d %d %d), output(%d %d %d %d)", __FUNCTION__, __LINE__, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + + CLOGV("DEBUG(%s[%d]):capture input(%d %d %d %d), output(%d %d %d %d)", __FUNCTION__, __LINE__, + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[0], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[1], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[0], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[1], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[2], + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].output.cropRegion[3]); + + if (node_group_info.leader.output.cropRegion[2] < node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2]) { + CLOGI("INFO(%s[%d]:(%d -> %d))", __FUNCTION__, __LINE__, + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2], + node_group_info.leader.output.cropRegion[2]); + + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[2] = node_group_info.leader.output.cropRegion[2]; + } + if (node_group_info.leader.output.cropRegion[3] < node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3]) { + CLOGI("INFO(%s[%d]:(%d -> %d))", __FUNCTION__, __LINE__, + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3], + node_group_info.leader.output.cropRegion[3]); + + node_group_info.capture[PERFRAME_REPROCESSING_SCC_POS].input.cropRegion[3] = node_group_info.leader.output.cropRegion[3]; + } + + if (m_parameters->isUseYuvReprocessing() == true) + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + else + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_DIRTY_REPROCESSING_ISP); + } + + shot_ext = (struct camera2_shot_ext *)(bayerBuffer.addr[bayerBuffer.planeCount-1]); + + /* Meta setting */ + if (shot_ext != NULL) { + if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON || + m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_PURE_DYNAMIC) { + ret = 
newFrame->storeDynamicMeta(shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + ret = newFrame->storeUserDynamicMeta(shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeUserDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + } else if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON || + m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) { + ret = newFrame->storeDynamicMeta(updateDmShot); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + ret = newFrame->storeUserDynamicMeta(updateDmShot); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeUserDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + } + + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + + CLOGD("DEBUG(%s[%d]):meta_shot_ext->shot.dm.request.frameCount : %d", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount(shot_ext)); + } else { + CLOGE("DEBUG(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + } + + if (m_parameters->isUseYuvReprocessing() == true) + pipe = PIPE_MCSC_REPROCESSING; + else if (m_parameters->isReprocessing3aaIspOTF() == false) + pipe = PIPE_ISP_REPROCESSING; + else + pipe = PIPE_3AA_REPROCESSING; + + if (m_parameters->getHighResolutionCallbackMode() == true + && m_highResolutionCallbackRunning == true) + m_reprocessingFrameFactory->setFrameDoneQToPipe(m_highResolutionCallbackQ, pipe); + else if (m_parameters->isUseYuvReprocessingForThumbnail() == false) + m_reprocessingFrameFactory->setFrameDoneQToPipe(dstSccReprocessingQ, pipe); + + /* Add frame to post processing list */ + CLOGD("DEBUG(%s[%d]):postPictureList size(%zd), frame(%d)", + __FUNCTION__, __LINE__, m_postProcessList.size(), newFrame->getFrameCount()); + newFrame->frameLock(); + m_postProcessList.push_back(newFrame); + + ret = m_reprocessingFrameFactory->startInitialThreads(); + if (ret < 0) { + CLOGE("ERR(%s):startInitialThreads fail", __FUNCTION__); + goto CLEAN_FRAME; + } + + /* Get bayerPipeId at first entity */ + bayerPipeId = newFrame->getFirstEntity()->getPipeId(); + CLOGD("DEBUG(%s[%d]): bayer Pipe ID(%d)", __FUNCTION__, __LINE__, bayerPipeId); + + /* Check available buffer */ + ret = m_getBufferManager(bayerPipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getBufferManager fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + if (bufferMgr != NULL) { + ret = m_checkBufferAvailable(bayerPipeId, bufferMgr); + if (ret < 0) { + CLOGE("ERR(%s[%d]): Waiting buffer timeout, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + } + + if (m_parameters->isHWFCEnabled() == true) { + ret = m_checkBufferAvailable(PIPE_HWFC_JPEG_DST_REPROCESSING, m_jpegBufferMgr); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): Waiting buffer timeout, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + + ret = m_checkBufferAvailable(PIPE_HWFC_THUMB_SRC_REPROCESSING, m_thumbnailBufferMgr); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): Waiting buffer timeout, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + } + + ret = m_setupEntity(bayerPipeId, newFrame, &bayerBuffer, NULL); + if (ret < 0) { + CLOGE("ERR(%s[%d]:setupEntity fail, bayerPipeId(%d), 
ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + + m_reprocessingFrameFactory->setOutputFrameQToPipe(dstIspReprocessingQ, prePictureDonePipeId); + + while (dstIspReprocessingQ->getSizeOfProcessQ() > 0) { + dstIspReprocessingQ->popProcessQ(&doneFrame); + + if (m_parameters->isUseYuvReprocessingForThumbnail() == true + && m_parameters->getHighResolutionCallbackMode() == false) { + doneFrame->decRef(); + m_frameMgr->deleteFrame(doneFrame); + } + doneFrame = NULL; + } + + newFrame->incRef(); + + /* push the newFrameReprocessing to pipe */ + m_reprocessingFrameFactory->pushFrameToPipe(&newFrame, bayerPipeId); + + /* When enabled SCC capture or pureBayerReprocessing, we need to start bayer pipe thread */ + if (m_parameters->getUsePureBayerReprocessing() == true || + m_parameters->isSccCapture() == true) + m_reprocessingFrameFactory->startThread(bayerPipeId); + + /* wait ISP done */ + CLOGI("INFO(%s[%d]):wait ISP output", __FUNCTION__, __LINE__); + + doneFrame = NULL; + do { + ret = dstIspReprocessingQ->waitAndPopProcessQ(&doneFrame); + retryIsp++; + } while (ret == TIMED_OUT && retryIsp < 100 && m_flagThreadStop != true); + + if (ret < 0) { + CLOGW("WARN(%s[%d]):ISP wait and pop return, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + /* goto CLEAN; */ + } + + if (doneFrame == NULL) { + CLOGE("ERR(%s[%d]):doneFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN_FRAME; + } + + if (m_parameters->isUseYuvReprocessingForThumbnail() == true + && m_parameters->getHighResolutionCallbackMode() == false) + dstSccReprocessingQ->pushProcessQ(&newFrame); + + ret = newFrame->setEntityState(bayerPipeId, ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState(ENTITY_STATE_PROCESSING) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, bayerPipeId, ret); + + if (updateDmShot != NULL) { + delete updateDmShot; + updateDmShot = NULL; + } + + return ret; + } + + CLOGI("INFO(%s[%d]):ISP output done", __FUNCTION__, __LINE__); + + newFrame->setMetaDataEnable(true); + + if (m_parameters->isUseYuvReprocessing() == true) { + /* put YUV buffer */ + ret = m_putBuffers(m_sccBufferMgr, bayerBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):MCSC src putBuffer fail, index(%d), ret(%d)", __FUNCTION__, __LINE__, bayerBuffer.index, ret); + goto CLEAN_FRAME; + } + } else { + /* put bayer buffer */ + ret = m_putBuffers(m_bayerBufferMgr, bayerBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):3AA src putBuffer fail, index(%d), ret(%d)", __FUNCTION__, __LINE__, bayerBuffer.index, ret); + goto CLEAN_FRAME; + } + } + + /* put isp buffer */ + if (m_parameters->getUsePureBayerReprocessing() == true) { + ret = m_getBufferManager(bayerPipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getBufferManager fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto CLEAN_FRAME; + } + + if (bufferMgr != NULL) { + ret = newFrame->getDstBuffer(bayerPipeId, &ispReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_FLITE)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, bayerPipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, bayerPipeId, ret); + goto CLEAN_FRAME; + } + + ret = m_putBuffers(m_ispReprocessingBufferMgr, ispReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): ISP src putBuffer fail, index(%d), ret(%d)", __FUNCTION__, __LINE__, bayerBuffer.index, ret); + goto CLEAN_FRAME; + } + } + } + + m_reprocessingCounter.decCount(); + + CLOGI("INFO(%s[%d]):reprocessing complete, remaining count(%d)", 
__FUNCTION__, __LINE__, m_reprocessingCounter.getCount()); + + if (m_hdrEnabled) { + ExynosCameraActivitySpecialCapture *m_sCaptureMgr; + + m_sCaptureMgr = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + + if (m_reprocessingCounter.getCount() == 0) + m_sCaptureMgr->setCaptureStep(ExynosCameraActivitySpecialCapture::SCAPTURE_STEP_OFF); + } + + if (newFrame != NULL) { + CLOGD("DEBUG(%s[%d]): Reprocessing frame delete(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + if ((m_parameters->getHighResolutionCallbackMode() == true) && + (m_highResolutionCallbackRunning == true)) + loop = true; + + if (m_reprocessingCounter.getCount() > 0) + loop = true; + + if (updateDmShot != NULL) { + delete updateDmShot; + updateDmShot = NULL; + } + + /* one shot */ + return loop; + +CLEAN_FRAME: + /* newFrame is not pushed any pipes, we can delete newFrame */ + if (newFrame != NULL) { + newFrame->printEntity(); + CLOGD("DEBUG(%s[%d]): Reprocessing frame delete(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + +CLEAN: + if (0 <= bayerBuffer.index) { + if (m_parameters->isUseYuvReprocessing() == true) { + /* put YUV buffer */ + ret = m_putBuffers(m_sccBufferMgr, bayerBuffer.index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_putBuffer(m_sccBufferMgr, %d) fail, ret(%d)", + __FUNCTION__, __LINE__, bayerBuffer.index, ret); + } + } else { + /* put Bayer buffer */ + ret = m_putBuffers(m_bayerBufferMgr, bayerBuffer.index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_putBuffer(m_bayerBufferMgr, %d) fail, ret(%d)", + __FUNCTION__, __LINE__, bayerBuffer.index, ret); + } + } + } + if (ispReprocessingBuffer.index != -2 && m_ispReprocessingBufferMgr != NULL) + m_putBuffers(m_ispReprocessingBufferMgr, ispReprocessingBuffer.index); + + /* newFrame is already pushed some pipes, we can not delete newFrame until frame is complete */ + if (newFrame != NULL) { + newFrame->frameUnlock(); + ret = m_removeFrameFromList(&m_postProcessList, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + newFrame->printEntity(); + CLOGD("DEBUG(%s[%d]): Reprocessing frame delete(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + if (updateDmShot != NULL) { + delete updateDmShot; + updateDmShot = NULL; + } + + if (m_hdrEnabled) { + ExynosCameraActivitySpecialCapture *m_sCaptureMgr; + + m_sCaptureMgr = m_exynosCameraActivityControl->getSpecialCaptureMgr(); + + if (m_reprocessingCounter.getCount() == 0) + m_sCaptureMgr->setCaptureStep(ExynosCameraActivitySpecialCapture::SCAPTURE_STEP_OFF); + } + + if ((m_parameters->getHighResolutionCallbackMode() == true) && + (m_highResolutionCallbackRunning == true)) + loop = true; + + if (m_reprocessingCounter.getCount() > 0) + loop = true; + + CLOGI("INFO(%s[%d]): reprocessing fail, remaining count(%d)", __FUNCTION__, __LINE__, m_reprocessingCounter.getCount()); + + return loop; +} + +bool ExynosCamera::m_CheckBurstJpegSavingPath(char *dir) +{ + int ret = false; + + struct dirent **items; + struct stat fstat; + + char *burstPath = m_parameters->getSeriesShotFilePath(); + + char ChangeDirPath[BURST_CAPTURE_FILEPATH_SIZE] = {'\0',}; + + memset(m_burstSavePath, 0, sizeof(m_burstSavePath)); + + // Check access path + if (burstPath && 
strlen(burstPath) < sizeof(m_burstSavePath)) { + strncpy(m_burstSavePath, burstPath, sizeof(m_burstSavePath)-1); + } else { + CLOGW("WARN(%s[%d]) Parameter burstPath is NULL or too long. Change to default path", __FUNCTION__, __LINE__); + snprintf(m_burstSavePath, sizeof(m_burstSavePath), "%s/DCIM/Camera/", dir); + } + + if (access(m_burstSavePath, 0)==0) { + CLOGW("WARN(%s[%d]) can access dir = %s", __FUNCTION__, __LINE__, m_burstSavePath); + return true; + } + + CLOGW("WARN(%s[%d]) can't find dir = %s", __FUNCTION__, __LINE__, m_burstSavePath); + + // If the directory can't be accessed, search for the "DCIM/Camera" folder under the current directory + int iitems = scandir(dir, &items, NULL, alphasort); + int lstatRet = -1; + for (int i = 0; i < iitems; i++) { + // Search only dcim directory + if (lstat(items[i]->d_name, &fstat) < 0) + continue; + if ((fstat.st_mode & S_IFDIR) == S_IFDIR) { + if (!strcmp(items[i]->d_name, ".") || !strcmp(items[i]->d_name, "..")) + continue; + if (strcasecmp(items[i]->d_name, "DCIM")==0) { + sprintf(ChangeDirPath, "%s/%s", dir, items[i]->d_name); + int jitems = scandir(ChangeDirPath, &items, NULL, alphasort); + for (int j = 0; j < jitems; j++) { + // Search only camera directory + lstatRet = lstat(items[j]->d_name, &fstat); + if (lstatRet != 0) + CLOGE("ERR(%s[%d]):lstat returned error", __FUNCTION__, __LINE__); + + if ((fstat.st_mode & S_IFDIR) == S_IFDIR) { + if (!strcmp(items[j]->d_name, ".") || !strcmp(items[j]->d_name, "..")) + continue; + if (strcasecmp(items[j]->d_name, "CAMERA")==0) { + sprintf(m_burstSavePath, "%s/%s/", ChangeDirPath, items[j]->d_name); + CLOGW("WARN(%s[%d]) change save path = %s", __FUNCTION__, __LINE__, m_burstSavePath); + j = jitems; + ret = true; + break; + } + } + } + i = iitems; + break; + } + } + } + + if (items != NULL) { + free(items); + } + + return ret; +} + +bool ExynosCamera::m_pictureThreadFunc(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + int loop = false; + int bufIndex = -2; + int retryCountGSC = 4; + + ExynosCameraFrame *newFrame = NULL; + + ExynosCameraBuffer sccReprocessingBuffer; + ExynosCameraBufferManager *bufferMgr = NULL; + struct camera2_stream *shot_stream = NULL; + ExynosRect srcRect, dstRect; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + int hwPictureW = 0, hwPictureH = 0, hwPictureFormat = 0; + int buffer_idx = getShotBufferIdex(); + float zoomRatio = m_parameters->getZoomRatio(0) / 1000; + + sccReprocessingBuffer.index = -2; + + int pipeId_scc = 0; + int pipeId_gsc = 0; + bool isSrc = false; + + if (m_parameters->isReprocessing() == true) { + if (m_parameters->isUseYuvReprocessing() == true) + pipeId_scc = PIPE_MCSC_REPROCESSING; + else if (m_parameters->isReprocessing3aaIspOTF() == false) + pipeId_scc = (m_parameters->isOwnScc(getCameraId()) == true) ? PIPE_SCC_REPROCESSING : PIPE_ISP_REPROCESSING; + else + pipeId_scc = PIPE_3AA_REPROCESSING; + + pipeId_gsc = PIPE_GSC_REPROCESSING; + isSrc = true; + } else if (m_parameters->isUsing3acForIspc() == true) { + pipeId_scc = PIPE_3AA; + pipeId_gsc = PIPE_GSC_PICTURE; + isSrc = false; + } else { + switch (getCameraId()) { + case CAMERA_ID_FRONT: + if (m_parameters->getDualMode() == true) { + pipeId_scc = PIPE_3AA; + } else { + pipeId_scc = (m_parameters->isOwnScc(getCameraId()) == true) ? 
PIPE_SCC : PIPE_ISPC; + } + pipeId_gsc = PIPE_GSC_PICTURE; + break; + default: + CLOGE("ERR(%s[%d]):Current picture mode is not yet supported, CameraId(%d), reprocessing(%d)", + __FUNCTION__, __LINE__, getCameraId(), m_parameters->isReprocessing()); + break; + } + } + + /* wait SCC */ + CLOGI("INFO(%s[%d]):wait SCC output", __FUNCTION__, __LINE__); + int retry = 0; + do { + ret = dstSccReprocessingQ->waitAndPopProcessQ(&newFrame); + retry++; + } while (ret == TIMED_OUT && retry < 40 && + (m_takePictureCounter.getCount() > 0 || m_parameters->getSeriesShotCount() == 0)); + + if (ret < 0) { + CLOGW("WARN(%s[%d]):wait and pop fail, ret(%d), retry(%d), takePictuerCount(%d), seriesShotCount(%d)", + __FUNCTION__, __LINE__, ret, retry, m_takePictureCounter.getCount(), m_parameters->getSeriesShotCount()); + // TODO: doing exception handling + goto CLEAN; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + if (m_postProcessList.size() <= 0) { + CLOGE("ERR(%s[%d]): postPictureList size(%zd)", __FUNCTION__, __LINE__, m_postProcessList.size()); + usleep(5000); + if(m_postProcessList.size() <= 0) { + CLOGE("ERR(%s[%d]):Retry postPictureList size(%zd)", __FUNCTION__, __LINE__, m_postProcessList.size()); + goto CLEAN; + } + } + + /* + * When Non-reprocessing scenario does not setEntityState, + * because Non-reprocessing scenario share preview and capture frames + */ + if (m_parameters->isReprocessing() == true) { + ret = newFrame->setEntityState(pipeId_scc, ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState(ENTITY_STATE_COMPLETE) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + return ret; + } + } + + CLOGI("INFO(%s[%d]):SCC output done, frame Count(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + + /* Shutter Callback */ + if (pipeId_scc != PIPE_SCC_REPROCESSING && + pipeId_scc != PIPE_ISP_REPROCESSING) { + if (m_parameters->msgTypeEnabled(CAMERA_MSG_SHUTTER)) { + CLOGI("INFO(%s[%d]): CAMERA_MSG_SHUTTER callback S", __FUNCTION__, __LINE__); +#ifdef BURST_CAPTURE + m_notifyCb(CAMERA_MSG_SHUTTER, m_parameters->getSeriesShotDuration(), 0, m_callbackCookie); +#else + m_notifyCb(CAMERA_MSG_SHUTTER, 0, 0, m_callbackCookie); +#endif + CLOGI("INFO(%s[%d]): CAMERA_MSG_SHUTTER callback E", __FUNCTION__, __LINE__); + } + } + + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + /* set GSC buffer */ + if (m_parameters->isReprocessing() == true) + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_ISPC_REPROCESSING)); + else if (m_parameters->isUsing3acForIspc() == true) + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + else + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_ISPC)); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + + shot_stream = (struct camera2_stream *)(sccReprocessingBuffer.addr[buffer_idx]); + if (shot_stream != NULL) { + CLOGD("DEBUG(%s[%d]):(%d %d %d %d)", __FUNCTION__, __LINE__, + shot_stream->fcount, + shot_stream->rcount, + shot_stream->findex, + shot_stream->fvalid); + CLOGD("DEBUG(%s[%d]):(%d %d %d %d)(%d %d %d %d)", __FUNCTION__, __LINE__, + shot_stream->input_crop_region[0], + shot_stream->input_crop_region[1], + shot_stream->input_crop_region[2], + 
shot_stream->input_crop_region[3], + shot_stream->output_crop_region[0], + shot_stream->output_crop_region[1], + shot_stream->output_crop_region[2], + shot_stream->output_crop_region[3]); + } else { + CLOGE("DEBUG(%s[%d]):shot_stream is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + int retry = 0; + m_getBufferManager(pipeId_gsc, &bufferMgr, DST_BUFFER_DIRECTION); + do { + ret = -1; + retry++; + if (bufferMgr->getNumOfAvailableBuffer() > 0) { + ret = m_setupEntity(pipeId_gsc, newFrame, &sccReprocessingBuffer, NULL); + } else { + /* wait available SCC buffer */ + usleep(WAITING_TIME); + } + + if (retry % 10 == 0) { + CLOGW("WARN(%s[%d]):retry setupEntity for GSC postPictureQ(%d), saveQ0(%d), saveQ1(%d), saveQ2(%d)", + __FUNCTION__, __LINE__, + m_postPictureQ->getSizeOfProcessQ(), + m_jpegSaveQ[JPEG_SAVE_THREAD0]->getSizeOfProcessQ(), + m_jpegSaveQ[JPEG_SAVE_THREAD1]->getSizeOfProcessQ(), + m_jpegSaveQ[JPEG_SAVE_THREAD2]->getSizeOfProcessQ()); + } + } while(ret < 0 && retry < (TOTAL_WAITING_TIME/WAITING_TIME) && m_stopBurstShot == false); + + if (ret < 0) { + if (retry >= (TOTAL_WAITING_TIME/WAITING_TIME)) { + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), retry(%d), ret(%d), m_stopBurstShot(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, retry, ret, m_stopBurstShot); + /* HACK for debugging P150108-08143 */ + bufferMgr->printBufferState(); + android_printAssert(NULL, LOG_TAG, "BURST_SHOT_TIME_ASSERT(%s[%d]): unexpected error, get GSC buffer failed, assert!!!!", __FUNCTION__, __LINE__); + } else { + CLOGD("DEBUG(%s[%d]):setupEntity stopped, pipeId(%d), retry(%d), ret(%d), m_stopBurstShot(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, retry, ret, m_stopBurstShot); + } + goto CLEAN; + } +/* should change size calculation code in pure bayer */ +#if 0 + if (shot_stream != NULL) { + ret = m_calcPictureRect(&srcRect, &dstRect); + ret = newFrame->setSrcRect(pipeId_gsc, &srcRect); + ret = newFrame->setDstRect(pipeId_gsc, &dstRect); + } +#else + m_parameters->getPictureSize(&pictureW, &pictureH); + pictureFormat = m_parameters->getHwPictureFormat(); +#if 1 /* HACK: in case of 3AA-OTF-ISP, input_crop_region is always 0, so use the output crop region (check the driver) */ + srcRect.x = shot_stream->output_crop_region[0]; + srcRect.y = shot_stream->output_crop_region[1]; + srcRect.w = shot_stream->output_crop_region[2]; + srcRect.h = shot_stream->output_crop_region[3]; +#endif + srcRect.fullW = shot_stream->output_crop_region[2]; + srcRect.fullH = shot_stream->output_crop_region[3]; + srcRect.colorFormat = pictureFormat; + + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = pictureW; + dstRect.h = pictureH; + dstRect.fullW = pictureW; + dstRect.fullH = pictureH; + dstRect.colorFormat = JPEG_INPUT_COLOR_FMT; + + ret = getCropRectAlign(srcRect.w, srcRect.h, + pictureW, pictureH, + &srcRect.x, &srcRect.y, + &srcRect.w, &srcRect.h, + 2, 2, 0, zoomRatio); + + ret = newFrame->setSrcRect(pipeId_gsc, &srcRect); + ret = newFrame->setDstRect(pipeId_gsc, &dstRect); +#endif + + CLOGD("DEBUG(%s):size (%d, %d, %d, %d %d %d)", __FUNCTION__, + srcRect.x, srcRect.y, srcRect.w, srcRect.h, srcRect.fullW, srcRect.fullH); + CLOGD("DEBUG(%s):size (%d, %d, %d, %d %d %d)", __FUNCTION__, + dstRect.x, dstRect.y, dstRect.w, dstRect.h, dstRect.fullW, dstRect.fullH); + + /* push frame to GSC pipe */ + m_pictureFrameFactory->pushFrameToPipe(&newFrame, pipeId_gsc); + m_pictureFrameFactory->setOutputFrameQToPipe(dstGscReprocessingQ, pipeId_gsc); + + /* wait GSC */ + newFrame = NULL; + CLOGI("INFO(%s[%d]):wait GSC output", __FUNCTION__, __LINE__); + 
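/* Wait for the GSC output below: on TIMED_OUT the GSC pipe thread is restarted and the wait is + * retried (up to retryCountGSC attempts); a NULL frame after the loop is treated as a failure. */ +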
while (retryCountGSC > 0) { + ret = dstGscReprocessingQ->waitAndPopProcessQ(&newFrame); + if (ret == TIMED_OUT) { + CLOGW("WRN(%s)(%d):wait and pop timeout, ret(%d)", __FUNCTION__, __LINE__, ret); + m_pictureFrameFactory->startThread(pipeId_gsc); + } else if (ret < 0) { + CLOGE("ERR(%s)(%d):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto CLEAN; + } else { + break; + } + retryCountGSC--; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + goto CLEAN; + } + CLOGI("INFO(%s[%d]):GSC output done", __FUNCTION__, __LINE__); + + /* put SCC buffer */ + if (m_parameters->isReprocessing() == true) { + if (m_parameters->isUseYuvReprocessing() == true) + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_MCSC0_REPROCESSING)); + else + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_reprocessingFrameFactory->getNodeType(PIPE_ISPC_REPROCESSING)); + } else if (m_parameters->isUsing3acForIspc() == true) { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_3AC)); + } else { + ret = newFrame->getDstBuffer(pipeId_scc, &sccReprocessingBuffer, m_previewFrameFactory->getNodeType(PIPE_ISPC)); + } + + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_scc, ret); + goto CLEAN; + } + + m_getBufferManager(pipeId_scc, &bufferMgr, DST_BUFFER_DIRECTION); + ret = m_putBuffers(bufferMgr, sccReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s)(%d):m_putBuffers fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto CLEAN; + } + } + + /* push postProcess */ + m_postPictureQ->pushProcessQ(&newFrame); + + m_pictureCounter.decCount(); + + CLOGI("INFO(%s[%d]):picture thread complete, remaining count(%d)", __FUNCTION__, __LINE__, m_pictureCounter.getCount()); + + if (m_pictureCounter.getCount() > 0) { + loop = true; + } else { + if (m_parameters->isReprocessing() == true) { + if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) { + CLOGD("DEBUG(%s[%d]):Use dynamic bayer", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequest(PIPE_3AC, false); + } + } else { + if (m_parameters->getUseDynamicScc() == true) { + CLOGD("DEBUG(%s[%d]): Use dynamic bayer", __FUNCTION__, __LINE__); + + if (m_parameters->isOwnScc(getCameraId()) == true) + m_previewFrameFactory->setRequestSCC(false); + else + m_previewFrameFactory->setRequestISPC(false); + } else if (m_parameters->getRecordingHint() == true + && m_parameters->isUsing3acForIspc() == true) { + CLOGD("DEBUG(%s[%d]): Use dynamic bayer", __FUNCTION__, __LINE__); + m_previewFrameFactory->setRequest3AC(false); + } + + if (m_parameters->isUsing3acForIspc() == true) + m_sccCaptureSelector->clearList(pipeId_scc, isSrc, m_previewFrameFactory->getNodeType(PIPE_3AC)); + else + m_sccCaptureSelector->clearList(pipeId_scc, isSrc); + } + + dstSccReprocessingQ->release(); + } + + /* one shot */ + return loop; + +CLEAN: + if (sccReprocessingBuffer.index != -2) { + CLOGD("DEBUG(%s[%d]): putBuffer sccReprocessingBuffer(index:%d) in error state", + __FUNCTION__, __LINE__, sccReprocessingBuffer.index); + m_getBufferManager(pipeId_scc, &bufferMgr, DST_BUFFER_DIRECTION); + m_putBuffers(bufferMgr, sccReprocessingBuffer.index); + } + + CLOGI("INFO(%s[%d]):take picture fail, remaining count(%d)", __FUNCTION__, __LINE__, m_pictureCounter.getCount()); + + if 
(m_pictureCounter.getCount() > 0) + loop = true; + + /* one shot */ + return loop; +} + +camera_memory_t *ExynosCamera::m_getJpegCallbackHeap(ExynosCameraBuffer jpegBuf, int seriesShotNumber) +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + camera_memory_t *jpegCallbackHeap = NULL; + +#ifdef BURST_CAPTURE + if (1 < m_parameters->getSeriesShotCount()) { + int seriesShotSaveLocation = m_parameters->getSeriesShotSaveLocation(); + + if (seriesShotNumber < 0 || seriesShotNumber > m_parameters->getSeriesShotCount()) { + CLOGE("ERR(%s[%d]): Invalid shot number (%d)", __FUNCTION__, __LINE__, seriesShotNumber); + goto done; + } + + if (seriesShotSaveLocation == BURST_SAVE_CALLBACK) { + CLOGD("DEBUG(%s[%d]):burst callback : size (%d), count(%d)", __FUNCTION__, __LINE__, jpegBuf.size[0], seriesShotNumber); + + jpegCallbackHeap = m_getMemoryCb(jpegBuf.fd[0], jpegBuf.size[0], 1, m_callbackCookie); + if (!jpegCallbackHeap || jpegCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, jpegBuf.size[0]); + goto done; + } + if (jpegBuf.fd[0] < 0) + memcpy(jpegCallbackHeap->data, jpegBuf.addr[0], jpegBuf.size[0]); + } else { + char filePath[100]; + int nw, cnt = 0; + uint32_t written = 0; + camera_memory_t *tempJpegCallbackHeap = NULL; + + memset(filePath, 0, sizeof(filePath)); + + snprintf(filePath, sizeof(filePath), "%sBurst%02d.jpg", m_burstSavePath, seriesShotNumber); + CLOGD("DEBUG(%s[%d]):burst callback : size (%d), filePath(%s)", __FUNCTION__, __LINE__, jpegBuf.size[0], filePath); + + jpegCallbackHeap = m_getMemoryCb(-1, sizeof(filePath), 1, m_callbackCookie); + if (!jpegCallbackHeap || jpegCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%s) fail", __FUNCTION__, __LINE__, filePath); + goto done; + } + + memcpy(jpegCallbackHeap->data, filePath, sizeof(filePath)); + } + } else +#endif + { + CLOGD("DEBUG(%s[%d]):general callback : size (%d)", __FUNCTION__, __LINE__, jpegBuf.size[0]); + + jpegCallbackHeap = m_getMemoryCb(jpegBuf.fd[0], jpegBuf.size[0], 1, m_callbackCookie); + if (!jpegCallbackHeap || jpegCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, jpegBuf.size[0]); + goto done; + } + + if (jpegBuf.fd[0] < 0) + memcpy(jpegCallbackHeap->data, jpegBuf.addr[0], jpegBuf.size[0]); + } + +done: + if (jpegCallbackHeap == NULL || + jpegCallbackHeap->data == MAP_FAILED) { + + if (jpegCallbackHeap) { + jpegCallbackHeap->release(jpegCallbackHeap); + jpegCallbackHeap = NULL; + } + + m_notifyCb(CAMERA_MSG_ERROR, -1, 0, m_callbackCookie); + } + + CLOGD("INFO(%s[%d]):making callback buffer done", __FUNCTION__, __LINE__); + + return jpegCallbackHeap; +} + +bool ExynosCamera::m_postPictureThreadFunc(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + int loop = false; + int bufIndex = -2; + int buffer_idx = getShotBufferIdex(); + int retryCountJPEG = 4; + + ExynosCameraFrame *newFrame = NULL; + + ExynosCameraBuffer gscReprocessingBuffer; + ExynosCameraBuffer jpegReprocessingBuffer; + ExynosCameraBuffer thumbnailReprocessingBuffer; + + gscReprocessingBuffer.index = -2; + jpegReprocessingBuffer.index = -2; + thumbnailReprocessingBuffer.index = -2; + + int pipeId_gsc = 0; + int pipeId_jpeg = 0; + + int currentSeriesShotMode = 0; + + if (m_parameters->isReprocessing() == true) { + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + pipeId_gsc = PIPE_GSC_REPROCESSING; + } else { + if 
(m_parameters->isOwnScc(getCameraId()) == false) { + if (m_parameters->isUseYuvReprocessing() == true) + pipeId_gsc = PIPE_MCSC_REPROCESSING; + else if (m_parameters->isReprocessing3aaIspOTF() == true) + pipeId_gsc = PIPE_3AA_REPROCESSING; + else + pipeId_gsc = PIPE_ISP_REPROCESSING; + } else { + pipeId_gsc = PIPE_SCC_REPROCESSING; + } + } + pipeId_jpeg = PIPE_JPEG_REPROCESSING; + } else { + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + pipeId_gsc = PIPE_GSC_PICTURE; + } else { + if (m_parameters->isOwnScc(getCameraId()) == true) { + pipeId_gsc = PIPE_SCC; + } else { + if (m_parameters->isUsing3acForIspc() == true) + pipeId_gsc = PIPE_3AA; + else + pipeId_gsc = PIPE_ISPC; + } + } + + pipeId_jpeg = PIPE_JPEG; + } + + ExynosCameraBufferManager *bufferMgr = NULL; + ret = m_getBufferManager(pipeId_gsc, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBufferManager(DST) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_gsc, ret); + return ret; + } + + CLOGI("INFO(%s[%d]):wait postPictureQ output", __FUNCTION__, __LINE__); + ret = m_postPictureQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + CLOGW("WARN(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto CLEAN; + } + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + if (m_jpegCounter.getCount() <= 0) { + CLOGD("DEBUG(%s[%d]): Picture canceled", __FUNCTION__, __LINE__); + goto CLEAN; + } + + CLOGI("INFO(%s[%d]):postPictureQ output done", __FUNCTION__, __LINE__); + + /* put picture callback buffer */ + /* get gsc dst buffers */ + ret = newFrame->getDstBuffer(pipeId_gsc, &gscReprocessingBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + /* callback */ + if (m_hdrEnabled == false && m_parameters->getSeriesShotCount() <= 0) { + if (m_parameters->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE)) { + CLOGD("DEBUG(%s[%d]): RAW callback", __FUNCTION__, __LINE__); + camera_memory_t *rawCallbackHeap = NULL; + rawCallbackHeap = m_getMemoryCb(gscReprocessingBuffer.fd[0], gscReprocessingBuffer.size[0], 1, m_callbackCookie); + if (!rawCallbackHeap || rawCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, gscReprocessingBuffer.size[0]); + goto CLEAN; + } + + setBit(&m_callbackState, CALLBACK_STATE_RAW_IMAGE, true); + m_dataCb(CAMERA_MSG_RAW_IMAGE, rawCallbackHeap, 0, NULL, m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_RAW_IMAGE, true); + rawCallbackHeap->release(rawCallbackHeap); + } + + if (m_parameters->msgTypeEnabled(CAMERA_MSG_RAW_IMAGE_NOTIFY)) { + CLOGD("DEBUG(%s[%d]): RAW_IMAGE_NOTIFY callback", __FUNCTION__, __LINE__); + + m_notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, m_callbackCookie); + } + + if ((m_parameters->msgTypeEnabled(CAMERA_MSG_POSTVIEW_FRAME))) { + CLOGD("DEBUG(%s[%d]): POSTVIEW callback", __FUNCTION__, __LINE__); + + camera_memory_t *postviewCallbackHeap = NULL; + postviewCallbackHeap = m_getMemoryCb(gscReprocessingBuffer.fd[0], gscReprocessingBuffer.size[0], 1, m_callbackCookie); + if (!postviewCallbackHeap || postviewCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, gscReprocessingBuffer.size[0]); + goto CLEAN; + } + + setBit(&m_callbackState, CALLBACK_STATE_POSTVIEW_FRAME, true); + m_dataCb(CAMERA_MSG_POSTVIEW_FRAME, postviewCallbackHeap, 0, NULL, 
m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_POSTVIEW_FRAME, true); + postviewCallbackHeap->release(postviewCallbackHeap); + } + } + + currentSeriesShotMode = m_parameters->getSeriesShotMode(); + + /* Make compressed image */ + if (m_parameters->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE) || + m_parameters->getSeriesShotCount() > 0 || + m_hdrEnabled == true) { + + /* HDR callback */ + if (m_hdrEnabled == true || + currentSeriesShotMode == SERIES_SHOT_MODE_LLS || + currentSeriesShotMode == SERIES_SHOT_MODE_SIS || + m_parameters->getShotMode() == SHOT_MODE_FRONT_PANORAMA) { + CLOGD("DEBUG(%s[%d]): HDR callback", __FUNCTION__, __LINE__); + + /* send yuv image with jpeg callback */ + camera_memory_t *jpegCallbackHeap = NULL; + jpegCallbackHeap = m_getMemoryCb(gscReprocessingBuffer.fd[0], gscReprocessingBuffer.size[0], 1, m_callbackCookie); + if (!jpegCallbackHeap || jpegCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(%d) fail", __FUNCTION__, __LINE__, gscReprocessingBuffer.size[0]); + goto CLEAN; + } + + setBit(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE, true); + m_dataCb(CAMERA_MSG_COMPRESSED_IMAGE, jpegCallbackHeap, 0, NULL, m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE, true); + + jpegCallbackHeap->release(jpegCallbackHeap); + + /* put GSC buffer */ + ret = m_putBuffers(bufferMgr, gscReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):bufferMgr->putBuffers() fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + m_jpegCounter.decCount(); + } else { + if (m_parameters->isHWFCEnabled() == true) { + /* get gsc dst buffers */ + entity_buffer_state_t jpegMainBufferState = ENTITY_BUFFER_STATE_NOREQ; + ret = newFrame->getDstBufferState(pipeId_gsc, &jpegMainBufferState, + m_reprocessingFrameFactory->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBufferState fail, pipeId(%d), nodeType(%d), ret(%d)", + __FUNCTION__, __LINE__, + pipeId_gsc, + m_reprocessingFrameFactory->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING), + ret); + goto CLEAN; + } + + if (jpegMainBufferState == ENTITY_BUFFER_STATE_ERROR) { + CLOGE("ERR(%s[%d]):Failed to get the encoded JPEG buffer", __FUNCTION__, __LINE__); + goto CLEAN; + } + + /* put Thumbnail buffer */ + ret = newFrame->getDstBuffer(pipeId_gsc, &thumbnailReprocessingBuffer, + m_reprocessingFrameFactory->getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + ret = m_putBuffers(m_thumbnailBufferMgr, thumbnailReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):bufferMgr->putBuffers() fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + ret = newFrame->getDstBuffer(pipeId_gsc, &jpegReprocessingBuffer, + m_reprocessingFrameFactory->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + +#if 0 + /* in case OTF until JPEG, we should be call below function to this position + in order to update debugData from frame. */ + m_parameters->setDebugInfoAttributeFromFrame(udm); +#endif + + /* in case OTF until JPEG, we should overwrite debugData info to Jpeg data. 
*/ + if (jpegReprocessingBuffer.size[0] != 0) { + UpdateDebugData(jpegReprocessingBuffer.addr[0], + jpegReprocessingBuffer.size[0], + m_parameters->getDebugAttribute()); + } + } else { + int retry = 0; + + /* 1. get wait available JPEG src buffer */ + do { + bufIndex = -2; + retry++; + + if (m_pictureEnabled == false) { + CLOGI("INFO(%s[%d]):m_pictureEnable is false", __FUNCTION__, __LINE__); + goto CLEAN; + } + if (m_jpegBufferMgr->getNumOfAvailableBuffer() > 0) + m_jpegBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &jpegReprocessingBuffer); + + if (bufIndex < 0) { + usleep(WAITING_TIME); + + if (retry % 20 == 0) { + CLOGW("WRN(%s[%d]):retry JPEG getBuffer(%d) postPictureQ(%d), saveQ0(%d), saveQ1(%d), saveQ2(%d)", + __FUNCTION__, __LINE__, bufIndex, + m_postPictureQ->getSizeOfProcessQ(), + m_jpegSaveQ[JPEG_SAVE_THREAD0]->getSizeOfProcessQ(), + m_jpegSaveQ[JPEG_SAVE_THREAD1]->getSizeOfProcessQ(), + m_jpegSaveQ[JPEG_SAVE_THREAD2]->getSizeOfProcessQ()); + m_jpegBufferMgr->dump(); + } + } + /* this will retry until 300msec */ + } while (bufIndex < 0 && retry < (TOTAL_WAITING_TIME / WAITING_TIME) && m_stopBurstShot == false); + + if (bufIndex < 0) { + if (retry >= (TOTAL_WAITING_TIME / WAITING_TIME)) { + CLOGE("ERR(%s[%d]):getBuffer totally fail, retry(%d), m_stopBurstShot(%d)", + __FUNCTION__, __LINE__, retry, m_stopBurstShot); + /* HACK for debugging P150108-08143 */ + bufferMgr->printBufferState(); + android_printAssert(NULL, LOG_TAG, "BURST_SHOT_TIME_ASSERT(%s[%d]): unexpected error, get jpeg buffer failed, assert!!!!", __FUNCTION__, __LINE__); + } else { + CLOGD("DEBUG(%s[%d]):getBuffer stopped, retry(%d), m_stopBurstShot(%d)", + __FUNCTION__, __LINE__, retry, m_stopBurstShot); + } + ret = m_putBuffers(bufferMgr, gscReprocessingBuffer.index); + goto CLEAN; + } + + /* 2. setup Frame Entity */ + ret = m_setupEntity(pipeId_jpeg, newFrame, &gscReprocessingBuffer, &jpegReprocessingBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setupEntity fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_jpeg, ret); + goto CLEAN; + } + + /* 3. Q Set-up */ + m_pictureFrameFactory->setOutputFrameQToPipe(dstJpegReprocessingQ, pipeId_jpeg); + + /* 4. push the newFrame to pipe */ + m_pictureFrameFactory->pushFrameToPipe(&newFrame, pipeId_jpeg); + + /* 5. 
wait outputQ */ + CLOGI("INFO(%s[%d]):wait Jpeg output", __FUNCTION__, __LINE__); + while (retryCountJPEG > 0) { + ret = dstJpegReprocessingQ->waitAndPopProcessQ(&newFrame); + if (ret == TIMED_OUT) { + CLOGW("WRN(%s)(%d):wait and pop timeout, ret(%d)", __FUNCTION__, __LINE__, ret); + m_pictureFrameFactory->startThread(pipeId_jpeg); + } else if (ret < 0) { + CLOGE("ERR(%s)(%d):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto CLEAN; + } else { + break; + } + retryCountJPEG--; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + goto CLEAN; + } + + /* + * When Non-reprocessing scenario does not setEntityState, + * because Non-reprocessing scenario share preview and capture frames + */ + if (m_parameters->isReprocessing() == true) { + ret = newFrame->setEntityState(pipeId_jpeg, ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):setEntityState(ENTITY_STATE_COMPLETE) fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, pipeId_jpeg, ret); + return ret; + } + } + } + + /* put GSC buffer */ + ret = m_putBuffers(bufferMgr, gscReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):bufferMgr->putBuffers() fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + int jpegOutputSize = newFrame->getJpegSize(); + if (jpegOutputSize <= 0) { + jpegOutputSize = jpegReprocessingBuffer.size[0]; + if (jpegOutputSize <= 0) + CLOGW("WRN(%s[%d]):jpegOutput size(%d) is invalid", __FUNCTION__, __LINE__, jpegOutputSize); + } + + CLOGI("INFO(%s[%d]):Jpeg output done, jpeg size(%d)", __FUNCTION__, __LINE__, jpegOutputSize); + + jpegReprocessingBuffer.size[0] = jpegOutputSize; + + /* push postProcess to call CAMERA_MSG_COMPRESSED_IMAGE */ + jpeg_callback_buffer_t jpegCallbackBuf; + jpegCallbackBuf.buffer = jpegReprocessingBuffer; +#ifdef BURST_CAPTURE + m_burstCaptureCallbackCount++; + CLOGI("INFO(%s[%d]): burstCaptureCallbackCount(%d)", __FUNCTION__, __LINE__, m_burstCaptureCallbackCount); +#endif +retry: + if ((m_parameters->getSeriesShotCount() > 0)) { + int threadNum = 0; + + if (m_burst[JPEG_SAVE_THREAD0] == false && m_jpegSaveThread[JPEG_SAVE_THREAD0]->isRunning() == false) { + threadNum = JPEG_SAVE_THREAD0; + } else if (m_burst[JPEG_SAVE_THREAD1] == false && m_jpegSaveThread[JPEG_SAVE_THREAD1]->isRunning() == false) { + threadNum = JPEG_SAVE_THREAD1; + } else if (m_burst[JPEG_SAVE_THREAD2] == false && m_jpegSaveThread[JPEG_SAVE_THREAD2]->isRunning() == false) { + threadNum = JPEG_SAVE_THREAD2; + } else { + CLOGW("WARN(%s[%d]): wait for available save thread, thread running(%d, %d, %d,)", + __FUNCTION__, __LINE__, + m_jpegSaveThread[JPEG_SAVE_THREAD0]->isRunning(), + m_jpegSaveThread[JPEG_SAVE_THREAD1]->isRunning(), + m_jpegSaveThread[JPEG_SAVE_THREAD2]->isRunning()); + usleep(WAITING_TIME * 10); + goto retry; + } + + m_burst[threadNum] = true; + ret = m_jpegSaveThread[threadNum]->run(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_jpegSaveThread%d run fail, ret(%d)", __FUNCTION__, __LINE__, threadNum, ret); + m_burst[threadNum] = false; + m_running[threadNum] = false; + goto retry; + } + + jpegCallbackBuf.callbackNumber = m_burstCaptureCallbackCount; + m_jpegSaveQ[threadNum]->pushProcessQ(&jpegCallbackBuf); + } else { + jpegCallbackBuf.callbackNumber = 0; + m_jpegCallbackQ->pushProcessQ(&jpegCallbackBuf); + } + + m_jpegCounter.decCount(); + } + } else { + CLOGD("DEBUG(%s[%d]): Disabled compressed image", __FUNCTION__, __LINE__); + + /* put GSC buffer */ + ret = 
m_putBuffers(bufferMgr, gscReprocessingBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):bufferMgr->putBuffers() fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId_gsc, ret); + goto CLEAN; + } + + m_jpegCounter.decCount(); + } + + if (newFrame != NULL) { + newFrame->printEntity(); + newFrame->frameUnlock(); + ret = m_removeFrameFromList(&m_postProcessList, newFrame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):remove frame from processList fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGD("DEBUG(%s[%d]): Picture frame delete(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + } + + CLOGI("INFO(%s[%d]):postPicture thread complete, remaining count(%d)", __FUNCTION__, __LINE__, m_jpegCounter.getCount()); + + if (m_jpegCounter.getCount() <= 0) { + if (m_hdrEnabled == true) { + CLOGI("INFO(%s[%d]): End of HDR capture!", __FUNCTION__, __LINE__); + m_hdrEnabled = false; + m_pictureEnabled = false; + } + if (currentSeriesShotMode == SERIES_SHOT_MODE_LLS || + currentSeriesShotMode == SERIES_SHOT_MODE_SIS) { + CLOGI("INFO(%s[%d]): End of LLS/SIS capture!", __FUNCTION__, __LINE__); + m_pictureEnabled = false; + } + + if(m_parameters->getShotMode() == SHOT_MODE_FRONT_PANORAMA) { + CLOGI("INFO(%s[%d]): End of wideselfie capture!", __FUNCTION__, __LINE__); + m_pictureEnabled = false; + } + + CLOGD("DEBUG(%s[%d]): free gsc buffers", __FUNCTION__, __LINE__); + m_gscBufferMgr->resetBuffers(); + + if (currentSeriesShotMode != SERIES_SHOT_MODE_BURST) { + CLOGD("DEBUG(%s[%d]): clearList postProcessList, series shot mode(%d)", __FUNCTION__, __LINE__, currentSeriesShotMode); + if (m_clearList(&m_postProcessList) < 0) { + CLOGE("ERR(%s):m_clearList fail", __FUNCTION__); + } + } + } + + if (m_parameters->getScalableSensorMode()) { + m_scalableSensorMgr.setMode(EXYNOS_CAMERA_SCALABLE_CHANGING); + ret = m_restartPreviewInternal(); + if (ret < 0) + CLOGE("(%s[%d]): restart preview internal fail", __FUNCTION__, __LINE__); + m_scalableSensorMgr.setMode(EXYNOS_CAMERA_SCALABLE_NONE); + } + +CLEAN: + /* HACK: Sometimes, m_postPictureThread is finished without waiting the last picture */ + int waitCount = 5; + while (m_postPictureQ->getSizeOfProcessQ() == 0 && 0 < waitCount) { + usleep(10000); + waitCount--; + } + + if (m_postPictureQ->getSizeOfProcessQ() > 0 || + currentSeriesShotMode != SERIES_SHOT_MODE_NONE) { + CLOGD("DEBUG(%s[%d]):postPicture thread will run again. 
currentSeriesShotMode(%d), postPictureQ size(%d)", + __func__, __LINE__, currentSeriesShotMode, m_postPictureQ->getSizeOfProcessQ()); + loop = true; + } + + return loop; +} + +bool ExynosCamera::m_jpegSaveThreadFunc(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + int loop = false; + int curThreadNum = -1; + char burstFilePath[100]; +#ifdef BURST_CAPTURE + int fd = -1; +#endif + + jpeg_callback_buffer_t jpegCallbackBuf; + ExynosCameraBuffer jpegSaveBuffer; + int seriesShotNumber = -1; +// camera_memory_t *jpegCallbackHeap = NULL; + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + if (m_burst[threadNum] == true && m_running[threadNum] == false) { + m_running[threadNum] = true; + curThreadNum = threadNum; + if (m_jpegSaveQ[curThreadNum]->waitAndPopProcessQ(&jpegCallbackBuf) < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto done; + } + break; + } + } + + if (curThreadNum < 0 || curThreadNum > JPEG_SAVE_THREAD2) { + CLOGE("ERR(%s[%d]): invalid thrad num (%d)", __FUNCTION__, __LINE__, curThreadNum); + goto done; + } + + jpegSaveBuffer = jpegCallbackBuf.buffer; + seriesShotNumber = jpegCallbackBuf.callbackNumber; + +#ifdef BURST_CAPTURE + if (m_parameters->getSeriesShotCount() > 0) { + + int seriesShotSaveLocation = m_parameters->getSeriesShotSaveLocation(); + + if (seriesShotSaveLocation == BURST_SAVE_CALLBACK) { + jpegCallbackBuf.buffer = jpegSaveBuffer; + jpegCallbackBuf.callbackNumber = 0; + m_jpegCallbackQ->pushProcessQ(&jpegCallbackBuf); + goto done; + } else { + int nw, cnt = 0; + uint32_t written = 0; + camera_memory_t *tempJpegCallbackHeap = NULL; + + memset(burstFilePath, 0, sizeof(burstFilePath)); + + m_burstCaptureCallbackCountLock.lock(); + snprintf(burstFilePath, sizeof(burstFilePath), "%sBurst%02d.jpg", m_burstSavePath, seriesShotNumber); + m_burstCaptureCallbackCountLock.unlock(); + + CLOGD("DEBUG(%s[%d]):%s fd:%d jpegSize : %d", __FUNCTION__, __LINE__, burstFilePath, jpegSaveBuffer.fd[0], jpegSaveBuffer.size[0]); + + m_burstCaptureSaveLock.lock(); + + fd = open(burstFilePath, O_RDWR | O_CREAT, 0664); + if (fd < 0) { + CLOGD("DEBUG(%s[%d]):failed to create file [%s]: %s", + __FUNCTION__, __LINE__, burstFilePath, strerror(errno)); + m_burstCaptureSaveLock.unlock(); + goto done; + } + + m_burstSaveTimer.start(); + CLOGD("DEBUG(%s[%d]):%s jpegSize : %d", __FUNCTION__, __LINE__, burstFilePath, jpegSaveBuffer.size[0]); + + char *data = NULL; + + if (jpegSaveBuffer.fd[0] < 0) { + data = jpegSaveBuffer.addr[0]; + } else { + /* TODO : we need to use jpegBuf's buffer directly */ + tempJpegCallbackHeap = m_getMemoryCb(jpegSaveBuffer.fd[0], jpegSaveBuffer.size[0], 1, m_callbackCookie); + if (!tempJpegCallbackHeap || tempJpegCallbackHeap->data == MAP_FAILED) { + CLOGE("ERR(%s[%d]):m_getMemoryCb(fd:%d, size:%d) fail", __FUNCTION__, __LINE__, jpegSaveBuffer.fd[0], jpegSaveBuffer.size[0]); + m_burstCaptureSaveLock.unlock(); + goto done; + } + + data = (char *)tempJpegCallbackHeap->data; + } + + CLOGD("DEBUG(%s[%d]):(%s)file write start)", __FUNCTION__, __LINE__, burstFilePath); + while (written < jpegSaveBuffer.size[0]) { + nw = ::write(fd, (const char *)(data) + written, jpegSaveBuffer.size[0] - written); + + if (nw < 0) { + CLOGD("DEBUG(%s[%d]):failed to write file [%s]: %s", + __FUNCTION__, __LINE__, burstFilePath, strerror(errno)); + break; + } + + written += nw; + cnt++; + } + CLOGD("DEBUG(%s[%d]):(%s)file write end)", 
__FUNCTION__, __LINE__, burstFilePath); + + if (fd >= 0) + ::close(fd); + + if (chmod(burstFilePath,0664) < 0) { + CLOGE("failed chmod [%s]", burstFilePath); + } + if (chown(burstFilePath,AID_MEDIA,AID_MEDIA_RW) < 0) { + CLOGE("failed chown [%s] user(%d), group(%d)", burstFilePath,AID_MEDIA,AID_MEDIA_RW); + } + + m_burstCaptureSaveLock.unlock(); + + if (tempJpegCallbackHeap) { + tempJpegCallbackHeap->release(tempJpegCallbackHeap); + tempJpegCallbackHeap = NULL; + } + + m_burstSaveTimer.stop(); + m_burstSaveTimerTime = m_burstSaveTimer.durationUsecs(); + if (m_burstSaveTimerTime > (m_burstDuration - 33000)) { + m_burstDuration += (int)((m_burstSaveTimerTime - m_burstDuration + 33000) / 33000) * 33000; + CLOGD("Increase burst duration = %d", m_burstDuration); + } + + CLOGD("DEBUG(%s[%d]):m_burstSaveTimerTime : %d msec, path(%s)", __FUNCTION__, __LINE__, (int)m_burstSaveTimerTime / 1000, burstFilePath); + } + jpegCallbackBuf.buffer = jpegSaveBuffer; + jpegCallbackBuf.callbackNumber = seriesShotNumber; + m_jpegCallbackQ->pushProcessQ(&jpegCallbackBuf); + } else +#endif + { + jpegCallbackBuf.buffer = jpegSaveBuffer; + jpegCallbackBuf.callbackNumber = 0; + m_jpegCallbackQ->pushProcessQ(&jpegCallbackBuf); + } + +done: +/* + if (jpegCallbackHeap == NULL || + jpegCallbackHeap->data == MAP_FAILED) { + + if (jpegCallbackHeap) { + jpegCallbackHeap->release(jpegCallbackHeap); + jpegCallbackHeap = NULL; + } + + m_notifyCb(CAMERA_MSG_ERROR, -1, 0, m_callbackCookie); + } +*/ + if (JPEG_SAVE_THREAD0 <= curThreadNum && curThreadNum < JPEG_SAVE_THREAD_MAX_COUNT) { + m_burst[curThreadNum] = false; + m_running[curThreadNum] = false; + } + + CLOGI("INFO(%s[%d]):saving jpeg buffer done", __FUNCTION__, __LINE__); + + /* one shot */ + return false; +} + +bool ExynosCamera::m_jpegCallbackThreadFunc(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + int retry = 0, maxRetry = 0; + int loop = false; + int seriesShotNumber = -1; + + jpeg_callback_buffer_t jpegCallbackBuf; + ExynosCameraBuffer jpegCallbackBuffer; + camera_memory_t *jpegCallbackHeap = NULL; + + jpegCallbackBuffer.index = -2; + + ExynosCameraActivityFlash *m_flashMgr = m_exynosCameraActivityControl->getFlashMgr(); + if (m_flashMgr->getNeedFlash() == true) { + maxRetry = TOTAL_FLASH_WATING_COUNT; + } else { + maxRetry = TOTAL_WAITING_COUNT; + } + + do { + ret = m_jpegCallbackQ->waitAndPopProcessQ(&jpegCallbackBuf); + if (ret < 0) { + retry++; + CLOGW("WARN(%s[%d]):jpegCallbackQ pop fail, retry(%d)", __FUNCTION__, __LINE__, retry); + } + } while(ret < 0 && retry < maxRetry && m_jpegCounter.getCount() > 0); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + loop = true; + goto CLEAN; + } + + jpegCallbackBuffer = jpegCallbackBuf.buffer; + seriesShotNumber = jpegCallbackBuf.callbackNumber; + + CLOGD("DEBUG(%s[%d]):jpeg calllback is start", __FUNCTION__, __LINE__); + + /* Make compressed image */ + if (m_parameters->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE) || + m_parameters->getSeriesShotCount() > 0) { + m_captureLock.lock(); + camera_memory_t *jpegCallbackHeap = m_getJpegCallbackHeap(jpegCallbackBuffer, seriesShotNumber); + if (jpegCallbackHeap == NULL) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + android_printAssert(NULL, LOG_TAG, "Cannot recoverable, assert!!!!"); + } + + setBit(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE, true); + 
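// Editor's sketch, not part of the imported BSP source: the
// setBit() / data-callback / clearBit() sequence used here is the same
// guard pattern as the postview and HDR callbacks earlier in this file.
// A small scope guard could express it once. The setBit()/clearBit()
// signatures and the type of m_callbackState are assumed from the call
// sites; the helper name is hypothetical.
class CallbackStateGuard {
public:
    CallbackStateGuard(uint32_t *state, uint32_t flag)
        : m_state(state), m_flag(flag) {
        setBit(m_state, m_flag, true);      /* mark the callback as in flight */
    }
    ~CallbackStateGuard() {
        clearBit(m_state, m_flag, true);    /* cleared on every exit path */
    }
private:
    uint32_t *m_state;
    uint32_t m_flag;
};
// Example use around a compressed-image callback:
//   CallbackStateGuard guard(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE);
//   m_dataCb(CAMERA_MSG_COMPRESSED_IMAGE, jpegCallbackHeap, 0, NULL, m_callbackCookie);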
m_dataCb(CAMERA_MSG_COMPRESSED_IMAGE, jpegCallbackHeap, 0, NULL, m_callbackCookie); + clearBit(&m_callbackState, CALLBACK_STATE_COMPRESSED_IMAGE, true); + CLOGD("DEBUG(%s[%d]): CAMERA_MSG_COMPRESSED_IMAGE callabck (%d)", __FUNCTION__, __LINE__, m_burstCaptureCallbackCount); + + /* put JPEG callback buffer */ + if (m_jpegBufferMgr->putBuffer(jpegCallbackBuffer.index, EXYNOS_CAMERA_BUFFER_POSITION_NONE) != NO_ERROR) + CLOGE("ERR(%s[%d]):putBuffer(%d) fail", __FUNCTION__, __LINE__, jpegCallbackBuffer.index); + + jpegCallbackHeap->release(jpegCallbackHeap); + } else { + CLOGD("DEBUG(%s[%d]): Disabled compressed image", __FUNCTION__, __LINE__); + } + +CLEAN: + CLOGI("INFO(%s[%d]):jpeg callback thread complete, remaining count(%d)", __FUNCTION__, __LINE__, m_takePictureCounter.getCount()); + if (m_takePictureCounter.getCount() == 0) { + m_pictureEnabled = false; + loop = false; + m_clearJpegCallbackThread(true); + m_captureLock.unlock(); + } else { + m_captureLock.unlock(); + } + + return loop; +} + +void ExynosCamera::m_clearJpegCallbackThread(bool callFromJpeg) +{ + jpeg_callback_buffer_t jpegCallbackBuf; + ExynosCameraBuffer jpegCallbackBuffer; + int ret = 0; + + CLOGI("INFO(%s[%d]): takePicture disabled, takePicture callback done takePictureCounter(%d)", + __FUNCTION__, __LINE__, m_takePictureCounter.getCount()); + m_pictureEnabled = false; + + if (m_parameters->getUseDynamicScc() == true) { + CLOGD("DEBUG(%s[%d]): Use dynamic bayer", __FUNCTION__, __LINE__); + if (m_parameters->isOwnScc(getCameraId()) == true) + m_previewFrameFactory->setRequestSCC(false); + else + m_previewFrameFactory->setRequestISPC(false); + } + + m_prePictureThread->requestExit(); + m_pictureThread->requestExit(); + m_postPictureThread->requestExit(); + m_jpegCallbackThread->requestExit(); + + CLOGI("INFO(%s[%d]): wait m_prePictureThrad", __FUNCTION__, __LINE__); + m_prePictureThread->requestExitAndWait(); + CLOGI("INFO(%s[%d]): wait m_pictureThrad", __FUNCTION__, __LINE__); + m_pictureThread->requestExitAndWait(); + CLOGI("INFO(%s[%d]): wait m_postPictureThrad", __FUNCTION__, __LINE__); + m_postPictureThread->requestExitAndWait(); + CLOGI("INFO(%s[%d]): wait m_jpegCallbackThrad", __FUNCTION__, __LINE__); + if (!callFromJpeg) + m_jpegCallbackThread->requestExitAndWait(); + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + CLOGI("INFO(%s[%d]): wait m_jpegSaveThrad%d", __FUNCTION__, __LINE__, threadNum); + m_jpegSaveThread[threadNum]->requestExitAndWait(); + } + + CLOGI("INFO(%s[%d]): All picture threads done", __FUNCTION__, __LINE__); + + if (m_parameters->isReprocessing() == true) { + enum pipeline pipe = (m_parameters->isOwnScc(getCameraId()) == true) ? 
PIPE_SCC_REPROCESSING : PIPE_ISP_REPROCESSING; + + m_reprocessingFrameFactory->stopThread(pipe); + } + + while (m_jpegCallbackQ->getSizeOfProcessQ() > 0) { + m_jpegCallbackQ->popProcessQ(&jpegCallbackBuf); + jpegCallbackBuffer = jpegCallbackBuf.buffer; + + CLOGD("DEBUG(%s[%d]):put remaining jpeg buffer(index: %d)", __FUNCTION__, __LINE__, jpegCallbackBuffer.index); + if (m_jpegBufferMgr->putBuffer(jpegCallbackBuffer.index, EXYNOS_CAMERA_BUFFER_POSITION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):putBuffer(%d) fail", __FUNCTION__, __LINE__, jpegCallbackBuffer.index); + } + + int seriesShotSaveLocation = m_parameters->getSeriesShotSaveLocation(); + char command[100]; + memset(command, 0, sizeof(command)); + + snprintf(command, sizeof(command), "rm %sBurst%02d.jpg", m_burstSavePath, jpegCallbackBuf.callbackNumber); + + system(command); + CLOGD("run %s", command); + } + + for (int threadNum = JPEG_SAVE_THREAD0; threadNum < JPEG_SAVE_THREAD_MAX_COUNT; threadNum++) { + while (m_jpegSaveQ[threadNum]->getSizeOfProcessQ() > 0) { + m_jpegSaveQ[threadNum]->popProcessQ(&jpegCallbackBuf); + jpegCallbackBuffer = jpegCallbackBuf.buffer; + + CLOGD("DEBUG(%s[%d]):put remaining SaveQ%d jpeg buffer(index: %d)", + __FUNCTION__, __LINE__, threadNum, jpegCallbackBuffer.index); + if (m_jpegBufferMgr->putBuffer(jpegCallbackBuffer.index, EXYNOS_CAMERA_BUFFER_POSITION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):putBuffer(%d) fail", __FUNCTION__, __LINE__, jpegCallbackBuffer.index); + } + + } + + m_burst[threadNum] = false; + } + + if (m_parameters->isReprocessing() == true) { + enum pipeline pipe = (m_parameters->isOwnScc(getCameraId()) == true) ? PIPE_SCC_REPROCESSING : PIPE_ISP_REPROCESSING; + CLOGD("DEBUG(%s[%d]): Wait thread exit Pipe(%d) ", __FUNCTION__, __LINE__, pipe); + m_reprocessingFrameFactory->stopThreadAndWait(pipe); + } + + CLOGD("DEBUG(%s[%d]): clear postProcessList", __FUNCTION__, __LINE__); + if (m_clearList(&m_postProcessList) < 0) { + CLOGE("ERR(%s):m_clearList fail", __FUNCTION__); + } + +#if 1 + CLOGD("DEBUG(%s[%d]): clear postPictureQ", __FUNCTION__, __LINE__); + m_postPictureQ->release(); + + CLOGD("DEBUG(%s[%d]): clear dstSccReprocessingQ", __FUNCTION__, __LINE__); + dstSccReprocessingQ->release(); + + CLOGD("DEBUG(%s[%d]): clear dstJpegReprocessingQ", __FUNCTION__, __LINE__); + dstJpegReprocessingQ->release(); +#else + ExynosCameraFrame *frame = NULL; + + CLOGD("DEBUG(%s[%d]): clear postPictureQ", __FUNCTION__, __LINE__); + while(m_postPictureQ->getSizeOfProcessQ()) { + m_postPictureQ->popProcessQ(&frame); + if (frame != NULL) { + delete frame; + frame = NULL; + } + } + + CLOGD("DEBUG(%s[%d]): clear dstSccReprocessingQ", __FUNCTION__, __LINE__); + while(dstSccReprocessingQ->getSizeOfProcessQ()) { + dstSccReprocessingQ->popProcessQ(&frame); + if (frame != NULL) { + delete frame; + frame = NULL; + } + } +#endif + + CLOGD("DEBUG(%s[%d]): reset buffer gsc buffers", __FUNCTION__, __LINE__); + m_gscBufferMgr->resetBuffers(); + CLOGD("DEBUG(%s[%d]): reset buffer jpeg buffers", __FUNCTION__, __LINE__); + m_jpegBufferMgr->resetBuffers(); + CLOGD("DEBUG(%s[%d]): reset buffer sccReprocessing buffers", __FUNCTION__, __LINE__); + m_sccReprocessingBufferMgr->resetBuffers(); +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3.cpp b/libcamera/34xx/hal3/ExynosCamera3.cpp new file mode 100644 index 0000000..41690c1 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3.cpp @@ -0,0 +1,8212 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. 
LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3" +#include +#include + +#include "ExynosCamera3.h" + +namespace android { + +#ifdef MONITOR_LOG_SYNC +uint32_t ExynosCamera3::cameraSyncLogId = 0; +#endif +ExynosCamera3::ExynosCamera3(int cameraId, camera_metadata_t **info): + m_requestMgr(NULL), + m_parameters(NULL), + m_streamManager(NULL), + m_activityControl(NULL) +{ + BUILD_DATE(); + m_cameraId = cameraId; + + memset(m_name, 0x00, sizeof(m_name)); + + /* Initialize pointer variables */ + m_ionAllocator = NULL; + + m_bayerBufferMgr= NULL; + m_fliteBufferMgr= NULL; + m_3aaBufferMgr = NULL; + m_ispBufferMgr = NULL; + m_yuvCaptureBufferMgr = NULL; + m_vraBufferMgr = NULL; + m_gscBufferMgr = NULL; + m_internalScpBufferMgr = NULL; + m_ispReprocessingBufferMgr = NULL; + m_thumbnailBufferMgr = NULL; + m_skipBufferMgr = NULL; + m_yuvCaptureReprocessingBufferMgr = NULL; + m_captureSelector = NULL; + m_sccCaptureSelector = NULL; + m_captureZslSelector = NULL; + + /* Create related classes */ + m_parameters = new ExynosCamera3Parameters(m_cameraId); + m_use_companion = m_parameters->isCompanion(cameraId); + m_activityControl = m_parameters->getActivityControl(); + + ExynosCameraActivityUCTL *uctlMgr = m_activityControl->getUCTLMgr(); + if (uctlMgr != NULL) + uctlMgr->setDeviceRotation(m_parameters->getFdOrientation()); + + m_metadataConverter = new ExynosCamera3MetadataConverter(cameraId, (ExynosCameraParameters *)m_parameters); + m_requestMgr = new ExynosCameraRequestManager(cameraId, (ExynosCameraParameters *)m_parameters); + m_requestMgr->setMetaDataConverter(m_metadataConverter); + + /* Create managers */ + m_createManagers(); + + /* Create threads */ + m_createThreads(); + + /* Create queue for preview path. 
If you want to control pipeDone in ExynosCamera3, try to create frame queue here */ + m_shotDoneQ = new ExynosCameraList(); + for (int i = 0; i < MAX_PIPE_NUM; i++) { + switch (i) { + case PIPE_FLITE: + case PIPE_3AA: + case PIPE_ISP: + case PIPE_VRA: + m_pipeFrameDoneQ[i] = new frame_queue_t; + break; + default: + m_pipeFrameDoneQ[i] = NULL; + break; + } + } + + /* Create queue for capture path */ + m_pipeCaptureFrameDoneQ = new frame_queue_t(m_captureStreamThread); + m_pipeCaptureFrameDoneQ->setWaitTime(2000000000); + + m_duplicateBufferDoneQ = new frame_queue_t; + + /* Create queue for capture path */ + m_reprocessingDoneQ = new frame_queue_t; + m_reprocessingDoneQ->setWaitTime(2000000000); + + m_frameFactoryQ = new framefactory3_queue_t; + m_selectBayerQ = new frame_queue_t(); + m_captureQ = new frame_queue_t(m_captureThread); + + /* construct static meta data information */ + if (ExynosCamera3MetadataConverter::constructStaticInfo(cameraId, info)) + CLOGE2("Create static meta data failed!!"); + + m_metadataConverter->setStaticInfo(cameraId, *info); + + m_streamManager->setYuvStreamMaxCount(m_parameters->getYuvStreamMaxNum()); + + m_setFrameManager(); + + m_setConfigInform(); + + m_constructFrameFactory(); + + /* Setup FrameFactory to RequestManager*/ + m_setupFrameFactoryToRequest(); + + /* HACK : check capture stream */ + isCaptureConfig = false; + isRestarted = false; + + isRecordingConfig = false; + recordingEnabled = false; + m_checkConfigStream = false; + m_flushFlag = false; + m_factoryStartFlag = false; + m_flushWaitEnable = false; + m_frameFactoryStartDone = false; + m_internalFrameCount = 0; + m_isNeedInternalFrame = false; + m_isNeedRequestFrame = false; + m_currentShot = new struct camera2_shot_ext; + memset(m_currentShot, 0x00, sizeof(struct camera2_shot_ext)); + m_internalFrameDoneQ = new frame_queue_t(m_internalFrameThread); + m_captureCount = 0; +#ifdef MONITOR_LOG_SYNC + m_syncLogDuration = 0; +#endif + m_flagStartFrameFactory = false; + m_flagStartReprocessingFrameFactory = false; + m_flagBayerRequest = false; + m_prepareFliteCnt = 0; + m_lastFrametime = 0; +} + +status_t ExynosCamera3::m_setConfigInform() { + struct ExynosConfigInfo exynosConfig; + memset((void *)&exynosConfig, 0x00, sizeof(exynosConfig)); + + exynosConfig.mode = CONFIG_MODE::NORMAL; + + /* Internal buffers */ + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_sensor_buffers = NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_3aa_buffers = NUM_3AA_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_hwdis_buffers = NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_vra_buffers = NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_reprocessing_buffers = NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_fastaestable_buffer = NUM_FASTAESTABLE_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.reprocessing_bayer_hold_count = REPROCESSING_BAYER_HOLD_COUNT; + /* Service buffers */ +#if 1 /* Consumer's buffer counts are not fixed */ + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_bayer_buffers = VIDEO_MAX_FRAME - NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_preview_buffers = VIDEO_MAX_FRAME; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_preview_cb_buffers = VIDEO_MAX_FRAME; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_recording_buffers = VIDEO_MAX_FRAME; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_picture_buffers = VIDEO_MAX_FRAME; +#else + 
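    /* Editor's note, not part of the imported BSP source: this compiled-out
     * branch is the fixed-count alternative to the VIDEO_MAX_FRAME sizing
     * chosen by the "#if 1" above; each service pool is sized as the
     * per-request count plus the count the consumer side may hold. The check
     * below is illustrative only (it assumes the NUM_*_BUFFER macros are
     * integer constants) and shows how a re-enabled fixed configuration could
     * be validated against the same ceiling. */
    static_assert(NUM_REQUEST_PREVIEW_BUFFER + NUM_SERVICE_PREVIEW_BUFFER <= VIDEO_MAX_FRAME,
                  "fixed preview pool must not exceed VIDEO_MAX_FRAME");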
exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_bayer_buffers = NUM_REQUEST_RAW_BUFFER + NUM_SERVICE_RAW_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_preview_buffers = NUM_REQUEST_PREVIEW_BUFFER + NUM_SERVICE_PREVIEW_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_preview_cb_buffers = NUM_REQUEST_CALLBACK_BUFFER + NUM_SERVICE_CALLBACK_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_recording_buffers = NUM_REQUEST_VIDEO_BUFFER + NUM_SERVICE_VIDEO_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_picture_buffers = NUM_REQUEST_JPEG_BUFFER + NUM_SERVICE_JPEG_BUFFER; +#endif + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_request_raw_buffers = NUM_REQUEST_RAW_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_request_preview_buffers = NUM_REQUEST_PREVIEW_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_request_callback_buffers = NUM_REQUEST_CALLBACK_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_request_video_buffers = NUM_REQUEST_VIDEO_BUFFER; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_request_jpeg_buffers = NUM_REQUEST_JPEG_BUFFER; + /* Blocking request */ + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_min_block_request = NUM_REQUEST_BLOCK_MIN; + exynosConfig.info[CONFIG_MODE::NORMAL].bufInfo.num_max_block_request = NUM_REQUEST_BLOCK_MAX; + /* Prepare buffers */ + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_FLITE] = PIPE_FLITE_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::NORMAL].pipeInfo.prepare[PIPE_3AA] = PIPE_3AA_PREPARE_COUNT; + +#if (USE_HIGHSPEED_RECORDING) + /* Config HIGH_SPEED 60 buffer & pipe info */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_sensor_buffers = (getCameraId() == CAMERA_ID_BACK) ? FPS60_NUM_SENSOR_BUFFERS : FPS60_FRONT_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_bayer_buffers = FPS60_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.init_bayer_buffers = FPS60_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_3aa_buffers = FPS60_NUM_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_hwdis_buffers = FPS60_NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_vra_buffers = FPS60_NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_preview_buffers = FPS60_NUM_PREVIEW_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_picture_buffers = FPS60_NUM_PICTURE_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_reprocessing_buffers = FPS60_NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_recording_buffers = FPS60_NUM_RECORDING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_fastaestable_buffer = FPS60_INITIAL_SKIP_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.reprocessing_bayer_hold_count = FPS60_REPROCESSING_BAYER_HOLD_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.front_num_bayer_buffers = FPS60_FRONT_NUM_BAYER_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.front_num_picture_buffers = FPS60_FRONT_NUM_PICTURE_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.preview_buffer_margin = FPS60_NUM_PREVIEW_BUFFERS_MARGIN; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_min_block_request = FPS60_NUM_REQUEST_BLOCK_MIN; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].bufInfo.num_max_block_request = 
FPS60_NUM_REQUEST_BLOCK_MAX; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_FLITE] = FPS60_PIPE_FLITE_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_3AA] = FPS60_PIPE_3AA_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_60].pipeInfo.prepare[PIPE_SCP_REPROCESSING] = FPS60_PIPE_SCP_REPROCESSING_PREPARE_COUNT; + + /* Config HIGH_SPEED 120 buffer & pipe info */ + + /* Internal buffers */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_sensor_buffers = FPS120_NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_3aa_buffers = FPS120_NUM_3AA_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_hwdis_buffers = FPS120_NUM_HW_DIS_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_vra_buffers = FPS120_NUM_VRA_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_reprocessing_buffers = FPS120_NUM_REPROCESSING_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_fastaestable_buffer = FPS120_NUM_FASTAESTABLE_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.reprocessing_bayer_hold_count = FPS120_REPROCESSING_BAYER_HOLD_COUNT; + + /* Service buffers */ +#if 1 + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_bayer_buffers = VIDEO_MAX_FRAME - FPS120_NUM_SENSOR_BUFFERS; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_preview_buffers = VIDEO_MAX_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_preview_cb_buffers = VIDEO_MAX_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_recording_buffers = VIDEO_MAX_FRAME; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_picture_buffers = VIDEO_MAX_FRAME; +#else + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_bayer_buffers = FPS120_NUM_REQUEST_RAW_BUFFER + FPS120_NUM_SERVICE_RAW_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_preview_buffers = FPS120_NUM_REQUEST_PREVIEW_BUFFER + FPS120_NUM_SERVICE_PREVIEW_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_preview_cb_buffers = FPS120_NUM_REQUEST_CALLBACK_BUFFER + FPS120_NUM_SERVICE_CALLBACK_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_recording_buffers = FPS120_NUM_REQUEST_VIDEO_BUFFER + FPS120_NUM_SERVICE_VIDEO_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_picture_buffers = FPS120_NUM_REQUEST_JPEG_BUFFER + FPS120_NUM_SERVICE_JPEG_BUFFER; +#endif + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_request_raw_buffers = FPS120_NUM_REQUEST_RAW_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_request_preview_buffers = FPS120_NUM_REQUEST_PREVIEW_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_request_callback_buffers = FPS120_NUM_REQUEST_CALLBACK_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_request_video_buffers = FPS120_NUM_REQUEST_VIDEO_BUFFER; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_request_jpeg_buffers = FPS120_NUM_REQUEST_JPEG_BUFFER; + /* Blocking request */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_min_block_request = FPS120_NUM_REQUEST_BLOCK_MIN; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].bufInfo.num_max_block_request = FPS120_NUM_REQUEST_BLOCK_MAX; + /* Prepare buffers */ + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_FLITE] = FPS120_PIPE_FLITE_PREPARE_COUNT; + exynosConfig.info[CONFIG_MODE::HIGHSPEED_120].pipeInfo.prepare[PIPE_3AA] = 
FPS120_PIPE_3AA_PREPARE_COUNT; +#endif + + m_parameters->setConfig(&exynosConfig); + + m_exynosconfig = m_parameters->getConfig(); + + return NO_ERROR; +} + +status_t ExynosCamera3::m_setupFrameFactoryToRequest() +{ + status_t ret = NO_ERROR; + const char *intentName = NULL; + const char *streamIDName = NULL; + ExynosCamera3FrameFactory *factory = NULL; + + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; +#if defined(ENABLE_FULL_FRAME) + if (factory != NULL) { + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_PREVIEW, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_VIDEO, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_CALLBACK, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_RAW, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_JPEG, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_ZSL_OUTPUT, factory); + } else { + CLOGE2("FRAME_FACTORY_TYPE_CAPTURE_PREVIEW factory is NULL!!!!"); + } +#else + if (factory != NULL) { + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_PREVIEW, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_VIDEO, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_CALLBACK, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_RAW, factory); + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_ZSL_OUTPUT, factory); + + /* Set reprocessing frameFactory */ + if (m_parameters->isReprocessing() == true) { + if (m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING] != NULL) + factory = m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING]; + else + CLOGE2("FRAME_FACTORY_TYPE_REPROCESSING factory is NULL!!!!"); + } + + m_requestMgr->setRequestsInfo(HAL_STREAM_ID_JPEG, factory); + } else { + CLOGE2("FRAME_FACTORY_TYPE_CAPTURE_PREVIEW factory is NULL!!!!"); + } +#endif + + return ret; +} + +status_t ExynosCamera3::m_setStreamInfo(camera3_stream_configuration *streamList) +{ + int ret = OK; + int id = 0; + + CLOGD2("In"); + + /* sanity check */ + if (streamList == NULL) { + CLOGE2("NULL stream configuration"); + return BAD_VALUE; + } + + if (streamList->streams == NULL) { + CLOGE2("NULL stream list"); + return BAD_VALUE; + } + + if (streamList->num_streams < 1) { + CLOGE2("Bad number of streams requested: %d", streamList->num_streams); + return BAD_VALUE; + } + + /* check input stream */ + camera3_stream_t *inputStream = NULL; + for (size_t i = 0; i < streamList->num_streams; i++) { + camera3_stream_t *newStream = streamList->streams[i]; + if (newStream == NULL) { + CLOGE2("Stream index %zu was NULL", i); + return BAD_VALUE; + } + // for debug + CLOGD2("Stream(%p), ID(%zu), type(%d), usage(%#x) format(%#x) w(%d),h(%d)", + newStream, i, newStream->stream_type, newStream->usage, newStream->format, newStream->width, newStream->height); + + if ((newStream->stream_type == CAMERA3_STREAM_INPUT) || + (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL)) { + if (inputStream != NULL) { + CLOGE2("Multiple input streams requested!"); + return BAD_VALUE; + } + inputStream = newStream; + } + + /* HACK : check capture stream */ + if (newStream->format == 0x21) + isCaptureConfig = true; + + /* HACK : check recording stream */ + if ((newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) + && (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)) { + CLOGI2("recording stream checked"); + isRecordingConfig = true; + } + + // TODO: format validation + } + + /* 1. 
Invalidate all current streams */ + List keylist; + List::iterator iter; + ExynosCameraStream *stream = NULL; + keylist.clear(); + m_streamManager->getStreamKeys(&keylist); + for (iter = keylist.begin(); iter != keylist.end(); iter++) { + m_streamManager->getStream(*iter, &stream); + stream->setRegisterStream(EXYNOS_STREAM::HAL_STREAM_STS_INVALID); + stream->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED); + } + + /* 2. Remove dead streams */ + keylist.clear(); + stream = NULL; + id = 0; + EXYNOS_STREAM::STATE registerStream = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + m_streamManager->getStreamKeys(&keylist); + for (iter = keylist.begin(); iter != keylist.end(); iter++) { + m_streamManager->getStream(*iter, &stream); + ret = stream->getRegisterStream(®isterStream); + if (registerStream == EXYNOS_STREAM::HAL_STREAM_STS_INVALID){ + ret = stream->getID(&id); + if (id < 0) { + CLOGE2("getID failed id(%d)", id); + continue; + } + m_streamManager->deleteStream(id); + } + } + + /* 3. Update stream info */ + for (size_t i = 0; i < streamList->num_streams; i++) { + stream = NULL; + camera3_stream_t *newStream = streamList->streams[i]; + if (newStream->priv == NULL) { + /* new stream case */ + ret = m_enumStreamInfo(newStream); + if (ret) { + CLOGE2("Register stream failed %p", newStream); + return ret; + } + } else { + /* Existing stream, reuse current stream */ + stream = static_cast(newStream->priv); + stream->setRegisterStream(EXYNOS_STREAM::HAL_STREAM_STS_VALID); + } + } + + /* 4. Debug */ + CLOGD2("Out"); + return ret; +} + +status_t ExynosCamera3::m_enumStreamInfo(camera3_stream_t *stream) +{ + CLOGD2("In"); + int ret = OK; + ExynosCameraStream *newStream = NULL; + int id = 0; + int actualFormat = 0; + int planeCount = 0; + int requestBuffer = 0; + int outputPortId = 0; + EXYNOS_STREAM::STATE registerStream; + EXYNOS_STREAM::STATE registerBuffer; + + registerStream = EXYNOS_STREAM::HAL_STREAM_STS_VALID; + registerBuffer = EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED; + + if (stream == NULL) { + CLOGE2("stream is NULL."); + return INVALID_OPERATION; + } + + /* Update gralloc usage */ + switch (stream->stream_type) { + case CAMERA3_STREAM_INPUT: + stream->usage |= GRALLOC_USAGE_HW_CAMERA_READ; + break; + case CAMERA3_STREAM_OUTPUT: + stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; + break; + case CAMERA3_STREAM_BIDIRECTIONAL: + stream->usage |= (GRALLOC_USAGE_HW_CAMERA_READ | GRALLOC_USAGE_HW_CAMERA_WRITE); + break; + default: + CLOGE2("Invalid stream_type %d", stream->stream_type); + break; + } + + switch (stream->stream_type) { + case CAMERA3_STREAM_OUTPUT: + // TODO: split this routine to function + switch (stream->format) { + case HAL_PIXEL_FORMAT_BLOB: + CLOGD2("HAL_PIXEL_FORMAT_BLOB format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_JPEG; + actualFormat = HAL_PIXEL_FORMAT_BLOB; + planeCount = 1; + outputPortId = 0; + requestBuffer = m_exynosconfig->current->bufInfo.num_request_jpeg_buffers; + break; + case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: + if (stream->usage & GRALLOC_USAGE_HW_TEXTURE || stream->usage & GRALLOC_USAGE_HW_COMPOSER) { + CLOGD2("GRALLOC_USAGE_HW_TEXTURE foramt(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_PREVIEW; + actualFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + planeCount = 2; + outputPortId = m_streamManager->getYuvStreamCount(); + requestBuffer = m_exynosconfig->current->bufInfo.num_request_preview_buffers; + } else 
if(stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) { + CLOGD2("GRALLOC_USAGE_HW_VIDEO_ENCODER format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_VIDEO; + actualFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; /* NV12M */ + planeCount = 2; + outputPortId = m_streamManager->getYuvStreamCount(); + requestBuffer = m_exynosconfig->current->bufInfo.num_request_video_buffers; + } else if(stream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) { + CLOGD2("GRALLOC_USAGE_HW_CAMERA_ZSL format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_ZSL_OUTPUT; + actualFormat = HAL_PIXEL_FORMAT_RAW16; + planeCount = 1; + requestBuffer = m_exynosconfig->current->bufInfo.num_request_raw_buffers; + } else { + CLOGE2("HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED unknown usage(%#x) format(%#x) stream_type(%#x)", stream->usage, stream->format, stream->stream_type); + ret = BAD_VALUE; + goto func_err; + break; + } + break; + case HAL_PIXEL_FORMAT_YCbCr_420_888: + CLOGD2("HAL_PIXEL_FORMAT_YCbCr_420_888 format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_CALLBACK; + actualFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; + planeCount = 1; + outputPortId = m_streamManager->getYuvStreamCount(); + requestBuffer = m_exynosconfig->current->bufInfo.num_request_callback_buffers; + break; + /* case HAL_PIXEL_FORMAT_RAW_SENSOR: */ + case HAL_PIXEL_FORMAT_RAW16: + CLOGD2("HAL_PIXEL_FORMAT_RAW_XXX format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_RAW; + actualFormat = HAL_PIXEL_FORMAT_RAW16; + planeCount = 1; + outputPortId = 0; + requestBuffer = m_exynosconfig->current->bufInfo.num_request_raw_buffers; + break; + default: + CLOGE2("Not supported image format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + ret = BAD_VALUE; + goto func_err; + break; + } + break; + case CAMERA3_STREAM_INPUT: + case CAMERA3_STREAM_BIDIRECTIONAL: + switch (stream->format) { + /* case HAL_PIXEL_FORMAT_RAW_SENSOR: */ + case HAL_PIXEL_FORMAT_RAW16: + case HAL_PIXEL_FORMAT_RAW_OPAQUE: + CLOGD2("HAL_PIXEL_FORMAT_RAW_XXX format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_RAW; + actualFormat = HAL_PIXEL_FORMAT_RAW16; + planeCount = 1; + requestBuffer = m_exynosconfig->current->bufInfo.num_request_raw_buffers; + break; + case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: + if (stream->usage & GRALLOC_USAGE_HW_CAMERA_ZSL) { + CLOGD2("GRALLOC_USAGE_HW_CAMERA_ZSL foramt(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + id = HAL_STREAM_ID_ZSL_INPUT; + actualFormat = HAL_PIXEL_FORMAT_RAW16; + planeCount = 1; + requestBuffer = m_exynosconfig->current->bufInfo.num_request_raw_buffers; + break; + } else { + CLOGE2("HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED unknown usage(%#x) format(%#x) stream_type(%#x)", stream->usage, stream->format, stream->stream_type); + ret = BAD_VALUE; + goto func_err; + } + break; + default: + CLOGE2("Not supported image format(%#x) usage(%#x) stream_type(%#x)", stream->format, stream->usage, stream->stream_type); + goto func_err; + } + break; + default: + CLOGE2("Unknown stream_type(%#x) format(%#x) usage(%#x)", stream->stream_type, stream->format, stream->usage); + ret = BAD_VALUE; + goto func_err; + } + + newStream = m_streamManager->createStream(id, stream); + if (newStream 
== NULL) { + CLOGE2("createStream is NULL id(%d)", id); + goto func_err; + } + + newStream->setRegisterStream(registerStream); + newStream->setRegisterBuffer(registerBuffer); + newStream->setFormat(actualFormat); + newStream->setPlaneCount(planeCount); + newStream->setOutputPortId(outputPortId); + newStream->setRequestBuffer(requestBuffer); + +func_err: + CLOGD2("Out"); + + return ret; + +} + +void ExynosCamera3::m_createManagers(void) +{ + if (!m_streamManager) { + m_streamManager = new ExynosCameraStreamManager(m_cameraId); + CLOGD2("Stream Manager created"); + } + + /* Create buffer manager */ + if (!m_fliteBufferMgr) + m_createInternalBufferManager(&m_fliteBufferMgr, "INTERNAL_BAYER_BUF"); + if (!m_3aaBufferMgr) + m_createInternalBufferManager(&m_3aaBufferMgr, "3AA_IN_BUF"); + if (!m_ispBufferMgr) + m_createInternalBufferManager(&m_ispBufferMgr, "ISP_IN_BUF"); + if (!m_yuvCaptureBufferMgr) + m_createInternalBufferManager(&m_yuvCaptureBufferMgr, "YUV_CAPTURE_IN_BUF"); + if (!m_internalScpBufferMgr) + m_createInternalBufferManager(&m_internalScpBufferMgr, "INTERNAL_SCP_BUF"); + if (!m_vraBufferMgr) + m_createInternalBufferManager(&m_vraBufferMgr, "VRA_BUF"); + if (!m_gscBufferMgr) + m_createInternalBufferManager(&m_gscBufferMgr, "GSC_BUF"); + if (!m_ispReprocessingBufferMgr) + m_createInternalBufferManager(&m_ispReprocessingBufferMgr, "ISP_RE_BUF"); + if (!m_yuvCaptureReprocessingBufferMgr) + m_createInternalBufferManager(&m_yuvCaptureReprocessingBufferMgr, "YUV_CAPTURE_RE_BUF"); + if (!m_thumbnailBufferMgr) + m_createInternalBufferManager(&m_thumbnailBufferMgr, "THUMBNAIL_BUF"); + +} + +void ExynosCamera3::m_createThreads(void) +{ + m_mainThread = new mainCameraThread(this, &ExynosCamera3::m_mainThreadFunc, "m_mainThreadFunc"); + CLOGD2("DEBUG(%s):Main thread created", __FUNCTION__); + + /* m_previewStreamXXXThread is for seperated frameDone each own handler */ + m_previewStreamBayerThread = new mainCameraThread(this, &ExynosCamera3::m_previewStreamBayerPipeThreadFunc, "PreviewBayerThread"); + CLOGD2("Bayer Preview stream thread created"); + + m_previewStream3AAThread = new mainCameraThread(this, &ExynosCamera3::m_previewStream3AAPipeThreadFunc, "Preview3AAThread"); + CLOGD2("3AA Preview stream thread created"); + + m_previewStreamVRAThread = new mainCameraThread(this, &ExynosCamera3::m_previewStreamVRAPipeThreadFunc, "PreviewVRAThread"); + CLOGD2("VRA Preview stream thread created"); + + m_duplicateBufferThread = new mainCameraThread(this, &ExynosCamera3::m_duplicateBufferThreadFunc, "DuplicateThread"); + CLOGD2("Duplicate buffer thread created"); + + m_captureStreamThread = new mainCameraThread(this, &ExynosCamera3::m_captureStreamThreadFunc, "CaptureThread"); + CLOGD2("Capture stream thread created"); + + m_setBuffersThread = new mainCameraThread(this, &ExynosCamera3::m_setBuffersThreadFunc, "setBuffersThread"); + CLOGD2("Buffer allocation thread created"); + + m_framefactoryCreateThread = new mainCameraThread(this, &ExynosCamera3::m_frameFactoryCreateThreadFunc, "FrameFactoryInitThread"); + CLOGD2("FrameFactoryInitThread created"); + + m_selectBayerThread = new mainCameraThread(this, &ExynosCamera3::m_selectBayerThreadFunc, "SelectBayerThreadFunc"); + CLOGD2("SelectBayerThread created"); + + m_captureThread = new mainCameraThread(this, &ExynosCamera3::m_captureThreadFunc, "m_captureThreadFunc"); + CLOGD2("FrameFactoryInitThread created"); + + m_reprocessingFrameFactoryStartThread = new mainCameraThread(this, &ExynosCamera3::m_reprocessingFrameFactoryStartThreadFunc, 
"m_reprocessingFrameFactoryStartThread"); + CLOGD2("m_reprocessingFrameFactoryStartThread created"); + + m_startPictureBufferThread = new mainCameraThread(this, &ExynosCamera3::m_startPictureBufferThreadFunc, "startPictureBufferThread"); + CLOGD2("startPictureBufferThread created"); + + m_frameFactoryStartThread = new mainCameraThread(this, &ExynosCamera3::m_frameFactoryStartThreadFunc, "FrameFactoryStartThread"); + CLOGD2("FrameFactoryStartThread created"); + + m_internalFrameThread = new mainCameraThread(this, &ExynosCamera3::m_internalFrameThreadFunc, "InternalFrameThread"); + CLOGD2("Internal Frame Handler Thread created"); + + m_monitorThread = new mainCameraThread(this, &ExynosCamera3::m_monitorThreadFunc, "MonitorThread"); + CLOGD2("MonitorThread created"); +} + +ExynosCamera3::~ExynosCamera3() +{ + this->release(); +} + +void ExynosCamera3::release() +{ + CLOGI2("-IN-"); + int ret = 0; +// m_mainFrameThread->requestExitAndWait(); + + m_releaseBuffers(); + + if (m_fliteBufferMgr!= NULL) { + delete m_fliteBufferMgr; + m_fliteBufferMgr = NULL; + } + + if (m_3aaBufferMgr != NULL) { + delete m_3aaBufferMgr; + m_3aaBufferMgr = NULL; + } + + if (m_ispBufferMgr != NULL) { + delete m_ispBufferMgr; + m_ispBufferMgr = NULL; + } + + if (m_yuvCaptureBufferMgr != NULL) { + delete m_yuvCaptureBufferMgr; + m_yuvCaptureBufferMgr = NULL; + } + + if (m_vraBufferMgr != NULL) { + delete m_vraBufferMgr; + m_vraBufferMgr = NULL; + } + + if (m_gscBufferMgr != NULL) { + delete m_gscBufferMgr; + m_gscBufferMgr = NULL; + } + + if (m_yuvCaptureReprocessingBufferMgr != NULL) { + delete m_yuvCaptureReprocessingBufferMgr; + m_yuvCaptureReprocessingBufferMgr = NULL; + } + + if (m_internalScpBufferMgr != NULL) { + delete m_internalScpBufferMgr; + m_internalScpBufferMgr = NULL; + } + + if (m_ispReprocessingBufferMgr != NULL) { + delete m_ispReprocessingBufferMgr; + m_ispReprocessingBufferMgr = NULL; + } + + if (m_thumbnailBufferMgr != NULL) { + delete m_thumbnailBufferMgr; + m_thumbnailBufferMgr = NULL; + } + + if (m_skipBufferMgr != NULL) { + delete m_skipBufferMgr; + m_skipBufferMgr = NULL; + } + + if (m_ionAllocator != NULL) { + delete m_ionAllocator; + m_ionAllocator = NULL; + } + + if (m_shotDoneQ != NULL) { + delete m_shotDoneQ; + m_shotDoneQ = NULL; + } + + for (int i = 0; i < MAX_PIPE_NUM; i++) { + if (m_pipeFrameDoneQ[i] != NULL) { + delete m_pipeFrameDoneQ[i]; + m_pipeFrameDoneQ[i] = NULL; + } + } + + if (m_duplicateBufferDoneQ != NULL) { + delete m_duplicateBufferDoneQ; + m_duplicateBufferDoneQ = NULL; + } + + /* 4. 
release queues*/ + if (m_pipeCaptureFrameDoneQ != NULL) { + delete m_pipeCaptureFrameDoneQ; + m_pipeCaptureFrameDoneQ = NULL; + } + + if (m_reprocessingDoneQ != NULL) { + delete m_reprocessingDoneQ; + m_reprocessingDoneQ = NULL; + } + + if (m_internalFrameDoneQ != NULL) { + delete m_internalFrameDoneQ; + m_internalFrameDoneQ = NULL; + } + + if (m_frameFactoryQ != NULL) { + delete m_frameFactoryQ; + m_frameFactoryQ = NULL; + } + + if (m_selectBayerQ != NULL) { + delete m_selectBayerQ; + m_selectBayerQ = NULL; + } + + if (m_captureQ != NULL) { + delete m_captureQ; + m_captureQ = NULL; + } + + if (m_frameMgr != NULL) { + delete m_frameMgr; + m_frameMgr = NULL; + } + + if (m_streamManager != NULL) { + delete m_streamManager; + m_streamManager = NULL; + } + + if (m_requestMgr!= NULL) { + delete m_requestMgr; + m_requestMgr = NULL; + } + + m_deinitFrameFactory(); + +#if 1 + if (m_parameters != NULL) { + delete m_parameters; + m_parameters = NULL; + } + + if (m_metadataConverter != NULL) { + delete m_parameters; + m_parameters = NULL; + } + + if (m_captureSelector != NULL) { + delete m_captureSelector; + m_captureSelector = NULL; + } + + if (m_captureZslSelector != NULL) { + delete m_captureZslSelector; + m_captureZslSelector = NULL; + } + + if (m_sccCaptureSelector != NULL) { + delete m_sccCaptureSelector; + m_sccCaptureSelector = NULL; + } +#endif + + // TODO: clean up + // m_resultBufferVectorSet + // m_processList + // m_postProcessList + // m_pipeFrameDoneQ + CLOGI2("-OUT-"); +} + +status_t ExynosCamera3::initilizeDevice(const camera3_callback_ops *callbackOps) +{ + status_t ret = NO_ERROR; + CLOGD2("-IN-"); + + /* set callback ops */ + m_requestMgr->setCallbackOps(callbackOps); + + if (m_parameters->isReprocessing() == true) { + ExynosCameraBufferManager *bufMgr = NULL; + if (m_parameters->isUseYuvReprocessing() == true + && m_parameters->isUsing3acForIspc() == true) + bufMgr = m_yuvCaptureBufferMgr; + else + bufMgr = m_fliteBufferMgr; + + if (m_captureSelector == NULL) { + m_captureSelector = new ExynosCameraFrameSelector(m_parameters, bufMgr, m_frameMgr); + ret = m_captureSelector->setFrameHoldCount(REPROCESSING_BAYER_HOLD_COUNT); + if (ret < 0) + CLOGE2("m_captureSelector setFrameHoldCount(%d) is fail", REPROCESSING_BAYER_HOLD_COUNT); + } + + if (m_captureZslSelector == NULL) { + m_captureZslSelector = new ExynosCameraFrameSelector(m_parameters, m_bayerBufferMgr, m_frameMgr); + ret = m_captureZslSelector->setFrameHoldCount(REPROCESSING_BAYER_HOLD_COUNT); + if (ret < 0) + CLOGE2("m_captureZslSelector setFrameHoldCount(%d) is fail", REPROCESSING_BAYER_HOLD_COUNT); + } + } else { + if (m_sccCaptureSelector == NULL) { + ExynosCameraBufferManager *bufMgr = NULL; + + if (m_parameters->isSccCapture() == true + || m_parameters->isUsing3acForIspc() == true) { + /* TODO: Dynamic select buffer manager for capture */ + bufMgr = m_yuvCaptureBufferMgr; + } + + m_sccCaptureSelector = new ExynosCameraFrameSelector(m_parameters, bufMgr, m_frameMgr); + } + } + + m_framefactoryCreateThread->run(); + m_frameMgr->start(); + + m_startPictureBufferThread->run(); + + /* + * NOTICE: Join is to avoid dual scanario's problem. + * The problem is that back camera's EOS was not finished, but front camera opened. + * Two instance was actually different but driver accepts same instance. 
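     * (Editor's gloss: in other words, the join() below makes initilizeDevice()
     * block until frame-factory creation has finished, so that a second camera
     * open cannot race the first instance's EOS/teardown and lead the driver to
     * reuse the same instance for both.)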
+ */ + m_framefactoryCreateThread->join(); + return ret; +} + +status_t ExynosCamera3::releaseDevice(void) +{ + status_t ret = NO_ERROR; + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_setBuffersThread->requestExitAndWait(); + m_framefactoryCreateThread->requestExitAndWait(); + m_monitorThread->requestExitAndWait(); + m_previewStreamBayerThread->requestExitAndWait(); + m_previewStream3AAThread->requestExitAndWait(); + m_previewStreamVRAThread->requestExitAndWait(); + m_duplicateBufferThread->requestExitAndWait(); + m_internalFrameThread->requestExitAndWait(); + m_mainThread->requestExitAndWait(); + m_selectBayerThread->requestExitAndWait(); + + if (m_shotDoneQ != NULL) + m_shotDoneQ->release(); + + if (m_flushFlag == false) + flush(); + + ret = m_clearList(&m_processList, &m_processLock); + if (ret < 0) { + CLOGE2("m_clearList fail"); + return ret; + } + + /* initialize frameQ */ + for (int i = 0; i < MAX_PIPE_NUM; i++) { + if (m_pipeFrameDoneQ[i] != NULL) { + m_pipeFrameDoneQ[i]->release(); + m_pipeFrameDoneQ[i] = NULL; + } + } + m_reprocessingDoneQ->release(); + m_pipeCaptureFrameDoneQ->release(); + m_duplicateBufferDoneQ->release(); + + /* internal frame */ + m_internalFrameDoneQ->release(); + + m_frameMgr->stop(); + m_frameMgr->deleteAllFrame(); + + return ret; +} + +status_t ExynosCamera3::construct_default_request_settings(camera_metadata_t **request, int type) +{ + Mutex::Autolock l(m_requestLock); + factory_handler_t frameCreateHandler; + factory_donehandler_t frameDoneHandler; + ExynosCamera3FrameFactory *factory = NULL; + + CLOGD2("Type(%d)", type); + if ((type < 0) || (type >= CAMERA3_TEMPLATE_COUNT)) { + CLOGE2("Unknown request settings template: %d", type); + return -ENODEV; + } + + m_requestMgr->constructDefaultRequestSettings(type, request); + + CLOGI2("-OUT-"); + return OK; +} + +status_t ExynosCamera3::configureStreams(camera3_stream_configuration *stream_list) +{ + Mutex::Autolock l(m_requestLock); + + status_t ret = NO_ERROR; + EXYNOS_STREAM::STATE registerStream = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + EXYNOS_STREAM::STATE registerbuffer = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ATONCE; + int id = 0; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int planeCount, width, height, maxBufferCount, startIndex; + CameraParameters parameter; + bool updateSize = true; + int streamPixelFormat = 0; + int streamPlaneCount = 0; + int outputPortId = 0; + int currentConfigMode = m_parameters->getConfigMode(); + + /* prepare flite count init */ + m_prepareFliteCnt = 0; + + CLOGD2("-IN-"); + + /* sanity check for stream_list */ + if (stream_list == NULL) { + CLOGE2("NULL stream configuration"); + return BAD_VALUE; + } + + if (stream_list->streams == NULL) { + CLOGE2("NULL stream list"); + return BAD_VALUE; + } + + if (stream_list->num_streams < 1) { + CLOGE2("Bad number of streams requested: %d", stream_list->num_streams); + return BAD_VALUE; + } + + if (stream_list->operation_mode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) { + ALOGI("INFO(%s[%d]):High speed mode is configured. 
StreamCount %d", + __FUNCTION__, __LINE__, stream_list->num_streams); + m_parameters->setConfigMode(CONFIG_MODE::HIGHSPEED_120); + m_exynosconfig = m_parameters->getConfig(); + m_flagBayerRequest = false; + } else { + m_parameters->setConfigMode(CONFIG_MODE::NORMAL); + m_exynosconfig = m_parameters->getConfig(); + m_flagBayerRequest = false; + } + + /* start allocation of internal buffers */ + if (m_checkConfigStream == false) { + m_setBuffersThread->run(PRIORITY_DEFAULT); + } + + ret = m_streamManager->setConfig(m_exynosconfig); + if (ret) { + CLOGE2("setMaxBuffers() failed!!"); + return ret; + } + ret = m_setStreamInfo(stream_list); + if (ret) { + CLOGE2("setStreams() failed!!"); + return ret; + } + + /* flush request Mgr */ + m_requestMgr->flush(); + + /* HACK :: restart frame factory */ + if (m_checkConfigStream == true || + ((isCaptureConfig == true) && (stream_list->num_streams == 1)) + || ((isRecordingConfig == true) && (recordingEnabled == false)) + || ((isRecordingConfig == false) && (recordingEnabled == true))) { + CLOGI2("restart frame factory isCaptureConfig(%d), isRecordingConfig(%d), stream_list->num_streams(%d)", + isCaptureConfig, isRecordingConfig, stream_list->num_streams); + + isCaptureConfig = false; + /* In case of preview with Recording, enter this block even if not restart */ + if (m_checkConfigStream == true) + isRestarted = true; + + recordingEnabled = false; + isRecordingConfig = false; + + if (m_flagStartReprocessingFrameFactory == true) + m_stopReprocessingFrameFactory(m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING]); + + if (m_flagStartFrameFactory == true) { + m_stopFrameFactory(m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]); + m_removeInternalFrames(&m_processList, &m_processLock); + m_clearList(&m_captureProcessList, &m_captureProcessLock); + } + + if (m_parameters->isReprocessing() == true) { + m_captureSelector->release(); + m_captureZslSelector->release(); + } else { + m_sccCaptureSelector->release(); + } + + /* restart frame manager */ + m_frameMgr->stop(); + m_frameMgr->deleteAllFrame(); + m_frameMgr->start(); + + /* Pull all internal buffers */ + for (int bufIndex = 0; bufIndex < m_fliteBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_fliteBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_3aaBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_3aaBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_internalScpBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_internalScpBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_yuvCaptureBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_yuvCaptureBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_vraBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_vraBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_gscBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_gscBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_ispBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_ispBufferMgr, bufIndex); + + if (currentConfigMode != m_parameters->getConfigMode()) { + CLOGI("INFO(%s[%d]):ConfigMode is changed. Reallocate the internal buffers. currentConfigMode %d newConfigMode %d", + __FUNCTION__, __LINE__, + currentConfigMode, m_parameters->getConfigMode()); + ret = m_releaseBuffers(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to releaseBuffers. 
ret %d", + __FUNCTION__, __LINE__, ret); + + m_setBuffersThread->run(PRIORITY_DEFAULT); + } + } + + /* clear previous settings */ + ret = m_requestMgr->clearPrevRequest(); + if (ret) { + CLOGE2("clearPrevRequest() failed!!"); + return ret; + } + + ret = m_requestMgr->clearPrevShot(); + if (ret < 0) { + CLOGE2("clearPrevShot() failed!! status(%d)", ret); + } + + /* check flag update size */ + updateSize = m_streamManager->findStream(HAL_STREAM_ID_PREVIEW); + + m_parameters->setSetfileYuvRange(); + + /* Create service buffer manager at each stream */ + for (size_t i = 0; i < stream_list->num_streams; i++) { + registerStream = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + registerbuffer = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + id = -1; + + camera3_stream_t *newStream = stream_list->streams[i]; + ExynosCameraStream *privStreamInfo = static_cast(newStream->priv); + privStreamInfo->getID(&id); + + CLOGI2("list_index(%zu), streamId(%d)", i, id); + CLOGD2("stream_type(%d), usage(%x), format(%x), width(%d), height(%d), max_buffers(%d)", + newStream->stream_type, newStream->usage, newStream->format, + newStream->width, newStream->height, newStream->max_buffers); + + privStreamInfo->getRegisterBuffer(®isterbuffer); + privStreamInfo->getPlaneCount(&streamPlaneCount); + privStreamInfo->getFormat(&streamPixelFormat); + if (registerbuffer != EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED) { + CLOGE2("privStreamInfo->registerBuffer state error!!"); + return BAD_VALUE; + } + + width = newStream->width; + height = newStream->height; + + privStreamInfo->getRegisterStream(®isterStream); + if (registerStream == EXYNOS_STREAM::HAL_STREAM_STS_INVALID) { + privStreamInfo->getID(&id); + CLOGE2("Invalid stream index(%zu) id(%d)", i, id); + ret = BAD_VALUE; + break; + } + + privStreamInfo->getRegisterBuffer(®isterbuffer); + + CLOGD2("streamID(%d) registerStream(%d) registerbuffer(%d)", id, registerStream, registerbuffer); + + if ((registerStream == EXYNOS_STREAM::HAL_STREAM_STS_VALID) && + (registerbuffer == EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED)) { + ExynosCameraBufferManager *bufferMgr = NULL; + switch (id % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_RAW: + CLOGD2("Create buffer manager(RAW)"); + ret = m_createServiceBufferManager(&m_bayerBufferMgr, "RAW_STREAM_BUF"); + if (ret < 0) { + CLOGE2("m_createBufferManager() failed!!"); + return ret; + } + + planeCount = streamPlaneCount + 1; + planeSize[0] = width * height * 2; + CLOGD2("planeCount(%d)+1", streamPlaneCount); + CLOGD2("planeSize[0](%d)", planeSize[0]); + CLOGD2("bytesPerLine[0](%d)", bytesPerLine[0]); + + /* set startIndex as the next internal buffer index */ + startIndex = m_exynosconfig->current->bufInfo.num_sensor_buffers; + maxBufferCount = m_exynosconfig->current->bufInfo.num_bayer_buffers; + CLOGD2("(RAW)- maxBufferCount(%d)", maxBufferCount); + + m_bayerBufferMgr->setAllocator(newStream); + m_allocBuffers(m_bayerBufferMgr, planeCount, planeSize, bytesPerLine, startIndex, maxBufferCount, true, false); + + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + privStreamInfo->setBufferManager(m_bayerBufferMgr); + CLOGD2("m_bayerBufferMgr - %p", m_bayerBufferMgr); + break; + case HAL_STREAM_ID_ZSL_OUTPUT: + CLOGD2("DEBUG(%s[%d]):Create buffer manager(ZSL)", __FUNCTION__, __LINE__); + + planeCount = streamPlaneCount + 1; + planeSize[0] = width * height * 2; + + CLOGD2("planeCount %d+1", streamPlaneCount); + CLOGD2("planeSize[0] %d", planeSize[0]); + CLOGD2("bytesPerLine[0] %d", bytesPerLine[0]); + + /* set startIndex as the next 
internal buffer index */ + startIndex = NUM_BAYER_BUFFERS; + maxBufferCount = m_exynosconfig->current->bufInfo.num_bayer_buffers; + + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + break; + case HAL_STREAM_ID_ZSL_INPUT: + planeCount = streamPlaneCount + 1; + planeSize[0] = width * height * 2; + + ALOGD("DEBUG(%s[%d]):planeCount %d+1", + __FUNCTION__, __LINE__, streamPlaneCount); + ALOGD("DEBUG(%s[%d]):planeSize[0] %d", + __FUNCTION__, __LINE__, planeSize[0]); + ALOGD("DEBUG(%s[%d]):bytesPerLine[0] %d", + __FUNCTION__, __LINE__, bytesPerLine[0]); + + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + break; + case HAL_STREAM_ID_PREVIEW: + + ret = privStreamInfo->getOutputPortId(&outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to getOutputPortId for PREVIEW stream", + __FUNCTION__, __LINE__); + return ret; + } + + ret = m_parameters->checkYuvSize(width, height, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkYuvSize for PREVIEW stream. size %dx%d outputPortId %d", + __FUNCTION__, __LINE__, width, height, outputPortId); + return ret; + } + + ret = m_parameters->checkYuvFormat(streamPixelFormat, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkYuvFormat for PREVIEW stream. format %x outputPortId %d", + __FUNCTION__, __LINE__, streamPixelFormat, outputPortId); + return ret; + } + + maxBufferCount = m_exynosconfig->current->bufInfo.num_preview_buffers; + CLOGD2("(PREVIEW)- maxBufferCount(%d)", maxBufferCount); + ret = m_parameters->setYuvBufferCount(maxBufferCount, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setYuvBufferCount for PREVIEW stream. maxBufferCount %d outputPortId %d", + __FUNCTION__, __LINE__, maxBufferCount, outputPortId); + return ret; + } + + CLOGD2("Create buffer manager(PREVIEW)"); + ret = m_createServiceBufferManager(&bufferMgr, "PREVIEW_STREAM_BUF"); + if (ret != NO_ERROR) { + CLOGE2("m_createBufferManager() failed!!"); + return ret; + } + + planeCount = streamPlaneCount + 1; + planeSize[0] = width * height; + planeSize[1] = width * height / 2; + CLOGD2("planeCount(%d)+1", streamPlaneCount); + CLOGD2("planeSize[0](%d), planeSize[1](%d)", planeSize[0], planeSize[1]); + CLOGD2("bytesPerLine[0](%d)", bytesPerLine[0]); + + bufferMgr->setAllocator(newStream); + m_allocBuffers(bufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, true, false); + + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + privStreamInfo->setBufferManager(bufferMgr); + CLOGD2("previewBufferMgr - %p", bufferMgr); + break; + case HAL_STREAM_ID_VIDEO: + + ret = privStreamInfo->getOutputPortId(&outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to getOutputPortId for VIDEO stream", + __FUNCTION__, __LINE__); + return ret; + } + + ret = m_parameters->checkYuvSize(width, height, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkYuvSize for VIDEO stream. size %dx%d outputPortId %d", + __FUNCTION__, __LINE__, width, height, outputPortId); + return ret; + } + + ret = m_parameters->checkYuvFormat(streamPixelFormat, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkYuvFormat for VIDEO stream. 
format %x outputPortId %d", + __FUNCTION__, __LINE__, streamPixelFormat, outputPortId); + return ret; + } + + maxBufferCount = m_exynosconfig->current->bufInfo.num_recording_buffers; + CLOGD2("(VIDEO)- maxBufferCount(%d)", maxBufferCount); + ret = m_parameters->setYuvBufferCount(maxBufferCount, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setYuvBufferCount for VIDEO stream. maxBufferCount %d outputPortId %d", + __FUNCTION__, __LINE__, maxBufferCount, outputPortId); + return ret; + } + CLOGD2("Create buffer manager(VIDEO)"); + ret = m_createServiceBufferManager(&bufferMgr, "RECORDING_STREAM_BUF"); + if (ret != NO_ERROR) { + CLOGE2("m_createBufferManager() failed!!"); + return ret; + } + + planeCount = streamPlaneCount + 1; + planeSize[0] = width * height; + planeSize[1] = width * height / 2; + + bufferMgr->setAllocator(newStream); + m_allocBuffers(bufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, false, false); + + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + privStreamInfo->setBufferManager(bufferMgr); + CLOGD2("recBufferMgr - %p", bufferMgr); + break; + + case HAL_STREAM_ID_JPEG: + + ret = m_parameters->checkPictureSize(width, height); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkPictureSize for JPEG stream. size %dx%d", + __FUNCTION__, __LINE__, width, height); + return ret; + } + + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + CLOGD2("Create buffer manager(JPEG)"); + ret = m_createServiceBufferManager(&bufferMgr, "JPEG_STREAM_BUF"); + if (ret < 0) { + CLOGE2("m_createBufferManager() failed!!"); + return ret; + } + + planeCount = streamPlaneCount; + planeSize[0] = width * height * 2; + + CLOGD2("planeCount(%d), planeSize[0](%d), bytesPerLine[0](%d)", + streamPlaneCount, planeSize[0], bytesPerLine[0]); + + bufferMgr->setAllocator(newStream); + m_allocBuffers(bufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, false, false); + + CLOGD2("JPEG stream size = %d", planeSize[0]); + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + privStreamInfo->setBufferManager(bufferMgr); + CLOGD2("JpegBufferMgr - %p", bufferMgr); + + break; + case HAL_STREAM_ID_CALLBACK: + + ret = privStreamInfo->getOutputPortId(&outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to getOutputPortId for CALLBACK stream", + __FUNCTION__, __LINE__); + return ret; + } + + ret = m_parameters->checkYuvSize(width, height, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkYuvSize for CALLBACK stream. size %dx%d outputPortId %d", + __FUNCTION__, __LINE__, width, height, outputPortId); + return ret; + } + + ret = m_parameters->checkYuvFormat(streamPixelFormat, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to checkYuvFormat for CALLBACK stream. format %x outputPortId %d", + __FUNCTION__, __LINE__, streamPixelFormat, outputPortId); + return ret; + } + + maxBufferCount = m_exynosconfig->current->bufInfo.num_preview_cb_buffers; + ret = m_parameters->setYuvBufferCount(maxBufferCount, outputPortId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setYuvBufferCount for CALLBACK stream. 
maxBufferCount %d outputPortId %d", + __FUNCTION__, __LINE__, maxBufferCount, outputPortId); + return ret; + } + CLOGD2("Create buffer manager(PREVIEW_CB)"); + + ret = m_createServiceBufferManager(&bufferMgr, "PREVIEW_CB_STREAM_BUF"); + if (ret < 0) { + CLOGE2("m_createBufferManager() failed!!"); + return ret; + } + + planeCount = streamPlaneCount + 1; + planeSize[0] = (width * height * 3) / 2; + + CLOGD2("planeCount %d", streamPlaneCount); + CLOGD2("planeSize[0] %d",planeSize[0]); + CLOGD2("bytesPerLine[0] %d",bytesPerLine[0]); + + bufferMgr->setAllocator(newStream); + m_allocBuffers(bufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, true, false); + privStreamInfo->setRegisterBuffer(EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED); + privStreamInfo->setBufferManager(bufferMgr); + CLOGD2("preivewCallbackBufferMgr - %p", bufferMgr); + + break; + + default: + CLOGE2("privStreamInfo->id is invalid !! id(%d)", id); + ret = BAD_VALUE; + break; + } + } + } + + /* Do pure bayer always reprocessing */ + m_checkConfigStream = true; + + CLOGD2("-OUT-"); + return ret; +} + +status_t ExynosCamera3::registerStreamBuffers(const camera3_stream_buffer_set_t *buffer_set) +{ + /* deprecated function */ + if (buffer_set == NULL) { + CLOGE2("buffer_set is NULL"); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCamera3::processCaptureRequest(camera3_capture_request *request) +{ + Mutex::Autolock l(m_requestLock); + ExynosCameraBuffer *buffer = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer dstBuf; + ExynosCameraStream *streamInfo = NULL; + uint32_t timeOutNs = 60 * 1000000; /* timeout default value is 60ms based on 15fps */ + uint32_t waitMaxBlockCnt = 0; + status_t ret = NO_ERROR; + camera3_stream_t *stream = NULL; + EXYNOS_STREAM::STATE registerBuffer = EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED; + uint32_t minBlockReqCount = 0; + uint32_t maxBlockReqCount = 0; + CameraMetadata meta; + camera_metadata_entry_t entry; + + CLOGV2("Capture request (%d) #out(%d)", request->frame_number, request->num_output_buffers); + + /* 1. Wait for allocation completion of internal buffers and creation of frame factory */ + m_setBuffersThread->join(); + m_framefactoryCreateThread->join(); + + /* 2. Check the validation of request */ + if (request == NULL) { + ALOGE("ERR(%s[%d]):NULL request!", __FUNCTION__, __LINE__); + ret = BAD_VALUE; + goto req_err; + } + + /* m_streamManager->dumpCurrentStreamList(); */ + + /* 3. Check NULL for service metadata */ + if ((request->settings == NULL) && (m_requestMgr->isPrevRequest())) { + CLOGE2("Request%d: NULL and no prev request!!", request->frame_number); + ret = BAD_VALUE; + goto req_err; + } + + /* 4. Check the registeration of input buffer on stream */ + if (request->input_buffer != NULL){ + stream = request->input_buffer->stream; + streamInfo = static_cast(stream->priv); + streamInfo->getRegisterBuffer(®isterBuffer); + + if (registerBuffer != EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED) { + CLOGE2("Request %d: Input buffer not from input stream!", request->frame_number); + CLOGE2("Bad Request %p, type(%d), format(%x)", request->input_buffer->stream, + request->input_buffer->stream->stream_type, request->input_buffer->stream->format); + ret = BAD_VALUE; + goto req_err; + } + } + + /* 5. 
Check the output buffer count */ + if ((request->num_output_buffers < 1) || (request->output_buffers == NULL)) { + CLOGE2("Request %d: No output buffers provided!", request->frame_number); + ret = BAD_VALUE; + goto req_err; + } + + CLOGV2("request->num_output_buffers(%d) frame_number(%d)", + request->num_output_buffers, request->frame_number); + + /* 6. Store request settings + * Caution : All information must be copied into internal data structure + * before receiving another request from service + */ + ret = m_pushRequest(request); + ret = m_registerStreamBuffers(request); + + /* 7. Calculate the timeout value for processing request based on actual fps setting */ + meta = request->settings; + if (request->settings != NULL && meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { + uint32_t minFps = 0, maxFps = 0; + entry = meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE); + minFps = entry.data.i32[0]; + maxFps = entry.data.i32[1]; + m_parameters->checkPreviewFpsRange(minFps, maxFps); + timeOutNs = (1000 / ((minFps == 0) ? 15 : minFps)) * 1000000; + } + + /* 8. Process initial requests for preparing the stream */ + if (request->frame_number == 0 || m_flushFlag == true || isRestarted == true) { + isRestarted = false; + m_flushFlag = false; + m_prepareFliteCnt = m_exynosconfig->current->pipeInfo.prepare[PIPE_FLITE]; + m_factoryStartFlag = true; + + ALOGV("DEBUG(%s[%d]):Start FrameFactory requestKey(%d) m_flushFlag(%d/%d)", + __FUNCTION__, __LINE__, request->frame_number, isRestarted, m_flushFlag); + m_frameFactoryStartDone = false; + m_frameFactoryStartThread->run(); + + if (m_parameters->isReprocessing() == true && + m_flagStartReprocessingFrameFactory == false) { + m_frameFactoryStartThread->join(); + m_reprocessingFrameFactoryStartThread->run(); + } + } + + m_flushWaitEnable = true; + + minBlockReqCount = m_exynosconfig->current->bufInfo.num_min_block_request; + maxBlockReqCount = m_exynosconfig->current->bufInfo.num_max_block_request; + waitMaxBlockCnt = minBlockReqCount * 10; + + /* + * Blocked this func if request counts(in HAL) is over than MIN request count. + * So we keeps MIN request counts in HAL. + * If HAL received too many requests, there are some late reactive problem in case of stopping, appling effects .. etc. + * MAX request count is not used now. 
But it will be used in case that HAL want to receive request as many as MAX request count + */ + while (m_requestMgr->getRequestCount() > minBlockReqCount && m_flushFlag == false && waitMaxBlockCnt > 0) { + if (m_frameFactoryStartDone == false) { + m_frameFactoryStartThread->join(); + } + if (m_parameters->isReprocessing() == true) + m_reprocessingFrameFactoryStartThread->join(); + status_t waitRet = NO_ERROR; + m_captureResultDoneLock.lock(); + waitRet = m_captureResultDoneCondition.waitRelative(m_captureResultDoneLock, timeOutNs); + if (waitRet == TIMED_OUT) + CLOGV2("time out (m_processList:%zu / totalRequestCnt:%d / " + "blockReqCount = min:%u, max:%u / waitcnt:%u)", + m_processList.size(), m_requestMgr->getRequestCount(), + minBlockReqCount, maxBlockReqCount, waitMaxBlockCnt); + + m_captureResultDoneLock.unlock(); + waitMaxBlockCnt--; + } + +req_err: + return ret; +} + +void ExynosCamera3::get_metadata_vendor_tag_ops(const camera3_device_t *, vendor_tag_query_ops_t *ops) +{ + ALOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (ops == NULL) { + CLOGE2("ops is NULL"); + return; + } +} + +status_t ExynosCamera3::flush() +{ + camera3_stream_buffer_t streamBuffer; + CameraMetadata result; + ExynosCamera3FrameFactory *frameFactory = NULL; + ExynosCameraRequest* request = NULL; + ResultRequest resultRequest = NULL; + ExynosCameraStream *stream = NULL; + ExynosCameraBuffer buffer; + List *list = &m_processList; + ExynosCameraFrame *curFrame = NULL; + List::iterator r; + List::iterator requestInfoR; + request_info_t *requestInfo = NULL; + int bufferIndex; + int requestIndex = -1; + status_t ret = NO_ERROR; + ExynosCameraBufferManager *bufferMgr = NULL; + + /* + * This flag should be set before stoping all pipes, + * because other func(like processCaptureRequest) must know call state about flush() entry level + */ + m_flushFlag = true; + m_captureResultDoneCondition.signal(); + + List *outputStreamId; + List::iterator outputStreamIdIter; + + int streamId = 0; + + Mutex::Autolock l(m_resultLock); + CLOGD2("IN+++", __FUNCTION__, __LINE__); + CLOGV2("ProcessListCount(%d)", list->size()); + + if (m_flushWaitEnable == false) { + CLOGD2("No need to wait & flush"); + goto func_exit; + } + + /* Wait for finishing frameFactoryStartThread */ + m_frameFactoryStartThread->requestExitAndWait(); + m_internalFrameThread->requestExit(); + m_mainThread->requestExitAndWait(); + + m_captureThread->requestExit(); + if (m_sccCaptureSelector != NULL) + m_sccCaptureSelector->wakeselectDynamicFrames(); + m_captureThread->requestExitAndWait(); + + /* Create frame for the waiting request */ + while (m_requestWaitingList.size() > 0) { + requestInfoR = m_requestWaitingList.begin(); + requestInfo = *requestInfoR; + request = requestInfo->request; + + m_createRequestFrameFunc(request); + + m_requestWaitingList.erase(requestInfoR); + } + + /* Stop pipeline */ + for (int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) { + if (m_frameFactory[i] != NULL) { + frameFactory = m_frameFactory[i]; + + for (int k = i + 1; k < FRAME_FACTORY_TYPE_MAX; k++) { + if (frameFactory == m_frameFactory[k]) { + CLOGD2("m_frameFactory index(%d) and index(%d) are same instance, set index(%d) = NULL", i, k, k); + m_frameFactory[k] = NULL; + } + } + + ret = m_stopFrameFactory(m_frameFactory[i]); + if (ret < 0) + CLOGE2("m_frameFactory[%d] stopPipes fail", i); + + CLOGD2("m_frameFactory[%d] stopPipes", i); + } + } + + m_flagStartReprocessingFrameFactory = false; + + if (m_captureSelector != NULL) + m_captureSelector->release(); + if (m_sccCaptureSelector 
!= NULL) + m_sccCaptureSelector->release(); + + /* Wait until duplicateBufferThread stop */ + m_duplicateBufferThread->requestExitAndWait(); + + /* Wait until previewStream3AA/ISPThread stop */ + m_previewStreamVRAThread->requestExitAndWait(); + m_previewStream3AAThread->requestExitAndWait(); + m_previewStreamBayerThread->requestExitAndWait(); + + /* Check queued requests from camera service */ + CLOGV2("ProcessListCount(%d)", list->size()); + + do { + Mutex::Autolock l(m_processLock); + while (!list->empty()) { + r = list->begin()++; + curFrame = *r; + if (curFrame == NULL) { + CLOGE2("curFrame is empty"); + break; + } + + if (curFrame->getFrameType() == FRAME_TYPE_INTERNAL) { + m_releaseInternalFrame(curFrame); + list->erase(r); + curFrame = NULL; + + continue; + } + + request = m_requestMgr->getServiceRequest(curFrame->getFrameCount()); + + if (request == NULL) { + CLOGE2("request is empty"); + list->erase(r); + curFrame->decRef(); + m_frameMgr->deleteFrame(curFrame); + curFrame = NULL; + continue; + } + CLOGV2("framecount(%d)", curFrame->getFrameCount()); + + /* handle notify */ + camera3_notify_msg_t notify; + uint64_t timeStamp = 0L; + timeStamp = request->getSensorTimestamp(); + if (timeStamp == 0L) + timeStamp = m_lastFrametime + 15000000; /* set dummy frame time */ + notify.type = CAMERA3_MSG_SHUTTER; + notify.message.shutter.frame_number = request->getKey(); + notify.message.shutter.timestamp = timeStamp; + resultRequest = m_requestMgr->createResultRequest(curFrame->getFrameCount(), EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY, NULL, ¬ify); + m_requestMgr->callbackSequencerLock(); + m_requestMgr->callbackRequest(resultRequest); + m_requestMgr->callbackSequencerUnlock(); + + request = m_requestMgr->getServiceRequest(curFrame->getFrameCount()); + if (request == NULL) { + CLOGW("WARN(%s[%d]):request is empty", __FUNCTION__, __LINE__); + list->erase(r); + curFrame->decRef(); + m_frameMgr->deleteFrame(curFrame); + curFrame = NULL; + continue; + } + + CLOGV2("framecount(%d)", curFrame->getFrameCount()); + result = request->getResultMeta(); + result.update(ANDROID_SENSOR_TIMESTAMP, (int64_t *)&timeStamp, 1); + request->setResultMeta(result); + + request->getAllRequestOutputStreams(&outputStreamId); + CLOGI2("outputStreamID->size(%d)", outputStreamId->size()); + + if (outputStreamId->size() > 0) { + outputStreamIdIter = outputStreamId->begin(); + bufferIndex = 0; + CLOGI("INFO(%s[%d]):outputStreamId->size(%zu)", + __FUNCTION__, __LINE__, outputStreamId->size()); + for (int i = outputStreamId->size(); i > 0; i--) { + CLOGI("INFO(%s[%d]):i(%d) *outputStreamIdIter(%d)", + __FUNCTION__, __LINE__, i, *outputStreamIdIter); + if (*outputStreamIdIter < 0) + break; + + m_streamManager->getStream(*outputStreamIdIter, &stream); + + if (stream == NULL) { + CLOGE2("stream is NULL"); + ret = INVALID_OPERATION; + return ret; + } + + stream->getID(&streamId); + + ret = stream->getStream(&streamBuffer.stream); + if (ret < 0) { + CLOGE2("getStream is failed, from exynoscamerastream. 
Id error:HAL_STREAM_ID_PREVIEW"); + return ret; + } + + requestIndex = -1; + stream->getBufferManager(&bufferMgr); + ret = bufferMgr->getBuffer(&requestIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &buffer); + if (ret != NO_ERROR) { + CLOGE2("Buffer manager getBuffer fail, frameCount(%d), ret(%d)", curFrame->getFrameCount(), ret); + } + + ret = bufferMgr->getHandleByIndex(&streamBuffer.buffer, buffer.index); + if (ret != OK) { + CLOGE2("Buffer index error(%d)!!", buffer.index); + return ret; + } + + /* handle stream buffers */ + streamBuffer.status = CAMERA3_BUFFER_STATUS_OK; + streamBuffer.acquire_fence = -1; + streamBuffer.release_fence = -1; + + resultRequest = m_requestMgr->createResultRequest(curFrame->getFrameCount(), EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY, NULL, NULL); + resultRequest->pushStreamBuffer(&streamBuffer); + m_requestMgr->callbackSequencerLock(); + request->increaseCompleteBufferCount(); + m_requestMgr->callbackRequest(resultRequest); + m_requestMgr->callbackSequencerUnlock(); + + bufferIndex++; + outputStreamIdIter++; + + } + + } + + CLOGV2("frameCount(%d), request->getNumOfOutputBuffer(%d), result num_output_buffers(%d)", + request->getFrameCount(), request->getNumOfOutputBuffer(), bufferIndex); + + /* frame to complete callback should be removed */ + list->erase(r); + + curFrame->decRef(); + + m_frameMgr->deleteFrame(curFrame); + + curFrame = NULL; + } + } while(0); + + /* Pull all internal buffers */ + for (int bufIndex = 0; bufIndex < m_fliteBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_fliteBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_3aaBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_3aaBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_internalScpBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_internalScpBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_yuvCaptureBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_yuvCaptureBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_vraBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_vraBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_gscBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_gscBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_ispBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_ispBufferMgr, bufIndex); + +func_exit: + CLOGD2("-OUT-"); + return ret; +} + +void ExynosCamera3::dump() +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); +} + +int ExynosCamera3::getCameraId() const +{ + return m_cameraId; +} + +bool ExynosCamera3::m_mainThreadFunc(void) +{ + ExynosCameraRequest *request = NULL; + uint32_t frameCount = 0; + status_t ret = NO_ERROR; + + /* 1. Wait the shot done with the latest framecount */ + ret = m_shotDoneQ->waitAndPopProcessQ(&frameCount); + if (ret < 0) { + if (ret == TIMED_OUT) + CLOGW("WARN(%s[%d]):wait timeout", __FUNCTION__, __LINE__); + else + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return true; + } + + if (isRestarted == true) { + CLOGI("INFO(%s[%d]):wait configure stream", __FUNCTION__, __LINE__); + usleep(1); + + return true; + } + + ret = m_createFrameFunc(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to createFrameFunc. 
Shotdone framecount %d", + __FUNCTION__, __LINE__, frameCount); + + return true; +} + +void ExynosCamera3::m_updateCurrentShot(void) +{ + List::iterator r; + request_info_t *requestInfo = NULL; + ExynosCameraRequest *request = NULL; + struct camera2_shot_ext temp_shot_ext; + status_t ret = NO_ERROR; + int controlInterval = 0; + + /* 1. Get the request info from the back of the list (the newest request) */ + r = m_requestWaitingList.end(); + r--; + requestInfo = *r; + request = requestInfo->request; + + /* 2. Get the metadata from request */ + ret = request->getServiceShot(&temp_shot_ext); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Get service shot fail, requestKey(%d), ret(%d)", + __FUNCTION__, __LINE__, request->getKey(), ret); + } + + /* 3. Store the frameCount that the sensor control is going to be delivered */ + if (requestInfo->sensorControledFrameCount == 0) + requestInfo->sensorControledFrameCount = m_internalFrameCount; + + /* 4. Get the request info from the front of the list (the oldest request) */ + r = m_requestWaitingList.begin(); + requestInfo = *r; + request = requestInfo->request; + + /* 5. Update the entire shot_ext structure */ + ret = request->getServiceShot(m_currentShot); + if (ret != NO_ERROR) { + CLOGV("ERR(%s[%d]):Get service shot fail, requestKey(%d), ret(%d)", + __FUNCTION__, __LINE__, request->getKey(), ret); + } + + /* 6. Overwrite the sensor control metadata to m_currentShot */ + m_currentShot->shot.ctl.sensor.exposureTime = temp_shot_ext.shot.ctl.sensor.exposureTime; + m_currentShot->shot.ctl.sensor.frameDuration = temp_shot_ext.shot.ctl.sensor.frameDuration; + m_currentShot->shot.ctl.sensor.sensitivity = temp_shot_ext.shot.ctl.sensor.sensitivity; + m_currentShot->shot.ctl.lens = temp_shot_ext.shot.ctl.lens; + m_currentShot->shot.ctl.aa.aeMode = temp_shot_ext.shot.ctl.aa.aeMode; + m_currentShot->shot.ctl.aa.aeLock = temp_shot_ext.shot.ctl.aa.aeLock; + m_currentShot->shot.ctl.aa.vendor_isoValue = temp_shot_ext.shot.ctl.aa.vendor_isoValue; + m_currentShot->shot.ctl.aa.vendor_isoMode = temp_shot_ext.shot.ctl.aa.vendor_isoMode; + m_currentShot->shot.ctl.aa.aeExpCompensation = temp_shot_ext.shot.ctl.aa.aeExpCompensation; + + controlInterval = m_internalFrameCount - requestInfo->sensorControledFrameCount; + + /* 7. 
Decide to make the internal frame */ + if (controlInterval < SENSOR_CONTROL_DELAY) { + m_isNeedInternalFrame = true; + m_isNeedRequestFrame = false; + } else if (request->getNeedInternalFrame() == true) { + m_isNeedInternalFrame = true; + m_isNeedRequestFrame = true; + } else { + m_isNeedInternalFrame = false; + m_isNeedRequestFrame = true; + } + + CLOGV2("INFO(%s[%d]):framecount %d needRequestFrame %d needInternalFrame %d", + __FUNCTION__, __LINE__, + m_internalFrameCount, m_isNeedRequestFrame, m_isNeedInternalFrame); + + return; +} + +status_t ExynosCamera3::m_previewframeHandler(ExynosCameraRequest *request, ExynosCamera3FrameFactory *targetfactory) +{ + status_t ret = NO_ERROR; + ExynosCameraFrame *newFrame = NULL; + uint32_t bufferCnt = 0; + uint32_t requestKey = 0; + int32_t bufIndex = -1; + ExynosCameraBuffer buffer; + bool captureFlag = false; + bool rawStreamFlag = false; + bool zslStreamFlag = false; + bool needDynamicBayer = false; + bool usePureBayer = false; + uint32_t frameCount = 0; + int32_t reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + + /* set buffers belonged to each stream as available */ + // TODO: acquire fence + + frameCount = request->getFrameCount(); + requestKey = request->getKey(); + + /* Initialize the request flags in framefactory */ + targetfactory->setRequestFLITE(false); + targetfactory->setRequest3AC(false); + targetfactory->setRequestSCC(false); + targetfactory->setRequestSCP(false); + + m_flagBayerRequest = false; + + /* To decide the dynamic bayer request flag for JPEG capture */ + switch (reprocessingBayerMode) { + case REPROCESSING_BAYER_MODE_NONE : + needDynamicBayer = false; + break; + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON : + targetfactory->setRequest(PIPE_FLITE, true); + needDynamicBayer = false; + usePureBayer = true; + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON : + targetfactory->setRequest(PIPE_3AC, true); + needDynamicBayer = false; + usePureBayer = false; + break; + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC : + needDynamicBayer = true; + usePureBayer = true; + break; + case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC : + needDynamicBayer = true; + usePureBayer = false; + break; + default : + break; + } + + /* Setting DMA-out request flag based on stream configuration */ + bufferCnt = request->getNumOfOutputBuffer(); + + for (size_t i = 0; i < bufferCnt; i++) { + int id = -1; + id = request->getStreamId(i); + + switch (id % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_RAW: + CLOGD2("request(%d) outputBuffer-Index(%zu) buffer-StreamType(HAL_STREAM_ID_RAW) ", request->getKey(), i); + targetfactory->setRequestFLITE(true); + m_flagBayerRequest = true; + rawStreamFlag = true; + + break; + case HAL_STREAM_ID_ZSL_OUTPUT: + CLOGD2("request(%d) outputBuffer-Index(%zu) buffer-StreamType(HAL_STREAM_ID_ZSL)", request->getKey(), i); + if (usePureBayer == true) + targetfactory->setRequest(PIPE_FLITE, true); + else + targetfactory->setRequest(PIPE_3AC, true); + zslStreamFlag = true; + + break; + case HAL_STREAM_ID_VIDEO: + recordingEnabled = true; + case HAL_STREAM_ID_PREVIEW: + case HAL_STREAM_ID_CALLBACK: + CLOGV2("request(%d) outputBuffer-Index(%zu) buffer-StreamType(%d) ", request->getKey(), i, id); + targetfactory->setRequestSCP(true); + + break; + case HAL_STREAM_ID_JPEG: + CLOGD2("request(%d) outputBuffer-Index %zu buffer-StreamType(HAL_STREAM_ID_JPEG)", request->getKey(), i); + captureFlag = true; + if (m_parameters->isReprocessing() == false) { + targetfactory->setRequest(PIPE_3AC, true); + } else if (needDynamicBayer == 
true) { + if(m_parameters->getUsePureBayerReprocessing() == true) + targetfactory->setRequest(PIPE_FLITE, true); + else + targetfactory->setRequest(PIPE_3AC, true); + } + + break; + default: + CLOGE2("Invalid stream ID %d", id); + break; + } + } + + if (m_currentShot == NULL) { + CLOGE2("m_currentShot is NULL. requestKey %d ret %d", request->getKey(), ret); + request->getServiceShot(m_currentShot); + } + + m_updateCropRegion(m_currentShot); + + /* Set framecount into request */ + if (frameCount == 0) { + m_requestMgr->setFrameCount(m_internalFrameCount++, request->getKey()); + frameCount = request->getFrameCount(); + } + + ret = m_generateFrame(frameCount, targetfactory, &m_processList, &m_processLock, &newFrame); + if (ret != NO_ERROR) { + CLOGE2("Failed to generateRequestFrame. framecount %d", frameCount); + goto CLEAN; + } else if (newFrame == NULL) { + CLOGE2("newFrame is NULL. framecount %d", frameCount); + goto CLEAN; + } + + CLOGV2("frameCount:%d , requestKey:%d", frameCount, request->getKey()); + + ret = newFrame->setMetaData(m_currentShot); + if (ret != NO_ERROR) { + CLOGE2("Set metadata to frame fail, Frame count(%d), ret(%d)", frameCount, ret); + } + + newFrame->setFrameCapture(captureFlag); + newFrame->setFrameZsl(zslStreamFlag); + + /* newFrame->printEntity(); */ + if (m_parameters->isFlite3aaOtf() == true) { + /* It is assumed that flite is first, So use newFrame->getFristEntity(). */ + /* TODO: If you want location of flite entity isn't first. Don't use this. */ + if (m_flagBayerRequest == true) { + int bayerPipeId = m_getBayerPipeId(); + + if ((rawStreamFlag == true) && (zslStreamFlag != true)) { + CLOGD2("flite buffer(%d) use Service Bayer buffer", frameCount); + + ret = m_bayerBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &buffer); + if (ret != NO_ERROR) { + CLOGE2("Failed to get Service Bayer Buffer. framecount %d availableBuffer %d", + frameCount, m_bayerBufferMgr->getNumOfAvailableBuffer()); + } else { + newFrame->setFrameServiceBayer(true); + } + } else { + CLOGD2("flite buffer use Internal Bayer buffer"); + + ret = m_fliteBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &buffer); + if (ret != NO_ERROR) { + CLOGE2("getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", bayerPipeId, frameCount, ret); + } + + CLOGV2("Use Internal Bayer Buffer. framecount %d bufferIndex %d", frameCount, buffer.index); + } + + if (bufIndex < 0) { + CLOGW2("Invalid bayer buffer index %d. Skip to pushFrame", bufIndex); + ret = newFrame->setEntityState(bayerPipeId, ENTITY_STATE_COMPLETE); + if (ret != NO_ERROR) + CLOGE2("Failed to setEntityState with COMPLETE to FLITE. framecount %d", frameCount); + } else { + ret = m_setupEntity(bayerPipeId, newFrame, NULL, &buffer); + if (ret != NO_ERROR) { + CLOGE2("Failed to setupEntity with bayer buffer. 
framecount %d bufferIndex %d", + frameCount, buffer.index); + } else { + CLOGD2("PushFrametoPipe PIPE_FLITE framecount(%d)", frameCount); + targetfactory->setOutputFrameQToPipe(m_pipeFrameDoneQ[bayerPipeId], bayerPipeId); + targetfactory->pushFrameToPipe(&newFrame, bayerPipeId); + } + } + } + + m_setupEntity(PIPE_3AA, newFrame); + if (m_parameters->isMcscVraOtf() == true) { + targetfactory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + } else { + targetfactory->setFrameDoneQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + targetfactory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_VRA], PIPE_VRA); + } + targetfactory->pushFrameToPipe(&newFrame, PIPE_3AA); + } else { /* TODO: Implement M2M Path */ + m_setupEntity(PIPE_FLITE_FRONT, newFrame); + targetfactory->pushFrameToPipe(&newFrame, PIPE_FLITE_FRONT); + + m_setupEntity(PIPE_3AA_FRONT, newFrame); + m_setupEntity(PIPE_ISP_FRONT, newFrame); + targetfactory->setOutputFrameQToPipe(m_pipeFrameDoneQ[INDEX(PIPE_ISP_FRONT)], PIPE_ISP_FRONT); + + m_setupEntity(PIPE_SCC_FRONT, newFrame); + targetfactory->pushFrameToPipe(&newFrame, PIPE_SCC_FRONT); + targetfactory->setOutputFrameQToPipe(m_pipeFrameDoneQ[INDEX(PIPE_SCC_FRONT)], PIPE_SCC_FRONT); + + m_setupEntity(PIPE_SCP_FRONT, newFrame); + targetfactory->pushFrameToPipe(&newFrame, PIPE_SCP_FRONT); + targetfactory->setOutputFrameQToPipe(m_pipeFrameDoneQ[INDEX(PIPE_SCP_FRONT)], PIPE_SCP_FRONT); + } + + if (captureFlag == true + && m_parameters->isReprocessing() == false) { + m_captureCount++; + CLOGI2("setFrameCapture(true), frameCount(%d) m_captureCount(%d) isRunningstate(%d)", + frameCount, m_captureCount, m_captureThread->isRunning()); + m_captureQ->pushProcessQ(&newFrame); + } + + if (m_flagBayerRequest == true && m_frameFactoryStartDone == true) { + /* HAL 3.2 on 8MP Full concept is dynamic bayer */ + if (targetfactory->checkPipeThreadRunning(m_getBayerPipeId()) == false) { + m_previewStreamBayerThread->run(PRIORITY_DEFAULT); + targetfactory->startThread(m_getBayerPipeId()); + } + targetfactory->setRequestFLITE(false); + } + +CLEAN: + return ret; +} + +status_t ExynosCamera3::m_captureframeHandler(ExynosCameraRequest *request, ExynosCamera3FrameFactory *targetfactory) +{ + status_t ret = NO_ERROR; + ExynosCameraFrame *newFrame = NULL; + struct camera2_shot_ext shot_ext; + uint32_t bufferCnt = 0; + uint32_t requestKey = 0; + uint32_t frameCount = 0; + bool captureFlag = false; + bool rawStreamFlag = false; + bool zslFlag = false; + bool isNeedThumbnail = false; + + frameCount = request->getFrameCount(); + + CLOGD2("Capture request. 
requestKey %d frameCount %d", + request->getKey(), frameCount); + + if (targetfactory == NULL) { + CLOGE2("targetfactory is NULL"); + return INVALID_OPERATION; + } + + /* set buffers belonged to each stream as available */ + /* wait for reprocessing instance of capture */ + if (m_parameters->isReprocessing() == true) { + if (m_flagStartReprocessingFrameFactory == false) + m_reprocessingFrameFactoryStartThread->join(); + m_startPictureBufferThread->join(); + } + + requestKey = request->getKey(); + + targetfactory->setRequest(PIPE_MCSC0_REPROCESSING, false); + if (m_parameters->isHWFCEnabled() == true) { + targetfactory->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, false); + targetfactory->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, false); + } + + /* set input buffers belonged to each stream as available */ + bufferCnt = request->getNumOfInputBuffer(); + + for (size_t i = 0; i < bufferCnt ; i++) { + int id = request->getStreamId(i); + switch (id % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_ZSL_INPUT: + CLOGD2("requestKey %d buffer-StreamType(HAL_STREAM_ID_ZSL_INPUT)", request->getKey()); + zslFlag = true; + break; + case HAL_STREAM_ID_JPEG: + case HAL_STREAM_ID_PREVIEW: + case HAL_STREAM_ID_VIDEO: + case HAL_STREAM_ID_CALLBACK: + case HAL_STREAM_ID_MAX: + CLOGE2("frameCount %d requestKey %d Invalid buffer-StreamType(%d)", + frameCount, request->getKey(), id); + break; + default: + break; + } + } + + /* set output buffers belonged to each stream as available */ + bufferCnt = request->getNumOfOutputBuffer(); + + for (size_t i = 0; i < bufferCnt; i++) { + int id = request->getStreamId(i); + switch (id % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_JPEG: + CLOGD2("frameCount %d requestKey %d buffer-StreamType(HAL_STREAM_ID_JPEG)", + frameCount, request->getKey()); + targetfactory->setRequest(PIPE_MCSC0_REPROCESSING, true); + + request->getServiceShot(&shot_ext); + isNeedThumbnail = (shot_ext.shot.ctl.jpeg.thumbnailSize[0] > 0 + && shot_ext.shot.ctl.jpeg.thumbnailSize[1] > 0) ? true : false; + + if (m_parameters->isHWFCEnabled() == true) { + if (m_parameters->isUseYuvReprocessingForThumbnail() == false) { + targetfactory->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, true); + } else if (isNeedThumbnail == false) { + targetfactory->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, true); + targetfactory->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, true); + } + } + + captureFlag = true; + break; + case HAL_STREAM_ID_RAW: + CLOGV2("frameCount %d requestKey %d buffer-StreamType(HAL_STREAM_ID_RAW)", + frameCount, request->getKey()); + + rawStreamFlag = true; + break; + case HAL_STREAM_ID_PREVIEW: + case HAL_STREAM_ID_VIDEO: + case HAL_STREAM_ID_CALLBACK: + case HAL_STREAM_ID_MAX: + CLOGE2("frameCount %d requestKey %d Invalid buffer-StreamType(%d)", + frameCount, request->getKey()); + break; + default: + break; + } + } + + if (m_currentShot == NULL) { + CLOGW2("requestKey(%d) m_currentShot is NULL. 
Use request metadata.", request->getKey()); + request->getServiceShot(m_currentShot); + } + + m_updateCropRegion(m_currentShot); + m_updateJpegControlInfo(m_currentShot); + + /* Set framecount into request */ + if (request->getNeedInternalFrame() == true) + /* Must use the same framecount with internal frame */ + m_internalFrameCount--; + + if (frameCount == 0) { + m_requestMgr->setFrameCount(m_internalFrameCount++, request->getKey()); + frameCount = request->getFrameCount(); + } + + ret = m_generateFrame(frameCount, targetfactory, &m_captureProcessList, &m_captureProcessLock, &newFrame); + if (ret != NO_ERROR) { + CLOGE2("m_generateFrame fail"); + return INVALID_OPERATION; + } else if (newFrame == NULL) { + CLOGE2("new frame is NULL"); + return INVALID_OPERATION; + } + + CLOGV2("generate request framecount %d requestKey %d", frameCount, request->getKey()); + + ret = newFrame->setMetaData(m_currentShot); + if (ret != NO_ERROR) + CLOGE2("Set metadata to frame fail, Frame count(%d), ret(%d)", + frameCount, ret); + + newFrame->setFrameServiceBayer(rawStreamFlag); + newFrame->setFrameCapture(captureFlag); + newFrame->setFrameZsl(zslFlag); + + m_selectBayerQ->pushProcessQ(&newFrame); + + if(m_selectBayerThread != NULL + && m_selectBayerThread->isRunning() == false) { + m_selectBayerThread->run(); + CLOGI2("Initiate selectBayerThread (%d)", m_selectBayerThread->getTid()); + } + + return ret; +} + +status_t ExynosCamera3::m_createRequestFrameFunc(ExynosCameraRequest *request) +{ + int32_t factoryAddrIndex = 0; + bool removeDupFlag = false; + + ExynosCamera3FrameFactory *factory = NULL; + ExynosCamera3FrameFactory *factoryAddr[100] ={NULL,}; + FrameFactoryList factorylist; + FrameFactoryListIterator factorylistIter; + factory_handler_t frameCreateHandler; + + // TODO: acquire fence + /* 1. Remove the duplicated frame factory in request */ + factoryAddrIndex = 0; + factorylist.clear(); + + request->getFrameFactoryList(&factorylist); + for (factorylistIter = factorylist.begin(); factorylistIter != factorylist.end(); ) { + removeDupFlag = false; + factory = *factorylistIter; + ALOGV("DEBUG(%s[%d]):list Factory(%p) ", __FUNCTION__, __LINE__, factory); + + for (int i = 0; i < factoryAddrIndex ; i++) { + if (factoryAddr[i] == factory) { + removeDupFlag = true; + break; + } + } + + if (removeDupFlag) { + ALOGV("DEBUG(%s[%d]):remove duplicate Factory factoryAddrIndex(%d)", + __FUNCTION__, __LINE__, factoryAddrIndex); + factorylist.erase(factorylistIter++); + } else { + ALOGV("DEBUG(%s[%d]):add frame factory, factoryAddrIndex(%d)", + __FUNCTION__, __LINE__, factoryAddrIndex); + factoryAddr[factoryAddrIndex] = factory; + factoryAddrIndex++; + factorylistIter++; + } + + } + + /* 2. Call the frame create handler for each frame factory */ + for (int i = 0; i < factoryAddrIndex; i++) { + ALOGV("DEBUG(%s[%d]):framefactory index(%d) maxIndex(%d) (%p)", + __FUNCTION__, __LINE__, i, factoryAddrIndex, factoryAddr[i]); + frameCreateHandler = factoryAddr[i]->getFrameCreateHandler(); + (this->*frameCreateHandler)(request, factoryAddr[i]); + } + + ALOGV("DEBUG(%s[%d]):- OUT - (F:%d)", __FUNCTION__, __LINE__, request->getKey()); + return NO_ERROR; +} + +status_t ExynosCamera3::m_createInternalFrameFunc(void) +{ + status_t ret = NO_ERROR; + uint32_t waitTime = 15 * 1000000; /* 15 msec as a default */ + uint32_t maxFps = 0; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCamera3FrameFactory *factory = NULL; + ExynosCameraFrame *newFrame = NULL; + short retryCount = 4; + + /* 1. 
Generate the internal frame */ + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + if (m_parameters->isFlite3aaOtf() == true) { + factory->setRequestFLITE(false); + } else + factory->setRequestFLITE(true); + + ret = m_generateInternalFrame(m_internalFrameCount++, factory, &m_processList, &m_processLock, &newFrame); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_generateFrame failed", __FUNCTION__, __LINE__); + return ret; + } else if (newFrame == NULL) { + ALOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ALOGV("INFO(%s[%d]):generate internal framecount %d", + __FUNCTION__, __LINE__, + newFrame->getFrameCount()); + + /* 2. Set DMA-out request flag into frame + * 3AP, 3AC, ISP, ISPP, ISPC, SCC, DIS, SCP */ + newFrame->setRequest(false, false, false, false, false, false, false, false); + + switch (m_parameters->getReprocessingBayerMode()) { + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON : + newFrame->setRequest(PIPE_FLITE, true); + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON: + newFrame->setRequest(PIPE_3AC, true); + break; + default: + break; + } + + /* 3. Update the metadata with m_currentShot into frame */ + if (m_currentShot == NULL) { + CLOGE2("ERR(%s[%d]):m_currentShot is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + ret = newFrame->setMetaData(m_currentShot); + if (ret != NO_ERROR) { + CLOGE2("ERR(%s[%d]):Failed to setMetaData with m_currehtShot. framecount %d ret %d", + __FUNCTION__, __LINE__, + newFrame->getFrameCount(), ret); + return ret; + } + + /* 4. Attach the buffers into frame */ + maxFps = m_currentShot->shot.ctl.aa.aeTargetFpsRange[1]; + if (maxFps > 0) + waitTime = (1000 / maxFps) * 1000000 / 2; // Wait for the half of MIN frame duration(MAX fps) + while (retryCount-- > 0) { + if (m_parameters->isFlite3aaOtf() == false) { + ret = m_setupEntity(PIPE_FLITE, newFrame); + if (ret != NO_ERROR) { + ALOGW("WARN(%s[%d]):Get FLITE buffer failed!, framecount(%d), availableFLITEBuffer(%d), sleep(%d ns/%d fps(MAX)), retryCount(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), + m_fliteBufferMgr->getNumOfAvailableBuffer(), waitTime, maxFps, retryCount); + usleep(waitTime); + continue; + } + } else { + ret = m_setupEntity(PIPE_3AA, newFrame); + if (ret != NO_ERROR) { + ret = m_getBufferManager(PIPE_3AA, &bufferMgr, SRC_BUFFER_DIRECTION); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to getBufferManager for 3AA. framecount %d", + __FUNCTION__, __LINE__, newFrame->getFrameCount()); + return ret; + } + + ALOGW("WARN(%s[%d]):Get 3AA buffer failed!, framecount(%d), available3AABuffer(%d), sleep(%d ns/%d fps(MAX)), retryCount(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), + m_3aaBufferMgr->getNumOfAvailableBuffer(), waitTime, maxFps, retryCount); + + usleep(waitTime); + continue; + } + } + break; + } + if (retryCount == 0 && ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Get 3AA buffer finally failed!, framecount(%d), available3AABuffer(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), + m_3aaBufferMgr->getNumOfAvailableBuffer()); + return ret; + } + + /* 5. 
Push the frame to 3AA */ + if (m_parameters->isFlite3aaOtf() == false) { + factory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_FLITE], PIPE_FLITE); + factory->pushFrameToPipe(&newFrame, PIPE_FLITE); + } else { + if (m_parameters->isMcscVraOtf() == true) { + factory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + } else { + factory->setFrameDoneQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + factory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_VRA], PIPE_VRA); + } + factory->pushFrameToPipe(&newFrame, PIPE_3AA); + } + + return ret; +} + +status_t ExynosCamera3::m_createPrepareFrameFunc(__unused ExynosCameraRequest *request) +{ + status_t ret = NO_ERROR; + short retryCount = 4; + uint32_t waitTime = 15 * 1000000; /* 15 msec as as defatul */ + uint32_t maxFps = 0; + uint32_t pipeId = MAX_PIPE_NUM; + uint32_t flitePrepareCnt = m_prepareFliteCnt; + uint32_t taaPrepareCnt = m_exynosconfig->current->pipeInfo.prepare[PIPE_3AA]; + ExynosCamera3FrameFactory *factory = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraStream *stream = NULL; + + ExynosCameraBufferManager *taaBufferManager[MAX_NODE]; + ExynosCameraBufferManager *ispBufferManager[MAX_NODE]; + ExynosCameraBufferManager *disBufferManager[MAX_NODE]; + ExynosCameraBufferManager *vraBufferManager[MAX_NODE]; + + for (int i = 0; i < MAX_NODE; i++) { + taaBufferManager[i] = NULL; + ispBufferManager[i] = NULL; + disBufferManager[i] = NULL; + vraBufferManager[i] = NULL; + } + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + + if (m_factoryStartFlag == true) { + m_factoryStartFlag = false; + + ret = factory->initPipes(); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_previewFrameFactory->initPipes() failed", __FUNCTION__, __LINE__); + return ret; + } + + if (m_parameters->getTpuEnabledMode() == true) { +#if 0 + factory->setRequestISPP(true); + factory->setRequestDIS(true); + + if (m_parameters->is3aaIspOtf() == true) { + taaBufferManager[factory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_3AC)] = m_fliteBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_ISPP)] = m_hwDisBufferMgr; + + disBufferManager[factory->getNodeType(PIPE_DIS)] = m_hwDisBufferMgr; + disBufferManager[factory->getNodeType(PIPE_SCP)] = scpBufferMgr; + } else { + taaBufferManager[factory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_3AC)] = m_fliteBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_3AP)] = m_ispBufferMgr; + + ispBufferManager[factory->getNodeType(PIPE_ISP)] = m_ispBufferMgr; + ispBufferManager[factory->getNodeType(PIPE_ISPP)] = m_hwDisBufferMgr; + + disBufferManager[factory->getNodeType(PIPE_DIS)] = m_hwDisBufferMgr; + disBufferManager[factory->getNodeType(PIPE_SCP)] = scpBufferMgr; + } +#endif + } else { + factory->setRequestISPP(false); + factory->setRequestDIS(false); + + if (m_parameters->is3aaIspOtf() == true) { + if (m_parameters->getDualMode() == true && getCameraId() == CAMERA_ID_FRONT) { + factory->setRequestFLITE(true); + factory->setRequestISPC(true); + + taaBufferManager[factory->getNodeType(PIPE_3AA)] = m_fliteBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_ISPC)] = m_yuvCaptureBufferMgr; + } else { + + taaBufferManager[factory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + if (m_parameters->isUsing3acForIspc() == true) + taaBufferManager[factory->getNodeType(PIPE_3AC)] = m_yuvCaptureBufferMgr; +#ifndef RAWDUMP_CAPTURE + else + taaBufferManager[factory->getNodeType(PIPE_3AC)] = m_fliteBufferMgr; +#endif + 
taaBufferManager[factory->getNodeType(PIPE_SCP)] = m_internalScpBufferMgr; + } + } else { + taaBufferManager[factory->getNodeType(PIPE_3AA)] = m_3aaBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_3AC)] = m_fliteBufferMgr; + taaBufferManager[factory->getNodeType(PIPE_3AP)] = m_ispBufferMgr; + + ispBufferManager[factory->getNodeType(PIPE_ISP)] = m_ispBufferMgr; + ispBufferManager[factory->getNodeType(PIPE_SCP)] = m_internalScpBufferMgr; + } + } + + if (m_parameters->isMcscVraOtf() == false) { + vraBufferManager[OUTPUT_NODE] = m_vraBufferMgr; + + ret = factory->setBufferManagerToPipe(vraBufferManager, PIPE_VRA); + if (ret != NO_ERROR) { + CLOGE2("m_previewFrameFactory->setBufferManagerToPipe(vraBufferManager, %d) failed", PIPE_VRA); + return ret; + } + } + + for (int i = 0; i < MAX_NODE; i++) { + /* If even one buffer slot is valid. call setBufferManagerToPipe() */ + if (taaBufferManager[i] != NULL) { + ret = factory->setBufferManagerToPipe(taaBufferManager, PIPE_3AA); + if (ret != NO_ERROR) { + CLOGE2("m_previewFrameFactory->setBufferManagerToPipe(taaBufferManager, %d) failed", PIPE_3AA); + return ret; + } + break; + } + } + + for (int i = 0; i < MAX_NODE; i++) { + /* If even one buffer slot is valid. call setBufferManagerToPipe() */ + if (ispBufferManager[i] != NULL) { + ret = factory->setBufferManagerToPipe(ispBufferManager, PIPE_ISP); + if (ret != NO_ERROR) { + CLOGE2("m_previewFrameFactory->setBufferManagerToPipe(ispBufferManager, %d) failed", PIPE_ISP); + return ret; + } + break; + } + } + + if (m_parameters->getHWVdisMode()) { + for (int i = 0; i < MAX_NODE; i++) { + /* If even one buffer slot is valid. call setBufferManagerToPipe() */ + if (disBufferManager[i] != NULL) { + ret = factory->setBufferManagerToPipe(disBufferManager, PIPE_DIS); + if (ret != NO_ERROR) { + CLOGE2("m_previewFrameFactory->setBufferManagerToPipe(disBufferManager, %d) failed", PIPE_DIS); + return ret; + } + break; + } + } + } + + } + + /* 1. Generate the internal frame */ + if (m_parameters->isFlite3aaOtf() == true) { + factory->setRequestFLITE(false); + pipeId = PIPE_3AA; + } else { + factory->setRequestFLITE(true); + pipeId = PIPE_FLITE; + } + ret = m_generateInternalFrame(m_internalFrameCount++, factory, &m_processList, &m_processLock, &newFrame); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_generateFrame failed", __FUNCTION__, __LINE__); + return ret; + } else if (newFrame == NULL) { + ALOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* newFrame->dump(); */ + + ALOGI("INFO(%s[%d]):generate prepare framecount %d", + __FUNCTION__, __LINE__, + newFrame->getFrameCount()); + + /* 2. Set DMA-out request flag into frame + * 3AP, 3AC, ISP, ISPP, ISPC, SCC, DIS, SCP */ + /* newFrame->setRequest(false, false, false, false, false, false, false, false); */ +#if 0 + if (m_parameters->is3aaIspOtf() == false) { + newFrame->setRequest(PIPE_3AP, true); + taaBufferManager[OUTPUT_NODE] = m_3aaBufferMgr; + taaBufferManager[SUB_NODE] = m_ispBufferMgr; + ret = factory->setBufferManagerToPipe(taaBufferManager, PIPE_3AA); + if (ret < 0) { + ALOGE("ERR(%s):m_previewFrameFactory->setBufferManagerToPipe() failed", __FUNCTION__); + return ret; + } + } +#endif + + /* 3. Update the metadata with m_currentShot into frame */ + ret = newFrame->setMetaData(m_currentShot); + + /* 4. 
Attach the buffers into frame */ + maxFps = m_currentShot->shot.ctl.aa.aeTargetFpsRange[1]; + if (maxFps > 0) + waitTime = (1000 / maxFps) * 1000000 / 2; // Wait for the half of MIN frame duration(MAX fps) + while (retryCount-- > 0) { + ret = m_setupEntity(pipeId, newFrame); + if (ret != NO_ERROR) { + ALOGW("WARN(%s[%d]):Get %s buffer failed!, framecount(%d), availableFLITEBuffer(%d), sleep(%d ns/%d fps(MAX)), retryCount(%d)", + __FUNCTION__, __LINE__, + (pipeId == PIPE_FLITE)?"PIPE_FLITE":"PIPE_3AA", + newFrame->getFrameCount(), + m_fliteBufferMgr->getNumOfAvailableBuffer(), waitTime, maxFps, retryCount); + usleep(waitTime); + continue; + } + break; + } + if (retryCount == 0 && ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Get %s buffer finally failed!, framecount(%d), available3AABuffer(%d)", + __FUNCTION__, __LINE__, + (pipeId == PIPE_FLITE)?"PIPE_FLITE":"PIPE_3AA", + newFrame->getFrameCount(), + m_3aaBufferMgr->getNumOfAvailableBuffer()); + return ret; + } + + /* 5. Push the frame to 3AA */ + if (m_parameters->isMcscVraOtf() == true) { + factory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + } else { + factory->setFrameDoneQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + factory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_VRA], PIPE_VRA); + } + factory->pushFrameToPipe(&newFrame, pipeId); + + /* before starting framefactory, we should set bayer prepare buffer count (max prepare count depend on 3aa count) */ + if ((factory->checkPipeThreadRunning(m_getBayerPipeId()) == false) && + (taaPrepareCnt - 1) >= (newFrame->getFrameCount()) && + m_parameters->isFlite3aaOtf() == false && + m_flagBayerRequest == true) { + flitePrepareCnt++; + if (flitePrepareCnt != m_prepareFliteCnt) + ALOGI("INFO(%s[%d]):adjust PIPE_FLITE prepare count(%d => %d)", __FUNCTION__, __LINE__, + m_prepareFliteCnt, flitePrepareCnt); + m_prepareFliteCnt = flitePrepareCnt; + } + + return ret; +} + +status_t ExynosCamera3::m_createFrameFunc(void) +{ + status_t ret = NO_ERROR; + ExynosCameraRequest *request = NULL; + int key = 0; + /* 1. Get new service request from request manager */ + if (m_requestMgr->getServiceRequestCount() > 0) { + m_popRequest(&request); + key = request->getKey(); + } + ALOGV("Service Request Count(%d) Total Request Count(%d)", + m_requestMgr->getServiceRequestCount(),m_requestMgr->getRequestCount()); + + /* 2. Push back the new service request into m_requestWaitingList */ + /* Warning : + * The List APIs for 'm_requestWaitingList' are called sequencially. + * So the mutex is not required. + * If the 'm_requestWaitingList' will be accessed by another thread, + * using mutex must be considered. + */ + if (request != NULL) { + request_info_t *requestInfo = new request_info_t; + requestInfo->request = request; + requestInfo->sensorControledFrameCount = 0; + m_requestWaitingList.push_back(requestInfo); + } + + /* 3. Update the current shot */ + if (m_requestWaitingList.size() > 0) + m_updateCurrentShot(); + + ALOGV("DEBUG(%s[%d]):Create New Frame %d Key %d needRequestFrame %d needInternalFrame %d waitingSize %d", + __FUNCTION__, __LINE__, + m_internalFrameCount, key, m_isNeedRequestFrame, m_isNeedInternalFrame, m_requestWaitingList.size()); + + /* 4. 
Select the frame creation logic between request frame and internal frame */ + if (m_isNeedInternalFrame == true || m_requestWaitingList.empty() == true) { + + m_createInternalFrameFunc(); + } + if (m_isNeedRequestFrame == true && m_requestWaitingList.empty() == false) { + List::iterator r; + request_info_t *requestInfo = NULL; + + r = m_requestWaitingList.begin(); + requestInfo = *r; + request = requestInfo->request; + + m_createRequestFrameFunc(request); + + m_requestWaitingList.erase(r); + delete requestInfo; + } + + return ret; +} + +status_t ExynosCamera3::m_sendRawCaptureResult(ExynosCameraFrame *frame, uint32_t pipeId, bool isSrc) +{ + status_t ret = NO_ERROR; + ExynosCameraStream *stream = NULL; + ExynosCameraRequest *request = NULL; + ExynosCameraBuffer buffer; + camera3_stream_buffer_t streamBuffer; + ResultRequest resultRequest = NULL; + + /* 1. Get stream object for RAW */ + ret = m_streamManager->getStream(HAL_STREAM_ID_RAW, &stream); + if (ret < 0) { + ALOGE("ERR(%s[%d]):getStream is failed, from streammanager. Id error:HAL_STREAM_ID_RAW", + __FUNCTION__, __LINE__); + return ret; + } + + /* 2. Get camera3_stream structure from stream object */ + ret = stream->getStream(&streamBuffer.stream); + if (ret < 0) { + ALOGE("ERR(%s[%d]):getStream is failed, from exynoscamerastream. Id error:HAL_STREAM_ID_RAW", + __FUNCTION__, __LINE__); + return ret; + } + + /* 3. Get the bayer buffer from frame */ + if (isSrc == true) + ret = frame->getSrcBuffer(pipeId, &buffer); + else + ret = frame->getDstBuffer(pipeId, &buffer); + if (ret < 0) { + ALOGE("ERR(%s[%d]):Get bayer buffer failed, framecount(%d), isSrc(%d), pipeId(%d)", + __FUNCTION__, __LINE__, + frame->getFrameCount(), isSrc, pipeId); + return ret; + } + + /* 4. Get the service buffer handle from buffer manager */ + ret = m_bayerBufferMgr->getHandleByIndex(&(streamBuffer.buffer), buffer.index); + if (ret < 0) { + ALOGE("ERR(%s[%d]):Buffer index error(%d)!!", __FUNCTION__, __LINE__, buffer.index); + return ret; + } + + /* 5. Update the remained buffer info */ + streamBuffer.status = CAMERA3_BUFFER_STATUS_OK; + streamBuffer.acquire_fence = -1; + streamBuffer.release_fence = -1; + + /* 6. Create new result for RAW buffer */ + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + resultRequest = m_requestMgr->createResultRequest(frame->getFrameCount(), EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY, NULL, NULL); + resultRequest->pushStreamBuffer(&streamBuffer); + + /* 7. Request to callback the result to request manager */ + m_requestMgr->callbackSequencerLock(); + request->increaseCompleteBufferCount(); + m_requestMgr->callbackRequest(resultRequest); + m_requestMgr->callbackSequencerUnlock(); + +#if 0 + /* 8. Send the bayer buffer to frame selector */ + if (frame->getFrameCapture() == true) { + ALOGD("DEBUG(%s[%d]):Send the service bayer buffer to m_sccCaptureSelector. 
frameCount(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + ret = m_sccCaptureSelector->manageFrameHoldList(frame, pipeId, isSrc); + if (ret < 0) { + ALOGE("ERR(%s[%d]):manageFrameHoldList failed, frameCount(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + return ret; + } + } +#endif + ALOGE("DEBUG(%s[%d]):request->frame_number(%d), request->getNumOfOutputBuffer(%d) request->getCompleteBufferCount(%d) frame->getFrameCapture(%d)", + __FUNCTION__, __LINE__, + request->getKey(), + request->getNumOfOutputBuffer(), + request->getCompleteBufferCount(), + frame->getFrameCapture()); + + ALOGV("DEBUG(%s[%d]):streamBuffer info: stream (%p), handle(%p)", + __FUNCTION__, __LINE__, + streamBuffer.stream, streamBuffer.buffer); + + return ret; +} + +status_t ExynosCamera3::m_sendZSLCaptureResult(ExynosCameraFrame *frame, __unused uint32_t pipeId, __unused bool isSrc) +{ + status_t ret = NO_ERROR; + ExynosCameraStream *stream = NULL; + ExynosCameraRequest *request = NULL; + camera3_stream_buffer_t streamBuffer; + ResultRequest resultRequest = NULL; + const camera3_stream_buffer_t *buffer; + const camera3_stream_buffer_t *bufferList; + int streamId = 0; + uint32_t bufferCount = 0; + + + /* 1. Get stream object for ZSL */ + ret = m_streamManager->getStream(HAL_STREAM_ID_ZSL_OUTPUT, &stream); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getStream is failed, from streammanager. Id error:HAL_STREAM_ID_ZSL", + __FUNCTION__, __LINE__); + return ret; + } + + /* 2. Get camera3_stream structure from stream object */ + ret = stream->getStream(&streamBuffer.stream); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getStream is failed, from exynoscamerastream. Id error:HAL_STREAM_ID_RAW", + __FUNCTION__, __LINE__); + return ret; + } + + /* 3. Get zsl buffer */ + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + bufferCount = request->getNumOfOutputBuffer(); + bufferList = request->getOutputBuffers(); + + for (uint32_t index = 0; index < bufferCount; index++) { + buffer = &(bufferList[index]); + stream = static_cast(bufferList[index].stream->priv); + stream->getID(&streamId); + + if ((streamId % HAL_STREAM_ID_MAX) == HAL_STREAM_ID_ZSL_OUTPUT) { + streamBuffer.buffer = bufferList[index].buffer; + } + } + + /* 4. Update the remained buffer info */ + streamBuffer.status = CAMERA3_BUFFER_STATUS_OK; + streamBuffer.acquire_fence = -1; + streamBuffer.release_fence = -1; + + /* 5. Create new result for ZSL buffer */ + resultRequest = m_requestMgr->createResultRequest(frame->getFrameCount(), EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY, NULL, NULL); + resultRequest->pushStreamBuffer(&streamBuffer); + + /* 6. 
+    m_requestMgr->callbackSequencerLock();
+    request->increaseCompleteBufferCount();
+    m_requestMgr->callbackRequest(resultRequest);
+    m_requestMgr->callbackSequencerUnlock();
+
+    CLOGD("DEBUG(%s[%d]):request->frame_number(%d), request->getNumOfOutputBuffer(%d) request->getCompleteBufferCount(%d) frame->getFrameCapture(%d)",
+            __FUNCTION__, __LINE__,
+            request->getKey(),
+            request->getNumOfOutputBuffer(),
+            request->getCompleteBufferCount(),
+            frame->getFrameCapture());
+
+    CLOGV("DEBUG(%s[%d]):streamBuffer info: stream (%p), handle(%p)",
+            __FUNCTION__, __LINE__,
+            streamBuffer.stream, streamBuffer.buffer);
+
+    return ret;
+}
+
+status_t ExynosCamera3::m_sendNotify(uint32_t frameNumber, int type)
+{
+    camera3_notify_msg_t notify;
+    ResultRequest resultRequest = NULL;
+    ExynosCameraRequest *request = NULL;
+    uint32_t frameCount = 0;
+    nsecs_t timeStamp = m_lastFrametime;
+
+    status_t ret = OK;
+    request = m_requestMgr->getServiceRequest(frameNumber);
+    frameCount = request->getKey();
+    timeStamp = request->getSensorTimestamp();
+
+    CLOGV2("(%d)frame t(%lld), key : %d", frameCount, timeStamp, frameCount);
+    switch (type) {
+    case CAMERA3_MSG_ERROR:
+        notify.type = CAMERA3_MSG_ERROR;
+        notify.message.error.frame_number = frameCount;
+        notify.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+        // TODO: how can we handle this?
+        //msg.message.error.error_stream = j->stream;
+        resultRequest = m_requestMgr->createResultRequest(frameNumber, EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY, NULL, &notify);
+        m_requestMgr->callbackSequencerLock();
+        m_requestMgr->callbackRequest(resultRequest);
+        m_requestMgr->callbackSequencerUnlock();
+        break;
+    case CAMERA3_MSG_SHUTTER:
+        CLOGV2("SHUTTER (%d)frame t(%lld)", frameNumber, timeStamp);
+        notify.type = CAMERA3_MSG_SHUTTER;
+        notify.message.shutter.frame_number = frameCount;
+        notify.message.shutter.timestamp = timeStamp;
+        resultRequest = m_requestMgr->createResultRequest(frameNumber, EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY, NULL, &notify);
+        /* keep current frame time for flush ops */
+        m_requestMgr->callbackSequencerLock();
+        m_requestMgr->callbackRequest(resultRequest);
+        m_requestMgr->callbackSequencerUnlock();
+        m_captureResultDoneCondition.signal();
+        break;
+    default:
+        CLOGE2("Msg type is invalid (%d)", type);
+        ret = BAD_VALUE;
+        break;
+    }
+
+    return ret;
+}
+
+status_t ExynosCamera3::m_searchFrameFromList(List<ExynosCameraFrame *> *list, Mutex *listLock, uint32_t frameCount, ExynosCameraFrame **frame)
+{
+    ExynosCameraFrame *curFrame = NULL;
+    List<ExynosCameraFrame *>::iterator r;
+
+    Mutex::Autolock l(listLock);
+    if (list->empty()) {
+        CLOGD2("list is empty");
+        return NO_ERROR;
+    }
+
+    r = list->begin()++;
+
+    do {
+        curFrame = *r;
+        if (curFrame == NULL) {
+            CLOGE2("curFrame is empty");
+            return INVALID_OPERATION;
+        }
+
+        if (frameCount == curFrame->getFrameCount()) {
+            CLOGV2("frame count match: expected(%d)", frameCount);
+            *frame = curFrame;
+            return NO_ERROR;
+        }
+        r++;
+    } while (r != list->end());
+
+    CLOGV2("Cannot find match frame, frameCount(%d)", frameCount);
+
+    return OK;
+}
+
+status_t ExynosCamera3::m_removeFrameFromList(List<ExynosCameraFrame *> *list, Mutex *listLock, ExynosCameraFrame *frame)
+{
+    ExynosCameraFrame *curFrame = NULL;
+    int frameCount = 0;
+    int curFrameCount = 0;
+    List<ExynosCameraFrame *>::iterator r;
+
+    if (frame == NULL) {
+        CLOGE2("frame is NULL");
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock l(listLock);
+    if (list->empty()) {
+        CLOGE2("list is empty");
+        return INVALID_OPERATION;
+    }
+
+    frameCount = frame->getFrameCount();
+    r = list->begin()++;
+
+    do {
+        curFrame = *r;
+        if (curFrame == NULL) {
+            CLOGE2("curFrame is empty");
+            return INVALID_OPERATION;
+        }
+
+        curFrameCount = curFrame->getFrameCount();
+        if (frameCount == curFrameCount) {
+            CLOGV2("frame count match: expected(%d), current(%d)", frameCount, curFrameCount);
+            list->erase(r);
+            return NO_ERROR;
+        }
+        CLOGW2("frame count mismatch: expected(%d), current(%d)", frameCount, curFrameCount);
+        /* curFrame->printEntity(); */
+        r++;
+    } while (r != list->end());
+
+    CLOGE2("Cannot find match frame!!!");
+
+    return INVALID_OPERATION;
+}
+
+status_t ExynosCamera3::m_clearList(List<ExynosCameraFrame *> *list, Mutex *listLock)
+{
+    ExynosCameraFrame *curFrame = NULL;
+    List<ExynosCameraFrame *>::iterator r;
+
+    CLOGD2("remaining frame(%zu), we remove them all", list->size());
+
+    Mutex::Autolock l(listLock);
+    while (!list->empty()) {
+        r = list->begin()++;
+        curFrame = *r;
+        if (curFrame != NULL) {
+            CLOGV2("remove frame count(%d)", curFrame->getFrameCount());
+            curFrame->decRef();
+            m_frameMgr->deleteFrame(curFrame);
+        }
+        list->erase(r);
+    }
+
+    return OK;
+}
+
+status_t ExynosCamera3::m_removeInternalFrames(List<ExynosCameraFrame *> *list, Mutex *listLock)
+{
+    ExynosCameraFrame *curFrame = NULL;
+    List<ExynosCameraFrame *>::iterator r;
+
+    ALOGD("DEBUG(%s[%d]):remaining frame(%zu), we remove internal frames",
+            __FUNCTION__, __LINE__, list->size());
+
+    Mutex::Autolock l(listLock);
+    while (!list->empty()) {
+        r = list->begin()++;
+        curFrame = *r;
+        if (curFrame != NULL) {
+            if (curFrame->getFrameType() == FRAME_TYPE_INTERNAL) {
+                ALOGV("DEBUG(%s[%d]):remove internal frame(%d)",
+                        __FUNCTION__, __LINE__, curFrame->getFrameCount());
+                m_releaseInternalFrame(curFrame);
+            } else {
+                ALOGW("WARN(%s[%d]):frame(%d) is NOT internal frame and will remain in the List",
+                        __FUNCTION__, __LINE__, curFrame->getFrameCount());
+            }
+        }
+        list->erase(r);
+        curFrame = NULL;
+    }
+
+    return OK;
+}
+
+status_t ExynosCamera3::m_releaseInternalFrame(ExynosCameraFrame *frame)
+{
+    status_t ret = NO_ERROR;
+    ExynosCameraBuffer buffer;
+    ExynosCameraBufferManager *bufferMgr = NULL;
+
+    if (frame == NULL) {
+        ALOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__);
+        return BAD_VALUE;
+    }
+    if (frame->getFrameType() != FRAME_TYPE_INTERNAL) {
+        ALOGE("ERR(%s[%d]):frame(%d) is NOT internal frame",
+                __FUNCTION__, __LINE__, frame->getFrameCount());
+        return BAD_VALUE;
+    }
+
+    /* Return bayer buffer */
+    if (m_parameters->isFlite3aaOtf() == false) {
+        ret = frame->getDstBuffer(PIPE_FLITE, &buffer);
+        if (ret != NO_ERROR) {
+            ALOGE("ERR(%s[%d]):getDstBuffer failed. PIPE_FLITE, ret %d",
+                    __FUNCTION__, __LINE__, ret);
+        } else if (buffer.index >= 0) {
+            ret = m_getBufferManager(PIPE_FLITE, &bufferMgr, DST_BUFFER_DIRECTION);
+            if (ret != NO_ERROR) {
+                ALOGE("ERR(%s[%d]):Failed to getBufferManager for FLITE",
+                        __FUNCTION__, __LINE__);
+            } else {
+                ret = m_putBuffers(bufferMgr, buffer.index);
+                if (ret != NO_ERROR)
+                    ALOGE("ERR(%s[%d]):Failed to putBuffer for FLITE. index %d",
+                            __FUNCTION__, __LINE__, buffer.index);
+            }
+        }
+    }
+
+    /* Return 3AS buffer */
+    ret = frame->getSrcBuffer(PIPE_3AA, &buffer);
+    if (ret != NO_ERROR) {
+        ALOGE("ERR(%s[%d]):getSrcBuffer failed. PIPE_3AA, ret(%d)",
+                __FUNCTION__, __LINE__, ret);
+    } else if (buffer.index >= 0) {
+        ret = m_getBufferManager(PIPE_3AA, &bufferMgr, SRC_BUFFER_DIRECTION);
+        if (ret != NO_ERROR) {
+            ALOGE("ERR(%s[%d]):Failed to getBufferManager for 3AS",
+                    __FUNCTION__, __LINE__);
+        } else {
+            ret = m_putBuffers(bufferMgr, buffer.index);
+            if (ret != NO_ERROR)
+                ALOGE("ERR(%s[%d]):Failed to putBuffer for 3AS. index %d",
index %d", + __FUNCTION__, __LINE__, buffer.index); + } + } + + /* Return 3AP buffer */ + if (frame->getRequest(PIPE_3AP) == true) { + ret = frame->getDstBuffer(PIPE_3AA, &buffer); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):getDstBuffer failed. PIPE_3AA, ret %d", + __FUNCTION__, __LINE__, ret); + } else if (buffer.index >= 0) { + ret = m_getBufferManager(PIPE_3AA, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Failed to getBufferManager for 3AP", + __FUNCTION__, __LINE__); + } else { + ret = m_putBuffers(bufferMgr, buffer.index); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):Failed to putBuffer for 3AP. index %d", + __FUNCTION__, __LINE__, buffer.index); + } + } + } + + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + + return ret; +} + +status_t ExynosCamera3::m_setFrameManager() +{ + sp worker; + m_frameMgr = new ExynosCameraFrameManager("FRAME MANAGER", m_cameraId, FRAMEMGR_OPER::SLIENT, 50, 100); + + worker = new CreateWorker("CREATE FRAME WORKER", m_cameraId, FRAMEMGR_OPER::SLIENT, 40); + m_frameMgr->setWorker(FRAMEMGR_WORKER::CREATE, worker); + + worker = new DeleteWorker("DELETE FRAME WORKER", m_cameraId, FRAMEMGR_OPER::SLIENT); + m_frameMgr->setWorker(FRAMEMGR_WORKER::DELETE, worker); + + sp key = new KeyBox("FRAME KEYBOX", m_cameraId); + + m_frameMgr->setKeybox(key); + + return NO_ERROR; +} + +bool ExynosCamera3::m_frameFactoryCreateThreadFunc(void) +{ + +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + bool loop = false; + status_t ret = NO_ERROR; + + ExynosCamera3FrameFactory *framefactory = NULL; + + ret = m_frameFactoryQ->waitAndPopProcessQ(&framefactory); + if (ret < 0) { + CLOGE2("wait and pop fail, ret(%d)", ret); + goto func_exit; + } + + if (framefactory == NULL) { + CLOGE2("framefactory is NULL"); + goto func_exit; + } + + if (framefactory->isCreated() == false) { + CLOGD2("framefactory create"); + framefactory->create(); + } else { + CLOGD2("framefactory already create"); + } + +func_exit: + if (0 < m_frameFactoryQ->getSizeOfProcessQ()) { + loop = true; + } + + return loop; +} + +bool ExynosCamera3::m_frameFactoryStartThreadFunc(void) +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + status_t ret = NO_ERROR; + ExynosCamera3FrameFactory *factory = NULL; + ExynosCameraRequest *request = NULL; + uint32_t prepare = 1; + + if (m_requestMgr->getServiceRequestCount() < 1) { + ALOGE("ERR(%s[%d]):There is NO available request!!! \"processCaptureRequest()\" must be called, first!!!", __FUNCTION__, __LINE__); + return false; + } + + /* 1. Get the first request from the request manager */ + m_popRequest(&request); + if (request == NULL) { + ALOGE("ERR(%s[%d]):request is NULL", __FUNCTION__, __LINE__); + } else { + request_info_t *requestInfo = new request_info_t; + requestInfo->request = request; + requestInfo->sensorControledFrameCount = 0; + m_requestWaitingList.push_back(requestInfo); + } + + /* 2. Get the initial metadata from request */ + ret = request->getServiceShot(m_currentShot); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Failed to getServiceShot. requestKey %d ret %d", + __FUNCTION__, __LINE__, request->getKey(), ret); + return false; + } + + m_internalFrameCount = 0; + prepare = m_exynosconfig->current->pipeInfo.prepare[PIPE_3AA]; + + ALOGD("DEBUG(%s[%d]):prepare %d", __FUNCTION__, __LINE__, prepare); + + /* 3. 
Push the prepare frame into 3AA Pipe + * - call initPipes() + */ + m_createPrepareFrameFunc(request); + for (uint32_t i = 0; i < prepare; i++) { + ret = m_createFrameFunc(); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):Failed to createFrameFunc for preparing frame. prepareCount %d/%d", + __FUNCTION__, __LINE__, i, prepare); + } + + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + if (factory != NULL) { + /* - call preparePipes(); + * - call startPipes() + * - call startInitialThreads() + */ + if (m_flagStartFrameFactory == false) + m_startFrameFactory(factory); + + if (m_shotDoneQ != NULL) + m_shotDoneQ->release(); + + for (int i = 0; i < MAX_PIPE_NUM; i++) { + if (m_pipeFrameDoneQ[i] != NULL) { + m_pipeFrameDoneQ[i]->release(); + } + } + m_reprocessingDoneQ->release(); + m_pipeCaptureFrameDoneQ->release(); + m_mainThread->run(PRIORITY_URGENT_DISPLAY); + + m_previewStream3AAThread->run(PRIORITY_DEFAULT); + if (m_parameters->isMcscVraOtf() == false) + m_previewStreamVRAThread->run(PRIORITY_DEFAULT); + if (m_flagBayerRequest == true) + m_previewStreamBayerThread->run(PRIORITY_DEFAULT); + m_duplicateBufferThread->run(PRIORITY_DEFAULT); + m_monitorThread->run(PRIORITY_DEFAULT); + + m_internalFrameThread->run(PRIORITY_DEFAULT); + +#ifdef USE_INTERNAL_FRAME + /* internal frame */ + m_internalFrameHandlerThread->run(PRIORITY_DEFAULT); +#endif /* #ifdef USE_INTERNAL_FRAME */ + if (m_flagBayerRequest == true + && factory->checkPipeThreadRunning(m_getBayerPipeId()) == false) + factory->startThread(m_getBayerPipeId()); + } else { + CLOGE2("Can't start FrameFactory!!!! FrameFactory is NULL!! Prepare(%d), Request(%d)", + prepare, m_requestMgr != NULL ? m_requestMgr->getRequestCount(): 0); + return false; + } + m_frameFactoryStartDone = true; + + return false; +} + +status_t ExynosCamera3::m_constructFrameFactory(void) +{ + CLOGI2("-IN-"); + + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + status_t ret = NO_ERROR; + ExynosCamera3FrameFactory *factory = NULL; + + for(int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) + m_frameFactory[i] = NULL; + + factory = new ExynosCamera3FrameFactoryPreview(m_cameraId, m_parameters); + factory->setFrameCreateHandler(&ExynosCamera3::m_previewframeHandler); + m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW] = factory; + m_frameFactory[FRAME_FACTORY_TYPE_RECORDING_PREVIEW] = factory; + + m_frameFactory[FRAME_FACTORY_TYPE_DUAL_PREVIEW] = factory; + + if (m_parameters->isReprocessing() == true) { + factory = new ExynosCamera3FrameReprocessingFactory(m_cameraId, m_parameters); + factory->setFrameCreateHandler(&ExynosCamera3::m_captureframeHandler); + m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING] = factory; + } + + for (int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) { + factory = m_frameFactory[i]; + if ((factory != NULL) && (factory->isCreated() == false)) { + factory->setFrameManager(m_frameMgr); + m_frameFactoryQ->pushProcessQ(&factory); + } + } + + CLOGI2("-OUT-"); + + return ret; +} + +status_t ExynosCamera3::m_startFrameFactory(ExynosCamera3FrameFactory *factory) +{ + status_t ret = OK; + + uint32_t flitePrepareCnt = m_prepareFliteCnt; + CLOGD2("flitePrepareCnt:%d", flitePrepareCnt); + + /* prepare pipes */ +#if !defined(ENABLE_FULL_FRAME) + ret = factory->preparePipes(flitePrepareCnt); +#else + ret = factory->preparePipes(); +#endif + + if (ret < 0) { + CLOGW("ERR(%s[%d]):Failed to prepare FLITE", __FUNCTION__, __LINE__); + } + + /* s_ctrl HAL version for selecting dvfs table */ + ret = factory->setControl(V4L2_CID_IS_HAL_VERSION, IS_HAL_VER_3_2, PIPE_3AA); 
+ + if (ret < 0) + CLOGW2("V4L2_CID_IS_HAL_VERSION is fail"); + + /* stream on pipes */ + ret = factory->startPipes(); + if (ret < 0) { + CLOGE2("startPipe fail"); + return ret; + } + + /* start all thread */ + ret = factory->startInitialThreads(); + if (ret < 0) { + CLOGE2("startInitialThreads fail"); + return ret; + } + + m_flagStartFrameFactory = true; + + return ret; +} + +status_t ExynosCamera3::m_stopFrameFactory(ExynosCamera3FrameFactory *factory) +{ + int ret = 0; + + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (factory != NULL && factory->isCreated()) { + ret = factory->stopPipes(); + if (ret < 0) { + CLOGE2("stopPipe fail"); + return ret; + } + } + + m_flagStartFrameFactory = false; + + return ret; +} + +status_t ExynosCamera3::m_deinitFrameFactory() +{ + CLOGI2("-IN-"); + + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = NO_ERROR; + ExynosCamera3FrameFactory *frameFactory = NULL; + + for (int i = 0; i < FRAME_FACTORY_TYPE_MAX; i++) { + if (m_frameFactory[i] != NULL) { + frameFactory = m_frameFactory[i]; + + for (int k = i + 1; k < FRAME_FACTORY_TYPE_MAX; k++) { + if (frameFactory == m_frameFactory[k]) { + CLOGD2("m_frameFactory index(%d) and index(%d) are same instance, set index(%d) = NULL", i, k, k); + m_frameFactory[k] = NULL; + } + } + + ret = m_frameFactory[i]->destroy(); + if (ret < 0) + CLOGE2("m_frameFactory[%d] destroy fail", i); + + SAFE_DELETE(m_frameFactory[i]); + + CLOGD2("m_frameFactory[%d] destroyed", i); + } + } + + CLOGI2("-OUT-"); + + return ret; + +} + +status_t ExynosCamera3::m_setupReprocessingPipeline(void) +{ + status_t ret = NO_ERROR; + uint32_t pipeId = MAX_PIPE_NUM; + ExynosCamera3FrameFactory *factory = m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING]; + ExynosCameraStream *stream = NULL; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBufferManager *taaBufferManager[MAX_NODE]; + ExynosCameraBufferManager *ispBufferManager[MAX_NODE]; + ExynosCameraBufferManager *mcscBufferManager[MAX_NODE]; + ExynosCameraBufferManager **tempBufferManager; + + for (int i = 0; i < MAX_NODE; i++) { + taaBufferManager[i] = NULL; + ispBufferManager[i] = NULL; + mcscBufferManager[i] = NULL; + } + + /* Setting bufferManager based on H/W pipeline */ + tempBufferManager = taaBufferManager; + pipeId = PIPE_3AA_REPROCESSING; + + tempBufferManager[factory->getNodeType(PIPE_3AA_REPROCESSING)] = m_fliteBufferMgr; + tempBufferManager[factory->getNodeType(PIPE_3AP_REPROCESSING)] = m_ispReprocessingBufferMgr; + + if (m_parameters->isReprocessing3aaIspOTF() == false) { + ret = factory->setBufferManagerToPipe(tempBufferManager, pipeId); + if (ret != NO_ERROR) { + CLOGE2("Failed to setBufferManagerToPipe into pipeId %d", pipeId); + return ret; + } + tempBufferManager = ispBufferManager; + pipeId = PIPE_ISP_REPROCESSING; + } + + tempBufferManager[factory->getNodeType(PIPE_ISP_REPROCESSING)] = m_ispReprocessingBufferMgr; + tempBufferManager[factory->getNodeType(PIPE_ISPC_REPROCESSING)] = m_yuvCaptureBufferMgr; + + if (m_parameters->isReprocessingIspMcscOTF() == false) { + ret = factory->setBufferManagerToPipe(tempBufferManager, pipeId); + if (ret != NO_ERROR) { + CLOGE2("Failed to setBufferManagerToPipe into pipeId %d", pipeId); + return ret; + } + tempBufferManager = mcscBufferManager; + pipeId = PIPE_MCSC_REPROCESSING; + } + + tempBufferManager[factory->getNodeType(PIPE_MCSC0_REPROCESSING)] = m_yuvCaptureReprocessingBufferMgr; + tempBufferManager[factory->getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING)] = m_yuvCaptureReprocessingBufferMgr; + 
tempBufferManager[factory->getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING)] = m_thumbnailBufferMgr; + /* Dummy buffer manager */ + tempBufferManager[factory->getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING)] = m_thumbnailBufferMgr; + + if (m_streamManager->findStream(HAL_STREAM_ID_JPEG) == true) { + ret = m_streamManager->getStream(HAL_STREAM_ID_JPEG, &stream); + if (ret != NO_ERROR) + CLOGE2("Failed to getStream from streamMgr. HAL_STREAM_ID_JPEG"); + + ret = stream->getBufferManager(&bufferMgr); + if (ret != NO_ERROR) + CLOGE2("Failed to getBufferMgr. HAL_STREAM_ID_JPEG"); + + tempBufferManager[factory->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)] = bufferMgr; + } + + ret = factory->setBufferManagerToPipe(tempBufferManager, pipeId); + if (ret != NO_ERROR) { + CLOGE2("Failed to setBufferManagerToPipe into pipeId %d", pipeId); + return ret; + } + + /* Setting OutputFrameQ/FrameDoneQ to Pipe */ + if(m_parameters->getUsePureBayerReprocessing()) { + // Pure bayer reprocessing + pipeId = PIPE_3AA_REPROCESSING; + } else if (m_parameters->isUseYuvReprocessing() == true) { + // YUV reprocessing + pipeId = PIPE_MCSC_REPROCESSING; + } else { + // Dirty bayer reprocessing + pipeId = PIPE_ISP_REPROCESSING; + } + + /* TODO : Consider the M2M Reprocessing Scenario */ + if (m_parameters->isUseYuvReprocessingForThumbnail() == true) { + factory->setOutputFrameQToPipe(m_reprocessingDoneQ, pipeId); + } else { + factory->setOutputFrameQToPipe(m_pipeCaptureFrameDoneQ, pipeId); + factory->setFrameDoneQToPipe(m_reprocessingDoneQ, pipeId); + } + + return ret; +} + +void ExynosCamera3::m_updateCropRegion(struct camera2_shot_ext *shot_ext) +{ + int sensorMaxW = 0, sensorMaxH = 0; + + m_parameters->getMaxSensorSize(&sensorMaxW, &sensorMaxH); + + shot_ext->shot.ctl.scaler.cropRegion[0] = ALIGN_DOWN(shot_ext->shot.ctl.scaler.cropRegion[0], 2); + shot_ext->shot.ctl.scaler.cropRegion[1] = ALIGN_DOWN(shot_ext->shot.ctl.scaler.cropRegion[1], 2); + shot_ext->shot.ctl.scaler.cropRegion[2] = ALIGN_UP(shot_ext->shot.ctl.scaler.cropRegion[2], 2); + shot_ext->shot.ctl.scaler.cropRegion[3] = ALIGN_UP(shot_ext->shot.ctl.scaler.cropRegion[3], 2); + + /* 1. Check the validation of the crop size(width x height). + * The crop size must be smaller than sensor max size. + */ + if (sensorMaxW < (int) shot_ext->shot.ctl.scaler.cropRegion[2] + || sensorMaxH < (int)shot_ext->shot.ctl.scaler.cropRegion[3]) { + CLOGE2("Invalid Crop Size(%d, %d), sensorMax(%d, %d)", + shot_ext->shot.ctl.scaler.cropRegion[2], + shot_ext->shot.ctl.scaler.cropRegion[3], + sensorMaxW, sensorMaxH); + shot_ext->shot.ctl.scaler.cropRegion[2] = sensorMaxW; + shot_ext->shot.ctl.scaler.cropRegion[3] = sensorMaxH; + } + + /* 2. Check the validation of the crop offset. + * Offset coordinate + width or height must be smaller than sensor max size. 
+ */ + if ((int)(shot_ext->shot.ctl.scaler.cropRegion[0]) < 0) { + CLOGE2("Invalid Crop Region, offsetX(%d), Change to 0", + shot_ext->shot.ctl.scaler.cropRegion[0]); + shot_ext->shot.ctl.scaler.cropRegion[0] = 0; + } + + if ((int)(shot_ext->shot.ctl.scaler.cropRegion[1]) < 0) { + CLOGE2("Invalid Crop Region, offsetY(%d), Change to 0", + shot_ext->shot.ctl.scaler.cropRegion[1]); + shot_ext->shot.ctl.scaler.cropRegion[1] = 0; + } + + if (sensorMaxW < (int) shot_ext->shot.ctl.scaler.cropRegion[0] + + (int) shot_ext->shot.ctl.scaler.cropRegion[2]) { + CLOGE2("Invalid Crop Region, offsetX(%d), width(%d) sensorMaxW(%d)", + shot_ext->shot.ctl.scaler.cropRegion[0], + shot_ext->shot.ctl.scaler.cropRegion[2], + sensorMaxW); + shot_ext->shot.ctl.scaler.cropRegion[0] = sensorMaxW - shot_ext->shot.ctl.scaler.cropRegion[2]; + } + + if (sensorMaxH < (int) shot_ext->shot.ctl.scaler.cropRegion[1] + + (int) shot_ext->shot.ctl.scaler.cropRegion[3]) { + CLOGE2("Invalid Crop Region, offsetY(%d), height(%d) sensorMaxH(%d)", + shot_ext->shot.ctl.scaler.cropRegion[1], + shot_ext->shot.ctl.scaler.cropRegion[3], + sensorMaxH); + shot_ext->shot.ctl.scaler.cropRegion[1] = sensorMaxH - shot_ext->shot.ctl.scaler.cropRegion[3]; + } + + m_parameters->setCropRegion( + shot_ext->shot.ctl.scaler.cropRegion[0], + shot_ext->shot.ctl.scaler.cropRegion[1], + shot_ext->shot.ctl.scaler.cropRegion[2], + shot_ext->shot.ctl.scaler.cropRegion[3]); +} + +status_t ExynosCamera3::m_updateJpegControlInfo(const struct camera2_shot_ext *shot_ext) +{ + status_t ret = NO_ERROR; + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + ret = m_parameters->checkJpegQuality(shot_ext->shot.ctl.jpeg.quality); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to checkJpegQuality. quality %d", + __FUNCTION__, __LINE__, shot_ext->shot.ctl.jpeg.quality); + ret = m_parameters->checkThumbnailSize( + shot_ext->shot.ctl.jpeg.thumbnailSize[0], + shot_ext->shot.ctl.jpeg.thumbnailSize[1]); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to checkThumbnailSize. size %dx%d", + __FUNCTION__, __LINE__, + shot_ext->shot.ctl.jpeg.thumbnailSize[0], + shot_ext->shot.ctl.jpeg.thumbnailSize[1]); + ret = m_parameters->checkThumbnailQuality(shot_ext->shot.ctl.jpeg.thumbnailQuality); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to checkThumbnailQuality. 
quality %d", + __FUNCTION__, __LINE__, + shot_ext->shot.ctl.jpeg.thumbnailQuality); + return ret; +} +status_t ExynosCamera3::m_generateFrame(int32_t frameCount, ExynosCamera3FrameFactory *factory, List *list, Mutex *listLock, ExynosCameraFrame **newFrame) +{ + status_t ret = OK; + *newFrame = NULL; + + CLOGV2("(%d)", frameCount); + if (frameCount >= 0) { + ret = m_searchFrameFromList(list, listLock, frameCount, newFrame); + if (ret < 0) { + CLOGE2("searchFrameFromList fail"); + return INVALID_OPERATION; + } + } + + if (*newFrame == NULL) { + *newFrame = factory->createNewFrame(frameCount); + if (*newFrame == NULL) { + CLOGE2("newFrame is NULL"); + return UNKNOWN_ERROR; + } + listLock->lock(); + list->push_back(*newFrame); + listLock->unlock(); + } + + return ret; +} + +status_t ExynosCamera3::m_setupEntity(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *srcBuf, ExynosCameraBuffer *dstBuf) +{ + status_t ret = OK; + entity_buffer_state_t entityBufferState; + + CLOGV2("pipeId : %d", pipeId); + /* set SRC buffer */ + ret = newFrame->getSrcBufferState(pipeId, &entityBufferState); + if (ret < 0) { + CLOGE2("getSrcBufferState fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + if (entityBufferState == ENTITY_BUFFER_STATE_REQUESTED) { + ret = m_setSrcBuffer(pipeId, newFrame, srcBuf); + if (ret < 0) { + CLOGE2("m_setSrcBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + } + + /* set DST buffer */ + ret = newFrame->getDstBufferState(pipeId, &entityBufferState); + if (ret < 0) { + CLOGE2("getDstBufferState fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + if (entityBufferState == ENTITY_BUFFER_STATE_REQUESTED) { + ret = m_setDstBuffer(pipeId, newFrame, dstBuf); + if (ret < 0) { + CLOGE2("m_setDstBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + } + + ret = newFrame->setEntityState(pipeId, ENTITY_STATE_PROCESSING); + if (ret < 0) { + CLOGE2("setEntityState(ENTITY_STATE_PROCESSING) fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + return ret; +} + +status_t ExynosCamera3::m_setSrcBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer) +{ + status_t ret = OK; + int bufIndex = -1; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer srcBuf; + + CLOGV2("pipeId : %d", pipeId); + if (buffer == NULL) { + buffer = &srcBuf; + + ret = m_getBufferManager(pipeId, &bufferMgr, SRC_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(SRC) fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + if (bufferMgr == NULL) { + CLOGE2("buffer manager is NULL, pipeId(%d)", pipeId); + return BAD_VALUE; + } + + /* get buffers */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, buffer); + if (ret < 0) { + CLOGE2("getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", pipeId, newFrame->getFrameCount(), ret); + return ret; + } + } + + /* set buffers */ + ret = newFrame->setSrcBuffer(pipeId, *buffer); + if (ret < 0) { + CLOGE2("setSrcBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + return ret; +} + +status_t ExynosCamera3::m_setDstBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer) +{ + status_t ret = OK; + int bufIndex = -1; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer dstBuf; + + CLOGV2("pipeId : %d", pipeId); + if (buffer == NULL) { + buffer = &dstBuf; + + ret = m_getBufferManager(pipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, 
pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + if (bufferMgr == NULL) { + CLOGE2("buffer manager is NULL, pipeId(%d)", pipeId); + return BAD_VALUE; + } + + /* get buffers */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, buffer); + if (ret < 0) { + CLOGE2("getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", pipeId, newFrame->getFrameCount(), ret); + return ret; + } + } + + /* set buffers */ + ret = newFrame->setDstBuffer(pipeId, *buffer); + if (ret < 0) { + CLOGE2("setDstBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + return ret; +} + +status_t ExynosCamera3::m_setSrcBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer, ExynosCameraBufferManager *bufMgr) +{ + status_t ret = OK; + int bufIndex = -1; + ExynosCameraBuffer srcBuf; + + CLOGV2("pipeId : %d", pipeId); + if (bufMgr == NULL) { + + ret = m_getBufferManager(pipeId, &bufMgr, SRC_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(SRC) fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + if (bufMgr == NULL) { + CLOGE2("buffer manager is NULL, pipeId(%d)", pipeId); + return BAD_VALUE; + } + + } + + /* get buffers */ + ret = bufMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, buffer); + if (ret < 0) { + CLOGE2("getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", pipeId, newFrame->getFrameCount(), ret); + return ret; + } + + /* set buffers */ + ret = newFrame->setSrcBuffer(pipeId, *buffer); + if (ret < 0) { + CLOGE2("setSrcBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + return ret; +} + +status_t ExynosCamera3::m_setDstBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer, ExynosCameraBufferManager *bufMgr) +{ + status_t ret = OK; + int bufIndex = -1; + ExynosCameraBuffer dstBuf; + + CLOGD2("pipeId : %d", pipeId); + if (bufMgr == NULL) { + + ret = m_getBufferManager(pipeId, &bufMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + if (bufMgr == NULL) { + CLOGE2("buffer manager is NULL, pipeId(%d)", pipeId); + return BAD_VALUE; + } + } + + /* get buffers */ + ret = bufMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, buffer); + if (ret < 0) { + CLOGE2("getBuffer fail, pipeId(%d), frameCount(%d), ret(%d)", pipeId, newFrame->getFrameCount(), ret); + return ret; + } + + /* set buffers */ + ret = newFrame->setDstBuffer(pipeId, *buffer); + if (ret < 0) { + CLOGE2("setDstBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + return ret; +} + +/* This function reset buffer state of pipeId. + * Some pipes are shared by multi stream. In this case, we need reset buffer for using PIPE again. 
+ */ +status_t ExynosCamera3::m_resetBufferState(uint32_t pipeId, ExynosCameraFrame *frame) +{ + status_t ret = NO_ERROR; + entity_buffer_state_t bufState = ENTITY_BUFFER_STATE_NOREQ; + + if (frame == NULL) { + CLOGE2("frame is NULL"); + ret = BAD_VALUE; + goto ERR; + } + + ret = frame->getSrcBufferState(pipeId, &bufState); + if (ret < 0) { + CLOGE2("getSrcBufferState fail, pipeId(%d), ret(%d)", pipeId, ret); + goto ERR; + } + + if (bufState != ENTITY_BUFFER_STATE_NOREQ && bufState != ENTITY_BUFFER_STATE_INVALID) { + frame->setSrcBufferState(pipeId, ENTITY_BUFFER_STATE_REQUESTED); + } else { + CLOGW2("SrcBufferState is not COMPLETE, fail to reset buffer state, pipeId(%d), state(%d)", pipeId, bufState); + ret = INVALID_OPERATION; + goto ERR; + } + + + ret = frame->getDstBufferState(pipeId, &bufState); + if (ret < 0) { + CLOGE2("getDstBufferState fail, pipeId(%d), ret(%d)", pipeId, ret); + goto ERR; + } + + if (bufState != ENTITY_BUFFER_STATE_NOREQ && bufState != ENTITY_BUFFER_STATE_INVALID) { + ret = frame->setDstBufferState(pipeId, ENTITY_BUFFER_STATE_REQUESTED); + if (ret != NO_ERROR) + CLOGE2("setDstBufferState fail, pipeId(%d), ret(%d)", pipeId, ret); + } else { + CLOGW2("DstBufferState is not COMPLETE, fail to reset buffer state, pipeId(%d), state(%d)", pipeId, bufState); + ret = INVALID_OPERATION; + goto ERR; + } + +ERR: + return ret; +} + +status_t ExynosCamera3::m_getBufferManager(uint32_t pipeId, ExynosCameraBufferManager **bufMgr, uint32_t direction) +{ + status_t ret = NO_ERROR; + ExynosCameraBufferManager **bufMgrList[2] = {NULL}; + + switch (pipeId) { + case PIPE_FLITE: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_bayerBufferMgr; + break; + case PIPE_3AA_ISP: + bufMgrList[0] = &m_3aaBufferMgr; + bufMgrList[1] = &m_ispBufferMgr; + break; + case PIPE_3AA: + bufMgrList[0] = &m_3aaBufferMgr; + bufMgrList[1] = &m_ispBufferMgr; + break; + case PIPE_3AC: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_bayerBufferMgr; + break; + case PIPE_ISP: + bufMgrList[0] = &m_ispBufferMgr; + bufMgrList[1] = &m_internalScpBufferMgr; + break; + case PIPE_SCP: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_internalScpBufferMgr; + break; + case PIPE_VRA: + bufMgrList[0] = &m_vraBufferMgr; + bufMgrList[1] = NULL; + break; + case PIPE_GSC: + bufMgrList[0] = &m_internalScpBufferMgr; + bufMgrList[1] = NULL; + break; + case PIPE_GSC_VIDEO: + bufMgrList[0] = &m_internalScpBufferMgr; + bufMgrList[1] = NULL; + break; + case PIPE_GSC_PICTURE: + bufMgrList[0] = &m_yuvCaptureBufferMgr; + bufMgrList[1] = &m_gscBufferMgr; + break; + case PIPE_JPEG: + case PIPE_JPEG_REPROCESSING: + bufMgrList[0] = NULL; + bufMgrList[1] = NULL; + break; + case PIPE_3AA_REPROCESSING: + bufMgrList[0] = &m_fliteBufferMgr; + bufMgrList[1] = &m_ispReprocessingBufferMgr; + break; + case PIPE_ISP_REPROCESSING: + bufMgrList[0] = &m_ispReprocessingBufferMgr; + bufMgrList[1] = &m_yuvCaptureReprocessingBufferMgr; + break; + case PIPE_ISPC_REPROCESSING: + case PIPE_SCC_REPROCESSING: + bufMgrList[0] = NULL; + bufMgrList[1] = &m_yuvCaptureReprocessingBufferMgr; + break; + case PIPE_MCSC_REPROCESSING: + bufMgrList[0] = &m_yuvCaptureBufferMgr; + bufMgrList[1] = &m_yuvCaptureReprocessingBufferMgr; + break; + case PIPE_GSC_REPROCESSING: + bufMgrList[0] = &m_yuvCaptureReprocessingBufferMgr; + bufMgrList[1] = &m_gscBufferMgr; + break; + + default: + CLOGE2("Unknown pipeId(%d)", pipeId); + bufMgrList[0] = NULL; + bufMgrList[1] = NULL; + ret = BAD_VALUE; + break; + } + + *bufMgr = *bufMgrList[direction]; + return ret; +} + +status_t 
ExynosCamera3::m_createIonAllocator(ExynosCameraIonAllocator **allocator) +{ + status_t ret = NO_ERROR; + int retry = 0; + do { + retry++; + CLOGI2("try(%d) to create IonAllocator", retry); + *allocator = new ExynosCameraIonAllocator(); + ret = (*allocator)->init(false); + if (ret < 0) + CLOGE2("create IonAllocator fail (retryCount=%d)", retry); + else { + CLOGD2("m_createIonAllocator success (allocator=%p)", *allocator); + break; + } + } while ((ret < 0) && (retry < 3)); + + if ((ret < 0) && (retry >=3)) { + CLOGE2("create IonAllocator fail (retryCount=%d)", retry); + ret = INVALID_OPERATION; + } + + return ret; +} + +status_t ExynosCamera3::m_createBufferManager(ExynosCameraBufferManager **bufferManager, const char *name, buffer_manager_type type) +{ + status_t ret = NO_ERROR; + + if (m_ionAllocator == NULL) { + ret = m_createIonAllocator(&m_ionAllocator); + if (ret < 0) + CLOGE2("m_createIonAllocator fail"); + else + CLOGD2("m_createIonAllocator success"); + } + + *bufferManager = ExynosCameraBufferManager::createBufferManager(type); + (*bufferManager)->create(name, m_cameraId, m_ionAllocator); + + CLOGD2("BufferManager(%s) created", name); + + return ret; +} + +status_t ExynosCamera3::m_createInternalBufferManager(ExynosCameraBufferManager **bufferManager, const char *name) +{ + return m_createBufferManager(bufferManager, name, BUFFER_MANAGER_ION_TYPE); +} + +status_t ExynosCamera3::m_createServiceBufferManager(ExynosCameraBufferManager **bufferManager, const char *name) +{ + return m_createBufferManager(bufferManager, name, BUFFER_MANAGER_SERVICE_GRALLOC_TYPE); +} + +status_t ExynosCamera3::m_convertingStreamToShotExt(ExynosCameraBuffer *buffer, struct camera2_node_output *outputInfo) +{ +/* TODO: HACK: Will be removed, this is driver's job */ + status_t ret = NO_ERROR; + int bayerFrameCount = 0; + camera2_shot_ext *shot_ext = NULL; + camera2_stream *shot_stream = NULL; + + shot_stream = (struct camera2_stream *)buffer->addr[1]; + bayerFrameCount = shot_stream->fcount; + outputInfo->cropRegion[0] = shot_stream->output_crop_region[0]; + outputInfo->cropRegion[1] = shot_stream->output_crop_region[1]; + outputInfo->cropRegion[2] = shot_stream->output_crop_region[2]; + outputInfo->cropRegion[3] = shot_stream->output_crop_region[3]; + + memset(buffer->addr[1], 0x0, sizeof(struct camera2_shot_ext)); + + shot_ext = (struct camera2_shot_ext *)buffer->addr[1]; + shot_ext->shot.dm.request.frameCount = bayerFrameCount; + + return ret; +} + +bool ExynosCamera3::m_selectBayerThreadFunc() +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = NO_ERROR; + ExynosCameraFrame *frame = NULL; + ExynosCameraBuffer bayerBuffer; + ExynosCameraBufferManager *bufferMgr = NULL; + camera2_shot_ext *shot_ext = NULL; + camera2_shot_ext updateDmShot; + struct camera2_node_output output_crop_info; + camera2_node_group node_group_info; + uint32_t pipeID = 0; + uint32_t frameCount = 0; + ExynosRect ratioCropSize; + int pictureW = 0, pictureH = 0; + + ret = m_selectBayerQ->waitAndPopProcessQ(&frame); + if (ret != NO_ERROR) { + if (ret == TIMED_OUT) + CLOGW2("Wait timeout"); + else + CLOGE2("Failed to waitAndPopProcessQ. ret %d", ret); + + goto CLEAN; + } else if (frame == NULL) { + CLOGE2("frame is NULL!!"); + goto CLEAN; + } + + frameCount = frame->getFrameCount(); + + if (frame->getFrameCapture() == false) { + CLOGW2("frame is not capture frame. frameCount %d", frameCount); + goto CLEAN; + } + + CLOGV2("Start to select Bayer. 
frameCount %d", frameCount); + + /* Get bayer buffer based on current reprocessing mode */ + switch(m_parameters->getReprocessingBayerMode()) { + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON: + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC: + CLOGD2("REPROCESSING_BAYER_MODE_PURE. isRawCapture %d", + frame->getFrameServiceBayer()); + + if (frame->getFrameZsl() || frame->getFrameServiceBayer()) + ret = m_getBayerServiceBuffer(frame, &bayerBuffer); + else + ret = m_getBayerBuffer(m_getBayerPipeId(), frameCount, &bayerBuffer, m_captureSelector); + if (ret != NO_ERROR) { + CLOGE2("Failed to get bayer buffer. frameCount %d useServiceBayerBuffer %d", + frameCount, frame->getFrameZsl()); + goto CLEAN; + } + + shot_ext = (struct camera2_shot_ext *)(bayerBuffer.addr[bayerBuffer.planeCount-1]); + if (shot_ext == NULL) { + CLOGE2("shot_ext from pure bayer buffer is NULL"); + break; + } + + ret = frame->storeDynamicMeta(shot_ext); + if (ret < 0) { + CLOGE2("storeDynamicMeta fail ret(%d)", ret); + goto CLEAN; + } + + ret = frame->storeUserDynamicMeta(shot_ext); + if (ret < 0) { + CLOGE2("storeUserDynamicMeta fail ret(%d)", ret); + goto CLEAN; + } + + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON: + case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC: + CLOGD2("REPROCESSING_BAYER_MODE_DIRTY%s. isRawCapture %d", + (m_parameters->getReprocessingBayerMode() + == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) ? "_DYNAMIC" : "_ALWAYS_ON", + frame->getFrameServiceBayer()); + + if (frame->getFrameZsl()/* || frame->getFrameServiceBayer()*/) + ret = m_getBayerServiceBuffer(frame, &bayerBuffer); + else + ret = m_getBayerBuffer(PIPE_3AA, frameCount, &bayerBuffer, m_captureSelector, &updateDmShot); + if (ret != NO_ERROR) { + CLOGE2("Failed to get bayer buffer. frameCount %d useServiceBayerBuffer %d", + frameCount, frame->getFrameZsl()); + goto CLEAN; + } + + /* Set perframe size of dirty reprocessing */ + /* TODO: HACK: Will be removed, this is driver's job */ + ret = m_convertingStreamToShotExt(&bayerBuffer, &output_crop_info); + if (ret != NO_ERROR) { + CLOGE2("shot_stream to shot_ext converting fail, ret(%d)", ret); + goto CLEAN; + } + + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + if (m_parameters->isUseYuvReprocessingForThumbnail() == true) { + m_parameters->getThumbnailSize(&pictureW, &pictureH); + if (pictureW <= 0 || pictureH <= 0) + m_parameters->getPictureSize(&pictureW, &pictureH); + } else { + m_parameters->getPictureSize(&pictureW, &pictureH); + } + + if (m_parameters->isUseYuvReprocessing() == true) + frame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + else + frame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_DIRTY_REPROCESSING_ISP); + + /* Leader */ + setLeaderSizeToNodeGroupInfo(&node_group_info, + output_crop_info.cropRegion[0], + output_crop_info.cropRegion[1], + ALIGN_UP(output_crop_info.cropRegion[2], CAMERA_ISP_ALIGN), + output_crop_info.cropRegion[3]); + + /* Capture */ + ret = getCropRectAlign( + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + pictureW, pictureH, + &ratioCropSize.x, &ratioCropSize.y, &ratioCropSize.w, &ratioCropSize.h, + CAMERA_MCSC_ALIGN, 2, 0, 1.0); + if (ret != NO_ERROR) { + CLOGE2("getCropRectAlign failed. 
MCSC in_crop %dx%d, MCSC(picture) out_size %dx%d", + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + pictureW, pictureH); + + ratioCropSize.x = 0; + ratioCropSize.y = 0; + ratioCropSize.w = node_group_info.leader.input.cropRegion[2]; + ratioCropSize.h = node_group_info.leader.input.cropRegion[3]; + } + + setCaptureCropNScaleSizeToNodeGroupInfo(&node_group_info, + PERFRAME_REPROCESSING_SCC_POS, + ratioCropSize.x, ratioCropSize.y, + ratioCropSize.w, ratioCropSize.h, + pictureW, pictureH); + + if (m_parameters->isUseYuvReprocessing() == true) + frame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + else + frame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_DIRTY_REPROCESSING_ISP); + + ret = frame->storeDynamicMeta(&updateDmShot); + if (ret < 0) { + CLOGE2("storeDynamicMeta fail ret(%d)", ret); + goto CLEAN; + } + + ret = frame->storeUserDynamicMeta(&updateDmShot); + if (ret < 0) { + CLOGE2("storeUserDynamicMeta fail ret(%d)", ret); + goto CLEAN; + } + + shot_ext = (struct camera2_shot_ext *)(bayerBuffer.addr[bayerBuffer.planeCount-1]); + frame->getMetaData(shot_ext); + + break; + default: + CLOGE2("bayer mode is not valid(%d)", m_parameters->getReprocessingBayerMode()); + goto CLEAN; + } + + CLOGD2("meta_shot_ext->shot.dm.request.frameCount : %d", getMetaDmRequestFrameCount(shot_ext)); + + /* Get pipeId for the first entity in reprocessing frame */ + pipeID = frame->getFirstEntity()->getPipeId(); + CLOGD2("Reprocessing stream first pipe ID %d", pipeID); + + /* Check available buffer */ + ret = m_getBufferManager(pipeID, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret != NO_ERROR) { + CLOGE2("Failed to getBufferManager, ret %d", ret); + goto CLEAN; + } else if (bufferMgr == NULL) { + CLOGE2("BufferMgr is NULL. pipeId %d", pipeID); + goto CLEAN; + } + + ret = m_checkBufferAvailable(pipeID, bufferMgr); + if (ret != NO_ERROR) { + CLOGE2("Waiting buffer timeout, PipeID %d, ret %d", pipeID, ret); + goto CLEAN; + } + + ret = m_setupEntity(pipeID, frame, &bayerBuffer, NULL); + if (ret < 0) { + CLOGE2("setupEntity fail, bayerPipeId(%d), ret(%d)", pipeID, ret); + goto CLEAN; + } + + m_captureQ->pushProcessQ(&frame); + + return true; + +CLEAN: + if (frame != NULL) { + frame->frameUnlock(); + ret = m_removeFrameFromList(&m_captureProcessList, &m_captureProcessLock, frame); + if (ret != NO_ERROR) + CLOGE2("Failed to remove frame from m_captureProcessList. frameCount %d ret %d", + frame->getFrameCount(), ret); + + frame->printEntity(); + CLOGD2("Delete frame from m_captureProcessList. 
frameCount %d", frame->getFrameCount()); + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + return true; + +} + +bool ExynosCamera3::m_captureThreadFunc() +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = NO_ERROR; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraRequest *request = NULL; + ExynosCamera3FrameFactory *factory = NULL; + + int pipeId = 0; + int bufPipeId = 0; + bool isSrc = false; + int retryCount = 3; + uint32_t frameCount = 0; + + ret = m_captureQ->waitAndPopProcessQ(&frame); + if (ret != NO_ERROR) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW2("Wait timeout"); + } else { + CLOGE2("Failed to wait&pop captureQ, ret %d", ret); + /* TODO: doing exception handling */ + } + goto CLEAN; + } else if (frame == NULL) { + CLOGE2("frame is NULL!!"); + goto CLEAN; + } + + m_captureStreamThread->run(PRIORITY_DEFAULT); + + frameCount = frame->getFrameCount(); + + CLOGV2("frame frameCount(%d)", frameCount); + + request = m_requestMgr->getServiceRequest(frameCount); + factory = request->getFrameFactory(HAL_STREAM_ID_JPEG); + + if (m_parameters->isReprocessing() == true) { + if (m_parameters->isUseYuvReprocessingForThumbnail() == true) { + ret = m_handleThumbnailReprocessingFrame(frame); + if (ret != NO_ERROR) { + CLOGE2("m_handleThumbnailReprocessingFrame fail, ret(%d)", ret); + goto CLEAN; + } + } else { + pipeId = frame->getFirstEntity()->getPipeId(); + + factory->pushFrameToPipe(&frame, pipeId); + factory->startThread(pipeId); + + /* Wait reprocesisng done */ + CLOGI2("Wait reprocessing done. frameCount %d", frameCount); + do { + ret = m_reprocessingDoneQ->waitAndPopProcessQ(&frame); + } while (ret == TIMED_OUT && retryCount-- > 0); + + if (ret != NO_ERROR) + CLOGW2("Failed to waitAndPopProcessQ to reprocessingDoneQ. ret %d", ret); + } + } else { + if (m_parameters->is3aaIspOtf() == true) { + pipeId = PIPE_3AA; + if (m_parameters->isUsing3acForIspc() == true) + bufPipeId = PIPE_3AC; + else + bufPipeId = PIPE_ISPC; + } else { + pipeId = PIPE_ISP; + bufPipeId = PIPE_ISPC; + } + + newFrame = m_sccCaptureSelector->selectDynamicFrames(1, pipeId, isSrc, retryCount, factory->getNodeType(bufPipeId)); + + if (newFrame == NULL) { + CLOGE2("newFrame is NULL"); + goto CLEAN; + } + + if (frameCount != newFrame->getFrameCount()) + CLOGW2("Selected frame count is not match! 
frame(%d), selected(%d)", + frameCount, newFrame->getFrameCount()); + + m_captureProcessList.push_back(newFrame); + + ret = m_handleIsChainDone(newFrame); + if (ret != NO_ERROR) { + CLOGE2("m_handleIsChainDone fail, ret(%d)", ret); + goto CLEAN; + } + } + + if (m_captureQ->getSizeOfProcessQ() > 0) + return true; + else + return false; + +CLEAN: + if (frame != NULL) { + if (m_parameters->isReprocessing() == true) { + frame->frameUnlock(); + ret = m_removeFrameFromList(&m_captureProcessList, &m_captureProcessLock, frame); + if (ret != NO_ERROR) + CLOGE2("remove frame from m_captureProcessList fail, ret(%d)", ret); + } + + frame->printEntity(); + CLOGD2("Picture frame delete(%d)", frame->getFrameCount()); + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + + CLOGI2("captureThreadFunc fail, remaining count(%d)", m_sccCaptureSelector->getHoldCount()); + + if (m_captureQ->getSizeOfProcessQ() > 0) + return true; + else + return false; +} + +status_t ExynosCamera3::m_getBayerServiceBuffer(ExynosCameraFrame *frame, ExynosCameraBuffer *buffer) +{ + status_t ret = NO_ERROR; + ExynosCameraRequest *request = NULL; + ExynosCameraBufferManager *bufferMgr = NULL; + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + if (request != NULL) { + camera3_stream_buffer_t *stream_buffer = request->getInputBuffer(); + buffer_handle_t *handle = stream_buffer->buffer; + int bufIndex = -1; + + m_bayerBufferMgr->getIndexByHandle(handle, &bufIndex); + if (bufIndex < 0) { + CLOGE2("getIndexByHandle is fail(fcount:%d / handle:%p)", frame->getFrameCount(), handle); + ret = BAD_VALUE; + } else { + ret = m_bayerBufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_SERVICE, buffer); + CLOGI2("service bayer selected(fcount:%d / handle:%p / idx:%d / ret:%d)", frame->getFrameCount(), handle, bufIndex, ret); + } + } else { + CLOGE2("request if NULL(fcount:%d)", frame->getFrameCount()); + ret = BAD_VALUE; + } + + return ret; +} + +status_t ExynosCamera3::m_getBayerBuffer(uint32_t pipeId, uint32_t frameCount, ExynosCameraBuffer *buffer, ExynosCameraFrameSelector *selector, camera2_shot_ext *updateDmShot) +{ + status_t ret = NO_ERROR; + bool isSrc = false; + int retryCount = 30; /* 200ms x 30 */ + camera2_shot_ext *shot_ext = NULL; + camera2_stream *shot_stream = NULL; + ExynosCameraFrame *inListFrame = NULL; + ExynosCameraFrame *bayerFrame = NULL; + + if (m_parameters->isReprocessing() == false || selector == NULL) { + CLOGE2("INVALID_OPERATION, isReprocessing(%s) or bayerFrame is NULL", + m_parameters->isReprocessing() ? 
"True" : "False"); + ret = INVALID_OPERATION; + goto CLEAN; + } + + selector->setWaitTime(200000000); + bayerFrame = selector->selectCaptureFrames(1, frameCount, pipeId, isSrc, retryCount, 0); + if (bayerFrame == NULL) { + CLOGE2("bayerFrame is NULL"); + ret = INVALID_OPERATION; + goto CLEAN; + } + + ret = bayerFrame->getDstBuffer(pipeId, buffer); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + goto CLEAN; + } + + if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON || + m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_PURE_DYNAMIC) { + shot_ext = (struct camera2_shot_ext *)buffer->addr[1]; + CLOGD2("Selected frame count(hal : %d / driver : %d)", + bayerFrame->getFrameCount(), shot_ext->shot.dm.request.frameCount); + } else if (m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON || + m_parameters->getReprocessingBayerMode() == REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC) { + if (updateDmShot == NULL) { + CLOGE2("updateDmShot is NULL"); + goto CLEAN; + } + + while(retryCount > 0) { + if(bayerFrame->getMetaDataEnable() == false) { + CLOGD2("Waiting for update jpeg metadata failed (%d), retryCount(%d)", ret, retryCount); + } else { + break; + } + retryCount--; + usleep(DM_WAITING_TIME); + } + + /* update meta like pure bayer */ + bayerFrame->getUserDynamicMeta(updateDmShot); + bayerFrame->getDynamicMeta(updateDmShot); + + shot_stream = (struct camera2_stream *)buffer->addr[1]; + CLOGD2("Selected fcount(hal : %d / driver : %d)", bayerFrame->getFrameCount(), shot_stream->fcount); + } else { + CLOGE2("reprocessing is not valid pipeId(%d), ret(%d)", pipeId, ret); + goto CLEAN; + } + +CLEAN: + + if (bayerFrame != NULL) { + bayerFrame->frameUnlock(); + + ret = m_searchFrameFromList(&m_processList, &m_processLock, bayerFrame->getFrameCount(), &inListFrame); + if (ret < 0) { + CLOGE2("searchFrameFromList fail"); + } else { + CLOGD2("Selected frame(%d) complete, Delete", bayerFrame->getFrameCount()); + bayerFrame->decRef(); + m_frameMgr->deleteFrame(bayerFrame); + bayerFrame = NULL; + } + } + + return ret; +} + +bool ExynosCamera3::m_captureStreamThreadFunc(void) +{ + status_t ret = 0; + ExynosCameraFrame *frame = NULL; + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraBuffer buffer; + ExynosCameraRequest* request = NULL; + struct camera2_shot_ext *shot_ext = NULL; + uint32_t frameCount = 0; + int pipeId = -1; + + ret = m_pipeCaptureFrameDoneQ->waitAndPopProcessQ(&frame); + if (ret != NO_ERROR) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW2("wait timeout"); + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + goto FUNC_EXIT; + } + + if (frame == NULL) { + CLOGE2("frame is NULL"); + goto FUNC_EXIT; + } + + entity = frame->getFrameDoneEntity(); + if (entity == NULL) { + CLOGE2("current entity is NULL"); + /* TODO: doing exception handling */ + goto FUNC_EXIT; + } + + frameCount = frame->getFrameCount(); + pipeId = entity->getPipeId(); + + CLOGD2("captureStream frame->frameCnt(%d), entityID(%d)", frameCount, pipeId); + + switch(pipeId) { + case PIPE_3AA_REPROCESSING: + case PIPE_ISP_REPROCESSING: + case PIPE_MCSC_REPROCESSING: + ret = frame->getSrcBuffer(pipeId, &buffer); + if (ret != NO_ERROR) { + CLOGE2("Failed to getSrcBuffer, pipeId %d, ret %d", pipeId, ret); + goto FUNC_EXIT; + } + + shot_ext = (struct camera2_shot_ext *) buffer.addr[buffer.planeCount - 1]; + if (shot_ext == NULL) { 
+ CLOGE2("shot_ext is NULL. pipeId %d frameCount %d", pipeId, frameCount); + goto FUNC_EXIT; + } + + if (m_parameters->getUsePureBayerReprocessing() == true) { + ret = m_pushResult(frameCount, shot_ext); + if (ret != NO_ERROR) { + CLOGE2("Failed to pushResult. framecount %d ret %d", frameCount, ret); + goto FUNC_EXIT; + } + } else { + // In dirty bayer case, the meta is updated if the current request + // is reprocessing only(i.e. Internal frame is created on preview path). + // Preview path will update the meta if the current request have preview frame + ExynosCameraRequest* request = m_requestMgr->getServiceRequest(frameCount); + if (request == NULL) { + CLOGE2("getServiceRequest failed"); + } else { + if(request->getNeedInternalFrame() == true) { + ret = m_pushResult(frameCount, shot_ext); + if (ret != NO_ERROR) { + CLOGE2("Failed to pushResult. framecount %d ret %d", frameCount, ret); + goto FUNC_EXIT; + } + } + } + } + + ret = frame->storeDynamicMeta(shot_ext); + if (ret != NO_ERROR) { + CLOGE2("Failed to storeUserDynamicMeta, requestKey %d, ret %d", request->getKey(), ret); + goto FUNC_EXIT; + } + + ret = frame->storeUserDynamicMeta(shot_ext); + if (ret != NO_ERROR) { + CLOGE2("Failed to storeUserDynamicMeta, requestKey %d, ret %d", request->getKey(), ret); + goto FUNC_EXIT; + } + + CLOGV2("REPROCESSING Done. dm.request.frameCount %d frameCount %d", + getMetaDmRequestFrameCount(shot_ext), frameCount); + + if (m_parameters->isUseYuvReprocessing() == true) { + ret = m_putBuffers(m_yuvCaptureBufferMgr, buffer.index); + if (ret != NO_ERROR) + CLOGE2("Failed to putBuffer to yuvCaptureBufferMgr, bufferIndex %d", buffer.index); + } else if (frame->getFrameServiceBayer() == false) { + ret = m_putBuffers(m_fliteBufferMgr, buffer.index); + if (ret != NO_ERROR) + CLOGE2("Failed to putBuffer to fliteBufferMgr, bufferIndex %d", buffer.index); + } + + /* Handle yuv capture buffer */ + ret = m_handleYuvCaptureFrame(frame); + if (ret != NO_ERROR) { + CLOGE2("Failed to handleYuvCaptureFrame. 
pipeId %d ret %d", pipeId, ret); + goto FUNC_EXIT; + } + + /* Continue to JPEG processing stage in HWFC mode */ + if (m_parameters->isHWFCEnabled() == false) + break; + case PIPE_JPEG: + case PIPE_JPEG_REPROCESSING: + ret = m_handleJpegFrame(frame); + if (ret != NO_ERROR) { + CLOGE2("m_handleJpegFrame fail, pipeId(%d), ret(%d)", pipeId, ret); + goto FUNC_EXIT; + } + break; + case PIPE_GSC: + case PIPE_GSC_VIDEO: + case PIPE_GSC_PICTURE: + case PIPE_GSC_REPROCESSING: + ret = m_handleScalerDone(frame); + if (ret != NO_ERROR) { + CLOGE2("m_handleScalerDone fail, pipeId(%d) ret(%d)", pipeId, ret); + goto FUNC_EXIT; + } + break; + default: + CLOGE2("Invalid pipe ID (%d)", pipeId); + break; + } + + ret = frame->setEntityState(pipeId, ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE2("setEntityState fail, pipeId(%d), state(%d), ret(%d)", + pipeId, ENTITY_STATE_COMPLETE, ret); + goto FUNC_EXIT; + } + + if ((pipeId == PIPE_JPEG + || m_parameters->isHWFCEnabled() == true + || pipeId == PIPE_JPEG_REPROCESSING) + && frame->isComplete() == true) { + List *list = NULL; + Mutex *listLock = NULL; +#if defined(ENABLE_FULL_FRAME) + list = &m_processList; + listLock = &m_processLock; +#else + list = &m_captureProcessList; + listLock = &m_captureProcessLock; +#endif + // TODO:decide proper position + CLOGV2("frame complete, count(%d)", frameCount); + ret = m_removeFrameFromList(list, listLock, frame); + if (ret < 0) { + CLOGE2("remove frame from processList fail, ret(%d)", ret); + } + + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + } + +FUNC_EXIT: + Mutex::Autolock l(m_captureProcessLock); + if (m_captureProcessList.size() > 0) + return true; + else + return false; +} + +status_t ExynosCamera3::m_handleIsChainDone(ExynosCameraFrame *frame) +{ + status_t ret = 0; + ExynosCameraRequest* request = NULL; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer dstBuffer; + int bufIndex = -1; + ExynosCamera3FrameFactory *factory = NULL; + int pipeId_src = -1; + int pipeId_gsc = -1; + int pipeId_jpeg = -1; + + bool isSrc = false; + float zoomRatio = 0.0F; + struct camera2_stream *shot_stream = NULL; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + ExynosRect srcRect, dstRect; + int type = CAMERA3_MSG_SHUTTER; + struct camera2_shot_ext *temp_ext = new struct camera2_shot_ext; + struct camera2_shot_ext *result_ext = new struct camera2_shot_ext; + + memset(temp_ext, 0x00, sizeof(struct camera2_shot_ext)); + memset(result_ext, 0x00, sizeof(struct camera2_shot_ext)); + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + factory = request->getFrameFactory(HAL_STREAM_ID_JPEG); + + zoomRatio = m_parameters->getZoomRatio(m_parameters->getZoomLevel()) / 1000; + + /////////////////////////////////////////////////////////// + if (m_parameters->isReprocessing() == true) { + /* We are using only PIPE_ISP_REPROCESSING */ + pipeId_src = PIPE_ISP_REPROCESSING; + pipeId_gsc = PIPE_GSC_REPROCESSING; + pipeId_jpeg = PIPE_JPEG_REPROCESSING; + isSrc = true; + } else if(m_parameters->isUsing3acForIspc() == true){ + pipeId_src = PIPE_3AA; + pipeId_gsc = PIPE_GSC_PICTURE; + pipeId_jpeg = PIPE_JPEG; + } else { +#if defined(ENABLE_FULL_FRAME) + pipeId_src = PIPE_ISP; + pipeId_gsc = PIPE_GSC_PICTURE; + pipeId_jpeg = PIPE_JPEG; +#else + switch (getCameraId()) { + case CAMERA_ID_FRONT: + pipeId_src = PIPE_ISP; + pipeId_gsc = PIPE_GSC_PICTURE; + break; + default: + CLOGE2("Current picture mode is not yet supported, CameraId(%d), reprocessing(%d)", + 
getCameraId(), m_parameters->isReprocessing()); + break; + } + pipeId_jpeg = PIPE_JPEG; +#endif + } + /////////////////////////////////////////////////////////// + + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + if (m_parameters->isReprocessing() == true) + ret = frame->getDstBuffer(pipeId_src, &srcBuffer, factory->getNodeType(PIPE_ISPC_REPROCESSING)); + else if (m_parameters->isUsing3acForIspc() == true) + ret = frame->getDstBuffer(pipeId_src, &srcBuffer, factory->getNodeType(PIPE_3AC)); + else + ret = frame->getDstBuffer(pipeId_src, &srcBuffer, factory->getNodeType(PIPE_ISPC)); + + if (ret < 0) + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_src, ret); + + shot_stream = (struct camera2_stream *)(srcBuffer.addr[srcBuffer.planeCount-1]); + if (shot_stream != NULL) { + CLOGD2("fcount(%d), rcount(%d), findex(%d), fvalid(%d)", + shot_stream->fcount, shot_stream->rcount, shot_stream->findex, shot_stream->fvalid); + + CLOGD2("(%d %d %d %d)(%d %d %d %d)", + shot_stream->input_crop_region[0], + shot_stream->input_crop_region[1], + shot_stream->input_crop_region[2], + shot_stream->input_crop_region[3], + shot_stream->output_crop_region[0], + shot_stream->output_crop_region[1], + shot_stream->output_crop_region[2], + shot_stream->output_crop_region[3]); + } else { + CLOGE2("shot_stream is NULL"); + return INVALID_OPERATION; + } + + /* should change size calculation code in pure bayer */ +#if 0 + if (shot_stream != NULL) { + ret = m_calcPictureRect(&srcRect, &dstRect); + ret = newFrame->setSrcRect(pipeId_gsc, &srcRect); + ret = newFrame->setDstRect(pipeId_gsc, &dstRect); + } +#else + m_parameters->getPictureSize(&pictureW, &pictureH); +#if defined(ENABLE_FULL_FRAME) + pictureFormat = m_parameters->getHwPreviewFormat(); +#else + pictureFormat = m_parameters->getHwPictureFormat(); +#endif + + srcRect.x = shot_stream->output_crop_region[0]; + srcRect.y = shot_stream->output_crop_region[1]; + srcRect.w = shot_stream->output_crop_region[2]; + srcRect.h = shot_stream->output_crop_region[3]; + srcRect.fullW = shot_stream->output_crop_region[2]; + srcRect.fullH = shot_stream->output_crop_region[3]; + srcRect.colorFormat = pictureFormat; +#if 0 + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = srcRect.w; + dstRect.h = srcRect.h; + dstRect.fullW = srcRect.fullW; + dstRect.fullH = srcRect.fullH; + dstRect.colorFormat = JPEG_INPUT_COLOR_FMT; + +#else + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = pictureW; + dstRect.h = pictureH; + dstRect.fullW = pictureW; + dstRect.fullH = pictureH; + dstRect.colorFormat = JPEG_INPUT_COLOR_FMT; +#endif + ret = getCropRectAlign(srcRect.w, srcRect.h, + pictureW, pictureH, + &srcRect.x, &srcRect.y, + &srcRect.w, &srcRect.h, + 2, 2, 0, zoomRatio); + + ret = frame->setSrcRect(pipeId_gsc, &srcRect); + ret = frame->setDstRect(pipeId_gsc, &dstRect); +#endif + + CLOGD2("srcRect size (%d, %d, %d, %d %d %d)", + srcRect.x, srcRect.y, srcRect.w, srcRect.h, srcRect.fullW, srcRect.fullH); + CLOGD2("dstRect size (%d, %d, %d, %d %d %d)", + dstRect.x, dstRect.y, dstRect.w, dstRect.h, dstRect.fullW, dstRect.fullH); + + ret = m_setupEntity(pipeId_gsc, frame, &srcBuffer, NULL); + + if (ret < 0) { + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + } + + // Update frame's exposureTime from request result's exposureTime + frame->getMetaData(temp_ext); + if ((temp_ext->shot.dm.sensor.exposureTime) == 0) { + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + ret = request->getResultShot(result_ext); + if (ret < 0) { + 
CLOGE2("getResultShot fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + } + temp_ext->shot.dm.sensor.exposureTime = result_ext->shot.dm.sensor.exposureTime; + frame->setMetaData(temp_ext); + } + + factory->pushFrameToPipe(&frame, pipeId_gsc); + factory->setOutputFrameQToPipe(m_pipeCaptureFrameDoneQ, pipeId_gsc); + + } else { /* m_parameters->needGSCForCapture(getCameraId()) == false */ + ret = frame->getDstBuffer(pipeId_src, &srcBuffer); +#if defined(ENABLE_FULL_FRAME) + if (ret < 0) + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_src, ret); + + ret = m_setupEntity(pipeId_jpeg, frame, &srcBuffer, NULL); + if (ret < 0) { + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + } +#else + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_src, ret); + } else { + /* getting jpeg buffer from service buffer */ + ExynosCameraStream *stream = NULL; + + int streamId = 0; + m_streamManager->getStream(HAL_STREAM_ID_JPEG, &stream); + + if (stream == NULL) { + CLOGE2("stream is NULL"); + return INVALID_OPERATION; + } + + stream->getID(&streamId); + stream->getBufferManager(&bufferMgr); + CLOGV2("streamId(%d), bufferMgr(%p)", streamId, bufferMgr); + /* bufferMgr->printBufferQState(); */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuffer); + if (ret < 0) { + CLOGE2("bufferMgr getBuffer fail, frameCount(%d), ret(%d)", frame->getFrameCount(), ret); + } + } + ret = m_setupEntity(pipeId_jpeg, frame, &srcBuffer, &dstBuffer); + if (ret < 0) { + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + } +#endif + factory->setOutputFrameQToPipe(m_pipeCaptureFrameDoneQ, pipeId_jpeg); + factory->pushFrameToPipe(&frame, pipeId_jpeg); + } + + return ret; +} + +status_t ExynosCamera3::m_handleScalerDone(ExynosCameraFrame *frame) +{ + status_t ret = 0; + ExynosCameraRequest* request = NULL; + int pipeId_gsc = -1; + int pipeId_dst = -1; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer buffer; + ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer dstBuffer; + int bufIndex = -1; + ExynosCamera3FrameFactory *factory = NULL; + + unsigned int completeBufferCount = 0; + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + factory = request->getFrameFactory(HAL_STREAM_ID_JPEG); + + ////////////////////////////////////////////////////////// + /* TODO: Need to decision pipeId both current and next */ + if (m_parameters->isReprocessing() == true) { + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + pipeId_gsc = PIPE_GSC_REPROCESSING; + } else { + pipeId_gsc = (m_parameters->isOwnScc(getCameraId()) == true) ? 
PIPE_SCC_REPROCESSING : PIPE_ISPC_REPROCESSING; + } + pipeId_dst = PIPE_JPEG_REPROCESSING; + } else { + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + pipeId_gsc = PIPE_GSC_PICTURE; + } else { + if (m_parameters->isOwnScc(getCameraId()) == true) { + pipeId_gsc = PIPE_SCC; + } else { + pipeId_gsc = PIPE_ISPC; + } + } + pipeId_dst = PIPE_JPEG; + } + ////////////////////////////////////////////////////////// + +#if !defined(ENABLE_FULL_FRAME) + /* handle src buffer of gsc */ + ret = m_getBufferManager(pipeId_gsc, &bufferMgr, SRC_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, pipeId(%d), ret(%d)", pipeId_gsc, ret); + return ret; + } + + ret = frame->getSrcBuffer(pipeId_gsc, &buffer); + if (ret < 0) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", pipeId_gsc, ret); + } + + ret = m_putBuffers(bufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("m_putBuffers fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } +#else + /* + * internal scp buffer should be return to buffer manager before creating jpeg output buffer. + * so, we should compare complete buffer count + 1 + */ + CLOGV2("request->getNumOfOutputBuffer(%d) request->getCompleteBuffers(%d)", request->getNumOfOutputBuffer(), request->getCompleteBufferCount()); + + request->increaseDuplicateBufferCount(); + completeBufferCount = request->getNumOfOutputBuffer(); + if (frame->getFrameCapture() == true) + completeBufferCount --; + if (frame->getFrameServiceBayer() == true) + completeBufferCount --; + + if(completeBufferCount == (unsigned int)request->getDuplicateBufferCount()) { + /* handle src buffer of gsc */ + ret = m_getBufferManager(pipeId_gsc, &bufferMgr, SRC_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, pipeId(%d), ret(%d)", pipeId_gsc, ret); + return ret; + } + + ret = frame->getSrcBuffer(pipeId_gsc, &buffer); + if (ret < 0) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", pipeId_gsc, ret); + } + + CLOGV2("Internal Scp Buffer is returned index(%d)frameCount(%d)", buffer.index, frame->getFrameCount()); + + ret = m_putBuffers(bufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("m_putBuffers fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + m_captureResultDoneCondition.signal(); + } +#endif + + /* + * handle dst buffer of gsc + * - pipeId_dst : Indicate to pipe ID for next pipe. + * -1 means final result for this stream. 
+ */ + if (pipeId_dst >= 0) { + ExynosCameraStream *stream = NULL; + + int streamId = 0; + m_streamManager->getStream(HAL_STREAM_ID_JPEG, &stream); + + if (stream == NULL) { + CLOGE2("stream is NULL"); + return INVALID_OPERATION; + } + + stream->getID(&streamId); + CLOGD2("streamID(%d)", streamId); + + stream->getBufferManager(&bufferMgr); + CLOGV2("bufferMgr(%p)", bufferMgr); + /* bufferMgr->printBufferQState(); */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuffer); + if (ret < 0) { + CLOGE2("bufferMgr getBuffer fail, frameCount(%d), ret(%d)", frame->getFrameCount(), ret); + } + ret = m_getBufferManager(pipeId_gsc, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, pipeId(%d), ret(%d)", pipeId_gsc, ret); + return ret; + } + /* bufferMgr->printBufferQState(); */ + ret = frame->getDstBuffer(pipeId_gsc, &srcBuffer); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_gsc, ret); + } + + ret = m_putBuffers(bufferMgr, srcBuffer.index); + if (ret < 0) { + CLOGE2("m_putBuffers fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + + ret = m_setupEntity(pipeId_dst, frame, &srcBuffer, &dstBuffer); + if (ret < 0) { + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", pipeId_dst, ret); + } + factory->setOutputFrameQToPipe(m_pipeCaptureFrameDoneQ, pipeId_dst); + factory->pushFrameToPipe(&frame, pipeId_dst); + } else { + // TODO: send result + } + + return ret; +} + +status_t ExynosCamera3::m_handleThumbnailReprocessingFrame(ExynosCameraFrame *frame) +{ + status_t ret = NO_ERROR; + + ExynosCameraRequest *request = NULL; + ExynosCamera3FrameFactory *factory = NULL; + + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer yuvReprocessingBuffer; + ExynosCameraBuffer thumbnailBuffer; + struct camera2_shot_ext shot_ext; + struct camera2_node_output output_crop_info; + camera2_node_group node_group_info; + + int bufferIndex = -2; + int pipeId = 0; + int retryCount = 3; + uint32_t frameCount = 0; + bool isNeedThumbnail = false; + ExynosRect ratioCropSize; + int pictureW = 0, pictureH = 0; + + srcBuffer.index = -2; + yuvReprocessingBuffer.index = -2; + thumbnailBuffer.index = -2; + + frameCount = frame->getFrameCount(); + request = m_requestMgr->getServiceRequest(frameCount); + factory = request->getFrameFactory(HAL_STREAM_ID_JPEG); + + pipeId = frame->getFirstEntity()->getPipeId(); + + factory->pushFrameToPipe(&frame, pipeId); + factory->startThread(pipeId); + + /* Wait reprocesisng done */ + CLOGI2("Wait reprocessing done. frameCount %d", frameCount); + do { + ret = m_reprocessingDoneQ->waitAndPopProcessQ(&frame); + } while (ret == TIMED_OUT && retryCount-- > 0); + + if (ret != NO_ERROR) + CLOGW2("Failed to waitAndPopProcessQ to reprocessingDoneQ. ret %d", ret); + + frame->getMetaData(&shot_ext); + isNeedThumbnail = (shot_ext.shot.ctl.jpeg.thumbnailSize[0] > 0 + && shot_ext.shot.ctl.jpeg.thumbnailSize[1] > 0) ? 
true : false; + + if (isNeedThumbnail == false) { + m_pipeCaptureFrameDoneQ->pushProcessQ(&frame); + return ret; + } + + ret = frame->setEntityState(pipeId, ENTITY_STATE_COMPLETE); + if (ret != NO_ERROR) { + CLOGE2("setEntityState(ENTITY_STATE_PROCESSING) fail, pipeId(%d), ret(%d)", pipeId, ret); + return ret; + } + + CLOGI2("Thumbnail Reprocessing done"); + + frame->setMetaDataEnable(true); + + /* Copy thumbnail image to thumbnail buffer */ + ret = frame->getDstBuffer(pipeId, &yuvReprocessingBuffer, factory->getNodeType(PIPE_MCSC0_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + goto CLEAN; + } + + ret = m_thumbnailBufferMgr->getBuffer(&bufferIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &thumbnailBuffer); + if (ret != NO_ERROR) { + CLOGE2("get thumbnail Buffer fail, ret(%d)", ret); + goto CLEAN; + } + + memcpy(thumbnailBuffer.addr[0], yuvReprocessingBuffer.addr[0], thumbnailBuffer.size[0]); + + /* Put buffers */ + ret = m_putBuffers(m_thumbnailBufferMgr, thumbnailBuffer.index); + if (ret != NO_ERROR) { + CLOGE2("ThumbnailBuffer putBuffer fail, index(%d), ret(%d)", thumbnailBuffer.index, ret); + goto CLEAN; + } + + /* Put reprocessing dst buffer */ + ret = m_getBufferManager(pipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret != NO_ERROR) { + CLOGE2("getBufferManager fail, ret(%d)", ret); + goto CLEAN; + } + + if (bufferMgr != NULL) { + ret = m_putBuffers(bufferMgr, yuvReprocessingBuffer.index); + if (ret != NO_ERROR) { + CLOGE2("DstBuffer putBuffer fail, index(%d), ret(%d)", yuvReprocessingBuffer.index, ret); + goto CLEAN; + } + } + + /* get src buffer */ + ret = frame->getSrcBuffer(pipeId, &srcBuffer); + if (ret != NO_ERROR) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", pipeId, ret); + goto CLEAN; + } + + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + frame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + + /* Delete new frame */ + CLOGI2("Reprocessing frame for thumbnail delete(%d)", frame->getFrameCount()); + + ret = m_removeFrameFromList(&m_captureProcessList, &m_captureProcessLock, frame); + if (ret != 0) + CLOGE2("remove frame from processList fail, ret(%d)", ret); + + frame->decRef(); + m_frameMgr->deleteFrame(frame); + frame = NULL; + + ret = m_generateFrame(frameCount, factory, &m_captureProcessList, &m_captureProcessLock, &frame); + if (ret != NO_ERROR) { + CLOGE2("m_generateFrame fail"); + return INVALID_OPERATION; + } else if (frame == NULL) { + CLOGE2("frame is NULL"); + return INVALID_OPERATION; + } + + /* Set JPEG request true */ + frame->setRequest(PIPE_MCSC0_REPROCESSING, true); + if (m_parameters->isHWFCEnabled() == true) { + frame->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, true); + frame->setRequest(PIPE_HWFC_JPEG_DST_REPROCESSING, true); + frame->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, true); + } + + CLOGV2("generate request framecount %d requestKey %d", frameCount, request->getKey()); + + ret = frame->setMetaData(&shot_ext); + if (ret != NO_ERROR) + CLOGE2("Set metadata to frame fail, Frame count(%d), ret(%d)", + frameCount, ret); + + m_parameters->getPictureSize(&pictureW, &pictureH); + + /* Capture */ + ret = getCropRectAlign( + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + pictureW, pictureH, + &ratioCropSize.x, &ratioCropSize.y, &ratioCropSize.w, &ratioCropSize.h, + CAMERA_MCSC_ALIGN, 2, 0, 1.0); + if (ret != NO_ERROR) { + CLOGE2("getCropRectAlign failed. 
MCSC in_crop %dx%d, MCSC(picture) out_size %dx%d", + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + pictureW, pictureH); + + ratioCropSize.x = 0; + ratioCropSize.y = 0; + ratioCropSize.w = node_group_info.leader.input.cropRegion[2]; + ratioCropSize.h = node_group_info.leader.input.cropRegion[3]; + } + + setCaptureCropNScaleSizeToNodeGroupInfo(&node_group_info, + PERFRAME_REPROCESSING_SCC_POS, + ratioCropSize.x, ratioCropSize.y, + ratioCropSize.w, ratioCropSize.h, + pictureW, pictureH); + + frame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_YUV_REPROCESSING_MCSC); + + /* Get pipeId for the first entity in reprocessing frame */ + pipeId = frame->getFirstEntity()->getPipeId(); + CLOGD2("Reprocessing stream first pipe ID %d", pipeId); + + /* Check available buffer */ + ret = m_getBufferManager(pipeId, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret != NO_ERROR) { + CLOGE2("Failed to getBufferManager, ret %d", ret); + goto CLEAN; + } else if (bufferMgr == NULL) { + CLOGE2("BufferMgr is NULL. pipeId %d", pipeId); + goto CLEAN; + } + + ret = m_checkBufferAvailable(pipeId, bufferMgr); + if (ret != NO_ERROR) { + CLOGE2("Waiting buffer timeout, PipeID %d, ret %d", pipeId, ret); + goto CLEAN; + } + + ret = m_setupEntity(pipeId, frame, &srcBuffer, NULL); + if (ret != NO_ERROR) { + CLOGE2("setupEntity fail, bayerPipeId(%d), ret(%d)", pipeId, ret); + goto CLEAN; + } + + factory->pushFrameToPipe(&frame, pipeId); + factory->startThread(pipeId); + + /* Wait reprocesisng done */ + CLOGI2("Wait reprocessing done. frameCount %d", frameCount); + do { + ret = m_reprocessingDoneQ->waitAndPopProcessQ(&frame); + } while (ret == TIMED_OUT && retryCount-- > 0); + + if (ret != NO_ERROR) + CLOGW2("Failed to waitAndPopProcessQ to reprocessingDoneQ. ret %d", ret); + +PUSH_FRAME: + m_pipeCaptureFrameDoneQ->pushProcessQ(&frame); + +CLEAN: + return ret; +} + +status_t ExynosCamera3::m_handleYuvCaptureFrame(ExynosCameraFrame *frame) +{ + status_t ret = NO_ERROR; + ExynosCameraRequest* request = NULL; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer dstBuffer; + int bufIndex = -1; + ExynosCamera3FrameFactory *factory = NULL; + int pipeId_src = -1; + int pipeId_gsc = -1; + int pipeId_jpeg = -1; + + bool isSrc = false; + float zoomRatio = 0.0F; + struct camera2_stream *shot_stream = NULL; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + ExynosRect srcRect, dstRect; + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + factory = request->getFrameFactory(HAL_STREAM_ID_JPEG); + + if (m_parameters->isHWFCEnabled() == true) { + ret = frame->getDstBuffer(PIPE_MCSC_REPROCESSING, &dstBuffer, factory->getNodeType(PIPE_MCSC0_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE2("Failed to getDstBuffer. pipeId %d node %d ret %d", + PIPE_MCSC_REPROCESSING, PIPE_MCSC0_REPROCESSING, ret); + return INVALID_OPERATION; + } + + ret = m_putBuffers(m_yuvCaptureReprocessingBufferMgr, dstBuffer.index); + if (ret != NO_ERROR) { + CLOGE2("Failed to putBuffer to m_yuvCaptureBufferMgr. bufferIndex %d", + dstBuffer.index); + return INVALID_OPERATION; + } + +#if 0 /* TODO : Why this makes error? */ + ret = frame->getDstBuffer(PIPE_MCSC_REPROCESSING, &dstBuffer, factory->getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE2("Failed to getDstBuffer. 
pipeId %d node %d ret %d", + PIPE_3AA_REPROCESSING, PIPE_MCSC4_REPROCESSING, ret); + return INVALID_OPERATION; + } + + ret = m_putBuffers(m_thumbnailBufferMgr, dstBuffer.index); + if (ret != NO_ERROR) { + CLOGE2("Failed to putBuffer to m_thumbnailBufferMgr. bufferIndex %d", + dstBuffer.index); + return INVALID_OPERATION; + } +#endif + } else { + zoomRatio = m_parameters->getZoomRatio(m_parameters->getZoomLevel()) / 1000; + + if (m_parameters->isReprocessing() == true) { + /* We are using only PIPE_ISP_REPROCESSING */ + pipeId_src = PIPE_ISP_REPROCESSING; + pipeId_gsc = PIPE_GSC_REPROCESSING; + pipeId_jpeg = PIPE_JPEG_REPROCESSING; + isSrc = true; + } else { +#if defined(ENABLE_FULL_FRAME) + pipeId_src = PIPE_ISP; + pipeId_gsc = PIPE_GSC_PICTURE; + pipeId_jpeg = PIPE_JPEG; +#else + switch (getCameraId()) { + case CAMERA_ID_FRONT: + pipeId_src = PIPE_ISP; + pipeId_gsc = PIPE_GSC_PICTURE; + break; + default: + CLOGE2("Current picture mode is not yet supported, CameraId(%d), reprocessing(%d)", + getCameraId(), m_parameters->isReprocessing()); + break; + } + pipeId_jpeg = PIPE_JPEG; +#endif + } + /////////////////////////////////////////////////////////// + + // Thumbnail image is currently not used + ret = frame->getDstBuffer(pipeId_src, &dstBuffer, factory->getNodeType(PIPE_MCSC4_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE2("Failed to getDstBuffer. pipeId %d node %d ret %d", + pipeId_src, PIPE_MCSC4_REPROCESSING, ret); + } else { + ret = m_putBuffers(m_thumbnailBufferMgr, dstBuffer.index); + if (ret != NO_ERROR) { + CLOGE2("Failed to putBuffer to m_thumbnailBufferMgr. bufferIndex %d", + dstBuffer.index); + } + CLOGI2("INFO(%s[%d]): Thumbnail image disposed at pipeId %d node %d, FrameCnt %d", + pipeId_src, PIPE_MCSC4_REPROCESSING, frame->getFrameCount()); + } + + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + ret = frame->getDstBuffer(pipeId_src, &srcBuffer); + if (ret < 0) + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_src, ret); + + shot_stream = (struct camera2_stream *)(srcBuffer.addr[srcBuffer.planeCount-1]); + if (shot_stream != NULL) { + CLOGD2("(%d %d %d %d)", + shot_stream->fcount, + shot_stream->rcount, + shot_stream->findex, + shot_stream->fvalid); + CLOGD2("(%d %d %d %d)(%d %d %d %d)", + shot_stream->input_crop_region[0], + shot_stream->input_crop_region[1], + shot_stream->input_crop_region[2], + shot_stream->input_crop_region[3], + shot_stream->output_crop_region[0], + shot_stream->output_crop_region[1], + shot_stream->output_crop_region[2], + shot_stream->output_crop_region[3]); + } else { + CLOGE2("shot_stream is NULL"); + return INVALID_OPERATION; + } + + /* should change size calculation code in pure bayer */ +#if 0 + if (shot_stream != NULL) { + ret = m_calcPictureRect(&srcRect, &dstRect); + ret = newFrame->setSrcRect(pipeId_gsc, &srcRect); + ret = newFrame->setDstRect(pipeId_gsc, &dstRect); + } +#else + m_parameters->getPictureSize(&pictureW, &pictureH); +#if defined(ENABLE_FULL_FRAME) + pictureFormat = m_parameters->getHwPreviewFormat(); +#else + pictureFormat = m_parameters->getHwPictureFormat(); +#endif + + srcRect.x = shot_stream->output_crop_region[0]; + srcRect.y = shot_stream->output_crop_region[1]; + srcRect.w = shot_stream->output_crop_region[2]; + srcRect.h = shot_stream->output_crop_region[3]; + srcRect.fullW = shot_stream->output_crop_region[2]; + srcRect.fullH = shot_stream->output_crop_region[3]; + srcRect.colorFormat = pictureFormat; +#if 0 + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = srcRect.w; + dstRect.h = srcRect.h; + 
dstRect.fullW = srcRect.fullW; + dstRect.fullH = srcRect.fullH; + dstRect.colorFormat = JPEG_INPUT_COLOR_FMT; +#else + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = pictureW; + dstRect.h = pictureH; + dstRect.fullW = pictureW; + dstRect.fullH = pictureH; + dstRect.colorFormat = JPEG_INPUT_COLOR_FMT; +#endif + ret = getCropRectAlign(srcRect.w, srcRect.h, + pictureW, pictureH, + &srcRect.x, &srcRect.y, + &srcRect.w, &srcRect.h, + 2, 2, 0, zoomRatio); + + ret = frame->setSrcRect(pipeId_gsc, &srcRect); + ret = frame->setDstRect(pipeId_gsc, &dstRect); +#endif + + CLOGD2("size (%d, %d, %d, %d %d %d)", + srcRect.x, srcRect.y, srcRect.w, srcRect.h, srcRect.fullW, srcRect.fullH); + CLOGD2("size (%d, %d, %d, %d %d %d)", + dstRect.x, dstRect.y, dstRect.w, dstRect.h, dstRect.fullW, dstRect.fullH); + + ret = m_setupEntity(pipeId_gsc, frame, &srcBuffer, NULL); + + if (ret < 0) + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + + factory->pushFrameToPipe(&frame, pipeId_gsc); + factory->setOutputFrameQToPipe(m_pipeCaptureFrameDoneQ, pipeId_gsc); + + } else { /* m_parameters->needGSCForCapture(getCameraId()) == false */ + ret = frame->getDstBuffer(pipeId_src, &srcBuffer); +#if defined(ENABLE_FULL_FRAME) + if (ret < 0) + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_src, ret); + + ret = m_setupEntity(pipeId_jpeg, frame, &srcBuffer, NULL); + if (ret < 0) { + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", + pipeId_jpeg, ret); + } +#else + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_src, ret); + } else { + /* getting jpeg buffer from service buffer */ + ExynosCameraStream *stream = NULL; + + int streamId = 0; + m_streamManager->getStream(HAL_STREAM_ID_JPEG, &stream); + + if (stream == NULL) { + CLOGE2("stream is NULL"); + return INVALID_OPERATION; + } + + stream->getID(&streamId); + stream->getBufferManager(&bufferMgr); + CLOGV2("streamId(%d), bufferMgr(%p)", streamId, bufferMgr); + /* bufferMgr->printBufferQState(); */ + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuffer); + if (ret < 0) { + CLOGE2("bufferMgr getBuffer fail, frameCount(%d), ret(%d)", + frame->getFrameCount(), ret); + } + } + ret = m_setupEntity(pipeId_jpeg, frame, &srcBuffer, &dstBuffer); + if (ret < 0) + CLOGE2("setupEntity fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); +#endif + factory->setOutputFrameQToPipe(m_pipeCaptureFrameDoneQ, pipeId_jpeg); + factory->pushFrameToPipe(&frame, pipeId_jpeg); + } + } + + return ret; +} + +status_t ExynosCamera3::m_handleJpegFrame(ExynosCameraFrame *frame) +{ + status_t ret = 0; + int pipeId_jpeg = -1; + int pipeId_src = -1; + ExynosCameraRequest *request = NULL; + ExynosCamera3FrameFactory * factory = NULL; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCameraBuffer buffer; + int jpegOutputSize = -1; + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + factory = request->getFrameFactory(HAL_STREAM_ID_JPEG); + + ////////////////////////////////////////////////////////// + /* TODO: Need to decision pipeId both current and next */ + if (m_parameters->isReprocessing() == true) { + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + pipeId_src = PIPE_GSC_REPROCESSING; + } else { + pipeId_src = (m_parameters->isOwnScc(getCameraId()) == true) ? 
PIPE_SCC_REPROCESSING : PIPE_ISPC_REPROCESSING; + } + pipeId_jpeg = PIPE_JPEG_REPROCESSING; + } else { + if (m_parameters->needGSCForCapture(getCameraId()) == true) { + pipeId_src = PIPE_GSC_PICTURE; + } else { + if (m_parameters->isOwnScc(getCameraId()) == true) { + pipeId_src = PIPE_SCC; + } else { + pipeId_src = PIPE_ISPC; + } + } + pipeId_jpeg = PIPE_JPEG; + } + ////////////////////////////////////////////////////////// + + if (m_parameters->isHWFCEnabled() == true) { + entity_buffer_state_t bufferState = ENTITY_BUFFER_STATE_NOREQ; + ret = frame->getDstBufferState(PIPE_MCSC_REPROCESSING, &bufferState, factory->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE2("Failed to getDstBufferState. frameCount %d pipeId %d node %d", + frame->getFrameCount(), PIPE_MCSC_REPROCESSING, PIPE_HWFC_JPEG_DST_REPROCESSING); + return INVALID_OPERATION; + } else if (bufferState == ENTITY_BUFFER_STATE_ERROR) { + CLOGE2("Invalid JPEG buffer state. frameCount %d bufferState %d", + frame->getFrameCount(), bufferState); + return INVALID_OPERATION; + } + + ret = frame->getDstBuffer(PIPE_MCSC_REPROCESSING, &buffer, factory->getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)); + if (ret != NO_ERROR) { + CLOGE2("Failed to getDstBuffer. frameCount %d pipeId %d node %d", + frame->getFrameCount(), PIPE_MCSC_REPROCESSING, PIPE_HWFC_JPEG_DST_REPROCESSING); + return INVALID_OPERATION; + } + } else { + ret = frame->setEntityState(pipeId_jpeg, ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE2("set entity state fail, ret(%d)", ret); + /* TODO: doing exception handling */ + return OK; + } + + /* handle src buffer of jpeg */ + ret = m_getBufferManager(pipeId_src, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, pipeId(%d), ret(%d)", pipeId_src, ret); + return ret; + } + + ret = frame->getSrcBuffer(pipeId_jpeg, &buffer); + if (ret < 0) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + } + + ret = m_putBuffers(bufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("bufferMgr(DST)->putBuffers() fail, pipeId(%d), ret(%d)", pipeId_src, ret); + } + + /* + * handle dst buffer of jpeg + * - JPEG image must be final result of stream. 
+ */ +#if 0 + ret = m_getBufferManager(pipeId_jpeg, &bufferMgr, DST_BUFFER_DIRECTION); + if (ret < 0) { + CLOGE2("getBufferManager(DST) fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + return ret; + } +#endif + + ret = frame->getDstBuffer(pipeId_jpeg, &buffer); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeId_jpeg, ret); + } + } + + jpegOutputSize = frame->getJpegSize(); + CLOGI2("jpeg output done, jpeg size(%d)", jpegOutputSize); + + if (jpegOutputSize <= 0) { + CLOGW2("jpegOutput size(%d) is invalid", jpegOutputSize); + jpegOutputSize = buffer.size[0]; + } + + /* frame->printEntity(); */ + m_pushJpegResult(frame, jpegOutputSize, &buffer); + m_captureCount--; + + return ret; +} + +status_t ExynosCamera3::m_handleBayerBuffer(ExynosCameraFrame *frame) +{ + status_t ret = NO_ERROR; + uint32_t bufferDirection = INVALID_BUFFER_DIRECTION; + uint32_t pipeId = MAX_PIPE_NUM; + ExynosCameraBuffer buffer; + ExynosCameraBufferManager *bufferMgr = NULL; + ExynosCamera3FrameFactory *factory = NULL; + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):Frame is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* Decide the bayer buffer position and pipe ID */ + if (m_parameters->isFlite3aaOtf() == true) { + pipeId = PIPE_FLITE; + bufferDirection = DST_BUFFER_DIRECTION; + ret = frame->getDstBuffer(pipeId, &buffer); + } else { + pipeId = PIPE_3AA; + bufferDirection = SRC_BUFFER_DIRECTION; + ret = frame->getSrcBuffer(pipeId, &buffer); + } + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Get bayer buffer failed, framecount(%d), direction(%d), pipeId(%d)", + __FUNCTION__, __LINE__, + frame->getFrameCount(), bufferDirection, pipeId); + return ret; + } + + /* Check the validation of bayer buffer */ + if (buffer.index < 0) { + CLOGE("ERR(%s[%d]):Invalid bayer buffer, framecount(%d), direction(%d), pipeId(%d)", + __FUNCTION__, __LINE__, + frame->getFrameCount(), bufferDirection, pipeId); + return INVALID_OPERATION; + } + + if (pipeId == PIPE_3AA) { + struct camera2_shot_ext *shot_ext = NULL; + + shot_ext = (struct camera2_shot_ext *) buffer.addr[buffer.planeCount -1]; + CLOGW("WRN(%s[%d]):Timestamp(%lld)", __FUNCTION__, __LINE__, shot_ext->shot.dm.sensor.timeStamp); + + ret = m_updateTimestamp(frame, &buffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to update time stamp", __FUNCTION__, __LINE__); + return ret; + } + } + + /* Handle the bayer buffer */ + if (frame->getFrameServiceBayer() == true) { + /* Raw Capture Request */ + CLOGD("INFO(%s[%d]):Handle service bayer buffer. FLITE-3AA_OTF %d Bayer_Pipe_ID %d Framecount %d", + __FUNCTION__, __LINE__, + m_parameters->isFlite3aaOtf(), pipeId, frame->getFrameCount()); + ret = m_sendRawCaptureResult(frame, pipeId, (bufferDirection == SRC_BUFFER_DIRECTION)); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to sendRawCaptureResult. frameCount %d bayerPipeId %d bufferIndex %d", + __FUNCTION__, __LINE__, + frame->getFrameCount(), pipeId, buffer.index); + return ret; + } + + if (m_parameters->isReprocessing() == true && frame->getFrameCapture() == true) { + CLOGD("DEBUG(%s[%d]):Hold service bayer buffer for reprocessing. frameCount %d bayerPipeId %d bufferIndex %d", + __FUNCTION__, __LINE__, + frame->getFrameCount(), pipeId, buffer.index); + ret = m_captureZslSelector->manageFrameHoldListForDynamicBayer(frame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to manageFrameHoldListForDynamicBayer to captureZslSelector. 
frameCount %d bayerPipeId %d bufferIndex %d", + __FUNCTION__, __LINE__, + frame->getFrameCount(), pipeId, buffer.index); + return ret; + } + } + } else if (frame->getFrameZsl() == true) { + /* ZSL Capture Request */ + CLOGV("INFO(%s[%d]):Handle ZSL buffer. FLITE-3AA_OTF %d Bayer_Pipe_ID %d Framecount %d", + __FUNCTION__, __LINE__, + m_parameters->isFlite3aaOtf(), pipeId, frame->getFrameCount()); + ret = m_sendZSLCaptureResult(frame, pipeId, (bufferDirection == SRC_BUFFER_DIRECTION)); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to sendZslCaptureResult. frameCount %d bayerPipeId %d", + __FUNCTION__, __LINE__, + frame->getFrameCount(), pipeId); + } + } else if (m_parameters->isReprocessing() == true){ + /* For ZSL Reprocessing */ + CLOGV("INFO(%s[%d]):Hold internal bayer buffer for reprocessing. FLITE-3AA_OTF %d Bayer_Pipe_ID %d Framecount %d", + __FUNCTION__, __LINE__, + m_parameters->isFlite3aaOtf(), pipeId, frame->getFrameCount()); + ret = m_captureSelector->manageFrameHoldList(frame, pipeId, (bufferDirection == SRC_BUFFER_DIRECTION)); + } else { + /* No request for bayer image */ + CLOGV("INFO(%s[%d]):Return internal bayer buffer. FLITE-3AA_OTF %d Bayer_Pipe_ID %d Framecount %d", + __FUNCTION__, __LINE__, + m_parameters->isFlite3aaOtf(), pipeId, frame->getFrameCount()); + ret = m_getBufferManager(pipeId, &bufferMgr, bufferDirection); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getBufferManager failed, pipeId(%d), bufferDirection(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, bufferDirection, ret); + return ret; + } + + ret = m_putBuffers(bufferMgr, buffer.index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):putBuffers failed, pipeId(%d), bufferDirection(%d), bufferIndex(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, bufferDirection, buffer.index, ret); + return ret; + } + } + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Handling bayer buffer failed, isServiceBayer(%d), direction(%d), pipeId(%d)", + __FUNCTION__, __LINE__, + frame->getFrameServiceBayer(), + bufferDirection, pipeId); + } + + return ret; +} + +bool ExynosCamera3::m_previewStreamBayerPipeThreadFunc(void) +{ + status_t ret = 0; + ExynosCameraFrame *newFrame = NULL; + + ret = m_pipeFrameDoneQ[PIPE_FLITE]->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + /* CLOGW2("wait timeout"); */ + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + return true; + } + return m_previewStreamFunc(newFrame, PIPE_FLITE); +} + +bool ExynosCamera3::m_previewStream3AAPipeThreadFunc(void) +{ + status_t ret = 0; + ExynosCameraFrame *newFrame = NULL; + + ret = m_pipeFrameDoneQ[PIPE_3AA]->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW2("wait timeout"); + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + return true; + } + return m_previewStreamFunc(newFrame, PIPE_3AA); +} + +bool ExynosCamera3::m_previewStreamISPPipeThreadFunc(void) +{ + status_t ret = 0; + ExynosCameraFrame *newFrame = NULL; + + ret = m_pipeFrameDoneQ[PIPE_ISP]->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW2("wait timeout"); + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + return true; + } + return m_previewStreamFunc(newFrame, PIPE_ISP); +} + 
+bool ExynosCamera3::m_previewStreamVRAPipeThreadFunc(void) +{ + status_t ret = 0; + ExynosCameraFrame *newFrame = NULL; + + ret = m_pipeFrameDoneQ[PIPE_VRA]->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW2("wait timeout"); + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + return true; + } + return m_previewStreamFunc(newFrame, PIPE_VRA); +} + +bool ExynosCamera3::m_previewStreamFunc(ExynosCameraFrame *newFrame, int pipeId) +{ + status_t ret = 0; + int index = 0; + //result_info_t *resultInfo = NULL; + entity_state_t entityState = ENTITY_STATE_COMPLETE; + int type = CAMERA3_MSG_SHUTTER; + /* only trace */ + unsigned int halFrameCount = 0; + + if (newFrame != NULL) { + halFrameCount = newFrame->getFrameCount(); + } else { + CLOGE2("frame is NULL"); + return true; + } + CLOGV2("stream thread : frameCnt(%d) , pipeId(%d)", halFrameCount, pipeId); + + //newFrame->dump(); + + /* internal frame */ + if (newFrame->getFrameType() == FRAME_TYPE_INTERNAL) { + CLOGV2("push to m_internalFrameDoneQ handler : internalFrame frameCnt(%d)", newFrame->getFrameCount()); + m_internalFrameDoneQ->pushProcessQ(&newFrame); + return true; + } + + //CLOGV2("stream thread : frameCnt(%d) , pipeId(%d)", halFrameCount, pipeId); + + /* TODO: M2M path is also handled by this */ + ret = m_handlePreviewFrame(newFrame, pipeId); + if (ret < 0) { + CLOGE2("handle preview frame fail"); + return ret; + } + //CLOGD2("+++++++++++++++++++++++++++++++++++++++++++++"); + //newFrame->dump(); + //CLOGD2("---------------------------------------------"); + + if (newFrame->isComplete() == true) { + + m_sendNotify(newFrame->getFrameCount(), type); + + CLOGV2("newFrame->getFrameCount(%d)", newFrame->getFrameCount()); + + ret = m_removeFrameFromList(&m_processList, &m_processLock, newFrame); + + if (ret < 0) { + CLOGE2("remove frame from processList fail, ret(%d)", ret); + } + + CLOGV2("frame complete, count(%d)", newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + m_captureResultDoneCondition.signal(); + } + + CLOGV2("stream thread : frameCnt(%d) , pipeId(%d)", halFrameCount, pipeId); + + return true; +} + +status_t ExynosCamera3::m_updateTimestamp(ExynosCameraFrame *frame, ExynosCameraBuffer *timestampBuffer, bool flagPushResult) +{ + struct camera2_shot_ext *shot_ext = NULL; + status_t ret = NO_ERROR; + + /* handle meta data */ + shot_ext = (struct camera2_shot_ext *) timestampBuffer->addr[timestampBuffer->planeCount -1]; + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext is NULL. 
framecount %d buffer %d", + __FUNCTION__, __LINE__, frame->getFrameCount(), timestampBuffer->index); + return INVALID_OPERATION; + } + + uint64_t timeStamp = shot_ext->shot.dm.sensor.timeStamp; + uint64_t frameDuration = shot_ext->shot.dm.sensor.frameDuration; + + /* HACK: W/A for timeStamp reversion */ + if (timeStamp < (uint64_t)m_lastFrametime) { + CLOGW2("Timestamp is %lld!, m_lastFrametime(%lld)", + timeStamp, m_lastFrametime); + + if (frameDuration > 0) + timeStamp = m_lastFrametime + frameDuration; + else + timeStamp = m_lastFrametime + 15000000; + } + + if (m_lastFrametime > 0 + && timeStamp > (uint64_t)m_lastFrametime + 100000000) { /* 1sec */ + CLOGW2("Timestamp is %lld!, m_lastFrametime(%lld)", + timeStamp, m_lastFrametime); + } + + m_lastFrametime = timeStamp; + shot_ext->shot.udm.sensor.timeStampBoot = timeStamp; + + if (flagPushResult == true) + ret = m_pushResult(frame->getFrameCount(), shot_ext); + + return ret; +} + +status_t ExynosCamera3::m_handlePreviewFrame(ExynosCameraFrame *frame, int pipeId) +{ + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer buffer; + ExynosCamera3FrameFactory *factory = NULL; + ExynosCameraBuffer t3acBuffer; + int32_t reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + + struct camera2_shot_ext *shot_ext; + struct camera2_shot_ext meta_shot_ext; + struct camera2_dm *dm = NULL; + entity_state_t entityState = ENTITY_STATE_COMPLETE; + status_t ret = NO_ERROR; + uint32_t framecount = 0; + + entity = frame->getFrameDoneFirstEntity(pipeId); + if (entity == NULL) { + CLOGE2("current entity is NULL pipeID(%d)", pipeId); + /* TODO: doing exception handling */ + return true; + } + + CLOGV2("handle preview frame : previewStream frameCnt(%d) entityID(%d)", frame->getFrameCount(), entity->getPipeId()); + + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + + switch(pipeId) { + case PIPE_3AA_ISP: + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + return ret; + } + + /* handle meta data */ + shot_ext = (struct camera2_shot_ext *) buffer.addr[buffer.planeCount -1]; + memset(&meta_shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + memcpy(&meta_shot_ext, shot_ext, sizeof(struct camera2_shot_ext)); + ret = m_pushResult(frame->getFrameCount(), &meta_shot_ext); + + ret = m_putBuffers(m_3aaBufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("put Buffer fail"); + } + + CLOGV2("3AA_ISP frameCount(%d) frame.Count(%d)", + getMetaDmRequestFrameCount((struct camera2_shot_ext *)buffer.addr[buffer.planeCount-1]), + frame->getFrameCount()); + + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + return ret; + } + + ret = m_putBuffers(m_ispBufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("put Buffer fail"); + break; + } + + frame->setMetaDataEnable(true); + dm = &(meta_shot_ext.shot.dm); + if (dm == NULL) { + CLOGE2("dm data is null"); + return INVALID_OPERATION; + } + + break; + case PIPE_3AA: + /* Notify ShotDone to mainThread */ + framecount = frame->getFrameCount(); + m_shotDoneQ->pushProcessQ(&framecount); + + /* 1. 
Handle the buffer from 3AA output node */ + if (m_parameters->isFlite3aaOtf() == true) { + ExynosCameraBufferManager *bufferMgr = NULL; + + /* Return the dummy buffer */ + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + ALOGE("ERR(%s[%d]):getSrcBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } else if (buffer.index < 0) { + ALOGE("ERR(%s[%d]):Invalid buffer index(%d), framecount(%d), pipeId(%d)", + __FUNCTION__, __LINE__, + buffer.index, frame->getFrameCount(), entity->getPipeId()); + return BAD_VALUE; + } + + if (m_parameters->is3aaIspOtf() == true) { + ret = m_updateTimestamp(frame, &buffer); + if (ret != NO_ERROR) { + CLOGE2("[F%d B%d]Failed to updateTimestamp", + frame->getFrameCount(), buffer.index); + return ret; + } + } + + ret = m_putBuffers(m_3aaBufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("[F%d]Failed to put buffer %d to 3aaBufferMgr", + frame->getFrameCount(), buffer.index); + break; + } + } else { + ret = m_handleBayerBuffer(frame); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Failed to handle bayerBuffer. framecount %d pipeId %d ret %d", + __FUNCTION__, __LINE__, + frame->getFrameCount(), entity->getPipeId(), ret); + return ret; + } + } + + frame->setMetaDataEnable(true); + + if (frame->getFrameZsl() == true) { + /* ZSL Capture Request */ + CLOGD2("Handle ZSL buffer. FLITE-3AA_OTF %d Framecount %d", + m_parameters->isFlite3aaOtf(), frame->getFrameCount()); + ret = m_sendZSLCaptureResult(frame, PIPE_3AC, false); + if (ret != NO_ERROR) { + CLOGE2("Failed to sendZslCaptureResult. frameCount %d", + frame->getFrameCount()); + } + } + + t3acBuffer.index = -1; + + if (frame->getRequest(PIPE_3AC) == true) { + ret = frame->getDstBuffer(entity->getPipeId(), &t3acBuffer, factory->getNodeType(PIPE_3AC)); + if (ret != NO_ERROR) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + } + } + + if (m_parameters->isReprocessing() == true) { + if (m_captureSelector == NULL) { + CLOGE2("m_captureSelector is NULL"); + return INVALID_OPERATION; + } + } else { + if (m_sccCaptureSelector == NULL) { + CLOGE2("m_sccCaptureSelector is NULL"); + return INVALID_OPERATION; + } + } + + if (0 <= t3acBuffer.index) { + if (m_parameters->isUseYuvReprocessing() == true + || frame->getFrameCapture() == true) { + if (m_parameters->getHighSpeedRecording() == true) { + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_yuvCaptureBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_fliteBufferMgr, t3acBuffer.index); + + if (ret < 0) { + CLOGE2("m_putBuffers(m_fliteBufferMgr, %d) fail", t3acBuffer.index); + break; + } + } else { + entity_buffer_state_t bufferstate = ENTITY_BUFFER_STATE_NOREQ; + ret = frame->getDstBufferState(entity->getPipeId(), &bufferstate, factory->getNodeType(PIPE_3AC)); + if (ret == NO_ERROR && bufferstate != ENTITY_BUFFER_STATE_ERROR) { + if (m_parameters->isUseYuvReprocessing() == false + && m_parameters->isUsing3acForIspc() == true) + ret = m_sccCaptureSelector->manageFrameHoldListForDynamicBayer(frame); + else + ret = m_captureSelector->manageFrameHoldList(frame, entity->getPipeId(), false, factory->getNodeType(PIPE_3AC)); + + if (ret < 0) { + CLOGE2("manageFrameHoldList fail"); + return ret; + } + } else { + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_yuvCaptureBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_fliteBufferMgr, t3acBuffer.index); + + if (ret < 0) { + CLOGE2("m_putBuffers(m_fliteBufferMgr, %d) 
fail", t3acBuffer.index); + break; + } + } + } + } else { + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON) { + CLOGW2("frame->getRequest(PIPE_3AC) == false. so, just m_putBuffers(t3acBuffer.index(%d)..., pipeId(%d), ret(%d)", + t3acBuffer.index, entity->getPipeId(), ret); + } + + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_yuvCaptureBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_fliteBufferMgr, t3acBuffer.index); + + if (ret < 0) { + CLOGE2("m_putBuffers(m_fliteBufferMgr, %d) fail", t3acBuffer.index); + break; + } + } + } + case PIPE_SCP: + ret = frame->getDstBuffer(entity->getPipeId(), &buffer, factory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + return ret; + } + + CLOGV2("SCP frameCount(%d) frame.Count(%d) index(%d)", + ((struct camera2_stream *)buffer.addr[buffer.planeCount-1])->fcount, + frame->getFrameCount(), + buffer.index); + // TODO: extract this + + if (frame->getFrameCapture() == true) { + ret = frame->setEntityState(entity->getPipeId(), ENTITY_STATE_COMPLETE); + if (ret < 0) { + CLOGE2("setEntityState fail, pipeId(%d), state(%d), ret(%d)", + entity->getPipeId(), ENTITY_STATE_COMPLETE, ret); + return ret; + } + + CLOGI2("Capture frame(%d)", frame->getFrameCount()); +#if defined(ENABLE_FULL_FRAME) + ret = m_handleIsChainDone(frame); + if (ret < 0) + CLOGE2("ERR(%s[%d]):m_handleIsChainDone fail, ret(%d)", ret); + + m_captureStreamThread->run(PRIORITY_DEFAULT); +#endif + } + + m_generateDuplicateBuffers(frame, entity->getPipeId()); + + break; + case PIPE_VRA: + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + if (entity->getDstBufState() != ENTITY_BUFFER_STATE_ERROR) { + /* Face detection update */ + m_pushResult(frame->getFrameCount(), (struct camera2_shot_ext*)buffer.addr[buffer.planeCount-1]); + } + + ret = m_vraBufferMgr->putBuffer(buffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):Put VRA buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + break; + case PIPE_3AC: + case PIPE_FLITE: + /* 1. Handle bayer buffer */ + if (m_parameters->isFlite3aaOtf() == true) { + ret = m_handleBayerBuffer(frame); + if (ret < NO_ERROR) { + ALOGE("ERR(%s[%d]):Handle bayer buffer failed, framecount(%d), pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount(), entity->getPipeId(), ret); + return ret; + } + } else { + /* Send the bayer buffer to 3AA Pipe */ + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + ALOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index < 0) { + ALOGE("ERR(%s[%d]):Invalid buffer index(%d), framecount(%d), pipeId(%d)", + __FUNCTION__, __LINE__, + buffer.index, frame->getFrameCount(), entity->getPipeId()); + return BAD_VALUE; + } + ALOGV("DEBUG(%s[%d]):Deliver Flite Buffer to 3AA. 
driver->framecount %d hal->framecount %d", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)buffer.addr[buffer.planeCount-1]), + frame->getFrameCount()); + + ret = m_setupEntity(PIPE_3AA, frame, &buffer, NULL); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setSrcBuffer failed, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, PIPE_3AA, ret); + return ret; + } + + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + factory->setOutputFrameQToPipe(m_pipeFrameDoneQ[PIPE_3AA], PIPE_3AA); + factory->pushFrameToPipe(&frame, PIPE_3AA); + } + + break; + case PIPE_ISP: + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + ret = m_updateTimestamp(frame, &buffer); + if (ret != NO_ERROR) { + CLOGE2("[F%d B%d]Failed to updateTimestamp", + frame->getFrameCount(), buffer.index); + return ret; + } + + ret = m_putBuffers(m_ispBufferMgr, buffer.index); + if (ret < 0) { + CLOGE2("put Buffer fail"); + break; + } + } + + /* break; *//* MCpipe case */ + default: + CLOGE2("Invalid pipe ID"); + break; + } + + ret = frame->setEntityState(entity->getPipeId(), entityState); + if (ret < 0) { + CLOGE2("setEntityState fail, pipeId(%d), state(%d), ret(%d)", + entity->getPipeId(), ENTITY_STATE_COMPLETE, ret); + return ret; + } + + return ret; +} + +status_t ExynosCamera3::m_generateDuplicateBuffers(ExynosCameraFrame *frame, int pipeIdSrc) +{ + status_t ret = NO_ERROR; + ExynosCameraRequest *halRequest = NULL; + camera3_capture_request *serviceRequest = NULL; + const camera3_stream_buffer_t *targetBuffer = NULL; + const camera3_stream_buffer_t *targetBufferList = NULL; + + ExynosCameraStream *stream = NULL; + int streamId = 0; + + List keylist; + List::iterator iter; + keylist.clear(); + + List *outputStreamId; + List::iterator outputStreamIdIter; + + if (frame == NULL) { + CLOGE2("frame is NULL"); + return INVALID_OPERATION; + } + + halRequest = m_requestMgr->getServiceRequest(frame->getFrameCount()); + if (halRequest == NULL) { + CLOGE2("halRequest is NULL"); + return INVALID_OPERATION; + } + + serviceRequest = halRequest->getService(); + if (serviceRequest == NULL) { + CLOGE2("serviceRequest is NULL"); + return INVALID_OPERATION; + } + + CLOGV2("frame->getFrameCount(%d) halRequest->getFrameCount(%d) serviceRequest->num_output_buffers(%d)", + frame->getFrameCount(), + halRequest->getFrameCount(), + serviceRequest->num_output_buffers); + + halRequest->getAllRequestOutputStreams(&outputStreamId); + + if (outputStreamId->size() > 0) { + for (outputStreamIdIter = outputStreamId->begin(); outputStreamIdIter != outputStreamId->end(); outputStreamIdIter++) { + m_streamManager->getStream(*outputStreamIdIter, &stream); + + if (stream == NULL) { + CLOGE2("stream is NULL"); + continue; + } + + stream->getID(&streamId); + + switch (streamId % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_RAW: + break; + case HAL_STREAM_ID_PREVIEW: + m_doDestCSC(true, frame, pipeIdSrc, streamId, PIPE_GSC); + break; + case HAL_STREAM_ID_VIDEO: + m_doDestCSC(true, frame, pipeIdSrc, streamId, PIPE_GSC_VIDEO); + break; + case HAL_STREAM_ID_JPEG: + break; + case HAL_STREAM_ID_CALLBACK: + m_doDestCSC(true, frame, pipeIdSrc, streamId, PIPE_GSC); + break; + default: + break; + } + } + } + + return ret; +} + +bool ExynosCamera3::m_duplicateBufferThreadFunc(void) +{ + status_t ret = 0; + int index = 0; + ExynosCameraFrame *newFrame= NULL; + dup_buffer_info_t dupBufferInfo; + 
ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer dstBuffer; + ExynosCameraStream *stream = NULL; + camera3_stream_buffer_t streamBuffer; + ExynosCameraRequest *request = NULL; + ResultRequest resultRequest = NULL; + int actualFormat = 0; + int bufIndex = -1; + + unsigned int completeBufferCount = 0; + + ExynosCameraBufferManager *bufferMgr = NULL; + + ret = m_duplicateBufferDoneQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW2("wait timeout"); + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + return true; + } + + if (newFrame == NULL) { + CLOGE2("frame is NULL"); + goto func_exit; + } + + CLOGV2("CSC done (frameCount(%d))", newFrame->getFrameCount()); + + dupBufferInfo = newFrame->getDupBufferInfo(); + CLOGV2("streamID(%d), extScalerPipeID(%d)", dupBufferInfo.streamID, dupBufferInfo.extScalerPipeID); + + ret = m_streamManager->getStream(dupBufferInfo.streamID, &stream); + if (ret < 0) { + CLOGE2("getStream is failed, from streammanager. Id error:HAL_STREAM_ID_PREVIEW"); + goto func_exit; + } + + ret = stream->getStream(&streamBuffer.stream); + if (ret < 0) { + CLOGE2("ERR(%s[%d]):getStream is failed, from exynoscamerastream. Id error:HAL_STREAM_ID_PREVIEW"); + goto func_exit; + } + + stream->getBufferManager(&bufferMgr); + CLOGV2("bufferMgr(%p)", bufferMgr); + + ret = newFrame->getDstBuffer(dupBufferInfo.extScalerPipeID, &dstBuffer); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", dupBufferInfo.extScalerPipeID, ret); + goto func_exit; + } + + ret = bufferMgr->getHandleByIndex(&streamBuffer.buffer, dstBuffer.index); + if (ret != OK) { + CLOGE2("Buffer index error(%d)!!", dstBuffer.index); + goto func_exit; + } + + /* update output stream buffer information */ + streamBuffer.status = CAMERA3_BUFFER_STATUS_OK; + streamBuffer.acquire_fence = -1; + streamBuffer.release_fence = -1; + + request = m_requestMgr->getServiceRequest(newFrame->getFrameCount()); + + resultRequest = m_requestMgr->createResultRequest(newFrame->getFrameCount(), EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY, NULL, NULL); + resultRequest->pushStreamBuffer(&streamBuffer); + + m_requestMgr->callbackSequencerLock(); + request->increaseCompleteBufferCount(); + request->increaseDuplicateBufferCount(); + completeBufferCount = request->getNumOfOutputBuffer(); + if (newFrame->getFrameCapture() == true) + completeBufferCount --; + if (newFrame->getFrameServiceBayer() == true) + completeBufferCount --; + + CLOGV2("OutputBuffer(%d) CompleteBufferCount(%d) DuplicateBufferCount(%d) streamID(%d), extScaler(%d), frame: Count(%d), ServiceBayer(%d), Capture(%d) completeBufferCount(%d)", + request->getNumOfOutputBuffer(), + request->getCompleteBufferCount(), + request->getDuplicateBufferCount(), + dupBufferInfo.streamID, + dupBufferInfo.extScalerPipeID, + newFrame->getFrameCount(), + newFrame->getFrameServiceBayer(), + newFrame->getFrameCapture(), + completeBufferCount); + + if(completeBufferCount == (unsigned int)request->getDuplicateBufferCount()) { + ret = newFrame->getSrcBuffer(dupBufferInfo.extScalerPipeID, &srcBuffer); + if (srcBuffer.index >= 0) { + CLOGV2("Internal Scp Buffer is returned index(%d)frameCount(%d)", srcBuffer.index, newFrame->getFrameCount()); + ret = m_putBuffers(m_internalScpBufferMgr, srcBuffer.index); + if (ret < 0) { + CLOGE2("put Buffer fail"); + } + } + } + m_requestMgr->callbackRequest(resultRequest); + m_requestMgr->callbackSequencerUnlock(); + 
+func_exit: + if (newFrame != NULL ) { + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame);; + newFrame = NULL; + } + + return true; +} + +status_t ExynosCamera3::m_doDestCSC(bool enableCSC, ExynosCameraFrame *frame, int pipeIdSrc, int halStreamId, int pipeExtScalerId) +{ + status_t ret = OK; + ExynosCameraFrame *newFrame = NULL; + ExynosRect srcRect, dstRect; + ExynosCamera3FrameFactory *factory = NULL; + ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer dstBuffer; + ExynosCameraStream *stream = NULL; + camera3_stream_buffer_t streamBuffer; + ExynosCameraRequest *request = NULL; + ResultRequest resultRequest = NULL; + int actualFormat = 0; + int bufIndex = -1; + dup_buffer_info_t dupBufferInfo; + struct camera2_stream *meta = NULL; + uint32_t *output = NULL; + + ExynosCameraBufferManager *bufferMgr = NULL; + + + if (enableCSC == false) { + /* TODO: memcpy srcBuffer, dstBuffer */ + return NO_ERROR; + } + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + factory = request->getFrameFactory(halStreamId); + ret = frame->getDstBuffer(pipeIdSrc, &srcBuffer, factory->getNodeType(PIPE_SCP)); + if (ret < 0) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", pipeIdSrc, ret); + return ret; + } + + ret = m_streamManager->getStream(halStreamId, &stream); + if (ret < 0) { + CLOGE2("getStream is failed, from streammanager. Id error:HAL_STREAM_ID_PREVIEW"); + return ret; + } + + ret = stream->getStream(&streamBuffer.stream); + if (ret < 0) { + CLOGE2("getStream is failed, from exynoscamerastream. Id error:HAL_STREAM_ID_PREVIEW"); + return ret; + } + + meta = (struct camera2_stream*)srcBuffer.addr[srcBuffer.planeCount-1]; + output = meta->output_crop_region; + + stream->getBufferManager(&bufferMgr); + CLOGV2("bufferMgr(%p)", bufferMgr); + + ret = bufferMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuffer); + if (ret < 0) { + CLOGE2("bufferMgr getBuffer fail, frameCount(%d), ret(%d)", frame->getFrameCount(), ret); + return ret; + } + + srcRect.x = 0; + srcRect.y = 0; + srcRect.w = output[2]; + srcRect.h = output[3]; + srcRect.fullW = output[2]; + srcRect.fullH = output[3]; + srcRect.colorFormat = m_parameters->getHwPreviewFormat(); + + dstRect.x = 0; + dstRect.y = 0; + dstRect.fullW = dstRect.w = streamBuffer.stream->width; + dstRect.fullH = dstRect.h = streamBuffer.stream->height; + stream->getFormat(&actualFormat); + dstRect.colorFormat = HAL_PIXEL_FORMAT_2_V4L2_PIX(actualFormat); + + ret = getCropRectAlign(srcRect.w, srcRect.h, + dstRect.w, dstRect.h, + &srcRect.x, &srcRect.y, + &srcRect.w, &srcRect.h, + 2, 2, + 0, 1); + + newFrame = factory->createNewFrameOnlyOnePipe(pipeExtScalerId, frame->getFrameCount()); + + ret = newFrame->setSrcRect(pipeExtScalerId, srcRect); + if (ret != NO_ERROR) { + CLOGE2("setSrcRect fail, frameCount(%d), ret(%d)", frame->getFrameCount(), ret); + return ret; + } + + ret = newFrame->setDstRect(pipeExtScalerId, dstRect); + if (ret != NO_ERROR) { + CLOGE2("setDstRect fail, frameCount(%d), ret(%d)", frame->getFrameCount(), ret); + return ret; + } + + CLOGV2("srcRect size (%d, %d, %d, %d %d %d)", + srcRect.x, srcRect.y, srcRect.w, srcRect.h, srcRect.fullW, srcRect.fullH); + CLOGV2("dstRect size (%d, %d, %d, %d %d %d)", + dstRect.x, dstRect.y, dstRect.w, dstRect.h, dstRect.fullW, dstRect.fullH); + + /* GSC can be shared by preview and previewCb. Make sure dstBuffer for previewCb buffer. 
*/ + /* m_resetBufferState(pipeExtScalerId, frame); */ + m_resetBufferState(pipeExtScalerId, newFrame); + + ret = m_setupEntity(pipeExtScalerId, newFrame, &srcBuffer, &dstBuffer); + if (ret < 0) { + CLOGE2("setupEntity fail, pipeExtScalerId(%d), ret(%d)", pipeExtScalerId, ret); + } + + dupBufferInfo.streamID = halStreamId; + dupBufferInfo.extScalerPipeID = pipeExtScalerId; + newFrame->setDupBufferInfo(dupBufferInfo); + newFrame->setFrameCapture(frame->getFrameCapture()); + newFrame->setFrameServiceBayer(frame->getFrameServiceBayer()); + + factory->setOutputFrameQToPipe(m_duplicateBufferDoneQ, pipeExtScalerId); + factory->pushFrameToPipe(&newFrame, pipeExtScalerId); + + return ret; +} + +status_t ExynosCamera3::m_releaseBuffers(void) +{ + CLOGI2("release buffer"); + int ret = 0; + enum EXYNOS_CAMERA_BUFFER_PERMISSION permission; + enum EXYNOS_CAMERA_BUFFER_POSITION position; + + /* Pull all internal buffers */ + for (int bufIndex = 0; bufIndex < m_fliteBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_fliteBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_3aaBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_3aaBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_internalScpBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_internalScpBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_yuvCaptureBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_yuvCaptureBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_vraBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_vraBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_gscBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_gscBufferMgr, bufIndex); + for (int bufIndex = 0; bufIndex < m_ispBufferMgr->getAllocatedBufferCount(); bufIndex++) + ret = m_putBuffers(m_ispBufferMgr, bufIndex); + + if (m_bayerBufferMgr != NULL) { + m_bayerBufferMgr->deinit(); + } + + if (m_fliteBufferMgr != NULL) { + m_fliteBufferMgr->deinit(); + } + + if (m_3aaBufferMgr != NULL) { + m_3aaBufferMgr->deinit(); + } + if (m_ispBufferMgr != NULL) { + m_ispBufferMgr->deinit(); + } + if (m_internalScpBufferMgr != NULL) { + m_internalScpBufferMgr->deinit(); + } + if (m_ispReprocessingBufferMgr != NULL) { + m_ispReprocessingBufferMgr->deinit(); + } + if (m_yuvCaptureBufferMgr != NULL) { + m_yuvCaptureBufferMgr->deinit(); + } + if (m_vraBufferMgr != NULL) { + m_vraBufferMgr->deinit(); + } + if (m_gscBufferMgr != NULL) { + m_gscBufferMgr->deinit(); + } + if (m_yuvCaptureReprocessingBufferMgr != NULL) { + m_yuvCaptureReprocessingBufferMgr->deinit(); + } + if (m_thumbnailBufferMgr != NULL) + m_thumbnailBufferMgr->deinit(); + + if (m_skipBufferMgr != NULL) { + m_skipBufferMgr->deinit(); + } + + CLOGI2("free buffer done"); + + return NO_ERROR; +} + +/* m_registerStreamBuffers + * 1. Get the input buffers from the input request + * 2. Get the output buffers from the input request + * 3. Register each buffers into the matched buffer manager + * This operation must be done before another request is delivered from the service. + */ +status_t ExynosCamera3::m_registerStreamBuffers(camera3_capture_request *request) +{ + status_t ret = NO_ERROR; + const camera3_stream_buffer_t *buffer; + const camera3_stream_buffer_t *bufferList; + uint32_t bufferCount = 0; + int streamId = -1; + uint32_t requestKey = 0; + ExynosCameraStream *stream = NULL; + ExynosCameraBufferManager *bufferMgr = NULL; + + /* 1. 
Get the information of input buffers from the input request */
+    requestKey = request->frame_number;
+    buffer = request->input_buffer;
+
+    /* 2. Register each input buffer with its matching buffer manager */
+    if (buffer != NULL) {
+        stream = static_cast<ExynosCameraStream *>(buffer->stream->priv);
+        stream->getID(&streamId);
+
+        switch (streamId % HAL_STREAM_ID_MAX) {
+        case HAL_STREAM_ID_ZSL_INPUT:
+            m_registerBuffers(m_bayerBufferMgr, requestKey, buffer);
+            ALOGV("DEBUG(%s[%d]):request(%d) inputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_ZSL_INPUT) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                buffer->stream->width,
+                buffer->stream->height);
+            break;
+        case HAL_STREAM_ID_PREVIEW:
+        case HAL_STREAM_ID_VIDEO:
+        case HAL_STREAM_ID_JPEG:
+        case HAL_STREAM_ID_CALLBACK:
+        default:
+            ALOGE("ERR(%s[%d]):request(%d) inputBuffer(%p) buffer-stream type is invalid(%d). size(%d x %d)",
+                __FUNCTION__, __LINE__,
+                requestKey, buffer, streamId,
+                buffer->stream->width,
+                buffer->stream->height);
+            break;
+        }
+    }
+
+    /* 3. Get the information of output buffers from the input request */
+    bufferCount = request->num_output_buffers;
+    bufferList = request->output_buffers;
+
+    /* 4. Register each output buffer with its matching buffer manager */
+    for (uint32_t index = 0; index < bufferCount; index++) {
+        buffer = &(bufferList[index]);
+        stream = static_cast<ExynosCameraStream *>(bufferList[index].stream->priv);
+        stream->getID(&streamId);
+        stream->getBufferManager(&bufferMgr);
+
+        switch (streamId % HAL_STREAM_ID_MAX) {
+        case HAL_STREAM_ID_RAW:
+            m_registerBuffers(m_bayerBufferMgr, requestKey, buffer);
+            ALOGV("DEBUG(%s[%d]):request(%d) outputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_RAW) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                bufferList[index].stream->width,
+                bufferList[index].stream->height);
+            break;
+        case HAL_STREAM_ID_ZSL_OUTPUT:
+            /* no buffer register */
+            ALOGV("DEBUG(%s[%d]):request(%d) outputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_ZSL_OUTPUT) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                bufferList[index].stream->width,
+                bufferList[index].stream->height);
+            break;
+        case HAL_STREAM_ID_PREVIEW:
+            m_registerBuffers(bufferMgr, requestKey, buffer);
+            ALOGV("DEBUG(%s[%d]):request(%d) outputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_PREVIEW) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                bufferList[index].stream->width,
+                bufferList[index].stream->height);
+            break;
+        case HAL_STREAM_ID_VIDEO:
+            m_registerBuffers(bufferMgr, requestKey, buffer);
+            ALOGV("DEBUG(%s[%d]):request(%d) outputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_VIDEO) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                bufferList[index].stream->width,
+                bufferList[index].stream->height);
+            break;
+        case HAL_STREAM_ID_JPEG:
+            m_registerBuffers(bufferMgr, requestKey, buffer);
+            ALOGD("DEBUG(%s[%d]):request(%d) outputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_JPEG) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                bufferList[index].stream->width,
+                bufferList[index].stream->height);
+            break;
+        case HAL_STREAM_ID_CALLBACK:
+            m_registerBuffers(bufferMgr, requestKey, buffer);
+            ALOGV("DEBUG(%s[%d]):request(%d) outputBuffer(%p) buffer-StreamType(HAL_STREAM_ID_CALLBACK) size(%u x %u) ",
+                __FUNCTION__, __LINE__,
+                request->frame_number, buffer,
+                bufferList[index].stream->width,
+                bufferList[index].stream->height);
+            break;
+        default:
+            ALOGE("ERR(%s[%d]):request(%d) outputBuffer(%p) 
buffer-StreamType is invalid(%d) size(%d x %d) ", + __FUNCTION__, __LINE__, + request->frame_number, buffer, streamId, + bufferList[index].stream->width, + bufferList[index].stream->height); + break; + } + } + + return ret; +} + +status_t ExynosCamera3::m_registerBuffers( + ExynosCameraBufferManager *bufManager, + int requestKey, + const camera3_stream_buffer_t *streamBuffer) +{ + status_t ret = OK; + buffer_handle_t *handle = streamBuffer->buffer; + int acquireFence = streamBuffer->acquire_fence; + int releaseFence = streamBuffer->release_fence; + + if (bufManager != NULL) { + ret = bufManager->registerBuffer( + requestKey, + handle, + acquireFence, + releaseFence, + EXYNOS_CAMERA_BUFFER_POSITION_NONE); + if (ret < 0) { + CLOGE2("putBuffer(%d) fail(%d)", requestKey, ret); + return BAD_VALUE; + } + } + + return ret; +} + +status_t ExynosCamera3::m_putBuffers(ExynosCameraBufferManager *bufManager, int bufIndex) +{ + status_t ret = NO_ERROR; + if (bufManager != NULL) + ret = bufManager->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + + return ret; +} + +status_t ExynosCamera3::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int startIndex, + int reqBufCount, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + ret = bufManager->setInfo( + planeCount, + planeSize, + bytePerLine, + startIndex, + reqBufCount, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE2("setInfo fail"); + goto func_exit; + } + + ret = bufManager->alloc(); + if (ret < 0) { + CLOGE2("alloc fail"); + goto func_exit; + } + +func_exit: + + return ret; +} + +status_t ExynosCamera3::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int reqBufCount, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + ret = bufManager->setInfo( + planeCount, + planeSize, + bytePerLine, + reqBufCount, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE2("setInfo fail"); + goto func_exit; + } + + ret = bufManager->alloc(); + if (ret < 0) { + CLOGE2("alloc fail"); + goto func_exit; + } + +func_exit: + + return ret; +} + +status_t ExynosCamera3::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + ret = m_allocBuffers( + bufManager, + planeCount, + planeSize, + bytePerLine, + minBufCount, + maxBufCount, + type, + BUFFER_MANAGER_ALLOCATION_ONDEMAND, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE2("m_allocBuffers(minBufCount=%d, maxBufCount=%d, type=%d) fail", minBufCount, maxBufCount, type); + } + + return ret; +} + +status_t ExynosCamera3::m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + bool needMmap) +{ + int ret = 0; + + CLOGI2("setInfo(planeCount=%d, minBufCount=%d, maxBufCount=%d, type=%d, allocMode=%d)", + planeCount, minBufCount, maxBufCount, (int)type, (int)allocMode); + + ret = bufManager->setInfo( + planeCount, + planeSize, + bytePerLine, + 0, + minBufCount, + maxBufCount, + type, + allocMode, + createMetaPlane, + needMmap); + if (ret < 0) { + CLOGE2("setInfo fail"); + goto func_exit; + } 
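+    /*
+     * Note: setInfo() only records the buffer geometry (plane count/sizes,
+     * min/max counts, ION type and allocation mode); the ION memory itself is
+     * claimed by alloc() below.  As the mode names suggest,
+     * BUFFER_MANAGER_ALLOCATION_ONDEMAND is expected to back only the minimum
+     * count up front and grow toward maxBufCount as buffers are requested,
+     * while BUFFER_MANAGER_ALLOCATION_ATONCE allocates the full set
+     * immediately (used for the HWFC capture path).  A typical caller of this
+     * overload is m_setInternalScpBuffer():
+     *
+     *   m_allocBuffers(m_internalScpBufferMgr, planeCount, planeSize,
+     *                  bytesPerLine, minBufferCount, maxBufferCount,
+     *                  type, allocMode, true, false);
+     */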
+ + ret = bufManager->alloc(); + if (ret < 0) { + CLOGE2("alloc fail"); + goto func_exit; + } + +func_exit: + + return ret; +} + +status_t ExynosCamera3::m_setBuffers(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + int ret = OK; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int planeCount = 1; + int bufferCount = 1; + int previewMaxW, previewMaxH; + int sensorMaxW, sensorMaxH; + + int hwPreviewW, hwPreviewH; + int hwSensorW, hwSensorH; + int hwPictureW, hwPictureH; + int sensorMarginW, sensorMarginH; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + + int minBufferCount = 1; + int maxBufferCount = 1; + + CLOGI2("alloc buffer - camera ID: %d", m_cameraId); + + // TODO: get this value from metadata class + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + CLOGI2("HW Preview width x height = %dx%d", hwPreviewW, hwPreviewH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + CLOGI2("HW Sensor width x height = %dx%d", hwSensorW, hwSensorH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + CLOGI2("HW Picture width x height = %dx%d", hwPictureW, hwPictureH); + + m_parameters->getMaxSensorSize(&sensorMaxW, &sensorMaxH); + CLOGI2("HW Sensor MAX width x height = %dx%d", sensorMaxW, sensorMaxH); + m_parameters->getMaxPreviewSize(&previewMaxW, &previewMaxH); + CLOGI2("HW Preview MAX width x height = %dx%d", previewMaxW, previewMaxH); + + m_parameters->getSensorMargin(&sensorMarginW, &sensorMarginH); + + /* For preview stream */ + /* FLITE -> need bayer buffer for non zsl capture. */ + + if (m_parameters->isFlite3aaOtf() == false) { +#if !defined(ENABLE_FULL_FRAME) +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bytesPerLine[0] = (sensorMaxW + sensorMarginW) * 2; + planeSize[0] = (sensorMaxW + sensorMarginW) * (sensorMaxH + sensorMarginH) * 2; + } else +#endif /* DEBUG_RAWDUMP */ + { + bytesPerLine[0] = ROUND_UP((sensorMaxW + sensorMarginW), 10) * 8 / 5; + planeSize[0] = bytesPerLine[0] * (sensorMaxH + sensorMarginH); + } +#else + planeSize[0] = (sensorMaxW + sensorMarginW) * (sensorMaxH + sensorMarginH) * 2; +#endif + planeCount = 2; + + /* TO DO : make num of buffers samely */ + maxBufferCount = NUM_BAYER_BUFFERS; + + ret = m_allocBuffers(m_fliteBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, maxBufferCount, type, true, false); + if (ret < 0) { + CLOGE2("ERR(%s[%d]):bayerBuffer m_allocBuffers(bufferCount=%d) fail", maxBufferCount); + return ret; + } +#endif + } + + /* Buffers of FLITE is given by service for ZSL*/ + // TODO: consider non-zsl case + /* 3AA */ + planeSize[0] = 32 * 64 * 2; + planeCount = 2; + bufferCount = m_exynosconfig->current->bufInfo.num_3aa_buffers; + ret = m_allocBuffers(m_3aaBufferMgr, planeCount, planeSize, bytesPerLine, bufferCount, true); + if (ret < 0) { + CLOGE2("m_3aaBufferMgr m_allocBuffers(bufferCount=%d) fail", bufferCount); + return ret; + } + + /* ISP */ + // TODO: packed bayer? 
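+    // The M2M ISP input buffer is only needed when 3AA and ISP are not
+    // connected OTF (is3aaIspOtf() == false); in the OTF case the 3AA output
+    // flows to ISP in hardware without an intermediate DMA buffer.  The sizes
+    // below appear to assume up to 16 bits per pixel (hence the "* 2"):
+    // preview-max when FLITE->3AA is OTF, full sensor-max otherwise.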
+ if (m_parameters->is3aaIspOtf() == false) { + if (m_parameters->isFlite3aaOtf() == true) + planeSize[0] = previewMaxW * previewMaxH * 2; + else + planeSize[0] = sensorMaxW * sensorMaxH * 2; + planeCount = 2; + bufferCount = 1; + ret = m_allocBuffers(m_ispBufferMgr, planeCount, planeSize, bytesPerLine, bufferCount, true); + if (ret < 0) { + CLOGE2("m_ispBufferMgr m_allocBuffers(bufferCount=%d) fail", bufferCount); + return ret; + } + } + + /* SCC */ + /* planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) * 2; */ + planeSize[0] = sensorMaxW * sensorMaxH * 2; + planeCount = 2; + // TODO: Need dynamic buffer allocation. reduce SCC buffer + bufferCount = NUM_PICTURE_BUFFERS; + + ret = m_allocBuffers(m_yuvCaptureBufferMgr, planeCount, planeSize, bytesPerLine, bufferCount, true); + if (ret < 0) { + CLOGE2("m_yuvCaptureBufferMgr m_allocBuffers(bufferCount=%d) fail", bufferCount); + return ret; + } + + /* VRA buffers */ + if (m_parameters->isMcscVraOtf() == false) { + int vraWidth = 0, vraHeight = 0; + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + bytesPerLine[0] = ROUND_UP((vraWidth * 3 / 2), CAMERA_16PX_ALIGN); + planeSize[0] = bytesPerLine[0] * vraHeight; + planeCount = 2; + + maxBufferCount = m_exynosconfig->current->bufInfo.num_vra_buffers; + + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + + ret = m_allocBuffers(m_vraBufferMgr, planeCount, planeSize, bytesPerLine, maxBufferCount, maxBufferCount, type, true, true); + if (ret < 0) { + CLOGE2("m_vraBufferMgr m_allocBuffers(bufferCount=%d) fail", maxBufferCount); + return ret; + } + } + + ret = m_setInternalScpBuffer(); + if (ret < 0) { + CLOGE2("m_setReprocessing Buffer fail"); + return ret; + } + + CLOGI2("alloc buffer done - camera ID: %d", m_cameraId); + return ret; +} + +status_t ExynosCamera3::m_setInternalScpBuffer(void) +{ + int ret = 0; + int hwPreviewW = 0, hwPreviewH = 0; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int planeCount = 0; + int bufferCount = 0; + int minBufferCount = NUM_REPROCESSING_BUFFERS; + int maxBufferCount = NUM_PREVIEW_BUFFERS; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + CLOGI2("HW Picture MAX width x height = %dx%d", hwPreviewW, hwPreviewH); + + bytesPerLine[0] = 0; + planeSize[0] = ALIGN_UP(hwPreviewW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPreviewH, GSCALER_IMG_ALIGN); + planeSize[1] = (ALIGN_UP(hwPreviewW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPreviewH, GSCALER_IMG_ALIGN)) / 2; + planeCount = 3; + minBufferCount = m_exynosconfig->current->bufInfo.num_request_preview_buffers; + maxBufferCount = m_exynosconfig->current->bufInfo.num_preview_buffers; + + allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + ret = m_allocBuffers(m_internalScpBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, true, false); + if (ret < 0) { + CLOGE2("m_internalScpBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", minBufferCount, maxBufferCount); + return ret; + } + + return NO_ERROR; +} + +bool ExynosCamera3::m_setBuffersThreadFunc(void) +{ + int ret; + + ret = m_setBuffers(); + if (ret < 0) { + CLOGE2("m_setBuffers failed"); + // TODO: Need release buffers and error exit + return false; + } + + return false; +} + +uint32_t 
ExynosCamera3::m_getBayerPipeId(void) +{ + uint32_t pipeId = 0; + // TODO: implement it + + pipeId = PIPE_FLITE; + + return pipeId; +} + +status_t ExynosCamera3::m_pushRequest(camera3_capture_request *request) +{ + ExynosCameraRequest* req = NULL; + + CLOGV2("m_pushRequest frameCnt(%d)", request->frame_number); + + req = m_requestMgr->registerServiceRequest(request); + if(req == NULL) { + return INVALID_OPERATION; + } else { + return OK; + } +} + +status_t ExynosCamera3::m_popRequest(ExynosCameraRequest **request) +{ + status_t ret = OK; + + CLOGV2("m_popRequest "); + + *request = m_requestMgr->createServiceRequest(); + if (*request == NULL) { + CLOGE2("createRequest failed "); + ret = INVALID_OPERATION; + } + return ret; +} + + +/* m_needNotify is for reprocessing */ +bool ExynosCamera3::m_needNotify(ExynosCameraRequest *request) +{ + camera3_stream_buffer_t *output_buffers; + List *outputStreamId = NULL; + List::iterator outputStreamIdIter; + ExynosCameraStream *stream = NULL; + int streamId = 0; + + request->getAllRequestOutputStreams(&outputStreamId); + bool notifyFlag = true; + + /* HACK: can't send notify cause of one request including render, video */ + if (outputStreamId != NULL) { + for (outputStreamIdIter = outputStreamId->begin(); outputStreamIdIter != outputStreamId->end(); outputStreamIdIter++) { + + m_streamManager->getStream(*outputStreamIdIter, &stream); + if (stream == NULL) { + CLOGE2("stream is NULL"); + break; + } + stream->getID(&streamId); + + switch (streamId % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_RAW: + case HAL_STREAM_ID_PREVIEW: + case HAL_STREAM_ID_VIDEO: + case HAL_STREAM_ID_CALLBACK: + notifyFlag = false; + break; + default: + break; + }; + } + } + + return notifyFlag; +} + + +status_t ExynosCamera3::m_pushResult(uint32_t frameCount, struct camera2_shot_ext *src_ext) +{ + status_t ret = OK; + ExynosCameraRequest *request = NULL; + struct camera2_shot_ext dst_ext; + uint8_t currentPipelineDepth = 0; + + request = m_requestMgr->getServiceRequest(frameCount); + if (request == NULL) { + CLOGE2("getRequest failed "); + return INVALID_OPERATION; + } + + ret = request->getResultShot(&dst_ext); + if (ret < 0) { + CLOGE2("getResultShot failed "); + return INVALID_OPERATION; + } + + if (dst_ext.shot.dm.request.frameCount > src_ext->shot.dm.request.frameCount) { + CLOGI("INFO(%s[%d]):Skip to update result. 
frameCount %d requestKey %d shot.request.frameCount %d", + __FUNCTION__, __LINE__, + frameCount, request->getKey(), dst_ext.shot.dm.request.frameCount); + return ret; + } + + currentPipelineDepth = dst_ext.shot.dm.request.pipelineDepth; + memcpy(&dst_ext.shot.dm, &src_ext->shot.dm, sizeof(struct camera2_dm)); + memcpy(&dst_ext.shot.udm, &src_ext->shot.udm, sizeof(struct camera2_udm)); + dst_ext.shot.dm.request.pipelineDepth = currentPipelineDepth; + + ret = request->setResultShot(&dst_ext); + if (ret < 0) { + CLOGE2("setResultShot failed "); + return INVALID_OPERATION; + } + + ret = m_metadataConverter->updateDynamicMeta(request); + + CLOGV2("result is set (%d)", request->getFrameCount()); + return ret; +} + +status_t ExynosCamera3::m_pushJpegResult(ExynosCameraFrame *frame, int size, ExynosCameraBuffer *buffer) +{ + status_t ret = NO_ERROR; + ExynosCameraStream *stream = NULL; + camera3_stream_buffer_t streamBuffer; + camera3_stream_buffer_t *output_buffers; + ResultRequest resultRequest = NULL; + ExynosCameraRequest *request = NULL; + camera3_capture_result_t requestResult; + + ExynosCameraBufferManager *bufferMgr = NULL; + + ret = m_streamManager->getStream(HAL_STREAM_ID_JPEG, &stream); + if (ret != NO_ERROR) { + CLOGE2("Failed to getStream from StreamMgr. streamId HAL_STREAM_ID_JPEG"); + return ret; + } + + if (stream == NULL) { + CLOGE2("stream is NULL"); + return INVALID_OPERATION; + } + + ret = stream->getStream(&streamBuffer.stream); + if (ret != NO_ERROR) { + CLOGE2("Failed to getStream from ExynosCameraStream. streamId HAL_STREAM_ID_JPEG"); + return ret; + } + + ret = stream->getBufferManager(&bufferMgr); + if (ret != NO_ERROR) { + CLOGE2("Failed to getBufferManager. streamId HAL_STREAM_ID_JPEG"); + return ret; + } + + ret = bufferMgr->getHandleByIndex(&streamBuffer.buffer, buffer->index); + if (ret != NO_ERROR) { + CLOGE2("Failed to getHandleByIndex. bufferIndex %d", buffer->index); + return ret; + } + + streamBuffer.status = CAMERA3_BUFFER_STATUS_OK; + streamBuffer.acquire_fence = -1; + streamBuffer.release_fence = -1; + + camera3_jpeg_blob_t jpeg_blob; + jpeg_blob.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID; + jpeg_blob.jpeg_size = size; + memcpy(buffer->addr[0]+buffer->size[0]-sizeof(camera3_jpeg_blob_t), &jpeg_blob, sizeof(camera3_jpeg_blob_t)); + + request = m_requestMgr->getServiceRequest(frame->getFrameCount()); + +#if !defined(ENABLE_FULL_FRAME) + /* try to notify if notify callback was not called in same framecount */ + if (request->getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY) == false) { + /* can't send notify cause of one request including render, video */ + if (m_needNotify(request) == true) { + CLOGV2("notify(%d)", frame->getFrameCount()); + m_sendNotify(frame->getFrameCount(), CAMERA3_MSG_SHUTTER); + } + } +#endif + + CameraMetadata setting = request->getResultMeta(); + int32_t jpegsize = size; + ret = setting.update(ANDROID_JPEG_SIZE, &jpegsize, 1); + if (ret < 0) { + CLOGE2("ANDROID_JPEG_SIZE update failed(%d)", ret); + } + + /* update jpeg size */ + request->setResultMeta(setting); + + CLOGD2("Set JPEG result Done. 
frameCount %d request->Key %d", + frame->getFrameCount(), request->getKey()); + + resultRequest = m_requestMgr->createResultRequest(frame->getFrameCount(), EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY, NULL, NULL); + resultRequest->pushStreamBuffer(&streamBuffer); + + m_requestMgr->callbackSequencerLock(); + request->increaseCompleteBufferCount(); + m_requestMgr->callbackRequest(resultRequest); + m_requestMgr->callbackSequencerUnlock(); + + CLOGD2("result is set"); + + return ret; +} + +ExynosCameraRequest* ExynosCamera3::m_popResult(CameraMetadata &result, uint32_t frameCount) +{ + ExynosCameraRequest *request = NULL; + struct camera2_shot_ext dst_ext; + + request = m_requestMgr->getServiceRequest(frameCount); + if (request == NULL) { + CLOGE2("getRequest failed "); + result.clear(); + return NULL; + } + + result = request->getResultMeta(); + + CLOGV2("m_popResult(%d)", request->getFrameCount()); + + return request; +} + +status_t ExynosCamera3::m_deleteRequest(uint32_t frameCount) +{ + status_t ret = OK; + + ret = m_requestMgr->deleteServiceRequest(frameCount); + + return ret; +} + +status_t ExynosCamera3::m_setReprocessingBuffer(void) +{ + int ret = 0; + int pictureMaxW = 0, pictureMaxH = 0; + int hwPictureW = 0, hwPictureH = 0; + int maxThumbnailW = 0, maxThumbnailH = 0; + unsigned int planeSize[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; + int pictureFormat = 0; + int planeCount = 0; + int bufferCount = 0; + int minBufferCount = NUM_REPROCESSING_BUFFERS; + int maxBufferCount = NUM_PICTURE_BUFFERS; + bool needMmap = false; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + m_parameters->getMaxPictureSize(&pictureMaxW, &pictureMaxH); + CLOGI2("HW Picture MAX width x height = %dx%d", pictureMaxW, pictureMaxH); + m_parameters->getMaxThumbnailSize(&maxThumbnailW, &maxThumbnailH); + CLOGI2("Thumbnail Max width x height = %dx%d", maxThumbnailW, maxThumbnailH); + pictureFormat = m_parameters->getHwPictureFormat(); + + /* for reprocessing */ + if (m_parameters->getUsePureBayerReprocessing() == true) { +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bytesPerLine[0] = pictureMaxW * 2; + planeSize[0] = pictureMaxW * pictureMaxH * 2; + } else +#endif /* DEBUG_RAWDUMP */ + { + bytesPerLine[0] = ROUND_UP((pictureMaxW * 3 / 2), 16); + planeSize[0] = bytesPerLine[0] * pictureMaxH; + } +#else + planeSize[0] = pictureMaxW * pictureMaxH * 2; +#endif + planeCount = 2; + bufferCount = NUM_REPROCESSING_BUFFERS; + + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + if (m_parameters->getHighResolutionCallbackMode() == true) { + /* ISP Reprocessing Buffer realloc for high resolution callback */ + minBufferCount = 2; + } + + ret = m_allocBuffers(m_ispReprocessingBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, true, false); + if (ret < 0) { + CLOGE2("m_ispReprocessingBufferMgr m_allocBuffers(minBufferCount=%d/maxBufferCount=%d) fail", minBufferCount, maxBufferCount); + return ret; + } + } + + if( m_parameters->getHighSpeedRecording() ) { + m_parameters->getHwSensorSize(&hwPictureW, &hwPictureH); + CLOGI2("HW Picture(HighSpeed) width x height = %dx%d", hwPictureW, hwPictureH); + } else { + m_parameters->getMaxSensorSize(&hwPictureW, &hwPictureH); + CLOGI2("HW Picture width x height = 
%dx%d", hwPictureW, hwPictureH); + } + + if (m_parameters->isUseYuvReprocessingForThumbnail() == true) + needMmap = true; + else + needMmap = false; + + bytesPerLine[0] = 0; + planeSize[0] = ALIGN_UP(hwPictureW, GSCALER_IMG_ALIGN) * ALIGN_UP(hwPictureH, GSCALER_IMG_ALIGN) * 2; + planeCount = 2; + minBufferCount = 1; + maxBufferCount = NUM_PICTURE_BUFFERS; + + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + if (m_parameters->isHWFCEnabled() == true) + allocMode = BUFFER_MANAGER_ALLOCATION_ATONCE; + else + allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + if (m_parameters->getHighResolutionCallbackMode() == true) { + /* SCC Reprocessing Buffer realloc for high resolution callback */ + minBufferCount = 2; + } + + ret = m_allocBuffers(m_yuvCaptureReprocessingBufferMgr, + planeCount, planeSize, bytesPerLine, + minBufferCount, maxBufferCount, + type, allocMode, true, needMmap); + if (ret < 0) { + CLOGE2("m_yuvCaptureReprocessingBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", minBufferCount, maxBufferCount); + return ret; + } + + /* Reprocessing Thumbanil buffer */ + switch (pictureFormat) { + case V4L2_PIX_FMT_NV21M: + planeCount = 3; + planeSize[0] = maxThumbnailW * maxThumbnailH; + planeSize[1] = maxThumbnailW * maxThumbnailH / 2; + case V4L2_PIX_FMT_NV21: + default: + planeCount = 2; + planeSize[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), maxThumbnailW, maxThumbnailH); + } + + minBufferCount = 1; + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + + type = EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE; + + ret = m_allocBuffers(m_thumbnailBufferMgr, + planeCount, planeSize, bytesPerLine, + minBufferCount, maxBufferCount, + type, allocMode, true, needMmap); + if (ret != NO_ERROR) { + CLOGE2("m_thumbnailBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", + minBufferCount, maxBufferCount); + return ret; + } + + return NO_ERROR; +} + +bool ExynosCamera3::m_reprocessingFrameFactoryStartThreadFunc(void) +{ + status_t ret = 0; + ExynosCamera3FrameFactory *factory = NULL; + + factory = m_frameFactory[FRAME_FACTORY_TYPE_REPROCESSING]; + if (factory == NULL) { + CLOGE2("Can't start FrameFactory!!!! FrameFactory is NULL!!"); + + return false; + } else if (factory->isCreated() == false) { + CLOGE2("Reprocessing FrameFactory is NOT created!"); + return false; + } + + /* Set buffer manager */ + ret = m_setupReprocessingPipeline(); + if (ret != NO_ERROR) { + CLOGE2("Failed to setupReprocessingPipeline. ret %d", ret); + return false; + } + + ret = factory->initPipes(); + if (ret < 0) { + CLOGE2("Failed to initPipes. 
ret %d", ret); + return false; + } + + ret = m_startReprocessingFrameFactory(factory); + if (ret < 0) { + CLOGE2("Failed to startReprocessingFrameFactory"); + /* TODO: Need release buffers and error exit */ + return false; + } + + return false; +} + +status_t ExynosCamera3::m_startReprocessingFrameFactory(ExynosCamera3FrameFactory *factory) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = 0; + + CLOGD2("- IN -"); + + ret = factory->preparePipes(); + if (ret < 0) { + CLOGE2("m_reprocessingFrameFactory preparePipe fail"); + return ret; + } + + /* stream on pipes */ + ret = factory->startPipes(); + if (ret < 0) { + CLOGE2("m_reprocessingFrameFactory startPipe fail"); + return ret; + } + + m_flagStartReprocessingFrameFactory = true; + + return NO_ERROR; +} + +status_t ExynosCamera3::m_stopReprocessingFrameFactory(ExynosCamera3FrameFactory *factory) +{ + CLOGI2("- IN -"); + status_t ret = 0; + + if (factory != NULL) { + ret = factory->stopPipes(); + if (ret < 0) { + CLOGE2("m_reprocessingFrameFactory0>stopPipe() fail"); + } + } + + CLOGD2("clear m_captureProcessList(Picture) Frame list"); + ret = m_clearList(&m_captureProcessList, &m_captureProcessLock); + if (ret < 0) { + CLOGE2("m_clearList fail"); + return ret; + } + + m_flagStartReprocessingFrameFactory = false; + + return NO_ERROR; +} + +status_t ExynosCamera3::m_checkBufferAvailable(uint32_t pipeId, ExynosCameraBufferManager *bufferMgr) +{ + status_t ret = TIMED_OUT; + int retry = 0; + + do { + ret = -1; + retry++; + if (bufferMgr->getNumOfAvailableBuffer() > 0) { + ret = OK; + } else { + /* wait available ISP buffer */ + usleep(WAITING_TIME); + } + if (retry % 10 == 0) + CLOGW2("retry(%d) setupEntity for pipeId(%d)", retry, pipeId); + } while(ret < 0 && retry < (TOTAL_WAITING_TIME/WAITING_TIME)); + + return ret; +} + +bool ExynosCamera3::m_startPictureBufferThreadFunc(void) +{ + int ret = 0; + + ret = m_setPictureBuffer(); + if (ret < 0) { + CLOGE2("m_setPictureBuffer failed"); + + /* TODO: Need release buffers and error exit */ + + return false; + } + + if (m_parameters->isReprocessing() == true) { + ret = m_setReprocessingBuffer(); + if (ret < 0) { + CLOGE2("m_setReprocessing Buffer fail"); + return ret; + } + } + + return false; +} + +status_t ExynosCamera3::m_setPictureBuffer(void) +{ + int ret = 0; + unsigned int planeSize[3] = {0}; + unsigned int bytesPerLine[3] = {0}; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + int planeCount = 0; + int minBufferCount = 1; + int maxBufferCount = 1; + exynos_camera_buffer_type_t type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + buffer_manager_allocation_mode_t allocMode = BUFFER_MANAGER_ALLOCATION_ONDEMAND; + + m_parameters->getMaxPictureSize(&pictureW, &pictureH); + pictureFormat = m_parameters->getPictureFormat(); + if ((m_parameters->needGSCForCapture(getCameraId()) == true)) { + planeSize[0] = pictureW * pictureH * 2; + planeCount = 1; + minBufferCount = 1; + maxBufferCount = m_exynosconfig->current->bufInfo.num_picture_buffers; + + // Pre-allocate certain amount of buffers enough to fed into 3 JPEG save threads. 
+ if (m_parameters->getSeriesShotCount() > 0) + minBufferCount = NUM_BURST_GSC_JPEG_INIT_BUFFER; + + ret = m_allocBuffers(m_gscBufferMgr, planeCount, planeSize, bytesPerLine, minBufferCount, maxBufferCount, type, allocMode, false, false); + if (ret < 0) { + CLOGE2("m_gscBufferMgr m_allocBuffers(minBufferCount=%d, maxBufferCount=%d) fail", minBufferCount, maxBufferCount); + return ret; + } + } + + return ret; +} + +status_t ExynosCamera3::m_generateInternalFrame(uint32_t frameCount, ExynosCamera3FrameFactory *factory, List *list, Mutex *listLock, ExynosCameraFrame **newFrame) +{ + status_t ret = OK; + *newFrame = NULL; + + CLOGV2("frameCount(%d)", frameCount); + ret = m_searchFrameFromList(list, listLock, frameCount, newFrame); + if (ret < 0) { + CLOGE2("searchFrameFromList fail"); + return INVALID_OPERATION; + } + + if (*newFrame == NULL) { + *newFrame = factory->createNewFrame(frameCount); + if (*newFrame == NULL) { + CLOGE2("newFrame is NULL"); + return UNKNOWN_ERROR; + } + listLock->lock(); + list->push_back(*newFrame); + listLock->unlock(); + } + + /* Set frame type into FRAME_TYPE_INTERNAL */ + ret = (*newFrame)->setFrameInfo(m_parameters, frameCount, FRAME_TYPE_INTERNAL); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Failed to setFrameInfo with INTERNAL. frameCount %d", + __FUNCTION__, __LINE__, frameCount); + return ret; + } + + return ret; +} + +bool ExynosCamera3::m_internalFrameThreadFunc(void) +{ + status_t ret = 0; + int index = 0; + ExynosCameraFrame *newFrame = NULL; + + CLOGV2("Enter m_internalFrameThreadFunc"); + + /* 1. Get new internal frame */ + ret = m_internalFrameDoneQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGV2("wait timeout"); + } else { + CLOGE2("wait and pop fail, ret(%d)", ret); + /* TODO: doing exception handling */ + } + return false; + } + + CLOGV2("handle internal frame : previewStream frameCnt(%d) (%d)", newFrame->getFrameCount(), newFrame->getFrameType()); + + /* 2. Redirection for the normal frame */ + if (newFrame->getFrameType() != FRAME_TYPE_INTERNAL) { + CLOGE2("push to m_pipeFrameDoneQ handler : previewStream frameCnt(%d)", newFrame->getFrameCount()); + m_pipeFrameDoneQ[PIPE_3AA]->pushProcessQ(&newFrame); + return true; + } + + /* 3. 
Handle the internal frame for each pipe */ + ret = m_handleInternalFrame(newFrame); + + if (ret < 0) { + CLOGE2("handle preview frame fail"); + return ret; + } + + if (newFrame->isComplete() == true/* && newFrame->getFrameCapture() == false */) { + ret = m_removeFrameFromList(&m_processList, &m_processLock, newFrame); + + if (ret < 0) { + CLOGE2("remove frame from processList fail, ret(%d)", ret); + } + + CLOGV2("internal frame complete, count(%d)", newFrame->getFrameCount()); + newFrame->decRef(); + m_frameMgr->deleteFrame(newFrame); + newFrame = NULL; + m_captureResultDoneCondition.signal(); + } + + return true; +} + +bool ExynosCamera3::m_doInternalFrame(ExynosCameraRequest *request) +{ + status_t ret = NO_ERROR; + bool internalFlag = false; + + struct camera2_shot_ext cur_shot_ext; + struct camera2_shot_ext prev_shot_ext; + + memset(&cur_shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + memset(&prev_shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + + ret = request->getServiceShot(&cur_shot_ext); + if (ret != NO_ERROR) { + CLOGE2("Get service shot fail, Request Key(%d), ret(%d)", request->getKey(), ret); + return false; + } + + ret = request->getPrevShot(&prev_shot_ext); + if (ret != NO_ERROR) { + CLOGE2("Get service previous shot fail, Request Key(%d), ret(%d)", request->getKey(), ret); + return false; + } + + if (cur_shot_ext.shot.ctl.aa.aeMode == AA_AEMODE_OFF || cur_shot_ext.shot.ctl.aa.mode == AA_CONTROL_OFF) { + if ((cur_shot_ext.shot.ctl.sensor.exposureTime != prev_shot_ext.shot.ctl.sensor.exposureTime) + || (cur_shot_ext.shot.ctl.sensor.frameDuration!= prev_shot_ext.shot.ctl.sensor.frameDuration) + || (cur_shot_ext.shot.ctl.aa.vendor_isoValue != prev_shot_ext.shot.ctl.aa.vendor_isoValue)) { + CLOGI2("Create internal frame for manual AE setting"); + internalFlag = true; + } + } + + if ((cur_shot_ext.shot.ctl.lens.opticalStabilizationMode == OPTICAL_STABILIZATION_MODE_STILL) + && (prev_shot_ext.shot.ctl.lens.opticalStabilizationMode == OPTICAL_STABILIZATION_MODE_CENTERING)) { + CLOGI2("Create internal frame for ois mode (OFF -> ON)"); + internalFlag = true; + } + +#if 0 //for test + if (cur_shot_ext.shot.ctl.aa.aeLock != prev_shot_ext.shot.ctl.aa.aeLock ) { + CLOGI2("Create internal frame for ae lock (%d -> %d)", + prev_shot_ext.shot.ctl.aa.aeLock,cur_shot_ext.shot.ctl.aa.aeLock); + internalFlag = true; + } +#endif + + return internalFlag; +} + +status_t ExynosCamera3::m_handleInternalFrame(ExynosCameraFrame *frame) +{ + ExynosCameraFrameEntity *entity = NULL; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer buffer; + ExynosCameraBuffer t3acBuffer; + ExynosCamera3FrameFactory *factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + ExynosCameraStream *stream = NULL; + camera3_capture_result_t captureResult; + camera3_notify_msg_t notityMsg; + ExynosCameraRequest* request = NULL; + ResultRequest resultRequest = NULL; + + struct camera2_shot_ext meta_shot_ext; + struct camera2_dm *dm = NULL; + uint32_t framecount = 0; + int32_t reprocessingBayerMode = m_parameters->getReprocessingBayerMode(); + + entity_state_t entityState = ENTITY_STATE_COMPLETE; + status_t ret = OK; + + entity = frame->getFrameDoneFirstEntity(); + if (entity == NULL) { + CLOGE2("current entity is NULL"); + /* TODO: doing exception handling */ + return true; + } + CLOGV2("handle internal frame : previewStream frameCnt(%d) entityID(%d)", frame->getFrameCount(), entity->getPipeId()); + + switch(entity->getPipeId()) { + case PIPE_3AA: + /* Notify ShotDone to mainThread */ + framecount = 
frame->getFrameCount(); + m_shotDoneQ->pushProcessQ(&framecount); + + ret = frame->getSrcBuffer(entity->getPipeId(), &buffer); + if (ret < 0) { + CLOGE2("getSrcBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + ret = m_updateTimestamp(frame, &buffer, false); + if (ret != NO_ERROR) { + CLOGE2("[F%d B%d]Failed to updateTimestamp", + frame->getFrameCount(), buffer.index); + return ret; + } + + if (m_parameters->isFlite3aaOtf() == false) + ret = m_putBuffers(m_fliteBufferMgr, buffer.index); + else + ret = m_putBuffers(m_3aaBufferMgr, buffer.index); + + if (ret < 0) { + CLOGE2("put Buffer fail"); + } + } + + frame->setMetaDataEnable(true); + + t3acBuffer.index = -1; + + if (frame->getRequest(PIPE_3AC) == true) { + ret = frame->getDstBuffer(entity->getPipeId(), &t3acBuffer, factory->getNodeType(PIPE_3AC)); + if (ret != NO_ERROR) { + CLOGE2("getDstBuffer fail, pipeId(%d), ret(%d)", entity->getPipeId(), ret); + } + } + + if (m_parameters->isReprocessing() == true) { + if (m_captureSelector == NULL) { + CLOGE2("m_captureSelector is NULL"); + return INVALID_OPERATION; + } + } else { + if (m_sccCaptureSelector == NULL) { + CLOGE2("m_sccCaptureSelector is NULL"); + return INVALID_OPERATION; + } + } + + if (0 <= t3acBuffer.index) { + if (m_parameters->isUseYuvReprocessing() == true + || frame->getFrameCapture() == true) { + if (m_parameters->getHighSpeedRecording() == true) { + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_yuvCaptureBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_fliteBufferMgr, t3acBuffer.index); + + if (ret < 0) { + CLOGE2("m_putBuffers(m_fliteBufferMgr, %d) fail", t3acBuffer.index); + break; + } + } else { + entity_buffer_state_t bufferstate = ENTITY_BUFFER_STATE_NOREQ; + ret = frame->getDstBufferState(entity->getPipeId(), &bufferstate, factory->getNodeType(PIPE_3AC)); + if (ret == NO_ERROR && bufferstate != ENTITY_BUFFER_STATE_ERROR) { + if (m_parameters->isUseYuvReprocessing() == false + && m_parameters->isUsing3acForIspc() == true) + ret = m_sccCaptureSelector->manageFrameHoldListForDynamicBayer(frame); + else + ret = m_captureSelector->manageFrameHoldList(frame, entity->getPipeId(), false, factory->getNodeType(PIPE_3AC)); + + if (ret < 0) { + CLOGE2("manageFrameHoldList fail"); + return ret; + } + } else { + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_yuvCaptureBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_fliteBufferMgr, t3acBuffer.index); + + if (ret < 0) { + CLOGE2("m_putBuffers(m_fliteBufferMgr, %d) fail", t3acBuffer.index); + break; + } + } + } + } else { + if (reprocessingBayerMode == REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON) { + CLOGW2("frame->getRequest(PIPE_3AC) == false. 
so, just m_putBuffers(t3acBuffer.index(%d)..., pipeId(%d), ret(%d)", + t3acBuffer.index, entity->getPipeId(), ret); + } + + if (m_parameters->isUsing3acForIspc() == true) + ret = m_putBuffers(m_yuvCaptureBufferMgr, t3acBuffer.index); + else + ret = m_putBuffers(m_fliteBufferMgr, t3acBuffer.index); + + if (ret < 0) { + CLOGE2("m_putBuffers(m_fliteBufferMgr, %d) fail", t3acBuffer.index); + break; + } + } + } + + break; + case PIPE_VRA: + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + if (buffer.index >= 0) { + ret = m_vraBufferMgr->putBuffer(buffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):Put VRA buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + break; + case PIPE_FLITE: + /* TODO: HACK: Will be removed, this is driver's job */ + if (m_parameters->isFlite3aaOtf() == true) { + ret = m_handleBayerBuffer(frame); + if (ret < NO_ERROR) { + CLOGE("ERR(%s[%d]):Handle bayer buffer failed, framecount(%d), pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount(), entity->getPipeId(), ret); + return ret; + } + } else { + ret = frame->getDstBuffer(entity->getPipeId(), &buffer); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):getDstBuffer fail, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, entity->getPipeId(), ret); + return ret; + } + + CLOGV("DEBUG(%s[%d]):Deliver Flite Buffer to 3AA. driver->framecount %d hal->framecount %d", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)buffer.addr[buffer.planeCount-1]), + frame->getFrameCount()); + + ret = m_setupEntity(PIPE_3AA, frame, &buffer, NULL); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBuffer failed, pipeId(%d), ret(%d)", + __FUNCTION__, __LINE__, PIPE_3AA, ret); + return ret; + } + + factory->pushFrameToPipe(&frame, PIPE_3AA); + } + + break; + default: + CLOGE2("Invalid pipe ID"); + break; + } + + ret = frame->setEntityState(entity->getPipeId(), entityState); + if (ret < 0) { + CLOGE2("setEntityState fail, pipeId(%d), state(%d), ret(%d)", entity->getPipeId(), ENTITY_STATE_COMPLETE, ret); + return ret; + } + + return ret; +} + +#ifdef MONITOR_LOG_SYNC +uint32_t ExynosCamera3::m_getSyncLogId(void) +{ + return ++cameraSyncLogId; +} +#endif + +bool ExynosCamera3::m_monitorThreadFunc(void) +{ + CLOGV("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int *threadState; + struct timeval dqTime; + uint64_t *timeInterval; + int *countRenew; + int camId = getCameraId(); + int ret = NO_ERROR; + int loopCount = 0; + + int dtpStatus = 0; + int pipeIdFlite = 0; + int pipeIdScp = 0; + ExynosCamera3FrameFactory *factory = NULL; + + for (loopCount = 0; loopCount < MONITOR_THREAD_INTERVAL; loopCount += (MONITOR_THREAD_INTERVAL/20)) { + if (m_flushFlag == true) { + CLOGI2("m_flushFlag(%d)", m_flushFlag); + return false; + } + usleep(MONITOR_THREAD_INTERVAL/20); + } + + if (m_parameters->isFlite3aaOtf() == true || getCameraId() == CAMERA_ID_BACK) { + pipeIdFlite = PIPE_FLITE; + pipeIdScp = PIPE_3AA; + } else { + pipeIdFlite = PIPE_FLITE_FRONT; + pipeIdScp = PIPE_3AA_FRONT; + } + + factory = m_frameFactory[FRAME_FACTORY_TYPE_CAPTURE_PREVIEW]; + if (factory == NULL) { + CLOGE2("frameFactory is NULL"); + return false; + } + + if (factory->checkPipeThreadRunning(pipeIdScp) == false) { + CLOGE2("Scp pipe is not running.. 
Skip monitoring."); + return false; + } +#ifdef MONITOR_LOG_SYNC + uint32_t pipeIdIsp = 0; + + if (m_parameters->isFlite3aaOtf() == true || getCameraId() == CAMERA_ID_BACK) + pipeIdIsp = PIPE_3AA; + else + pipeIdIsp = PIPE_3AA_FRONT; + + /* If it is not front camera in dual and sensor pipe is running, do sync log */ + if (factory->checkPipeThreadRunning(pipeIdIsp) && + !(getCameraId() == CAMERA_ID_FRONT && m_parameters->getDualMode())) { + if (!(m_syncLogDuration % MONITOR_LOG_SYNC_INTERVAL)) { + uint32_t syncLogId = m_getSyncLogId(); + CLOGI2("@FIMC_IS_SYNC %d", syncLogId); + factory->syncLog(pipeIdIsp, syncLogId); + } + m_syncLogDuration++; + } +#endif + factory->getControl(V4L2_CID_IS_G_DTPSTATUS, &dtpStatus, pipeIdFlite); + + if (dtpStatus == 1) { + CLOGE2("(%d)", dtpStatus); + dump(); + +#if 0//def CAMERA_GED_FEATURE + /* in GED */ + android_printAssert(NULL, LOG_TAG, "killed by itself"); +#else + /* specifically defined */ + /* m_notifyCb(CAMERA_MSG_ERROR, 1002, 0, m_callbackCookie); */ + /* or */ + /* android_printAssert(NULL, LOG_TAG, "killed by itself"); */ +#endif + return false; + } + +#ifdef SENSOR_OVERFLOW_CHECK + factory->getControl(V4L2_CID_IS_G_DTPSTATUS, &dtpStatus, pipeIdFlite); + if (dtpStatus == 1) { + CLOGE2("(%d)", dtpStatus); + dump(); +#if 0//def CAMERA_GED_FEATURE + /* in GED */ + android_printAssert(NULL, LOG_TAG, "killed by itself"); +#else + /* specifically defined */ + /* m_notifyCb(CAMERA_MSG_ERROR, 1002, 0, m_callbackCookie); */ + /* or */ + /* android_printAssert(NULL, LOG_TAG, "killed by itself"); */ +#endif + return false; + } +#endif + factory->getThreadState(&threadState, pipeIdScp); + factory->getThreadRenew(&countRenew, pipeIdScp); + + if ((*threadState == ERROR_POLLING_DETECTED) || (*countRenew > ERROR_DQ_BLOCKED_COUNT)) { + CLOGE2("(%d)", *threadState); + if((*countRenew > ERROR_DQ_BLOCKED_COUNT)) + CLOGE2("ERROR_DQ_BLOCKED) ; ERROR_DQ_BLOCKED_COUNT =20"); + + dump(); +#if 0//def CAMERA_GED_FEATURE + /* in GED */ + android_printAssert(NULL, LOG_TAG, "killed by itself"); +#else + /* specifically defined */ + /* m_notifyCb(CAMERA_MSG_ERROR, 1002, 0, m_callbackCookie); */ + /* or */ + /* android_printAssert(NULL, LOG_TAG, "killed by itself"); */ +#endif + return false; + } else { + CLOGV2(" (%d)", *threadState); + } + + gettimeofday(&dqTime, NULL); + factory->getThreadInterval(&timeInterval, pipeIdScp); + + CLOGV2("Thread IntervalTime [%lld]", *timeInterval); + CLOGV2("Thread Renew Count [%d]", *countRenew); + + factory->incThreadRenew(pipeIdScp); + + return true; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3.h b/libcamera/34xx/hal3/ExynosCamera3.h new file mode 100644 index 0000000..8148b8a --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3.h @@ -0,0 +1,407 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef EXYNOS_CAMERA3_HW_IMPLEMENTATION_H +#define EXYNOS_CAMERA3_HW_IMPLEMENTATION_H + +#include "ExynosCameraDefine.h" + +#include "ExynosCameraRequestManager.h" +#include "ExynosCameraStreamManager.h" +#include "ExynosCameraMetadataConverter.h" +#include "ExynosCamera3Parameters.h" +#include "ExynosCameraFrameManager.h" +#include "ExynosCamera3FrameFactory.h" +#include "ExynosCamera3FrameFactoryPreview.h" +#include "ExynosCamera3FrameReprocessingFactory.h" + +namespace android { + +typedef struct ExynosCameraRequestInfo { + ExynosCameraRequest *request; + uint32_t sensorControledFrameCount; +} request_info_t; + +typedef ExynosCameraThread mainCameraThread; +typedef ExynosCameraList framefactory3_queue_t; + +class ExynosCamera3 { +public: + ExynosCamera3() {}; + ExynosCamera3(int cameraId, camera_metadata_t **info); + virtual ~ExynosCamera3(); + + /** Startup */ + status_t initilizeDevice(const camera3_callback_ops *callbackOps); + + /** Stream configuration and buffer registration */ + status_t configureStreams(camera3_stream_configuration_t *stream_list); + + status_t registerStreamBuffers(const camera3_stream_buffer_set_t *buffer_set); + + /** Template request settings provision */ + status_t construct_default_request_settings(camera_metadata_t **request, int type); + + /** Submission of capture requests to HAL */ + status_t processCaptureRequest(camera3_capture_request *request); + + /** Vendor metadata registration */ + void get_metadata_vendor_tag_ops(const camera3_device_t *, vendor_tag_query_ops_t *ops); + + /** Flush all currently in-process captures and all buffers */ + status_t flush(void); + + /** Print out debugging state for the camera device */ + void dump(void); + + /** Stop */ + status_t releaseDevice(void); + + void release(); + + /* Common functions */ + int getCameraId() const; + +private: + /* Helper functions for initialization */ + void m_createThreads(void); + void m_createManagers(void); + + /* Helper functions for notification */ + status_t m_sendRawCaptureResult(ExynosCameraFrame *frame, uint32_t pipeId, bool isSrc); + status_t m_sendZSLCaptureResult(ExynosCameraFrame *frame, uint32_t pipeId, bool isSrc); + status_t m_sendNotify(uint32_t frameNumber, int type); + + /* Helper functions of Buffer operation */ + status_t m_createIonAllocator(ExynosCameraIonAllocator **allocator); + status_t m_createInternalBufferManager(ExynosCameraBufferManager **bufferManager, const char *name); + status_t m_createServiceBufferManager(ExynosCameraBufferManager **bufferManager, const char *name); + status_t m_createBufferManager( + ExynosCameraBufferManager **bufferManager, + const char *name, + buffer_manager_type type = BUFFER_MANAGER_ION_TYPE); + + status_t m_setBuffers(void); + status_t m_putBuffers(ExynosCameraBufferManager *bufManager, int bufIndex); + + status_t m_registerStreamBuffers(camera3_capture_request *request); + status_t m_registerBuffers( /* for serviceBufferManager */ + ExynosCameraBufferManager *bufManager, + int requestKey, + const camera3_stream_buffer_t *streamBuffer); + + status_t m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int startIndex, /* for serviceBufferManager */ + int reqBufCount, + bool createMetaPlane, + bool needMmap); + + status_t m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int reqBufCount, + bool createMetaPlane, + bool needMmap = false); + status_t 
m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + bool createMetaPlane, + bool needMmap = false); + status_t m_allocBuffers( + ExynosCameraBufferManager *bufManager, + int planeCount, + unsigned int *planeSize, + unsigned int *bytePerLine, + int minBufCount, + int maxBufCount, + exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + bool needMmap = false); + + status_t m_releaseBuffers(void); + + /* helper functions for set buffer to frame */ + status_t m_setupEntity(uint32_t pipeId, ExynosCameraFrame *newFrame, + ExynosCameraBuffer *srcBuf = NULL, + ExynosCameraBuffer *dstBuf = NULL); + status_t m_setSrcBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer); + status_t m_setDstBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer); + + /* status_t m_setupEntity(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBufferManager *srcBufMgr, ExynosCameraBufferManager *dstBufMgr); */ + status_t m_setSrcBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer, ExynosCameraBufferManager *bufMgr); + status_t m_setDstBuffer(uint32_t pipeId, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer, ExynosCameraBufferManager *bufMgr); + + status_t m_resetBufferState(uint32_t pipeId, ExynosCameraFrame *frame); + + status_t m_getBufferManager(uint32_t pipeId, ExynosCameraBufferManager **bufMgr, uint32_t direction); + + /* helper functions for frame factory */ + status_t m_constructFrameFactory(void); + status_t m_startFrameFactory(ExynosCamera3FrameFactory *factory); + status_t m_startReprocessingFrameFactory(ExynosCamera3FrameFactory *factory); + status_t m_stopFrameFactory(ExynosCamera3FrameFactory *factory); + status_t m_stopReprocessingFrameFactory(ExynosCamera3FrameFactory *factory); + status_t m_deinitFrameFactory(); + + /* frame Generation / Done handler */ + status_t m_createRequestFrameFunc(ExynosCameraRequest *request); + status_t m_createInternalFrameFunc(void); + status_t m_createPrepareFrameFunc(ExynosCameraRequest *request); + status_t m_createFrameFunc(void); + + status_t m_previewframeHandler(ExynosCameraRequest *request, ExynosCamera3FrameFactory *targetfactory); + status_t m_captureframeHandler(ExynosCameraRequest *request, ExynosCamera3FrameFactory *targetfactory); + bool m_previewStreamFunc(ExynosCameraFrame *newFrame, int pipeId); + + void m_updateCropRegion(struct camera2_shot_ext *shot_ext); + status_t m_updateJpegControlInfo(const struct camera2_shot_ext *shot_ext); + + /* helper functions for frame */ + status_t m_generateFrame(int32_t frameCount, + ExynosCamera3FrameFactory *factory, + List *list, + Mutex *listLock, + ExynosCameraFrame **newFrame); + status_t m_generateInternalFrame(uint32_t frameCount, + ExynosCamera3FrameFactory *factory, + List *list, + Mutex *listLock, + ExynosCameraFrame **newFrame); + status_t m_searchFrameFromList(List *list, + Mutex *listLock, + uint32_t frameCount, + ExynosCameraFrame **frame); + status_t m_removeFrameFromList(List *list, + Mutex *listLock, + ExynosCameraFrame *frame); + + status_t m_clearList(List *list, Mutex *listLock); + + status_t m_removeInternalFrames(List *list, Mutex *listLock); + status_t m_releaseInternalFrame(ExynosCameraFrame *frame); + status_t m_generateDuplicateBuffers(ExynosCameraFrame *frame, int pipeIdSrc); + status_t 
m_updateTimestamp(ExynosCameraFrame *frame, ExynosCameraBuffer *timestampBuffer, bool flagPushResult = true); + status_t m_handlePreviewFrame(ExynosCameraFrame *frame, int pipeId); + status_t m_handleInternalFrame(ExynosCameraFrame *frame); + status_t m_handleIsChainDone(ExynosCameraFrame *frame); + status_t m_handleScalerDone(ExynosCameraFrame *frame); + status_t m_handleThumbnailReprocessingFrame(ExynosCameraFrame *frame); + status_t m_handleYuvCaptureFrame(ExynosCameraFrame *frame); + status_t m_handleJpegFrame(ExynosCameraFrame *frame); + status_t m_handleBayerBuffer(ExynosCameraFrame *frame); + + /* helper functions for request */ + status_t m_pushRequest(camera3_capture_request *request); + status_t m_popRequest(ExynosCameraRequest **request); + status_t m_deleteRequest(uint32_t frameCount); + status_t m_pushResult(uint32_t frameCount, struct camera2_shot_ext *src_ext); + status_t m_pushJpegResult(ExynosCameraFrame *frame, int size, ExynosCameraBuffer *buffer); + ExynosCameraRequest* m_popResult(CameraMetadata &request, uint32_t frameCount); + bool m_needNotify(ExynosCameraRequest *request); + void m_updateCurrentShot(void); + + /* helper functions for configuration options */ + uint32_t m_getBayerPipeId(void); + status_t m_setFrameManager(); + status_t m_setupFrameFactoryToRequest(); + status_t m_setConfigInform(); + status_t m_setStreamInfo(camera3_stream_configuration *streamList); + status_t m_enumStreamInfo(camera3_stream_t *stream); + + status_t m_getBayerServiceBuffer(ExynosCameraFrame *frame, ExynosCameraBuffer *buffer); + status_t m_getBayerBuffer(uint32_t pipeId, uint32_t frameCount, ExynosCameraBuffer *buffer, ExynosCameraFrameSelector *selector, camera2_shot_ext *updateDmShot = NULL); + status_t m_checkBufferAvailable(uint32_t pipeId, ExynosCameraBufferManager *bufferMgr); + status_t m_convertingStreamToShotExt(ExynosCameraBuffer *buffer, struct camera2_node_output *outputInfo); + + status_t m_doDestCSC(bool enableCSC, ExynosCameraFrame *frame, int pipeIdSrc, int halStreamId, int pipeExtScalerId); + +public: + + +private: + ExynosCameraRequestManager *m_requestMgr; + ExynosCameraMetadataConverter *m_metadataConverter; + ExynosCamera3Parameters *m_parameters; + ExynosCameraStreamManager *m_streamManager; + + ExynosCamera3FrameFactory *m_frameFactory[FRAME_FACTORY_TYPE_MAX]; + framefactory3_queue_t *m_frameFactoryQ; + + ExynosCameraActivityControl *m_activityControl; + + ExynosCameraIonAllocator *m_ionAllocator; + + /* Internal buffer managers */ + ExynosCameraBufferManager *m_fliteBufferMgr; + ExynosCameraBufferManager *m_3aaBufferMgr; + ExynosCameraBufferManager *m_ispBufferMgr; + ExynosCameraBufferManager *m_internalScpBufferMgr; + ExynosCameraBufferManager *m_vraBufferMgr; + ExynosCameraBufferManager *m_gscBufferMgr; + + /* internal reprocessing buffer managers */ + ExynosCameraBufferManager *m_yuvCaptureReprocessingBufferMgr; + ExynosCameraBufferManager *m_ispReprocessingBufferMgr; + ExynosCameraBufferManager *m_yuvCaptureBufferMgr; + ExynosCameraBufferManager *m_thumbnailBufferMgr; + + /* internal temporary buffer managers */ + ExynosCameraBufferManager *m_skipBufferMgr; + + /* service buffer managers */ + ExynosCameraBufferManager *m_bayerBufferMgr; + + ExynosCameraFrameSelector *m_captureSelector; + ExynosCameraFrameSelector *m_captureZslSelector; + ExynosCameraFrameSelector *m_sccCaptureSelector; + uint32_t m_prepareFliteCnt; + +private: + uint32_t m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + mutable Mutex m_requestLock; + mutable Mutex m_resultLock; + 
mutable Condition m_captureResultDoneCondition; + mutable Mutex m_captureResultDoneLock; + bool m_use_companion; + int m_captureCount; + + nsecs_t m_lastFrametime; + + /* HACK : check capture stream */ + bool isCaptureConfig; + bool isRestarted; + + /* HACK : check recording stream */ + bool isRecordingConfig; + bool recordingEnabled; + + bool m_checkConfigStream; + + ExynosCameraFrameManager *m_frameMgr; + struct ExynosConfigInfo *m_exynosconfig; + struct camera2_shot_ext *m_currentShot; + + bool m_flushWaitEnable; + bool m_factoryStartFlag; + bool m_flushFlag; + bool m_flagStartFrameFactory; + bool m_flagStartReprocessingFrameFactory; + bool m_flagBayerRequest; +#ifdef MONITOR_LOG_SYNC + static uint32_t cameraSyncLogId; + int m_syncLogDuration; + uint32_t m_getSyncLogId(void); +#endif + /* process queue */ + List m_processList; + mutable Mutex m_processLock; + List m_captureProcessList; + mutable Mutex m_captureProcessLock; + frame_queue_t *m_pipeFrameDoneQ[MAX_PIPE_NUM]; + + frame_queue_t *m_duplicateBufferDoneQ; + frame_queue_t *m_pipeCaptureFrameDoneQ; + + /* capture Queue */ + frame_queue_t *m_selectBayerQ; + frame_queue_t *m_captureQ; + frame_queue_t *m_reprocessingDoneQ; + + ExynosCameraList *m_shotDoneQ; + List m_requestWaitingList; + + frame_queue_t *m_internalFrameDoneQ; + + int m_internalFrameCount; + bool m_isNeedInternalFrame; + bool m_isNeedRequestFrame; + + /* Thread */ + sp m_mainThread; + bool m_mainThreadFunc(void); + + sp m_internalFrameThread; + bool m_internalFrameThreadFunc(void); + + sp m_previewStreamBayerThread; + bool m_previewStreamBayerPipeThreadFunc(void); + + sp m_previewStream3AAThread; + bool m_previewStream3AAPipeThreadFunc(void); + + sp m_previewStreamISPThread; + bool m_previewStreamISPPipeThreadFunc(void); + + sp m_previewStreamVRAThread; + bool m_previewStreamVRAPipeThreadFunc(void); + + sp m_duplicateBufferThread; + bool m_duplicateBufferThreadFunc(void); + + sp m_selectBayerThread; + bool m_selectBayerThreadFunc(void); + + sp m_captureThread; + bool m_captureThreadFunc(void); + + sp m_captureStreamThread; + bool m_captureStreamThreadFunc(void); + + sp m_setBuffersThread; + bool m_setBuffersThreadFunc(void); + + sp m_framefactoryCreateThread; + bool m_frameFactoryCreateThreadFunc(void); + + sp m_reprocessingFrameFactoryStartThread; + bool m_reprocessingFrameFactoryStartThreadFunc(void); + + sp m_startPictureBufferThread; + bool m_startPictureBufferThreadFunc(void); + + bool m_frameFactoryStartDone; + sp m_frameFactoryStartThread; + bool m_frameFactoryStartThreadFunc(void); + + status_t m_setupReprocessingPipeline(void); + + sp m_monitorThread; + bool m_monitorThreadFunc(void); + + status_t m_setInternalScpBuffer(void); + + status_t m_setPictureBuffer(void); + + status_t m_setReprocessingBuffer(void); + + bool m_doInternalFrame(ExynosCameraRequest *request); +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameFactory.cpp b/libcamera/34xx/hal3/ExynosCamera3FrameFactory.cpp new file mode 100644 index 0000000..81ab53b --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameFactory.cpp @@ -0,0 +1,963 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/* #define LOG_NDEBUG 0 */
+#define LOG_TAG "ExynosCamera3FrameFactory"
+#include
+
+#include "ExynosCamera3FrameFactory.h"
+
+namespace android {
+
+ExynosCamera3FrameFactory::~ExynosCamera3FrameFactory()
+{
+    int ret = 0;
+
+    ret = destroy();
+    if (ret < 0)
+        CLOGE2("destroy fail");
+}
+
+status_t ExynosCamera3FrameFactory::destroy(void)
+{
+    CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__);
+    status_t ret = NO_ERROR;
+
+    for (int i = 0; i < MAX_NUM_PIPES; i++) {
+        if (m_pipes[i] != NULL) {
+            ret = m_pipes[i]->destroy();
+            if (ret != NO_ERROR) {
+                CLOGE2("m_pipes[%d]->destroy() fail", i);
+                return ret;
+            }
+
+            SAFE_DELETE(m_pipes[i]);
+
+            CLOGD2("Pipe(%d) destroyed", i);
+        }
+    }
+
+    m_setCreate(false);
+
+    return ret;
+}
+
+status_t ExynosCamera3FrameFactory::setFrameManager(ExynosCameraFrameManager *manager)
+{
+    m_frameMgr = manager;
+    return NO_ERROR;
+}
+
+status_t ExynosCamera3FrameFactory::getFrameManager(ExynosCameraFrameManager **manager)
+{
+    *manager = m_frameMgr;
+    return NO_ERROR;
+}
+
+bool ExynosCamera3FrameFactory::isCreated(void)
+{
+    return m_getCreate();
+}
+
+status_t ExynosCamera3FrameFactory::m_setCreate(bool create)
+{
+    Mutex::Autolock lock(m_createLock);
+    CLOGD2("setCreate old(%s) new(%s)", (m_create)?"true":"false", (create)?"true":"false");
+    m_create = create;
+    return NO_ERROR;
+}
+
+bool ExynosCamera3FrameFactory::m_getCreate()
+{
+    Mutex::Autolock lock(m_createLock);
+    return m_create;
+}
+
+int ExynosCamera3FrameFactory::m_getFliteNodenum()
+{
+    int fliteNodeNum = FIMC_IS_VIDEO_SS0_NUM;
+
+    fliteNodeNum = (m_cameraId == CAMERA_ID_BACK)?MAIN_CAMERA_FLITE_NUM:FRONT_CAMERA_FLITE_NUM;
+
+    return fliteNodeNum;
+}
+
+int ExynosCamera3FrameFactory::m_getSensorId(__unused unsigned int nodeNum, bool reprocessing)
+{
+    unsigned int reprocessingBit = 0;
+    unsigned int nodeNumBit = 0;
+    unsigned int sensorIdBit = 0;
+    unsigned int sensorId = getSensorId(m_cameraId);
+
+    if (reprocessing == true)
+        reprocessingBit = (1 << REPROCESSING_SHIFT);
+
+    /*
+     * hack
+     * nodeNum - FIMC_IS_VIDEO_BAS_NUM is proper.
+ * but, historically, FIMC_IS_VIDEO_SS0_NUM - FIMC_IS_VIDEO_SS0_NUM is worked properly + */ + //nodeNumBit = ((nodeNum - FIMC_IS_VIDEO_BAS_NUM) << SSX_VINDEX_SHIFT); + nodeNumBit = ((FIMC_IS_VIDEO_SS0_NUM - FIMC_IS_VIDEO_SS0_NUM) << SSX_VINDEX_SHIFT); + + sensorIdBit = (sensorId << 0); + + return (reprocessingBit) | + (nodeNumBit) | + (sensorIdBit); +} + +int ExynosCamera3FrameFactory::m_getSensorId(unsigned int nodeNum, bool flagOTFInterface, bool flagLeader, bool reprocessing) +{ + /* sub 100, and make index */ + nodeNum -= 100; + + unsigned int reprocessingBit = 0; + unsigned int otfInterfaceBit = 0; + unsigned int leaderBit = 0; + unsigned int sensorId = getSensorId(m_cameraId); + + if (reprocessing == true) + reprocessingBit = 1; + + if (flagLeader == true) + leaderBit = 1; + + if (flagOTFInterface == true) + otfInterfaceBit = 1; + + return ((reprocessingBit << INPUT_STREAM_SHIFT) & INPUT_STREAM_MASK) | + ((sensorId << INPUT_MODULE_SHIFT) & INPUT_MODULE_MASK) | + ((nodeNum << INPUT_VINDEX_SHIFT) & INPUT_VINDEX_MASK) | + ((otfInterfaceBit << INPUT_MEMORY_SHIFT) & INPUT_MEMORY_MASK) | + ((leaderBit << INPUT_LEADER_SHIFT) & INPUT_LEADER_MASK); +} + + +status_t ExynosCamera3FrameFactory::m_initFrameMetadata(ExynosCameraFrame *frame) +{ + int ret = 0; + struct camera2_shot_ext *shot_ext = new struct camera2_shot_ext; + + if (shot_ext == NULL) { + CLOGE2("new struct camera2_shot_ext fail"); + return INVALID_OPERATION; + } + + memset(shot_ext, 0x0, sizeof(struct camera2_shot_ext)); + + shot_ext->shot.magicNumber = SHOT_MAGIC_NUMBER; + + /* TODO: These bypass values are enabled at per-frame control */ +#if 1 + m_bypassDRC = m_parameters->getDrcEnable(); + m_bypassDNR = m_parameters->getDnrEnable(); + m_bypassDIS = m_parameters->getDisEnable(); + m_bypassFD = m_parameters->getFdEnable(); +#endif + setMetaBypassDrc(shot_ext, m_bypassDRC); + setMetaBypassDnr(shot_ext, m_bypassDNR); + setMetaBypassDis(shot_ext, m_bypassDIS); + setMetaBypassFd(shot_ext, m_bypassFD); + + ret = frame->initMetaData(shot_ext); + if (ret < 0) + CLOGE2("initMetaData fail"); + + frame->setRequest(m_request3AP, + m_request3AC, + m_requestISP, + m_requestISPP, + m_requestISPC, + m_requestSCC, + m_requestDIS, + m_requestSCP); + + if (m_flagReprocessing == true) { + frame->setRequest(PIPE_MCSC0_REPROCESSING, m_requestSCP); + frame->setRequest(PIPE_HWFC_JPEG_SRC_REPROCESSING, m_requestJPEG); + frame->setRequest(PIPE_HWFC_JPEG_DST_REPROCESSING, m_requestJPEG); + frame->setRequest(PIPE_HWFC_THUMB_SRC_REPROCESSING, m_requestThumbnail); + frame->setRequest(PIPE_HWFC_THUMB_DST_REPROCESSING, m_requestThumbnail); + } + + delete shot_ext; + shot_ext = NULL; + + return ret; +} + +int ExynosCamera3FrameFactory::setSrcNodeEmpty(int sensorId) +{ + return (sensorId & INPUT_STREAM_MASK) | + (sensorId & INPUT_MODULE_MASK) | + (0 & INPUT_VINDEX_MASK) | + (sensorId & INPUT_MEMORY_MASK) | + (sensorId & INPUT_LEADER_MASK); +} + +int ExynosCamera3FrameFactory::setLeader(int sensorId, bool flagLeader) +{ + return (sensorId & INPUT_STREAM_MASK) | + (sensorId & INPUT_MODULE_MASK) | + (sensorId & INPUT_VINDEX_MASK) | + (sensorId & INPUT_MEMORY_MASK) | + ((flagLeader)?1:0 & INPUT_LEADER_MASK); +} + +ExynosCameraFrame *ExynosCamera3FrameFactory::createNewFrameOnlyOnePipe(int pipeId, int frameCnt) +{ + Mutex::Autolock lock(m_frameLock); + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {}; + + if (frameCnt < 0) { + frameCnt = m_frameCount; + } + + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, frameCnt); + if 
(frame == NULL) + return NULL; + + /* set pipe to linkageList */ + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + + return frame; +} + +ExynosCameraFrame *ExynosCamera3FrameFactory::createNewFrameVideoOnly(void) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {}; + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount); + if (frame == NULL) + return NULL; + + /* set GSC-Video pipe to linkageList */ + newEntity[INDEX(PIPE_GSC_VIDEO)] = new ExynosCameraFrameEntity(PIPE_GSC_VIDEO, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VIDEO)]); + + return frame; +} + +status_t ExynosCamera3FrameFactory::m_initPipelines(ExynosCameraFrame *frame) +{ + ExynosCameraFrameEntity *curEntity = NULL; + ExynosCameraFrameEntity *childEntity = NULL; + frame_queue_t *frameQ = NULL; + int ret = 0; + + curEntity = frame->getFirstEntity(); + + while (curEntity != NULL) { + childEntity = curEntity->getNextEntity(); + if (childEntity != NULL) { + ret = getInputFrameQToPipe(&frameQ, childEntity->getPipeId()); + if (ret < 0 || frameQ == NULL) { + CLOGE2("getInputFrameQToPipe fail, ret(%d), frameQ(%p)", ret, frameQ); + return ret; + } + + ret = setOutputFrameQToPipe(frameQ, curEntity->getPipeId()); + if (ret < 0) { + CLOGE2("setOutputFrameQToPipe fail, ret(%d)", ret); + return ret; + } + + if (childEntity->getPipeId() != PIPE_VRA) { + /* check Image Configuration Equality */ + ret = m_checkPipeInfo(curEntity->getPipeId(), childEntity->getPipeId()); + if (ret < 0) { + CLOGE2("checkPipeInfo fail, Pipe[%d], Pipe[%d]", curEntity->getPipeId(), childEntity->getPipeId()); + return ret; + } + } + + curEntity = childEntity; + } else { + curEntity = frame->getNextEntity(); + } + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::pushFrameToPipe(ExynosCameraFrame **newFrame, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->pushFrame(newFrame); + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::setOutputFrameQToPipe(frame_queue_t *outputQ, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->setOutputFrameQ(outputQ); + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::getOutputFrameQToPipe(frame_queue_t **outputQ, uint32_t pipeId) +{ + CLOGV2("pipeId=%d", pipeId); + m_pipes[INDEX(pipeId)]->getOutputFrameQ(outputQ); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::setFrameDoneQToPipe(frame_queue_t *frameDoneQ, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->setFrameDoneQ(frameDoneQ); + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::getFrameDoneQToPipe(frame_queue_t **frameDoneQ, uint32_t pipeId) +{ + CLOGV2("pipeId=%d", pipeId); + m_pipes[INDEX(pipeId)]->getFrameDoneQ(frameDoneQ); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::getInputFrameQToPipe(frame_queue_t **inputFrameQ, uint32_t pipeId) +{ + CLOGV2("pipeId=%d", pipeId); + + m_pipes[INDEX(pipeId)]->getInputFrameQ(inputFrameQ); + + if (inputFrameQ == NULL) + CLOGE2("inputFrameQ is NULL"); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::setBufferManagerToPipe(ExynosCameraBufferManager **bufferManager, uint32_t pipeId) +{ + if (m_pipes[INDEX(pipeId)] == NULL) { + CLOGE2("m_pipes[%d] is NULL. 
pipeId(%d)", INDEX(pipeId), pipeId); + return INVALID_OPERATION; + } + + return m_pipes[INDEX(pipeId)]->setBufferManager(bufferManager); +} + +status_t ExynosCamera3FrameFactory::getThreadState(int **threadState, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->getThreadState(threadState); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::getThreadInterval(uint64_t **threadInterval, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->getThreadInterval(threadInterval); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::getThreadRenew(int **threadRenew, uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->getThreadRenew(threadRenew); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::incThreadRenew(uint32_t pipeId) +{ + m_pipes[INDEX(pipeId)]->incThreadRenew(); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::preparePipes(__unused uint32_t prepareCnt) +{ + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::startThread(uint32_t pipeId) +{ + int ret = 0; + + CLOGI2("pipeId=%d", pipeId); + + ret = m_pipes[INDEX(pipeId)]->startThread(); + if (ret < 0) { + CLOGE2("start thread fail, pipeId(%d), ret(%d)", pipeId, ret); + /* TODO: exception handling */ + } + return ret; +} + +status_t ExynosCamera3FrameFactory::stopThread(uint32_t pipeId) +{ + int ret = 0; + + CLOGI2("pipeId=%d", pipeId); + + if (m_pipes[INDEX(pipeId)] == NULL) { + CLOGE2("m_pipes[INDEX(%d)] == NULL. so, fail", pipeId); + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(pipeId)]->stopThread(); + if (ret < 0) { + CLOGE2("stop thread fail, pipeId(%d), ret(%d)", pipeId, ret); + /* TODO: exception handling */ + } + return ret; +} + +status_t ExynosCamera3FrameFactory::stopThreadAndWait(uint32_t pipeId, int sleep, int times) +{ + status_t status = NO_ERROR; + + CLOGI2("pipeId=%d", pipeId); + status = m_pipes[INDEX(pipeId)]->stopThreadAndWait(sleep, times); + if (status < 0) { + CLOGE2("pipe(%d) stopThreadAndWait fail, ret(%d)", pipeId); + /* TODO: exception handling */ + status = INVALID_OPERATION; + } + return status; +} + +status_t ExynosCamera3FrameFactory::setStopFlag(void) +{ + CLOGE2("Must use the concreate class, don't use superclass"); + return INVALID_OPERATION; +} + +status_t ExynosCamera3FrameFactory::stopPipe(uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->stopThread(); + if (ret < 0) { + CLOGE2("Pipe:%d stopThread fail, ret(%d)", pipeId, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(pipeId)]->stop(); + if (ret < 0) { + CLOGE2("Pipe:%d stop fail, ret(%d)", pipeId, ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::stopPipes(void) +{ + CLOGE2("Must use the concreate class, don't use superclass"); + return INVALID_OPERATION; +} + +void ExynosCamera3FrameFactory::dump() +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + m_pipes[i]->dump(); + } + } + + return; +} + +void ExynosCamera3FrameFactory::setRequest(int pipeId, bool enable) +{ + switch (pipeId) { + case PIPE_FLITE: + case PIPE_FLITE_REPROCESSING: + m_requestFLITE = enable ? 1 : 0; + break; + case PIPE_3AC: + case PIPE_3AC_REPROCESSING: + m_request3AC = enable ? 1 : 0; + break; + case PIPE_3AP: + case PIPE_3AP_REPROCESSING: + m_request3AP = enable ? 1 : 0; + break; + case PIPE_ISPC: + case PIPE_ISPC_REPROCESSING: + m_requestISPC = enable ? 
1 : 0; + break; + case PIPE_ISPP: + case PIPE_ISPP_REPROCESSING: + m_requestISPP = enable ? 1 : 0; + break; + case PIPE_MCSC0: + case PIPE_MCSC0_REPROCESSING: + m_requestSCP = enable ? 1 : 0; + break; + case PIPE_HWFC_JPEG_SRC_REPROCESSING: + case PIPE_HWFC_JPEG_DST_REPROCESSING: + m_requestJPEG = enable ? 1 : 0; + break; + case PIPE_HWFC_THUMB_SRC_REPROCESSING: + case PIPE_HWFC_THUMB_DST_REPROCESSING: + m_requestThumbnail = enable ? 1 : 0; + break; + default: + CLOGW("WRN(%s[%d]):Invalid pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + break; + } +} + +void ExynosCamera3FrameFactory::setRequestFLITE(bool enable) +{ +#if 1 + m_requestFLITE = enable ? 1 : 0; +#else + /* If not FLite->3AA OTF, FLite must be on */ + if (m_flagFlite3aaOTF == true) { + m_requestFLITE = enable ? 1 : 0; + } else { + CLOGW2("isFlite3aaOtf (%d) == false). so Skip set m_requestFLITE(%d) as (%d)", m_cameraId, m_requestFLITE, enable); + } +#endif + +} + +void ExynosCamera3FrameFactory::setRequest3AC(bool enable) +{ +#if 1 + m_request3AC = enable ? 1 : 0; +#else + /* From 74xx, Front will use reprocessing. so, we need to prepare BDS */ + if (isReprocessing(m_cameraId) == true) { + if (m_parameters->getUsePureBayerReprocessing() == true) { + m_request3AC = 0; + } else { + m_request3AC = enable ? 1 : 0; + } + } else { + m_request3AC = 0; + } +#endif +} + +void ExynosCamera3FrameFactory::setRequestISPC(bool enable) +{ + m_requestISPC = enable ? 1 : 0; +} + +void ExynosCamera3FrameFactory::setRequestISPP(bool enable) +{ + m_requestISPP = enable ? 1 : 0; +} + +void ExynosCamera3FrameFactory::setRequestSCC(bool enable) +{ + m_requestSCC = enable ? 1 : 0; +} + +void ExynosCamera3FrameFactory::setRequestSCP(bool enable) +{ + m_requestSCP = enable ? 1 : 0; +} + +void ExynosCamera3FrameFactory::setRequestDIS(bool enable) +{ + m_requestDIS = enable ? 1 : 0; +} + +status_t ExynosCamera3FrameFactory::setParam(struct v4l2_streamparm *streamParam, uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->setParam(*streamParam); + + return ret; +} + +status_t ExynosCamera3FrameFactory::m_checkPipeInfo(uint32_t srcPipeId, uint32_t dstPipeId) +{ + int srcFullW, srcFullH, srcColorFormat; + int dstFullW, dstFullH, dstColorFormat; + int isDifferent = 0; + int ret = 0; + + ret = m_pipes[INDEX(srcPipeId)]->getPipeInfo(&srcFullW, &srcFullH, &srcColorFormat, SRC_PIPE); + if (ret < 0) { + CLOGE2("Source getPipeInfo fail"); + return ret; + } + ret = m_pipes[INDEX(dstPipeId)]->getPipeInfo(&dstFullW, &dstFullH, &dstColorFormat, DST_PIPE); + if (ret < 0) { + CLOGE2("Destination getPipeInfo fail"); + return ret; + } + + if (srcFullW != dstFullW || srcFullH != dstFullH || srcColorFormat != dstColorFormat) { + CLOGE2("Video Node Image Configuration is NOT matching. 
so, fail"); + + CLOGE2("fail info : srcPipeId(%d), srcFullW(%d), srcFullH(%d), srcColorFormat(%d)", + srcPipeId, srcFullW, srcFullH, srcColorFormat); + + CLOGE2("fail info : dstPipeId(%d), dstFullW(%d), dstFullH(%d), dstColorFormat(%d)", + dstPipeId, dstFullW, dstFullH, dstColorFormat); + + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactory::dumpFimcIsInfo(uint32_t pipeId, bool bugOn) +{ + int ret = 0; + int pipeIdIsp = 0; + + if (m_pipes[INDEX(pipeId)] != NULL) + ret = m_pipes[INDEX(pipeId)]->dumpFimcIsInfo(bugOn); + else + CLOGE2("pipe is not ready (%d/%d)", pipeId, bugOn); + + return ret; +} + +#ifdef MONITOR_LOG_SYNC +status_t ExynosCamera3FrameFactory::syncLog(uint32_t pipeId, uint32_t syncId) +{ + int ret = 0; + int pipeIdIsp = 0; + + if (m_pipes[INDEX(pipeId)] != NULL) + ret = m_pipes[INDEX(pipeId)]->syncLog(syncId); + else + CLOGE2("pipe is not ready (%d/%d)", pipeId, syncId); + + return ret; +} +#endif + +status_t ExynosCamera3FrameFactory::setControl(int cid, int value, uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->setControl(cid, value); + + return ret; +} + +bool ExynosCamera3FrameFactory::checkPipeThreadRunning(uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->isThreadRunning(); + + return ret; +} + +status_t ExynosCamera3FrameFactory::getControl(int cid, int *value, uint32_t pipeId) +{ + int ret = 0; + + ret = m_pipes[INDEX(pipeId)]->getControl(cid, value); + + return ret; +} + +status_t ExynosCamera3FrameFactory::m_checkNodeSetting(int pipeId) +{ + status_t ret = NO_ERROR; + + for (int i = 0; i < MAX_NODE; i++) { + /* in case of wrong nodeNums set */ + if (m_nodeInfo[pipeId].nodeNum[i] != m_nodeNums[pipeId][i]) { + CLOGE2("m_nodeInfo[%d].nodeNum[%d](%d) != m_nodeNums[%d][%d](%d). so, fail", + pipeId, i, m_nodeInfo[pipeId].nodeNum[i], + pipeId, i, m_nodeNums[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of not set sensorId */ + if (0 < m_nodeInfo[pipeId].nodeNum[i] && m_sensorIds[pipeId][i] < 0) { + CLOGE2("0 < m_nodeInfo[%d].nodeNum[%d](%d) && m_sensorIds[%d][%d](%d) < 0. so, fail", + pipeId, i, m_nodeInfo[pipeId].nodeNum[i], + pipeId, i, m_sensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of strange set sensorId */ + if (m_nodeInfo[pipeId].nodeNum[i] < 0 && 0 < m_sensorIds[pipeId][i]) { + CLOGE2("m_nodeInfo[%d].nodeNum[%d](%d) < 0 && 0 < m_sensorIds[%d][%d](%d). so, fail", + pipeId, i, m_nodeInfo[pipeId].nodeNum[i], + pipeId, i, m_sensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of not set secondarySensorId */ + if (0 < m_nodeInfo[pipeId].secondaryNodeNum[i] && m_secondarySensorIds[pipeId][i] < 0) { + CLOGE2("0 < m_nodeInfo[%d].secondaryNodeNum[%d](%d) && m_secondarySensorIds[%d][%d](%d) < 0. so, fail", + pipeId, i, m_nodeInfo[pipeId].secondaryNodeNum[i], + pipeId, i, m_secondarySensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + + /* in case of strange set secondarySensorId */ + if (m_nodeInfo[pipeId].secondaryNodeNum[i] < 0 && 0 < m_secondarySensorIds[pipeId][i]) { + CLOGE2("m_nodeInfo[%d].secondaryNodeNum[%d](%d) < 0 && 0 < m_secondarySensorIds[%d][%d](%d). 
so, fail", + pipeId, i, m_nodeInfo[pipeId].secondaryNodeNum[i], + pipeId, i, m_secondarySensorIds[pipeId][i]); + + ret = BAD_VALUE; + goto err; + } + } + +err: + return ret; +} + +int ExynosCamera3FrameFactory::m_initFlitePipe(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + enum NODE_TYPE nodeType = (enum NODE_TYPE)0; + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int bayerFormat = m_parameters->getBayerFormat(PIPE_FLITE); + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + struct ExynosConfigInfo *config = m_parameters->getConfig(); + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + CLOGI2("MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", maxSensorW, maxSensorH, hwSensorW, hwSensorH); + + /* FLITE pipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + /* setParam for Frame rate : must after setInput on Flite */ + uint32_t min, max, frameRate; + struct v4l2_streamparm streamParam; + + memset(&streamParam, 0x0, sizeof(v4l2_streamparm)); + m_parameters->getPreviewFpsRange(&min, &max); + + if (m_parameters->getScalableSensorMode() == true) + frameRate = 24; + else + frameRate = max; + + streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + streamParam.parm.capture.timeperframe.numerator = 1; + streamParam.parm.capture.timeperframe.denominator = frameRate; + CLOGI2("set framerate (denominator=%d)", frameRate); + ret = setParam(&streamParam, PIPE_FLITE); + if (ret < 0) { + CLOGE2("FLITE setParam fail, ret(%d)", ret); + return INVALID_OPERATION; + } + + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; + tempRect.colorFormat = bayerFormat; + + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[nodeType].bufInfo.count = MAX_BUFFERS; + /* per frame info */ + pipeInfo[nodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* set v4l2 video node bytes per plane */ + switch (bayerFormat) { + case V4L2_PIX_FMT_SBGGR16: + pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN); + break; + case V4L2_PIX_FMT_SBGGR12: + pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 3 / 2), CAMERA_16PX_ALIGN); + break; + case V4L2_PIX_FMT_SBGGR10: + pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 5 / 4), CAMERA_16PX_ALIGN); + break; + default: + CLOGW("WRN(%s[%d]):Invalid bayer format(%d)", __FUNCTION__, __LINE__, bayerFormat); + pipeInfo[nodeType].bytesPerPlane[0] = ROUND_UP((tempRect.fullW * 2), CAMERA_16PX_ALIGN); + break; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE2("FLITE setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* set BNS ratio */ + int bnsScaleRatio = 0; + int bnsSize = 0; + if( m_parameters->getHighSpeedRecording() +#ifdef USE_BINNING_MODE + || m_parameters->getBinningMode() +#endif + ) { + bnsScaleRatio = 1000; + } else { + bnsScaleRatio = m_parameters->getBnsScaleRatio(); + } + ret = m_pipes[INDEX(PIPE_FLITE)]->setControl(V4L2_CID_IS_S_BNS, 
bnsScaleRatio); + if (ret < 0) { + CLOGE2("set BNS(%d) fail, ret(%d)", bnsScaleRatio, ret); + } else { + ret = m_pipes[INDEX(PIPE_FLITE)]->getControl(V4L2_CID_IS_G_BNS_SIZE, &bnsSize); + if (ret < 0) { + CLOGE2("get BNS size fail, ret(%d)", ret); + bnsSize = -1; + } + } + + int bnsWidth = 0; + int bnsHeight = 0; + if (bnsSize > 0) { + bnsHeight = bnsSize & 0xffff; + bnsWidth = bnsSize >> 16; + + CLOGI2("BNS scale down ratio(%.1f), size (%dx%d)", (float)(bnsScaleRatio / 1000), bnsWidth, bnsHeight); + m_parameters->setBnsSize(bnsWidth, bnsHeight); + } + + return NO_ERROR; +} + + +/* added by 3.2 HAL */ +status_t ExynosCamera3FrameFactory::setFrameCreateHandler(factory_handler_t handler) +{ + status_t ret = NO_ERROR; + m_frameCreateHandler = handler; + return ret; +} + +/* added by 3.2 HAL */ +factory_handler_t ExynosCamera3FrameFactory::getFrameCreateHandler() +{ + return m_frameCreateHandler; +} + +status_t ExynosCamera3FrameFactory::setFrameDoneHandler(factory_donehandler_t handler) +{ + status_t ret = NO_ERROR; + m_frameDoneHandler = handler; + return ret; +} + +/* added by 3.2 HAL */ +factory_donehandler_t ExynosCamera3FrameFactory::getFrameDoneHandler() +{ + return m_frameDoneHandler; +} + +void ExynosCamera3FrameFactory::m_initDeviceInfo(int pipeId) +{ + camera_device_info_t nullDeviceInfo; + + m_nodeInfo[pipeId] = nullDeviceInfo; + + for (int i = 0; i < MAX_NODE; i++) { + // set nodeNum + m_nodeNums[pipeId][i] = m_nodeInfo[pipeId].nodeNum[i]; + + // set default sensorId + m_sensorIds[pipeId][i] = -1; + + // set second sensorId + m_secondarySensorIds[pipeId][i] = -1; + } +} + +void ExynosCamera3FrameFactory::m_init(void) +{ + m_cameraId = 0; + memset(m_name, 0x00, sizeof(m_name)); + m_frameCount = 0; + + memset(m_nodeNums, -1, sizeof(m_nodeNums)); + memset(m_sensorIds, -1, sizeof(m_sensorIds)); + memset(m_secondarySensorIds, -1, sizeof(m_secondarySensorIds)); + + for (int i = 0; i < MAX_NUM_PIPES; i++) + m_pipes[i] = NULL; + + /* setting about request */ + m_requestFLITE = 0; + + m_request3AP = 0; + m_request3AC = 0; + m_requestISP = 1; + + m_requestISPP = 0; + m_requestISPC = 0; + m_requestSCC = 0; + + m_requestDIS = 0; + m_requestSCP = 1; + + m_requestVRA = 0; + + /* setting about bypass */ + m_bypassDRC = true; + m_bypassDIS = true; + m_bypassDNR = true; + m_bypassFD = true; + + m_setCreate(false); + + m_flagFlite3aaOTF = false; + m_flag3aaIspOTF = false; + m_flagIspTpuOTF = false; + m_flagIspMcscOTF = false; + m_flagTpuMcscOTF = false; + m_flagMcscVraOTF = false; + m_supportReprocessing = false; + m_flagReprocessing = false; + m_supportPureBayerReprocessing = false; + m_supportSCC = false; + m_supportMCSC = false; + +#ifdef USE_INTERNAL_FRAME + /* internal frame */ + //m_internalFrameCount = 0; +#endif /* #ifdef USE_INTERNAL_FRAME */ +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameFactory.h b/libcamera/34xx/hal3/ExynosCamera3FrameFactory.h new file mode 100644 index 0000000..2561d2d --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameFactory.h @@ -0,0 +1,236 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA3_FRAME_FACTORY_H +#define EXYNOS_CAMERA3_FRAME_FACTORY_H + +#include "ExynosCameraConfig.h" + +#include "ExynosCameraFrame.h" +#include "ExynosCameraPipe.h" +#include "ExynosCameraMCPipe.h" +#include "ExynosCameraPipeFlite.h" +#include "ExynosCameraPipeVRA.h" +#include "ExynosCameraPipeGSC.h" +#include "ExynosCameraPipeJpeg.h" +#include "ExynosCameraFrameManager.h" + +#include "ExynosCamera3Parameters.h" + +#include "ExynosCameraUtilsModule.h" + +namespace android { + +#define SET_OUTPUT_DEVICE_BASIC_INFO(perframeInfo) \ + pipeInfo[nodeType].rectInfo = tempRect;\ + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;\ + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = perframeInfo;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER;\ + pipeInfo[nodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_nodeInfo[INDEX(pipeId)].nodeNum[nodeType] - FIMC_IS_VIDEO_BAS_NUM); +#define SET_CAPTURE_DEVICE_BASIC_INFO() \ + pipeInfo[nodeType].rectInfo = tempRect;\ + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;\ + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE;\ + pipeInfo[leaderNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE;\ + pipeInfo[leaderNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_nodeInfo[INDEX(pipeId)].nodeNum[nodeType] - FIMC_IS_VIDEO_BAS_NUM);\ + +/* added by 3.2 HAL */ +class ExynosCamera3; +class ExynosCameraRequest; +class ExynosCamera3FrameFactory; +typedef status_t (ExynosCamera3::*factory_handler_t)(ExynosCameraRequest*, ExynosCamera3FrameFactory*); +typedef bool (ExynosCamera3::*factory_donehandler_t)(); + +class ExynosCamera3FrameFactory { +public: + ExynosCamera3FrameFactory() + { + m_init(); + } + + ExynosCamera3FrameFactory(int cameraId, ExynosCamera3Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? 
"FrameFactoryBack" : "FrameFactoryFront"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCamera3FrameFactory(); + + virtual status_t create(bool active = true) = 0; + + virtual status_t setFrameManager(ExynosCameraFrameManager *manager); + virtual status_t getFrameManager(ExynosCameraFrameManager **manager); + virtual status_t destroy(void); + virtual bool isCreated(void); + virtual enum NODE_TYPE getNodeType(uint32_t pipeId) = 0; + + virtual ExynosCameraFrame *createNewFrameOnlyOnePipe(int pipeId, int frameCnt=-1); + virtual ExynosCameraFrame *createNewFrameVideoOnly(void); + virtual ExynosCameraFrame *createNewFrame(uint32_t frameCount = 0) = 0; + + virtual status_t initPipes(void) = 0; + virtual status_t preparePipes(void) = 0; + virtual status_t preparePipes(uint32_t prepareCnt); + virtual status_t startPipes(void) = 0; + virtual status_t stopPipes(void) = 0; + virtual status_t startInitialThreads(void) = 0; + + virtual status_t pushFrameToPipe(ExynosCameraFrame **newFrame, uint32_t pipeId); + virtual status_t setOutputFrameQToPipe(frame_queue_t *outputQ, uint32_t pipeId); + virtual status_t getOutputFrameQToPipe(frame_queue_t **outputQ, uint32_t pipeId); + virtual status_t setFrameDoneQToPipe(frame_queue_t *frameDoneQ, uint32_t pipeId); + virtual status_t getFrameDoneQToPipe(frame_queue_t **frameDoneQ, uint32_t pipeId); + virtual status_t getInputFrameQToPipe(frame_queue_t **inputFrameQ, uint32_t pipeId); + + virtual status_t setBufferManagerToPipe(ExynosCameraBufferManager **bufferManager, uint32_t pipeId); + + virtual status_t startThread(uint32_t pipeId); + virtual status_t stopThread(uint32_t pipeId); + virtual status_t stopThreadAndWait(uint32_t pipeId, int sleep = 5, int times = 40); + virtual status_t setStopFlag(void); + virtual status_t stopPipe(uint32_t pipeId); + + virtual status_t getThreadState(int **threadState, uint32_t pipeId); + virtual status_t getThreadInterval(uint64_t **threadInterval, uint32_t pipeId); + virtual status_t getThreadRenew(int **threadRenew, uint32_t pipeId); + virtual status_t incThreadRenew(uint32_t pipeId); + virtual void dump(void); + + virtual void setRequest(int pipeId, bool enable); + virtual void setRequestFLITE(bool enable); + virtual void setRequest3AC(bool enable); + virtual void setRequestISPC(bool enable); + virtual void setRequestSCC(bool enable); + virtual void setRequestSCP(bool enable); + virtual void setRequestISPP(bool enable); + virtual void setRequestDIS(bool enable); + + virtual status_t setParam(struct v4l2_streamparm *streamParam, uint32_t pipeId); + virtual status_t setControl(int cid, int value, uint32_t pipeId); + virtual status_t getControl(int cid, int *value, uint32_t pipeId); + + virtual bool checkPipeThreadRunning(uint32_t pipeId); + + /* only for debugging */ + virtual status_t dumpFimcIsInfo(uint32_t pipeId, bool bugOn); +#ifdef MONITOR_LOG_SYNC + virtual status_t syncLog(uint32_t pipeId, uint32_t syncId); +#endif + +/* added by 3.2 HAL */ + status_t setFrameCreateHandler(factory_handler_t handler); + factory_handler_t getFrameCreateHandler(); + status_t setFrameDoneHandler(factory_donehandler_t handler); + factory_donehandler_t getFrameDoneHandler(); + +protected: + virtual status_t m_initPipelines(ExynosCameraFrame *frame); + virtual status_t m_initFrameMetadata(ExynosCameraFrame *frame); + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame) = 0; + virtual status_t m_checkPipeInfo(uint32_t srcPipeId, uint32_t dstPipeId); + virtual status_t m_setCreate(bool 
create); + virtual bool m_getCreate(); + virtual int m_getFliteNodenum(); + + /* 54xx style*/ + virtual int m_getSensorId(unsigned int nodeNum, bool reprocessing); + + /* 74xx style*/ + virtual int m_getSensorId(unsigned int nodeNum, bool flagOTFInterface, bool flagLeader, bool reprocessing); + + virtual int setSrcNodeEmpty(int sensorId); + virtual int setLeader(int sensorId, bool flagLeader); + virtual status_t m_setupConfig(void) = 0; + virtual status_t m_checkNodeSetting(int pipeId); + virtual void m_initDeviceInfo(int pipeId); + virtual status_t m_initFlitePipe(void); + +private: + void m_init(void); + +protected: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + + ExynosCameraPipe *m_pipes[MAX_NUM_PIPES]; + + int32_t m_nodeNums[MAX_NUM_PIPES][MAX_NODE]; + int32_t m_sensorIds[MAX_NUM_PIPES][MAX_NODE]; + int32_t m_secondarySensorIds[MAX_NUM_PIPES][MAX_NODE]; + camera_device_info_t m_nodeInfo[MAX_NUM_PIPES]; + + ExynosCamera3Parameters *m_parameters; + + ExynosCameraFrameManager *m_frameMgr; + + uint32_t m_frameCount; + Mutex m_frameLock; + + ExynosCameraActivityControl *m_activityControl; + + uint32_t m_requestFLITE; + uint32_t m_request3AP; + uint32_t m_request3AC; + uint32_t m_requestISP; + uint32_t m_requestISPP; + uint32_t m_requestISPC; + uint32_t m_requestSCC; + uint32_t m_requestDIS; + uint32_t m_requestSCP; + uint32_t m_requestVRA; + uint32_t m_requestJPEG; + uint32_t m_requestThumbnail; + + bool m_bypassDRC; + bool m_bypassDIS; + bool m_bypassDNR; + bool m_bypassFD; + + Mutex m_createLock; + + bool m_flagFlite3aaOTF; + bool m_flag3aaIspOTF; + bool m_flagIspTpuOTF; + bool m_flagIspMcscOTF; + bool m_flagTpuMcscOTF; + bool m_flagMcscVraOTF; + bool m_supportReprocessing; + bool m_flagReprocessing; + bool m_supportPureBayerReprocessing; + bool m_supportSCC; + bool m_supportMCSC; + + /* added by 3.2 HAL */ + factory_handler_t m_frameCreateHandler; + factory_donehandler_t m_frameDoneHandler; + +private: + bool m_create; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.cpp b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.cpp new file mode 100644 index 0000000..d9e4221 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.cpp @@ -0,0 +1,1562 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3FrameFactoryPreview" +#include + +#include "ExynosCamera3FrameFactoryPreview.h" + +namespace android { + +ExynosCamera3FrameFactoryPreview::~ExynosCamera3FrameFactoryPreview() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE2("destroy fail"); +} + +status_t ExynosCamera3FrameFactoryPreview::create(__unused bool active) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_setupConfig(); + + int ret = 0; + int leaderPipe = PIPE_3AA; + int32_t nodeNums[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) + nodeNums[i] = -1; + + m_pipes[INDEX(PIPE_FLITE)] = (ExynosCameraPipe*)new ExynosCameraPipeFlite(m_cameraId, (ExynosCameraParameters *)m_parameters, false, m_nodeNums[INDEX(PIPE_FLITE)]); + m_pipes[INDEX(PIPE_FLITE)]->setPipeId(PIPE_FLITE); + m_pipes[INDEX(PIPE_FLITE)]->setPipeName("PIPE_FLITE"); + + m_pipes[INDEX(PIPE_3AA)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, (ExynosCameraParameters *)m_parameters, false, &m_nodeInfo[INDEX(PIPE_3AA)]); + m_pipes[INDEX(PIPE_3AA)]->setPipeId(PIPE_3AA); + m_pipes[INDEX(PIPE_3AA)]->setPipeName("PIPE_3AA"); + + m_pipes[INDEX(PIPE_ISP)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, (ExynosCameraParameters *)m_parameters, false, &m_nodeInfo[INDEX(PIPE_ISP)]); + m_pipes[INDEX(PIPE_ISP)]->setPipeId(PIPE_ISP); + m_pipes[INDEX(PIPE_ISP)]->setPipeName("PIPE_ISP"); + + if (m_parameters->getHWVdisMode()) { + m_pipes[INDEX(PIPE_DIS)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, (ExynosCameraParameters *)m_parameters, false, &m_nodeInfo[INDEX(PIPE_DIS)]); + m_pipes[INDEX(PIPE_DIS)]->setPipeId(PIPE_DIS); + m_pipes[INDEX(PIPE_DIS)]->setPipeName("PIPE_DIS"); + } + +/* Comment out, because it included ISP */ +/* + m_pipes[INDEX(PIPE_SCP)] = (ExynosCameraPipe*)new ExynosCameraPipeSCP(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_SCP)]); + m_pipes[INDEX(PIPE_SCP)]->setPipeId(PIPE_SCP); + m_pipes[INDEX(PIPE_SCP)]->setPipeName("PIPE_SCP"); +*/ + + if (m_flagMcscVraOTF == false) { + m_pipes[INDEX(PIPE_VRA)] = (ExynosCameraPipe*)new ExynosCameraPipeVRA(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_VRA)]); + m_pipes[INDEX(PIPE_VRA)]->setPipeId(PIPE_VRA); + m_pipes[INDEX(PIPE_VRA)]->setPipeName("PIPE_VRA"); + } + + m_pipes[INDEX(PIPE_GSC)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, (ExynosCameraParameters *)m_parameters, true, m_nodeNums[INDEX(PIPE_GSC)]); + m_pipes[INDEX(PIPE_GSC)]->setPipeId(PIPE_GSC); + m_pipes[INDEX(PIPE_GSC)]->setPipeName("PIPE_GSC"); + + m_pipes[INDEX(PIPE_GSC_VIDEO)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, (ExynosCameraParameters *)m_parameters, true, m_nodeNums[INDEX(PIPE_GSC_VIDEO)]); + m_pipes[INDEX(PIPE_GSC_VIDEO)]->setPipeId(PIPE_GSC_VIDEO); + m_pipes[INDEX(PIPE_GSC_VIDEO)]->setPipeName("PIPE_GSC_VIDEO"); + + if (m_supportReprocessing == false) { + m_pipes[INDEX(PIPE_GSC_PICTURE)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, (ExynosCameraParameters *)m_parameters, true, m_nodeNums[INDEX(PIPE_GSC_PICTURE)]); + m_pipes[INDEX(PIPE_GSC_PICTURE)]->setPipeId(PIPE_GSC_PICTURE); + m_pipes[INDEX(PIPE_GSC_PICTURE)]->setPipeName("PIPE_GSC_PICTURE"); + + m_pipes[INDEX(PIPE_JPEG)] = (ExynosCameraPipe*)new ExynosCameraPipeJpeg(m_cameraId, (ExynosCameraParameters *)m_parameters, true, m_nodeNums[INDEX(PIPE_JPEG)]); + m_pipes[INDEX(PIPE_JPEG)]->setPipeId(PIPE_JPEG); + m_pipes[INDEX(PIPE_JPEG)]->setPipeName("PIPE_JPEG"); + } + + /* flite pipe initialize */ + ret = 
m_pipes[INDEX(PIPE_FLITE)]->create(m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE2("FLITE create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_FLITE)); + +/* Comment out, because it same with M2M */ +#if 0 + if (m_flagFlite3aaOTF == true) { + /* 3AA_ISP pipe initialize */ + ret = m_pipes[INDEX(PIPE_3AA_ISP)]->create(); + if (ret < 0) { + CLOGE2("3AA_ISP create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_3AA_ISP)); + } else +#endif + { + /* ISP pipe initialize */ + ret = m_pipes[INDEX(PIPE_ISP)]->create(); + if (ret < 0) { + CLOGE2("ISP create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_ISP)); + + /* 3AA pipe initialize */ + ret = m_pipes[INDEX(PIPE_3AA)]->create(); + if (ret < 0) { + CLOGE2("3AA create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_3AA)); + } + + /* DIS pipe initialize */ + if (m_parameters->getHWVdisMode()) { + ret = m_pipes[INDEX(PIPE_DIS)]->create(); + if (ret < 0) { + CLOGE2("DIS create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_DIS)); + } + + /* SCP pipe initialize */ +/* Comment out SCP, because it included ISP */ +#if 0 + ret = m_pipes[INDEX(PIPE_SCP)]->create(); + if (ret < 0) { + CLOGE2("SCP create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_SCP)); +#endif + + /* VRA pipe initialize */ + if (m_flagMcscVraOTF == false) { + /* EOS */ + ret = m_pipes[INDEX(leaderPipe)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret < 0) { + CLOGE2("PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", leaderPipe, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* Change leaderPipe to VRA, Create new instance */ + leaderPipe = PIPE_VRA; + + ret = m_pipes[INDEX(PIPE_VRA)]->create(); + if (ret < 0) { + CLOGE2("VRA create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_VRA)); + } + + /* GSC_PREVIEW pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC)]->create(); + if (ret < 0) { + CLOGE2("GSC create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_GSC)); + + ret = m_pipes[INDEX(PIPE_GSC_VIDEO)]->create(); + if (ret < 0) { + CLOGE2("PIPE_GSC_VIDEO create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_GSC_VIDEO)); + + if (m_supportReprocessing == false) { + /* GSC_PICTURE pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_PICTURE)]->create(); + if (ret < 0) { + CLOGE2("GSC_PICTURE create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_GSC_PICTURE)); + + /* JPEG pipe initialize */ + ret = m_pipes[INDEX(PIPE_JPEG)]->create(); + if (ret < 0) { + CLOGE2("JPEG create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("Pipe(%d) created", INDEX(PIPE_JPEG)); + } + + /* EOS */ + ret = m_pipes[INDEX(leaderPipe)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret < 0) { + CLOGE2("PIPE_%d V4L2_CID_IS_END_OF_STREAM fail, 
ret(%d)", leaderPipe, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::m_fillNodeGroupInfo(ExynosCameraFrame *frame) +{ + camera2_node_group node_group_info_3aa, node_group_info_isp, node_group_info_dis; + int zoom = m_parameters->getZoomLevel(); + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + ExynosRect bnsSize; /* == bayerCropInputSize */ + ExynosRect bayerCropSize; + ExynosRect bdsSize; + int perFramePos = 0; + bool tpu = false; + bool dis = false; + + m_parameters->getHwPreviewSize(&previewW, &previewH); + /* m_parameters->getCallbackSize(&previewW, &previewH); */ + + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + tpu = m_parameters->getTpuEnabledMode(); + dis = m_parameters->getHWVdisMode(); + + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_dis, 0x0, sizeof(camera2_node_group)); + + /* should add this request value in FrameFactory */ + /* 3AA */ + node_group_info_3aa.leader.request = 1; + + /* 3AC */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_3AC); + + /* 3AP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_3AP); + + /* should add this request value in FrameFactory */ + /* ISP */ + node_group_info_isp.leader.request = 1; + + /* SCC */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCC_POS : PERFRAME_FRONT_SCC_POS; + + if (m_supportSCC == true) + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_SCC); + else + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_ISPC); + + /* DIS */ + memcpy(&node_group_info_dis, &node_group_info_isp, sizeof (camera2_node_group)); + + if (tpu == true) { + /* ISPP */ + if (m_requestISPP == true) { + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_ISPP); + } + + /* SCP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + node_group_info_dis.capture[perFramePos].request = frame->getRequest(PIPE_SCP); + } else { + /* SCP */ + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + if (m_flag3aaIspOTF == true) + node_group_info_3aa.capture[perFramePos].request = frame->getRequest(PIPE_SCP); + else + node_group_info_isp.capture[perFramePos].request = frame->getRequest(PIPE_SCP); + } + + ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + m_cameraId, + &node_group_info_3aa, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + dis); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_dis, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + dis); + + frame->storeNodeGroupInfo(&node_group_info_3aa, PERFRAME_INFO_3AA, zoom); + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, zoom); + frame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, zoom); + + return NO_ERROR; +} + +ExynosCameraFrame *ExynosCamera3FrameFactoryPreview::createNewFrame(uint32_t frameCount) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {0}; + if (frameCount <= 0) { + frameCount = m_frameCount; + } + + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, frameCount, FRAME_TYPE_PREVIEW); + + int requestEntityCount = 0; + bool dzoomScaler = false; + + dzoomScaler = m_parameters->getZoomPreviewWIthScaler(); + + ret = m_initFrameMetadata(frame); + if (ret < 0) + CLOGE2("frame(%d) metadata initialize fail", frameCount); + + if (m_flagFlite3aaOTF == true) { + if (m_requestFLITE) { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + requestEntityCount++; + + } + /* set 3AA_ISP pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_3AA)]); + requestEntityCount++; + + if (m_requestDIS == true) { + if (m_flag3aaIspOTF == true) { + /* set DIS pipe to linkageList */ + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_DIS)], INDEX(PIPE_ISPP)); + requestEntityCount++; + } else { + /* set ISP pipe to linkageList */ + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)], INDEX(PIPE_3AP)); + requestEntityCount++; + + /* set DIS pipe to linkageList */ + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_ISP)], newEntity[INDEX(PIPE_DIS)], INDEX(PIPE_ISPP)); + requestEntityCount++; + } + } else { + if (m_flag3aaIspOTF == true) { + /* skip ISP pipe to linkageList */ + } else { + /* set ISP pipe to linkageList */ + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)], INDEX(PIPE_3AP)); + requestEntityCount++; + } + } + } else { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, 
ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + + /* set 3AA pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_FLITE)], newEntity[INDEX(PIPE_3AA)]); + + /* set ISP pipe to linkageList */ + if (m_requestISP == true) { + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)]); + } + + /* set DIS pipe to linkageList */ + if (m_requestDIS == true) { + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_ISP)], newEntity[INDEX(PIPE_DIS)]); + } + + /* flite, 3aa, isp, dis as one. */ + requestEntityCount++; + } + +/* Comment out, because it included 3AA */ +#if 0 + if (m_request3AC) { + /* set 3AC pipe to linkageList */ + newEntity[INDEX(PIPE_3AC)] = new ExynosCameraFrameEntity(PIPE_3AC, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_3AC)]); + requestEntityCount++; + } +#endif + +/* Comment out, because it included ISP */ +#if 0 + if (m_requestISPC) { + /* set ISPC pipe to linkageList */ + newEntity[INDEX(PIPE_ISPC)] = new ExynosCameraFrameEntity(PIPE_ISPC, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_ISPC)]); + requestEntityCount++; + } +#endif + + /* set SCP pipe to linkageList */ +/* Comment out, because it included ISP */ +#if 0 + newEntity[INDEX(PIPE_SCP)] = new ExynosCameraFrameEntity(PIPE_SCP, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_SCP)]); + requestEntityCount++; +#endif + + if (m_flagMcscVraOTF == false) { + newEntity[INDEX(PIPE_VRA)] = new ExynosCameraFrameEntity(PIPE_VRA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_VRA)]); + requestEntityCount++; + } + + if (m_supportReprocessing == false) { + /* set GSC-Picture pipe to linkageList */ + newEntity[INDEX(PIPE_GSC_PICTURE)] = new ExynosCameraFrameEntity(PIPE_GSC_PICTURE, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_PICTURE)]); + } + + /* set GSC pipe to linkageList */ + newEntity[INDEX(PIPE_GSC)] = new ExynosCameraFrameEntity(PIPE_GSC, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC)]); + + if (dzoomScaler) { + requestEntityCount++; + } + + newEntity[INDEX(PIPE_GSC_VIDEO)] = new ExynosCameraFrameEntity(PIPE_GSC_VIDEO, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VIDEO)]); + + /* PIPE_VRA's internal pipe entity */ + newEntity[INDEX(PIPE_GSC_VRA)] = new ExynosCameraFrameEntity(PIPE_GSC_VRA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VRA)]); + + if (m_supportReprocessing == false) { + /* set JPEG pipe to linkageList */ + newEntity[INDEX(PIPE_JPEG)] = new ExynosCameraFrameEntity(PIPE_JPEG, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_JPEG)]); + } + + ret = m_initPipelines(frame); + if (ret < 0) { + CLOGE2("m_initPipelines fail, ret(%d)", ret); + } + + /* TODO: make it dynamic */ + 
frame->setNumRequestPipe(requestEntityCount); + + m_fillNodeGroupInfo(frame); + + m_frameCount++; + + return frame; +} + +status_t ExynosCamera3FrameFactoryPreview::initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + + ret = m_initFlitePipe(); + if (ret != NO_ERROR) { + CLOGE2("m_initFlitePipe() fail"); + return ret; + } + + ret = m_setDeviceInfo(); + if (ret != NO_ERROR) { + CLOGE2("m_setDeviceInfo() fail"); + return ret; + } + + ret = m_initPipes(); + if (ret != NO_ERROR) { + CLOGE2("m_initPipes() fail"); + return ret; + } + + m_frameCount = 0; + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::preparePipes(void) +{ + int ret = 0; + + /* NOTE: Prepare for 3AA is moved after ISP stream on */ + + if (m_requestFLITE) { + ret = m_pipes[INDEX(PIPE_FLITE)]->prepare(); + if (ret < 0) { + CLOGE2("FLITE prepare fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + +/* Comment out, because it included ISP */ +#if 0 + ret = m_pipes[INDEX(PIPE_SCP)]->prepare(); + if (ret < 0) { + CLOGE2("SCP prepare fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::preparePipes(uint32_t prepareCnt) +{ + int ret = 0; + + /* NOTE: Prepare for 3AA is moved after ISP stream on */ + + if (prepareCnt) { + ret = m_pipes[INDEX(PIPE_FLITE)]->prepare(prepareCnt); + if (ret < 0) { + CLOGE2("PIPE_FLITE prepare(%d) fail, ret(%d)", prepareCnt, ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_supportSCC == true) { + enum pipeline pipe = (m_supportSCC == true) ? PIPE_SCC : PIPE_ISPC; + + ret = m_pipes[INDEX(pipe)]->prepare(); + if (ret < 0) { + CLOGE2("%s prepare fail, ret(%d)", m_pipes[INDEX(pipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::startPipes(void) +{ + int ret = 0; + + if (m_flagMcscVraOTF == false) { + ret = m_pipes[INDEX(PIPE_VRA)]->start(); + if (ret < 0) { + CLOGE2("VRA start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_parameters->getTpuEnabledMode() == true) { + ret = m_pipes[INDEX(PIPE_DIS)]->start(); + if (ret < 0) { + CLOGE2("DIS start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_3AA)]->start(); + if (ret < 0) { + CLOGE2("3AA start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->start(); + if (ret < 0) { + CLOGE("FLITE start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_flagFlite3aaOTF == true) { + /* Here is doing 3AA prepare(qbuf) */ + ret = m_pipes[INDEX(PIPE_3AA)]->prepare(); + if (ret < 0) { + CLOGE2("3AA prepare fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(true); + if (ret < 0) { + CLOGE2("FLITE sensorStream on fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI2("Starting Success!"); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::startInitialThreads(void) +{ + int ret = 0; + + CLOGI2("start pre-ordered initial pipe thread"); + + if (m_requestFLITE) { + ret = startThread(PIPE_FLITE); + if (ret < 0) + return ret; + } + + ret = 
startThread(PIPE_3AA); + if (ret < 0) + return ret; + + if (m_parameters->is3aaIspOtf() == false) { + ret = startThread(PIPE_ISP); + if (ret < 0) + return ret; + } + + if (m_parameters->getTpuEnabledMode() == true) { + ret = startThread(PIPE_DIS); + if (ret < 0) + return ret; + } + + if (m_parameters->isMcscVraOtf() == false) { + ret = startThread(PIPE_VRA); + if (ret < 0) + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::setStopFlag(void) +{ + CLOGI("INFO(%s[%d]):", __FUNCTION__, __LINE__); + + int ret = 0; + + ret = m_pipes[INDEX(PIPE_FLITE)]->setStopFlag(); + + if (m_pipes[INDEX(PIPE_3AA)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_3AA)]->setStopFlag(); + + if (m_pipes[INDEX(PIPE_ISP)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_ISP)]->setStopFlag(); + + if (m_parameters->getHWVdisMode() == true + && m_pipes[INDEX(PIPE_DIS)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_DIS)]->setStopFlag(); + + if (m_flagMcscVraOTF == false + && m_pipes[INDEX(PIPE_VRA)]->flagStart() == true) + ret |= m_pipes[INDEX(PIPE_VRA)]->setStopFlag(); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::stopPipes(void) +{ + int ret = 0; + + if (m_pipes[INDEX(PIPE_VRA)] != NULL + && m_pipes[INDEX(PIPE_VRA)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_VRA)]->stopThread(); + if (ret < 0) { + CLOGE2("VRA stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_DIS)] != NULL + && m_pipes[INDEX(PIPE_DIS)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_DIS)]->stopThread(); + if (ret < 0) { + CLOGE2("DIS stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + +/* Comment out, because it included 3AA */ +#if 0 + if (m_supportReprocessing == true) { + if (m_supportPureBayerReprocessing == false) { + ret = m_pipes[INDEX(PIPE_3AC)]->stopThread(); + if (ret < 0) { + CLOGE2("3AC stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } +#endif + +/* Comment out, because it same with M2M */ +#if 0 + if (m_flagFlite3aaOTF == true) { + ret = m_pipes[INDEX(PIPE_3AA_ISP)]->stopThread(); + if (ret < 0) { + CLOGE2("3AA_ISP stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } else +#endif + { + if (m_pipes[INDEX(PIPE_3AA)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_3AA)]->stopThread(); + if (ret < 0) { + CLOGE2("3AA stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* stream off for ISP */ + if (m_pipes[INDEX(PIPE_ISP)] != NULL + && m_pipes[INDEX(PIPE_ISP)]->isThreadRunning() == true) { + ret = m_pipes[INDEX(PIPE_ISP)]->stopThread(); + if (ret < 0) { + CLOGE2("ISP stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + if (m_requestFLITE) { + ret = m_pipes[INDEX(PIPE_FLITE)]->stopThread(); + if (ret < 0) { + CLOGE2("FLITE stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_pipes[INDEX(PIPE_GSC)]->isThreadRunning() == true) { + ret = stopThread(INDEX(PIPE_GSC)); + if (ret < 0) { + CLOGE2("PIPE_GSC stopThread fail, ret(%d)", ret); + return INVALID_OPERATION; + } + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(false); + if (ret < 0) { + CLOGE2("FLITE sensorStream off fail, ret(%d)", ret); + /* TODO: exception handling */ + return 
INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->stop(); + if (ret < 0) { + CLOGE2("FLITE stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* 3AA force done */ + ret = m_pipes[INDEX(PIPE_3AA)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE2("PIPE_3AA force done fail, ret(%d)", ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + /* stream off for 3AA */ + ret = m_pipes[INDEX(PIPE_3AA)]->stop(); + if (ret < 0) { + CLOGE2("3AA stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* ISP force done */ + if (m_pipes[INDEX(PIPE_ISP)]->flagStart() == true) { + ret = m_pipes[INDEX(PIPE_ISP)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE2("PIPE_ISP force done fail, ret(%d)", ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + /* stream off for ISP */ + ret = m_pipes[INDEX(PIPE_ISP)]->stop(); + if (ret < 0) { + CLOGE2("ISP stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_parameters->getHWVdisMode()) { + if (m_pipes[INDEX(PIPE_DIS)]->flagStart() == true) { + /* DIS force done */ + ret = m_pipes[INDEX(PIPE_DIS)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE2("PIPE_DIS force done fail, ret(%d)", ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + ret = m_pipes[INDEX(PIPE_DIS)]->stop(); + if (ret < 0) { + CLOGE2("DIS stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + if (m_flagMcscVraOTF == false) { + if (m_pipes[INDEX(PIPE_VRA)]->flagStart() == true) { + /* VRA force done */ + ret = m_pipes[INDEX(PIPE_VRA)]->forceDone(V4L2_CID_IS_FORCE_DONE, 0x1000); + if (ret < 0) { + CLOGE2("PIPE_VRA force done fail, ret(%d)", ret); + /* TODO: exception handling */ + /* return INVALID_OPERATION; */ + } + + ret = m_pipes[INDEX(PIPE_VRA)]->stop(); + if (ret < 0) { + CLOGE2("VRA stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + ret = stopThreadAndWait(INDEX(PIPE_GSC)); + if (ret < 0) { + CLOGE2("PIPE_GSC stopThreadAndWait fail, ret(%d)", ret); + } + + CLOGI2("Stopping Success!"); + + return NO_ERROR; +} + +void ExynosCamera3FrameFactoryPreview::m_init(void) +{ + m_supportReprocessing = false; + m_flagFlite3aaOTF = false; + m_flagIspMcscOTF = false; + m_flagMcscVraOTF = false; + m_supportSCC = false; + m_supportMCSC = false; + m_supportPureBayerReprocessing = false; + m_flagReprocessing = false; +} + +status_t ExynosCamera3FrameFactoryPreview::m_setupConfig() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + int32_t *secondaryControlId = NULL; + int32_t *prevNode = NULL; + + m_flagFlite3aaOTF = m_parameters->isFlite3aaOtf(); + m_flag3aaIspOTF = m_parameters->is3aaIspOtf(); + m_flagIspMcscOTF = m_parameters->isIspMcscOtf(); + m_flagMcscVraOTF = m_parameters->isMcscVraOtf(); + m_supportReprocessing = m_parameters->isReprocessing(); + m_supportSCC = m_parameters->isOwnScc(m_cameraId); + m_supportMCSC = m_parameters->isOwnMCSC(); + + if (m_parameters->getRecordingHint() == true) { + m_supportPureBayerReprocessing = (m_cameraId == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_RECORDING; + } else { + m_supportPureBayerReprocessing = (m_cameraId == CAMERA_ID_BACK) ? 
USE_PURE_BAYER_REPROCESSING : USE_PURE_BAYER_REPROCESSING_FRONT; + } + + m_flagReprocessing = false; + + if (m_supportReprocessing == false) { + if (m_supportSCC == true) + m_requestSCC = 1; + else + m_requestISPC = 1; + } + + if (m_flag3aaIspOTF == true) { + m_request3AP = 0; + m_requestISP = 0; + } else { + m_request3AP = 1; + m_requestISP = 1; + } + + if (m_flagMcscVraOTF == true) + m_requestVRA = 0; + else + m_requestVRA = 1; + + nodeNums = m_nodeNums[INDEX(PIPE_FLITE)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = m_getFliteNodenum(); + nodeNums[CAPTURE_NODE_2] = -1; + controlId = m_sensorIds[INDEX(PIPE_FLITE)]; + controlId[CAPTURE_NODE_1] = m_getSensorId(nodeNums[CAPTURE_NODE_1], m_flagReprocessing); + + ret = m_setDeviceInfo(); + if (ret != NO_ERROR) { + CLOGE2("m_setDeviceInfo() fail"); + return ret; + } + + nodeNums = m_nodeNums[INDEX(PIPE_GSC)]; + nodeNums[OUTPUT_NODE] = PREVIEW_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC_VIDEO)]; + nodeNums[OUTPUT_NODE] = VIDEO_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_GSC_PICTURE)]; + nodeNums[OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + nodeNums = m_nodeNums[INDEX(PIPE_JPEG)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::m_setDeviceInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + bool flagDirtyBayer = false; + + if (m_supportReprocessing == true && m_supportPureBayerReprocessing == false) + flagDirtyBayer = true; + + int pipeId = -1; + int previousPipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + + int t3aaNums[MAX_NODE]; + int ispNums[MAX_NODE]; + + if (m_parameters->getDualMode() == true) { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_31S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_31C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_31P_NUM; + } else { + t3aaNums[OUTPUT_NODE] = FIMC_IS_VIDEO_30S_NUM; + t3aaNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_30C_NUM; + t3aaNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_30P_NUM; + } + + ispNums[OUTPUT_NODE] = FIMC_IS_VIDEO_I0S_NUM; + ispNums[CAPTURE_NODE_1] = FIMC_IS_VIDEO_I0C_NUM; + ispNums[CAPTURE_NODE_2] = FIMC_IS_VIDEO_I0P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA)); + m_initDeviceInfo(INDEX(PIPE_ISP)); + m_initDeviceInfo(INDEX(PIPE_DIS)); + + /******* + * 3AA + ******/ + pipeId = INDEX(PIPE_3AA); + + // 3AS + nodeType = getNodeType(PIPE_3AA); + m_nodeInfo[pipeId].nodeNum[nodeType] = t3aaNums[OUTPUT_NODE]; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeNums[INDEX(PIPE_FLITE)][getNodeType(PIPE_FLITE)], m_flagFlite3aaOTF, true, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AA; + + // 3AC + nodeType = getNodeType(PIPE_3AC); + if (flagDirtyBayer == true || m_parameters->isUsing3acForIspc() == true) { + m_nodeInfo[pipeId].nodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], false, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } else { + 
m_nodeInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_1]; + strncpy(m_nodeInfo[pipeId].secondaryNodeName[nodeType], "3AA_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], true, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AC; + } + + // 3AP + nodeType = getNodeType(PIPE_3AP); + m_nodeInfo[pipeId].secondaryNodeNum[nodeType] = t3aaNums[CAPTURE_NODE_2]; + strncpy(m_nodeInfo[pipeId].secondaryNodeName[nodeType], "3AA_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AA)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AP; + + // ISPS + nodeType = getNodeType(PIPE_ISP); + m_nodeInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[OUTPUT_NODE]; + strncpy(m_nodeInfo[pipeId].secondaryNodeName[nodeType], "ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_3AP)], m_flag3aaIspOTF, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_ISP; + + // ISPP + nodeType = getNodeType(PIPE_ISPP); + m_nodeInfo[pipeId].secondaryNodeNum[nodeType] = ispNums[CAPTURE_NODE_2]; + strncpy(m_nodeInfo[pipeId].secondaryNodeName[nodeType], "ISP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISP)], true, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_ISPP; + + // DIS + if (m_parameters->getHWVdisMode()) { + nodeType = getNodeType(PIPE_DIS); + m_nodeInfo[pipeId].secondaryNodeNum[nodeType] = FIMC_IS_VIDEO_TPU_NUM; + strncpy(m_nodeInfo[pipeId].secondaryNodeName[nodeType], "DIS_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISPP)], true, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_DIS; + } + + if (m_supportMCSC == true) { + // MCSC + nodeType = getNodeType(PIPE_MCSC); + m_nodeInfo[pipeId].secondaryNodeNum[nodeType] = FIMC_IS_VIDEO_M0S_NUM; + strncpy(m_nodeInfo[pipeId].secondaryNodeName[nodeType], "MCSC_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_secondarySensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISPP)], m_flagIspMcscOTF, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_MCSC; + + // MCSC0 + nodeType = getNodeType(PIPE_MCSC0); + m_nodeInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_M0P_NUM; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "MCSC_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_MCSC)], true, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_MCSC0; + } else { + // SCP + nodeType = getNodeType(PIPE_SCP); + m_nodeInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_SCP_NUM; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "SCP_PREVIEW", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].secondaryNodeNum[getNodeType(PIPE_ISPP)], true, false, m_flagReprocessing); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_SCP; + } + + // set nodeNum + for (int i = 0; i < MAX_NODE; i++) + m_nodeNums[pipeId][i] = 
m_nodeInfo[pipeId].nodeNum[i]; + + if (m_checkNodeSetting(pipeId) != NO_ERROR) { + CLOGE2("m_checkNodeSetting(%d) fail", pipeId); + return INVALID_OPERATION; + } + + // VRA + previousPipeId = pipeId; + nodeNums = m_nodeNums[INDEX(PIPE_VRA)]; + nodeNums[OUTPUT_NODE] = FIMC_IS_VIDEO_VRA_NUM; + nodeNums[CAPTURE_NODE_1] = -1; + nodeNums[CAPTURE_NODE_2] = -1; + controlId = m_sensorIds[INDEX(PIPE_VRA)]; + controlId[OUTPUT_NODE] = m_getSensorId(m_nodeInfo[previousPipeId].nodeNum[getNodeType(PIPE_SCP)], m_flagMcscVraOTF, true, true); + controlId[CAPTURE_NODE_1] = -1; + controlId[CAPTURE_NODE_2] = -1; + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryPreview::m_initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getPictureFormat(); + int hwVdisformat = m_parameters->getHWVdisFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + ExynosRect bdsSize; + ExynosRect bnsSize; + ExynosRect bcropSize; + int perFramePos = 0; + int stride = 0; + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bcropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + + /* When high speed recording mode, hw sensor size is fixed. 
+ * So, maxPreview size cannot exceed hw sensor size + */ + if (m_parameters->getHighSpeedRecording()) { + maxPreviewW = hwSensorW; + maxPreviewH = hwSensorH; + } + + CLOGI2("MaxSensorSize(%dx%d), HWSensorSize(%dx%d)", maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI2("MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI2("HWPictureSize(%dx%d)", hwPictureW, hwPictureH); + CLOGI2("BcropSize(%dx%d)", bcropSize.w, bcropSize.h); + + /* 3AS */ + enum NODE_TYPE t3asNodeType = getNodeType(PIPE_3AA); + + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + for (int i = 0; i < MAX_NODE; i++) { + ret = m_pipes[INDEX(PIPE_3AA)]->setPipeId((enum NODE_TYPE)i, m_nodeInfo[PIPE_3AA].pipeId[i]); + if (ret < 0) { + CLOGE2("setPipeId(%d, %d) fail, ret(%d)", i, m_nodeInfo[PIPE_3AA].pipeId[i], ret); + return ret; + } + + sensorIds[i] = m_sensorIds[INDEX(PIPE_3AA)][i]; + secondarySensorIds[i] = m_secondarySensorIds[INDEX(PIPE_3AA)][i]; + } + + if (m_flagFlite3aaOTF == true) { + tempRect.fullW = 32; + tempRect.fullH = 64; + tempRect.colorFormat = bayerFormat; + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_3aa_buffers; + } else { + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; + tempRect.colorFormat = bayerFormat; + +#ifdef CAMERA_PACKED_BAYER_ENABLE +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 2; + } + else +#endif + { + /* packed bayer bytesPerPlane */ + pipeInfo[t3asNodeType].bytesPerPlane[0] = ROUND_UP(tempRect.fullW, 10) * 8 / 5; + } +#endif + + pipeInfo[t3asNodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + } + + pipeInfo[t3asNodeType].rectInfo = tempRect; + pipeInfo[t3asNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[t3asNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* per frame info */ + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_3AA; + + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].nodeNum[t3asNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* 3AC */ + if ((m_supportReprocessing == true && m_supportPureBayerReprocessing == false) || m_parameters->isUsing3acForIspc() == true) { + enum NODE_TYPE t3acNodeType = getNodeType(PIPE_3AC); + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].nodeNum[t3acNodeType] - FIMC_IS_VIDEO_BAS_NUM); + +#ifdef FIXED_SENSOR_SIZE + tempRect.fullW = maxSensorW; + tempRect.fullH = maxSensorH; +#else + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; +#endif + if (m_parameters->isUsing3acForIspc() == true) { + tempRect.colorFormat = SCC_OUTPUT_COLOR_FMT; + /* tempRect.fullW = hwPictureW; */ + /* tempRect.fullH = hwPictureH; */ + tempRect.fullW = bcropSize.w; + tempRect.fullH = bcropSize.h; + } else { + tempRect.colorFormat = bayerFormat; + } + + pipeInfo[t3acNodeType].rectInfo = tempRect; + pipeInfo[t3acNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[t3acNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[t3acNodeType].bufInfo.count = config->current->bufInfo.num_sensor_buffers + config->current->bufInfo.num_bayer_buffers; + /* per frame info */ + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3acNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + } + + /* 3AP */ + enum NODE_TYPE t3apNodeType = getNodeType(PIPE_3AP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].secondaryNodeNum[t3apNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[t3apNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + /* ISPS */ + enum NODE_TYPE ispsNodeType = getNodeType(PIPE_ISP); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_ISP; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].secondaryNodeNum[ispsNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + /* ISPP */ + enum NODE_TYPE isppNodeType = getNodeType(PIPE_ISPP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_nodeInfo[PIPE_3AA].secondaryNodeNum[isppNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[isppNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + if (m_parameters->getHWVdisMode()) { + /* DIS */ + enum NODE_TYPE disNodeType = getNodeType(PIPE_DIS); + + pipeInfo[disNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_DIS; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[disNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].secondaryNodeNum[disNodeType] - FIMC_IS_VIDEO_BAS_NUM); + } + + if (m_supportMCSC == true) { + /* MCSC */ + enum NODE_TYPE mcscNodeType = getNodeType(PIPE_MCSC); + + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_MCSC; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[ispsNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].secondaryNodeNum[mcscNodeType] - FIMC_IS_VIDEO_BAS_NUM); + } + + /* SCP & MCSC0 */ + enum NODE_TYPE scpNodeType = getNodeType(PIPE_SCP); + + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameNodeType = PERFRAME_NODE_TYPE_CAPTURE; + pipeInfo[t3asNodeType].perFrameNodeGroupInfo.perFrameCaptureInfo[perFramePos].perFrameVideoID = (m_nodeInfo[INDEX(PIPE_3AA)].nodeNum[scpNodeType] - FIMC_IS_VIDEO_BAS_NUM); + + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[scpNodeType].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + stride = m_parameters->getHwPreviewStride(); + CLOGV2("stride=%d", stride); + tempRect.fullW = hwPreviewW; + tempRect.fullH = hwPreviewH; + tempRect.colorFormat = previewFormat; +#ifdef USE_BUFFER_WITH_STRIDE +/* to use stride for preview buffer, set the bytesPerPlane */ + pipeInfo[scpNodeType].bytesPerPlane[0] = tempRect.fullW; +#endif + + pipeInfo[scpNodeType].rectInfo = tempRect; + pipeInfo[scpNodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[scpNodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + if (m_parameters->increaseMaxBufferOfPreview() == true) { + pipeInfo[scpNodeType].bufInfo.count = m_parameters->getPreviewBufferCount(); + } else { + pipeInfo[scpNodeType].bufInfo.count = config->current->bufInfo.num_preview_buffers; + } + + ret = m_pipes[INDEX(PIPE_3AA)]->setupPipe(pipeInfo, sensorIds, secondarySensorIds); + if (ret < 0) { + CLOGE2("3AA setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_flagMcscVraOTF == false) { + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + + int vraWidth = 0, vraHeight = 0; + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + /* VRA pipe */ + tempRect.fullW = vraWidth; + tempRect.fullH = vraHeight; + tempRect.colorFormat = m_parameters->getHwVraInputFormat(); + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = config->current->bufInfo.num_vra_buffers; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = CAPTURE_NODE_MAX; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex = PERFRAME_INFO_VRA; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_LEADER; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID = (FIMC_IS_VIDEO_VRA_NUM - FIMC_IS_VIDEO_BAS_NUM); + + ret = m_pipes[INDEX(PIPE_VRA)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_VRA)]); + if (ret < 0) { + CLOGE2("VRA setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + return NO_ERROR; +} + +ExynosCameraFrame *ExynosCamera3FrameFactoryPreview::createNewFrame(void) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {0}; + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, m_frameCount, FRAME_TYPE_PREVIEW); + + int requestEntityCount = 0; + bool dzoomScaler = false; + + dzoomScaler = m_parameters->getZoomPreviewWIthScaler(); + + ret = m_initFrameMetadata(frame); + if (ret < 0) + CLOGE2("frame(%d) metadata initialize fail", m_frameCount); + + if (m_flagFlite3aaOTF == true) { + if (m_requestFLITE) { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + requestEntityCount++; + } 
+ + /* set 3AA_ISP pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_3AA)]); + requestEntityCount++; + + if (m_requestDIS == true) { + if (m_flag3aaIspOTF == true) { + /* set DIS pipe to linkageList */ + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_DIS)], INDEX(PIPE_ISPP)); + requestEntityCount++; + } else { + /* set ISP pipe to linkageList */ + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)], INDEX(PIPE_3AP)); + requestEntityCount++; + + /* set DIS pipe to linkageList */ + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_ISP)], newEntity[INDEX(PIPE_DIS)], INDEX(PIPE_ISPP)); + requestEntityCount++; + } + } else { + if (m_flag3aaIspOTF == true) { + /* skip ISP pipe to linkageList */ + } else { + /* set ISP pipe to linkageList */ + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)], INDEX(PIPE_3AP)); + requestEntityCount++; + } + } + } else { + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + + /* set 3AA pipe to linkageList */ + newEntity[INDEX(PIPE_3AA)] = new ExynosCameraFrameEntity(PIPE_3AA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_FLITE)], newEntity[INDEX(PIPE_3AA)]); + + /* set ISP pipe to linkageList */ + if (m_requestISP == true) { + newEntity[INDEX(PIPE_ISP)] = new ExynosCameraFrameEntity(PIPE_ISP, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addChildEntity(newEntity[INDEX(PIPE_3AA)], newEntity[INDEX(PIPE_ISP)]); + } + + /* set DIS pipe to linkageList */ + if (m_requestDIS == true) { + newEntity[INDEX(PIPE_DIS)] = new ExynosCameraFrameEntity(PIPE_DIS, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_DELIVERY); + frame->addChildEntity(newEntity[INDEX(PIPE_ISP)], newEntity[INDEX(PIPE_DIS)]); + } + + /* flite, 3aa, isp, dis as one. 
*/ + requestEntityCount++; + } + + if (m_supportReprocessing == false) { + /* set GSC-Picture pipe to linkageList */ + newEntity[INDEX(PIPE_GSC_PICTURE)] = new ExynosCameraFrameEntity(PIPE_GSC_PICTURE, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_PICTURE)]); + } + + /* set GSC pipe to linkageList */ + newEntity[INDEX(PIPE_GSC)] = new ExynosCameraFrameEntity(PIPE_GSC, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC)]); + + if (dzoomScaler) { + requestEntityCount++; + } + + newEntity[INDEX(PIPE_GSC_VIDEO)] = new ExynosCameraFrameEntity(PIPE_GSC_VIDEO, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VIDEO)]); + + /* PIPE_VRA's internal pipe entity */ + newEntity[INDEX(PIPE_GSC_VRA)] = new ExynosCameraFrameEntity(PIPE_GSC_VRA, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_GSC_VRA)]); + + if (m_supportReprocessing == false) { + /* set JPEG pipe to linkageList */ + newEntity[INDEX(PIPE_JPEG)] = new ExynosCameraFrameEntity(PIPE_JPEG, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_JPEG)]); + } + + ret = m_initPipelines(frame); + if (ret < 0) { + CLOGE2("m_initPipelines fail, ret(%d)", ret); + } + + /* TODO: make it dynamic */ + frame->setNumRequestPipe(requestEntityCount); + + m_fillNodeGroupInfo(frame); + + m_frameCount++; + + return frame; +} + +enum NODE_TYPE ExynosCamera3FrameFactoryPreview::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = OTF_NODE_1; + break; + case PIPE_ISP: + nodeType = OTF_NODE_2; + break; + case PIPE_ISPP: + nodeType = OTF_NODE_3; + break; + case PIPE_DIS: + nodeType = OTF_NODE_4; + break; + case PIPE_MCSC: + nodeType = OTF_NODE_5; + break; + case PIPE_ISPC: + case PIPE_SCC: + case PIPE_JPEG: + nodeType = CAPTURE_NODE_6; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_7; + break; + case PIPE_VRA: + nodeType = OUTPUT_NODE; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.h b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.h new file mode 100644 index 0000000..4c03f3c --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryPreview.h @@ -0,0 +1,78 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA3_FRAME_FACTORY_PREVIEW_H +#define EXYNOS_CAMERA3_FRAME_FACTORY_PREVIEW_H + +#include "ExynosCamera3FrameFactory.h" + +#include "ExynosCameraFrame.h" + +namespace android { + +class ExynosCamera3FrameFactoryPreview : public ExynosCamera3FrameFactory { +public: + ExynosCamera3FrameFactoryPreview() + { + m_init(); + } + + ExynosCamera3FrameFactoryPreview(int cameraId, ExynosCamera3Parameters *param) + { + m_init(); + + m_cameraId = cameraId; + m_parameters = param; + m_activityControl = m_parameters->getActivityControl(); + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? "FrameFactoryBackPreview" : "FrameFactoryFrontPreview"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + +public: + virtual ~ExynosCamera3FrameFactoryPreview(); + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); + virtual status_t create(bool active = true); + + virtual ExynosCameraFrame *createNewFrame(void); + virtual ExynosCameraFrame *createNewFrame(uint32_t frameCount = 0); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + virtual status_t preparePipes(uint32_t prepareCnt); + virtual status_t startPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t setStopFlag(void); + virtual status_t stopPipes(void); + +protected: + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + virtual status_t m_setupConfig(void); + /* setting node number on every pipe */ + virtual status_t m_setDeviceInfo(void); + + /* pipe setting */ + virtual status_t m_initPipes(void); + +private: + void m_init(void); + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.cpp b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.cpp new file mode 100644 index 0000000..c5f91cb --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.cpp @@ -0,0 +1,365 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3FrameFactoryVision" +#include + +#include "ExynosCamera3FrameFactoryVision.h" + +namespace android { + +ExynosCamera3FrameFactoryVision::~ExynosCamera3FrameFactoryVision() +{ + int ret = 0; + + ret = destroy(); + if (ret < 0) + CLOGE2("destroy fail"); +} + +status_t ExynosCamera3FrameFactoryVision::create(bool active) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_setupConfig(); + + int ret = 0; + int32_t nodeNums[MAX_NODE] = {-1, -1, -1}; + + m_pipes[INDEX(PIPE_FLITE)] = (ExynosCameraPipe*)new ExynosCameraPipeFlite(m_cameraId, m_parameters, false, m_nodeNums[INDEX(PIPE_FLITE)]); + m_pipes[INDEX(PIPE_FLITE)]->setPipeId(PIPE_FLITE); + m_pipes[INDEX(PIPE_FLITE)]->setPipeName("PIPE_FLITE"); + + /* flite pipe initialize */ + ret = m_pipes[INDEX(PIPE_FLITE)]->create(); + if (ret < 0) { + CLOGE2("FLITE create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + CLOGD2("Pipe(%d) created", INDEX(PIPE_FLITE)); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryVision::destroy(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + ret = m_pipes[i]->destroy(); + if (ret != NO_ERROR) { + CLOGE2("m_pipes[%d]->destroy() fail", i); + return ret; + } + + SAFE_DELETE(m_pipes[i]); + + CLOGD2("Pipe(%d) destroyed", i); + } + } + + m_setCreate(false); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryVision::m_fillNodeGroupInfo(ExynosCameraFrame *frame) +{ + /* Do nothing */ + return NO_ERROR; +} + +ExynosCameraFrame *ExynosCamera3FrameFactoryVision::createNewFrame(uint32_t frameCount) +{ + int ret = 0; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES]; + + if (frameCount <= 0) { + frameCount = m_frameCount; + } + + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, frameCount); + + int requestEntityCount = 0; + + CLOGV("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_initFrameMetadata(frame); + if (ret < 0) + CLOGE2("frame(%d) metadata initialize fail", frameCount); + + /* set flite pipe to linkageList */ + newEntity[INDEX(PIPE_FLITE)] = new ExynosCameraFrameEntity(PIPE_FLITE, ENTITY_TYPE_OUTPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(PIPE_FLITE)]); + requestEntityCount++; + + ret = m_initPipelines(frame); + if (ret < 0) { + CLOGE2("m_initPipelines fail, ret(%d)", ret); + } + + m_fillNodeGroupInfo(frame); + + /* TODO: make it dynamic */ + frame->setNumRequestPipe(requestEntityCount); + + m_frameCount++; + + return frame; +} + +status_t ExynosCamera3FrameFactoryVision::initPipes(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + camera_pipe_info_t pipeInfo[3]; + int32_t nodeNums[MAX_NODE] = {-1, -1, -1}; + int32_t sensorIds[MAX_NODE] = {-1, -1, -1}; + + ExynosRect tempRect; + int maxSensorW = 0, maxSensorH = 0, hwSensorW = 0, hwSensorH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int bayerFormat = V4L2_PIX_FMT_SBGGR12; + int previewFormat = m_parameters->getHwPreviewFormat(); + int pictureFormat = m_parameters->getPictureFormat(); + + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, 
&hwPreviewH); + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + + CLOGI2("MaxSensorSize(%dx%d), HwSensorSize(%dx%d)", maxSensorW, maxSensorH, hwSensorW, hwSensorH); + CLOGI2("MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI2("MaxPixtureSize(%dx%d), HwPixtureSize(%dx%d)", maxPictureW, maxPictureH, hwPictureW, hwPictureH); + + memset(pipeInfo, 0, (sizeof(camera_pipe_info_t) * 3)); + + /* FLITE pipe */ +#if 0 + tempRect.fullW = maxSensorW + 16; + tempRect.fullH = maxSensorH + 10; + tempRect.colorFormat = bayerFormat; +#else + tempRect.fullW = VISION_WIDTH; + tempRect.fullH = VISION_HEIGHT; + tempRect.colorFormat = V4L2_PIX_FMT_SGRBG8; +#endif + + pipeInfo[0].rectInfo = tempRect; + pipeInfo[0].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[0].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + pipeInfo[0].bufInfo.count = FRONT_NUM_BAYER_BUFFERS; + /* per frame info */ + pipeInfo[0].perFrameNodeGroupInfo.perframeSupportNodeNum = 0; + pipeInfo[0].perFrameNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + + ret = m_pipes[INDEX(PIPE_FLITE)]->setupPipe(pipeInfo, m_sensorIds[INDEX(PIPE_FLITE)]); + if (ret < 0) { + CLOGE2("FLITE setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_frameCount = 0; + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryVision::preparePipes(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_pipes[INDEX(PIPE_FLITE)]->prepare(); + if (ret < 0) { + CLOGE2("FLITE prepare fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryVision::startPipes(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_pipes[INDEX(PIPE_FLITE)]->start(); + if (ret < 0) { + CLOGE2("FLITE start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(true); + if (ret < 0) { + CLOGE2("FLITE sensorStream on fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI2("Starting Front [FLITE] Success!"); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryVision::startInitialThreads(void) +{ + int ret = 0; + + CLOGI2("start pre-ordered initial pipe thread"); + + ret = startThread(PIPE_FLITE); + if (ret < 0) + return ret; + + return NO_ERROR; +} + +status_t ExynosCamera3FrameFactoryVision::stopPipes(void) +{ + int ret = 0; + + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_pipes[INDEX(PIPE_FLITE)]->sensorStream(false); + if (ret < 0) { + CLOGE2("FLITE sensorStream fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + ret = m_pipes[INDEX(PIPE_FLITE)]->stop(); + if (ret < 0) { + CLOGE2("FLITE stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI2("Stopping Front [FLITE] Success!"); + + return NO_ERROR; +} + +void ExynosCamera3FrameFactoryVision::setRequest3AC(bool enable) +{ + /* Do nothing */ + + return; +} + +void ExynosCamera3FrameFactoryVision::m_init(void) +{ + memset(m_nodeNums, -1, sizeof(m_nodeNums)); + memset(m_sensorIds, -1, sizeof(m_sensorIds)); + + /* This seems all need to set 0 */ + m_requestFLITE = 0; + m_request3AP = 0; + m_request3AC = 0; + m_requestISP = 1; + m_requestSCC = 0; + m_requestDIS 
= 0; + m_requestSCP = 1; + + m_supportReprocessing = false; + m_flagFlite3aaOTF = false; + m_supportSCC = false; + m_supportPureBayerReprocessing = false; + m_flagReprocessing = false; + +} + +status_t ExynosCamera3FrameFactoryVision::m_setupConfig(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + int32_t *nodeNums = NULL; + int32_t *controlId = NULL; + int32_t *prevNode = NULL; + bool enableVision = true; + + nodeNums = m_nodeNums[INDEX(PIPE_FLITE)]; + nodeNums[OUTPUT_NODE] = -1; + nodeNums[CAPTURE_NODE] = FRONT_CAMERA_FLITE_NUM; + nodeNums[SUB_NODE] = -1; + controlId = m_sensorIds[INDEX(PIPE_FLITE)]; + controlId[CAPTURE_NODE] = m_getSensorId(nodeNums[CAPTURE_NODE], enableVision); + prevNode = nodeNums; + + return NO_ERROR; +} + +enum NODE_TYPE ExynosCamera3FrameFactoryVision::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + + switch (pipeId) { + case PIPE_FLITE: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AA: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AC: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_3AP: + nodeType = OTF_NODE_1; + break; + case PIPE_ISP: + nodeType = OTF_NODE_2; + break; + case PIPE_ISPP: + nodeType = OTF_NODE_3; + break; + case PIPE_DIS: + nodeType = OTF_NODE_4; + break; + case PIPE_ISPC: + case PIPE_SCC: + case PIPE_JPEG: + nodeType = CAPTURE_NODE_6; + break; + case PIPE_SCP: + nodeType = CAPTURE_NODE_7; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +} + + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.h b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.h new file mode 100644 index 0000000..bb5fadd --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameFactoryVision.h @@ -0,0 +1,69 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA3_FRAME_FACTORY_VISION_H +#define EXYNOS_CAMERA3_FRAME_FACTORY_VISION_H + +#include "ExynosCamera3FrameFactory.h" + +namespace android { + +class ExynosCamera3FrameFactoryVision : public ExynosCamera3FrameFactory { +public: + ExynosCamera3FrameFactoryVision() + { + m_init(); + + strncpy(m_name, "FrameFactoryVision", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + ExynosCamera3FrameFactoryVision(int cameraId, ExynosCamera3Parameters *param) : ExynosCamera3FrameFactory(cameraId, param) + { + m_init(); + + strncpy(m_name, "FrameFactoryVision", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + virtual ~ExynosCamera3FrameFactoryVision(); + + virtual status_t create(bool active = true); + virtual status_t destroy(void); + + virtual ExynosCameraFrame *createNewFrame(uint32_t frameCount = 0); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + + virtual status_t startPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t stopPipes(void); + + virtual void setRequest3AC(bool enable); + + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); +protected: + status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + virtual status_t m_setupConfig(void); + +private: + void m_init(void); + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.cpp b/libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.cpp new file mode 100644 index 0000000..9a96711 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.cpp @@ -0,0 +1,1214 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3FrameReprocessingFactory" +#include + +#include "ExynosCamera3FrameReprocessingFactory.h" + +namespace android { + +ExynosCamera3FrameReprocessingFactory::~ExynosCamera3FrameReprocessingFactory() +{ + status_t ret = NO_ERROR; + + ret = destroy(); + if (ret != NO_ERROR) + CLOGE2("destroy fail"); + + m_setCreate(false); +} + +status_t ExynosCamera3FrameReprocessingFactory::create(__unused bool active) +{ + CLOGI2("-IN-"); + + status_t ret = NO_ERROR; + uint32_t leaderPipe = PIPE_3AA_REPROCESSING; + + m_setupConfig(); + m_constructReprocessingPipes(); + + /* 3AA_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE2("3AA create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("%s(%d) created", + m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->getPipeName(), PIPE_3AA_REPROCESSING); + + /* ISP_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE2("ISP create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("%s(%d) created", + m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->getPipeName(), PIPE_ISP_REPROCESSING); + + /* MCSC_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE2("ISP create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("%s(%d) created", + m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->getPipeName(), PIPE_MCSC_REPROCESSING); + + /* GSC_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE2("GSC create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("%s(%d) created", + m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->getPipeName(), PIPE_GSC_REPROCESSING); + + /* GSC_REPROCESSING3 pipe initialize */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING3)]->create(); + if (ret != NO_ERROR) { + CLOGE2("GSC3 create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("%s(%d) created", + m_pipes[INDEX(PIPE_GSC_REPROCESSING3)]->getPipeName(), PIPE_GSC_REPROCESSING3); + + if (m_flagHWFCEnabled == false + || m_parameters->isHWFCOnDemand() == true) { + /* JPEG_REPROCESSING pipe initialize */ + ret = m_pipes[INDEX(PIPE_JPEG_REPROCESSING)]->create(); + if (ret != NO_ERROR) { + CLOGE2("JPEG create fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + CLOGD2("%s(%d) created", + m_pipes[INDEX(PIPE_JPEG_REPROCESSING)]->getPipeName(), PIPE_JPEG_REPROCESSING); + } + + /* EOS */ + if (m_parameters->isUseYuvReprocessing() == true) + leaderPipe = PIPE_MCSC_REPROCESSING; + else + leaderPipe = PIPE_3AA_REPROCESSING; + + ret = m_pipes[INDEX(leaderPipe)]->setControl(V4L2_CID_IS_END_OF_STREAM, 1); + if (ret != NO_ERROR) { + CLOGE2("%s V4L2_CID_IS_END_OF_STREAM fail, ret(%d)", + __FUNCTION__, __LINE__, m_pipes[INDEX(leaderPipe)]->getPipeName(), ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + m_setCreate(true); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::initPipes(void) +{ + CLOGI2("-IN-"); + + status_t ret = NO_ERROR; + camera_pipe_info_t pipeInfo[MAX_NODE]; + camera_pipe_info_t nullPipeInfo; + + int pipeId = -1; + enum NODE_TYPE nodeType = INVALID_NODE; + enum NODE_TYPE leaderNodeType = 
OUTPUT_NODE; + + int32_t nodeNums[MAX_NODE]; + int32_t sensorIds[MAX_NODE]; + int32_t secondarySensorIds[MAX_NODE]; + for (int i = 0; i < MAX_NODE; i++) { + nodeNums[i] = -1; + sensorIds[i] = -1; + secondarySensorIds[i] = -1; + } + + ExynosRect tempRect; + int hwSensorW = 0, hwSensorH = 0; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + int maxPreviewW = 0, maxPreviewH = 0, hwPreviewW = 0, hwPreviewH = 0; + int maxPictureW = 0, maxPictureH = 0, hwPictureW = 0, hwPictureH = 0; + int maxThumbnailW = 0, maxThumbnailH = 0; + int bayerFormat = m_parameters->getBayerFormat(PIPE_3AA_REPROCESSING); + int previewFormat = m_parameters->getPreviewFormat(); + int pictureFormat = m_parameters->getHwPictureFormat(); + struct ExynosConfigInfo *config = m_parameters->getConfig(); + int perFramePos = 0; + + memset(&nullPipeInfo, 0, sizeof(camera_pipe_info_t)); + +#ifdef DEBUG_RAWDUMP + if (m_parameters->checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + m_parameters->getHwSensorSize(&hwSensorW, &hwSensorH); + m_parameters->getMaxPreviewSize(&maxPreviewW, &maxPreviewH); + m_parameters->getHwPreviewSize(&hwPreviewW, &hwPreviewH); + m_parameters->getMaxPictureSize(&maxPictureW, &maxPictureH); + m_parameters->getHwPictureSize(&hwPictureW, &hwPictureH); + m_parameters->getPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getMaxThumbnailSize(&maxThumbnailW, &maxThumbnailH); + + CLOGI2("MaxPreviewSize(%dx%d), HwPreviewSize(%dx%d)", maxPreviewW, maxPreviewH, hwPreviewW, hwPreviewH); + CLOGI2("MaxPixtureSize(%dx%d), HwPixtureSize(%dx%d)", maxPictureW, maxPictureH, hwPictureW, hwPictureH); + CLOGI2("PreviewSize(%dx%d), PictureSize(%dx%d)", previewW, previewH, pictureW, pictureH); + CLOGI2("MaxThumbnailSize(%dx%d)", maxThumbnailW, maxThumbnailH); + + + if (m_parameters->isUseYuvReprocessing() == false) { + /* + * 3AA for Reprocessing + */ + if (m_supportPureBayerReprocessing == true) { + pipeId = PIPE_3AA_REPROCESSING; + + /* 3AS */ + nodeType = getNodeType(PIPE_3AA_REPROCESSING); + bayerFormat = m_parameters->getBayerFormat(PIPE_3AA_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwSensorW; + tempRect.fullH = hwSensorH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_bayer_buffers; + + /* Set output node default info */ + SET_OUTPUT_DEVICE_BASIC_INFO(PERFRAME_INFO_PURE_REPROCESSING_3AA); + +#if 0 + /* 3AC */ + nodeType = getNodeType(PIPE_3AC_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_3AC_POS; + bayerFormat = m_parameters->getBayerFormat(PIPE_3AC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); +#endif + + /* 3AP */ + nodeType = getNodeType(PIPE_3AP_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_3AP_POS; + bayerFormat = m_parameters->getBayerFormat(PIPE_3AP_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + 
pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* setup pipe info to 3AA pipe */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE2("3AA setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + } else { + /* + * 3A video node is opened for dirty bayer. + * So, we have to do setinput to 3A video node. + */ + pipeId = PIPE_3AA_REPROCESSING; + + /* setup pipe info to 3AA pipe */ + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE2("3AA setupPipe for dirty bayer reprocessing fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + + + /* + * ISP for Reprocessing + */ + + /* ISP */ + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = PIPE_ISP_REPROCESSING; + nodeType = getNodeType(PIPE_ISP_REPROCESSING); + bayerFormat = m_parameters->getBayerFormat(PIPE_ISP_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = bayerFormat; + + /* set v4l2 video node bytes per plane */ + pipeInfo[nodeType].bytesPerPlane[0] = getBayerLineSize(tempRect.fullW, bayerFormat); + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = m_supportPureBayerReprocessing ? config->current->bufInfo.num_picture_buffers : config->current->bufInfo.num_bayer_buffers; + + /* Set output node default info */ + int ispPerframeInfoIndex = m_supportPureBayerReprocessing ? 
PERFRAME_INFO_PURE_REPROCESSING_ISP : PERFRAME_INFO_DIRTY_REPROCESSING_ISP; + SET_OUTPUT_DEVICE_BASIC_INFO(ispPerframeInfoIndex); + } + + /* ISPC */ + nodeType = getNodeType(PIPE_ISPC_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_ISPC_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* ISPP */ + nodeType = getNodeType(PIPE_ISPP_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_ISPP_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* setup pipe info to ISP pipe */ + if (m_flagIspTpuOTF == false && m_flagIspMcscOTF == false) { + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE2("ISP setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* clear pipeInfo for next setupPipe */ + for (int i = 0; i < MAX_NODE; i++) + pipeInfo[i] = nullPipeInfo; + } + } + + + /* + * MCSC for Reprocessing + */ + + /* MCSC */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + pipeId = PIPE_MCSC_REPROCESSING; + nodeType = getNodeType(PIPE_MCSC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = hwPictureW; + tempRect.fullH = hwPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set output node default info */ + int mcscPerframeInfoIndex = !(0) ? PERFRAME_INFO_YUV_REPROCESSING_MCSC : (m_supportPureBayerReprocessing ? 
PERFRAME_INFO_PURE_REPROCESSING_MCSC : PERFRAME_INFO_DIRTY_REPROCESSING_MCSC); + SET_OUTPUT_DEVICE_BASIC_INFO(mcscPerframeInfoIndex); + } + + /* MCSC0 */ + nodeType = getNodeType(PIPE_MCSC0_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC0_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* 34xx hasn't multi-port */ +#if 0 + /* MCSC3 */ + nodeType = getNodeType(PIPE_MCSC3_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC3_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); + + /* MCSC4 */ + nodeType = getNodeType(PIPE_MCSC4_REPROCESSING); + perFramePos = PERFRAME_REPROCESSING_MCSC4_POS; + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + SET_CAPTURE_DEVICE_BASIC_INFO(); +#endif + + if (m_flagHWFCEnabled == true) { + /* JPEG Src */ + nodeType = getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* Thumbnail Src */ + nodeType = getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* JPEG Dst */ + nodeType = getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxPictureW; + tempRect.fullH = maxPictureH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + + /* Thumbnail Dst */ + nodeType = getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING); + + /* set v4l2 buffer size */ + tempRect.fullW = maxThumbnailW; + tempRect.fullH = maxThumbnailH; + tempRect.colorFormat = pictureFormat; + + /* set v4l2 video node buffer count */ + 
pipeInfo[nodeType].bufInfo.count = config->current->bufInfo.num_picture_buffers; + + /* Set capture node default info */ + pipeInfo[nodeType].rectInfo = tempRect; + pipeInfo[nodeType].bufInfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + pipeInfo[nodeType].bufInfo.memory = V4L2_CAMERA_MEMORY_TYPE; + } + + ret = m_pipes[INDEX(pipeId)]->setupPipe(pipeInfo, m_sensorIds[INDEX(pipeId)]); + if (ret != NO_ERROR) { + CLOGE2("ISP setupPipe fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::preparePipes(void) +{ +#if 0 + status_t ret = NO_ERROR; + + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->prepare(); + if (ret != NO_ERROR) { + CLOGE2("ISP prepare fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::startPipes(void) +{ + status_t ret = NO_ERROR; + CLOGI2("-IN-"); + + /* MCSC Reprocessing */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->start(); + if (ret != NO_ERROR) { + CLOGE2("MCSC start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + if (m_parameters->isUseYuvReprocessing() == false) { + /* ISP Reprocessing */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->start(); + if (ret != NO_ERROR) { + CLOGE2("ISP start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* 3AA Reprocessing */ + if (m_supportPureBayerReprocessing == true) { + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->start(); + if (ret != NO_ERROR) { + CLOGE2("ISP start fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + CLOGI2("Starting Reprocessing [SCC>ISP] Success!"); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::stopPipes(void) +{ + status_t ret = NO_ERROR; + CLOGI2(""); + + if (m_parameters->isUseYuvReprocessing() == false) { + /* 3AA Reprocessing Thread stop */ + if (m_supportPureBayerReprocessing == true) { + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE2("3AA stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* ISP Reprocessing Thread stop */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE2("ISP stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + /* MCSC Reprocessing Thread stop */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE2("MCSC stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* GSC Reprocessing Thread stop */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->stopThread(); + if (ret != NO_ERROR) { + CLOGE2("GSC stopThread fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + if (m_parameters->isUseYuvReprocessing() == false) { + /* 3AA Reprocessing stop */ + if (m_supportPureBayerReprocessing == true) { + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->stop(); + if 
(ret != NO_ERROR) { + CLOGE2("3AA stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* ISP Reprocessing stop */ + if (m_flag3aaIspOTF == false) { + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE2("ISP stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + } + + /* MCSC Reprocessing stop */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE2("MCSC stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + } + + /* GSC Reprocessing stop */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING)]->stop(); + if (ret != NO_ERROR) { + CLOGE2("GSC stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + /* GSC3 Reprocessing stop */ + ret = m_pipes[INDEX(PIPE_GSC_REPROCESSING3)]->stop(); + if (ret != NO_ERROR) { + CLOGE2("GSC3 stop fail, ret(%d)", ret); + /* TODO: exception handling */ + return INVALID_OPERATION; + } + + CLOGI2("Stopping Reprocessing [3AA>MCSC] Success!"); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::startInitialThreads(void) +{ + status_t ret = NO_ERROR; + + CLOGI2("start pre-ordered initial pipe thread"); + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::setStopFlag(void) +{ + CLOGI2("-IN-"); + + status_t ret = NO_ERROR; + + ret = m_pipes[INDEX(PIPE_3AA_REPROCESSING)]->setStopFlag(); + ret = m_pipes[INDEX(PIPE_ISP_REPROCESSING)]->setStopFlag(); + ret = m_pipes[INDEX(PIPE_MCSC_REPROCESSING)]->setStopFlag(); + + return NO_ERROR; +} + +ExynosCameraFrame * ExynosCamera3FrameReprocessingFactory::createNewFrame(uint32_t frameCount) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *newEntity[MAX_NUM_PIPES] = {0}; + if (frameCount <= 0) + frameCount = m_frameCount; + + ExynosCameraFrame *frame = m_frameMgr->createFrame(m_parameters, frameCount, FRAME_TYPE_REPROCESSING); + + int requestEntityCount = 0; + int pipeId = -1; + int parentPipeId = PIPE_3AA_REPROCESSING; + int curShotMode = 0; + int curSeriesShotMode = 0; + if (m_parameters != NULL) { + curShotMode = m_parameters->getShotMode(); + curSeriesShotMode = m_parameters->getSeriesShotMode(); + } + + ret = m_initFrameMetadata(frame); + if (ret != NO_ERROR) + CLOGE2("frame(%d) metadata initialize fail", frameCount); + + if (m_parameters->isUseYuvReprocessing() == false) { + /* set 3AA pipe to linkageList */ + if (m_supportPureBayerReprocessing == true) { + pipeId = PIPE_3AA_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + parentPipeId = pipeId; + } + + /* set ISP pipe to linkageList */ + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = PIPE_ISP_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + if (m_supportPureBayerReprocessing == true) + frame->addChildEntity(newEntity[INDEX(parentPipeId)], newEntity[INDEX(pipeId)], INDEX(PIPE_3AP_REPROCESSING)); + else + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + parentPipeId = pipeId; + } + + requestEntityCount++; + } + + /* set MCSC pipe to linkageList */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == 
false && m_flagTpuMcscOTF == false)) { + pipeId = PIPE_MCSC_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_ONLY, ENTITY_BUFFER_FIXED); + if (m_parameters->isUseYuvReprocessing() == true) + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + else + frame->addChildEntity(newEntity[INDEX(parentPipeId)], newEntity[INDEX(pipeId)], INDEX(PIPE_ISPC_REPROCESSING)); + requestEntityCount++; + } + + /* set GSC pipe to linkageList */ + pipeId = PIPE_GSC_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + if (m_parameters->needGSCForCapture(m_cameraId) == true) + requestEntityCount++; + + /* set GSC pipe to linkageList */ + pipeId = PIPE_GSC_REPROCESSING2; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + + /* set JPEG pipe to linkageList */ + pipeId = PIPE_JPEG_REPROCESSING; + newEntity[INDEX(pipeId)] = new ExynosCameraFrameEntity(pipeId, ENTITY_TYPE_INPUT_OUTPUT, ENTITY_BUFFER_FIXED); + frame->addSiblingEntity(NULL, newEntity[INDEX(pipeId)]); + if (curShotMode != SHOT_MODE_RICH_TONE + && curSeriesShotMode != SERIES_SHOT_MODE_LLS + && curSeriesShotMode != SERIES_SHOT_MODE_SIS + && m_parameters->getShotMode() != SHOT_MODE_FRONT_PANORAMA + && m_parameters->getHighResolutionCallbackMode() == false + && (m_flagHWFCEnabled == false || m_parameters->isHWFCOnDemand() == true)) + requestEntityCount++; + + ret = m_initPipelines(frame); + if (ret != NO_ERROR) { + CLOGE2("m_initPipelines fail, ret(%d)", ret); + } + + frame->setNumRequestPipe(requestEntityCount); + + m_fillNodeGroupInfo(frame); + + m_frameCount++; + + return frame; +} + +status_t ExynosCamera3FrameReprocessingFactory::m_setupConfig(void) +{ + CLOGI2("-IN-"); + + int pipeId = INDEX(PIPE_3AA_REPROCESSING); + int node3aa = -1, node3ac = -1, node3ap = -1; + int nodeIsp = -1, nodeIspc = -1, nodeIspp = -1; + int nodeTpu = -1; + int nodeMcsc = -1, nodeMcscp2 = -1, nodeMcscp3 = -1, nodeMcscp4 = -1; + int previousPipeId = PIPE_FLITE; + enum NODE_TYPE nodeType = INVALID_NODE; + bool flagStreamLeader = false; + + m_flagFlite3aaOTF = m_parameters->isFlite3aaOtf(); + m_flag3aaIspOTF = m_parameters->isReprocessing3aaIspOTF(); + m_flagIspTpuOTF = false; + m_flagIspMcscOTF = m_parameters->isReprocessingIspMcscOTF(); + m_flagTpuMcscOTF = false; + + m_supportReprocessing = m_parameters->isReprocessing(); + m_supportPureBayerReprocessing = m_parameters->getUsePureBayerReprocessing(); + + m_request3AP = !(m_flag3aaIspOTF); + if (m_flagHWFCEnabled == true) { + m_requestJPEG = true; + m_requestThumbnail = true; + } + + node3aa = FIMC_IS_VIDEO_30S_NUM; + node3ac = FIMC_IS_VIDEO_30C_NUM; + node3ap = FIMC_IS_VIDEO_30P_NUM; + nodeIsp = FIMC_IS_VIDEO_I0S_NUM; + nodeIspc = FIMC_IS_VIDEO_I0C_NUM; + nodeIspp = FIMC_IS_VIDEO_I0P_NUM; + nodeMcsc = FIMC_IS_VIDEO_M1S_NUM; + nodeMcscp2 = FIMC_IS_VIDEO_M0P_NUM; + nodeMcscp3 = FIMC_IS_VIDEO_M1P_NUM; + nodeMcscp4 = FIMC_IS_VIDEO_M2P_NUM; + + m_initDeviceInfo(INDEX(PIPE_3AA_REPROCESSING)); + m_initDeviceInfo(INDEX(PIPE_ISP_REPROCESSING)); + m_initDeviceInfo(INDEX(PIPE_MCSC_REPROCESSING)); + + + if (m_parameters->isUseYuvReprocessing() == false) { + /* + * 3AA for Reprocessing + */ + pipeId = INDEX(PIPE_3AA_REPROCESSING); + previousPipeId = PIPE_FLITE; + + /* 3AS */ + + /* + * If dirty bayer is used for reprocessing, the ISP 
video node is leader in the reprocessing stream. + */ + if (m_supportPureBayerReprocessing == false && m_flag3aaIspOTF == false) + flagStreamLeader = false; + else + flagStreamLeader = true; + + nodeType = getNodeType(PIPE_3AA_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AA_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = node3aa; + m_nodeInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flagFlite3aaOTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_3AA_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_getFliteNodenum(), false, flagStreamLeader, m_flagReprocessing); + + /* Other nodes is not stream leader */ + flagStreamLeader = false; + +#if 0 + /* 3AC */ + nodeType = getNodeType(PIPE_3AC_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AC_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = node3ac; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_3AA_CAPTURE_OPT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AA_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); +#endif + + /* 3AP */ + nodeType = getNodeType(PIPE_3AP_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_3AP_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = node3ap; + m_nodeInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flag3aaIspOTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_3AA_CAPTURE_MAIN", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AA_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* + * ISP for Reprocessing + */ + previousPipeId = pipeId; + pipeId = m_flag3aaIspOTF ? INDEX(PIPE_3AA_REPROCESSING) : INDEX(PIPE_ISP_REPROCESSING); + + /* + * If dirty bayer is used for reprocessing, the ISP video node is leader in the reprocessing stream. 
+ */ + if (m_supportPureBayerReprocessing == false && m_flag3aaIspOTF == false) + flagStreamLeader = true; + + /* ISPS */ + nodeType = getNodeType(PIPE_ISP_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_ISP_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeIsp; + m_nodeInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flag3aaIspOTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_ISP_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[previousPipeId].nodeNum[getNodeType(PIPE_3AP_REPROCESSING)], m_flag3aaIspOTF, flagStreamLeader, m_flagReprocessing); + + flagStreamLeader = false; + + /* ISPC */ + nodeType = getNodeType(PIPE_ISPC_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_ISPC_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeIspc; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_ISP_CAPTURE_M2M", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_ISP_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* ISPP */ + nodeType = getNodeType(PIPE_ISPP_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_ISPP_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeIspp; + m_nodeInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flagIspMcscOTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_ISP_CAPTURE_OTF", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_ISP_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + } + + /* + * MCSC for Reprocessing + */ + previousPipeId = pipeId; + + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) + pipeId = INDEX(PIPE_MCSC_REPROCESSING); + + /* MCSC */ + nodeType = getNodeType(PIPE_MCSC_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_MCSC_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeMcsc; + m_nodeInfo[pipeId].connectionMode[nodeType] = (unsigned int)m_flagIspMcscOTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_OUTPUT", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + if (m_parameters->isUseYuvReprocessing() == true) + m_sensorIds[pipeId][nodeType] = m_getSensorId(node3ac, m_flagIspMcscOTF, true, m_flagReprocessing); + else + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[previousPipeId].nodeNum[getNodeType(PIPE_ISPP_REPROCESSING)], m_flagIspMcscOTF, flagStreamLeader, m_flagReprocessing); + + /* MCSC0 */ + nodeType = getNodeType(PIPE_MCSC0_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_MCSC0_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeMcscp2; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_CAPTURE", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* 34xx hasn't multi-port */ +#if 0 + /* MCSC3 */ + nodeType = getNodeType(PIPE_MCSC3_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_MCSC3_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeMcscp3; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_CAPTURE_MAIN", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + 
m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* MCSC4 */ + nodeType = getNodeType(PIPE_MCSC4_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_MCSC4_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = nodeMcscp4; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "REPROCESSING_MCSC_THUMBNAIL", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); +#endif + + if (m_flagHWFCEnabled == true) { + /* JPEG Src */ + nodeType = getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_JPEG_SRC_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_JPEG_NUM; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "HWFC_JPEG_SRC", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_JPEG_SRC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* Thumbnail Src */ + nodeType = getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_THUMB_SRC_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_THUMB_NUM; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "HWFC_THUMBNAIL_SRC", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_THUMB_SRC_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* JPEG Dst */ + nodeType = getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_JPEG_DST_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_JPEG_NUM; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "HWFC_JPEG_DST", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_JPEG_DST_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + + /* Thumbnail Dst */ + nodeType = getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING); + m_nodeInfo[pipeId].pipeId[nodeType] = PIPE_HWFC_THUMB_DST_REPROCESSING; + m_nodeInfo[pipeId].nodeNum[nodeType] = FIMC_IS_VIDEO_HWFC_THUMB_NUM; + m_nodeInfo[pipeId].connectionMode[nodeType] = HW_CONNECTION_MODE_OTF; + strncpy(m_nodeInfo[pipeId].nodeName[nodeType], "HWFC_THUMBNAIL_DST", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_sensorIds[pipeId][nodeType] = m_getSensorId(m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_HWFC_THUMB_DST_REPROCESSING)], true, flagStreamLeader, m_flagReprocessing); + } + + /* GSC for Reprocessing */ + m_nodeNums[INDEX(PIPE_GSC_REPROCESSING)][OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + + /* GSC3 for Reprocessing */ + m_nodeNums[INDEX(PIPE_GSC_REPROCESSING3)][OUTPUT_NODE] = PICTURE_GSC_NODE_NUM; + + /* JPEG for Reprocessing */ + m_nodeNums[INDEX(PIPE_JPEG_REPROCESSING)][OUTPUT_NODE] = -1; + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::m_constructReprocessingPipes(void) +{ + CLOGI2("-IN-"); + + int pipeId = -1; + + /* 3AA for Reprocessing */ + pipeId = PIPE_3AA_REPROCESSING; + 
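Each video node registered in m_setupConfig() above receives the same handful of assignments: a pipe id, a kernel video node number, a connection mode for most nodes, a debug name copied with strncpy, and a sensor id. A minimal sketch of that repeated pattern is shown below; NodeTableSketch and setNodeEntry are hypothetical, simplified stand-ins rather than types or helpers from this HAL.

    /* Illustrative only: a simplified stand-in for the HAL's per-pipe node table. */
    #include <cstring>

    struct NodeTableSketch {
        static const int kMaxNodes = 12;
        int pipeId[kMaxNodes];
        int nodeNum[kMaxNodes];
        unsigned int connectionMode[kMaxNodes];
        char nodeName[kMaxNodes][32];
        int sensorId[kMaxNodes];
    };

    /* One call per node, mirroring the assignments repeated in m_setupConfig(). */
    static void setNodeEntry(NodeTableSketch &table, int nodeType, int pipeId, int nodeNum,
                             unsigned int connection, const char *name, int sensorId)
    {
        table.pipeId[nodeType] = pipeId;
        table.nodeNum[nodeType] = nodeNum;
        table.connectionMode[nodeType] = connection;
        strncpy(table.nodeName[nodeType], name, sizeof(table.nodeName[nodeType]) - 1);
        table.nodeName[nodeType][sizeof(table.nodeName[nodeType]) - 1] = '\0';
        table.sensorId[nodeType] = sensorId;
    }

In the original code the sensor id itself comes from m_getSensorId(), whose arguments encode the previous node number and the OTF/leader/reprocessing flags; the sketch takes it as a plain value.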
m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, m_flagReprocessing, &m_nodeInfo[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_3AA_REPROCESSING"); + + /* ISP for Reprocessing */ + pipeId = PIPE_ISP_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, m_flagReprocessing, &m_nodeInfo[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_ISP_REPROCESSING"); + + /* MCSC for Reprocessing */ + pipeId = PIPE_MCSC_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraMCPipe(m_cameraId, m_parameters, m_flagReprocessing, &m_nodeInfo[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_MCSC_REPROCESSING"); + + /* GSC for Reprocessing */ + pipeId = PIPE_GSC_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, m_flagReprocessing, m_nodeNums[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_GSC_REPROCESSING"); + + /* GSC3 for Reprocessing */ + pipeId = PIPE_GSC_REPROCESSING3; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, m_flagReprocessing, m_nodeNums[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_GSC_REPROCESSING3"); + + if (m_flagHWFCEnabled == false + || m_parameters->isHWFCOnDemand() == true) { + /* JPEG for Reprocessing */ + pipeId = PIPE_JPEG_REPROCESSING; + m_pipes[INDEX(pipeId)] = (ExynosCameraPipe*)new ExynosCameraPipeJpeg(m_cameraId, m_parameters, m_flagReprocessing, m_nodeNums[INDEX(pipeId)]); + m_pipes[INDEX(pipeId)]->setPipeId(pipeId); + m_pipes[INDEX(pipeId)]->setPipeName("PIPE_JPEG_REPROCESSING"); + } + + CLOGI2("pipe ids for reprocessing"); + for (int i = 0; i < MAX_NUM_PIPES; i++) { + if (m_pipes[i] != NULL) { + CLOGI2("-> m_pipes[%d] : PipeId(%d)" , i, m_pipes[i]->getPipeId()); + } + } + + return NO_ERROR; +} + +status_t ExynosCamera3FrameReprocessingFactory::m_fillNodeGroupInfo(ExynosCameraFrame *frame) +{ + camera2_node_group node_group_info_3aa; + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_mcsc; + camera2_node_group *node_group_info_temp = NULL; + + int zoom = m_parameters->getZoomLevel(); + int pipeId = -1; + uint32_t perframePosition = 0; + + memset(&node_group_info_3aa, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_isp, 0x0, sizeof(camera2_node_group)); + memset(&node_group_info_mcsc, 0x0, sizeof(camera2_node_group)); + + if (m_parameters->isUseYuvReprocessing() == false) { + /* 3AA for Reprocessing */ + if (m_supportPureBayerReprocessing == true) { + pipeId = INDEX(PIPE_3AA_REPROCESSING); + node_group_info_temp = &node_group_info_3aa; + node_group_info_temp->leader.request = 1; + if (m_request3AC == true) { + node_group_info_temp->capture[perframePosition].request = m_request3AC; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AC_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + + node_group_info_temp->capture[perframePosition].request = m_request3AP; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_3AP_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + + /* ISP for Reprocessing */ + if 
(m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + pipeId = INDEX(PIPE_ISP_REPROCESSING); + perframePosition = 0; + node_group_info_temp = &node_group_info_isp; + node_group_info_temp->leader.request = 1; + } + + if (m_flagIspMcscOTF == false) { + node_group_info_temp->capture[perframePosition].request = m_requestISPC; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_ISPC_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + + if (m_requestISPP == true) { + node_group_info_temp->capture[perframePosition].request = m_requestISPP; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_ISPP_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + } + } + + /* MCSC for Reprocessing */ + if (m_parameters->isUseYuvReprocessing() == true + || (m_flagIspMcscOTF == false && m_flagTpuMcscOTF == false)) { + pipeId = INDEX(PIPE_MCSC_REPROCESSING); + node_group_info_temp = &node_group_info_mcsc; + node_group_info_temp->leader.request = 1; + } + + perframePosition = PERFRAME_REPROCESSING_SCC_POS; + node_group_info_temp->capture[perframePosition].request = m_requestSCP; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC0_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + + /* 34xx hasn't multi-port */ +#if 0 + node_group_info_temp->capture[perframePosition].request = m_requestMCSC3; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC3_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; + perframePosition++; + + node_group_info_temp->capture[perframePosition].request = m_requestMCSC4; + node_group_info_temp->capture[perframePosition].vid = m_nodeInfo[pipeId].nodeNum[getNodeType(PIPE_MCSC4_REPROCESSING)] - FIMC_IS_VIDEO_BAS_NUM; +#endif + + if (m_parameters->isUseYuvReprocessing() == false) { + updateNodeGroupInfo( + PIPE_3AA_REPROCESSING, + m_parameters, + &node_group_info_3aa); + frame->storeNodeGroupInfo(&node_group_info_3aa, PERFRAME_INFO_PURE_REPROCESSING_3AA, zoom); + + if (m_supportPureBayerReprocessing == false || m_flag3aaIspOTF == false) { + updateNodeGroupInfo( + PIPE_ISP_REPROCESSING, + m_parameters, + &node_group_info_isp); + if (m_supportPureBayerReprocessing == true) + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_PURE_REPROCESSING_ISP, zoom); + else + frame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_DIRTY_REPROCESSING_ISP, zoom); + } + } else { + frame->storeNodeGroupInfo(&node_group_info_mcsc, PERFRAME_INFO_YUV_REPROCESSING_MCSC, zoom); + } + + return NO_ERROR; +} + +void ExynosCamera3FrameReprocessingFactory::m_init(void) +{ + m_flagReprocessing = true; + m_flagHWFCEnabled = m_parameters->isHWFCEnabled(); +} + +enum NODE_TYPE ExynosCamera3FrameReprocessingFactory::getNodeType(uint32_t pipeId) +{ + enum NODE_TYPE nodeType = INVALID_NODE; + switch (pipeId) { + case PIPE_FLITE_REPROCESSING: + nodeType = CAPTURE_NODE; + break; + case PIPE_3AA_REPROCESSING: + nodeType = OUTPUT_NODE; + break; + case PIPE_3AP_REPROCESSING: + nodeType = CAPTURE_NODE_1; + break; + case PIPE_ISP_REPROCESSING: + nodeType = OTF_NODE_1; + break; + case PIPE_ISPC_REPROCESSING: + nodeType = CAPTURE_NODE_2; + break; + case PIPE_ISPP_REPROCESSING: + case PIPE_SCC_REPROCESSING: + nodeType = CAPTURE_NODE_3; + break; + case PIPE_MCSC_REPROCESSING: + if (0) + nodeType = OTF_NODE_2; + else + nodeType = OUTPUT_NODE; + break; + case PIPE_MCSC0_REPROCESSING: 
+ nodeType = CAPTURE_NODE_4; + break; + case PIPE_HWFC_JPEG_DST_REPROCESSING: + nodeType = CAPTURE_NODE_5; + break; + case PIPE_HWFC_JPEG_SRC_REPROCESSING: + nodeType = CAPTURE_NODE_6; + break; + case PIPE_HWFC_THUMB_SRC_REPROCESSING: + nodeType = CAPTURE_NODE_7; + break; + case PIPE_HWFC_THUMB_DST_REPROCESSING: + nodeType = CAPTURE_NODE_8; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Unexpected pipe_id(%d), assert!!!!", + __FUNCTION__, __LINE__, pipeId); + break; + } + + return nodeType; +}; + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.h b/libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.h new file mode 100644 index 0000000..db4ff64 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3FrameReprocessingFactory.h @@ -0,0 +1,69 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA3_FRAME_REPROCESSING_FACTORY_H +#define EXYNOS_CAMERA3_FRAME_REPROCESSING_FACTORY_H + +#include "ExynosCamera3FrameFactory.h" + +namespace android { + +class ExynosCamera3FrameReprocessingFactory : public ExynosCamera3FrameFactory { +public: + ExynosCamera3FrameReprocessingFactory() + { + m_init(); + } + + ExynosCamera3FrameReprocessingFactory(int cameraId, ExynosCamera3Parameters *param) : ExynosCamera3FrameFactory(cameraId, param) + { + m_init(); + + strncpy(m_name, "ReprocessingFactory", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + + virtual ~ExynosCamera3FrameReprocessingFactory(); + + virtual status_t create(bool active = true); + + virtual status_t initPipes(void); + virtual status_t preparePipes(void); + + virtual status_t startPipes(void); + virtual status_t stopPipes(void); + virtual status_t startInitialThreads(void); + virtual status_t setStopFlag(void); + + virtual enum NODE_TYPE getNodeType(uint32_t pipeId); + + virtual ExynosCameraFrame *createNewFrame(uint32_t frameCount = 0); + +protected: + virtual status_t m_setupConfig(void); + virtual status_t m_constructReprocessingPipes(void); + virtual status_t m_fillNodeGroupInfo(ExynosCameraFrame *frame); + +private: + void m_init(void); + +private: + bool m_flagHWFCEnabled; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal3/ExynosCamera3Parameters.cpp b/libcamera/34xx/hal3/ExynosCamera3Parameters.cpp new file mode 100644 index 0000000..de46654 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3Parameters.cpp @@ -0,0 +1,8138 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3Parameters" +#include + +#include "ExynosCamera3Parameters.h" + +namespace android { + +ExynosCamera3Parameters::ExynosCamera3Parameters(int cameraId) +{ + m_cameraId = cameraId; + + const char *myName = (m_cameraId == CAMERA_ID_BACK) ? "ParametersBack" : "ParametersFront"; + strncpy(m_name, myName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + m_staticInfo = createExynosCamera3SensorInfo(cameraId); + m_useSizeTable = (m_staticInfo->sizeTableSupport) ? USE_CAMERA_SIZE_TABLE : false; + m_useAdaptiveCSCRecording = (cameraId == CAMERA_ID_BACK) ? USE_ADAPTIVE_CSC_RECORDING : false; + + m_exynosconfig = NULL; + m_activityControl = new ExynosCameraActivityControl(m_cameraId); + + memset(&m_cameraInfo, 0, sizeof(struct exynos_camera_info)); + memset(&m_exifInfo, 0, sizeof(m_exifInfo)); + + m_initMetadata(); + + setHalVersion(IS_HAL_VER_3_2); + m_setExifFixedAttribute(); + + m_exynosconfig = new ExynosConfigInfo(); + + mDebugInfo.num_of_appmarker = 1; /* Default : APP4 */ + mDebugInfo.idx[0][0] = APP_MARKER_4; /* matching the app marker 4 */ + + mDebugInfo.debugSize[APP_MARKER_4] = sizeof(struct camera2_udm); + mDebugInfo.debugData[APP_MARKER_4] = new char[mDebugInfo.debugSize[APP_MARKER_4]]; + memset((void *)mDebugInfo.debugData[APP_MARKER_4], 0, mDebugInfo.debugSize[APP_MARKER_4]); + memset((void *)m_exynosconfig, 0x00, sizeof(struct ExynosConfigInfo)); + + // CAUTION!! : Initial values must be prior to setDefaultParameter() function. + // Initial Values : START + m_IsThumbnailCallbackOn = false; + m_fastFpsMode = 0; + m_previewRunning = false; + m_previewSizeChanged = false; + m_pictureRunning = false; + m_recordingRunning = false; + m_flagRestartPreviewChecked = false; + m_flagRestartPreview = false; + m_reallocBuffer = false; + m_setFocusmodeSetting = false; + m_flagMeteringRegionChanged = false; + m_flagCheckDualMode = false; + m_flagHWVDisMode = false; + m_flagVideoStabilization = false; + m_flag3dnrMode = false; + + m_flagCheckRecordingHint = false; + m_zoomWithScaler = false; + + m_useDynamicBayer = (cameraId == CAMERA_ID_BACK) ? USE_DYNAMIC_BAYER : USE_DYNAMIC_BAYER_FRONT; + m_useDynamicBayerVideoSnapShot = + (cameraId == CAMERA_ID_BACK) ? USE_DYNAMIC_BAYER_VIDEO_SNAP_SHOT : USE_DYNAMIC_BAYER_VIDEO_SNAP_SHOT_FRONT; + m_useDynamicScc = (cameraId == CAMERA_ID_BACK) ? USE_DYNAMIC_SCC_REAR : USE_DYNAMIC_SCC_FRONT; + m_useFastenAeStable = (cameraId == CAMERA_ID_BACK) ? USE_FASTEN_AE_STABLE : false; + + /* we cannot know now, whether recording mode or not */ + /* + if (getRecordingHint() == true || getDualRecordingHint() == true) + m_usePureBayerReprocessing = (cameraId == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_RECORDING; + else + */ + m_usePureBayerReprocessing = (cameraId == CAMERA_ID_BACK) ? 
USE_PURE_BAYER_REPROCESSING : USE_PURE_BAYER_REPROCESSING_FRONT; + + m_enabledMsgType = 0; + + m_previewBufferCount = NUM_PREVIEW_BUFFERS; + + m_dvfsLock = false; + +#ifdef USE_BINNING_MODE + m_binningProperty = checkProperty(false); +#endif + m_zoom_activated = false; +#ifdef FORCE_CAL_RELOAD + m_calValid = true; +#endif + + m_zoomWithScaler = false; + m_exposureTimeCapture = 0; + // Initial Values : END + setDefaultCameraInfo(); + m_initDefaultInfo(); +} + +ExynosCamera3Parameters::~ExynosCamera3Parameters() +{ + if (m_staticInfo != NULL) { + delete m_staticInfo; + m_staticInfo = NULL; + } + + if (m_activityControl != NULL) { + delete m_activityControl; + m_activityControl = NULL; + } + + for(int i = 0; i < mDebugInfo.num_of_appmarker; i++) { + if(mDebugInfo.debugData[mDebugInfo.idx[i][0]]) + delete mDebugInfo.debugData[mDebugInfo.idx[i][0]]; + mDebugInfo.debugData[mDebugInfo.idx[i][0]] = NULL; + mDebugInfo.debugSize[mDebugInfo.idx[i][0]] = 0; + } + + if (m_exynosconfig != NULL) { + memset((void *)m_exynosconfig, 0x00, sizeof(struct ExynosConfigInfo)); + delete m_exynosconfig; + m_exynosconfig = NULL; + } + + if (m_exifInfo.maker_note) { + delete m_exifInfo.maker_note; + m_exifInfo.maker_note = NULL; + } + + if (m_exifInfo.user_comment) { + delete m_exifInfo.user_comment; + m_exifInfo.user_comment = NULL; + } +} + +int ExynosCamera3Parameters::getCameraId(void) +{ + return m_cameraId; +} + +status_t ExynosCamera3Parameters::m_initDefaultInfo(void) +{ + status_t ret = NO_ERROR; + + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + + m_setRecordingHint(false); + m_setDualMode(false); + m_setEffectHint(0); + + /* zoom */ + if (getZoomSupported() == true) { + int maxZoom = getMaxZoomLevel(); + if (0 < maxZoom) { + int max_zoom_ratio = (int)getMaxZoomRatio(); + setZoomRatioList(m_staticInfo->zoomRatioList, maxZoom - 1, (float)(max_zoom_ratio / 1000)); + } + } + + getPreviewFpsRange(&curMinFps, &curMaxFps); + CLOGI2("curFpsRange[Min=%d, Max=%d]", curMinFps, curMaxFps); + + m_setPreviewFpsRange((uint32_t)15, (uint32_t)30); + getPreviewFpsRange(&curMinFps, &curMaxFps); + m_activityControl->setFpsValue(curMinFps); + + m_setHwPreviewFormat(V4L2_PIX_FMT_NV21M); + m_setCallbackFormat(V4L2_PIX_FMT_NV21M); + m_setHwPictureFormat(SCC_OUTPUT_COLOR_FMT); + + /* Preview Size */ + getMaxPreviewSize(&m_cameraInfo.previewW, &m_cameraInfo.previewH); + m_setHwPreviewSize(m_cameraInfo.previewW, m_cameraInfo.previewH); + + return ret; +} + +void ExynosCamera3Parameters::setDefaultCameraInfo(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + m_setHwSensorSize(m_staticInfo->maxSensorW, m_staticInfo->maxSensorH); + CLOGI("INFO(%s[%d]) m_setHwPreviewSize : %d x %d", __FUNCTION__, __LINE__, m_staticInfo->maxPreviewW, m_staticInfo->maxPreviewH); + for (int i = 0; i < this->getYuvStreamMaxNum(); i++) { + m_setYuvSize(m_staticInfo->maxPreviewW, m_staticInfo->maxPreviewH, i); + m_setYuvFormat(V4L2_PIX_FMT_NV21, i); + } + + m_setHwPictureSize(m_staticInfo->maxPictureW, m_staticInfo->maxPictureH); + m_setHwPictureFormat(SCC_OUTPUT_COLOR_FMT); + + /* Initalize BNS scale ratio, step:500, ex)1500->x1.5 scale down */ + m_setBnsScaleRatio(1000); + /* Initalize Binning scale ratio */ + m_setBinningScaleRatio(1000); + /* Set Default VideoSize to FHD */ + m_setVideoSize(1920,1080); +} + +status_t ExynosCamera3Parameters::m_setIntelligentMode(int intelligentMode) +{ + status_t ret = NO_ERROR; + bool visionMode = false; + + m_cameraInfo.intelligentMode = intelligentMode; + + if (intelligentMode > 1) { + if 
(m_staticInfo->visionModeSupport == true) { + visionMode = true; + } else { + CLOGE2("tried to set vision mode(not supported)", "setParameters"); + ret = BAD_VALUE; + } + } else if (getVisionMode()) { + CLOGE2("vision mode can not change before stoppreview"); + visionMode = true; + } + + m_setVisionMode(visionMode); + + return ret; + } + +int ExynosCamera3Parameters::getIntelligentMode(void) +{ + return m_cameraInfo.intelligentMode; +} + +void ExynosCamera3Parameters::m_setVisionMode(bool vision) +{ + m_cameraInfo.visionMode = vision; +} + +bool ExynosCamera3Parameters::getVisionMode(void) +{ + return m_cameraInfo.visionMode; +} + +void ExynosCamera3Parameters::m_setVisionModeFps(int fps) +{ + m_cameraInfo.visionModeFps = fps; +} + +int ExynosCamera3Parameters::getVisionModeFps(void) +{ + return m_cameraInfo.visionModeFps; +} + +void ExynosCamera3Parameters::m_setVisionModeAeTarget(int ae) +{ + m_cameraInfo.visionModeAeTarget = ae; +} + +int ExynosCamera3Parameters::getVisionModeAeTarget(void) +{ + return m_cameraInfo.visionModeAeTarget; +} + +void ExynosCamera3Parameters::m_setRecordingHint(bool hint) +{ + m_cameraInfo.recordingHint = hint; + + if (hint) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_ON); + } else if (!hint && !getDualRecordingHint() && !getEffectRecordingHint()) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_OFF); + } + + /* RecordingHint is confirmed */ + m_flagCheckRecordingHint = true; +} + +bool ExynosCamera3Parameters::getRecordingHint(void) +{ + /* + * Before setParameters, we cannot know recordingHint is valid or not + * So, check and make assert for fast debugging + */ + if (m_flagCheckRecordingHint == false) + android_printAssert(NULL, LOG_TAG, "Cannot call getRecordingHint befor setRecordingHint, assert!!!!"); + + return m_cameraInfo.recordingHint; +} + +void ExynosCamera3Parameters::m_setDualMode(bool dual) +{ + m_cameraInfo.dualMode = dual; + /* dualMode is confirmed */ + m_flagCheckDualMode = true; +} + +bool ExynosCamera3Parameters::getDualMode(void) +{ + /* + * Before setParameters, we cannot know dualMode is valid or not + * So, check and make assert for fast debugging + */ + if (m_flagCheckDualMode == false) + android_printAssert(NULL, LOG_TAG, "Cannot call getDualMode befor checkDualMode, assert!!!!"); + + return m_cameraInfo.dualMode; +} + +void ExynosCamera3Parameters::m_setDualRecordingHint(bool hint) +{ + m_cameraInfo.dualRecordingHint = hint; + + if (hint) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_ON); + } else if (!hint && !getRecordingHint() && !getEffectRecordingHint()) { + setMetaVideoMode(&m_metadata, AA_VIDEOMODE_OFF); + } +} + +bool ExynosCamera3Parameters::getDualRecordingHint(void) +{ + return m_cameraInfo.dualRecordingHint; +} + +void ExynosCamera3Parameters::m_setEffectHint(bool hint) +{ + m_cameraInfo.effectHint = hint; +} + +bool ExynosCamera3Parameters::getEffectHint(void) +{ + return m_cameraInfo.effectHint; +} + +bool ExynosCamera3Parameters::getEffectRecordingHint(void) +{ + return m_cameraInfo.effectRecordingHint; +} + +status_t ExynosCamera3Parameters::m_adjustPreviewFpsRange(int &newMinFps, int &newMaxFps) +{ + bool flagSpecialMode = false; + int curSceneMode = 0; + int curShotMode = 0; + + if (getDualMode() == true) { + flagSpecialMode = true; + + /* when dual mode, fps is limited by 24fps */ + if (24000 < newMaxFps) + newMaxFps = 24000; + + /* set fixed fps. 
*/ + newMinFps = newMaxFps; + CLOGD2("dualMode(true), newMaxFps=%d", newMaxFps); + } + + if (getDualRecordingHint() == true) { + flagSpecialMode = true; + + /* when dual recording mode, fps is limited by 24fps */ + if (24000 < newMaxFps) + newMaxFps = 24000; + + /* set fixed fps. */ + newMinFps = newMaxFps; + CLOGD2("dualRecordingHint(true), newMaxFps=%d", newMaxFps); + } + + if (getEffectHint() == true) { + flagSpecialMode = true; +#if 0 /* Don't use to set fixed fps in the hal side. */ + /* when effect mode, fps is limited by 24fps */ + if (24000 < newMaxFps) + newMaxFps = 24000; + + /* set fixed fps due to GPU preformance. */ + newMinFps = newMaxFps; +#endif + CLOGD2("effectHint(true), newMaxFps=%d", newMaxFps); + } + + if (getRecordingHint() == true) { + flagSpecialMode = true; +#if 0 /* Don't use to set fixed fps in the hal side. */ +#ifdef USE_VARIABLE_FPS_OF_FRONT_RECORDING + if (getCameraId() == CAMERA_ID_FRONT && getSamsungCamera() == true) { + /* Supported the variable frame rate for Image Quality Performacne */ + CLOGD2("RecordingHint(true),newMinFps=%d,newMaxFps=%d", newMinFps, newMaxFps); + } else +#endif + { + /* set fixed fps. */ + newMinFps = newMaxFps; + } + CLOGD2("RecordingHint(true), newMaxFps=%d", newMaxFps); +#endif + CLOGD2("RecordingHint(true),newMinFps=%d,newMaxFps=%d", newMinFps, newMaxFps); + } + + if (flagSpecialMode == true) { + CLOGD2("special mode enabled, newMaxFps=%d", newMaxFps); + goto done; + } + + curSceneMode = getSceneMode(); + switch (curSceneMode) { + case SCENE_MODE_ACTION: + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps; + } else { + newMinFps = 30000; + newMaxFps = 30000; + } + break; + case SCENE_MODE_PORTRAIT: + case SCENE_MODE_LANDSCAPE: + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps / 2; + } else { + newMinFps = 15000; + newMaxFps = 30000; + } + break; + case SCENE_MODE_NIGHT: + /* for Front MMS mode FPS */ + if (getCameraId() == CAMERA_ID_FRONT && getRecordingHint() == true) + break; + + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps / 4; + } else { + newMinFps = 8000; + newMaxFps = 30000; + } + break; + case SCENE_MODE_NIGHT_PORTRAIT: + case SCENE_MODE_THEATRE: + case SCENE_MODE_BEACH: + case SCENE_MODE_SNOW: + case SCENE_MODE_SUNSET: + case SCENE_MODE_STEADYPHOTO: + case SCENE_MODE_FIREWORKS: + case SCENE_MODE_SPORTS: + case SCENE_MODE_PARTY: + case SCENE_MODE_CANDLELIGHT: + if (getHighSpeedRecording() == true){ + newMinFps = newMaxFps / 2; + } else { + newMinFps = 15000; + newMaxFps = 30000; + } + break; + default: + break; + } + + curShotMode = getShotMode(); + switch (curShotMode) { + case SHOT_MODE_DRAMA: + case SHOT_MODE_3DTOUR: + case SHOT_MODE_3D_PANORAMA: + case SHOT_MODE_LIGHT_TRACE: + newMinFps = 30000; + newMaxFps = 30000; + break; + case SHOT_MODE_ANIMATED_SCENE: + newMinFps = 15000; + newMaxFps = 15000; + break; +#ifdef USE_LIMITATION_FOR_THIRD_PARTY + case THIRD_PARTY_BLACKBOX_MODE: + CLOGI2("limit the maximum 30 fps range in THIRD_PARTY_BLACKBOX_MODE(%d,%d)", newMinFps, newMaxFps); + if (newMinFps > 30000) { + newMinFps = 30000; + } + if (newMaxFps > 30000) { + newMaxFps = 30000; + } + break; + case THIRD_PARTY_VTCALL_MODE: + CLOGI2("limit the maximum 15 fps range in THIRD_PARTY_VTCALL_MODE(%d,%d)", newMinFps, newMaxFps); + if (newMinFps > 15000) { + newMinFps = 15000; + } + if (newMaxFps > 15000) { + newMaxFps = 15000; + } + break; + case THIRD_PARTY_HANGOUT_MODE: + CLOGI2("change fps range 15000,15000 in THIRD_PARTY_HANGOUT_MODE"); + newMinFps = 15000; + newMaxFps = 15000; + 
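The dual-mode and dual-recording branches of m_adjustPreviewFpsRange() above reduce to the same clamp-then-fix step: cap the maximum frame rate at 24000 (24 fps in the x1000 units used throughout this file) and pin the minimum to the maximum. A minimal standalone sketch of that step, assuming plain int fps values in the same x1000 scale:

    /* Sketch of the fixed-fps clamp used by the dual-mode and dual-recording branches above.
     * Values are scaled by 1000, matching the newMinFps/newMaxFps convention in this file. */
    static void clampToFixedFps(int &minFps, int &maxFps, int capFps)
    {
        if (capFps < maxFps)
            maxFps = capFps;   /* limit the ceiling, e.g. 24000 for dual modes */
        minFps = maxFps;       /* fixed frame rate: min == max */
    }

Calling clampToFixedFps(newMinFps, newMaxFps, 24000) reproduces those branches; the scene-mode and shot-mode cases instead assign explicit ranges, and the done: label below normalizes any remaining variable range against the supported fps lists, falling back to half the maximum.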
break; +#endif + default: + break; + } + +done: + if (newMinFps != newMaxFps) { + if (m_getSupportedVariableFpsList(newMinFps, newMaxFps, &newMinFps, &newMaxFps) == false) + newMinFps = newMaxFps / 2; + } + + return NO_ERROR; +} + +void ExynosCamera3Parameters::updatePreviewFpsRange(void) +{ + uint32_t curMinFps = 0; + uint32_t curMaxFps = 0; + int newMinFps = 0; + int newMaxFps = 0; + + getPreviewFpsRange(&curMinFps, &curMaxFps); + newMinFps = curMinFps * 1000; + newMaxFps = curMaxFps * 1000; + + if (m_adjustPreviewFpsRange(newMinFps, newMaxFps) != NO_ERROR) { + CLOGE2("Fils to adjust preview fps range"); + return; + } + + newMinFps = newMinFps / 1000; + newMaxFps = newMaxFps / 1000; + + if (curMinFps != (uint32_t)newMinFps || curMaxFps != (uint32_t)newMaxFps) { + m_setPreviewFpsRange((uint32_t)newMinFps, (uint32_t)newMaxFps); + } +} + +status_t ExynosCamera3Parameters::checkPreviewFpsRange(uint32_t minFps, uint32_t maxFps) +{ + status_t ret = NO_ERROR; + uint32_t curMinFps = 0, curMaxFps = 0; + + getPreviewFpsRange(&curMinFps, &curMaxFps); + if (curMinFps != minFps || curMaxFps != maxFps) + m_setPreviewFpsRange(minFps, maxFps); + + return ret; +} + +void ExynosCamera3Parameters::m_setPreviewFpsRange(uint32_t min, uint32_t max) +{ + setMetaCtlAeTargetFpsRange(&m_metadata, min, max); + setMetaCtlSensorFrameDuration(&m_metadata, (uint64_t)((1000 * 1000 * 1000) / (uint64_t)max)); + + CLOGI2("fps min(%d) max(%d)", min, max); +} + +void ExynosCamera3Parameters::getPreviewFpsRange(uint32_t *min, uint32_t *max) +{ + /* ex) min = 15 , max = 30 */ + getMetaCtlAeTargetFpsRange(&m_metadata, min, max); +} + +bool ExynosCamera3Parameters::m_getSupportedVariableFpsList(int min, int max, int *newMin, int *newMax) +{ + int (*sizeList)[2]; + + if (getCameraId() == CAMERA_ID_BACK) { + /* Try to find exactly same in REAR LIST*/ + sizeList = m_staticInfo->rearFPSList; + for (int i = 0; i < m_staticInfo->rearFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find exactly same in HIDDEN REAR LIST*/ + sizeList = m_staticInfo->hiddenRearFPSList; + for (int i = 0; i < m_staticInfo->hiddenRearFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find similar fps in REAR LIST*/ + sizeList = m_staticInfo->rearFPSList; + for (int i = 0; i < m_staticInfo->rearFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW2("calibrate new fps(%d/%d -> %d/%d)", min, max, *newMin, *newMax); + + return true; + } + } + /* Try to find similar fps in HIDDEN REAR LIST*/ + sizeList = m_staticInfo->hiddenRearFPSList; + for (int i = 0; i < m_staticInfo->hiddenRearFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW2("calibrate new fps(%d/%d -> %d/%d)", min, max, *newMin, *newMax); + + return true; + } + } + } else { + /* Try to find exactly same in FRONT LIST*/ + sizeList = m_staticInfo->frontFPSList; + for (int i = 0; i < m_staticInfo->frontFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find exactly same in HIDDEN 
FRONT LIST*/ + sizeList = m_staticInfo->hiddenFrontFPSList; + for (int i = 0; i < m_staticInfo->hiddenFrontFPSListMax; i++) { + if (sizeList[i][1] == max && sizeList[i][0] == min) { + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + return true; + } + } + /* Try to find similar fps in FRONT LIST*/ + sizeList = m_staticInfo->frontFPSList; + for (int i = 0; i < m_staticInfo->frontFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW2("calibrate new fps(%d/%d -> %d/%d)", min, max, *newMin, *newMax); + + return true; + } + } + /* Try to find similar fps in HIDDEN FRONT LIST*/ + sizeList = m_staticInfo->hiddenFrontFPSList; + for (int i = 0; i < m_staticInfo->hiddenFrontFPSListMax; i++) { + if (max <= sizeList[i][1] && sizeList[i][0] <= min) { + if(sizeList[i][1] == sizeList[i][0]) + continue; + + *newMin = sizeList[i][0]; + *newMax = sizeList[i][1]; + + CLOGW2("calibrate new fps(%d/%d -> %d/%d)", min, max, *newMin, *newMax); + + return true; + } + } + } + + return false; +} + +#if 0 +status_t ExynosCamera3Parameters::checkVideoSize(const CameraParameters& params) +{ + /* Video size */ + int newVideoW = 0; + int newVideoH = 0; + + params.getVideoSize(&newVideoW, &newVideoH); + + if (0 < newVideoW && 0 < newVideoH && + m_isSupportedVideoSize(newVideoW, newVideoH) == false) { + return BAD_VALUE; + } + + CLOGI("INFO(%s):newVideo Size (%dx%d), ratioId(%d)", + "setParameters", newVideoW, newVideoH, m_cameraInfo.videoSizeRatioId); + m_setVideoSize(newVideoW, newVideoH); + m_params.setVideoSize(newVideoW, newVideoH); + + return NO_ERROR; +} +#else +status_t ExynosCamera3Parameters::checkVideoSize(int newVideoW, int newVideoH) +{ + /* Video size */ +// params.getVideoSize(&newVideoW, &newVideoH); + + if (0 < newVideoW && 0 < newVideoH && + m_isSupportedVideoSize(newVideoW, newVideoH) == false) { + return BAD_VALUE; + } + + CLOGI("INFO(%s):newVideo Size (%dx%d), ratioId(%d)", + "setParameters", newVideoW, newVideoH, m_cameraInfo.videoSizeRatioId); + m_setVideoSize(newVideoW, newVideoH); +// m_params.setVideoSize(newVideoW, newVideoH); + + return NO_ERROR; +} +#endif + +bool ExynosCamera3Parameters::m_isSupportedVideoSize(const int width, + const int height) +{ + int maxWidth = 0; + int maxHeight = 0; + int (*sizeList)[SIZE_OF_RESOLUTION]; + + getMaxVideoSize(&maxWidth, &maxHeight); + + if (maxWidth < width || maxHeight < height) { + CLOGE2("invalid video Size(maxSize(%d/%d) size(%d/%d)", maxWidth, maxHeight, width, height); + return false; + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->rearVideoList; + for (int i = 0; i < m_staticInfo->rearVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->frontVideoList; + for (int i = 0; i < m_staticInfo->frontVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearVideoList; + for (int i = 0; i < m_staticInfo->hiddenRearVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if 
(sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->hiddenFrontVideoList; + for (int i = 0; i < m_staticInfo->hiddenFrontVideoListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.videoSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + CLOGE2("Invalid video size(%dx%d)", width, height); + + return false; +} + +bool ExynosCamera3Parameters::m_isUHDRecordingMode(void) +{ + bool isUHDRecording = false; + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + + if (((videoW == 3840 && videoH == 2160) || (videoW == 2560 && videoH == 1440)) && getRecordingHint() == true) + isUHDRecording = true; + +#if 0 + /* we need to make WQHD SCP(LCD size), when FHD recording for clear rendering */ + int hwPreviewW = 0, hwPreviewH = 0; + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + + /* regard align margin(ex:1920x1088), check size more than 1920x1088 */ + /* if (1920 < hwPreviewW && 1080 < hwPreviewH) */ + if ((ALIGN_UP(1920, CAMERA_MAGIC_ALIGN) < hwPreviewW) && + (ALIGN_UP(1080, CAMERA_MAGIC_ALIGN) < hwPreviewH) && + (getRecordingHint() == true)) { + isUHDRecording = true; + } +#endif + + return isUHDRecording; +} + +void ExynosCamera3Parameters::m_setVideoSize(int w, int h) +{ + m_cameraInfo.videoW = w; + m_cameraInfo.videoH = h; +} + +bool ExynosCamera3Parameters::getUHDRecordingMode(void) +{ + return m_isUHDRecordingMode(); +} + +void ExynosCamera3Parameters::getVideoSize(int *w, int *h) +{ + *w = m_cameraInfo.videoW; + *h = m_cameraInfo.videoH; +} + +void ExynosCamera3Parameters::getMaxVideoSize(int *w, int *h) +{ + *w = m_staticInfo->maxVideoW; + *h = m_staticInfo->maxVideoH; +} + +int ExynosCamera3Parameters::getVideoFormat(void) +{ + if (getAdaptiveCSCRecording() == true) { + return V4L2_PIX_FMT_NV21M; + } else { + return V4L2_PIX_FMT_NV12M; + } +} + +status_t ExynosCamera3Parameters::checkCallbackSize(int callbackW, int callbackH) +{ + status_t ret = NO_ERROR; + int curCallbackW = -1, curCallbackH = -1; + + if (callbackW < 0 || callbackH < 0) { + CLOGE("ERR(%s[%d]):Invalid callback size. %dx%d", + __FUNCTION__, __LINE__, callbackW, callbackH); + return INVALID_OPERATION; + } + + getCallbackSize(&curCallbackW, &curCallbackH); + + if (callbackW != curCallbackW || callbackH != curCallbackH) { + ALOGI("INFO(%s[%d]):curCallbackSize %dx%d newCallbackSize %dx%d", + __FUNCTION__, __LINE__, + curCallbackW, curCallbackH, callbackW, callbackH); + + m_setCallbackSize(callbackW, callbackH); + } + + return ret; +} + +void ExynosCamera3Parameters::m_setCallbackSize(int w, int h) +{ + m_cameraInfo.callbackW = w; + m_cameraInfo.callbackH = h; +} + +void ExynosCamera3Parameters::getCallbackSize(int *w, int *h) +{ + *w = m_cameraInfo.callbackW; + *h = m_cameraInfo.callbackH; +} + +status_t ExynosCamera3Parameters::checkCallbackFormat(int callbackFormat) +{ + status_t ret = NO_ERROR; + int curCallbackFormat = -1; + int newCallbackFormat = -1; + + if (callbackFormat < 0) { + CLOGE("ERR(%s[%d]):Inavlid callback format. 
%x", + __FUNCTION__, __LINE__, callbackFormat); + return INVALID_OPERATION; + } + + newCallbackFormat = HAL_PIXEL_FORMAT_2_V4L2_PIX(callbackFormat); + curCallbackFormat = getCallbackFormat(); + + if (curCallbackFormat != newCallbackFormat) { + char curFormatName[V4L2_FOURCC_LENGTH] = {}; + char newFormatName[V4L2_FOURCC_LENGTH] = {}; + m_getV4l2Name(curFormatName, V4L2_FOURCC_LENGTH, curCallbackFormat); + m_getV4l2Name(newFormatName, V4L2_FOURCC_LENGTH, newCallbackFormat); + CLOGI("INFO(%s[%d]):curCallbackFormat %s newCallbackFormat %s", + __FUNCTION__, __LINE__, curFormatName, newFormatName); + + m_setCallbackFormat(newCallbackFormat); + } + + return ret; +} + +void ExynosCamera3Parameters::m_setCallbackFormat(int colorFormat) +{ + m_cameraInfo.callbackFormat = colorFormat; +} + +int ExynosCamera3Parameters::getCallbackFormat(void) +{ + return m_cameraInfo.callbackFormat; +} + +bool ExynosCamera3Parameters::getReallocBuffer() { + Mutex::Autolock lock(m_reallocLock); + return m_reallocBuffer; +} + +bool ExynosCamera3Parameters::setReallocBuffer(bool enable) { + Mutex::Autolock lock(m_reallocLock); + m_reallocBuffer = enable; + return m_reallocBuffer; +} + +void ExynosCamera3Parameters::setFastFpsMode(int fpsMode) +{ + m_fastFpsMode = fpsMode; +} + +int ExynosCamera3Parameters::getFastFpsMode(void) +{ + return m_fastFpsMode; +} + +void ExynosCamera3Parameters::m_setHighSpeedRecording(bool highSpeed) +{ + m_cameraInfo.highSpeedRecording = highSpeed; +} + +bool ExynosCamera3Parameters::getHighSpeedRecording(void) +{ + return m_cameraInfo.highSpeedRecording; +} + +bool ExynosCamera3Parameters::m_adjustHighSpeedRecording(int curMinFps, int curMaxFps, __unused int newMinFps, int newMaxFps) +{ + bool flagHighSpeedRecording = false; + bool restartPreview = false; + + /* setting high speed */ + if (30 < newMaxFps) { + flagHighSpeedRecording = true; + /* 30 -> 60/120 */ + if (curMaxFps <= 30) + restartPreview = true; + /* 60 -> 120 */ + else if (curMaxFps <= 60 && 120 <= newMaxFps) + restartPreview = true; + /* 120 -> 60 */ + else if (curMaxFps <= 120 && newMaxFps <= 60) + restartPreview = true; + /* variable 60 -> fixed 60 */ + else if (curMinFps < 60 && newMaxFps <= 60) + restartPreview = true; + /* variable 120 -> fixed 120 */ + else if (curMinFps < 120 && newMaxFps <= 120) + restartPreview = true; + } else if (newMaxFps <= 30) { + flagHighSpeedRecording = false; + if (30 < curMaxFps) + restartPreview = true; + } + + if (restartPreview == true && + getPreviewRunning() == true) { + CLOGD2("setRestartPreviewChecked true"); + m_setRestartPreviewChecked(true); + } + + return flagHighSpeedRecording; +} + +void ExynosCamera3Parameters::m_setRestartPreviewChecked(bool restart) +{ + CLOGD2("setRestartPreviewChecked(during SetParameters) %s", restart ? "true" : "false"); + Mutex::Autolock lock(m_parameterLock); + + m_flagRestartPreviewChecked = restart; +} + +bool ExynosCamera3Parameters::m_getRestartPreviewChecked(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_flagRestartPreviewChecked; +} + +bool ExynosCamera3Parameters::getPreviewSizeChanged(void) +{ + return m_previewSizeChanged; +} + +void ExynosCamera3Parameters::m_setRestartPreview(bool restart) +{ + CLOGD2("DEBUG(%s):setRestartPreview %s", restart ? 
"true" : "false"); + Mutex::Autolock lock(m_parameterLock); + + m_flagRestartPreview = restart; + +} + +void ExynosCamera3Parameters::setPreviewRunning(bool enable) +{ + Mutex::Autolock lock(m_parameterLock); + + m_previewRunning = enable; + m_flagRestartPreviewChecked = false; + m_flagRestartPreview = false; + m_previewSizeChanged = false; +} + +void ExynosCamera3Parameters::setPictureRunning(bool enable) +{ + Mutex::Autolock lock(m_parameterLock); + + m_pictureRunning = enable; +} + +void ExynosCamera3Parameters::setRecordingRunning(bool enable) +{ + Mutex::Autolock lock(m_parameterLock); + + m_recordingRunning = enable; +} + +bool ExynosCamera3Parameters::getPreviewRunning(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_previewRunning; +} + +bool ExynosCamera3Parameters::getPictureRunning(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_pictureRunning; +} + +bool ExynosCamera3Parameters::getRecordingRunning(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_recordingRunning; +} + +bool ExynosCamera3Parameters::getRestartPreview(void) +{ + Mutex::Autolock lock(m_parameterLock); + + return m_flagRestartPreview; +} + +void ExynosCamera3Parameters::m_setVideoStabilization(bool stabilization) +{ + m_cameraInfo.videoStabilization = stabilization; +} + +bool ExynosCamera3Parameters::getVideoStabilization(void) +{ + return m_cameraInfo.videoStabilization; +} + +bool ExynosCamera3Parameters::updateTpuParameters(void) +{ + status_t ret = NO_ERROR; + + /* 1. update data video stabilization state to actual*/ + CLOGD2("video stabilization old(%d) new(%d)", m_cameraInfo.videoStabilization, m_flagVideoStabilization); + m_setVideoStabilization(m_flagVideoStabilization); + + bool hwVdisMode = this->getHWVdisMode(); + + if (setDisEnable(hwVdisMode) != NO_ERROR) { + CLOGE2("setDisEnable(%d) fail", hwVdisMode); + } + + /* 2. update data 3DNR state to actual*/ + CLOGD2("3DNR old(%d) new(%d)", m_cameraInfo.is3dnrMode, m_flag3dnrMode); + m_set3dnrMode(m_flag3dnrMode); + if (setDnrEnable(m_flag3dnrMode) != NO_ERROR) { + CLOGE2("setDnrEnable(%d) fail", m_flag3dnrMode); + } + + return true; +} + +bool ExynosCamera3Parameters::isSWVdisMode(void) +{ + bool swVDIS_mode = false; + bool use3DNR_dmaout = false; + + int nPreviewW, nPreviewH; + getPreviewSize(&nPreviewW, &nPreviewH); + + if ((getRecordingHint() == true) && + (getHighSpeedRecording() == false) && + (use3DNR_dmaout == false) && + (getSWVdisUIMode() == true) && + ((nPreviewW == 1920 && nPreviewH == 1080) || (nPreviewW == 1280 && nPreviewH == 720))) + { + swVDIS_mode = true; + } + + return swVDIS_mode; +} + +bool ExynosCamera3Parameters::isSWVdisModeWithParam(int nPreviewW, int nPreviewH) +{ + bool swVDIS_mode = false; + bool use3DNR_dmaout = false; + + if ((getRecordingHint() == true) && + (getHighSpeedRecording() == false) && + (use3DNR_dmaout == false) && + (getSWVdisUIMode() == true) && + ((nPreviewW == 1920 && nPreviewH == 1080) || (nPreviewW == 1280 && nPreviewH == 720))) + { + swVDIS_mode = true; + } + + return swVDIS_mode; +} + +bool ExynosCamera3Parameters::getHWVdisMode(void) +{ + bool ret = this->getVideoStabilization(); + + /* + * Only true case, + * we will test whether support or not. 
+ */ + if (ret == true) { + switch (getCameraId()) { + case CAMERA_ID_BACK: +#ifdef SUPPORT_BACK_HW_VDIS + ret = SUPPORT_BACK_HW_VDIS; +#else + ret = false; +#endif + break; + case CAMERA_ID_FRONT: +#ifdef SUPPORT_FRONT_HW_VDIS + ret = SUPPORT_FRONT_HW_VDIS; +#else + ret = false; +#endif + break; + default: + ret = false; + break; + } + } + + return ret; +} + +int ExynosCamera3Parameters::getHWVdisFormat(void) +{ + return V4L2_PIX_FMT_YUYV; +} + +void ExynosCamera3Parameters::m_setSWVdisMode(bool swVdis) +{ + m_cameraInfo.swVdisMode = swVdis; +} + +bool ExynosCamera3Parameters::getSWVdisMode(void) +{ + return m_cameraInfo.swVdisMode; +} + +void ExynosCamera3Parameters::m_setSWVdisUIMode(bool swVdisUI) +{ + m_cameraInfo.swVdisUIMode = swVdisUI; +} + +bool ExynosCamera3Parameters::getSWVdisUIMode(void) +{ + return m_cameraInfo.swVdisUIMode; +} +#if 0 +status_t ExynosCamera3Parameters::checkPreviewSize(const CameraParameters& params) +{ + /* preview size */ + int previewW = 0; + int previewH = 0; + int newPreviewW = 0; + int newPreviewH = 0; + int newCalHwPreviewW = 0; + int newCalHwPreviewH = 0; + + int curPreviewW = 0; + int curPreviewH = 0; + int curHwPreviewW = 0; + int curHwPreviewH = 0; + + params.getPreviewSize(&previewW, &previewH); + getPreviewSize(&curPreviewW, &curPreviewH); + getHwPreviewSize(&curHwPreviewW, &curHwPreviewH); + m_isHighResolutionMode(params); + + newPreviewW = previewW; + newPreviewH = previewH; + if (m_adjustPreviewSize(previewW, previewH, &newPreviewW, &newPreviewH, &newCalHwPreviewW, &newCalHwPreviewH) != OK) { + CLOGE("ERR(%s): adjustPreviewSize fail, newPreviewSize(%dx%d)", "Parameters", newPreviewW, newPreviewH); + return BAD_VALUE; + } + + if (m_isSupportedPreviewSize(newPreviewW, newPreviewH) == false) { + CLOGE("ERR(%s): new preview size is invalid(%dx%d)", "Parameters", newPreviewW, newPreviewH); + return BAD_VALUE; + } + + CLOGI("INFO(%s):Cur Preview size(%dx%d)", "setParameters", curPreviewW, curPreviewH); + CLOGI("INFO(%s):Cur HwPreview size(%dx%d)", "setParameters", curHwPreviewW, curHwPreviewH); + CLOGI("INFO(%s):param.preview size(%dx%d)", "setParameters", previewW, previewH); + CLOGI("INFO(%s):Adjust Preview size(%dx%d), ratioId(%d)", "setParameters", newPreviewW, newPreviewH, m_cameraInfo.previewSizeRatioId); + CLOGI("INFO(%s):Calibrated HwPreview size(%dx%d)", "setParameters", newCalHwPreviewW, newCalHwPreviewH); + + if (curPreviewW != newPreviewW || curPreviewH != newPreviewH || + curHwPreviewW != newCalHwPreviewW || curHwPreviewH != newCalHwPreviewH || + getHighResolutionCallbackMode() == true) { + m_setPreviewSize(newPreviewW, newPreviewH); + m_setHwPreviewSize(newCalHwPreviewW, newCalHwPreviewH); + + if (getHighResolutionCallbackMode() == true) { + m_previewSizeChanged = false; + } else { + CLOGD2("DEBUG(%s):setRestartPreviewChecked true"); + m_setRestartPreviewChecked(true); + m_previewSizeChanged = true; + } + } else { + m_previewSizeChanged = false; + } + + updateBinningScaleRatio(); + updateBnsScaleRatio(); + + m_params.setPreviewSize(newPreviewW, newPreviewH); + + return NO_ERROR; +} +#else +status_t ExynosCamera3Parameters::checkPreviewSize(int previewW, int previewH) +{ + /* preview size */ + int newPreviewW = 0; + int newPreviewH = 0; + int newCalHwPreviewW = 0; + int newCalHwPreviewH = 0; + + int curPreviewW = 0; + int curPreviewH = 0; + int curHwPreviewW = 0; + int curHwPreviewH = 0; + +// params.getPreviewSize(&previewW, &previewH); + getPreviewSize(&curPreviewW, &curPreviewH); + getHwPreviewSize(&curHwPreviewW, &curHwPreviewH); 
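/*
 * Illustrative sketch, not from the imported SGSWPlatform source: the size checks in
 * this file (m_isSupportedPreviewSize(), m_isSupportedVideoSize(), m_isSupportedPictureSize())
 * all walk static int[][SIZE_OF_RESOLUTION] tables (rear/front plus the "hidden" lists),
 * skip entries above the sensor maximum, and record the matching aspect-ratio bucket.
 * The hypothetical names Resolution and isListedSize below are the editoral stand-ins for
 * those tables, shown only to make the lookup pattern explicit:
 *
 *   struct Resolution { int w; int h; int ratioId; };   // hypothetical, mirrors one table row
 *
 *   static bool isListedSize(const Resolution *list, int count,
 *                            int maxW, int maxH, int w, int h, int *ratioId)
 *   {
 *       for (int i = 0; i < count; i++) {
 *           if (list[i].w > maxW || list[i].h > maxH)
 *               continue;                        // ignore entries beyond the sensor limit
 *           if (list[i].w == w && list[i].h == h) {
 *               *ratioId = list[i].ratioId;      // remember the aspect-ratio bucket
 *               return true;
 *           }
 *       }
 *       return false;                            // caller maps this to BAD_VALUE
 *   }
 */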
+// m_isHighResolutionMode(params); + + newPreviewW = previewW; + newPreviewH = previewH; + if (m_adjustPreviewSize(previewW, previewH, &newPreviewW, &newPreviewH, &newCalHwPreviewW, &newCalHwPreviewH) != OK) { + CLOGE("ERR(%s): adjustPreviewSize fail, newPreviewSize(%dx%d)", "Parameters", newPreviewW, newPreviewH); + return BAD_VALUE; + } + + if (m_isSupportedPreviewSize(newPreviewW, newPreviewH) == false) { + CLOGE("ERR(%s): new preview size is invalid(%dx%d)", "Parameters", newPreviewW, newPreviewH); + return BAD_VALUE; + } + + CLOGI("INFO(%s):Cur Preview size(%dx%d)", "setParameters", curPreviewW, curPreviewH); + CLOGI("INFO(%s):Cur HwPreview size(%dx%d)", "setParameters", curHwPreviewW, curHwPreviewH); + CLOGI("INFO(%s):param.preview size(%dx%d)", "setParameters", previewW, previewH); + CLOGI("INFO(%s):Adjust Preview size(%dx%d), ratioId(%d)", "setParameters", newPreviewW, newPreviewH, m_cameraInfo.previewSizeRatioId); + CLOGI("INFO(%s):Calibrated HwPreview size(%dx%d)", "setParameters", newCalHwPreviewW, newCalHwPreviewH); + + if (curPreviewW != newPreviewW || curPreviewH != newPreviewH || + curHwPreviewW != newCalHwPreviewW || curHwPreviewH != newCalHwPreviewH || + getHighResolutionCallbackMode() == true) { + m_setPreviewSize(newPreviewW, newPreviewH); + m_setHwPreviewSize(newCalHwPreviewW, newCalHwPreviewH); + + if (getHighResolutionCallbackMode() == true) { + m_previewSizeChanged = false; + } else { + CLOGD2("setRestartPreviewChecked true"); + m_setRestartPreviewChecked(true); + m_previewSizeChanged = true; + } + } else { + m_previewSizeChanged = false; + } + + updateBinningScaleRatio(); + updateBnsScaleRatio(); + +// m_params.setPreviewSize(newPreviewW, newPreviewH); + + return NO_ERROR; +} +#endif + +status_t ExynosCamera3Parameters::checkYuvSize(const int width, const int height, const int outputPortId) +{ + status_t ret = NO_ERROR; + int curYuvWidth = 0; + int curYuvHeight = 0; + + getYuvSize(&curYuvWidth, &curYuvHeight, outputPortId); + + if (m_isSupportedPictureSize(width, height) == false) { + ALOGE("ERR(%s[%d]):Invalid YUV size. %dx%d", + __FUNCTION__, __LINE__, width, height); + return BAD_VALUE; + } + + CLOGI("INFO(%s[%d]):curYuvSize %dx%d newYuvSize %dx%d outputPortId %d", + __FUNCTION__, __LINE__, + curYuvWidth, curYuvHeight, width, height, outputPortId); + + if (curYuvWidth != width || curYuvHeight != height) { + m_setYuvSize(width, height, outputPortId); + + ALOGD("DEBUG(%s):setRestartPreviewChecked true", __FUNCTION__); + m_setRestartPreviewChecked(true); + m_previewSizeChanged = true; + } else { + m_previewSizeChanged = false; + } + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::m_adjustPreviewSize(__unused int previewW, __unused int previewH, + int *newPreviewW, int *newPreviewH, + int *newCalHwPreviewW, int *newCalHwPreviewH) +{ + /* hack : when app give 1446, we calibrate to 1440 */ + if (*newPreviewW == 1446 && *newPreviewH == 1080) { + CLOGW2("Invalid previewSize(%d/%d). so, calibrate to (1440/%d)", *newPreviewW, *newPreviewH, *newPreviewH); + *newPreviewW = 1440; + } + + if (getRecordingHint() == true && getHighSpeedRecording() == true) { + int sizeList[SIZE_LUT_INDEX_END]; + + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + /* On high-speed recording, scaling-up by SCC/SCP occurs the IS-ISP performance degradation. + The scaling-up might be done by GSC for recording */ + *newPreviewW = (sizeList[BDS_W] < sizeList[TARGET_W])? sizeList[BDS_W] : sizeList[TARGET_W]; + *newPreviewH = (sizeList[BDS_H] < sizeList[TARGET_H])? 
sizeList[BDS_H] : sizeList[TARGET_H]; + } else { + CLOGE2("m_getPreviewSizeList() fail"); + } + } + + /* calibrate H/W aligned size*/ + if (getRecordingHint() == true) { + int videoW = 0, videoH = 0; + ExynosRect bdsRect; + + getVideoSize(&videoW, &videoH); + + if ((videoW <= *newPreviewW) && (videoH <= *newPreviewH)) { + { +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + if ((videoW <= 1920 || videoH <= 1080) && + (1920 < *newPreviewW || 1080 < *newPreviewH)) { + + float videoRatio = ROUND_OFF(((float)videoW / (float)videoH), 2); + + if (videoRatio == 1.33f) { /* 4:3 */ + *newCalHwPreviewW = 1440; + *newCalHwPreviewH = 1080; + } else if (videoRatio == 1.77f) { /* 16:9 */ + *newCalHwPreviewW = 1920; + *newCalHwPreviewH = 1080; + } else if (videoRatio == 1.00f) { /* 1:1 */ + *newCalHwPreviewW = 1088; + *newCalHwPreviewH = 1088; + } else { + *newCalHwPreviewW = *newPreviewW; + *newCalHwPreviewH = *newPreviewH; + } + + if (*newCalHwPreviewW != *newPreviewW || + *newCalHwPreviewH != *newPreviewH) { + CLOGW2("Limit hw preview size until %d x %d when videoSize(%d x %d)", + *newCalHwPreviewW, *newCalHwPreviewH, videoW, videoH); + } + } else +#endif + { + *newCalHwPreviewW = *newPreviewW; + *newCalHwPreviewH = *newPreviewH; + } + } + } else { + /* video size > preview size : Use BDS size for SCP output size */ + { + CLOGV2("preview(%dx%d) is smaller than video(%dx%d)", *newPreviewW, *newPreviewH, videoW, videoH); + + /* If the video ratio is differ with preview ratio, + the default ratio is set into preview ratio */ + if (SIZE_RATIO(*newPreviewW, *newPreviewH) != SIZE_RATIO(videoW, videoH)) + CLOGW2("preview ratio(%dx%d) is not matched with video ratio(%dx%d)", + *newPreviewW, *newPreviewH, videoW, videoH); + + if (m_isSupportedPreviewSize(*newPreviewW, *newPreviewH) == false) { + CLOGE2("new preview size is invalid(%dx%d)", *newPreviewW, *newPreviewH); + return BAD_VALUE; + } + + /* + * This call is to get real preview size. + * so, HW dis size must not be added. + */ + m_getPreviewBdsSize(&bdsRect); + + *newCalHwPreviewW = bdsRect.w; + *newCalHwPreviewH = bdsRect.h; + } + } + } else if (getHighResolutionCallbackMode() == true) { + if(CAMERA_LCD_SIZE == LCD_SIZE_1280_720) { + *newCalHwPreviewW = 1280; + *newCalHwPreviewH = 720; + } else { + *newCalHwPreviewW = 1920; + *newCalHwPreviewH = 1080; + } + } else { + *newCalHwPreviewW = *newPreviewW; + *newCalHwPreviewH = *newPreviewH; + } + +#ifdef USE_CAMERA2_API_SUPPORT +#if defined(ENABLE_FULL_FRAME) + ExynosRect bdsRect; + getPreviewBdsSize(&bdsRect); + *newCalHwPreviewW = bdsRect.w; + *newCalHwPreviewH = bdsRect.h; +#else + /* 1. try to get exact ratio */ + if (m_isSupportedPreviewSize(*newPreviewW, *newPreviewH) == false) { + CLOGE("ERR(%s): new preview size is invalid(%dx%d)", "Parameters", newPreviewW, newPreviewH); + } + +#if 0 + /* 2. 
get bds size to set size to scp node due to internal scp buffer */ + int sizeList[SIZE_LUT_INDEX_END]; + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + *newCalHwPreviewW = sizeList[BDS_W]; + *newCalHwPreviewH = sizeList[BDS_H]; + } else { + ExynosRect bdsRect; + getPreviewBdsSize(&bdsRect); + *newCalHwPreviewW = bdsRect.w; + *newCalHwPreviewH = bdsRect.h; + } +#endif +#endif +#endif + + return NO_ERROR; +} + +bool ExynosCamera3Parameters::m_isSupportedPreviewSize(const int width, + const int height) +{ + int maxWidth, maxHeight = 0; + int (*sizeList)[SIZE_OF_RESOLUTION]; + + if (getHighResolutionCallbackMode() == true) { + CLOGD2("Burst panorama mode start"); + m_cameraInfo.previewSizeRatioId = SIZE_RATIO_16_9; + return true; + } + + getMaxPreviewSize(&maxWidth, &maxHeight); + + if (maxWidth*maxHeight < width*height) { + CLOGE2("invalid PreviewSize(maxSize(%d/%d) size(%d/%d)", + maxWidth, maxHeight, width, height); + return false; + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->rearPreviewList; + for (int i = 0; i < m_staticInfo->rearPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->frontPreviewList; + for (int i = 0; i < m_staticInfo->frontPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPreviewList; + for (int i = 0; i < m_staticInfo->hiddenRearPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->hiddenFrontPreviewList; + for (int i = 0; i < m_staticInfo->hiddenFrontPreviewListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.previewSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + CLOGE2("Invalid preview size(%dx%d)", width, height); + + return false; +} + +status_t ExynosCamera3Parameters::m_getPreviewSizeList(int *sizeList) +{ + int *tempSizeList = NULL; + int configMode = -1; + + if (getHalVersion() == IS_HAL_VER_3_2) { + /* CAMERA2_API use Video Scenario LUT as a default */ + if (m_staticInfo->videoSizeLut == NULL) { + CLOGE2("videoSizeLut is NULL"); + return INVALID_OPERATION; + } else if (m_staticInfo->videoSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + CLOGE2("unsupported video ratioId(%d)", m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } +#if defined(ENABLE_FULL_FRAME) + tempSizeList = m_staticInfo->videoSizeLut[m_cameraInfo.previewSizeRatioId]; +#else + configMode = this->getConfigMode(); + switch (configMode) { + case CONFIG_MODE::NORMAL: + tempSizeList = m_staticInfo->previewSizeLut[m_cameraInfo.previewSizeRatioId]; + break; + case CONFIG_MODE::HIGHSPEED_120: + tempSizeList = m_staticInfo->videoSizeLutHighSpeed120[configMode-2]; + break; + } +#endif + } else { + if (getDualMode() == true) { + if (getDualRecordingHint() == true + && m_staticInfo->dualVideoSizeLut != NULL + && m_cameraInfo.previewSizeRatioId < 
m_staticInfo->videoSizeLutMax) { + tempSizeList = m_staticInfo->dualVideoSizeLut[m_cameraInfo.previewSizeRatioId]; + } else if (m_staticInfo->dualPreviewSizeLut != NULL + && m_cameraInfo.previewSizeRatioId < m_staticInfo->previewSizeLutMax) { + tempSizeList = m_staticInfo->dualPreviewSizeLut[m_cameraInfo.previewSizeRatioId]; + } else { /* Use Preview LUT as a default */ + if (m_staticInfo->previewSizeLut == NULL) { + CLOGE2("previewSizeLut is NULL"); + return INVALID_OPERATION; + } else if (m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + CLOGE2("unsupported preview ratioId(%d)", m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->previewSizeLut[m_cameraInfo.previewSizeRatioId]; + } + } else { /* getDualMode() == false */ + if (getRecordingHint() == true) { + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + if (getHighSpeedRecording() == true) { + int fpsmode = 0; + fpsmode = getFastFpsMode(); + + if (fpsmode <= 0) { + CLOGE2("getFastFpsMode fpsmode(%d) fail", fpsmode); + return BAD_VALUE; + } else if (m_staticInfo->videoSizeLutHighSpeed == NULL) { + CLOGE2("videoSizeLutHighSpeed is NULL"); + return INVALID_OPERATION; + } + + fpsmode--; + tempSizeList = m_staticInfo->videoSizeLutHighSpeed[fpsmode]; + } +#ifdef USE_BNS_RECORDING + else if (m_staticInfo->videoSizeBnsLut != NULL + && videoW == 1920 && videoH == 1080) { /* Use BNS Recording only for FHD(16:9) */ + if (m_staticInfo->videoSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + CLOGE2("unsupported video ratioId(%d)", m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->videoSizeBnsLut[m_cameraInfo.previewSizeRatioId]; + } +#endif + else { /* Normal Recording Mode */ + if (m_staticInfo->videoSizeLut == NULL) { + CLOGE2("videoSizeLut is NULL"); + return INVALID_OPERATION; + } else if (m_staticInfo->videoSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + CLOGE2("unsupported video ratioId(%d)", m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->videoSizeLut[m_cameraInfo.previewSizeRatioId]; + } + } +#ifdef USE_BINNING_MODE + else if (getBinningMode() == true) { + /* + * VT mode + * 1: 3G vtmode (176x144, Fixed 7fps) + * 2: LTE or WIFI vtmode (640x480, Fixed 15fps) + */ + int index = 0; + if (m_staticInfo->vtcallSizeLut == NULL + || m_staticInfo->vtcallSizeLutMax == 0) { + CLOGE2("vtcallSizeLut is NULL"); + return INVALID_OPERATION; + } + + for (index = 0; index < m_staticInfo->vtcallSizeLutMax; index++) { + if (m_staticInfo->vtcallSizeLut[index][0] == m_cameraInfo.previewSizeRatioId) + break; + } + + if (m_staticInfo->vtcallSizeLutMax <= index) + index = 0; + + tempSizeList = m_staticInfo->vtcallSizeLut[index]; + } +#endif + else { /* Use Preview LUT */ + if (m_staticInfo->previewSizeLut == NULL) { + CLOGE2("previewSizeLut is NULL"); + return INVALID_OPERATION; + } else if (m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId) { + CLOGE2("unsupported preview ratioId(%d)", m_cameraInfo.previewSizeRatioId); + return BAD_VALUE; + } + + tempSizeList = m_staticInfo->previewSizeLut[m_cameraInfo.previewSizeRatioId]; + } + } + } + + if (tempSizeList == NULL) { + CLOGE2("fail to get LUT"); + return INVALID_OPERATION; + } + + for (int i = 0; i < SIZE_LUT_INDEX_END; i++) + sizeList[i] = tempSizeList[i]; + + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_getSWVdisPreviewSize(int w, int h, int *newW, int *newH) +{ + if (w < 0 || h < 0) { + return; + } + + if (w == 1920 && h == 
1080) { + *newW = 2304; + *newH = 1296; + } + else if (w == 1280 && h == 720) { + *newW = 1536; + *newH = 864; + } + else { + *newW = ALIGN_UP((w * 6) / 5, CAMERA_ISP_ALIGN); + *newH = ALIGN_UP((h * 6) / 5, CAMERA_ISP_ALIGN); + } +} + +bool ExynosCamera3Parameters::m_isHighResolutionCallbackSize(const int width, const int height) +{ + bool highResolutionCallbackMode; + + if (width == m_staticInfo->highResolutionCallbackW && height == m_staticInfo->highResolutionCallbackH) + highResolutionCallbackMode = true; + else + highResolutionCallbackMode = false; + + CLOGD("DEBUG(%s):highResolutionCallSize:%s", "setParameters", + highResolutionCallbackMode == true? "on":"off"); + + m_setHighResolutionCallbackMode(highResolutionCallbackMode); + + return highResolutionCallbackMode; +} + +void ExynosCamera3Parameters::m_isHighResolutionMode(const CameraParameters& params) +{ + bool highResolutionCallbackMode; + int shotmode = params.getInt("shot-mode"); + + if ((getRecordingHint() == false) && (shotmode == SHOT_MODE_PANORAMA)) + highResolutionCallbackMode = true; + else + highResolutionCallbackMode = false; + + CLOGD("DEBUG(%s):highResolutionMode:%s", "setParameters", + highResolutionCallbackMode == true? "on":"off"); + + m_setHighResolutionCallbackMode(highResolutionCallbackMode); +} + +void ExynosCamera3Parameters::m_setHighResolutionCallbackMode(bool enable) +{ + m_cameraInfo.highResolutionCallbackMode = enable; +} + +bool ExynosCamera3Parameters::getHighResolutionCallbackMode(void) +{ + return m_cameraInfo.highResolutionCallbackMode; +} + +status_t ExynosCamera3Parameters::m_adjustPreviewFormat(__unused int &previewFormat, int &hwPreviewFormat) +{ +#if 1 + /* HACK : V4L2_PIX_FMT_NV21M is set to FIMC-IS * + * and Gralloc. V4L2_PIX_FMT_YVU420 is just * + * color format for callback frame. 
*/ + hwPreviewFormat = V4L2_PIX_FMT_NV21M; +#else + if (previewFormat == V4L2_PIX_FMT_NV21) + hwPreviewFormat = V4L2_PIX_FMT_NV21M; + else if (previewFormat == V4L2_PIX_FMT_YVU420) + hwPreviewFormat = V4L2_PIX_FMT_YVU420M; +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::checkYuvFormat(const int format, const int outputPortId) +{ + status_t ret = NO_ERROR; + int curYuvFormat = -1; + int newYuvFormat = -1; + + newYuvFormat = HAL_PIXEL_FORMAT_2_V4L2_PIX(format); + curYuvFormat = getYuvFormat(outputPortId); + + if (newYuvFormat != curYuvFormat) { + char curFormatName[V4L2_FOURCC_LENGTH] = {}; + char newFormatName[V4L2_FOURCC_LENGTH] = {}; + m_getV4l2Name(curFormatName, V4L2_FOURCC_LENGTH, curYuvFormat); + m_getV4l2Name(newFormatName, V4L2_FOURCC_LENGTH, newYuvFormat); + CLOGI("INFO(%s[%d]):curYuvFormat %s newYuvFormat %s outputPortId %d", + __FUNCTION__, __LINE__, + curFormatName, newFormatName, outputPortId); + m_setYuvFormat(newYuvFormat, outputPortId); + } + + return ret; +} + +void ExynosCamera3Parameters::m_setPreviewSize(int w, int h) +{ + m_cameraInfo.previewW = w; + m_cameraInfo.previewH = h; +} + +void ExynosCamera3Parameters::getPreviewSize(int *w, int *h) +{ + *w = m_cameraInfo.previewW; + *h = m_cameraInfo.previewH; +} + +void ExynosCamera3Parameters::m_setYuvSize(const int width, const int height, const int index) +{ + m_cameraInfo.yuvWidth[index] = width; + m_cameraInfo.yuvHeight[index] = height; +} + +void ExynosCamera3Parameters::getYuvSize(int *width, int *height, const int index) +{ + *width = m_cameraInfo.yuvWidth[index]; + *height = m_cameraInfo.yuvHeight[index]; +} + +void ExynosCamera3Parameters::getMaxSensorSize(int *w, int *h) +{ + *w = m_staticInfo->maxSensorW; + *h = m_staticInfo->maxSensorH; +} + +void ExynosCamera3Parameters::getSensorMargin(int *w, int *h) +{ + *w = m_staticInfo->sensorMarginW; + *h = m_staticInfo->sensorMarginH; +} + +void ExynosCamera3Parameters::m_adjustSensorMargin(int *sensorMarginW, int *sensorMarginH) +{ + float bnsRatio = 1.00f; + float binningRatio = 1.00f; + float sensorMarginRatio = 1.00f; + + bnsRatio = (float)getBnsScaleRatio() / 1000.00f; + binningRatio = (float)getBinningScaleRatio() / 1000.00f; + sensorMarginRatio = bnsRatio * binningRatio; + if ((int)sensorMarginRatio < 1) { + CLOGW2("Invalid sensor margin ratio(%f), bnsRatio(%f), binningRatio(%f)", + sensorMarginRatio, bnsRatio, binningRatio); + sensorMarginRatio = 1.00f; + } + + if (getHalVersion() != IS_HAL_VER_3_2) { + *sensorMarginW = ALIGN_DOWN((int)(*sensorMarginW / sensorMarginRatio), 2); + *sensorMarginH = ALIGN_DOWN((int)(*sensorMarginH / sensorMarginRatio), 2); + } else { + int leftMargin = 0, rightMargin = 0, topMargin = 0, bottomMargin = 0; + + rightMargin = ALIGN_DOWN((int)(m_staticInfo->sensorMarginBase[WIDTH_BASE] / sensorMarginRatio), 2); + leftMargin = m_staticInfo->sensorMarginBase[LEFT_BASE] + rightMargin; + bottomMargin = ALIGN_DOWN((int)(m_staticInfo->sensorMarginBase[HEIGHT_BASE] / sensorMarginRatio), 2); + topMargin = m_staticInfo->sensorMarginBase[TOP_BASE] + bottomMargin; + + *sensorMarginW = leftMargin + rightMargin; + *sensorMarginH = topMargin + bottomMargin; + } +} + +void ExynosCamera3Parameters::getMaxPreviewSize(int *w, int *h) +{ + *w = m_staticInfo->maxPreviewW; + *h = m_staticInfo->maxPreviewH; +} + +int ExynosCamera3Parameters::getBayerFormat(int pipeId) +{ + int bayerFormat = V4L2_PIX_FMT_SBGGR16; + + switch (pipeId) { + case PIPE_FLITE: + case PIPE_3AA_REPROCESSING: + bayerFormat = V4L2_PIX_FMT_SBGGR16; + break; + case 
PIPE_3AA: + case PIPE_FLITE_REPROCESSING: + bayerFormat = V4L2_PIX_FMT_SBGGR12; + break; + case PIPE_3AC: + case PIPE_3AP: + case PIPE_ISP: + case PIPE_3AC_REPROCESSING: + case PIPE_3AP_REPROCESSING: + case PIPE_ISP_REPROCESSING: + bayerFormat = V4L2_PIX_FMT_SBGGR10; + break; + default: + CLOGW("WRN(%s[%d]):Invalid pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + break; + } + +#ifndef CAMERA_PACKED_BAYER_ENABLE + bayerFormat = V4L2_PIX_FMT_SBGGR16; +#endif + + return bayerFormat; +} + +void ExynosCamera3Parameters::m_setPreviewFormat(int fmt) +{ + m_cameraInfo.previewFormat = fmt; +} + +void ExynosCamera3Parameters::m_setYuvFormat(const int format, const int index) +{ + m_cameraInfo.yuvFormat[index] = format; +} + +int ExynosCamera3Parameters::getPreviewFormat(void) +{ + return m_cameraInfo.previewFormat; +} + +int ExynosCamera3Parameters::getYuvFormat(const int index) +{ + return m_cameraInfo.yuvFormat[index]; +} + +void ExynosCamera3Parameters::m_setHwPreviewSize(int w, int h) +{ + m_cameraInfo.hwPreviewW = w; + m_cameraInfo.hwPreviewH = h; +} + +void ExynosCamera3Parameters::getHwPreviewSize(int *w, int *h) +{ + if (m_cameraInfo.scalableSensorMode != true) { + *w = m_cameraInfo.hwPreviewW; + *h = m_cameraInfo.hwPreviewH; + } else { + int newSensorW = 0; + int newSensorH = 0; + m_getScalableSensorSize(&newSensorW, &newSensorH); + + *w = newSensorW; + *h = newSensorH; +/* + * Should not use those value + * *w = 1024; + * *h = 768; + * *w = 1440; + * *h = 1080; + */ + *w = m_cameraInfo.hwPreviewW; + *h = m_cameraInfo.hwPreviewH; + } +} + +void ExynosCamera3Parameters::setHwPreviewStride(int stride) +{ + m_cameraInfo.previewStride = stride; +} + +int ExynosCamera3Parameters::getHwPreviewStride(void) +{ + return m_cameraInfo.previewStride; +} + +void ExynosCamera3Parameters::m_setHwPreviewFormat(int fmt) +{ + m_cameraInfo.hwPreviewFormat = fmt; +} + +int ExynosCamera3Parameters::getHwPreviewFormat(void) +{ + return m_cameraInfo.hwPreviewFormat; +} + +void ExynosCamera3Parameters::updateHwSensorSize(void) +{ + int curHwSensorW = 0; + int curHwSensorH = 0; + int newHwSensorW = 0; + int newHwSensorH = 0; + int maxHwSensorW = 0; + int maxHwSensorH = 0; + + getHwSensorSize(&newHwSensorW, &newHwSensorH); + getMaxSensorSize(&maxHwSensorW, &maxHwSensorH); + + if (newHwSensorW > maxHwSensorW || newHwSensorH > maxHwSensorH) { + CLOGE2("Invalid sensor size (maxSize(%d/%d) size(%d/%d)", + maxHwSensorW, maxHwSensorH, newHwSensorW, newHwSensorH); + } + + if (getHighSpeedRecording() == true) { +#if 0 + int sizeList[SIZE_LUT_INDEX_END]; + m_getHighSpeedRecordingSize(sizeList); + newHwSensorW = sizeList[SENSOR_W]; + newHwSensorH = sizeList[SENSOR_H]; +#endif + } else if (getScalableSensorMode() == true) { + m_getScalableSensorSize(&newHwSensorW, &newHwSensorH); + } else { + getBnsSize(&newHwSensorW, &newHwSensorH); + } + + getHwSensorSize(&curHwSensorW, &curHwSensorH); + CLOGI2("curHwSensor size(%dx%d) newHwSensor size(%dx%d)", curHwSensorW, curHwSensorH, newHwSensorW, newHwSensorH); + if (curHwSensorW != newHwSensorW || curHwSensorH != newHwSensorH) { + m_setHwSensorSize(newHwSensorW, newHwSensorH); + CLOGI2("newHwSensor size(%dx%d)", newHwSensorW, newHwSensorH); + } +} + +void ExynosCamera3Parameters::m_setHwSensorSize(int w, int h) +{ + m_cameraInfo.hwSensorW = w; + m_cameraInfo.hwSensorH = h; +} + +void ExynosCamera3Parameters::getHwSensorSize(int *w, int *h) +{ + CLOGV2("getScalableSensorMode()(%d)", getScalableSensorMode()); + int width = 0; + int height = 0; + int sizeList[SIZE_LUT_INDEX_END]; + + if 
(m_cameraInfo.scalableSensorMode != true) { + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == true + && m_staticInfo->previewSizeLut != NULL + && m_cameraInfo.previewSizeRatioId < m_staticInfo->previewSizeLutMax + && m_getPreviewSizeList(sizeList) == NO_ERROR) { + + width = sizeList[SENSOR_W]; + height = sizeList[SENSOR_H]; + + } else { + width = m_cameraInfo.hwSensorW; + height = m_cameraInfo.hwSensorH; + } + } else { + m_getScalableSensorSize(&width, &height); + } + + *w = width; + *h = height; +} + +void ExynosCamera3Parameters::updateBnsScaleRatio(void) +{ + int ret = 0; + uint32_t bnsRatio = DEFAULT_BNS_RATIO * 1000; + int curPreviewW = 0, curPreviewH = 0; + + if (m_staticInfo->bnsSupport == false) + return; + + getPreviewSize(&curPreviewW, &curPreviewH); + if (getDualMode() == true) { +#if defined(USE_BNS_DUAL_PREVIEW) || defined(USE_BNS_DUAL_RECORDING) + bnsRatio = 2000; +#endif + } else if ((getRecordingHint() == true) +/* || (curPreviewW == curPreviewH)*/) { +#ifdef USE_BNS_RECORDING + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + + if ((getHighSpeedRecording() == true)) { + bnsRatio = 1000; + } else if (videoW == 1920 && videoH == 1080) { + bnsRatio = 1500; + CLOGI2("bnsRatio(%d), videoSize (%d, %d)", bnsRatio, videoW, videoH); + } else +#endif + { + bnsRatio = 1000; + } + if (bnsRatio != getBnsScaleRatio()) { + CLOGI2("restart set due to changing bnsRatio(%d/%d)", bnsRatio, getBnsScaleRatio()); + m_setRestartPreview(true); + } + } +#ifdef USE_BINNING_MODE + else if (getBinningMode() == true) { + bnsRatio = 1000; + } +#endif + + if (bnsRatio != getBnsScaleRatio()) + ret = m_setBnsScaleRatio(bnsRatio); + + if (ret < 0) + CLOGE2("Cannot update BNS scale ratio(%d)", bnsRatio); +} + +status_t ExynosCamera3Parameters::m_setBnsScaleRatio(int ratio) +{ +#define MIN_BNS_RATIO 1000 +#define MAX_BNS_RATIO 8000 + + if (m_staticInfo->bnsSupport == false) { + CLOGD2("This camera does not support BNS"); + ratio = MIN_BNS_RATIO; + } + + if (ratio < MIN_BNS_RATIO || ratio > MAX_BNS_RATIO) { + CLOGE2("Out of bound, ratio(%d), min:max(%d:%d)", ratio, MAX_BNS_RATIO, MAX_BNS_RATIO); + return BAD_VALUE; + } + + CLOGD2("update BNS ratio(%d -> %d)", m_cameraInfo.bnsScaleRatio, ratio); + + m_cameraInfo.bnsScaleRatio = ratio; + + /* When BNS scale ratio is changed, reset BNS size to MAX sensor size */ + getMaxSensorSize(&m_cameraInfo.bnsW, &m_cameraInfo.bnsH); + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::m_addHiddenResolutionList(String8 &string8Buf, + __unused struct ExynosSensorInfoBase *sensorInfo, + int w, int h, enum MODE mode, int cameraId) + +{ + status_t ret = NO_ERROR; + bool found = false; + + int (*sizeList)[SIZE_OF_RESOLUTION]; + int listSize = 0; + + switch (mode) { + case MODE_PREVIEW: + if (cameraId == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPreviewList; + listSize = m_staticInfo->hiddenRearPreviewListMax; + } else { + sizeList = m_staticInfo->hiddenFrontPreviewList; + listSize = m_staticInfo->hiddenFrontPreviewListMax; + } + break; + case MODE_PICTURE: + if (cameraId == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPictureList; + listSize = m_staticInfo->hiddenRearPictureListMax; + } else { + sizeList = m_staticInfo->hiddenFrontPictureList; + listSize = m_staticInfo->hiddenFrontPictureListMax; + } + break; + case MODE_VIDEO: + if (cameraId == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearVideoList; + listSize = m_staticInfo->hiddenRearVideoListMax; + } else { + sizeList = 
m_staticInfo->hiddenFrontVideoList; + listSize = m_staticInfo->hiddenFrontVideoListMax; + } + break; + default: + CLOGE2("invalid mode(%d)", mode); + return BAD_VALUE; + break; + } + + for (int i = 0; i < listSize; i++) { + if (w == sizeList[i][0] && h == sizeList[i][1]) { + found = true; + break; + } + } + + if (found == true) { + String8 uhdTempStr; + char strBuf[32]; + + snprintf(strBuf, sizeof(strBuf), "%dx%d,", w, h); + + /* append on head of string8Buf */ + uhdTempStr.setTo(strBuf); + uhdTempStr.append(string8Buf); + string8Buf.setTo(uhdTempStr); + } else { + ret = INVALID_OPERATION; + } + + return ret; +} + +uint32_t ExynosCamera3Parameters::getBnsScaleRatio(void) +{ + return m_cameraInfo.bnsScaleRatio; +} + +void ExynosCamera3Parameters::setBnsSize(int w, int h) +{ + m_cameraInfo.bnsW = w; + m_cameraInfo.bnsH = h; + + updateHwSensorSize(); + +#if 0 + int zoom = getZoomLevel(); + int previewW = 0, previewH = 0; + getPreviewSize(&previewW, &previewH); + if (m_setParamCropRegion(zoom, w, h, previewW, previewH) != NO_ERROR) + CLOGE2("m_setParamCropRegion() fail"); +#else + ExynosRect srcRect, dstRect; + getPreviewBayerCropSize(&srcRect, &dstRect); +#endif +} + +void ExynosCamera3Parameters::getBnsSize(int *w, int *h) +{ + *w = m_cameraInfo.bnsW; + *h = m_cameraInfo.bnsH; +} + +void ExynosCamera3Parameters::updateBinningScaleRatio(void) +{ + int ret = 0; + uint32_t binningRatio = DEFAULT_BINNING_RATIO * 1000; + + if ((getRecordingHint() == true) + && (getHighSpeedRecording() == true)) { + int fpsmode = 0; + fpsmode = getFastFpsMode(); + switch (fpsmode) { + case 1: /* 60 fps */ + binningRatio = 2000; + break; + case 2: /* 120 fps */ + case 3: /* 240 fps */ + binningRatio = 4000; + break; + default: + CLOGE2("Invalide FastFpsMode(%d)", fpsmode); + } + } +#ifdef USE_BINNING_MODE + else if (getBinningMode() == true) { + binningRatio = 2000; + } +#endif + + if (binningRatio != getBinningScaleRatio()) { + CLOGI2("New sensor binning ratio(%d)", binningRatio); + ret = m_setBinningScaleRatio(binningRatio); + } + + if (ret < 0) + CLOGE2("Cannot update BNS scale ratio(%d)", binningRatio); +} + +status_t ExynosCamera3Parameters::m_setBinningScaleRatio(int ratio) +{ +#define MIN_BINNING_RATIO 1000 +#define MAX_BINNING_RATIO 6000 + + if (ratio < MIN_BINNING_RATIO || ratio > MAX_BINNING_RATIO) { + CLOGE2("Out of bound, ratio(%d), min:max(%d:%d)", ratio, MAX_BINNING_RATIO, MAX_BINNING_RATIO); + return BAD_VALUE; + } + + m_cameraInfo.binningScaleRatio = ratio; + + return NO_ERROR; +} + +uint32_t ExynosCamera3Parameters::getBinningScaleRatio(void) +{ + return m_cameraInfo.binningScaleRatio; +} +#if 0 +status_t ExynosCamera3Parameters::checkPictureSize(const CameraParameters& params) +{ + int curPictureW = 0; + int curPictureH = 0; + int newPictureW = 0; + int newPictureH = 0; + int curHwPictureW = 0; + int curHwPictureH = 0; + int newHwPictureW = 0; + int newHwPictureH = 0; + int right_ratio = 177; + + params.getPictureSize(&newPictureW, &newPictureH); + + if (newPictureW < 0 || newPictureH < 0) { + return BAD_VALUE; + } + + if (m_adjustPictureSize(&newPictureW, &newPictureH, &newHwPictureW, &newHwPictureH) != NO_ERROR) { + return BAD_VALUE; + } + + if (m_isSupportedPictureSize(newPictureW, newPictureH) == false) { + int maxHwPictureW =0; + int maxHwPictureH = 0; + + CLOGE2("Invalid picture size(%dx%d)", newPictureW, newPictureH); + + /* prevent wrong size setting */ + getMaxPictureSize(&maxHwPictureW, &maxHwPictureH); + m_setPictureSize(maxHwPictureW, maxHwPictureH); + m_setHwPictureSize(maxHwPictureW, 
maxHwPictureH); + m_params.setPictureSize(maxHwPictureW, maxHwPictureH); + CLOGE2("changed picture size to MAX(%dx%d)", maxHwPictureW, maxHwPictureH); + +#ifdef FIXED_SENSOR_SIZE + updateHwSensorSize(); +#endif + return INVALID_OPERATION; + } + CLOGI("INFO(%s):newPicture Size (%dx%d), ratioId(%d)", + "setParameters", newPictureW, newPictureH, m_cameraInfo.pictureSizeRatioId); + + if ((int)(m_staticInfo->maxSensorW * 100 / m_staticInfo->maxSensorH) == right_ratio) { + setHorizontalViewAngle(newPictureW, newPictureH); + } + m_params.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle()); + + getPictureSize(&curPictureW, &curPictureH); + getHwPictureSize(&curHwPictureW, &curHwPictureH); + + if (curPictureW != newPictureW || curPictureH != newPictureH || + curHwPictureW != newHwPictureW || curHwPictureH != newHwPictureH) { + + CLOGI("INFO(%s[%d]): Picture size changed: cur(%dx%d) -> new(%dx%d)", + "setParameters", __LINE__, curPictureW, curPictureH, newPictureW, newPictureH); + CLOGI("INFO(%s[%d]): HwPicture size changed: cur(%dx%d) -> new(%dx%d)", + "setParameters", __LINE__, curHwPictureW, curHwPictureH, newHwPictureW, newHwPictureH); + + m_setPictureSize(newPictureW, newPictureH); + m_setHwPictureSize(newHwPictureW, newHwPictureH); + m_params.setPictureSize(newPictureW, newPictureH); + +#ifdef FIXED_SENSOR_SIZE + updateHwSensorSize(); +#endif + } + + return NO_ERROR; +} +#else +status_t ExynosCamera3Parameters::checkPictureSize(int newPictureW, int newPictureH) +{ + int curPictureW = 0; + int curPictureH = 0; + int curHwPictureW = 0; + int curHwPictureH = 0; + int newHwPictureW = 0; + int newHwPictureH = 0; + +// params.getPictureSize(&newPictureW, &newPictureH); + + if (newPictureW < 0 || newPictureH < 0) { + return BAD_VALUE; + } + + if (m_adjustPictureSize(&newPictureW, &newPictureH, &newHwPictureW, &newHwPictureH) != NO_ERROR) { + return BAD_VALUE; + } + + if (m_isSupportedPictureSize(newPictureW, newPictureH) == false) { + int maxHwPictureW =0; + int maxHwPictureH = 0; + + CLOGE2("Invalid picture size(%dx%d)", newPictureW, newPictureH); + + /* prevent wrong size setting */ + getMaxPictureSize(&maxHwPictureW, &maxHwPictureH); + m_setPictureSize(maxHwPictureW, maxHwPictureH); + m_setHwPictureSize(maxHwPictureW, maxHwPictureH); + +// m_params.setPictureSize(maxHwPictureW, maxHwPictureH); + + CLOGE2("changed picture size to MAX(%dx%d)", maxHwPictureW, maxHwPictureH); + +#ifdef FIXED_SENSOR_SIZE + updateHwSensorSize(); +#endif + return INVALID_OPERATION; + } + CLOGI("INFO(%s):newPicture Size (%dx%d), ratioId(%d)", + "setParameters", newPictureW, newPictureH, m_cameraInfo.pictureSizeRatioId); + + getPictureSize(&curPictureW, &curPictureH); + getHwPictureSize(&curHwPictureW, &curHwPictureH); + + if (curPictureW != newPictureW || curPictureH != newPictureH || + curHwPictureW != newHwPictureW || curHwPictureH != newHwPictureH) { + + CLOGI("INFO(%s[%d]): Picture size changed: cur(%dx%d) -> new(%dx%d)", + "setParameters", __LINE__, curPictureW, curPictureH, newPictureW, newPictureH); + CLOGI("INFO(%s[%d]): HwPicture size changed: cur(%dx%d) -> new(%dx%d)", + "setParameters", __LINE__, curHwPictureW, curHwPictureH, newHwPictureW, newHwPictureH); + + m_setPictureSize(newPictureW, newPictureH); + m_setHwPictureSize(newHwPictureW, newHwPictureH); + +// m_params.setPictureSize(newPictureW, newPictureH); + +#ifdef FIXED_SENSOR_SIZE + updateHwSensorSize(); +#endif + } + + return NO_ERROR; +} +#endif + +status_t ExynosCamera3Parameters::m_adjustPictureSize(int *newPictureW, int 
*newPictureH, + int *newHwPictureW, int *newHwPictureH) +{ + int ret = 0; + int newX = 0, newY = 0, newW = 0, newH = 0; + float zoomRatio = getZoomRatio(0) / 1000; + + if ((getRecordingHint() == true && getHighSpeedRecording() == true) +#ifdef USE_BINNING_MODE + || getBinningMode() +#endif + ) + { + int sizeList[SIZE_LUT_INDEX_END]; + if (m_getPreviewSizeList(sizeList) == NO_ERROR) { + *newPictureW = sizeList[TARGET_W]; + *newPictureH = sizeList[TARGET_H]; + *newHwPictureW = *newPictureW; + *newHwPictureH = *newPictureH; + + return NO_ERROR; + } else { + CLOGE2("m_getPreviewSizeList() fail"); + return BAD_VALUE; + } + } + + getMaxPictureSize(newHwPictureW, newHwPictureH); + + if (getCameraId() == CAMERA_ID_BACK) { + ret = getCropRectAlign(*newHwPictureW, *newHwPictureH, + *newPictureW, *newPictureH, + &newX, &newY, &newW, &newH, + CAMERA_ISP_ALIGN, 2, 0, zoomRatio); + if (ret < 0) { + CLOGE2("getCropRectAlign(%d, %d, %d, %d) fail", *newHwPictureW, *newHwPictureH, *newPictureW, *newPictureH); + return BAD_VALUE; + } + *newHwPictureW = newW; + *newHwPictureH = newH; + +#ifdef FIXED_SENSOR_SIZE + /* + * sensor crop size: + * sensor crop is only used at 16:9 aspect ratio in picture size. + */ + if (getSamsungCamera() == true) { + if (((float)*newPictureW / (float)*newPictureH) == ((float)16 / (float)9)) { + CLOGD2("Use sensor crop (ratio: %f)", ((float)*newPictureW / (float)*newPictureH)); + m_setHwSensorSize(newW, newH); + } + } +#endif + } + + return NO_ERROR; +} + +bool ExynosCamera3Parameters::m_isSupportedPictureSize(const int width, + const int height) +{ + int maxWidth, maxHeight = 0; + int (*sizeList)[SIZE_OF_RESOLUTION]; + + getMaxPictureSize(&maxWidth, &maxHeight); + + if (maxWidth < width || maxHeight < height) { + CLOGE2("invalid picture Size(maxSize(%d/%d) size(%d/%d)", + maxWidth, maxHeight, width, height); + return false; + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->rearPictureList; + for (int i = 0; i < m_staticInfo->rearPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->frontPictureList; + for (int i = 0; i < m_staticInfo->frontPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + if (getCameraId() == CAMERA_ID_BACK) { + sizeList = m_staticInfo->hiddenRearPictureList; + for (int i = 0; i < m_staticInfo->hiddenRearPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } else { + sizeList = m_staticInfo->hiddenFrontPictureList; + for (int i = 0; i < m_staticInfo->hiddenFrontPictureListMax; i++) { + if (sizeList[i][0] > maxWidth || sizeList[i][1] > maxHeight) + continue; + if (sizeList[i][0] == width && sizeList[i][1] == height) { + m_cameraInfo.pictureSizeRatioId = sizeList[i][2]; + return true; + } + } + } + + CLOGE2("Invalid picture size(%dx%d)", width, height); + + return false; +} + +void ExynosCamera3Parameters::m_setPictureSize(int w, int h) +{ + m_cameraInfo.pictureW = w; + m_cameraInfo.pictureH = h; +} + +void ExynosCamera3Parameters::getPictureSize(int *w, int 
*h) +{ + *w = m_cameraInfo.pictureW; + *h = m_cameraInfo.pictureH; +} + +void ExynosCamera3Parameters::getMaxPictureSize(int *w, int *h) +{ + *w = m_staticInfo->maxPictureW; + *h = m_staticInfo->maxPictureH; +} + +void ExynosCamera3Parameters::m_setHwPictureSize(int w, int h) +{ + m_cameraInfo.hwPictureW = w; + m_cameraInfo.hwPictureH = h; +} + +void ExynosCamera3Parameters::getHwPictureSize(int *w, int *h) +{ + *w = m_cameraInfo.hwPictureW; + *h = m_cameraInfo.hwPictureH; +} + +void ExynosCamera3Parameters::m_setHwBayerCropRegion(int w, int h, int x, int y) +{ + Mutex::Autolock lock(m_parameterLock); + + m_cameraInfo.hwBayerCropW = w; + m_cameraInfo.hwBayerCropH = h; + m_cameraInfo.hwBayerCropX = x; + m_cameraInfo.hwBayerCropY = y; +} + +void ExynosCamera3Parameters::getHwBayerCropRegion(int *w, int *h, int *x, int *y) +{ + Mutex::Autolock lock(m_parameterLock); + + *w = m_cameraInfo.hwBayerCropW; + *h = m_cameraInfo.hwBayerCropH; + *x = m_cameraInfo.hwBayerCropX; + *y = m_cameraInfo.hwBayerCropY; +} + +void ExynosCamera3Parameters::getHwVraInputSize(int *w, int *h) +{ +#if defined(MAX_VRA_INPUT_SIZE_WIDTH) && defined(MAX_VRA_INPUT_SIZE_HEIGHT) + int vraWidth = MAX_VRA_INPUT_WIDTH; + int vraHeight = MAX_VRA_INPUT_HEIGHT; +#else + int vraWidth = 640; + int vraHeight = 480; +#endif + float vraRatio = ROUND_OFF(((float)vraWidth / (float)vraHeight), 2); + + switch (m_cameraInfo.previewSizeRatioId) { + case SIZE_RATIO_16_9: + *w = vraWidth; + *h = ALIGN_UP((vraWidth / 16) * 9, 2); + break; + case SIZE_RATIO_4_3: + *w = ALIGN_UP((vraHeight / 3) * 4, CAMERA_16PX_ALIGN); + *h = vraHeight; + break; + case SIZE_RATIO_1_1: + *w = vraHeight; + *h = vraHeight; + break; + case SIZE_RATIO_3_2: + if (vraRatio == 1.33f) { /* 4:3 */ + *w = vraWidth; + *h = ALIGN_UP((vraWidth / 3) * 2, 2); + } else if (vraRatio == 1.77f) { /* 16:9 */ + *w = ALIGN_UP((vraHeight / 2) * 3, CAMERA_16PX_ALIGN); + *h = vraHeight; + } else { + *w = vraWidth; + *h = vraHeight; + } + break; + case SIZE_RATIO_5_4: + *w = ALIGN_UP((vraHeight / 4) * 5, CAMERA_16PX_ALIGN); + *h = vraHeight; + break; + case SIZE_RATIO_5_3: + if (vraRatio == 1.33f) { /* 4:3 */ + *w = vraWidth; + *h = ALIGN_UP((vraWidth / 5) * 3, 2); + } else if (vraRatio == 1.77f) { /* 16:9 */ + *w = ALIGN_UP((vraHeight / 3) * 5, CAMERA_16PX_ALIGN); + *h = vraHeight; + } else { + *w = vraWidth; + *h = vraHeight; + } + break; + case SIZE_RATIO_11_9: + *w = ALIGN_UP((vraHeight / 9) * 11, CAMERA_16PX_ALIGN); + *h = vraHeight; + break; + default: + CLOGW2("Invalid size ratio(%d)", m_cameraInfo.previewSizeRatioId); + + *w = vraWidth; + *h = vraHeight; + break; + } +} + +int ExynosCamera3Parameters::getHwVraInputFormat(void) +{ +#if defined(CAMERA_VRA_INPUT_FORMAT) + return CAMERA_VRA_INPUT_FORMAT; +#else + return V4L2_PIX_FMT_NV21; +#endif +} + +void ExynosCamera3Parameters::m_setHwPictureFormat(int fmt) +{ + m_cameraInfo.hwPictureFormat = fmt; +} + +int ExynosCamera3Parameters::getHwPictureFormat(void) +{ + CLOGE("INFO(%s):m_cameraInfo.pictureFormat(%d)", __FUNCTION__, m_cameraInfo.hwPictureFormat); + + return m_cameraInfo.hwPictureFormat; +} +status_t ExynosCamera3Parameters::checkJpegQuality(int quality) +{ + int curJpegQuality = -1; + if (quality < 0 || quality > 100) { + CLOGE("ERR(%s[%d]):Invalid JPEG quality %d.", + __FUNCTION__, __LINE__, quality); + return BAD_VALUE; + } + curJpegQuality = getJpegQuality(); + if (curJpegQuality != quality) { + CLOGI("INFO(%s[%d]):curJpegQuality %d newJpegQuality %d", + __FUNCTION__, __LINE__, curJpegQuality, quality); + 
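/*
 * Illustrative sketch, not from the imported SGSWPlatform source: checkJpegQuality(),
 * checkThumbnailQuality() and checkThumbnailSize() all use the same validate-then-
 * update-on-change pattern. The helper name validatePercent below is hypothetical and
 * only restates that pattern for the 0..100 quality values:
 *
 *   // Returns true when 'requested' is a legal percentage and differs from 'current',
 *   // i.e. when the cached value should be rewritten; sets *outOfRange otherwise.
 *   static bool validatePercent(int requested, int current, bool *outOfRange)
 *   {
 *       *outOfRange = (requested < 0 || requested > 100);   // quality is a percentage
 *       if (*outOfRange)
 *           return false;                                    // caller returns BAD_VALUE
 *       return requested != current;                         // update only on change
 *   }
 */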
m_setJpegQuality(quality); + } + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_setJpegQuality(int quality) +{ + m_cameraInfo.jpegQuality = quality; +} + +int ExynosCamera3Parameters::getJpegQuality(void) +{ + return m_cameraInfo.jpegQuality; +} +status_t ExynosCamera3Parameters::checkThumbnailSize(int thumbnailW, int thumbnailH) +{ + int curThumbnailW = -1, curThumbnailH = -1; + if (thumbnailW < 0 || thumbnailH < 0 + || thumbnailW > m_staticInfo->maxThumbnailW + || thumbnailH > m_staticInfo->maxThumbnailH) { + CLOGE("ERR(%s[%d]):Invalide thumbnail size %dx%d", + __FUNCTION__, __LINE__, thumbnailW, thumbnailH); + return BAD_VALUE; + } + getThumbnailSize(&curThumbnailW, &curThumbnailH); + if (curThumbnailW != thumbnailW || curThumbnailH != thumbnailH) { + CLOGI("INFO(%s[%d]):curThumbnailSize %dx%d newThumbnailSize %dx%d", + __FUNCTION__, __LINE__, + curThumbnailW, curThumbnailH, thumbnailW, thumbnailH); + m_setThumbnailSize(thumbnailW, thumbnailH); + } + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_setThumbnailSize(int w, int h) +{ + m_cameraInfo.thumbnailW = w; + m_cameraInfo.thumbnailH = h; +} + +void ExynosCamera3Parameters::getThumbnailSize(int *w, int *h) +{ + *w = m_cameraInfo.thumbnailW; + *h = m_cameraInfo.thumbnailH; +} + +void ExynosCamera3Parameters::getMaxThumbnailSize(int *w, int *h) +{ + *w = m_staticInfo->maxThumbnailW; + *h = m_staticInfo->maxThumbnailH; +} +status_t ExynosCamera3Parameters::checkThumbnailQuality(int quality) +{ + int curThumbnailQuality = -1; + if (quality < 0 || quality > 100) { + CLOGE("ERR(%s[%d]):Invalid thumbnail quality %d", + __FUNCTION__, __LINE__, quality); + return BAD_VALUE; + } + curThumbnailQuality = getThumbnailQuality(); + if (curThumbnailQuality != quality) { + CLOGI("INFO(%s[%d]):curThumbnailQuality %d newThumbnailQuality %d", + __FUNCTION__, __LINE__, curThumbnailQuality, quality); + m_setThumbnailQuality(quality); + } + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_setThumbnailQuality(int quality) +{ + m_cameraInfo.thumbnailQuality = quality; +} + +int ExynosCamera3Parameters::getThumbnailQuality(void) +{ + return m_cameraInfo.thumbnailQuality; +} + +void ExynosCamera3Parameters::m_set3dnrMode(bool toggle) +{ + m_cameraInfo.is3dnrMode = toggle; +} + +bool ExynosCamera3Parameters::get3dnrMode(void) +{ + return m_cameraInfo.is3dnrMode; +} + +void ExynosCamera3Parameters::m_setDrcMode(bool toggle) +{ + m_cameraInfo.isDrcMode = toggle; + if (setDrcEnable(toggle) < 0) { + CLOGE2("set DRC fail, toggle(%d)", toggle); + } +} + +bool ExynosCamera3Parameters::getDrcMode(void) +{ + return m_cameraInfo.isDrcMode; +} + +void ExynosCamera3Parameters::m_setOdcMode(bool toggle) +{ + m_cameraInfo.isOdcMode = toggle; +} + +bool ExynosCamera3Parameters::getOdcMode(void) +{ + return m_cameraInfo.isOdcMode; +} + +bool ExynosCamera3Parameters::getTpuEnabledMode(void) +{ + if (getHWVdisMode() == true) + return true; + + if (get3dnrMode() == true) + return true; + + if (getOdcMode() == true) + return true; + + return false; +} + +status_t ExynosCamera3Parameters::setZoomLevel(int zoom) +{ + int srcW = 0; + int srcH = 0; + int dstW = 0; + int dstH = 0; + + m_cameraInfo.zoom = zoom; + + getHwSensorSize(&srcW, &srcH); + getHwPreviewSize(&dstW, &dstH); + +#if 0 + if (m_setParamCropRegion(zoom, srcW, srcH, dstW, dstH) != NO_ERROR) { + return BAD_VALUE; + } +#else + ExynosRect srcRect, dstRect; + getPreviewBayerCropSize(&srcRect, &dstRect); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::setCropRegion(int x, int y, int 
w, int h) +{ + status_t ret = NO_ERROR; + + ret = setMetaCtlCropRegion(&m_metadata, x, y, w, h); + if (ret != NO_ERROR) { + CLOGE2("Failed to setMetaCtlCropRegion(%d, %d, %d, %d)", x, y, w, h); + } + + return ret; +} + +void ExynosCamera3Parameters::m_getCropRegion(int *x, int *y, int *w, int *h) +{ + getMetaCtlCropRegion(&m_metadata, x, y, w, h); +} + +status_t ExynosCamera3Parameters::m_setParamCropRegion( + int zoom, + int srcW, int srcH, + int dstW, int dstH) +{ + int newX = 0, newY = 0, newW = 0, newH = 0; + float zoomRatio = getZoomRatio(zoom) / 1000; + + if (getCropRectAlign(srcW, srcH, + dstW, dstH, + &newX, &newY, + &newW, &newH, + CAMERA_MAGIC_ALIGN, 2, + zoom, zoomRatio) != NO_ERROR) { + CLOGE2("getCropRectAlign(%d, %d, %d, %d) fail", srcW, srcH, dstW, dstH); + return BAD_VALUE; + } + + newX = ALIGN_UP(newX, 2); + newY = ALIGN_UP(newY, 2); + newW = srcW - (newX * 2); + newH = srcH - (newY * 2); + + CLOGI2("size0(%d, %d, %d, %d)", srcW, srcH, dstW, dstH); + CLOGI2("size(%d, %d, %d, %d), level(%d)", newX, newY, newW, newH, zoom); + + m_setHwBayerCropRegion(dstW, dstH, newX, newY); + + return NO_ERROR; +} + +int ExynosCamera3Parameters::getZoomLevel(void) +{ + return m_cameraInfo.zoom; +} + +void ExynosCamera3Parameters::m_setRotation(int rotation) +{ + m_cameraInfo.rotation = rotation; +} + +int ExynosCamera3Parameters::getRotation(void) +{ + return m_cameraInfo.rotation; +} + +void ExynosCamera3Parameters::m_setAutoExposureLock(bool lock) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + m_cameraInfo.autoExposureLock = lock; + setMetaCtlAeLock(&m_metadata, lock); + } +} + +bool ExynosCamera3Parameters::getAutoExposureLock(void) +{ + return m_cameraInfo.autoExposureLock; +} + +void ExynosCamera3Parameters::m_adjustAeMode(enum aa_aemode curAeMode, enum aa_aemode *newAeMode) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + int curMeteringMode = getMeteringMode(); + if (curAeMode == AA_AEMODE_OFF) { + switch(curMeteringMode){ + case METERING_MODE_AVERAGE: + *newAeMode = AA_AEMODE_AVERAGE; + break; + case METERING_MODE_CENTER: + *newAeMode = AA_AEMODE_CENTER; + break; + case METERING_MODE_MATRIX: + *newAeMode = AA_AEMODE_MATRIX; + break; + case METERING_MODE_SPOT: + *newAeMode = AA_AEMODE_SPOT; + break; + default: + *newAeMode = curAeMode; + break; + } + } + } +} + +/* TODO: Who explane this offset value? 
*/ +#define FW_CUSTOM_OFFSET (1) +/* F/W's middle value is 5, and step is -4, -3, -2, -1, 0, 1, 2, 3, 4 */ +void ExynosCamera3Parameters::m_setExposureCompensation(int32_t value) +{ +#if defined(USE_SUBDIVIDED_EV) + setMetaCtlExposureCompensation(&m_metadata, value); + setMetaCtlExposureCompensationStep(&m_metadata, m_staticInfo->exposureCompensationStep); +#else + setMetaCtlExposureCompensation(&m_metadata, value + IS_EXPOSURE_DEFAULT + FW_CUSTOM_OFFSET); +#endif +} + +int32_t ExynosCamera3Parameters::getExposureCompensation(void) +{ + int32_t expCompensation; + getMetaCtlExposureCompensation(&m_metadata, &expCompensation); +#if defined(USE_SUBDIVIDED_EV) + return expCompensation; +#else + return expCompensation - IS_EXPOSURE_DEFAULT - FW_CUSTOM_OFFSET; +#endif +} + +void ExynosCamera3Parameters::m_setMeteringAreas(uint32_t num, ExynosRect *rects, int *weights) +{ + ExynosRect2 *rect2s = new ExynosRect2[num]; + + for (uint32_t i = 0; i < num; i++) + convertingRectToRect2(&rects[i], &rect2s[i]); + + m_setMeteringAreas(num, rect2s, weights); + + delete [] rect2s; +} + +void ExynosCamera3Parameters::getMeteringAreas(__unused ExynosRect *rects) +{ + /* TODO */ +} + +void ExynosCamera3Parameters::getMeteringAreas(__unused ExynosRect2 *rect2s) +{ + /* TODO */ +} + +void ExynosCamera3Parameters::m_setMeteringMode(int meteringMode) +{ + uint32_t x = 0; + uint32_t y = 0; + uint32_t w = 0; + uint32_t h = 0; + uint32_t weight = 0; + int hwSensorW = 0; + int hwSensorH = 0; + enum aa_aemode aeMode; + + if (getAutoExposureLock() == true) { + CLOGD2("autoExposure is Locked"); + return; + } + + m_cameraInfo.meteringMode = meteringMode; + + getHwSensorSize(&hwSensorW, &hwSensorH); + + switch (meteringMode) { + case METERING_MODE_AVERAGE: + aeMode = AA_AEMODE_AVERAGE; + x = 0; + y = 0; + w = hwSensorW; + h = hwSensorH; + weight = 1000; + break; + case METERING_MODE_MATRIX: + aeMode = AA_AEMODE_MATRIX; + x = 0; + y = 0; + w = hwSensorW; + h = hwSensorH; + weight = 1000; + break; + case METERING_MODE_SPOT: + /* In spot mode, default region setting is 100x100 rectangle on center */ + aeMode = AA_AEMODE_SPOT; + x = hwSensorW / 2 - 50; + y = hwSensorH / 2 - 50; + w = hwSensorW / 2 + 50; + h = hwSensorH / 2 + 50; + weight = 50; + break; +#ifdef TOUCH_AE + case METERING_MODE_MATRIX_TOUCH: + aeMode = AA_AEMODE_MATRIX_TOUCH; + break; + case METERING_MODE_SPOT_TOUCH: + aeMode = AA_AEMODE_SPOT_TOUCH; + break; + case METERING_MODE_CENTER_TOUCH: + aeMode = AA_AEMODE_CENTER_TOUCH; + break; + case METERING_MODE_AVERAGE_TOUCH: + aeMode = AA_AEMODE_AVERAGE_TOUCH; + break; +#endif + case METERING_MODE_CENTER: + default: + aeMode = AA_AEMODE_CENTER; + x = 0; + y = 0; + w = 0; + h = 0; + weight = 1000; + break; + } + + setMetaCtlAeMode(&m_metadata, aeMode); + + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + m_flashMgr->setFlashExposure(aeMode); +} + +int ExynosCamera3Parameters::getMeteringMode(void) +{ + return m_cameraInfo.meteringMode; +} + +int ExynosCamera3Parameters::getSupportedMeteringMode(void) +{ + return m_staticInfo->meteringList; +} + +void ExynosCamera3Parameters::m_setMeteringAreas(uint32_t num, ExynosRect2 *rect2s, int *weights) +{ + uint32_t maxNumMeteringAreas = getMaxNumMeteringAreas(); + + if(getSamsungCamera()) { + maxNumMeteringAreas = 1; + } + + if (maxNumMeteringAreas == 0) { + CLOGV2("maxNumMeteringAreas is 0. 
so, ignored"); + return; + } + + if (maxNumMeteringAreas < num) + num = maxNumMeteringAreas; + + if (getAutoExposureLock() == true) { + CLOGD2("autoExposure is Locked"); + return; + } + + if (num == 1) { +#ifdef CAMERA_GED_FEATURE + int meteringMode = getMeteringMode(); + + if (isRectNull(&rect2s[0]) == true) { + switch (meteringMode) { + case METERING_MODE_SPOT: + /* + * Even if SPOT metering mode, area must set valid values, + * but areas was invalid values, we change mode to CENTER. + */ + m_setMeteringMode(METERING_MODE_CENTER); + m_cameraInfo.isTouchMetering = false; + break; + case METERING_MODE_AVERAGE: + case METERING_MODE_CENTER: + case METERING_MODE_MATRIX: + default: + /* adjust metering setting */ + break; + } + } else { + switch (meteringMode) { + case METERING_MODE_CENTER: + /* + * SPOT metering mode in GED camera App was not set METERING_MODE_SPOT, + * but set metering areas only. + */ + m_setMeteringMode(METERING_MODE_SPOT); + m_cameraInfo.isTouchMetering = true; + break; + case METERING_MODE_AVERAGE: + case METERING_MODE_MATRIX: + case METERING_MODE_SPOT: + default: + /* adjust metering setting */ + break; + } + } +#endif + } else { + if (num > 1 && isRectEqual(&rect2s[0], &rect2s[1]) == false) { + /* if MATRIX mode support, mode set METERING_MODE_MATRIX */ + m_setMeteringMode(METERING_MODE_AVERAGE); + m_cameraInfo.isTouchMetering = false; + } else { + m_setMeteringMode(METERING_MODE_AVERAGE); + m_cameraInfo.isTouchMetering = false; + } + } + + ExynosRect cropRegionRect; + ExynosRect2 newRect2; + + getHwBayerCropRegion(&cropRegionRect.w, &cropRegionRect.h, &cropRegionRect.x, &cropRegionRect.y); + + for (uint32_t i = 0; i < num; i++) { + bool isChangeMeteringArea = false; +#ifdef CAMERA_GED_FEATURE + if (isRectNull(&rect2s[i]) == false) + isChangeMeteringArea = true; + else + isChangeMeteringArea = false; +#else + if ((isRectNull(&rect2s[i]) == false) ||((isRectNull(&rect2s[i]) == true) && (getMeteringMode() == METERING_MODE_SPOT))) + isChangeMeteringArea = true; +#ifdef TOUCH_AE + else if((getMeteringMode() == METERING_MODE_SPOT_TOUCH) || (getMeteringMode() == METERING_MODE_MATRIX_TOUCH) + || (getMeteringMode() == METERING_MODE_CENTER_TOUCH) || (getMeteringMode() == METERING_MODE_AVERAGE_TOUCH)) + isChangeMeteringArea = true; +#endif + else + isChangeMeteringArea = false; +#endif + if (isChangeMeteringArea == true) { + CLOGD2("(%d %d %d %d) %d", rect2s->x1, rect2s->y1, rect2s->x2, rect2s->y2, getMeteringMode()); + newRect2 = convertingAndroidArea2HWAreaBcropOut(&rect2s[i], &cropRegionRect); + setMetaCtlAeRegion(&m_metadata, newRect2.x1, newRect2.y1, + newRect2.x2, newRect2.y2, weights[i]); + } + } +} + +const char *ExynosCamera3Parameters::m_adjustAntibanding(const char *strAntibanding) +{ + const char *strAdjustedAntibanding = NULL; + + strAdjustedAntibanding = strAntibanding; + +#if 0 /* fixed the flicker issue when highspeed recording(60fps or 120fps) */ + /* when high speed recording mode, off thre antibanding */ + if (getHighSpeedRecording()) + strAdjustedAntibanding = CameraParameters::ANTIBANDING_OFF; +#endif + return strAdjustedAntibanding; +} + + +void ExynosCamera3Parameters::m_setAntibanding(int value) +{ + setMetaCtlAntibandingMode(&m_metadata, (enum aa_ae_antibanding_mode)value); +} + +int ExynosCamera3Parameters::getAntibanding(void) +{ + enum aa_ae_antibanding_mode antibanding; + getMetaCtlAntibandingMode(&m_metadata, &antibanding); + return (int)antibanding; +} + +int ExynosCamera3Parameters::getSupportedAntibanding(void) +{ + if (getHalVersion() == 
IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->antiBandingList; + } +} + +void ExynosCamera3Parameters::m_setSceneMode(int value) +{ + enum aa_mode mode = AA_CONTROL_AUTO; + enum aa_scene_mode sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + + switch (value) { + case SCENE_MODE_PORTRAIT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_PORTRAIT; + break; + case SCENE_MODE_LANDSCAPE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_LANDSCAPE; + break; + case SCENE_MODE_NIGHT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_NIGHT; + break; + case SCENE_MODE_BEACH: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_BEACH; + break; + case SCENE_MODE_SNOW: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SNOW; + break; + case SCENE_MODE_SUNSET: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SUNSET; + break; + case SCENE_MODE_FIREWORKS: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_FIREWORKS; + break; + case SCENE_MODE_SPORTS: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SPORTS; + break; + case SCENE_MODE_PARTY: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_PARTY; + break; + case SCENE_MODE_CANDLELIGHT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_CANDLELIGHT; + break; + case SCENE_MODE_STEADYPHOTO: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_STEADYPHOTO; + break; + case SCENE_MODE_ACTION: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_ACTION; + break; + case SCENE_MODE_NIGHT_PORTRAIT: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_NIGHT_PORTRAIT; + break; + case SCENE_MODE_THEATRE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_THEATRE; + break; + case SCENE_MODE_AQUA: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_AQUA; + break; + case SCENE_MODE_AUTO: + default: + mode = AA_CONTROL_AUTO; + sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + break; + } + + m_cameraInfo.sceneMode = value; + setMetaCtlSceneMode(&m_metadata, mode, sceneMode); + m_cameraInfo.whiteBalanceMode = m_convertMetaCtlAwbMode(&m_metadata); +} + +int ExynosCamera3Parameters::getSceneMode(void) +{ + return m_cameraInfo.sceneMode; +} + +int ExynosCamera3Parameters::getSupportedSceneModes(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->sceneModeList; + } +} + +const char *ExynosCamera3Parameters::m_adjustFocusMode(const char *focusMode) +{ + int sceneMode = getSceneMode(); + const char *newFocusMode = NULL; + + /* TODO: vendor specific adjust */ + + newFocusMode = focusMode; + + return newFocusMode; +} + +void ExynosCamera3Parameters::m_setFocusMode(int focusMode) +{ + m_cameraInfo.focusMode = focusMode; + + if(getZoomActiveOn()) { + CLOGD("DEBUG(%s):zoom moving..", "setParameters"); + return; + } + + /* TODO: Notify auto focus activity */ + if(getPreviewRunning() == true) { + CLOGD2("set Focus Mode(%s[%d]) !!!!"); + m_activityControl->setAutoFocusMode(focusMode); + } else { + m_setFocusmodeSetting = true; + } +} + +void ExynosCamera3Parameters::setFocusModeLock(bool enable) { + int curFocusMode = getFocusMode(); + + CLOGD2("FocusModeLock (%s)", enable? 
"true" : "false"); + + if(enable) { + m_activityControl->stopAutoFocus(); + } else { + m_setFocusMode(curFocusMode); + } +} + +void ExynosCamera3Parameters::setFocusModeSetting(bool enable) +{ + m_setFocusmodeSetting = enable; +} + +int ExynosCamera3Parameters::getFocusModeSetting(void) +{ + return m_setFocusmodeSetting; +} + +int ExynosCamera3Parameters::getFocusMode(void) +{ + return m_cameraInfo.focusMode; +} + +int ExynosCamera3Parameters::getSupportedFocusModes(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->focusModeList; + } +} + +const char *ExynosCamera3Parameters::m_adjustFlashMode(const char *flashMode) +{ + int sceneMode = getSceneMode(); + const char *newFlashMode = NULL; + + /* TODO: vendor specific adjust */ + + newFlashMode = flashMode; + + return newFlashMode; +} + +void ExynosCamera3Parameters::m_setFlashMode(int flashMode) +{ + m_cameraInfo.flashMode = flashMode; + + /* TODO: Notity flash activity */ + m_activityControl->setFlashMode(flashMode); +} + +int ExynosCamera3Parameters::getFlashMode(void) +{ + return m_cameraInfo.flashMode; +} + +int ExynosCamera3Parameters::getSupportedFlashModes(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->flashModeList; + } +} + +const char *ExynosCamera3Parameters::m_adjustWhiteBalanceMode(const char *whiteBalance) +{ + int sceneMode = getSceneMode(); + const char *newWhiteBalance = NULL; + + /* TODO: vendor specific adjust */ + + /* TN' feautre can change whiteBalance even if Non SCENE_MODE_AUTO */ + + newWhiteBalance = whiteBalance; + + return newWhiteBalance; +} + +status_t ExynosCamera3Parameters::m_setWhiteBalanceMode(int whiteBalance) +{ + enum aa_awbmode awbMode; + + switch (whiteBalance) { + case WHITE_BALANCE_AUTO: + awbMode = AA_AWBMODE_WB_AUTO; + break; + case WHITE_BALANCE_INCANDESCENT: + awbMode = AA_AWBMODE_WB_INCANDESCENT; + break; + case WHITE_BALANCE_FLUORESCENT: + awbMode = AA_AWBMODE_WB_FLUORESCENT; + break; + case WHITE_BALANCE_DAYLIGHT: + awbMode = AA_AWBMODE_WB_DAYLIGHT; + break; + case WHITE_BALANCE_CLOUDY_DAYLIGHT: + awbMode = AA_AWBMODE_WB_CLOUDY_DAYLIGHT; + break; + case WHITE_BALANCE_WARM_FLUORESCENT: + awbMode = AA_AWBMODE_WB_WARM_FLUORESCENT; + break; + case WHITE_BALANCE_TWILIGHT: + awbMode = AA_AWBMODE_WB_TWILIGHT; + break; + case WHITE_BALANCE_SHADE: + awbMode = AA_AWBMODE_WB_SHADE; + break; + default: + CLOGE("ERR(%s):Unsupported value(%d)", __FUNCTION__, whiteBalance); + return BAD_VALUE; + } + + m_cameraInfo.whiteBalanceMode = whiteBalance; + setMetaCtlAwbMode(&m_metadata, awbMode); + + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + m_flashMgr->setFlashWhiteBalance(awbMode); + + return NO_ERROR; +} + +int ExynosCamera3Parameters::m_convertMetaCtlAwbMode(struct camera2_shot_ext *shot_ext) +{ + int awbMode = WHITE_BALANCE_AUTO; + + switch (shot_ext->shot.ctl.aa.awbMode) { + case AA_AWBMODE_WB_AUTO: + awbMode = WHITE_BALANCE_AUTO; + break; + case AA_AWBMODE_WB_INCANDESCENT: + awbMode = WHITE_BALANCE_INCANDESCENT; + break; + case AA_AWBMODE_WB_FLUORESCENT: + awbMode = WHITE_BALANCE_FLUORESCENT; + break; + case AA_AWBMODE_WB_DAYLIGHT: + awbMode = WHITE_BALANCE_DAYLIGHT; + break; + case AA_AWBMODE_WB_CLOUDY_DAYLIGHT: + awbMode = WHITE_BALANCE_CLOUDY_DAYLIGHT; + break; + case AA_AWBMODE_WB_WARM_FLUORESCENT: + awbMode = WHITE_BALANCE_WARM_FLUORESCENT; + break; + case AA_AWBMODE_WB_TWILIGHT: + awbMode = WHITE_BALANCE_TWILIGHT; + break; + case AA_AWBMODE_WB_SHADE: + awbMode = 
WHITE_BALANCE_SHADE; + break; + default: + CLOGE2("Unsupported awbMode(%d)", shot_ext->shot.ctl.aa.awbMode); + return BAD_VALUE; + } + + return awbMode; +} + +int ExynosCamera3Parameters::getWhiteBalanceMode(void) +{ + return m_cameraInfo.whiteBalanceMode; +} + +int ExynosCamera3Parameters::getSupportedWhiteBalance(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->whiteBalanceList; + } +} + +int ExynosCamera3Parameters::getSupportedISO(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return 0; + } else { + return m_staticInfo->isoValues; + } +} + +void ExynosCamera3Parameters::m_setAutoWhiteBalanceLock(bool value) +{ + if (getHalVersion() != IS_HAL_VER_3_2) { + m_cameraInfo.autoWhiteBalanceLock = value; + setMetaCtlAwbLock(&m_metadata, value); + } +} + +bool ExynosCamera3Parameters::getAutoWhiteBalanceLock(void) +{ + return m_cameraInfo.autoWhiteBalanceLock; +} + +void ExynosCamera3Parameters::m_setFocusAreas(uint32_t numValid, ExynosRect *rects, int *weights) +{ + ExynosRect2 *rect2s = new ExynosRect2[numValid]; + + for (uint32_t i = 0; i < numValid; i++) + convertingRectToRect2(&rects[i], &rect2s[i]); + + m_setFocusAreas(numValid, rect2s, weights); + + delete [] rect2s; +} + +void ExynosCamera3Parameters::m_setFocusAreas(uint32_t numValid, ExynosRect2 *rect2s, int *weights) +{ + uint32_t maxNumFocusAreas = getMaxNumFocusAreas(); + if (maxNumFocusAreas < numValid) + numValid = maxNumFocusAreas; + + if ((numValid == 1 || numValid == 0) && (isRectNull(&rect2s[0]) == true)) { + /* m_setFocusMode(FOCUS_MODE_AUTO); */ + ExynosRect2 newRect2(0,0,0,0); + m_activityControl->setAutoFcousArea(newRect2, 1000); + + m_activityControl->touchAFMode = false; + m_activityControl->touchAFModeForFlash = false; + } else { + ExynosRect cropRegionRect; + ExynosRect2 newRect2; + + getHwBayerCropRegion(&cropRegionRect.w, &cropRegionRect.h, &cropRegionRect.x, &cropRegionRect.y); + + for (uint32_t i = 0; i < numValid; i++) { + newRect2 = convertingAndroidArea2HWAreaBcropOut(&rect2s[i], &cropRegionRect); + /*setMetaCtlAfRegion(&m_metadata, rect2s[i].x1, rect2s[i].y1, + rect2s[i].x2, rect2s[i].y2, weights[i]);*/ + m_activityControl->setAutoFcousArea(newRect2, weights[i]); + } + m_activityControl->touchAFMode = true; + m_activityControl->touchAFModeForFlash = true; + } + + m_cameraInfo.numValidFocusArea = numValid; +} + +void ExynosCamera3Parameters::m_setColorEffectMode(int effect) +{ + aa_effect_mode_t newEffect; + + switch(effect) { + case EFFECT_NONE: + newEffect = AA_EFFECT_OFF; + break; + case EFFECT_MONO: + newEffect = AA_EFFECT_MONO; + break; + case EFFECT_NEGATIVE: + newEffect = AA_EFFECT_NEGATIVE; + break; + case EFFECT_SOLARIZE: + newEffect = AA_EFFECT_SOLARIZE; + break; + case EFFECT_SEPIA: + newEffect = AA_EFFECT_SEPIA; + break; + case EFFECT_POSTERIZE: + newEffect = AA_EFFECT_POSTERIZE; + break; + case EFFECT_WHITEBOARD: + newEffect = AA_EFFECT_WHITEBOARD; + break; + case EFFECT_BLACKBOARD: + newEffect = AA_EFFECT_BLACKBOARD; + break; + case EFFECT_AQUA: + newEffect = AA_EFFECT_AQUA; + break; + case EFFECT_RED_YELLOW: + newEffect = AA_EFFECT_RED_YELLOW_POINT; + break; + case EFFECT_BLUE: + newEffect = AA_EFFECT_BLUE_POINT; + break; + case EFFECT_WARM_VINTAGE: + newEffect = AA_EFFECT_WARM_VINTAGE; + break; + case EFFECT_COLD_VINTAGE: + newEffect = AA_EFFECT_COLD_VINTAGE; + break; + case EFFECT_BEAUTY_FACE: + newEffect = AA_EFFECT_BEAUTY_FACE; + break; + default: + newEffect = AA_EFFECT_OFF; + CLOGE2("Color Effect mode(%d) is not supported", effect); + 
break; + } + setMetaCtlAaEffect(&m_metadata, newEffect); +} + +int ExynosCamera3Parameters::getColorEffectMode(void) +{ + aa_effect_mode_t curEffect; + int effect; + + getMetaCtlAaEffect(&m_metadata, &curEffect); + + switch(curEffect) { + case AA_EFFECT_OFF: + effect = EFFECT_NONE; + break; + case AA_EFFECT_MONO: + effect = EFFECT_MONO; + break; + case AA_EFFECT_NEGATIVE: + effect = EFFECT_NEGATIVE; + break; + case AA_EFFECT_SOLARIZE: + effect = EFFECT_SOLARIZE; + break; + case AA_EFFECT_SEPIA: + effect = EFFECT_SEPIA; + break; + case AA_EFFECT_POSTERIZE: + effect = EFFECT_POSTERIZE; + break; + case AA_EFFECT_WHITEBOARD: + effect = EFFECT_WHITEBOARD; + break; + case AA_EFFECT_BLACKBOARD: + effect = EFFECT_BLACKBOARD; + break; + case AA_EFFECT_AQUA: + effect = EFFECT_AQUA; + break; + case AA_EFFECT_RED_YELLOW_POINT: + effect = EFFECT_RED_YELLOW; + break; + case AA_EFFECT_BLUE_POINT: + effect = EFFECT_BLUE; + break; + case AA_EFFECT_WARM_VINTAGE: + effect = EFFECT_WARM_VINTAGE; + break; + case AA_EFFECT_COLD_VINTAGE: + effect = EFFECT_COLD_VINTAGE; + break; + case AA_EFFECT_BEAUTY_FACE: + effect = EFFECT_BEAUTY_FACE; + break; + default: + effect = 0; + CLOGE2("Color Effect mode(%d) is invalid value", curEffect); + break; + } + + return effect; +} + +int ExynosCamera3Parameters::getSupportedColorEffects(void) +{ + return m_staticInfo->effectList; +} + +bool ExynosCamera3Parameters::isSupportedColorEffects(int effectMode) +{ + int ret = false; + + if (effectMode & getSupportedColorEffects()) { + return true; + } + + if (effectMode & m_staticInfo->hiddenEffectList) { + return true; + } + + return ret; +} + +void ExynosCamera3Parameters::m_setGpsAltitude(double altitude) +{ + m_cameraInfo.gpsAltitude = altitude; +} + +double ExynosCamera3Parameters::getGpsAltitude(void) +{ + return m_cameraInfo.gpsAltitude; +} + +void ExynosCamera3Parameters::m_setGpsLatitude(double latitude) +{ + m_cameraInfo.gpsLatitude = latitude; +} + +double ExynosCamera3Parameters::getGpsLatitude(void) +{ + return m_cameraInfo.gpsLatitude; +} + +void ExynosCamera3Parameters::m_setGpsLongitude(double longitude) +{ + m_cameraInfo.gpsLongitude = longitude; +} + +double ExynosCamera3Parameters::getGpsLongitude(void) +{ + return m_cameraInfo.gpsLongitude; +} + +void ExynosCamera3Parameters::m_setGpsProcessingMethod(const char *gpsProcessingMethod) +{ + memset(m_exifInfo.gps_processing_method, 0, sizeof(m_exifInfo.gps_processing_method)); + if (gpsProcessingMethod == NULL) + return; + + size_t len = strlen(gpsProcessingMethod); + + if (len > sizeof(m_exifInfo.gps_processing_method)) { + len = sizeof(m_exifInfo.gps_processing_method); + } + memcpy(m_exifInfo.gps_processing_method, gpsProcessingMethod, len); +} + +const char *ExynosCamera3Parameters::getGpsProcessingMethod(void) +{ + return (const char *)m_exifInfo.gps_processing_method; +} + +void ExynosCamera3Parameters::m_setExifFixedAttribute(void) +{ + char property[PROPERTY_VALUE_MAX]; + + memset(&m_exifInfo, 0, sizeof(m_exifInfo)); + + /* 2 0th IFD TIFF Tags */ + /* 3 Maker */ + strncpy((char *)m_exifInfo.maker, EXIF_DEF_MAKER, + sizeof(m_exifInfo.maker) - 1); + m_exifInfo.maker[sizeof(EXIF_DEF_MAKER) - 1] = '\0'; + + /* 3 Model */ + property_get("ro.product.model", property, EXIF_DEF_MODEL); + strncpy((char *)m_exifInfo.model, property, + sizeof(m_exifInfo.model) - 1); + m_exifInfo.model[sizeof(m_exifInfo.model) - 1] = '\0'; + /* 3 Software */ + property_get("ro.build.PDA", property, EXIF_DEF_SOFTWARE); + strncpy((char *)m_exifInfo.software, property, + 
sizeof(m_exifInfo.software) - 1); + m_exifInfo.software[sizeof(m_exifInfo.software) - 1] = '\0'; + + /* 3 YCbCr Positioning */ + m_exifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING; + + /*2 0th IFD Exif Private Tags */ + /* 3 Exposure Program */ + m_exifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM; + /* 3 Exif Version */ + memcpy(m_exifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(m_exifInfo.exif_version)); + + if (getHalVersion() == IS_HAL_VER_3_2) { + /* 3 Aperture */ + m_exifInfo.aperture.num = (int) m_staticInfo->aperture * COMMON_DENOMINATOR; + m_exifInfo.aperture.den = COMMON_DENOMINATOR; + /* 3 F Number */ + m_exifInfo.fnumber.num = m_staticInfo->fNumber * COMMON_DENOMINATOR; + m_exifInfo.fnumber.den = COMMON_DENOMINATOR; + /* 3 Maximum lens aperture */ + m_exifInfo.max_aperture.num = m_staticInfo->aperture * COMMON_DENOMINATOR; + m_exifInfo.max_aperture.den = COMMON_DENOMINATOR; + /* 3 Lens Focal Length */ + m_exifInfo.focal_length.num = m_staticInfo->focalLength * COMMON_DENOMINATOR; + m_exifInfo.focal_length.den = COMMON_DENOMINATOR; + } else { + m_exifInfo.aperture.num = m_staticInfo->apertureNum; + m_exifInfo.aperture.den = m_staticInfo->apertureDen; + /* 3 F Number */ + m_exifInfo.fnumber.num = m_staticInfo->fNumberNum; + m_exifInfo.fnumber.den = m_staticInfo->fNumberDen; + /* 3 Maximum lens aperture */ + m_exifInfo.max_aperture.num = m_staticInfo->apertureNum; + m_exifInfo.max_aperture.den = m_staticInfo->apertureDen; + /* 3 Lens Focal Length */ + m_exifInfo.focal_length.num = m_staticInfo->focalLengthNum; + m_exifInfo.focal_length.den = m_staticInfo->focalLengthDen; + } + + /* 3 Maker note */ + if (m_exifInfo.maker_note) + delete m_exifInfo.maker_note; + + m_exifInfo.maker_note_size = 98; + m_exifInfo.maker_note = new unsigned char[m_exifInfo.maker_note_size]; + memset((void *)m_exifInfo.maker_note, 0, m_exifInfo.maker_note_size); + /* 3 User Comments */ + if (m_exifInfo.user_comment) + delete m_exifInfo.user_comment; + + m_exifInfo.user_comment_size = sizeof("user comment"); + m_exifInfo.user_comment = new unsigned char[m_exifInfo.user_comment_size + 8]; + memset((void *)m_exifInfo.user_comment, 0, m_exifInfo.user_comment_size + 8); + + /* 3 Color Space information */ + m_exifInfo.color_space = EXIF_DEF_COLOR_SPACE; + /* 3 interoperability */ + m_exifInfo.interoperability_index = EXIF_DEF_INTEROPERABILITY; + /* 3 Exposure Mode */ + m_exifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE; + + /* 2 0th IFD GPS Info Tags */ + unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 }; + memcpy(m_exifInfo.gps_version_id, gps_version, sizeof(gps_version)); + + /* 2 1th IFD TIFF Tags */ + m_exifInfo.compression_scheme = EXIF_DEF_COMPRESSION; + m_exifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM; + m_exifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN; + m_exifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM; + m_exifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN; + m_exifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT; +} + +void ExynosCamera3Parameters::setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *pictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + m_setExifChangedAttribute(exifInfo, pictureRect, thumbnailRect, shot); + } else { + m_setExifChangedAttribute(exifInfo, pictureRect, thumbnailRect, &(shot->dm), &(shot->udm)); + } +} + +void ExynosCamera3Parameters::m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *pictureRect, + ExynosRect *thumbnailRect, + __unused 
camera2_dm *dm, + camera2_udm *udm) +{ + /* 2 0th IFD TIFF Tags */ + /* 3 Width */ + exifInfo->width = pictureRect->w; + /* 3 Height */ + exifInfo->height = pictureRect->h; + + /* 3 Orientation */ + switch (m_cameraInfo.rotation) { + case 90: + exifInfo->orientation = EXIF_ORIENTATION_90; + break; + case 180: + exifInfo->orientation = EXIF_ORIENTATION_180; + break; + case 270: + exifInfo->orientation = EXIF_ORIENTATION_270; + break; + case 0: + default: + exifInfo->orientation = EXIF_ORIENTATION_UP; + break; + } + + /* 3 Maker note */ + /* back-up udm info for exif's maker note */ + memcpy((void *)mDebugInfo.debugData[APP_MARKER_4], (void *)udm, mDebugInfo.debugSize[APP_MARKER_4]); + + /* TODO */ +#if 0 + if (getSeriesShotCount() && getShotMode() != SHOT_MODE_BEST_PHOTO) { + unsigned char l_makernote[98] = { 0x07, 0x00, 0x01, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x30, 0x31, 0x30, 0x30, 0x02, 0x00, 0x04, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x00, 0x20, 0x01, 0x00, 0x40, 0x00, 0x04, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, + 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x05, 0x00, 0x01, 0x00, 0x00, 0x00, 0x5A, 0x00, + 0x00, 0x00, 0x50, 0x00, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; + long long int mCityId = getCityId(); + l_makernote[46] = getWeatherId(); + memcpy(l_makernote + 90, &mCityId, 8); + exifInfo->maker_note_size = 98; + memcpy(exifInfo->maker_note, l_makernote, sizeof(l_makernote)); + } else { + exifInfo->maker_note_size = 0; + } +#else + exifInfo->maker_note_size = 0; +#endif + + /* 3 Date time */ + struct timeval rawtime; + struct tm timeinfo; + gettimeofday(&rawtime, NULL); + localtime_r((time_t *)&rawtime.tv_sec, &timeinfo); + strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", &timeinfo); + sprintf((char *)exifInfo->sec_time, "%d", (int)(rawtime.tv_usec/1000)); + + /* 2 0th IFD Exif Private Tags */ + bool flagSLSIAlgorithm = true; + /* + * vendorSpecific2[100] : exposure + * vendorSpecific2[101] : iso(gain) + * vendorSpecific2[102] /256 : Bv + * vendorSpecific2[103] : Tv + */ + + /* 3 ISO Speed Rating */ + exifInfo->iso_speed_rating = udm->internal.vendorSpecific2[101]; + + /* 3 Exposure Time */ + exifInfo->exposure_time.num = 1; + + if (udm->ae.vendorSpecific[0] == 0xAEAEAEAE) + exifInfo->exposure_time.den = (uint32_t)udm->ae.vendorSpecific[64]; + else + exifInfo->exposure_time.den = (uint32_t)udm->internal.vendorSpecific2[100]; + + /* 3 Shutter Speed */ + exifInfo->shutter_speed.num = (uint32_t)(ROUND_OFF_HALF(((double)(udm->internal.vendorSpecific2[103] / 256.f) * EXIF_DEF_APEX_DEN), 0)); + exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; + + if (getHalVersion() == IS_HAL_VER_3_2) { + /* 3 Aperture */ + exifInfo->aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->aperture.den = COMMON_DENOMINATOR; + + /* 3 Max Aperture */ + exifInfo->max_aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->max_aperture.den = COMMON_DENOMINATOR; + } else { + /* 3 Aperture */ + exifInfo->aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * m_staticInfo->apertureDen; + exifInfo->aperture.den = m_staticInfo->apertureDen; + + /* 3 Max 
Aperture */ + exifInfo->max_aperture.num = APEX_FNUM_TO_APERTURE((double)(exifInfo->fnumber.num) / (double)(exifInfo->fnumber.den)) * m_staticInfo->apertureDen; + exifInfo->max_aperture.den = m_staticInfo->apertureDen; + } + + /* 3 Brightness */ + int temp = udm->internal.vendorSpecific2[102]; + if ((int)udm->ae.vendorSpecific[102] < 0) + temp = -temp; + + exifInfo->brightness.num = (int32_t)(ROUND_OFF_HALF((double)((temp * EXIF_DEF_APEX_DEN) / 256.f), 0)); + if ((int)udm->ae.vendorSpecific[102] < 0) + exifInfo->brightness.num = -exifInfo->brightness.num; + + exifInfo->brightness.den = EXIF_DEF_APEX_DEN; + + CLOGD2("udm->internal.vendorSpecific2[100](%d)", udm->internal.vendorSpecific2[100]); + CLOGD2("udm->internal.vendorSpecific2[101](%d)", udm->internal.vendorSpecific2[101]); + CLOGD2("udm->internal.vendorSpecific2[102](%d)", udm->internal.vendorSpecific2[102]); + CLOGD2("udm->internal.vendorSpecific2[103](%d)", udm->internal.vendorSpecific2[103]); + + CLOGD2("iso_speed_rating(%d)", exifInfo->iso_speed_rating); + CLOGD2("exposure_time(%d/%d)", exifInfo->exposure_time.num, exifInfo->exposure_time.den); + CLOGD2("shutter_speed(%d/%d)", exifInfo->shutter_speed.num, exifInfo->shutter_speed.den); + CLOGD2("aperture (%d/%d)", exifInfo->aperture.num, exifInfo->aperture.den); + CLOGD2("brightness (%d/%d)", exifInfo->brightness.num, exifInfo->brightness.den); + + /* 3 Exposure Bias */ + exifInfo->exposure_bias.num = (int32_t)getExposureCompensation() * (m_staticInfo->exposureCompensationStep * 10); + exifInfo->exposure_bias.den = 10; + + /* 3 Metering Mode */ + { + switch (m_cameraInfo.meteringMode) { + case METERING_MODE_CENTER: + exifInfo->metering_mode = EXIF_METERING_CENTER; + break; + case METERING_MODE_MATRIX: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + case METERING_MODE_SPOT: +#ifdef TOUCH_AE + case METERING_MODE_CENTER_TOUCH: + case METERING_MODE_SPOT_TOUCH: + case METERING_MODE_AVERAGE_TOUCH: + case METERING_MODE_MATRIX_TOUCH: +#endif + exifInfo->metering_mode = EXIF_METERING_SPOT; + break; + case METERING_MODE_AVERAGE: + default: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + } + } + + /* 3 Flash */ + if (m_cameraInfo.flashMode == FLASH_MODE_OFF) { + exifInfo->flash = 0; + } else if (m_cameraInfo.flashMode == FLASH_MODE_TORCH) { + exifInfo->flash = 1; + } else { + exifInfo->flash = getMarkingOfExifFlash(); + } + + /* 3 White Balance */ + if (m_cameraInfo.whiteBalanceMode == WHITE_BALANCE_AUTO) + exifInfo->white_balance = EXIF_WB_AUTO; + else + exifInfo->white_balance = EXIF_WB_MANUAL; + + /* 3 Focal Length in 35mm length */ + exifInfo->focal_length_in_35mm_length = m_staticInfo->focalLengthIn35mmLength; + + /* 3 Scene Capture Type */ + switch (m_cameraInfo.sceneMode) { + case SCENE_MODE_PORTRAIT: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + case SCENE_MODE_LANDSCAPE: + exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; + break; + case SCENE_MODE_NIGHT: + exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; + break; + default: + exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; + break; + } + + switch (this->getShotMode()) { + case SHOT_MODE_BEAUTY_FACE: + case SHOT_MODE_BEST_FACE: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + default: + break; + } + + /* 3 Image Unique ID */ + /* 2 0th IFD GPS Info Tags */ + if (m_cameraInfo.gpsLatitude != 0 && m_cameraInfo.gpsLongitude != 0) { + if (m_cameraInfo.gpsLatitude > 0) + strncpy((char *)exifInfo->gps_latitude_ref, "N", 2); + else + strncpy((char 
*)exifInfo->gps_latitude_ref, "S", 2); + + if (m_cameraInfo.gpsLongitude > 0) + strncpy((char *)exifInfo->gps_longitude_ref, "E", 2); + else + strncpy((char *)exifInfo->gps_longitude_ref, "W", 2); + + if (m_cameraInfo.gpsAltitude > 0) + exifInfo->gps_altitude_ref = 0; + else + exifInfo->gps_altitude_ref = 1; + + double latitude = fabs(m_cameraInfo.gpsLatitude); + double longitude = fabs(m_cameraInfo.gpsLongitude); + double altitude = fabs(m_cameraInfo.gpsAltitude); + + exifInfo->gps_latitude[0].num = (uint32_t)latitude; + exifInfo->gps_latitude[0].den = 1; + exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60); + exifInfo->gps_latitude[1].den = 1; + exifInfo->gps_latitude[2].num = (uint32_t)(round((((latitude - exifInfo->gps_latitude[0].num) * 60) + - exifInfo->gps_latitude[1].num) * 60)); + exifInfo->gps_latitude[2].den = 1; + + exifInfo->gps_longitude[0].num = (uint32_t)longitude; + exifInfo->gps_longitude[0].den = 1; + exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60); + exifInfo->gps_longitude[1].den = 1; + exifInfo->gps_longitude[2].num = (uint32_t)(round((((longitude - exifInfo->gps_longitude[0].num) * 60) + - exifInfo->gps_longitude[1].num) * 60)); + exifInfo->gps_longitude[2].den = 1; + + exifInfo->gps_altitude.num = (uint32_t)altitude; + exifInfo->gps_altitude.den = 1; + + struct tm tm_data; + gmtime_r(&m_cameraInfo.gpsTimeStamp, &tm_data); + exifInfo->gps_timestamp[0].num = tm_data.tm_hour; + exifInfo->gps_timestamp[0].den = 1; + exifInfo->gps_timestamp[1].num = tm_data.tm_min; + exifInfo->gps_timestamp[1].den = 1; + exifInfo->gps_timestamp[2].num = tm_data.tm_sec; + exifInfo->gps_timestamp[2].den = 1; + snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), + "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); + + exifInfo->enableGps = true; + } else { + exifInfo->enableGps = false; + } + + /* 2 1th IFD TIFF Tags */ + exifInfo->widthThumb = thumbnailRect->w; + exifInfo->heightThumb = thumbnailRect->h; + + setMarkingOfExifFlash(0); +} +#ifdef USE_CAMERA2_API_SUPPORT +void ExynosCamera3Parameters::m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *pictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot) +{ + /* JPEG Picture Size */ + exifInfo->width = pictureRect->w; + exifInfo->height = pictureRect->h; + + /* Orientation */ + switch (shot->ctl.jpeg.orientation) { + case 90: + exifInfo->orientation = EXIF_ORIENTATION_90; + break; + case 180: + exifInfo->orientation = EXIF_ORIENTATION_180; + break; + case 270: + exifInfo->orientation = EXIF_ORIENTATION_270; + break; + case 0: + default: + exifInfo->orientation = EXIF_ORIENTATION_UP; + break; + } + + /* Maker Note Size */ + /* back-up udm info for exif's maker note */ + memcpy((void *)mDebugInfo.debugData[APP_MARKER_4], (void *)&shot->udm, mDebugInfo.debugSize[APP_MARKER_4]); + + /* TODO */ +#if 0 + if (getSeriesShotCount() && getShotMode() != SHOT_MODE_BEST_PHOTO) { + unsigned char l_makernote[98] = { 0x07, 0x00, 0x01, 0x00, 0x07, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x30, 0x31, 0x30, 0x30, 0x02, 0x00, 0x04, 0x00, 0x01, 0x00, + 0x00, 0x00, 0x00, 0x20, 0x01, 0x00, 0x40, 0x00, 0x04, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0C, 0x00, + 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x10, 0x00, 0x05, 0x00, 0x01, 0x00, 0x00, 0x00, 0x5A, 0x00, + 0x00, 0x00, 0x50, 0x00, 0x04, 0x00, 0x01, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0x00, 
0x01, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}; + long long int mCityId = getCityId(); + l_makernote[46] = getWeatherId(); + memcpy(l_makernote + 90, &mCityId, 8); + exifInfo->maker_note_size = 98; + memcpy(exifInfo->maker_note, l_makernote, sizeof(l_makernote)); + } else { + exifInfo->maker_note_size = 0; + } +#else + exifInfo->maker_note_size = 0; +#endif + + /* Date Time */ + struct timeval rawtime; + struct tm timeinfo; + gettimeofday(&rawtime, NULL); + localtime_r((time_t *)&rawtime.tv_sec, &timeinfo); + strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", &timeinfo); + sprintf((char *)exifInfo->sec_time, "%d", (int)(rawtime.tv_usec/1000)); + + /* Exif Private Tags */ + bool flagSLSIAlgorithm = true; + /* + * vendorSpecific2[0] : info + * vendorSpecific2[100] : 0:sirc 1:cml + * vendorSpecific2[101] : cml exposure + * vendorSpecific2[102] : cml iso(gain) + * vendorSpecific2[103] : cml Bv + */ + + /* ISO Speed Rating */ +#if 0 /* TODO: Must be same with the sensitivity in Result Metadata */ + exifInfo->iso_speed_rating = shot->udm.internal.vendorSpecific2[102]; +#else + exifInfo->iso_speed_rating = shot->dm.sensor.sensitivity; +#endif + + /* Exposure Time */ + exifInfo->exposure_time.num = 1; +#if 0 /* TODO: Must be same with the exposure time in Result Metadata */ + if (shot->udm.ae.vendorSpecific[0] == 0xAEAEAEAE) { + exifInfo->exposure_time.den = (uint32_t) shot->udm.ae.vendorSpecific[64]; + } else +#endif + { + /* HACK : Sometimes, F/W does NOT send the exposureTime */ + if (shot->dm.sensor.exposureTime != 0) + exifInfo->exposure_time.den = (uint32_t) 1e9 / shot->dm.sensor.exposureTime; + else + exifInfo->exposure_time.num = 0; + } + + /* Shutter Speed */ + exifInfo->shutter_speed.num = (uint32_t) (ROUND_OFF_HALF(((double) (shot->udm.internal.vendorSpecific2[104] / 256.f) * EXIF_DEF_APEX_DEN), 0)); + exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; + + /* Aperture */ + exifInfo->aperture.num = APEX_FNUM_TO_APERTURE((double) (exifInfo->fnumber.num) / (double) (exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->aperture.den = COMMON_DENOMINATOR; + + /* Max Aperture */ + exifInfo->max_aperture.num = APEX_FNUM_TO_APERTURE((double) (exifInfo->fnumber.num) / (double) (exifInfo->fnumber.den)) * COMMON_DENOMINATOR; + exifInfo->max_aperture.den = COMMON_DENOMINATOR; + + /* Brightness */ + int temp = shot->udm.internal.vendorSpecific2[103]; + if ((int) shot->udm.ae.vendorSpecific[103] < 0) + temp = -temp; + exifInfo->brightness.num = (int32_t) (ROUND_OFF_HALF((double)((temp * EXIF_DEF_APEX_DEN)/256.f), 0)); + if ((int) shot->udm.ae.vendorSpecific[103] < 0) + exifInfo->brightness.num = -exifInfo->brightness.num; + exifInfo->brightness.den = EXIF_DEF_APEX_DEN; + + CLOGD2("udm->internal.vendorSpecific2[101](%d)", shot->udm.internal.vendorSpecific2[101]); + CLOGD2("udm->internal.vendorSpecific2[102](%d)", shot->udm.internal.vendorSpecific2[102]); + CLOGD2("udm->internal.vendorSpecific2[103](%d)", shot->udm.internal.vendorSpecific2[103]); + CLOGD2("udm->internal.vendorSpecific2[104](%d)", shot->udm.internal.vendorSpecific2[104]); + + CLOGD2("iso_speed_rating(%d)", exifInfo->iso_speed_rating); + CLOGD2("exposure_time(%d/%d)", exifInfo->exposure_time.num, exifInfo->exposure_time.den); + CLOGD2("shutter_speed(%d/%d)", exifInfo->shutter_speed.num, exifInfo->shutter_speed.den); + CLOGD2("aperture (%d/%d)", exifInfo->aperture.num, exifInfo->aperture.den); + CLOGD2("brightness (%d/%d)", exifInfo->brightness.num, 
exifInfo->brightness.den); + + /* Exposure Bias */ +#if defined(USE_SUBDIVIDED_EV) + exifInfo->exposure_bias.num = shot->ctl.aa.aeExpCompensation * (m_staticInfo->exposureCompensationStep * 10); +#else + exifInfo->exposure_bias.num = + (shot->ctl.aa.aeExpCompensation) * (m_staticInfo->exposureCompensationStep * 10); +#endif + exifInfo->exposure_bias.den = 10; + + /* Metering Mode */ + { + switch (shot->ctl.aa.aeMode) { + case AA_AEMODE_CENTER: + exifInfo->metering_mode = EXIF_METERING_CENTER; + break; + case AA_AEMODE_MATRIX: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + case AA_AEMODE_SPOT: + exifInfo->metering_mode = EXIF_METERING_SPOT; + break; + default: + exifInfo->metering_mode = EXIF_METERING_AVERAGE; + break; + } + } + + /* Flash Mode */ + if (shot->ctl.flash.flashMode == CAM2_FLASH_MODE_OFF) { + exifInfo->flash = 0; + } else if (shot->ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) { + exifInfo->flash = 1; + } else { + exifInfo->flash = getMarkingOfExifFlash(); + } + + /* White Balance */ + if (shot->ctl.aa.awbMode == AA_AWBMODE_WB_AUTO) + exifInfo->white_balance = EXIF_WB_AUTO; + else + exifInfo->white_balance = EXIF_WB_MANUAL; + + /* Focal Length in 35mm length */ + exifInfo->focal_length_in_35mm_length = getFocalLengthIn35mmFilm(); + + /* Scene Capture Type */ + switch (shot->ctl.aa.sceneMode) { + case AA_SCENE_MODE_PORTRAIT: + case AA_SCENE_MODE_FACE_PRIORITY: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + case AA_SCENE_MODE_LANDSCAPE: + exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; + break; + case AA_SCENE_MODE_NIGHT: + exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; + break; + default: + exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; + break; + } + + switch (this->getShotMode()) { + case SHOT_MODE_BEAUTY_FACE: + case SHOT_MODE_BEST_FACE: + exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + default: + break; + } + + /* Image Unique ID */ + /* GPS Coordinates */ + double gpsLatitude = shot->ctl.jpeg.gpsCoordinates[0]; + double gpsLongitude = shot->ctl.jpeg.gpsCoordinates[1]; + double gpsAltitude = shot->ctl.jpeg.gpsCoordinates[2]; + if (gpsLatitude != 0 && gpsLongitude != 0) { + if (gpsLatitude > 0) + strncpy((char *) exifInfo->gps_latitude_ref, "N", 2); + else + strncpy((char *) exifInfo->gps_latitude_ref, "s", 2); + + if (gpsLongitude > 0) + strncpy((char *) exifInfo->gps_longitude_ref, "E", 2); + else + strncpy((char *) exifInfo->gps_longitude_ref, "W", 2); + + if (gpsAltitude > 0) + exifInfo->gps_altitude_ref = 0; + else + exifInfo->gps_altitude_ref = 1; + + gpsLatitude = fabs(gpsLatitude); + gpsLongitude = fabs(gpsLongitude); + gpsAltitude = fabs(gpsAltitude); + + exifInfo->gps_latitude[0].num = (uint32_t) gpsLatitude; + exifInfo->gps_latitude[0].den = 1; + exifInfo->gps_latitude[1].num = (uint32_t)((gpsLatitude - exifInfo->gps_latitude[0].num) * 60); + exifInfo->gps_latitude[1].den = 1; + exifInfo->gps_latitude[2].num = (uint32_t)(round((((gpsLatitude - exifInfo->gps_latitude[0].num) * 60) + - exifInfo->gps_latitude[1].num) * 60)); + exifInfo->gps_latitude[2].den = 1; + + exifInfo->gps_longitude[0].num = (uint32_t)gpsLongitude; + exifInfo->gps_longitude[0].den = 1; + exifInfo->gps_longitude[1].num = (uint32_t)((gpsLongitude - exifInfo->gps_longitude[0].num) * 60); + exifInfo->gps_longitude[1].den = 1; + exifInfo->gps_longitude[2].num = (uint32_t)(round((((gpsLongitude - exifInfo->gps_longitude[0].num) * 60) + - exifInfo->gps_longitude[1].num) * 60)); + exifInfo->gps_longitude[2].den = 1; + + 
exifInfo->gps_altitude.num = (uint32_t)gpsAltitude; + exifInfo->gps_altitude.den = 1; + + struct tm tm_data; + long gpsTimestamp = (long) shot->ctl.jpeg.gpsTimestamp; + gmtime_r(&gpsTimestamp, &tm_data); + exifInfo->gps_timestamp[0].num = tm_data.tm_hour; + exifInfo->gps_timestamp[0].den = 1; + exifInfo->gps_timestamp[1].num = tm_data.tm_min; + exifInfo->gps_timestamp[1].den = 1; + exifInfo->gps_timestamp[2].num = tm_data.tm_sec; + exifInfo->gps_timestamp[2].den = 1; + snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), + "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); + + exifInfo->enableGps = true; + } else { + exifInfo->enableGps = false; + } + + /* Thumbnail Size */ + exifInfo->widthThumb = thumbnailRect->w; + exifInfo->heightThumb = thumbnailRect->h; + + setMarkingOfExifFlash(0); +} +#endif + +debug_attribute_t *ExynosCamera3Parameters::getDebugAttribute(void) +{ + return &mDebugInfo; +} + +status_t ExynosCamera3Parameters::getFixedExifInfo(exif_attribute_t *exifInfo) +{ + if (exifInfo == NULL) { + CLOGE2("buffer is NULL"); + return BAD_VALUE; + } + + memcpy(exifInfo, &m_exifInfo, sizeof(exif_attribute_t)); + + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_setGpsTimeStamp(long timeStamp) +{ + m_cameraInfo.gpsTimeStamp = timeStamp; +} + +long ExynosCamera3Parameters::getGpsTimeStamp(void) +{ + return m_cameraInfo.gpsTimeStamp; +} + +/* TODO: Do not used yet */ +#if 0 +status_t ExynosCamera3Parameters::checkCityId(const CameraParameters& params) +{ + long long int newCityId = params.getInt64(CameraParameters::KEY_CITYID); + long long int curCityId = -1; + + if (newCityId < 0) + newCityId = 0; + + curCityId = getCityId(); + + if (curCityId != newCityId) { + m_setCityId(newCityId); + m_params.set(CameraParameters::KEY_CITYID, newCityId); + } + + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_setCityId(long long int cityId) +{ + m_cameraInfo.cityId = cityId; +} + +long long int ExynosCamera3Parameters::getCityId(void) +{ + return m_cameraInfo.cityId; +} + +status_t ExynosCamera3Parameters::checkWeatherId(const CameraParameters& params) +{ + int newWeatherId = params.getInt(CameraParameters::KEY_WEATHER); + int curWeatherId = -1; + + if (newWeatherId < 0 || newWeatherId > 5) { + return BAD_VALUE; + } + + curWeatherId = (int)getWeatherId(); + + if (curWeatherId != newWeatherId) { + m_setWeatherId((unsigned char)newWeatherId); + m_params.set(CameraParameters::KEY_WEATHER, newWeatherId); + } + + return NO_ERROR; +} + +void ExynosCamera3Parameters::m_setWeatherId(unsigned char weatherId) +{ + m_cameraInfo.weatherId = weatherId; +} + +unsigned char ExynosCamera3Parameters::getWeatherId(void) +{ + return m_cameraInfo.weatherId; +} +#endif + + +/* F/W's middle value is 3, and step is -2, -1, 0, 1, 2 */ +void ExynosCamera3Parameters::m_setBrightness(int brightness) +{ + setMetaCtlBrightness(&m_metadata, brightness + IS_BRIGHTNESS_DEFAULT + FW_CUSTOM_OFFSET); +} + +int ExynosCamera3Parameters::getBrightness(void) +{ + int32_t brightness = 0; + + getMetaCtlBrightness(&m_metadata, &brightness); + return brightness - IS_BRIGHTNESS_DEFAULT - FW_CUSTOM_OFFSET; +} + +void ExynosCamera3Parameters::m_setSaturation(int saturation) +{ + setMetaCtlSaturation(&m_metadata, saturation + IS_SATURATION_DEFAULT + FW_CUSTOM_OFFSET); +} + +int ExynosCamera3Parameters::getSaturation(void) +{ + int32_t saturation = 0; + + getMetaCtlSaturation(&m_metadata, &saturation); + return saturation - IS_SATURATION_DEFAULT - FW_CUSTOM_OFFSET; +} + +void 
ExynosCamera3Parameters::m_setSharpness(int sharpness) +{ + int newSharpness = sharpness + IS_SHARPNESS_DEFAULT; + enum processing_mode edge_mode = PROCESSING_MODE_OFF; + enum processing_mode noise_mode = PROCESSING_MODE_OFF; + int edge_strength = 0; + int noise_strength = 0; + + switch (newSharpness) { + case IS_SHARPNESS_MINUS_2: + edge_mode = PROCESSING_MODE_OFF; + noise_mode = PROCESSING_MODE_HIGH_QUALITY; + edge_strength = 0; + noise_strength = 10; + break; + case IS_SHARPNESS_MINUS_1: + edge_mode = PROCESSING_MODE_OFF; + noise_mode = PROCESSING_MODE_HIGH_QUALITY; + edge_strength = 0; + noise_strength = 5; + break; + case IS_SHARPNESS_DEFAULT: + edge_mode = PROCESSING_MODE_OFF; + noise_mode = PROCESSING_MODE_OFF; + edge_strength = 0; + noise_strength = 0; + break; + case IS_SHARPNESS_PLUS_1: + edge_mode = PROCESSING_MODE_HIGH_QUALITY; + noise_mode = PROCESSING_MODE_OFF; + edge_strength = 5; + noise_strength = 0; + break; + case IS_SHARPNESS_PLUS_2: + edge_mode = PROCESSING_MODE_HIGH_QUALITY; + noise_mode = PROCESSING_MODE_OFF; + edge_strength = 10; + noise_strength = 0; + break; + default: + break; + } + + CLOGD2("newSharpness %d edge_mode(%d),st(%d), noise(%d),st(%d)", + newSharpness, edge_mode, edge_strength, noise_mode, noise_strength); + + setMetaCtlSharpness(&m_metadata, edge_mode, edge_strength, noise_mode, noise_strength); +} + +int ExynosCamera3Parameters::getSharpness(void) +{ + int32_t edge_sharpness = 0; + int32_t noise_sharpness = 0; + int32_t sharpness = 0; + enum processing_mode edge_mode = PROCESSING_MODE_OFF; + enum processing_mode noise_mode = PROCESSING_MODE_OFF; + + getMetaCtlSharpness(&m_metadata, &edge_mode, &edge_sharpness, &noise_mode, &noise_sharpness); + + if(noise_sharpness == 10 && edge_sharpness == 0) { + sharpness = IS_SHARPNESS_MINUS_2; + } else if(noise_sharpness == 5 && edge_sharpness == 0) { + sharpness = IS_SHARPNESS_MINUS_1; + } else if(noise_sharpness == 0 && edge_sharpness == 0) { + sharpness = IS_SHARPNESS_DEFAULT; + } else if(noise_sharpness == 0 && edge_sharpness == 5) { + sharpness = IS_SHARPNESS_PLUS_1; + } else if(noise_sharpness == 0 && edge_sharpness == 10) { + sharpness = IS_SHARPNESS_PLUS_2; + } else { + sharpness = IS_SHARPNESS_DEFAULT; + } + + return sharpness - IS_SHARPNESS_DEFAULT; +} + +void ExynosCamera3Parameters::m_setHue(int hue) +{ + setMetaCtlHue(&m_metadata, hue + IS_HUE_DEFAULT + FW_CUSTOM_OFFSET); +} + +int ExynosCamera3Parameters::getHue(void) +{ + int32_t hue = 0; + + getMetaCtlHue(&m_metadata, &hue); + return hue - IS_HUE_DEFAULT - FW_CUSTOM_OFFSET; +} + +void ExynosCamera3Parameters::m_setIso(uint32_t iso) +{ + enum aa_isomode mode = AA_ISOMODE_AUTO; + + if (iso == 0 ) + mode = AA_ISOMODE_AUTO; + else + mode = AA_ISOMODE_MANUAL; + + setMetaCtlIso(&m_metadata, mode, iso); +} + +uint32_t ExynosCamera3Parameters::getIso(void) +{ + enum aa_isomode mode = AA_ISOMODE_AUTO; + uint32_t iso = 0; + + getMetaCtlIso(&m_metadata, &mode, &iso); + + return iso; +} + +uint64_t ExynosCamera3Parameters::getCaptureExposureTime(void) +{ + return m_exposureTimeCapture; +} + +void ExynosCamera3Parameters::m_setContrast(uint32_t contrast) +{ + setMetaCtlContrast(&m_metadata, contrast); +} + +uint32_t ExynosCamera3Parameters::getContrast(void) +{ + uint32_t contrast = 0; + getMetaCtlContrast(&m_metadata, &contrast); + return contrast; +} + +void ExynosCamera3Parameters::m_setHdrMode(bool hdr) +{ + m_cameraInfo.hdrMode = hdr; + + if (hdr == true) + m_setShotMode(SHOT_MODE_RICH_TONE); + else + m_setShotMode(SHOT_MODE_NORMAL); + + 
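+    /*
+     * Editorial note (not part of the imported SGSWPlatform source): the HDR toggle is
+     * propagated twice -- m_setShotMode() above records SHOT_MODE_RICH_TONE (or
+     * SHOT_MODE_NORMAL) and refreshes the scene-mode metadata accordingly, and the
+     * ExynosCameraActivityControl instance is notified below through setHdrMode().
+     */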
m_activityControl->setHdrMode(hdr); +} + +bool ExynosCamera3Parameters::getHdrMode(void) +{ + return m_cameraInfo.hdrMode; +} + +void ExynosCamera3Parameters::m_setWdrMode(bool wdr) +{ + m_cameraInfo.wdrMode = wdr; +} + +bool ExynosCamera3Parameters::getWdrMode(void) +{ + return m_cameraInfo.wdrMode; +} + +#ifdef USE_BINNING_MODE +int ExynosCamera3Parameters::getBinningMode(void) +{ + char cameraModeProperty[PROPERTY_VALUE_MAX]; + int ret = 0; + + if (m_staticInfo->vtcallSizeLutMax == 0 || m_staticInfo->vtcallSizeLut == NULL) { + CLOGV2("vtCallSizeLut is NULL, can't support the binnig mode"); + return ret; + } + + /* For VT Call with DualCamera Scenario */ + if (getDualMode()) { + CLOGV2("DualMode can't support the binnig mode.(%d,%d)", getCameraId(), getDualMode()); + return ret; + } + + if (getVtMode() != 3 && getVtMode() > 0 && getVtMode() < 5) { + ret = 1; + } else { + if (m_binningProperty == true) { + ret = 1; + } + } + return ret; +} +#endif + +void ExynosCamera3Parameters::m_setShotMode(int shotMode) +{ + enum aa_mode mode = AA_CONTROL_AUTO; + enum aa_scene_mode sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + bool changeSceneMode = true; + + switch (shotMode) { + case SHOT_MODE_DRAMA: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_DRAMA; + break; + case SHOT_MODE_3D_PANORAMA: + case SHOT_MODE_PANORAMA: + case SHOT_MODE_FRONT_PANORAMA: + case SHOT_MODE_INTERACTIVE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_PANORAMA; + break; + case SHOT_MODE_NIGHT: + case SHOT_MODE_NIGHT_SCENE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_LLS; + break; + case SHOT_MODE_ANIMATED_SCENE: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_ANIMATED; + break; + case SHOT_MODE_SPORTS: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_SPORTS; + break; + case SHOT_MODE_GOLF: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_GOLF; + break; + case SHOT_MODE_NORMAL: + case SHOT_MODE_AUTO: + case SHOT_MODE_BEAUTY_FACE: + case SHOT_MODE_BEST_PHOTO: + case SHOT_MODE_BEST_FACE: + case SHOT_MODE_ERASER: + case SHOT_MODE_RICH_TONE: + case SHOT_MODE_STORY: + case SHOT_MODE_SELFIE_ALARM: + case SHOT_MODE_FASTMOTION: + mode = AA_CONTROL_AUTO; + sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + break; + case SHOT_MODE_DUAL: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_DUAL; + break; + case SHOT_MODE_AQUA: + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_AQUA; + break; + case SHOT_MODE_AUTO_PORTRAIT: + case SHOT_MODE_PET: + default: + changeSceneMode = false; + break; + } + + m_cameraInfo.shotMode = shotMode; + if (changeSceneMode == true) + setMetaCtlSceneMode(&m_metadata, mode, sceneMode); +} + +int ExynosCamera3Parameters::getPreviewBufferCount(void) +{ + CLOGV("DEBUG(%s):getPreviewBufferCount %d", "setParameters", m_previewBufferCount); + + return m_previewBufferCount; +} + +void ExynosCamera3Parameters::setPreviewBufferCount(int previewBufferCount) +{ + m_previewBufferCount = previewBufferCount; + + CLOGV("DEBUG(%s):setPreviewBufferCount %d", "setParameters", m_previewBufferCount); + + return; +} + +int ExynosCamera3Parameters::getShotMode(void) +{ + return m_cameraInfo.shotMode; +} + +void ExynosCamera3Parameters::m_setAntiShake(bool toggle) +{ + enum aa_mode mode = AA_CONTROL_AUTO; + enum aa_scene_mode sceneMode = AA_SCENE_MODE_FACE_PRIORITY; + + if (toggle == true) { + mode = AA_CONTROL_USE_SCENE_MODE; + sceneMode = AA_SCENE_MODE_ANTISHAKE; + } + + setMetaCtlSceneMode(&m_metadata, mode, 
sceneMode); + m_cameraInfo.antiShake = toggle; +} + +bool ExynosCamera3Parameters::getAntiShake(void) +{ + return m_cameraInfo.antiShake; +} + +void ExynosCamera3Parameters::m_setVtMode(int vtMode) +{ + m_cameraInfo.vtMode = vtMode; + + setMetaVtMode(&m_metadata, (enum camera_vt_mode)vtMode); +} + +int ExynosCamera3Parameters::getVtMode(void) +{ + return m_cameraInfo.vtMode; +} + +int ExynosCamera3Parameters::getVRMode(void) +{ + return m_cameraInfo.vrMode; +} + +void ExynosCamera3Parameters::m_setGamma(bool gamma) +{ + m_cameraInfo.gamma = gamma; +} + +bool ExynosCamera3Parameters::getGamma(void) +{ + return m_cameraInfo.gamma; +} + +void ExynosCamera3Parameters::m_setSlowAe(bool slowAe) +{ + m_cameraInfo.slowAe = slowAe; +} + +bool ExynosCamera3Parameters::getSlowAe(void) +{ + return m_cameraInfo.slowAe; +} + +bool ExynosCamera3Parameters::isScalableSensorSupported(void) +{ + return m_staticInfo->scalableSensorSupport; +} + +bool ExynosCamera3Parameters::m_adjustScalableSensorMode(const int scaleMode) +{ + bool adjustedScaleMode = false; + int pictureW = 0; + int pictureH = 0; + float pictureRatio = 0; + uint32_t minFps = 0; + uint32_t maxFps = 0; + + /* If scale_mode is 1 or dual camera, scalable sensor turn on */ + if (scaleMode == 1) + adjustedScaleMode = true; + + if (getDualMode() == true) + adjustedScaleMode = true; + + /* + * scalable sensor only support 24 fps for 4:3 - picture size + * scalable sensor only support 15, 24, 30 fps for 16:9 - picture size + */ + getPreviewFpsRange(&minFps, &maxFps); + getPictureSize(&pictureW, &pictureH); + + pictureRatio = ROUND_OFF(((float)pictureW / (float)pictureH), 2); + + if (pictureRatio == 1.33f) { /* 4:3 */ + if (maxFps != 24) + adjustedScaleMode = false; + } else if (pictureRatio == 1.77f) { /* 16:9 */ + if ((maxFps != 15) && (maxFps != 24) && (maxFps != 30)) + adjustedScaleMode = false; + } else { + adjustedScaleMode = false; + } + + if (scaleMode == 1 && adjustedScaleMode == false) { + CLOGW2("pictureRatio(%f, %d, %d) fps(%d, %d) is not proper for scalable", + pictureRatio, pictureW, pictureH, minFps, maxFps); + } + + return adjustedScaleMode; +} + +void ExynosCamera3Parameters::setScalableSensorMode(bool scaleMode) +{ + m_cameraInfo.scalableSensorMode = scaleMode; +} + +bool ExynosCamera3Parameters::getScalableSensorMode(void) +{ + return m_cameraInfo.scalableSensorMode; +} + +void ExynosCamera3Parameters::m_getScalableSensorSize(int *newSensorW, int *newSensorH) +{ + int previewW = 0; + int previewH = 0; + + *newSensorW = 1920; + *newSensorH = 1080; + + /* default scalable sensor size is 1920x1080(16:9) */ + getPreviewSize(&previewW, &previewH); + + /* when preview size is 1440x1080(4:3), return sensor size(1920x1440) */ + /* if (previewW == 1440 && previewH == 1080) { */ + if ((previewW * 3 / 4) == previewH) { + *newSensorW = 1920; + *newSensorH = 1440; + } +} + +status_t ExynosCamera3Parameters::m_setImageUniqueId(const char *uniqueId) +{ + int uniqueIdLength; + + if (uniqueId == NULL) { + return BAD_VALUE; + } + + memset(m_cameraInfo.imageUniqueId, 0, sizeof(m_cameraInfo.imageUniqueId)); + + uniqueIdLength = strlen(uniqueId); + memcpy(m_cameraInfo.imageUniqueId, uniqueId, uniqueIdLength); + + return NO_ERROR; +} + +const char *ExynosCamera3Parameters::getImageUniqueId(void) +{ + return m_cameraInfo.imageUniqueId; +} + +#ifdef BURST_CAPTURE +int ExynosCamera3Parameters::getSeriesShotSaveLocation(void) +{ + int seriesShotSaveLocation = m_seriesShotSaveLocation; + int shotMode = getShotMode(); + + /* GED's series shot work as callback 
*/ +#ifdef CAMERA_GED_FEATURE + seriesShotSaveLocation = BURST_SAVE_CALLBACK; +#else + if (shotMode == SHOT_MODE_BEST_PHOTO) { + seriesShotSaveLocation = BURST_SAVE_CALLBACK; + } else { + if (m_seriesShotSaveLocation == 0) + seriesShotSaveLocation = BURST_SAVE_PHONE; + else + seriesShotSaveLocation = BURST_SAVE_SDCARD; + } +#endif + + return seriesShotSaveLocation; +} + +void ExynosCamera3Parameters::setSeriesShotSaveLocation(int ssaveLocation) +{ + m_seriesShotSaveLocation = ssaveLocation; +} + +char *ExynosCamera3Parameters::getSeriesShotFilePath(void) +{ + return m_seriesShotFilePath; +} +#endif + +int ExynosCamera3Parameters::getSeriesShotDuration(void) +{ + switch (m_cameraInfo.seriesShotMode) { + case SERIES_SHOT_MODE_BURST: + return NORMAL_BURST_DURATION; + case SERIES_SHOT_MODE_BEST_FACE: + return BEST_FACE_DURATION; + case SERIES_SHOT_MODE_BEST_PHOTO: + return BEST_PHOTO_DURATION; + case SERIES_SHOT_MODE_ERASER: + return ERASER_DURATION; + case SERIES_SHOT_MODE_SELFIE_ALARM: + return SELFIE_ALARM_DURATION; + default: + return 0; + } + return 0; +} + +int ExynosCamera3Parameters::getSeriesShotMode(void) +{ + return m_cameraInfo.seriesShotMode; +} + +void ExynosCamera3Parameters::setSeriesShotMode(int sshotMode, int count) +{ + int sshotCount = 0; + int shotMode = getShotMode(); + if (sshotMode == SERIES_SHOT_MODE_BURST) { + if (shotMode == SHOT_MODE_BEST_PHOTO) { + sshotMode = SERIES_SHOT_MODE_BEST_PHOTO; + sshotCount = 8; + } else if (shotMode == SHOT_MODE_BEST_FACE) { + sshotMode = SERIES_SHOT_MODE_BEST_FACE; + sshotCount = 5; + } else if (shotMode == SHOT_MODE_ERASER) { + sshotMode = SERIES_SHOT_MODE_ERASER; + sshotCount = 5; + } + else if (shotMode == SHOT_MODE_SELFIE_ALARM) { + sshotMode = SERIES_SHOT_MODE_SELFIE_ALARM; + sshotCount = 3; + } else { + sshotMode = SERIES_SHOT_MODE_BURST; + sshotCount = MAX_SERIES_SHOT_COUNT; + } + } else if (sshotMode == SERIES_SHOT_MODE_LLS || + sshotMode == SERIES_SHOT_MODE_SIS) { + if(count > 0) { + sshotCount = count; + } else { + sshotCount = 5; + } + } + + CLOGD2("set shotmode(%d), shotCount(%d)", sshotMode, sshotCount); + + m_cameraInfo.seriesShotMode = sshotMode; + m_setSeriesShotCount(sshotCount); +} + +void ExynosCamera3Parameters::m_setSeriesShotCount(int seriesShotCount) +{ + m_cameraInfo.seriesShotCount = seriesShotCount; +} + +int ExynosCamera3Parameters::getSeriesShotCount(void) +{ + return m_cameraInfo.seriesShotCount; +} + +void ExynosCamera3Parameters::setSamsungCamera(bool value) +{ + String8 tempStr; + ExynosCameraActivityAutofocus *autoFocusMgr = m_activityControl->getAutoFocusMgr(); + + m_cameraInfo.samsungCamera = value; + +#if 0 + autoFocusMgr->setSamsungCamera(value); + + /* zoom */ + if (getZoomSupported() == true) { + int maxZoom = getMaxZoomLevel(); + CLOGI2("change MaxZoomLevel and ZoomRatio List.(%d)", maxZoom); + + if (0 < maxZoom) { + m_params.set(CameraParameters::KEY_ZOOM_SUPPORTED, "true"); + + if (getSmoothZoomSupported() == true) + m_params.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true"); + else + m_params.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + + m_params.set(CameraParameters::KEY_MAX_ZOOM, maxZoom - 1); + m_params.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + + int max_zoom_ratio = (int)getMaxZoomRatio(); + tempStr.setTo(""); + if (getZoomRatioList(tempStr, maxZoom, max_zoom_ratio, m_staticInfo->zoomRatioList) == NO_ERROR) + m_params.set(CameraParameters::KEY_ZOOM_RATIOS, tempStr.string()); + else + m_params.set(CameraParameters::KEY_ZOOM_RATIOS, "100"); + + 
m_params.set("constant-growth-rate-zoom-supported", "true"); + + CLOGV("INFO(%s):zoomRatioList=%s", "setDefaultParameter", tempStr.string()); + } else { + m_params.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); + m_params.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + m_params.set(CameraParameters::KEY_MAX_ZOOM, ZOOM_LEVEL_0); + m_params.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + } + } else { + m_params.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); + m_params.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + m_params.set(CameraParameters::KEY_MAX_ZOOM, ZOOM_LEVEL_0); + m_params.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + } +#endif +} + +bool ExynosCamera3Parameters::getSamsungCamera(void) +{ + return m_cameraInfo.samsungCamera; +} + +bool ExynosCamera3Parameters::getZoomSupported(void) +{ + return m_staticInfo->zoomSupport; +} + +bool ExynosCamera3Parameters::getSmoothZoomSupported(void) +{ + return m_staticInfo->smoothZoomSupport; +} + +void ExynosCamera3Parameters::setHorizontalViewAngle(int pictureW, int pictureH) +{ + double pi_camera = 3.1415926f; + double distance; + double ratio; + double hViewAngle_half_rad = pi_camera / 180 * (double)m_staticInfo->horizontalViewAngle[SIZE_RATIO_16_9] / 2; + + distance = ((double)m_staticInfo->maxSensorW / (double)m_staticInfo->maxSensorH * 9 / 2) + / tan(hViewAngle_half_rad); + ratio = (double)pictureW / (double)pictureH; + + m_calculatedHorizontalViewAngle = (float)(atan(ratio * 9 / 2 / distance) * 2 * 180 / pi_camera); +} + +float ExynosCamera3Parameters::getHorizontalViewAngle(void) +{ + int right_ratio = 177; + + if ((int)(m_staticInfo->maxSensorW * 100 / m_staticInfo->maxSensorH) == right_ratio) { + return m_calculatedHorizontalViewAngle; + } else { + return m_staticInfo->horizontalViewAngle[m_cameraInfo.pictureSizeRatioId]; + } +} + +float ExynosCamera3Parameters::getVerticalViewAngle(void) +{ + return m_staticInfo->verticalViewAngle; +} + +void ExynosCamera3Parameters::getFnumber(int *num, int *den) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + *num = m_staticInfo->fNumber * COMMON_DENOMINATOR; + *den = COMMON_DENOMINATOR; + } else { + *num = m_staticInfo->fNumberNum; + *den = m_staticInfo->fNumberDen; + } +} + +void ExynosCamera3Parameters::getApertureValue(int *num, int *den) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + *num = m_staticInfo->aperture * COMMON_DENOMINATOR; + *den = COMMON_DENOMINATOR; + } else { + *num = m_staticInfo->apertureNum; + *den = m_staticInfo->apertureDen; + } +} + +int ExynosCamera3Parameters::getFocalLengthIn35mmFilm(void) +{ + return m_staticInfo->focalLengthIn35mmLength; +} + +void ExynosCamera3Parameters::getFocalLength(int *num, int *den) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + *num = m_staticInfo->focalLength * COMMON_DENOMINATOR; + *den = COMMON_DENOMINATOR; + } else { + *num = m_staticInfo->focalLengthNum; + *den = m_staticInfo->focalLengthDen; + } +} + +void ExynosCamera3Parameters::getFocusDistances(int *num, int *den) +{ + *num = m_staticInfo->focusDistanceNum; + *den = m_staticInfo->focusDistanceDen; +} + +int ExynosCamera3Parameters::getMinExposureCompensation(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->exposureCompensationRange[MIN]; + } else { + return m_staticInfo->minExposureCompensation; + } +} + +int ExynosCamera3Parameters::getMaxExposureCompensation(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->exposureCompensationRange[MAX]; + } else { + return 
m_staticInfo->maxExposureCompensation; + } +} + +float ExynosCamera3Parameters::getExposureCompensationStep(void) +{ + return m_staticInfo->exposureCompensationStep; +} + +int ExynosCamera3Parameters::getMaxNumDetectedFaces(void) +{ + return m_staticInfo->maxNumDetectedFaces; +} + +uint32_t ExynosCamera3Parameters::getMaxNumFocusAreas(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->max3aRegions[AF]; + } else { + return m_staticInfo->maxNumFocusAreas; + } +} + +uint32_t ExynosCamera3Parameters::getMaxNumMeteringAreas(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return m_staticInfo->max3aRegions[AE]; + } else { + return m_staticInfo->maxNumMeteringAreas; + } +} + +int ExynosCamera3Parameters::getMaxZoomLevel(void) +{ + return m_staticInfo->maxZoomLevel; +} + +float ExynosCamera3Parameters::getMaxZoomRatio(void) +{ + return (float)m_staticInfo->maxZoomRatio; +} + +float ExynosCamera3Parameters::getZoomRatio(int zoomLevel) +{ + float zoomRatio = 1.00f; + if (getZoomSupported() == true) + zoomRatio = (float)m_staticInfo->zoomRatioList[zoomLevel]; + else + zoomRatio = 1000.00f; + + return zoomRatio; +} + +bool ExynosCamera3Parameters::getVideoSnapshotSupported(void) +{ + return m_staticInfo->videoSnapshotSupport; +} + +bool ExynosCamera3Parameters::getVideoStabilizationSupported(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + bool supported = false; + for (size_t i = 0; i < m_staticInfo->videoStabilizationModesLength; i++) { + if (m_staticInfo->videoStabilizationModes[i] + == ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON) { + supported = true; + break; + } + } + return supported; + } else { + return m_staticInfo->videoStabilizationSupport; + } +} + +bool ExynosCamera3Parameters::getAutoWhiteBalanceLockSupported(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return true; + } else { + return m_staticInfo->autoWhiteBalanceLockSupport; + } +} + +bool ExynosCamera3Parameters::getAutoExposureLockSupported(void) +{ + if (getHalVersion() == IS_HAL_VER_3_2) { + return true; + } else { + return m_staticInfo->autoExposureLockSupport; + } +} + +void ExynosCamera3Parameters::enableMsgType(int32_t msgType) +{ + Mutex::Autolock lock(m_msgLock); + m_enabledMsgType |= msgType; +} + +void ExynosCamera3Parameters::disableMsgType(int32_t msgType) +{ + Mutex::Autolock lock(m_msgLock); + m_enabledMsgType &= ~msgType; +} + +bool ExynosCamera3Parameters::msgTypeEnabled(int32_t msgType) +{ + Mutex::Autolock lock(m_msgLock); + return (m_enabledMsgType & msgType); +} + +void ExynosCamera3Parameters::m_initMetadata(void) +{ + memset(&m_metadata, 0x00, sizeof(struct camera2_shot_ext)); + struct camera2_shot *shot = &m_metadata.shot; + + // 1. ctl + // request + shot->ctl.request.id = 0; + shot->ctl.request.metadataMode = METADATA_MODE_FULL; + shot->ctl.request.frameCount = 0; + + // lens + shot->ctl.lens.focusDistance = -1.0f; +#ifdef USE_CAMERA2_API_SUPPORT + shot->ctl.lens.aperture = m_staticInfo->aperture; + shot->ctl.lens.focalLength = m_staticInfo->focalLength; +#else + shot->ctl.lens.aperture = (float)m_staticInfo->apertureNum / (float)m_staticInfo->apertureDen; + shot->ctl.lens.focalLength = (float)m_staticInfo->focalLengthNum / (float)m_staticInfo->focalLengthDen; +#endif + shot->ctl.lens.filterDensity = 0.0f; + shot->ctl.lens.opticalStabilizationMode = ::OPTICAL_STABILIZATION_MODE_OFF; + + int minFps = (m_staticInfo->minFps == 0) ? 0 : (m_staticInfo->maxFps / 2); + int maxFps = (m_staticInfo->maxFps == 0) ? 0 : m_staticInfo->maxFps; + + /* The min fps can not be '0'. 
If it is, fall back to the default value '15'. */ + if (minFps == 0) { + CLOGW2("Invalid min fps value(%d)", minFps); + minFps = 15; + } + + /* The initial max fps can not be '0' and can not exceed '30'. If it is out of range, fall back to the default value '30'. */ + if (maxFps == 0 || 30 < maxFps) { + CLOGW2("Invalid max fps value(%d)", maxFps); + maxFps = 30; + } + + /* sensor */ + shot->ctl.sensor.exposureTime = 0; + shot->ctl.sensor.frameDuration = (1000 * 1000 * 1000) / maxFps; + shot->ctl.sensor.sensitivity = 0; + + /* flash */ + shot->ctl.flash.flashMode = ::CAM2_FLASH_MODE_OFF; + shot->ctl.flash.firingPower = 0; + shot->ctl.flash.firingTime = 0; + + /* hotpixel */ + shot->ctl.hotpixel.mode = (enum processing_mode)0; + + /* demosaic */ + shot->ctl.demosaic.mode = (enum demosaic_processing_mode)0; + + /* noise */ + shot->ctl.noise.mode = ::PROCESSING_MODE_OFF; + shot->ctl.noise.strength = 5; + + /* shading */ + shot->ctl.shading.mode = (enum processing_mode)0; + + /* color */ + shot->ctl.color.mode = ::COLORCORRECTION_MODE_FAST; + static const float colorTransform[9] = { + 1.0f, 0.f, 0.f, + 0.f, 1.f, 0.f, + 0.f, 0.f, 1.f + }; + +#ifdef USE_CAMERA2_API_SUPPORT + for (size_t i = 0; i < sizeof(colorTransform)/sizeof(colorTransform[0]); i++) { + shot->ctl.color.transform[i].num = colorTransform[i] * COMMON_DENOMINATOR; + shot->ctl.color.transform[i].den = COMMON_DENOMINATOR; + } +#else + memcpy(shot->ctl.color.transform, colorTransform, sizeof(colorTransform)); +#endif + + /* tonemap */ + shot->ctl.tonemap.mode = ::TONEMAP_MODE_FAST; + static const float tonemapCurve[4] = { + 0.f, 0.f, + 1.f, 1.f + }; + + int tonemapCurveSize = sizeof(tonemapCurve); + int sizeOfCurve = sizeof(shot->ctl.tonemap.curveRed) / sizeof(shot->ctl.tonemap.curveRed[0]); + + for (int i = 0; i < sizeOfCurve; i ++) { + memcpy(&(shot->ctl.tonemap.curveRed[i]), tonemapCurve, tonemapCurveSize); + memcpy(&(shot->ctl.tonemap.curveGreen[i]), tonemapCurve, tonemapCurveSize); + memcpy(&(shot->ctl.tonemap.curveBlue[i]), tonemapCurve, tonemapCurveSize); + } + + /* edge */ + shot->ctl.edge.mode = ::PROCESSING_MODE_OFF; + shot->ctl.edge.strength = 5; + + /* scaler + * Max Picture Size == Max Sensor Size - Sensor Margin + */ + if (m_setParamCropRegion(0, + m_staticInfo->maxPictureW, m_staticInfo->maxPictureH, + m_staticInfo->maxPreviewW, m_staticInfo->maxPreviewH + ) != NO_ERROR) { + CLOGE2("m_setParamCropRegion() fail"); + } + + /* jpeg */ + shot->ctl.jpeg.quality = 96; + shot->ctl.jpeg.thumbnailSize[0] = m_staticInfo->maxThumbnailW; + shot->ctl.jpeg.thumbnailSize[1] = m_staticInfo->maxThumbnailH; + shot->ctl.jpeg.thumbnailQuality = 100; + shot->ctl.jpeg.gpsCoordinates[0] = 0; + shot->ctl.jpeg.gpsCoordinates[1] = 0; + shot->ctl.jpeg.gpsCoordinates[2] = 0; + memset(&shot->ctl.jpeg.gpsProcessingMethod, 0x0, + sizeof(shot->ctl.jpeg.gpsProcessingMethod)); + shot->ctl.jpeg.gpsTimestamp = 0L; + shot->ctl.jpeg.orientation = 0L; + + /* stats */ + shot->ctl.stats.faceDetectMode = ::FACEDETECT_MODE_OFF; + shot->ctl.stats.histogramMode = ::STATS_MODE_OFF; + shot->ctl.stats.sharpnessMapMode = ::STATS_MODE_OFF; + + /* aa */ + shot->ctl.aa.captureIntent = ::AA_CAPTURE_INTENT_CUSTOM; + shot->ctl.aa.mode = ::AA_CONTROL_AUTO; + shot->ctl.aa.effectMode = ::AA_EFFECT_OFF; + shot->ctl.aa.sceneMode = ::AA_SCENE_MODE_FACE_PRIORITY; + shot->ctl.aa.videoStabilizationMode = (enum aa_videostabilization_mode)0; + + /* default metering is center */ + shot->ctl.aa.aeMode = ::AA_AEMODE_CENTER; + shot->ctl.aa.aeRegions[0] = 0; + shot->ctl.aa.aeRegions[1] = 0; + shot->ctl.aa.aeRegions[2] = 0; +
shot->ctl.aa.aeRegions[3] = 0; + shot->ctl.aa.aeRegions[4] = 1000; + shot->ctl.aa.aeLock = ::AA_AE_LOCK_OFF; +#if defined(USE_SUBDIVIDED_EV) + shot->ctl.aa.aeExpCompensation = 0; /* 21 is middle */ +#else + shot->ctl.aa.aeExpCompensation = 5; /* 5 is middle */ +#endif + shot->ctl.aa.aeTargetFpsRange[0] = minFps; + shot->ctl.aa.aeTargetFpsRange[1] = maxFps; + + shot->ctl.aa.aeAntibandingMode = ::AA_AE_ANTIBANDING_AUTO; + shot->ctl.aa.vendor_aeflashMode = ::AA_FLASHMODE_OFF; + shot->ctl.aa.awbMode = ::AA_AWBMODE_WB_AUTO; + shot->ctl.aa.awbLock = ::AA_AWB_LOCK_OFF; + shot->ctl.aa.afMode = ::AA_AFMODE_OFF; + shot->ctl.aa.afRegions[0] = 0; + shot->ctl.aa.afRegions[1] = 0; + shot->ctl.aa.afRegions[2] = 0; + shot->ctl.aa.afRegions[3] = 0; + shot->ctl.aa.afRegions[4] = 1000; + shot->ctl.aa.afTrigger = (enum aa_af_trigger)0; + shot->ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot->ctl.aa.vendor_isoValue = 0; + + /* 2. dm */ + + /* 3. utrl */ + /* 4. udm */ + + /* 5. magicNumber */ + shot->magicNumber = SHOT_MAGIC_NUMBER; + + setMetaSetfile(&m_metadata, 0x0); + + /* user request */ + m_metadata.drc_bypass = 1; + m_metadata.dis_bypass = 1; + m_metadata.dnr_bypass = 1; + m_metadata.fd_bypass = 1; +} + +status_t ExynosCamera3Parameters::duplicateCtrlMetadata(void *buf) +{ + if (buf == NULL) { + CLOGE2("buf is NULL"); + return BAD_VALUE; + } + + struct camera2_shot_ext *meta_shot_ext = (struct camera2_shot_ext *)buf; + memcpy(&meta_shot_ext->shot.ctl, &m_metadata.shot.ctl, sizeof(struct camera2_ctl)); + meta_shot_ext->shot.uctl.vtMode = m_metadata.shot.uctl.vtMode; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::setFrameSkipCount(int count) +{ + m_frameSkipCounter.setCount(count); + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getFrameSkipCount(int *count) +{ + *count = m_frameSkipCounter.getCount(); + m_frameSkipCounter.decCount(); + + return NO_ERROR; +} + +int ExynosCamera3Parameters::getFrameSkipCount(void) +{ + return m_frameSkipCounter.getCount(); +} + +void ExynosCamera3Parameters::setIsFirstStartFlag(bool flag) +{ + m_flagFirstStart = flag; +} + +int ExynosCamera3Parameters::getIsFirstStartFlag(void) +{ + return m_flagFirstStart; +} + +ExynosCameraActivityControl *ExynosCamera3Parameters::getActivityControl(void) +{ + return m_activityControl; +} + +status_t ExynosCamera3Parameters::setAutoFocusMacroPosition(int autoFocusMacroPosition) +{ + int oldAutoFocusMacroPosition = m_cameraInfo.autoFocusMacroPosition; + m_cameraInfo.autoFocusMacroPosition = autoFocusMacroPosition; + + m_activityControl->setAutoFocusMacroPosition(oldAutoFocusMacroPosition, autoFocusMacroPosition); + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::setDisEnable(bool enable) +{ + setMetaBypassDis(&m_metadata, enable == true ? 0 : 1); + return NO_ERROR; +} + +bool ExynosCamera3Parameters::getDisEnable(void) +{ + return m_metadata.dis_bypass; +} + +status_t ExynosCamera3Parameters::setDrcEnable(bool enable) +{ + setMetaBypassDrc(&m_metadata, enable == true ? 0 : 1); + return NO_ERROR; +} + +bool ExynosCamera3Parameters::getDrcEnable(void) +{ + return m_metadata.drc_bypass; +} + +status_t ExynosCamera3Parameters::setDnrEnable(bool enable) +{ + setMetaBypassDnr(&m_metadata, enable == true ? 0 : 1); + return NO_ERROR; +} + +bool ExynosCamera3Parameters::getDnrEnable(void) +{ + return m_metadata.dnr_bypass; +} + +status_t ExynosCamera3Parameters::setFdEnable(bool enable) +{ + setMetaBypassFd(&m_metadata, enable == true ? 
0 : 1); + return NO_ERROR; +} + +bool ExynosCamera3Parameters::getFdEnable(void) +{ + return m_metadata.fd_bypass; +} + +status_t ExynosCamera3Parameters::setFdMode(enum facedetect_mode mode) +{ + setMetaCtlFdMode(&m_metadata, mode); + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getFdMeta(bool reprocessing, void *buf) +{ + if (buf == NULL) { + CLOGE2("buf is NULL"); + return BAD_VALUE; + } + + struct camera2_shot_ext *meta_shot_ext = (struct camera2_shot_ext *)buf; + + /* disable face detection for reprocessing frame */ + if (reprocessing) { + meta_shot_ext->fd_bypass = 1; + meta_shot_ext->shot.ctl.stats.faceDetectMode = ::FACEDETECT_MODE_OFF; + } + + return NO_ERROR; +} + +void ExynosCamera3Parameters::setFlipHorizontal(int val) +{ + if (val < 0) { + CLOGE2("setFlipHorizontal ignored, invalid value(%d)", val); + return; + } + + m_cameraInfo.flipHorizontal = val; +} + +int ExynosCamera3Parameters::getFlipHorizontal(void) +{ + if (m_cameraInfo.shotMode == SHOT_MODE_FRONT_PANORAMA) { + return 0; + } else { + return m_cameraInfo.flipHorizontal; + } +} + +void ExynosCamera3Parameters::setFlipVertical(int val) +{ + if (val < 0) { + CLOGE2("setFlipVertical ignored, invalid value(%d)", val); + return; + } + + m_cameraInfo.flipVertical = val; +} + +int ExynosCamera3Parameters::getFlipVertical(void) +{ + return m_cameraInfo.flipVertical; +} + +bool ExynosCamera3Parameters::getCallbackNeedCSC(void) +{ + bool ret = true; + + int previewW = 0, previewH = 0; + int hwPreviewW = 0, hwPreviewH = 0; + int previewFormat = getPreviewFormat(); + + getPreviewSize(&previewW, &previewH); + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + if ((previewW == hwPreviewW)&& + (previewH == hwPreviewH)&& + (previewFormat == V4L2_PIX_FMT_NV21)) { + ret = false; + } + return ret; +} + +bool ExynosCamera3Parameters::getCallbackNeedCopy2Rendering(void) +{ + bool ret = false; + + int previewW = 0, previewH = 0; + + getPreviewSize(&previewW, &previewH); + if (previewW * previewH <= 1920*1080) + ret = true; + + return ret; +} + +bool ExynosCamera3Parameters::setDeviceOrientation(int orientation) +{ + if (orientation < 0 || orientation % 90 != 0) { + CLOGE2("Invalid orientation (%d)", orientation); + return false; + } + + m_cameraInfo.deviceOrientation = orientation; + + /* fd orientation need to be calibrated, according to f/w spec */ + int hwRotation = BACK_ROTATION; + +#if 0 + if (this->getCameraId() == CAMERA_ID_FRONT) + hwRotation = FRONT_ROTATION; +#endif + + int fdOrientation = (orientation + hwRotation) % 360; + + CLOGD2("orientation(%d), hwRotation(%d), fdOrientation(%d)", orientation, hwRotation, fdOrientation); + + return true; +} + +int ExynosCamera3Parameters::getDeviceOrientation(void) +{ + return m_cameraInfo.deviceOrientation; +} + +int ExynosCamera3Parameters::getFdOrientation(void) +{ + /* HACK: Calibrate FRONT FD orientation */ + if (getCameraId() == CAMERA_ID_FRONT) + return (m_cameraInfo.deviceOrientation + FRONT_ROTATION + 180) % 360; + else + return (m_cameraInfo.deviceOrientation + BACK_ROTATION) % 360; +} + +void ExynosCamera3Parameters::getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange) +{ + if (flagReprocessing == true) { + *setfile = m_setfileReprocessing; + *yuvRange = m_yuvRangeReprocessing; + } else { + *setfile = m_setfile; + *yuvRange = m_yuvRange; + } +} + +void ExynosCamera3Parameters::setSetfileYuvRange(void) +{ + + /* general */ + m_getSetfileYuvRange(false, &m_setfile, &m_yuvRange); + + /* reprocessing */ + m_getSetfileYuvRange(true, &m_setfileReprocessing, 
&m_yuvRangeReprocessing); + + CLOGD2("m_cameraId(%d) : general[setfile(%d) YUV range(%d)] : reprocessing[setfile(%d) YUV range(%d)]", + m_cameraId, + m_setfile, m_yuvRange, + m_setfileReprocessing, m_yuvRangeReprocessing); + +} + +void ExynosCamera3Parameters::m_getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange) +{ + uint32_t currentSetfile = 0; + uint32_t stateReg = 0; + int flagYUVRange = YUV_FULL_RANGE; + + unsigned int minFps = 0; + unsigned int maxFps = 0; + getPreviewFpsRange(&minFps, &maxFps); + + if (m_isUHDRecordingMode() == true) + stateReg |= STATE_REG_UHD_RECORDING; + + if (getDualMode() == true) { + stateReg |= STATE_REG_DUAL_MODE; + if (getDualRecordingHint() == true) + stateReg |= STATE_REG_DUAL_RECORDINGHINT; + } else { + if (getRecordingHint() == true) + stateReg |= STATE_REG_RECORDINGHINT; + } + + if (flagReprocessing == true) + stateReg |= STATE_REG_FLAG_REPROCESSING; + + if ((stateReg & STATE_REG_RECORDINGHINT)|| + (stateReg & STATE_REG_UHD_RECORDING)|| + (stateReg & STATE_REG_DUAL_RECORDINGHINT)) { + if (flagReprocessing == false) { + flagYUVRange = YUV_LIMITED_RANGE; + } + } + + if (m_cameraId == CAMERA_ID_FRONT) { + int vtMode = getVtMode(); + + if (0 < vtMode) { + switch (vtMode) { + case 1: + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT1; + if(stateReg == STATE_STILL_CAPTURE) + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT1_STILL_CAPTURE; + break; + case 2: + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT2; + break; + case 4: + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT4; + break; + default: + currentSetfile = ISS_SUB_SCENARIO_FRONT_VT2; + break; + } + } else if (getIntelligentMode() == 1) { + currentSetfile = ISS_SUB_SCENARIO_FRONT_SMART_STAY; + } else if (getShotMode() == SHOT_MODE_FRONT_PANORAMA) { + currentSetfile = ISS_SUB_SCENARIO_FRONT_PANORAMA; + } else { + switch(stateReg) { + case STATE_STILL_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW; + break; + + case STATE_STILL_PREVIEW_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_ON; + break; + + case STATE_STILL_PREVIEW_WDR_AUTO: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_AUTO; + break; + + case STATE_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_VIDEO; + break; + + case STATE_VIDEO_WDR_ON: + case STATE_UHD_VIDEO_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_VIDEO_WDR_ON; + break; + + case STATE_VIDEO_WDR_AUTO: + case STATE_UHD_VIDEO_WDR_AUTO: + currentSetfile = ISS_SUB_SCENARIO_VIDEO_WDR_AUTO; + break; + + case STATE_STILL_CAPTURE: + case STATE_VIDEO_CAPTURE: + case STATE_UHD_PREVIEW_CAPTURE: + case STATE_UHD_VIDEO_CAPTURE: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE; + break; + + case STATE_STILL_CAPTURE_WDR_ON: + case STATE_VIDEO_CAPTURE_WDR_ON: + case STATE_UHD_PREVIEW_CAPTURE_WDR_ON: + case STATE_UHD_VIDEO_CAPTURE_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON; + break; + + case STATE_STILL_CAPTURE_WDR_AUTO: + case STATE_VIDEO_CAPTURE_WDR_AUTO: + case STATE_UHD_PREVIEW_CAPTURE_WDR_AUTO: + case STATE_UHD_VIDEO_CAPTURE_WDR_AUTO: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO; + break; + + case STATE_DUAL_STILL_PREVIEW: + case STATE_DUAL_STILL_CAPTURE: + case STATE_DUAL_VIDEO_CAPTURE: + currentSetfile = ISS_SUB_SCENARIO_DUAL_STILL; + break; + + case STATE_DUAL_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_DUAL_VIDEO; + break; + + case STATE_UHD_PREVIEW: + case STATE_UHD_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_UHD_30FPS; + break; + + default: + CLOGD2("can't define scenario of setfile.(0x%4x)", stateReg); + break; + } + } + } else {
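+ /* rear camera : no VT / smart-stay / front-panorama special cases here, so the setfile is chosen from the operating state (stateReg) alone */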
+ switch(stateReg) { + case STATE_STILL_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW; + break; + + case STATE_STILL_PREVIEW_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_ON; + break; + + case STATE_STILL_PREVIEW_WDR_AUTO: + currentSetfile = ISS_SUB_SCENARIO_STILL_PREVIEW_WDR_AUTO; + break; + + case STATE_STILL_CAPTURE: + case STATE_VIDEO_CAPTURE: + case STATE_DUAL_STILL_CAPTURE: + case STATE_DUAL_VIDEO_CAPTURE: + case STATE_UHD_PREVIEW_CAPTURE: + case STATE_UHD_VIDEO_CAPTURE: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE; + break; + + case STATE_STILL_CAPTURE_WDR_ON: + case STATE_VIDEO_CAPTURE_WDR_ON: + case STATE_UHD_PREVIEW_CAPTURE_WDR_ON: + case STATE_UHD_VIDEO_CAPTURE_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_ON; + break; + + case STATE_STILL_CAPTURE_WDR_AUTO: + case STATE_VIDEO_CAPTURE_WDR_AUTO: + case STATE_UHD_PREVIEW_CAPTURE_WDR_AUTO: + case STATE_UHD_VIDEO_CAPTURE_WDR_AUTO: + currentSetfile = ISS_SUB_SCENARIO_STILL_CAPTURE_WDR_AUTO; + break; + + case STATE_VIDEO: + if (30 < minFps && 30 < maxFps) { + if (300 == minFps && 300 == maxFps) { + currentSetfile = ISS_SUB_SCENARIO_WVGA_300FPS; + } else if (60 == minFps && 60 == maxFps) { + currentSetfile = ISS_SUB_SCENARIO_FHD_60FPS; + } else if (240 == minFps && 240 == maxFps) { + currentSetfile = ISS_SUB_SCENARIO_FHD_240FPS; + } else { + currentSetfile = ISS_SUB_SCENARIO_VIDEO_HIGH_SPEED; + } + } else { + currentSetfile = ISS_SUB_SCENARIO_VIDEO; + } + break; + + case STATE_VIDEO_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_VIDEO_WDR_ON; + break; + + case STATE_VIDEO_WDR_AUTO: + currentSetfile = ISS_SUB_SCENARIO_VIDEO_WDR_AUTO; + break; + + case STATE_DUAL_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_DUAL_VIDEO; + break; + + case STATE_DUAL_STILL_PREVIEW: + currentSetfile = ISS_SUB_SCENARIO_DUAL_STILL; + break; + + case STATE_UHD_PREVIEW: + case STATE_UHD_VIDEO: + currentSetfile = ISS_SUB_SCENARIO_UHD_30FPS; + break; + + case STATE_UHD_PREVIEW_WDR_ON: + case STATE_UHD_VIDEO_WDR_ON: + currentSetfile = ISS_SUB_SCENARIO_UHD_30FPS_WDR_ON; + break; + + default: + CLOGD2("can't define scenario of setfile.(0x%4x)", stateReg); + break; + } + } +#if 0 + CLOGD2("==============================================================================="); + CLOGD2("CurrentState(0x%4x)", stateReg); + CLOGD2("getRTHdr()(%d)", getRTHdr()); + CLOGD2("getRecordingHint()(%d)", getRecordingHint()); + CLOGD2("m_isUHDRecordingMode()(%d)", m_isUHDRecordingMode()); + CLOGD2("getDualMode()(%d)", getDualMode()); + CLOGD2("getDualRecordingHint()(%d)", getDualRecordingHint()); + CLOGD2("flagReprocessing(%d)", flagReprocessing); + CLOGD2(" ==============================================================================="); + CLOGD2("currentSetfile(%d)", currentSetfile); + CLOGD2("flagYUVRange(%d)", flagYUVRange); + CLOGD2("==============================================================================="); +#else + CLOGD2("CurrentState (0x%4x), currentSetfile(%d)", stateReg, currentSetfile); +#endif + +done: + *setfile = currentSetfile; + *yuvRange = flagYUVRange; +} + +void ExynosCamera3Parameters::setUseDynamicBayer(bool enable) +{ + m_useDynamicBayer = enable; +} + +bool ExynosCamera3Parameters::getUseDynamicBayer(void) +{ + return m_useDynamicBayer; +} + +void ExynosCamera3Parameters::setUseDynamicBayerVideoSnapShot(bool enable) +{ + m_useDynamicBayerVideoSnapShot = enable; +} + +bool ExynosCamera3Parameters::getUseDynamicBayerVideoSnapShot(void) +{ + return m_useDynamicBayerVideoSnapShot; +} + +void
ExynosCamera3Parameters::setUseDynamicScc(bool enable) +{ + m_useDynamicScc = enable; +} + +bool ExynosCamera3Parameters::getUseDynamicScc(void) +{ + bool dynamicScc = m_useDynamicScc; + bool reprocessing = isReprocessing(); + + if (getRecordingHint() == true && reprocessing == false) + dynamicScc = false; + + return dynamicScc; +} + +void ExynosCamera3Parameters::setUseFastenAeStable(bool enable) +{ + m_useFastenAeStable = enable; +} + +bool ExynosCamera3Parameters::getUseFastenAeStable(void) +{ + return m_useFastenAeStable; +} + +status_t ExynosCamera3Parameters::calcPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int previewW = 0, previewH = 0, previewFormat = 0; + int hwPreviewW = 0, hwPreviewH = 0, hwPreviewFormat = 0; + previewFormat = getPreviewFormat(); + hwPreviewFormat = getHwPreviewFormat(); + + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + getPreviewSize(&previewW, &previewH); + + ret = getCropRectAlign(srcRect->w, srcRect->h, + previewW, previewH, + &srcRect->x, &srcRect->y, + &srcRect->w, &srcRect->h, + 2, 2, + 0, 1); + + srcRect->colorFormat = hwPreviewFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = previewW; + dstRect->h = previewH; + dstRect->fullW = previewW; + dstRect->fullH = previewH; + dstRect->colorFormat = previewFormat; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcHighResolutionPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int previewW = 0, previewH = 0, previewFormat = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + previewFormat = getPreviewFormat(); + pictureFormat = getPictureFormat(); + + if (isOwnScc(m_cameraId) == true) + getPictureSize(&pictureW, &pictureH); + else + getHwPictureSize(&pictureW, &pictureH); + getPreviewSize(&previewW, &previewH); + + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = pictureW; + srcRect->h = pictureH; + srcRect->fullW = pictureW; + srcRect->fullH = pictureH; + srcRect->colorFormat = pictureFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = previewW; + dstRect->h = previewH; + dstRect->fullW = previewW; + dstRect->fullH = previewH; + dstRect->colorFormat = previewFormat; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcRecordingGSCRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + + int hwPreviewW = 0, hwPreviewH = 0, hwPreviewFormat = 0; + int videoW = 0, videoH = 0, videoFormat = 0; + + hwPreviewFormat = getHwPreviewFormat(); + videoFormat = getVideoFormat(); + + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + getVideoSize(&videoW, &videoH); + + ret = getCropRectAlign(srcRect->w, srcRect->h, + videoW, videoH, + &srcRect->x, &srcRect->y, + &srcRect->w, &srcRect->h, + 2, 2, + 0, 1); + + srcRect->colorFormat = hwPreviewFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = videoW; + dstRect->h = videoH; + dstRect->fullW = videoW; + dstRect->fullH = videoH; + dstRect->colorFormat = videoFormat; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcPictureRect(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0, hwPictureFormat = 0; + int pictureW = 0, pictureH 
= 0, pictureFormat = 0; + int previewW = 0, previewH = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = 1.0f; + + /* TODO: check state ready for start */ + pictureFormat = getPictureFormat(); + zoomLevel = getZoomLevel(); + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(hwSensorW, hwSensorH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + zoomRatio = getZoomRatio(0) / 1000; + ret = getCropRectAlign(cropW, cropH, + pictureW, pictureH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + ALIGN_UP(crop_crop_x, 2); + ALIGN_UP(crop_crop_y, 2); + +#if 0 + CLOGD2("hwSensorSize (%dx%d), previewSize (%dx%d)", + hwSensorW, hwSensorH, previewW, previewH); + CLOGD2("hwPictureSize (%dx%d), pictureSize (%dx%d)", + hwPictureW, hwPictureH, pictureW, pictureH); + CLOGD2("size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + cropX, cropY, cropW, cropH, zoomLevel); + CLOGD2("size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + CLOGD2("size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = crop_crop_x; + srcRect->y = crop_crop_y; + srcRect->w = crop_crop_w; + srcRect->h = crop_crop_h; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = pictureFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = pictureW; + dstRect->h = pictureH; + dstRect->fullW = pictureW; + dstRect->fullH = pictureH; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcPictureRect(int originW, int originH, ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + float zoomRatio = getZoomRatio(0) / 1000; +#if 0 + int zoom = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; +#endif + /* TODO: check state ready for start */ + pictureFormat = getPictureFormat(); + getPictureSize(&pictureW, &pictureH); + + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(originW, originH, + pictureW, pictureH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + ALIGN_UP(crop_crop_x, 2); + ALIGN_UP(crop_crop_y, 2); + +#if 0 + CLOGD2("originSize (%dx%d) pictureSize (%dx%d)", + originW, originH, pictureW, pictureH); + CLOGD2("size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoom); + CLOGD2("size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = crop_crop_x; + srcRect->y = crop_crop_y; + srcRect->w = crop_crop_w; + srcRect->h = crop_crop_h; + srcRect->fullW = originW; + srcRect->fullH = originH; + srcRect->colorFormat = pictureFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = pictureW; + dstRect->h = pictureH; + dstRect->fullW = pictureW; + dstRect->fullH = pictureH; + 
dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int hwBnsW = 0; + int hwBnsH = 0; + int hwBcropW = 0; + int hwBcropH = 0; + int zoomLevel = 0; + float zoomRatio = 1.00f; + int sizeList[SIZE_LUT_INDEX_END]; + int hwSensorMarginW = 0; + int hwSensorMarginH = 0; + float bnsRatio = 0; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false + || m_staticInfo->previewSizeLut == NULL + || m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId + || (getUsePureBayerReprocessing() == false && + m_cameraInfo.pictureSizeRatioId != m_cameraInfo.previewSizeRatioId) + || m_getPreviewSizeList(sizeList) != NO_ERROR) + return calcPreviewBayerCropSize(srcRect, dstRect); + + /* use LUT */ + hwBnsW = sizeList[BNS_W]; + hwBnsH = sizeList[BNS_H]; + hwBcropW = sizeList[BCROP_W]; + hwBcropH = sizeList[BCROP_H]; + + if (getRecordingHint() == true) { + if (m_cameraInfo.previewSizeRatioId != m_cameraInfo.videoSizeRatioId) { + CLOGV2("preview ratioId(%d) != videoRatioId(%d), use previewRatioId", + m_cameraInfo.previewSizeRatioId, m_cameraInfo.videoSizeRatioId); + } + } + + int curBnsW = 0, curBnsH = 0; + getBnsSize(&curBnsW, &curBnsH); + if (SIZE_RATIO(curBnsW, curBnsH) != SIZE_RATIO(hwBnsW, hwBnsH)) + CLOGW2("current BNS size(%dx%d) is NOT same with Hw BNS size(%dx%d)", + curBnsW, curBnsH, hwBnsW, hwBnsH); + + zoomLevel = getZoomLevel(); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + /* Skip to calculate the crop size with zoom level + * Condition 1 : High-speed camcording D-zoom with External Scaler + * Condition 2 : HAL3 (Service calculates the crop size by itself + */ + int fastFpsMode = getFastFpsMode(); + if ((fastFpsMode > CONFIG_MODE::HIGHSPEED_60 && + fastFpsMode < CONFIG_MODE::MAX && + getZoomPreviewWIthScaler() == true) || + getHalVersion() == IS_HAL_VER_3_2) { + CLOGV2("hwBnsSize (%dx%d), hwBcropSize(%dx%d), fastFpsMode(%d)", + hwBnsW, hwBnsH, + hwBcropW, hwBcropH, + fastFpsMode); + } else { +#if defined(SCALER_MAX_SCALE_UP_RATIO) + /* + * After dividing float & casting int, + * zoomed size can be smaller too much. + * so, when zoom until max, ceil up about floating point. + */ + if (ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN) * SCALER_MAX_SCALE_UP_RATIO < hwBcropW || + ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2) * SCALER_MAX_SCALE_UP_RATIO < hwBcropH) { + hwBcropW = ALIGN_UP((int)ceil((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)ceil((float)hwBcropH / zoomRatio), 2); + } else +#endif + { + hwBcropW = ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2); + } + } + + /* Re-calculate the BNS size for removing Sensor Margin */ + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + m_adjustSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + + hwBnsW = hwBnsW - hwSensorMarginW; + hwBnsH = hwBnsH - hwSensorMarginH; + + /* src */ + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = hwBnsW; + srcRect->h = hwBnsH; + + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + int maxSensorW = 0, maxSensorH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + status_t ret = NO_ERROR; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + getMaxSensorSize(&maxSensorW, &maxSensorH); + + /* 1. 
Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwBnsW / (float) maxSensorW; + scaleRatioY = (float) hwBnsH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwBnsW; + cropRegionH = hwBnsH; + } + + /* 2. Calculate the real crop region with considering the target ratio */ + if ((cropRegionW > hwBcropW) && (cropRegionH > hwBcropH)) { + dstRect->x = ALIGN_DOWN((cropRegionX + ((cropRegionW - hwBcropW) >> 1)), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + ((cropRegionH - hwBcropH) >> 1)), 2); + dstRect->w = hwBcropW; + dstRect->h = hwBcropH; + } else { + CLOGV2("hwBcrop (%d x %d)", hwBcropW, hwBcropH); + ret = getCropRectAlign(cropRegionW, cropRegionH, + hwBcropW, hwBcropH, + &(dstRect->x), &(dstRect->y), + &(dstRect->w), &(dstRect->h), + CAMERA_BCROP_ALIGN, 2, + 0, 0.0f); + dstRect->x = ALIGN_DOWN((cropRegionX + dstRect->x), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + dstRect->y), 2); + } + } else { + if (hwBnsW > hwBcropW) { + dstRect->x = ALIGN_UP(((hwBnsW - hwBcropW) >> 1), 2); + dstRect->w = hwBcropW; + } else { + dstRect->x = 0; + dstRect->w = hwBnsW; + } + + if (hwBnsH > hwBcropH) { + dstRect->y = ALIGN_UP(((hwBnsH - hwBcropH) >> 1), 2); + dstRect->h = hwBcropH; + } else { + dstRect->y = 0; + dstRect->h = hwBnsH; + } + } + + m_setHwBayerCropRegion(dstRect->w, dstRect->h, dstRect->x, dstRect->y); +#ifdef DEBUG_PERFRAME + CLOGD2("zoomLevel=%d", zoomLevel); + CLOGD2("hwBnsSize (%dx%d), hwBcropSize (%d, %d)(%dx%d)", + srcRect->w, srcRect->h, dstRect->x, dstRect->y, dstRect->w, dstRect->h); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0; + int pictureW = 0, pictureH = 0; + int previewW = 0, previewH = 0; + int hwSensorMarginW = 0, hwSensorMarginH = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; +#if 0 + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + int pictureFormat = 0, hwPictureFormat = 0; +#endif + int zoomLevel = 0; + int maxZoomRatio = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = getZoomRatio(0) / 1000; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ +#if 0 + pictureFormat = getPictureFormat(); +#endif + zoomLevel = getZoomLevel(); + maxZoomRatio = getMaxZoomRatio() / 1000; + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + /* This function is disabled, because is not necessary. + m_adjustSensorMargin function use sensorMarginBase[] size, + but, here not using sensorMarginBase[] size. 
+ */ + //m_adjustSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + hwSensorW -= hwSensorMarginW; + hwSensorH -= hwSensorMarginH; + + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + int maxSensorW = 0, maxSensorH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + getMaxSensorSize(&maxSensorW, &maxSensorH); + + /* 1. Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwSensorW / (float) maxSensorW; + scaleRatioY = (float) hwSensorH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwSensorW; + cropRegionH = hwSensorH; + } + + /* 2. Calculate the real crop region with considering the target ratio */ + ret = getCropRectAlign(cropRegionW, cropRegionH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_BCROP_ALIGN, 2, + 0, 0.0f); + + cropX = ALIGN_DOWN((cropRegionX + cropX), 2); + cropY = ALIGN_DOWN((cropRegionY + cropY), 2); + } else { + ret = getCropRectAlign(hwSensorW, hwSensorH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_BCROP_ALIGN, 2, + zoomLevel, zoomRatio); + + cropX = ALIGN_DOWN(cropX, 2); + cropY = ALIGN_DOWN(cropY, 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + } + + if (getUsePureBayerReprocessing() == false + && pictureW > 0 && pictureH > 0) { + int pictureCropX = 0, pictureCropY = 0; + int pictureCropW = 0, pictureCropH = 0; + + zoomLevel = 0; + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + ret = getCropRectAlign(cropW, cropH, + pictureW, pictureH, + &pictureCropX, &pictureCropY, + &pictureCropW, &pictureCropH, + CAMERA_BCROP_ALIGN, 2, + zoomLevel, zoomRatio); + + pictureCropX = ALIGN_DOWN(pictureCropX, 2); + pictureCropY = ALIGN_DOWN(pictureCropY, 2); + pictureCropW = cropW - (pictureCropX * 2); + pictureCropH = cropH - (pictureCropY * 2); + + if (pictureCropW < pictureW / maxZoomRatio || pictureCropH < pictureH / maxZoomRatio) { + CLOGW2("zoom ratio is upto x%d, crop(%dx%d), picture(%dx%d)", maxZoomRatio, cropW, cropH, pictureW, pictureH); + float src_ratio = 1.0f; + float dst_ratio = 1.0f; + /* ex : 1024 / 768 */ + src_ratio = ROUND_OFF_HALF(((float)cropW / (float)cropH), 2); + /* ex : 352 / 288 */ + dst_ratio = ROUND_OFF_HALF(((float)pictureW / (float)pictureH), 2); + + if (dst_ratio <= src_ratio) { + /* shrink w */ + cropX = ALIGN_DOWN(((int)(hwSensorW - ((pictureH / maxZoomRatio) * src_ratio)) >> 1), 2); + cropY = ALIGN_DOWN(((hwSensorH - (pictureH / maxZoomRatio)) >> 1), 2); + } else { + /* shrink h */ + cropX = ALIGN_DOWN(((hwSensorW - (pictureW / maxZoomRatio)) >> 1), 2); + cropY = ALIGN_DOWN(((int)(hwSensorH - ((pictureW / maxZoomRatio) / src_ratio)) >> 1), 2); + } + cropW = ALIGN_UP(hwSensorW - (cropX * 2), CAMERA_BCROP_ALIGN); + cropH = hwSensorH - (cropY * 2); + } + } + +#if 0 + CLOGD2("hwSensorSize (%dx%d), previewSize (%dx%d)", + hwSensorW, hwSensorH, previewW, previewH); + CLOGD2("hwPictureSize (%dx%d), pictureSize (%dx%d)", + hwPictureW, hwPictureH, pictureW, pictureH); + CLOGD2("size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + cropX, cropY, cropW, cropH, zoomLevel); + CLOGD2("size2 cropX = %d, cropY = %d, 
cropW = %d, cropH = %d, zoom = %d", + crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + CLOGD2("size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = hwSensorW; + srcRect->h = hwSensorH; + srcRect->fullW = hwSensorW; + srcRect->fullH = hwSensorH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = cropX; + dstRect->y = cropY; + dstRect->w = cropW; + dstRect->h = cropH; + dstRect->fullW = cropW; + dstRect->fullH = cropH; + dstRect->colorFormat = bayerFormat; + + m_setHwBayerCropRegion(dstRect->w, dstRect->h, dstRect->x, dstRect->y); + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcPreviewDzoomCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int previewW = 0, previewH = 0; + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + + int zoomLevel = 0; + int maxZoomRatio = 0; + float zoomRatio = getZoomRatio(0) / 1000; + + /* TODO: check state ready for start */ + zoomLevel = getZoomLevel(); + maxZoomRatio = getMaxZoomRatio() / 1000; + getHwPreviewSize(&previewW, &previewH); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + ret = getCropRectAlign(srcRect->w, srcRect->h, + previewW, previewH, + &srcRect->x, &srcRect->y, + &srcRect->w, &srcRect->h, + 2, 2, + zoomLevel, zoomRatio); + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = previewW; + dstRect->h = previewH; + + CLOGV2("SRC cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d ratio = %f", srcRect->x, srcRect->y, srcRect->w, srcRect->h, zoomLevel, zoomRatio); + CLOGV2("DST cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d ratio = %f", dstRect->x, dstRect->y, dstRect->w, dstRect->h, zoomLevel, zoomRatio); + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int hwBnsW = 0; + int hwBnsH = 0; + int hwBcropW = 0; + int hwBcropH = 0; + int zoomLevel = 0; + float zoomRatio = 1.00f; + int hwSensorMarginW = 0; + int hwSensorMarginH = 0; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false + || m_staticInfo->pictureSizeLut == NULL + || m_staticInfo->pictureSizeLutMax <= m_cameraInfo.pictureSizeRatioId + || m_cameraInfo.pictureSizeRatioId != m_cameraInfo.previewSizeRatioId) + return calcPictureBayerCropSize(srcRect, dstRect); + + /* use LUT */ + hwBnsW = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BNS_W]; + hwBnsH = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BNS_H]; + hwBcropW = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BCROP_W]; + hwBcropH = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BCROP_H]; + + if (getHalVersion() != IS_HAL_VER_3_2) { + zoomLevel = getZoomLevel(); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + +#if defined(SCALER_MAX_SCALE_UP_RATIO) + /* + * After dividing float & casting int, + * zoomed size can be smaller too much. + * so, when zoom until max, ceil up about floating point. 
+ */ + if (ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN) * SCALER_MAX_SCALE_UP_RATIO < hwBcropW || + ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2) * SCALER_MAX_SCALE_UP_RATIO < hwBcropH) { + hwBcropW = ALIGN_UP((int)ceil((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)ceil((float)hwBcropH / zoomRatio), 2); + } else +#endif + { + hwBcropW = ALIGN_UP((int)((float)hwBcropW / zoomRatio), CAMERA_BCROP_ALIGN); + hwBcropH = ALIGN_UP((int)((float)hwBcropH / zoomRatio), 2); + } + } + + /* Re-calculate the BNS size for removing Sensor Margin. + On Capture Stream(3AA_M2M_Input), the BNS is not used. + So, the BNS ratio is not needed to be considered for sensor margin */ + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + hwBnsW = hwBnsW - hwSensorMarginW; + hwBnsH = hwBnsH - hwSensorMarginH; + + /* src */ + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = hwBnsW; + srcRect->h = hwBnsH; + + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + int maxSensorW = 0, maxSensorH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + status_t ret = NO_ERROR; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + getMaxSensorSize(&maxSensorW, &maxSensorH); + + /* 1. Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwBnsW / (float) maxSensorW; + scaleRatioY = (float) hwBnsH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwBnsW; + cropRegionH = hwBnsH; + } + + /* 2. 
Calculate the real crop region with considering the target ratio */ + if ((cropRegionW > hwBcropW) && (cropRegionH > hwBcropH)) { + dstRect->x = ALIGN_DOWN((cropRegionX + ((cropRegionW - hwBcropW) >> 1)), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + ((cropRegionH - hwBcropH) >> 1)), 2); + dstRect->w = hwBcropW; + dstRect->h = hwBcropH; + } else { + ret = getCropRectAlign(cropRegionW, cropRegionH, + hwBcropW, hwBcropH, + &(dstRect->x), &(dstRect->y), + &(dstRect->w), &(dstRect->h), + CAMERA_MAGIC_ALIGN, 2, + 0, 0.0f); + dstRect->x = ALIGN_DOWN((cropRegionX + dstRect->x), 2); + dstRect->y = ALIGN_DOWN((cropRegionY + dstRect->y), 2); + } + } else { + /* dst */ + if (hwBnsW > hwBcropW) { + dstRect->x = ALIGN_UP(((hwBnsW - hwBcropW) >> 1), 2); + dstRect->w = hwBcropW; + } else { + dstRect->x = 0; + dstRect->w = hwBnsW; + } + + if (hwBnsH > hwBcropH) { + dstRect->y = ALIGN_UP(((hwBnsH - hwBcropH) >> 1), 2); + dstRect->h = hwBcropH; + } else { + dstRect->y = 0; + dstRect->h = hwBnsH; + } + } + +#if DEBUG + CLOGD2("zoomRatio=%f", zoomRatio); + CLOGD2("hwBnsSize (%dx%d), hwBcropSize (%d, %d)(%dx%d)", + srcRect->w, srcRect->h, dstRect->x, dstRect->y, dstRect->w, dstRect->h); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int maxSensorW = 0, maxSensorH = 0; + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0, hwPictureFormat = 0; + int hwSensorCropX = 0, hwSensorCropY = 0; + int hwSensorCropW = 0, hwSensorCropH = 0; + int pictureW = 0, pictureH = 0, pictureFormat = 0; + int previewW = 0, previewH = 0; + int hwSensorMarginW = 0, hwSensorMarginH = 0; + + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + float zoomRatio = 1.0f; + int maxZoomRatio = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ + pictureFormat = getPictureFormat(); + zoomLevel = getZoomLevel(); + maxZoomRatio = getMaxZoomRatio() / 1000; + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getMaxSensorSize(&maxSensorW, &maxSensorH); + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + getSensorMargin(&hwSensorMarginW, &hwSensorMarginH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + hwSensorW -= hwSensorMarginW; + hwSensorH -= hwSensorMarginH; + + if (getUsePureBayerReprocessing() == true) { + if (getHalVersion() == IS_HAL_VER_3_2) { + int cropRegionX = 0, cropRegionY = 0, cropRegionW = 0, cropRegionH = 0; + float scaleRatioX = 0.0f, scaleRatioY = 0.0f; + + m_getCropRegion(&cropRegionX, &cropRegionY, &cropRegionW, &cropRegionH); + + /* 1. 
Scale down the crop region to adjust with the bcrop input size */ + scaleRatioX = (float) hwSensorW / (float) maxSensorW; + scaleRatioY = (float) hwSensorH / (float) maxSensorH; + cropRegionX = (int) (cropRegionX * scaleRatioX); + cropRegionY = (int) (cropRegionY * scaleRatioY); + cropRegionW = (int) (cropRegionW * scaleRatioX); + cropRegionH = (int) (cropRegionH * scaleRatioY); + + if (cropRegionW < 1 || cropRegionH < 1) { + cropRegionW = hwSensorW; + cropRegionH = hwSensorH; + } + + ret = getCropRectAlign(cropRegionW, cropRegionH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + 0, 0.0f); + + cropX = ALIGN_DOWN((cropRegionX + cropX), 2); + cropY = ALIGN_DOWN((cropRegionY + cropY), 2); + } else { + ret = getCropRectAlign(hwSensorW, hwSensorH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + cropX = ALIGN_DOWN(cropX, 2); + cropY = ALIGN_DOWN(cropY, 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + } + + if (cropW < pictureW / maxZoomRatio || cropH < pictureH / maxZoomRatio) { + CLOGW2("zoom ratio is upto x%d, crop(%dx%d), picture(%dx%d)", maxZoomRatio, cropW, cropH, pictureW, pictureH); + cropX = ALIGN_DOWN(((hwSensorW - (pictureW / maxZoomRatio)) >> 1), 2); + cropY = ALIGN_DOWN(((hwSensorH - (pictureH / maxZoomRatio)) >> 1), 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + } + } else { + zoomLevel = 0; + if (getHalVersion() == IS_HAL_VER_3_2) + zoomRatio = 0.0f; + else + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + getHwBayerCropRegion(&hwSensorCropW, &hwSensorCropH, &hwSensorCropX, &hwSensorCropY); + + ret = getCropRectAlign(hwSensorCropW, hwSensorCropH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + cropX = ALIGN_DOWN(cropX, 2); + cropY = ALIGN_DOWN(cropY, 2); + cropW = hwSensorCropW - (cropX * 2); + cropH = hwSensorCropH - (cropY * 2); + + if (cropW < pictureW / maxZoomRatio || cropH < pictureH / maxZoomRatio) { + CLOGW2("zoom ratio is upto x%d, crop(%dx%d), picture(%dx%d)", + maxZoomRatio, cropW, cropH, pictureW, pictureH); + cropX = ALIGN_DOWN(((hwSensorCropW - (pictureW / maxZoomRatio)) >> 1), 2); + cropY = ALIGN_DOWN(((hwSensorCropH - (pictureH / maxZoomRatio)) >> 1), 2); + cropW = hwSensorCropW - (cropX * 2); + cropH = hwSensorCropH - (cropY * 2); + } + } + +#if 1 + CLOGD2("maxSensorSize (%dx%d), hwSensorSize (%dx%d), previewSize (%dx%d)", + maxSensorW, maxSensorH, hwSensorW, hwSensorH, previewW, previewH); + CLOGD2("hwPictureSize (%dx%d), pictureSize (%dx%d)", + hwPictureW, hwPictureH, pictureW, pictureH); + CLOGD2("size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + cropX, cropY, cropW, cropH, zoomLevel); + CLOGD2("size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + CLOGD2("size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = 0; + srcRect->y = 0; + srcRect->w = maxSensorW; + srcRect->h = maxSensorH; + srcRect->fullW = maxSensorW; + srcRect->fullH = maxSensorH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = cropX; + dstRect->y = cropY; + dstRect->w = cropW; + dstRect->h = cropH; + dstRect->fullW = cropW; + dstRect->fullH = cropH; + dstRect->colorFormat = bayerFormat; + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::m_getPreviewBdsSize(ExynosRect *dstRect) +{ + int hwBdsW = 0; + int 
hwBdsH = 0; + int sizeList[SIZE_LUT_INDEX_END]; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false + || m_staticInfo->previewSizeLut == NULL + || m_staticInfo->previewSizeLutMax <= m_cameraInfo.previewSizeRatioId + || m_getPreviewSizeList(sizeList) != NO_ERROR) { + ExynosRect rect; + return calcPreviewBDSSize(&rect, dstRect); + } + + /* use LUT */ + hwBdsW = sizeList[BDS_W]; + hwBdsH = sizeList[BDS_H]; + + if (getRecordingHint() == true) { + int videoW = 0, videoH = 0; + getVideoSize(&videoW, &videoH); + + if (m_cameraInfo.previewSizeRatioId != m_cameraInfo.videoSizeRatioId) + CLOGV2("preview ratioId(%d) != videoRatioId(%d), use previewRatioId", + m_cameraInfo.previewSizeRatioId, m_cameraInfo.videoSizeRatioId); + + if ((videoW == 3840 && videoH == 2160) || (videoW == 2560 && videoH == 1440)) { + hwBdsW = videoW; + hwBdsH = videoH; + } + } + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = hwBdsW; + dstRect->h = hwBdsH; + +#ifdef DEBUG_PERFRAME + CLOGD2("hwBdsSize (%dx%d)", dstRect->w, dstRect->h); +#endif + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getPreviewBdsSize(ExynosRect *dstRect) +{ + status_t ret = NO_ERROR; + + ret = m_getPreviewBdsSize(dstRect); + if (ret != NO_ERROR) { + CLOGE2("calcPreviewBDSSize() fail"); + return ret; + } + + if (this->getHWVdisMode() == true) { + int disW = ALIGN_UP((int)(dstRect->w * HW_VDIS_W_RATIO), 2); + int disH = ALIGN_UP((int)(dstRect->h * HW_VDIS_H_RATIO), 2); + + CLOGV2("HWVdis adjusted BDS Size (%d x %d) -> (%d x %d)", dstRect->w, dstRect->h, disW, disH); + + /* + * check H/W VDIS size(BDS dst size) is too big than bayerCropSize(BDS out size). + */ + ExynosRect bnsSize, bayerCropSize; + + if (getPreviewBayerCropSize(&bnsSize, &bayerCropSize) != NO_ERROR) { + CLOGE2("getPreviewBayerCropSize() fail"); + } else { + if (bayerCropSize.w < disW || bayerCropSize.h < disH) { + CLOGV2("bayerCropSize (%d x %d) is smaller than (%d x %d). 
so force bayerCropSize", + bayerCropSize.w, bayerCropSize.h, disW, disH); + + disW = bayerCropSize.w; + disH = bayerCropSize.h; + } + } + + dstRect->w = disW; + dstRect->h = disH; + } + +#ifdef DEBUG_PERFRAME + CLOGD2("hwBdsSize (%dx%d)", dstRect->w, dstRect->h); +#endif + + return ret; +} + +status_t ExynosCamera3Parameters::calcPreviewBDSSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int hwSensorW = 0, hwSensorH = 0; + int hwPictureW = 0, hwPictureH = 0; + int pictureW = 0, pictureH = 0; + int previewW = 0, previewH = 0; + ExynosRect bnsSize; + ExynosRect bayerCropSize; +#if 0 + int pictureFormat = 0, hwPictureFormat = 0; +#endif + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = 1.0f; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ +#if 0 + pictureFormat = getPictureFormat(); +#endif + zoomLevel = getZoomLevel(); + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + zoomRatio = getZoomRatio(zoomLevel) / 1000; + + getHwSensorSize(&hwSensorW, &hwSensorH); + getPreviewSize(&previewW, &previewH); + + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(hwSensorW, hwSensorH, + previewW, previewH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + zoomRatio = getZoomRatio(0) / 1000; + + ret = getCropRectAlign(cropW, cropH, + previewW, previewH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + cropX = ALIGN_UP(cropX, 2); + cropY = ALIGN_UP(cropY, 2); + cropW = hwSensorW - (cropX * 2); + cropH = hwSensorH - (cropY * 2); + +// ALIGN_UP(crop_crop_x, 2); +// ALIGN_UP(crop_crop_y, 2); + +#if 0 + CLOGD2("hwSensorSize (%dx%d), previewSize (%dx%d)", + hwSensorW, hwSensorH, previewW, previewH); + CLOGD2("hwPictureSize (%dx%d), pictureSize (%dx%d)", + hwPictureW, hwPictureH, pictureW, pictureH); + CLOGD2("size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + cropX, cropY, cropW, cropH, zoomLevel); + CLOGD2("size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + CLOGD2("size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = cropX; + srcRect->y = cropY; + srcRect->w = cropW; + srcRect->h = cropH; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = bayerFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + /* For Front Single Scenario, BDS should not be used */ + if (m_cameraId == CAMERA_ID_FRONT && getDualMode() == false) { + getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + dstRect->w = bayerCropSize.w; + dstRect->h = bayerCropSize.h; + dstRect->fullW = bayerCropSize.w; + dstRect->fullH = bayerCropSize.h; + } else { + dstRect->w = previewW; + dstRect->h = previewH; + dstRect->fullW = previewW; + dstRect->fullH = previewH; + } + + if (dstRect->w > srcRect->w) + dstRect->w = srcRect->w; + if (dstRect->h > srcRect->h) + dstRect->h = srcRect->h; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getPictureBdsSize(ExynosRect *dstRect) +{ + int hwBdsW = 0; + int hwBdsH = 0; + + /* matched ratio LUT is not existed, use equation */ + if (m_useSizeTable == false 
+ || m_staticInfo->pictureSizeLut == NULL + || m_staticInfo->pictureSizeLutMax <= m_cameraInfo.pictureSizeRatioId) { + ExynosRect rect; + return calcPictureBDSSize(&rect, dstRect); + } + + /* use LUT */ + hwBdsW = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BDS_W]; + hwBdsH = m_staticInfo->pictureSizeLut[m_cameraInfo.pictureSizeRatioId][BDS_H]; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = hwBdsW; + dstRect->h = hwBdsH; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getFastenAeStableSensorSize(int *hwSensorW, int *hwSensorH) +{ + *hwSensorW = m_staticInfo->videoSizeLutHighSpeed[0][SENSOR_W]; + *hwSensorH = m_staticInfo->videoSizeLutHighSpeed[0][SENSOR_H]; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getFastenAeStableBcropSize(int *hwBcropW, int *hwBcropH) +{ + *hwBcropW = m_staticInfo->videoSizeLutHighSpeed[0][BCROP_W]; + *hwBcropH = m_staticInfo->videoSizeLutHighSpeed[0][BCROP_H]; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::getFastenAeStableBdsSize(int *hwBdsW, int *hwBdsH) +{ + *hwBdsW = m_staticInfo->videoSizeLutHighSpeed[0][BDS_W]; + *hwBdsH = m_staticInfo->videoSizeLutHighSpeed[0][BDS_H]; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcPictureBDSSize(ExynosRect *srcRect, ExynosRect *dstRect) +{ + int ret = 0; + + int maxSensorW = 0, maxSensorH = 0; + int hwPictureW = 0, hwPictureH = 0; + int pictureW = 0, pictureH = 0; + int previewW = 0, previewH = 0; +#if 0 + int pictureFormat = 0, hwPictureFormat = 0; +#endif + int cropX = 0, cropY = 0; + int cropW = 0, cropH = 0; + int crop_crop_x = 0, crop_crop_y = 0; + int crop_crop_w = 0, crop_crop_h = 0; + + int zoomLevel = 0; + int bayerFormat = CAMERA_BAYER_FORMAT; + float zoomRatio = 1.0f; + +#ifdef DEBUG_RAWDUMP + if (checkBayerDumpEnable()) { + bayerFormat = CAMERA_DUMP_BAYER_FORMAT; + } +#endif + + /* TODO: check state ready for start */ +#if 0 + pictureFormat = getPictureFormat(); +#endif + zoomLevel = getZoomLevel(); + getHwPictureSize(&hwPictureW, &hwPictureH); + getPictureSize(&pictureW, &pictureH); + + getMaxSensorSize(&maxSensorW, &maxSensorH); + getPreviewSize(&previewW, &previewH); + + zoomRatio = getZoomRatio(zoomLevel) / 1000; + /* TODO: get crop size from ctlMetadata */ + ret = getCropRectAlign(maxSensorW, maxSensorH, + pictureW, pictureH, + &cropX, &cropY, + &cropW, &cropH, + CAMERA_MAGIC_ALIGN, 2, + zoomLevel, zoomRatio); + + zoomRatio = getZoomRatio(0) / 1000; + ret = getCropRectAlign(cropW, cropH, + pictureW, pictureH, + &crop_crop_x, &crop_crop_y, + &crop_crop_w, &crop_crop_h, + 2, 2, + 0, zoomRatio); + + cropX = ALIGN_UP(cropX, 2); + cropY = ALIGN_UP(cropY, 2); + cropW = maxSensorW - (cropX * 2); + cropH = maxSensorH - (cropY * 2); + +// ALIGN_UP(crop_crop_x, 2); +// ALIGN_UP(crop_crop_y, 2); + +#if 0 + CLOGD2("SensorSize (%dx%d), previewSize (%dx%d)", + maxSensorW, maxSensorH, previewW, previewH); + CLOGD2("hwPictureSize (%dx%d), pictureSize (%dx%d)", + hwPictureW, hwPictureH, pictureW, pictureH); + CLOGD2("size cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + cropX, cropY, cropW, cropH, zoomLevel); + CLOGD2("size2 cropX = %d, cropY = %d, cropW = %d, cropH = %d, zoom = %d", + crop_crop_x, crop_crop_y, crop_crop_w, crop_crop_h, zoomLevel); + CLOGD2("size pictureFormat = 0x%x, JPEG_INPUT_COLOR_FMT = 0x%x", + pictureFormat, JPEG_INPUT_COLOR_FMT); +#endif + + srcRect->x = cropX; + srcRect->y = cropY; + srcRect->w = cropW; + srcRect->h = cropH; + srcRect->fullW = cropW; + srcRect->fullH = cropH; + srcRect->colorFormat = 
bayerFormat; + + dstRect->x = 0; + dstRect->y = 0; + dstRect->w = pictureW; + dstRect->h = pictureH; + dstRect->fullW = pictureW; + dstRect->fullH = pictureH; + dstRect->colorFormat = JPEG_INPUT_COLOR_FMT; + + if (dstRect->w > srcRect->w) + dstRect->w = srcRect->w; + if (dstRect->h > srcRect->h) + dstRect->h = srcRect->h; + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcNormalToTpuSize(int srcW, int srcH, int *dstW, int *dstH) +{ + status_t ret = NO_ERROR; + if (srcW < 0 || srcH < 0) { + CLOGE2("src size is invalid(%d x %d)", srcW, srcH); + return INVALID_OPERATION; + } + + int disW = ALIGN_UP((int)(srcW * HW_VDIS_W_RATIO), 2); + int disH = ALIGN_UP((int)(srcH * HW_VDIS_H_RATIO), 2); + + *dstW = disW; + *dstH = disH; + CLOGD2("HWVdis adjusted BDS Size (%d x %d) -> (%d x %d)", srcW, srcH, disW, disH); + + return NO_ERROR; +} + +status_t ExynosCamera3Parameters::calcTpuToNormalSize(int srcW, int srcH, int *dstW, int *dstH) +{ + status_t ret = NO_ERROR; + if (srcW < 0 || srcH < 0) { + CLOGE2("src size is invalid(%d x %d)", srcW, srcH); + return INVALID_OPERATION; + } + + int disW = ALIGN_DOWN((int)(srcW / HW_VDIS_W_RATIO), 2); + int disH = ALIGN_DOWN((int)(srcH / HW_VDIS_H_RATIO), 2); + + *dstW = disW; + *dstH = disH; + CLOGD2("HWVdis adjusted BDS Size (%d x %d) -> (%d x %d)", srcW, srcH, disW, disH); + + return ret; +} + +void ExynosCamera3Parameters::setUsePureBayerReprocessing(bool enable) +{ + m_usePureBayerReprocessing = enable; +} + +bool ExynosCamera3Parameters::getUsePureBayerReprocessing(void) +{ + int oldMode = m_usePureBayerReprocessing; + + if (getRecordingHint() == true) { + if (getDualMode() == true) + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_DUAL_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_DUAL_RECORDING; + else + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_RECORDING : USE_PURE_BAYER_REPROCESSING_FRONT_ON_RECORDING; + } else { + if (getDualMode() == true) + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING_ON_DUAL : USE_PURE_BAYER_REPROCESSING_FRONT_ON_DUAL; + else + m_usePureBayerReprocessing = (getCameraId() == CAMERA_ID_BACK) ? USE_PURE_BAYER_REPROCESSING : USE_PURE_BAYER_REPROCESSING_FRONT; + } + + if (oldMode != m_usePureBayerReprocessing) { + CLOGD2("bayer usage is changed (%d -> %d)", oldMode, m_usePureBayerReprocessing); + } + + return m_usePureBayerReprocessing; +} + +bool ExynosCamera3Parameters::isUseYuvReprocessing(void) +{ + bool ret = false; + +#ifdef USE_YUV_REPROCESSING + ret = USE_YUV_REPROCESSING; +#endif + + return ret; +} + +bool ExynosCamera3Parameters::isUseYuvReprocessingForThumbnail(void) +{ + bool ret = false; + +#ifdef USE_YUV_REPROCESSING_FOR_THUMBNAIL + if (isUseYuvReprocessing() == true) + ret = USE_YUV_REPROCESSING_FOR_THUMBNAIL; +#endif + + return ret; +} + +int32_t ExynosCamera3Parameters::getReprocessingBayerMode(void) +{ + int32_t mode = REPROCESSING_BAYER_MODE_NONE; + bool useDynamicBayer = (getRecordingHint() == true || getDualRecordingHint() == true) ? 
+ getUseDynamicBayerVideoSnapShot() : getUseDynamicBayer(); + + if (isReprocessing() == false) + return mode; + + if (useDynamicBayer == true) { + if (getUsePureBayerReprocessing() == true) + mode = REPROCESSING_BAYER_MODE_PURE_DYNAMIC; + else + mode = REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC; + } else { + if (getUsePureBayerReprocessing() == true) + mode = REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON; + else + mode = REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON; + } + + return mode; +} + +void ExynosCamera3Parameters::setAdaptiveCSCRecording(bool enable) +{ + m_useAdaptiveCSCRecording = enable; +} + +bool ExynosCamera3Parameters::getAdaptiveCSCRecording(void) +{ + return m_useAdaptiveCSCRecording; +} + +bool ExynosCamera3Parameters::doCscRecording(void) +{ + bool ret = true; + int hwPreviewW = 0, hwPreviewH = 0; + int videoW = 0, videoH = 0; + + getHwPreviewSize(&hwPreviewW, &hwPreviewH); + getVideoSize(&videoW, &videoH); + CLOGV2("hwPreviewSize = %d x %d", hwPreviewW, hwPreviewH); + CLOGV2("VideoSize = %d x %d", videoW, videoH); + + if (((videoW == 3840 && videoH == 2160) || (videoW == 1920 && videoH == 1080) || (videoW == 2560 && videoH == 1440)) + && m_useAdaptiveCSCRecording == true + && videoW == hwPreviewW + && videoH == hwPreviewH) { + ret = false; + } + + return ret; +} + +int ExynosCamera3Parameters::getHalPixelFormat(void) +{ + int setfile = 0; + int yuvRange = 0; + int previewFormat = getHwPreviewFormat(); + int halFormat = 0; + + m_getSetfileYuvRange(false, &setfile, &yuvRange); + + halFormat = convertingHalPreviewFormat(previewFormat, yuvRange); + + return halFormat; +} + +#if (TARGET_ANDROID_VER_MAJ >= 4 && TARGET_ANDROID_VER_MIN >= 4) +int ExynosCamera3Parameters::convertingHalPreviewFormat(int previewFormat, int yuvRange) +{ + int halFormat = 0; + + switch (previewFormat) { + case V4L2_PIX_FMT_NV21: + CLOGD2("preview format NV21"); + halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; + break; + case V4L2_PIX_FMT_NV21M: + CLOGD2("preview format NV21M"); + if (yuvRange == YUV_FULL_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL; + } else if (yuvRange == YUV_LIMITED_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + } else { + CLOGW2("invalid yuvRange, force set to full range"); + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL; + } + break; + case V4L2_PIX_FMT_YVU420: + CLOGD2("DEBUG(%s[%d]): preview format YVU420"); + halFormat = HAL_PIXEL_FORMAT_YV12; + break; + case V4L2_PIX_FMT_YVU420M: + CLOGD2("preview format YVU420M"); + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12_M; + break; + default: + CLOGE2("unknown preview format(%d)", previewFormat); + break; + } + + return halFormat; +} +#else +int ExynosCamera3Parameters::convertingHalPreviewFormat(int previewFormat, int yuvRange) +{ + int halFormat = 0; + + switch (previewFormat) { + case V4L2_PIX_FMT_NV21: + halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; + break; + case V4L2_PIX_FMT_NV21M: + if (yuvRange == YUV_FULL_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL; + } else if (yuvRange == YUV_LIMITED_RANGE) { + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; + } else { + CLOGW2("invalid yuvRange, force set to full range"); + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL; + } + break; + case V4L2_PIX_FMT_YVU420: + halFormat = HAL_PIXEL_FORMAT_YV12; + break; + case V4L2_PIX_FMT_YVU420M: + halFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12; + break; + default: + CLOGE2("unknown preview format(%d)", previewFormat); + break; + } + + return halFormat; +} +#endif + +void 
ExynosCamera3Parameters::setDvfsLock(bool lock) { + m_dvfsLock = lock; +} + +bool ExynosCamera3Parameters::getDvfsLock(void) { + return m_dvfsLock; +} + +#ifdef DEBUG_RAWDUMP +bool ExynosCamera3Parameters::checkBayerDumpEnable(void) +{ +#ifndef RAWDUMP_CAPTURE + char enableRawDump[PROPERTY_VALUE_MAX]; + property_get("ro.debug.rawdump", enableRawDump, "0"); + + if (strcmp(enableRawDump, "1") == 0) { + /*CLOGD("checkBayerDumpEnable : 1");*/ + return true; + } else { + /*CLOGD("checkBayerDumpEnable : 0");*/ + return false; + } +#endif + return true; +} +#endif /* DEBUG_RAWDUMP */ + +bool ExynosCamera3Parameters::setConfig(struct ExynosConfigInfo* config) +{ + memcpy(m_exynosconfig, config, sizeof(struct ExynosConfigInfo)); + setConfigMode(m_exynosconfig->mode); + return true; +} +struct ExynosConfigInfo* ExynosCamera3Parameters::getConfig() +{ + return m_exynosconfig; +} + +bool ExynosCamera3Parameters::setConfigMode(uint32_t mode) +{ + bool ret = false; + switch(mode){ + case CONFIG_MODE::NORMAL: + case CONFIG_MODE::HIGHSPEED_60: + case CONFIG_MODE::HIGHSPEED_120: + case CONFIG_MODE::HIGHSPEED_240: + m_exynosconfig->current = &m_exynosconfig->info[mode]; + m_exynosconfig->mode = mode; + ret = true; + break; + default: + CLOGE2("unknown config mode (%d)", mode); + } + return ret; +} + +int ExynosCamera3Parameters::getConfigMode() +{ + int ret = -1; + switch(m_exynosconfig->mode){ + case CONFIG_MODE::NORMAL: + case CONFIG_MODE::HIGHSPEED_60: + case CONFIG_MODE::HIGHSPEED_120: + case CONFIG_MODE::HIGHSPEED_240: + ret = m_exynosconfig->mode; + break; + default: + CLOGE2("unknown config mode (%d)", m_exynosconfig->mode); + } + + return ret; +} + +void ExynosCamera3Parameters::setZoomActiveOn(bool enable) +{ + m_zoom_activated = enable; +} + +bool ExynosCamera3Parameters::getZoomActiveOn(void) +{ + return m_zoom_activated; +} + +status_t ExynosCamera3Parameters::setMarkingOfExifFlash(int flag) +{ + m_firing_flash_marking = flag; + + return NO_ERROR; +} + +int ExynosCamera3Parameters::getMarkingOfExifFlash(void) +{ + return m_firing_flash_marking; +} + +bool ExynosCamera3Parameters::increaseMaxBufferOfPreview(void) +{ + if((getShotMode() == SHOT_MODE_BEAUTY_FACE)||(getShotMode() == SHOT_MODE_FRONT_PANORAMA) + ) { + return true; + } else { + return false; + } +} + +bool ExynosCamera3Parameters::getSensorOTFSupported(void) +{ + return m_staticInfo->flite3aaOtfSupport; +} + +bool ExynosCamera3Parameters::isReprocessing(void) +{ + bool reprocessing = false; + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { +#if defined(MAIN_CAMERA_DUAL_REPROCESSING) && defined(MAIN_CAMERA_SINGLE_REPROCESSING) + reprocessing = (flagDual == true) ? MAIN_CAMERA_DUAL_REPROCESSING : MAIN_CAMERA_SINGLE_REPROCESSING; +#else + CLOGW2("MAIN_CAMERA_DUAL_REPROCESSING/MAIN_CAMERA_SINGLE_REPROCESSING is not defined"); +#endif + } else { +#if defined(FRONT_CAMERA_DUAL_REPROCESSING) && defined(FRONT_CAMERA_SINGLE_REPROCESSING) + reprocessing = (flagDual == true) ? FRONT_CAMERA_DUAL_REPROCESSING : FRONT_CAMERA_SINGLE_REPROCESSING; +#else + CLOGW2("FRONT_CAMERA_DUAL_REPROCESSING/FRONT_CAMERA_SINGLE_REPROCESSING is not defined"); +#endif + } + + return reprocessing; +} + +bool ExynosCamera3Parameters::isSccCapture(void) +{ + bool sccCapture = false; + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { +#if defined(MAIN_CAMERA_DUAL_SCC_CAPTURE) && defined(MAIN_CAMERA_SINGLE_SCC_CAPTURE) + sccCapture = (flagDual == true) ? 
MAIN_CAMERA_DUAL_SCC_CAPTURE : MAIN_CAMERA_SINGLE_SCC_CAPTURE; +#else + CLOGW2("MAIN_CAMERA_DUAL_SCC_CAPTURE/MAIN_CAMERA_SINGLE_SCC_CAPTUREis not defined"); +#endif + } else { +#if defined(FRONT_CAMERA_DUAL_SCC_CAPTURE) && defined(FRONT_CAMERA_SINGLE_SCC_CAPTURE) + sccCapture = (flagDual == true) ? FRONT_CAMERA_DUAL_SCC_CAPTURE : FRONT_CAMERA_SINGLE_SCC_CAPTURE; +#else + CLOGW2("FRONT_CAMERA_DUAL_SCC_CAPTURE/FRONT_CAMERA_SINGLE_SCC_CAPTURE is not defined"); +#endif + } + + return sccCapture; +} + +bool ExynosCamera3Parameters::isFlite3aaOtf(void) +{ + bool flagOtfInput = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + bool flagSensorOtf = getSensorOTFSupported(); + + if (flagSensorOtf == false) { + return flagOtfInput; + } + + if (cameraId == CAMERA_ID_BACK) { + /* for 52xx scenario */ + flagOtfInput = true; + + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_FLITE_3AA_OTF + flagOtfInput = MAIN_CAMERA_DUAL_FLITE_3AA_OTF; +#else + CLOGW2("MAIN_CAMERA_DUAL_FLITE_3AA_OTF is not defined"); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_FLITE_3AA_OTF + flagOtfInput = MAIN_CAMERA_SINGLE_FLITE_3AA_OTF; +#else + CLOGW2("MAIN_CAMERA_SINGLE_FLITE_3AA_OTF is not defined"); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_FLITE_3AA_OTF + flagOtfInput = FRONT_CAMERA_DUAL_FLITE_3AA_OTF; +#else + CLOGW2("FRONT_CAMERA_DUAL_FLITE_3AA_OTF is not defined"); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_FLITE_3AA_OTF + flagOtfInput = FRONT_CAMERA_SINGLE_FLITE_3AA_OTF; +#else + CLOGW2("FRONT_CAMERA_SINGLE_FLITE_3AA_OTF is not defined"); +#endif + } + } + + return flagOtfInput; +} + +bool ExynosCamera3Parameters::is3aaIspOtf(void) +{ + bool ret = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_3AA_ISP_OTF + ret = MAIN_CAMERA_DUAL_3AA_ISP_OTF; +#else + CLOGW2("MAIN_CAMERA_DUAL_3AA_ISP_OTF is not defined"); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_3AA_ISP_OTF + ret = MAIN_CAMERA_SINGLE_3AA_ISP_OTF; +#else + CLOGW2("MAIN_CAMERA_SINGLE_3AA_ISP_OTF is not defined"); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_3AA_ISP_OTF + ret = FRONT_CAMERA_DUAL_3AA_ISP_OTF; +#else + CLOGW2("FRONT_CAMERA_DUAL_3AA_ISP_OTF is not defined"); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_3AA_ISP_OTF + ret = FRONT_CAMERA_SINGLE_3AA_ISP_OTF; +#else + CLOGW2("FRONT_CAMERA_SINGLE_3AA_ISP_OTF is not defined"); +#endif + } + } + + return ret; +} + +bool ExynosCamera3Parameters::isIspMcscOtf(void) +{ + bool ret = true; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_ISP_MCSC_OTF + ret = MAIN_CAMERA_DUAL_ISP_MCSC_OTF; +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_ISP_MCSC_OTF + ret = MAIN_CAMERA_SINGLE_ISP_MCSC_OTF; +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_ISP_MCSC_OTF + ret = FRONT_CAMERA_DUAL_ISP_MCSC_OTF; +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_ISP_MCSC_OTF + ret = FRONT_CAMERA_SINGLE_ISP_MCSC_OTF; +#endif + } + } + + return ret; +} + +bool ExynosCamera3Parameters::isMcscVraOtf(void) +{ + bool ret = true; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_MCSC_VRA_OTF + ret = MAIN_CAMERA_DUAL_MCSC_VRA_OTF; +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_MCSC_VRA_OTF + ret = 
MAIN_CAMERA_SINGLE_MCSC_VRA_OTF; +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_MCSC_VRA_OTF + ret = FRONT_CAMERA_DUAL_MCSC_VRA_OTF; +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_MCSC_VRA_OTF + ret = FRONT_CAMERA_SINGLE_MCSC_VRA_OTF; +#endif + } + } + + return ret; +} + +bool ExynosCamera3Parameters::isReprocessing3aaIspOTF(void) +{ + bool otf = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING + otf = MAIN_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING; +#else + CLOGW2("MAIN_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING is not defined"); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING + otf = MAIN_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING; +#else + CLOGW("MAIN_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING is not defined"); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING + otf = FRONT_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING; +#else + CLOGW2("FRONT_CAMERA_DUAL_3AA_ISP_OTF_REPROCESSING is not defined"); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING + otf = FRONT_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING; +#else + CLOGW2("FRONT_CAMERA_SINGLE_3AA_ISP_OTF_REPROCESSING is not defined"); +#endif + } + } + + if (otf == true) { + bool flagDirtyBayer = false; + + int reprocessingBayerMode = this->getReprocessingBayerMode(); + switch(reprocessingBayerMode) { + case REPROCESSING_BAYER_MODE_NONE: + case REPROCESSING_BAYER_MODE_PURE_ALWAYS_ON: + case REPROCESSING_BAYER_MODE_PURE_DYNAMIC: + flagDirtyBayer = false; + break; + case REPROCESSING_BAYER_MODE_DIRTY_ALWAYS_ON: + case REPROCESSING_BAYER_MODE_DIRTY_DYNAMIC: + default: + flagDirtyBayer = true; + break; + } + + if (flagDirtyBayer == true) { + CLOGW2("otf == true. but, flagDirtyBayer == true. 
so force false on 3aa_isp otf"); + + otf = false; + } + } + + return otf; +} + +bool ExynosCamera3Parameters::isReprocessingIspMcscOTF(void) +{ + bool otf = false; + + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + + if (isUseYuvReprocessing() == false) { + if (cameraId == CAMERA_ID_BACK) { + if (flagDual == true) { +#ifdef MAIN_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING + otf = MAIN_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef MAIN_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING + otf = MAIN_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): MAIN_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + } else { + if (flagDual == true) { +#ifdef FRONT_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING + otf = FRONT_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_DUAL_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } else { +#ifdef FRONT_CAMERA_SINGLE_3AA_OTF_MCSC_REPROCESSING + otf = FRONT_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING; +#else + ALOGW("WRN(%s[%d]): FRONT_CAMERA_SINGLE_ISP_MCSC_OTF_REPROCESSING is not defined", __FUNCTION__, __LINE__); +#endif + } + } + } + + return otf; +} + +bool ExynosCamera3Parameters::isHWFCEnabled(void) +{ +#if defined(USE_JPEG_HWFC) + return USE_JPEG_HWFC; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::isHWFCOnDemand(void) +{ +#if defined(USE_JPEG_HWFC_ONDEMAND) + return USE_JPEG_HWFC_ONDEMAND; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::isUseThumbnailHWFC(void) +{ +#if defined(USE_THUMBNAIL_HWFC) + return USE_JPEG_HWFC_ONDEMAND; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::getSupportedZoomPreviewWIthScaler(void) +{ + bool ret = false; + int cameraId = getCameraId(); + bool flagDual = getDualMode(); + int fastFpsMode = getFastFpsMode(); + int vrMode = getVRMode(); + + if (cameraId == CAMERA_ID_BACK) { + if (fastFpsMode > CONFIG_MODE::HIGHSPEED_60 && + fastFpsMode < CONFIG_MODE::MAX && + vrMode != 1) { + ret = true; + } + } else { + if (flagDual == true) { + ret = true; + } + } + + return ret; +} + +void ExynosCamera3Parameters::setZoomPreviewWIthScaler(bool enable) +{ + m_zoomWithScaler = enable; +} + +bool ExynosCamera3Parameters::getZoomPreviewWIthScaler(void) +{ + return m_zoomWithScaler; +} + +bool ExynosCamera3Parameters::isOwnScc(int cameraId) +{ + bool ret = false; + + if (cameraId == CAMERA_ID_BACK) { +#ifdef MAIN_CAMERA_HAS_OWN_SCC + ret = MAIN_CAMERA_HAS_OWN_SCC; +#else + CLOGW2("MAIN_CAMERA_HAS_OWN_SCC is not defined"); +#endif + } else { +#ifdef FRONT_CAMERA_HAS_OWN_SCC + ret = FRONT_CAMERA_HAS_OWN_SCC; +#else + CLOGW2("FRONT_CAMERA_HAS_OWN_SCC is not defined"); +#endif + } + + return ret; +} + +bool ExynosCamera3Parameters::isOwnMCSC(void) +{ + bool ret = false; + +#ifdef OWN_MCSC_HW + ret = OWN_MCSC_HW; +#endif + + return ret; +} + +bool ExynosCamera3Parameters::isCompanion(int cameraId) +{ + bool ret = false; + + if (cameraId == CAMERA_ID_BACK) { + CLOGI2("MAIN_CAMERA_USE_SAMSUNG_COMPANION is not defined"); + } else { + CLOGI2("FRONT_CAMERA_USE_SAMSUNG_COMPANION is not defined"); + } + + return ret; +} + +int ExynosCamera3Parameters::getHalVersion(void) +{ + return m_halVersion; +} + +void ExynosCamera3Parameters::setHalVersion(int halVersion) +{ + m_halVersion = halVersion; + m_activityControl->setHalVersion(m_halVersion); + + 
CLOGI2("m_halVersion(%d)", m_halVersion); + + return; +} + +struct ExynosSensorInfoBase *ExynosCamera3Parameters::getSensorStaticInfo() +{ + return m_staticInfo; +} + +bool ExynosCamera3Parameters::getSetFileCtlMode(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL + return true; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::getSetFileCtl3AA_ISP(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_3AA_ISP + return SET_SETFILE_BY_SET_CTRL_3AA_ISP; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::getSetFileCtl3AA(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_3AA + return SET_SETFILE_BY_SET_CTRL_3AA; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::getSetFileCtlISP(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_ISP + return SET_SETFILE_BY_SET_CTRL_ISP; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::getSetFileCtlSCP(void) +{ +#ifdef SET_SETFILE_BY_SET_CTRL_SCP + return SET_SETFILE_BY_SET_CTRL_SCP; +#else + return false; +#endif +} + +bool ExynosCamera3Parameters::isUsing3acForIspc(void) +{ +#if (defined(USE_3AC_FOR_ISPC) && (USE_3AC_FOR_ISPC)) + return true; +#else + return false; +#endif +} + +void ExynosCamera3Parameters::m_getV4l2Name(char* colorName, size_t length, int colorFormat) +{ + size_t index = 0; + if (colorName == NULL) { + CLOGE("ERR(%s[%d]):colorName is NULL", __FUNCTION__, __LINE__); + return; + } + + for (index = 0; index < length-1; index++) { + colorName[index] = colorFormat & 0xff; + colorFormat = colorFormat >> 8; + } + colorName[index] = '\0'; +} + +int32_t ExynosCamera3Parameters::getYuvStreamMaxNum(void) +{ + int32_t yuvStreamMaxNum = -1; + + if (m_staticInfo == NULL) { + CLOGE("ERR(%s[%d]):m_staticInfo is NULL", + __FUNCTION__, __LINE__); + + return INVALID_OPERATION; + } + + yuvStreamMaxNum = m_staticInfo->maxNumOutputStreams[PROCESSED]; + if (yuvStreamMaxNum < 0) { + CLOGE("ERR(%s[%d]):Invalid MaxNumOutputStreamsProcessed %d", + __FUNCTION__, __LINE__, yuvStreamMaxNum); + return BAD_VALUE; + } + + return yuvStreamMaxNum; +} + +status_t ExynosCamera3Parameters::setYuvBufferCount(const int count, const int index) +{ + if (count < 0 || count > VIDEO_MAX_FRAME + || index < 0 || index > m_staticInfo->maxNumOutputStreams[PROCESSED]) { + CLOGE("ERR(%s[%d]):Invalid argument. count %d index %d", + __FUNCTION__, __LINE__, count, index); + + return BAD_VALUE; + } + + m_yuvBufferCount[index] = count; + + return NO_ERROR; +} + +int ExynosCamera3Parameters::getYuvBufferCount(const int index) +{ + if (index < 0 || index > m_staticInfo->maxNumOutputStreams[PROCESSED]) { + CLOGE("ERR(%s[%d]):Invalid index %d", + __FUNCTION__, __LINE__, index); + return 0; + } + + return m_yuvBufferCount[index]; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCamera3Parameters.h b/libcamera/34xx/hal3/ExynosCamera3Parameters.h new file mode 100644 index 0000000..e3e353a --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCamera3Parameters.h @@ -0,0 +1,965 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_3_PARAMETERS_H +#define EXYNOS_CAMERA_3_PARAMETERS_H + +#include "ExynosCameraConfig.h" +#include "ExynosCameraParameters.h" +#include "ExynosCamera3SensorInfo.h" +#include "ExynosCameraUtilsModule.h" + +#define V4L2_FOURCC_LENGTH 5 + +namespace android { + +class ExynosCamera3Parameters : public ExynosCameraParameters { +public: + /* Constructor */ + ExynosCamera3Parameters(int cameraId); + + /* Destructor */ + virtual ~ExynosCamera3Parameters(); + + /* Create the instance */ + bool create(int cameraId); + /* Destroy the instance */ + bool destroy(void); + /* Check if the instance was created */ + bool flagCreate(void); + + void setDefaultCameraInfo(void); + void setDefaultParameter(void); + status_t m_initDefaultInfo(void); + +public: + status_t checkPreviewSize(int previewW, int previewH); + status_t checkPictureSize(int newPictureW, int newPictureH); + status_t checkJpegQuality(int quality); + status_t checkThumbnailSize(int thumbnailW, int thumbnailH); + status_t checkThumbnailQuality(int quality); + status_t checkVideoSize(int newVideoW, int newVideoH); + status_t checkCallbackSize(int callbackW, int callbackH); + status_t checkCallbackFormat(int callbackFormat); + status_t checkYuvSize(const int width, const int height, const int outputPortId); + status_t checkYuvFormat(const int format, const int outputPortId); + status_t checkPreviewFpsRange(uint32_t minFps, uint32_t maxFps); + + void getYuvSize(int *width, int *height, const int outputPortId); + int getYuvFormat(const int outputPortId); + + status_t calcPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcHighResolutionPreviewGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcRecordingGSCRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureRect(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureRect(int originW, int originH, ExynosRect *srcRect, ExynosRect *dstRect); + + status_t getPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t getPreviewBdsSize(ExynosRect *dstRect); + status_t getPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t getPictureBdsSize(ExynosRect *dstRect); + status_t getFastenAeStableSensorSize(int *hwSensorW, int *hwSensorH); + status_t getFastenAeStableBcropSize(int *hwBcropW, int *hwBcropH); + status_t getFastenAeStableBdsSize(int *hwBdsW, int *hwBdsH); + + status_t calcPreviewBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureBayerCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPreviewBDSSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcPictureBDSSize(ExynosRect *srcRect, ExynosRect *dstRect); + status_t calcNormalToTpuSize(int srcW, int srcH, int *dstW, int *dstH); + status_t calcTpuToNormalSize(int srcW, int srcH, int *dstW, int *dstH); + status_t calcPreviewDzoomCropSize(ExynosRect *srcRect, ExynosRect *dstRect); + /* Sets the auto-exposure lock state. */ + void m_setAutoExposureLock(bool lock); + /* Sets the auto-white balance lock state. */ + void m_setAutoWhiteBalanceLock(bool value); + +private: + /* Sets the dimensions for preview pictures. */ + void m_setPreviewSize(int w, int h); + /* Sets the image format for preview pictures. */ + void m_setPreviewFormat(int colorFormat); + /* Sets the dimensions for pictures. 
*/ + void m_setPictureSize(int w, int h); + /* Sets the image format for picture-related HW. */ + void m_setHwPictureFormat(int colorFormat); + /* Sets video's width, height */ + void m_setVideoSize(int w, int h); + /* Sets video's color format */ + void m_setVideoFormat(int colorFormat); + /* Sets the dimensions for callback pictures. */ + void m_setCallbackSize(int w, int h); + /* Sets the image format for callback pictures. */ + void m_setCallbackFormat(int colorFormat); + void m_setYuvSize(const int width, const int height, const int index); + void m_setYuvFormat(const int format, const int index); + + /* Sets the dimensions for Sesnor-related HW. */ + void m_setHwSensorSize(int w, int h); + /* Sets the dimensions for preview-related HW. */ + void m_setHwPreviewSize(int w, int h); + /* Sets the image format for preview-related HW. */ + void m_setHwPreviewFormat(int colorFormat); + /* Sets the dimensions for picture-related HW. */ + void m_setHwPictureSize(int w, int h); + /* Sets HW Bayer Crop Size */ + void m_setHwBayerCropRegion(int w, int h, int x, int y); + /* Sets the antibanding. */ + void m_setAntibanding(int value); + /* Sets the current color effect setting. */ + void m_setColorEffectMode(int effect); + /* Sets the exposure compensation index. */ + void m_setExposureCompensation(int value); + /* Sets the flash mode. */ + void m_setFlashMode(int flashMode); + /* Sets focus areas. */ + void m_setFocusAreas(uint32_t numValid, ExynosRect* rects, int *weights); + /* Sets focus areas. (Using ExynosRect2) */ + void m_setFocusAreas(uint32_t numValid, ExynosRect2* rect2s, int *weights); + /* Sets the focus mode. */ + void m_setFocusMode(int focusMode); + /* Sets Jpeg quality of captured picture. */ + void m_setJpegQuality(int quality); + /* Sets the quality of the EXIF thumbnail in Jpeg picture. */ + void m_setThumbnailQuality(int quality); + /* Sets the dimensions for EXIF thumbnail in Jpeg picture. */ + void m_setThumbnailSize(int w, int h); + /* Sets metering areas. */ + void m_setMeteringAreas(uint32_t num, ExynosRect *rects, int *weights); + /* Sets metering areas.(Using ExynosRect2) */ + void m_setMeteringAreas(uint32_t num, ExynosRect2 *rect2s, int *weights); + /* Sets the frame rate range for preview. */ + void m_setPreviewFpsRange(uint32_t min, uint32_t max); + /* Sets the scene mode. */ + void m_setSceneMode(int sceneMode); + /* Enables and disables video stabilization. */ + void m_setVideoStabilization(bool stabilization); + /* Sets the white balance. */ + status_t m_setWhiteBalanceMode(int whiteBalance); + /* Sets Bayer Crop Region */ + status_t m_setParamCropRegion(int zoom, int srcW, int srcH, int dstW, int dstH); + /* Sets recording mode hint. */ + void m_setRecordingHint(bool hint); + /* Sets GPS altitude. */ + void m_setGpsAltitude(double altitude); + /* Sets GPS latitude coordinate. */ + void m_setGpsLatitude(double latitude); + /* Sets GPS longitude coordinate. */ + void m_setGpsLongitude(double longitude); + /* Sets GPS processing method. */ + void m_setGpsProcessingMethod(const char *gpsProcessingMethod); + /* Sets GPS timestamp. */ + void m_setGpsTimeStamp(long timeStamp); + /* Sets the rotation angle in degrees relative to the orientation of the camera. */ + void m_setRotation(int rotation); + +/* + * Additional API. + */ + /* Sets metering areas. 
*/ + void m_setMeteringMode(int meteringMode); + /* Sets brightness */ + void m_setBrightness(int brightness); + /* Sets ISO */ + void m_setIso(uint32_t iso); + /* Sets Contrast */ + void m_setContrast(uint32_t contrast); + /* Sets Saturation */ + void m_setSaturation(int saturation); + /* Sets Sharpness */ + void m_setSharpness(int sharpness); + /* Sets Hue */ + void m_setHue(int hue); + /* Sets WDR */ + void m_setHdrMode(bool hdr); + /* Sets WDR */ + void m_setWdrMode(bool wdr); + /* Sets anti shake */ + void m_setAntiShake(bool toggle); + /* Sets gamma */ + void m_setGamma(bool gamma); + /* Sets ODC */ + void m_setOdcMode(bool toggle); + /* Sets Slow AE */ + void m_setSlowAe(bool slowAe); + /* Sets 3DNR */ + void m_set3dnrMode(bool toggle); + /* Sets DRC */ + void m_setDrcMode(bool toggle); + +/* + * Vendor specific APIs + */ + /* Sets Intelligent mode */ + status_t m_setIntelligentMode(int intelligentMode); + void m_setVisionMode(bool vision); + void m_setVisionModeFps(int fps); + void m_setVisionModeAeTarget(int ae); + + void m_setSWVdisMode(bool swVdis); + void m_setSWVdisUIMode(bool swVdisUI); + + /* Sets VT mode */ + void m_setVtMode(int vtMode); + + /* Sets Dual mode */ + void m_setDualMode(bool toggle); + /* Sets dual recording mode hint. */ + void m_setDualRecordingHint(bool hint); + /* Sets effect hint. */ + void m_setEffectHint(bool hint); + + void m_setHighResolutionCallbackMode(bool enable); + void m_setHighSpeedRecording(bool highSpeed); + void m_setCityId(long long int cityId); + void m_setWeatherId(unsigned char cityId); + status_t m_setImageUniqueId(const char *uniqueId); + /* Sets camera angle */ + bool m_setAngle(int angle); + /* Sets Top-down mirror */ + bool m_setTopDownMirror(void); + /* Sets Left-right mirror */ + bool m_setLRMirror(void); + /* Sets Burst mode */ + void m_setSeriesShotCount(int seriesShotCount); + bool m_setAutoFocusMacroPosition(int autoFocusMacroPosition); + /* Sets Low Light A */ + bool m_setLLAMode(void); + + /* Sets object tracking */ + bool m_setObjectTracking(bool toggle); + /* Start or stop object tracking operation */ + bool m_setObjectTrackingStart(bool toggle); + /* Sets x, y position for object tracking operation */ + bool m_setObjectPosition(int x, int y); + /* Sets smart auto */ + bool m_setSmartAuto(bool toggle); + /* Sets beauty shot */ + bool m_setBeautyShot(bool toggle); + +/* + * Others + */ + void m_setRestartPreviewChecked(bool restart); + bool m_getRestartPreviewChecked(void); + void m_setRestartPreview(bool restart); + void m_setExifFixedAttribute(void); + int m_convertMetaCtlAwbMode(struct camera2_shot_ext *shot_ext); + +public: + + /* Returns the image format for FLITE/3AC/3AP bayer */ + int getBayerFormat(int pipeId); + /* Returns the dimensions setting for preview pictures. */ + void getPreviewSize(int *w, int *h); + /* Returns the image format for preview frames got from Camera.PreviewCallback. */ + int getPreviewFormat(void); + /* Returns the dimension setting for pictures. */ + void getPictureSize(int *w, int *h); + /* Returns the image format for picture-related HW. */ + int getHwPictureFormat(void); + int getPictureFormat(void) {return 0;} + /* Gets video's width, height */ + void getVideoSize(int *w, int *h); + /* Gets video's color format */ + int getVideoFormat(void); + /* Gets the dimensions setting for callback pictures. */ + void getCallbackSize(int *w, int *h); + /* Gets the image format for callback pictures. */ + int getCallbackFormat(void); + /* Gets the supported sensor sizes. 
*/ + void getMaxSensorSize(int *w, int *h); + /* Gets the supported sensor margin. */ + void getSensorMargin(int *w, int *h); + /* Gets the supported preview sizes. */ + void getMaxPreviewSize(int *w, int *h); + /* Gets the supported picture sizes. */ + void getMaxPictureSize(int *w, int *h); + /* Gets the supported video frame sizes that can be used by MediaRecorder. */ + void getMaxVideoSize(int *w, int *h); + /* Gets the supported jpeg thumbnail sizes. */ + bool getSupportedJpegThumbnailSizes(int *w, int *h); + + /* Returns the dimensions setting for preview-related HW. */ + void getHwSensorSize(int *w, int *h); + /* Returns the dimensions setting for preview-related HW. */ + void getHwPreviewSize(int *w, int *h); + /* Returns the image format for preview-related HW. */ + int getHwPreviewFormat(void); + /* Returns the dimension setting for picture-related HW. */ + void getHwPictureSize(int *w, int *h); + /* Returns HW Bayer Crop Size */ + void getHwBayerCropRegion(int *w, int *h, int *x, int *y); + /* Returns VRA input Size */ + void getHwVraInputSize(int *w, int *h); + /* Returns VRA format */ + int getHwVraInputFormat(void); + + /* Gets the current antibanding setting. */ + int getAntibanding(void); + /* Gets the state of the auto-exposure lock. */ + bool getAutoExposureLock(void); + /* Gets the state of the auto-white balance lock. */ + bool getAutoWhiteBalanceLock(void); + /* Gets the current color effect setting. */ + int getColorEffectMode(void); + /* Gets the current exposure compensation index. */ + int getExposureCompensation(void); + /* Gets the current flash mode setting. */ + int getFlashMode(void); + /* Gets the current focus areas. */ + void getFocusAreas(int *validFocusArea, ExynosRect2 *rect2s, int *weights); + /* Gets the current focus mode setting. */ + int getFocusMode(void); + /* Returns the quality setting for the JPEG picture. */ + int getJpegQuality(void); + /* Returns the quality setting for the EXIF thumbnail in Jpeg picture. */ + int getThumbnailQuality(void); + /* Returns the dimensions for EXIF thumbnail in Jpeg picture. */ + void getThumbnailSize(int *w, int *h); + /* Returns the max size for EXIF thumbnail in Jpeg picture. */ + void getMaxThumbnailSize(int *w, int *h); + /* Gets the current metering areas. */ + void getMeteringAreas(ExynosRect *rects); + /* Gets the current metering areas.(Using ExynosRect2) */ + void getMeteringAreas(ExynosRect2 *rect2s); + /* Returns the current minimum and maximum preview fps. */ + void getPreviewFpsRange(uint32_t *min, uint32_t *max); + /* Gets scene mode */ + int getSceneMode(void); + /* Gets the current state of video stabilization. */ + bool getVideoStabilization(void); + /* Gets the current white balance setting. */ + int getWhiteBalanceMode(void); + /* Sets current zoom value. */ + status_t setZoomLevel(int value); + /* Gets current zoom value. */ + int getZoomLevel(void); + /* Set the current crop region info */ + status_t setCropRegion(int x, int y, int w, int h); + /* Returns the recording mode hint. */ + bool getRecordingHint(void); + /* Gets GPS altitude. */ + double getGpsAltitude(void); + /* Gets GPS latitude coordinate. */ + double getGpsLatitude(void); + /* Gets GPS longitude coordinate. */ + double getGpsLongitude(void); + /* Gets GPS processing method. */ + const char * getGpsProcessingMethod(void); + /* Gets GPS timestamp. */ + long getGpsTimeStamp(void); + /* Gets the rotation angle in degrees relative to the orientation of the camera. */ + int getRotation(void); + +/* + * Additional API. 
+ */ + + /* Gets metering */ + int getMeteringMode(void); + /* Gets metering List */ + int getSupportedMeteringMode(void); + /* Gets brightness */ + int getBrightness(void); + /* Gets ISO */ + uint32_t getIso(void); + + /* Gets ExposureTime for capture */ + uint64_t getCaptureExposureTime(void); + int32_t getLongExposureShotCount(void){return 0;}; + + /* Gets Contrast */ + uint32_t getContrast(void); + /* Gets Saturation */ + int getSaturation(void); + /* Gets Sharpness */ + int getSharpness(void); + /* Gets Hue */ + int getHue(void); + /* Gets WDR */ + bool getHdrMode(void); + /* Gets WDR */ + bool getWdrMode(void); + /* Gets anti shake */ + bool getAntiShake(void); + /* Gets gamma */ + bool getGamma(void); + /* Gets ODC */ + bool getOdcMode(void); + /* Gets Slow AE */ + bool getSlowAe(void); + /* Gets Shot mode */ + int getShotMode(void); + /* Gets Preview Buffer Count */ + int getPreviewBufferCount(void); + /* Sets Preview Buffer Count */ + void setPreviewBufferCount(int previewBufferCount); + + /* Gets 3DNR */ + bool get3dnrMode(void); + /* Gets DRC */ + bool getDrcMode(void); + /* Gets TPU enable case or not */ + bool getTpuEnabledMode(void); + +/* + * Vendor specific APIs + */ + + /* Gets Intelligent mode */ + int getIntelligentMode(void); + bool getVisionMode(void); + int getVisionModeFps(void); + int getVisionModeAeTarget(void); + + bool isSWVdisMode(void); /* need to change name */ + bool isSWVdisModeWithParam(int nPreviewW, int nPreviewH); + bool getSWVdisMode(void); + bool getSWVdisUIMode(void); + + bool getHWVdisMode(void); + int getHWVdisFormat(void); + + /* Gets VT mode */ + int getVtMode(void); + + /* Gets VR mode */ + int getVRMode(void); + + /* Gets Dual mode */ + bool getDualMode(void); + /* Returns the dual recording mode hint. */ + bool getDualRecordingHint(void); + /* Returns the effect hint. */ + bool getEffectHint(void); + /* Returns the effect recording mode hint. */ + bool getEffectRecordingHint(void); + + void setFastFpsMode(int fpsMode); + int getFastFpsMode(void); + + bool getHighResolutionCallbackMode(void); + bool getSamsungCamera(void); + void setSamsungCamera(bool value); + bool getHighSpeedRecording(void); + bool getScalableSensorMode(void); + void setScalableSensorMode(bool scaleMode); + long long int getCityId(void); + unsigned char getWeatherId(void); + /* Gets ImageUniqueId */ + const char *getImageUniqueId(void); + /* Gets camera angle */ + int getAngle(void); + + void setFlipHorizontal(int val); + int getFlipHorizontal(void); + void setFlipVertical(int val); + int getFlipVertical(void); + + /* Gets Burst mode */ + int getSeriesShotCount(void); + /* Return callback need CSC */ + bool getCallbackNeedCSC(void); + /* Return callback need copy to rendering */ + bool getCallbackNeedCopy2Rendering(void); + + /* Gets Illumination */ + int getIllumination(void); + /* Gets Low Light Shot */ + int getLLS(struct camera2_shot_ext *shot); + /* Gets Low Light A */ + bool getLLAMode(void); + /* Sets the device orientation angle in degrees to camera FW for FD scanning property. */ + bool setDeviceOrientation(int orientation); + /* Gets the device orientation angle in degrees . */ + int getDeviceOrientation(void); + /* Gets the FD orientation angle in degrees . 
*/ + int getFdOrientation(void); + /* Gets object tracking */ + bool getObjectTracking(void); + /* Gets status of object tracking operation */ + int getObjectTrackingStatus(void); + /* Gets smart auto */ + bool getSmartAuto(void); + /* Gets the status of smart auto operation */ + int getSmartAutoStatus(void); + /* Gets beauty shot */ + bool getBeautyShot(void); + +/* + * Static info + */ + /* Gets the exposure compensation step. */ + float getExposureCompensationStep(void); + + /* Gets the focal length (in millimeter) of the camera. */ + void getFocalLength(int *num, int *den); + + /* Gets the distances from the camera to where an object appears to be in focus. */ + void getFocusDistances(int *num, int *den);; + + /* Gets the minimum exposure compensation index. */ + int getMinExposureCompensation(void); + + /* Gets the maximum exposure compensation index. */ + int getMaxExposureCompensation(void); + + /* Gets the maximum number of detected faces supported. */ + int getMaxNumDetectedFaces(void); + + /* Gets the maximum number of focus areas supported. */ + uint32_t getMaxNumFocusAreas(void); + + /* Gets the maximum number of metering areas supported. */ + uint32_t getMaxNumMeteringAreas(void); + + /* Gets the maximum zoom value allowed for snapshot. */ + int getMaxZoomLevel(void); + + /* Gets the supported antibanding values. */ + int getSupportedAntibanding(void); + + /* Gets the supported color effects. */ + int getSupportedColorEffects(void); + /* Gets the supported color effects & hidden color effect. */ + bool isSupportedColorEffects(int effectMode); + /* Check whether the target support Flash */ + int getSupportedFlashModes(void); + + /* Gets the supported focus modes. */ + int getSupportedFocusModes(void); + + /* Gets the supported preview fps range. */ + bool getMaxPreviewFpsRange(int *min, int *max); + + /* Gets the supported scene modes. */ + int getSupportedSceneModes(void); + + /* Gets the supported white balance. */ + int getSupportedWhiteBalance(void); + + /* Gets the supported Iso values. */ + int getSupportedISO(void); + + /* Gets max zoom ratio */ + float getMaxZoomRatio(void); + /* Gets zoom ratio */ + float getZoomRatio(int zoom); + + /* Returns true if auto-exposure locking is supported. */ + bool getAutoExposureLockSupported(void); + + /* Returns true if auto-white balance locking is supported. */ + bool getAutoWhiteBalanceLockSupported(void); + + /* Returns true if smooth zoom is supported. */ + bool getSmoothZoomSupported(void); + + /* Returns true if video snapshot is supported. */ + bool getVideoSnapshotSupported(void); + + /* Returns true if video stabilization is supported. */ + bool getVideoStabilizationSupported(void); + + /* Returns true if zoom is supported. */ + bool getZoomSupported(void); + + /* Gets the horizontal angle of view in degrees. */ + float getHorizontalViewAngle(void); + + /* Sets the horizontal angle of view in degrees. */ + void setHorizontalViewAngle(int pictureW, int pictureH); + + /* Gets the vertical angle of view in degrees. 
*/ + float getVerticalViewAngle(void); + + /* Gets Fnumber */ + void getFnumber(int *num, int *den); + + /* Gets Aperture value */ + void getApertureValue(int *num, int *den); + + /* Gets FocalLengthIn35mmFilm */ + int getFocalLengthIn35mmFilm(void); + + bool isScalableSensorSupported(void); + + status_t getFixedExifInfo(exif_attribute_t *exifInfo); + void setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot); + + debug_attribute_t *getDebugAttribute(void); + +#ifdef DEBUG_RAWDUMP +// bool checkBayerDumpEnable(void); +#endif/* DEBUG_RAWDUMP */ +#ifdef USE_BINNING_MODE + int getBinningMode(void); +#endif /* USE_BINNING_MODE */ +public: + bool DvfsLock(); + bool DvfsUnLock(); + + void updatePreviewFpsRange(void); + void updateHwSensorSize(void); + void updateBinningScaleRatio(void); + void updateBnsScaleRatio(void); + + void setHwPreviewStride(int stride); + int getHwPreviewStride(void); + + status_t duplicateCtrlMetadata(void *buf); + + status_t setRequestDis(int value); + + status_t setDisEnable(bool enable); + status_t setDrcEnable(bool enable); + status_t setDnrEnable(bool enable); + status_t setFdEnable(bool enable); + + bool getDisEnable(void); + bool getDrcEnable(void); + bool getDnrEnable(void); + bool getFdEnable(void); + + status_t setFdMode(enum facedetect_mode mode); + status_t getFdMeta(bool reprocessing, void *buf); + bool getUHDRecordingMode(void); + +private: + bool m_isSupportedPreviewSize(const int width, const int height); + bool m_isSupportedPictureSize(const int width, const int height); + bool m_isSupportedVideoSize(const int width, const int height); + bool m_isHighResolutionCallbackSize(const int width, const int height); + void m_isHighResolutionMode(const CameraParameters& params); + + bool m_getSupportedVariableFpsList(int min, int max, + int *newMin, int *newMax); + + status_t m_getPreviewSizeList(int *sizeList); + + void m_getSWVdisPreviewSize(int w, int h, int *newW, int *newH); + void m_getScalableSensorSize(int *newSensorW, int *newSensorH); + + void m_initMetadata(void); + + bool m_isUHDRecordingMode(void); + +/* + * Vendor specific adjust function + */ +private: + status_t m_adjustPreviewFpsRange(int &newMinFps, int &newMaxFps); + status_t m_getPreviewBdsSize(ExynosRect *dstRect); + status_t m_adjustPreviewSize(int previewW, int previewH, + int *newPreviewW, int *newPreviewH, + int *newCalPreviewW, int *newCalPreviewH); + status_t m_adjustPreviewFormat(int &previewFormat, int &hwPreviewFormatH); + status_t m_adjustPictureSize(int *newPictureW, int *newPictureH, + int *newHwPictureW, int *newHwPictureH); + bool m_adjustHighSpeedRecording(int curMinFps, int curMaxFps, int newMinFps, int newMaxFps); + const char * m_adjustAntibanding(const char *strAntibanding); + const char * m_adjustFocusMode(const char *focusMode); + const char * m_adjustFlashMode(const char *flashMode); + const char * m_adjustWhiteBalanceMode(const char *whiteBalance); + bool m_adjustScalableSensorMode(const int scaleMode); + void m_adjustAeMode(enum aa_aemode curAeMode, enum aa_aemode *newAeMode); + void m_adjustSensorMargin(int *sensorMarginW, int *sensorMarginH); + void m_getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange); + void m_getCropRegion(int *x, int *y, int *w, int *h); + + /* for initial 120fps start due to quick launch */ +/* + void set120FpsState(enum INIT_120FPS_STATE state); + void clear120FpsState(enum INIT_120FPS_STATE state); + bool flag120FpsStart(void); + bool 
setSensorFpsAfter120fps(void); + void setInitValueAfter120fps(bool isAfter); +*/ + + status_t m_setBinningScaleRatio(int ratio); + status_t m_setBnsScaleRatio(int ratio); + status_t m_addHiddenResolutionList(String8 &string8Buf, struct ExynosSensorInfoBase *sensorInfo, + int w, int h, enum MODE mode, int cameraId); + void m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_dm *dm, + camera2_udm *udm); +#ifdef USE_CAMERA2_API_SUPPORT + void m_setExifChangedAttribute(exif_attribute_t *exifInfo, + ExynosRect *PictureRect, + ExynosRect *thumbnailRect, + camera2_shot_t *shot); +#endif + + void m_getV4l2Name(char* colorName, size_t length, int colorFormat); + +public: + int getCameraId(void); + /* Gets the detected faces areas. */ + int getDetectedFacesAreas(int num, int *id, + int *score, ExynosRect *face, + ExynosRect *leftEye, ExynosRect *rightEye, + ExynosRect *mouth); + /* Gets the detected faces areas. (Using ExynosRect2) */ + int getDetectedFacesAreas(int num, int *id, + int *score, ExynosRect2 *face, + ExynosRect2 *leftEye, ExynosRect2 *rightEye, + ExynosRect2 *mouth); + + void enableMsgType(int32_t msgType); + void disableMsgType(int32_t msgType); + bool msgTypeEnabled(int32_t msgType); + + status_t setFrameSkipCount(int count); + status_t getFrameSkipCount(int *count); + int getFrameSkipCount(void); + + void setIsFirstStartFlag(bool flag); + int getIsFirstStartFlag(void); + + void setPreviewRunning(bool enable); + void setPictureRunning(bool enable); + void setRecordingRunning(bool enable); + bool getPreviewRunning(void); + bool getPictureRunning(void); + bool getRecordingRunning(void); + bool getRestartPreview(void); + bool getPreviewSizeChanged(void); + + ExynosCameraActivityControl *getActivityControl(void); + status_t setAutoFocusMacroPosition(int autoFocusMacroPosition); + + void getSetfileYuvRange(bool flagReprocessing, int *setfile, int *yuvRange); + void setSetfileYuvRange(void); + + void setUseDynamicBayer(bool enable); + bool getUseDynamicBayer(void); + void setUseDynamicBayerVideoSnapShot(bool enable); + bool getUseDynamicBayerVideoSnapShot(void); + void setUseDynamicScc(bool enable); + bool getUseDynamicScc(void); + + void setUseFastenAeStable(bool enable); + bool getUseFastenAeStable(void); + + void setUsePureBayerReprocessing(bool enable); + bool getUsePureBayerReprocessing(void); + + bool isUseYuvReprocessing(void); + bool isUseYuvReprocessingForThumbnail(void); + + int32_t getReprocessingBayerMode(void); + + void setAdaptiveCSCRecording(bool enable); + bool getAdaptiveCSCRecording(void); + bool doCscRecording(void); + + uint32_t getBinningScaleRatio(void); + uint32_t getBnsScaleRatio(void); + /* Sets the dimensions for Sesnor-related BNS. */ + void setBnsSize(int w, int h); + /* Gets the dimensions for Sesnor-related BNS. */ + void getBnsSize(int *w, int *h); + + /* + * This must call before startPreview(), + * this update h/w setting at once. 
+ */ + bool updateTpuParameters(void); + + int getHalVersion(void); + void setHalVersion(int halVersion); + struct ExynosSensorInfoBase *getSensorStaticInfo(); + + int32_t getYuvStreamMaxNum(void); + +#ifdef BURST_CAPTURE + int getSeriesShotSaveLocation(void); + void setSeriesShotSaveLocation(int ssaveLocation); + char *getSeriesShotFilePath(void); + int m_seriesShotSaveLocation; + char m_seriesShotFilePath[100]; +#endif + int getSeriesShotDuration(void); + int getSeriesShotMode(void); + void setSeriesShotMode(int sshotMode, int count = 0); + + int getHalPixelFormat(void); + int convertingHalPreviewFormat(int previewFormat, int yuvRange); + + void setDvfsLock(bool lock); + bool getDvfsLock(void); + + void setFocusModeSetting(bool enable); + int getFocusModeSetting(void); + bool getSensorOTFSupported(void); + bool isReprocessing(void); + bool isSccCapture(void); + bool isFlite3aaOtf(void); + bool is3aaIspOtf(void); + bool isIspMcscOtf(void); + bool isMcscVraOtf(void); + bool isReprocessing3aaIspOTF(void); + bool isReprocessingIspMcscOTF(void); + bool isHWFCEnabled(void); + bool isHWFCOnDemand(void); + bool isUseThumbnailHWFC(void); + + bool getSupportedZoomPreviewWIthScaler(void); + void setZoomPreviewWIthScaler(bool enable); + bool getZoomPreviewWIthScaler(void); + bool isUsing3acForIspc(void); + + bool getReallocBuffer(); + bool setReallocBuffer(bool enable); + + bool setConfig(struct ExynosConfigInfo* config); + struct ExynosConfigInfo* getConfig(); + + bool setConfigMode(uint32_t mode); + int getConfigMode(); + /* Sets Shot mode */ + void m_setShotMode(int shotMode); + void setZoomActiveOn(bool enable); + bool getZoomActiveOn(void); + void setFocusModeLock(bool enable); + status_t setMarkingOfExifFlash(int flag); + int getMarkingOfExifFlash(void); + bool increaseMaxBufferOfPreview(void); + status_t setYuvBufferCount(const int count, const int index); + int getYuvBufferCount(const int index); + +//Added + int getHDRDelay(void) { return HDR_DELAY; } + int getReprocessingBayerHoldCount(void) { return REPROCESSING_BAYER_HOLD_COUNT; } + int getFastenAeFps(void) { return FASTEN_AE_FPS; } + int getPerFrameControlPipe(void) {return PERFRAME_CONTROL_PIPE; } + int getPerFrameControlReprocessingPipe(void) {return PERFRAME_CONTROL_REPROCESSING_PIPE; } + int getPerFrameInfo3AA(void) { return PERFRAME_INFO_3AA; }; + int getPerFrameInfoIsp(void) { return PERFRAME_INFO_ISP; }; + int getPerFrameInfoDis(void) { return PERFRAME_INFO_DIS; }; + int getPerFrameInfoReprocessingPure3AA(void) { return PERFRAME_INFO_PURE_REPROCESSING_3AA; } + int getPerFrameInfoReprocessingPureIsp(void) { return PERFRAME_INFO_PURE_REPROCESSING_ISP; } + int getScalerNodeNumPicture(void) { return PICTURE_GSC_NODE_NUM;} + int getScalerNodeNumPreview(void) { return PREVIEW_GSC_NODE_NUM;} + int getScalerNodeNumVideo(void) { return VIDEO_GSC_NODE_NUM;} + bool isOwnScc(int cameraId); + bool isOwnMCSC(void); + bool isCompanion(int cameraId); + bool needGSCForCapture(int camId) { return (camId == CAMERA_ID_BACK) ? 
USE_GSC_FOR_CAPTURE_BACK : USE_GSC_FOR_CAPTURE_FRONT; } + bool getSetFileCtlMode(void); + bool getSetFileCtl3AA_ISP(void); + bool getSetFileCtl3AA(void); + bool getSetFileCtlISP(void); + bool getSetFileCtlSCP(void); + + virtual void setNormalBestFrameCount(uint32_t) {} + virtual uint32_t getNormalBestFrameCount() {return 0;} + virtual void resetNormalBestFrameCount() {} + virtual void setSCPFrameCount(uint32_t) {} + virtual uint32_t getSCPFrameCount() {return 0;} + virtual void resetSCPFrameCount() {} + virtual void setBayerFrameCount(uint32_t) {} + virtual uint32_t getBayerFrameCount() {return 0;} + virtual void resetBayerFrameCount() {} + virtual bool getUseBestPic() {return false;} + virtual void setLLSCaptureCount(int) {} + virtual int getLLSCaptureCount() {return 0;}; + + +private: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + + struct camera2_shot_ext m_metadata; + + struct exynos_camera_info m_cameraInfo; + struct ExynosSensorInfoBase *m_staticInfo; + + exif_attribute_t m_exifInfo; + debug_attribute_t mDebugInfo; + + int32_t m_enabledMsgType; + mutable Mutex m_msgLock; + + float m_calculatedHorizontalViewAngle; + /* frame skips */ + ExynosCameraCounter m_frameSkipCounter; + + mutable Mutex m_parameterLock; + + ExynosCameraActivityControl *m_activityControl; + + /* Flags for camera status */ + bool m_previewRunning; + bool m_previewSizeChanged; + bool m_pictureRunning; + bool m_recordingRunning; + bool m_flagCheckDualMode; + bool m_IsThumbnailCallbackOn; + bool m_flagRestartPreviewChecked; + bool m_flagRestartPreview; + int m_fastFpsMode; + bool m_flagFirstStart; + bool m_flagMeteringRegionChanged; + bool m_flagHWVDisMode; + + bool m_flagVideoStabilization; + bool m_flag3dnrMode; + + bool m_flagCheckRecordingHint; + + int m_setfile; + int m_yuvRange; + int m_setfileReprocessing; + int m_yuvRangeReprocessing; + + bool m_useSizeTable; + bool m_useDynamicBayer; + bool m_useDynamicBayerVideoSnapShot; + bool m_useDynamicScc; + bool m_useFastenAeStable; + bool m_usePureBayerReprocessing; + bool m_useAdaptiveCSCRecording; + bool m_dvfsLock; + int m_previewBufferCount; + + bool m_reallocBuffer; + mutable Mutex m_reallocLock; + + struct ExynosConfigInfo *m_exynosconfig; + + bool m_setFocusmodeSetting; + bool m_zoom_activated; + int m_firing_flash_marking; + + uint64_t m_exposureTimeCapture; + bool m_zoomWithScaler; + int m_halVersion; + int m_yuvBufferCount[3]; + +}; + + +}; /* namespace android */ + +#endif diff --git a/libcamera/34xx/hal3/ExynosCameraSizeControl.cpp b/libcamera/34xx/hal3/ExynosCameraSizeControl.cpp new file mode 100644 index 0000000..612d4e6 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCameraSizeControl.cpp @@ -0,0 +1,96 @@ +/* + ** + ** Copyright 2015, Samsung Electronics Co. LTD + ** + ** Licensed under the Apache License, Version 2.0 (the "License"); + ** you may not use this file except in compliance with the License. + ** You may obtain a copy of the License at + ** + ** http://www.apache.org/licenses/LICENSE-2.0 + ** + ** Unless required by applicable law or agreed to in writing, software + ** distributed under the License is distributed on an "AS IS" BASIS, + ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + ** See the License for the specific language governing permissions and + ** limitations under the License. 
+ */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraSizeControl" +#include + +#include "ExynosCameraSizeControl.h" + +namespace android { + +void updateNodeGroupInfo( + __unused int pipeId, + __unused ExynosCamera3Parameters *params, + __unused camera2_node_group *node_group_info) +{ + ALOGE("ERR(%s[%d]):This is invalid function call in 34xx. Use ExynosCameraUtilsModule's APIs", __FUNCTION__, __LINE__); +} + +void setLeaderSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + int cropX, int cropY, + int width, int height) +{ + node_group_info->leader.input.cropRegion[0] = cropX; + node_group_info->leader.input.cropRegion[1] = cropY; + node_group_info->leader.input.cropRegion[2] = width; + node_group_info->leader.input.cropRegion[3] = height; + + node_group_info->leader.output.cropRegion[0] = 0; + node_group_info->leader.output.cropRegion[1] = 0; + node_group_info->leader.output.cropRegion[2] = width; + node_group_info->leader.output.cropRegion[3] = height; +} + +void setCaptureSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int width, int height) +{ + node_group_info->capture[perframePosition].input.cropRegion[0] = 0; + node_group_info->capture[perframePosition].input.cropRegion[1] = 0; + node_group_info->capture[perframePosition].input.cropRegion[2] = width; + node_group_info->capture[perframePosition].input.cropRegion[3] = height; + + node_group_info->capture[perframePosition].output.cropRegion[0] = 0; + node_group_info->capture[perframePosition].output.cropRegion[1] = 0; + node_group_info->capture[perframePosition].output.cropRegion[2] = width; + node_group_info->capture[perframePosition].output.cropRegion[3] = height; +} + +void setCaptureCropNScaleSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int cropX, int cropY, + int cropWidth, int cropHeight, + int targetWidth, int targetHeight) +{ + node_group_info->capture[perframePosition].input.cropRegion[0] = cropX; + node_group_info->capture[perframePosition].input.cropRegion[1] = cropY; + node_group_info->capture[perframePosition].input.cropRegion[2] = cropWidth; + node_group_info->capture[perframePosition].input.cropRegion[3] = cropHeight; + + node_group_info->capture[perframePosition].output.cropRegion[0] = 0; + node_group_info->capture[perframePosition].output.cropRegion[1] = 0; + node_group_info->capture[perframePosition].output.cropRegion[2] = targetWidth; + node_group_info->capture[perframePosition].output.cropRegion[3] = targetHeight; +} + +bool useSizeControlApi(void) +{ + bool use = false; +#ifdef USE_SIZE_CONTROL_API + use = USE_SIZE_CONTROL_API; +#else + use = false; + ALOGV("INFO(%s[%d]):Use Legacy Utils Module API", __FUNCTION__, __LINE__); +#endif + return use; +} + +}; /* namespace android */ diff --git a/libcamera/34xx/hal3/ExynosCameraSizeControl.h b/libcamera/34xx/hal3/ExynosCameraSizeControl.h new file mode 100644 index 0000000..755df84 --- /dev/null +++ b/libcamera/34xx/hal3/ExynosCameraSizeControl.h @@ -0,0 +1,65 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_SIZE_CONTROL_H +#define EXYNOS_CAMERA_SIZE_CONTROL_H + +#include +#include +#include + +#include "ExynosCamera3Parameters.h" + +#include "ExynosCamera3Config.h" + +#include "ExynosRect.h" +#include "fimc-is-metadata.h" +#include "ExynosCameraUtils.h" + +/* #define DEBUG_PERFRAME_SIZE */ + +namespace android { + +void updateNodeGroupInfo( + int pipeId, + ExynosCamera3Parameters *params, + camera2_node_group *node_group_info); + +/* Helper function */ +void setLeaderSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + int cropX, int cropY, + int width, int height); + +void setCaptureSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int width, int height); + +void setCaptureCropNScaleSizeToNodeGroupInfo( + camera2_node_group *node_group_info, + uint32_t perframePosition, + int cropX, int cropY, + int cropWidth, int cropHeight, + int targetWidth, int targetHeight); + +bool useSizeControlApi(void); + +}; /* namespace android */ + +#endif + diff --git a/libcamera/34xx/videodev2_exynos_camera.h b/libcamera/34xx/videodev2_exynos_camera.h new file mode 100644 index 0000000..65a35cb --- /dev/null +++ b/libcamera/34xx/videodev2_exynos_camera.h @@ -0,0 +1,1306 @@ +/* + * Video for Linux Two header file for samsung + * + * Copyright 2012, Samsung Electronics Co. LTD + * + * This header file contains several v4l2 APIs to be proposed to v4l2 + * community and until bein accepted, will be used restrictly in Samsung's + * camera interface driver FIMC. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation; either version 2 of the License, or + * (at your option) any later version. + */ + +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __LINUX_VIDEODEV2_EXYNOS_CAMERA_H +#define __LINUX_VIDEODEV2_EXYNOS_CAMERA_H + +/* Values for 'capabilities' field */ +/* Object detection device */ +#define V4L2_CAP_OBJ_RECOGNITION 0x10000000 +/* strobe control */ +#define V4L2_CAP_STROBE 0x20000000 + +#define V4L2_CID_FOCUS_MODE (V4L2_CID_CAMERA_CLASS_BASE+17) +/* Focus Methods */ +enum v4l2_focus_mode { + V4L2_FOCUS_MODE_AUTO = 0, + V4L2_FOCUS_MODE_MACRO = 1, + V4L2_FOCUS_MODE_MANUAL = 2, + V4L2_FOCUS_MODE_LASTP = 2, +}; + +#define V4L2_CID_ZOOM_MODE (V4L2_CID_CAMERA_CLASS_BASE+18) +/* Zoom Methods */ +enum v4l2_zoom_mode { + V4L2_ZOOM_MODE_CONTINUOUS = 0, + V4L2_ZOOM_MODE_OPTICAL = 1, + V4L2_ZOOM_MODE_DIGITAL = 2, + V4L2_ZOOM_MODE_LASTP = 2, +}; + +/* Exposure Methods */ +#define V4L2_CID_PHOTOMETRY (V4L2_CID_CAMERA_CLASS_BASE+19) +enum v4l2_photometry_mode { + V4L2_PHOTOMETRY_MULTISEG = 0, /*Multi Segment*/ + V4L2_PHOTOMETRY_CWA = 1, /*Centre Weighted Average*/ + V4L2_PHOTOMETRY_SPOT = 2, + V4L2_PHOTOMETRY_AFSPOT = 3, /*Spot metering on focused point*/ + V4L2_PHOTOMETRY_LASTP = V4L2_PHOTOMETRY_AFSPOT, +}; + +/* Manual exposure control items menu type: iris, shutter, iso */ +#define V4L2_CID_CAM_APERTURE (V4L2_CID_CAMERA_CLASS_BASE+20) +#define V4L2_CID_CAM_SHUTTER (V4L2_CID_CAMERA_CLASS_BASE+21) +#define V4L2_CID_CAM_ISO (V4L2_CID_CAMERA_CLASS_BASE+22) + +/* Following CIDs are menu type */ +#define V4L2_CID_SCENEMODE (V4L2_CID_CAMERA_CLASS_BASE+23) +#define V4L2_CID_CAM_STABILIZE (V4L2_CID_CAMERA_CLASS_BASE+24) +#define V4L2_CID_CAM_MULTISHOT (V4L2_CID_CAMERA_CLASS_BASE+25) + +/* Control dynamic range */ +#define V4L2_CID_CAM_DR (V4L2_CID_CAMERA_CLASS_BASE+26) + +/* White balance preset control */ +#define V4L2_CID_WHITE_BALANCE_PRESET (V4L2_CID_CAMERA_CLASS_BASE+27) +#define V4L2_CID_CAM_SENSOR_FW_VER (V4L2_CID_CAMERA_CLASS_BASE + 28) +#define V4L2_CID_CAM_PHONE_FW_VER (V4L2_CID_CAMERA_CLASS_BASE + 29) + +/* CID extensions */ +#define V4L2_CID_ROTATION (V4L2_CID_PRIVATE_BASE + 0) +#define V4L2_CID_PADDR_Y (V4L2_CID_PRIVATE_BASE + 1) +#define V4L2_CID_PADDR_CB (V4L2_CID_PRIVATE_BASE + 2) +#define V4L2_CID_PADDR_CR (V4L2_CID_PRIVATE_BASE + 3) +#define V4L2_CID_PADDR_CBCR (V4L2_CID_PRIVATE_BASE + 4) +#define V4L2_CID_OVERLAY_AUTO (V4L2_CID_PRIVATE_BASE + 5) +#define V4L2_CID_OVERLAY_VADDR0 (V4L2_CID_PRIVATE_BASE + 6) +#define V4L2_CID_OVERLAY_VADDR1 (V4L2_CID_PRIVATE_BASE + 7) +#define V4L2_CID_OVERLAY_VADDR2 (V4L2_CID_PRIVATE_BASE + 8) +#define V4L2_CID_OVLY_MODE (V4L2_CID_PRIVATE_BASE + 9) +#define V4L2_CID_DST_INFO (V4L2_CID_PRIVATE_BASE + 10) +/* UMP secure id control */ +#define V4L2_CID_GET_UMP_SECURE_ID (V4L2_CID_PRIVATE_BASE + 11) +#define V4L2_CID_GET_PHY_SRC_YADDR (V4L2_CID_PRIVATE_BASE + 12) +#define V4L2_CID_GET_PHY_SRC_CADDR (V4L2_CID_PRIVATE_BASE + 13) +#define V4L2_CID_IMAGE_EFFECT_FN (V4L2_CID_PRIVATE_BASE + 16) +#define V4L2_CID_IMAGE_EFFECT_APPLY (V4L2_CID_PRIVATE_BASE + 17) +#define V4L2_CID_IMAGE_EFFECT_CB (V4L2_CID_PRIVATE_BASE + 18) +#define V4L2_CID_IMAGE_EFFECT_CR (V4L2_CID_PRIVATE_BASE + 19) +#define V4L2_CID_RESERVED_MEM_BASE_ADDR (V4L2_CID_PRIVATE_BASE + 20) +#define V4L2_CID_FIMC_VERSION (V4L2_CID_PRIVATE_BASE + 21) + +#define V4L2_CID_STREAM_PAUSE (V4L2_CID_PRIVATE_BASE + 53) +#define V4L2_CID_CACHE_FLUSH (V4L2_CID_PRIVATE_BASE + 61) +#define V4L2_CID_RESERVED_MEM_SIZE (V4L2_CID_PRIVATE_BASE + 63) + +/* CID Extensions for camera sensor operations */ +#define V4L2_CID_CAM_PREVIEW_ONOFF (V4L2_CID_PRIVATE_BASE + 64) +#define V4L2_CID_CAM_CAPTURE (V4L2_CID_PRIVATE_BASE + 65) +/* #define 
V4L2_CID_CAM_JPEG_MEMSIZE (V4L2_CID_PRIVATE_BASE + 66) */ + +#define V4L2_CID_CAM_DATE_INFO_YEAR (V4L2_CID_PRIVATE_BASE + 14) +#define V4L2_CID_CAM_DATE_INFO_MONTH (V4L2_CID_PRIVATE_BASE + 15) +#define V4L2_CID_CAM_DATE_INFO_DATE (V4L2_CID_PRIVATE_BASE + 22) +#define V4L2_CID_CAM_SENSOR_VER (V4L2_CID_PRIVATE_BASE + 23) +#define V4L2_CID_CAM_FW_MINOR_VER (V4L2_CID_PRIVATE_BASE + 24) +#define V4L2_CID_CAM_FW_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 25) +#define V4L2_CID_CAM_PRM_MINOR_VER (V4L2_CID_PRIVATE_BASE + 26) +#define V4L2_CID_CAM_PRM_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 27) +#define V4L2_CID_CAM_FW_VER (V4L2_CID_PRIVATE_BASE + 28) +#define V4L2_CID_CAM_SET_FW_ADDR (V4L2_CID_PRIVATE_BASE + 29) +#define V4L2_CID_CAM_SET_FW_SIZE (V4L2_CID_PRIVATE_BASE + 30) +#define V4L2_CID_CAM_UPDATE_FW (V4L2_CID_PRIVATE_BASE + 31) +enum v4l2_firmware_mode { + FW_MODE_UPDATE, + FW_MODE_VERSION, + FW_MODE_DUMP, +}; + +#define V4L2_CID_CAM_JPEG_MAIN_SIZE (V4L2_CID_PRIVATE_BASE + 32) +#define V4L2_CID_CAM_JPEG_MAIN_OFFSET (V4L2_CID_PRIVATE_BASE + 33) +#define V4L2_CID_CAM_JPEG_THUMB_SIZE (V4L2_CID_PRIVATE_BASE + 34) +#define V4L2_CID_CAM_JPEG_THUMB_OFFSET (V4L2_CID_PRIVATE_BASE + 35) +#define V4L2_CID_CAM_JPEG_POSTVIEW_OFFSET (V4L2_CID_PRIVATE_BASE + 36) +#define V4L2_CID_CAM_JPEG_QUALITY (V4L2_CID_PRIVATE_BASE + 37) +#define V4L2_CID_CAM_SENSOR_MAKER (V4L2_CID_PRIVATE_BASE + 38) +#define V4L2_CID_CAM_SENSOR_OPTICAL (V4L2_CID_PRIVATE_BASE + 39) +#define V4L2_CID_CAM_AF_VER_LOW (V4L2_CID_PRIVATE_BASE + 40) +#define V4L2_CID_CAM_AF_VER_HIGH (V4L2_CID_PRIVATE_BASE + 41) +#define V4L2_CID_CAM_GAMMA_RG_LOW (V4L2_CID_PRIVATE_BASE + 42) +#define V4L2_CID_CAM_GAMMA_RG_HIGH (V4L2_CID_PRIVATE_BASE + 43) +#define V4L2_CID_CAM_GAMMA_BG_LOW (V4L2_CID_PRIVATE_BASE + 44) +#define V4L2_CID_CAM_GAMMA_BG_HIGH (V4L2_CID_PRIVATE_BASE + 45) +#define V4L2_CID_CAM_DUMP_FW (V4L2_CID_PRIVATE_BASE + 46) +#define V4L2_CID_CAM_GET_DUMP_SIZE (V4L2_CID_PRIVATE_BASE + 47) +#define V4L2_CID_CAMERA_VT_MODE (V4L2_CID_PRIVATE_BASE + 48) +#define V4L2_CID_CAMERA_VGA_BLUR (V4L2_CID_PRIVATE_BASE + 49) +#define V4L2_CID_CAMERA_CAPTURE (V4L2_CID_PRIVATE_BASE + 50) +#define V4L2_CID_CAMERA_HDR (V4L2_CID_PRIVATE_BASE + 51) + +#define V4L2_CID_MAIN_SW_DATE_INFO_YEAR (V4L2_CID_PRIVATE_BASE + 54) +#define V4L2_CID_MAIN_SW_DATE_INFO_MONTH (V4L2_CID_PRIVATE_BASE + 55) +#define V4L2_CID_MAIN_SW_DATE_INFO_DATE (V4L2_CID_PRIVATE_BASE + 56) +#define V4L2_CID_MAIN_SW_FW_MINOR_VER (V4L2_CID_PRIVATE_BASE + 57) +#define V4L2_CID_MAIN_SW_FW_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 58) +#define V4L2_CID_MAIN_SW_PRM_MINOR_VER (V4L2_CID_PRIVATE_BASE + 59) +#define V4L2_CID_MAIN_SW_PRM_MAJOR_VER (V4L2_CID_PRIVATE_BASE + 60) + +#define V4L2_CID_FIMC_IS_BASE (V4L2_CTRL_CLASS_CAMERA | 0x1000) +#define V4L2_CID_FIMC_IS_TUNE_BASE (V4L2_CTRL_CLASS_CAMERA | 0x2000) +#define V4L2_CID_SENSOR_BASE (V4L2_CTRL_CLASS_CAMERA | 0x3000) + +#define V4L2_CID_IS_LOAD_FW (V4L2_CID_FIMC_IS_BASE + 10) +#define V4L2_CID_IS_INIT_PARAM (V4L2_CID_FIMC_IS_BASE + 11) +#define V4L2_CID_IS_RESET (V4L2_CID_FIMC_IS_BASE + 12) +#define V4L2_CID_IS_S_POWER (V4L2_CID_FIMC_IS_BASE + 13) +enum is_set_power { + IS_POWER_OFF, + IS_POWER_ON +}; + +#define V4L2_CID_IS_S_STREAM (V4L2_CID_FIMC_IS_BASE + 14) +enum is_set_stream { + IS_DISABLE_STREAM, + IS_ENABLE_STREAM +}; + +#define V4L2_CID_IS_S_SCENARIO_MODE (V4L2_CID_FIMC_IS_BASE + 15) +#define V4L2_CID_IS_S_FORMAT_SCENARIO (V4L2_CID_FIMC_IS_BASE + 16) +enum scenario_mode { + IS_MODE_PREVIEW_STILL, + IS_MODE_PREVIEW_VIDEO, + IS_MODE_CAPTURE_STILL, + 
IS_MODE_CAPTURE_VIDEO, + IS_MODE_MAX +}; + +#define V4L2_CID_IS_G_CAPABILITY (V4L2_CID_FIMC_IS_BASE + 17) +#define V4L2_CID_IS_G_COMPLETES (V4L2_CID_FIMC_IS_BASE + 18) +#define V4L2_CID_IS_BDS_WIDTH (V4L2_CID_FIMC_IS_BASE + 19) +#define V4L2_CID_IS_BDS_HEIGHT (V4L2_CID_FIMC_IS_BASE + 20) +#define V4L2_CID_IS_DVFS_LOCK (V4L2_CID_FIMC_IS_BASE + 21) +#define V4L2_CID_IS_DVFS_UNLOCK (V4L2_CID_FIMC_IS_BASE + 22) +#define V4L2_CID_IS_DVFS_CLUSTER0 (V4L2_CID_FIMC_IS_BASE + 23) +#define V4L2_CID_IS_DVFS_CLUSTER1 (V4L2_CID_FIMC_IS_BASE + 24) +#define V4L2_CID_IS_FORCE_DONE (V4L2_CID_FIMC_IS_BASE + 50) +#define V4L2_CID_IS_SET_SETFILE (V4L2_CID_FIMC_IS_BASE + 51) +#define V4L2_CID_IS_S_BNS (V4L2_CID_FIMC_IS_BASE + 52) +#define V4L2_CID_IS_G_BNS_SIZE (V4L2_CID_FIMC_IS_BASE + 53) +#define V4L2_CID_IS_COLOR_RANGE (V4L2_CID_FIMC_IS_BASE + 54) +#define V4L2_CID_IS_MIN_TARGET_FPS (V4L2_CID_FIMC_IS_BASE + 55) +#define V4L2_CID_IS_MAX_TARGET_FPS (V4L2_CID_FIMC_IS_BASE + 56) +#define V4L2_CID_IS_G_DTPSTATUS (V4L2_CID_FIMC_IS_BASE + 57) +#define V4L2_CID_IS_END_OF_STREAM (V4L2_CID_FIMC_IS_BASE + 58) +#define V4L2_CID_IS_MAP_BUFFER (V4L2_CID_FIMC_IS_BASE + 59) +/* HACK: For fast OIS Capture */ +#define V4L2_CID_IS_INTENT (V4L2_CID_FIMC_IS_BASE + 60) +#define V4L2_CID_IS_CAMERA_TYPE (V4L2_CID_FIMC_IS_BASE + 61) +#define V4L2_CID_IS_DEBUG_DUMP (V4L2_CID_FIMC_IS_BASE + 900) +#define V4L2_CID_IS_DEBUG_SYNC_LOG (V4L2_CID_FIMC_IS_BASE + 901) +#define V4L2_CID_IS_HAL_VERSION (V4L2_CID_FIMC_IS_BASE + 902) +enum v4l2_is_hal_version { + IS_HAL_VER_1_0, + IS_HAL_VER_3_2, + IS_HAL_VER_MAX, +}; + +/* global */ +#define V4L2_CID_IS_CAMERA_SHOT_MODE_NORMAL (V4L2_CID_FIMC_IS_BASE + 101) +/* value : 1 : single shot , >=2 : continuous shot */ + +#define V4L2_CID_IS_CAMERA_SENSOR_NUM (V4L2_CID_FIMC_IS_BASE + 201) + +#define V4L2_CID_IS_CAMERA_FOCUS_MODE (V4L2_CID_FIMC_IS_BASE + 401) +enum is_focus_mode { + IS_FOCUS_MODE_AUTO, + IS_FOCUS_MODE_MACRO, + IS_FOCUS_MODE_INFINITY, + IS_FOCUS_MODE_CONTINUOUS, + IS_FOCUS_MODE_TOUCH, + IS_FOCUS_MODE_FACEDETECT, + IS_FOCUS_MODE_IDLE, + IS_FOCUS_MODE_MAX, +}; + +#define V4L2_CID_IS_CAMERA_FLASH_MODE (V4L2_CID_FIMC_IS_BASE + 402) +enum is_flash_mode { + IS_FLASH_MODE_OFF, + IS_FLASH_MODE_AUTO, + IS_FLASH_MODE_AUTO_REDEYE, + IS_FLASH_MODE_ON, + IS_FLASH_MODE_TORCH, + IS_FLASH_MODE_MAX +}; + +#define V4L2_CID_IS_CAMERA_AWB_MODE (V4L2_CID_FIMC_IS_BASE + 403) +enum is_awb_mode { + IS_AWB_AUTO, + IS_AWB_DAYLIGHT, + IS_AWB_CLOUDY, + IS_AWB_TUNGSTEN, + IS_AWB_FLUORESCENT, + IS_AWB_MAX +}; + +#define V4L2_CID_IS_CAMERA_IMAGE_EFFECT (V4L2_CID_FIMC_IS_BASE + 404) +enum is_image_effect { + IS_IMAGE_EFFECT_DISABLE, + IS_IMAGE_EFFECT_MONOCHROME, + IS_IMAGE_EFFECT_NEGATIVE_MONO, + IS_IMAGE_EFFECT_NEGATIVE_COLOR, + IS_IMAGE_EFFECT_SEPIA, + IS_IMAGE_EFFECT_SEPIA_CB, + IS_IMAGE_EFFECT_SEPIA_CR, + IS_IMAGE_EFFECT_NEGATIVE, + IS_IMAGE_EFFECT_ARTFREEZE, + IS_IMAGE_EFFECT_EMBOSSING, + IS_IMAGE_EFFECT_SILHOUETTE, + IS_IMAGE_EFFECT_MAX +}; + +#define V4L2_CID_IS_CAMERA_ISO (V4L2_CID_FIMC_IS_BASE + 405) +enum is_iso { + IS_ISO_AUTO, + IS_ISO_50, + IS_ISO_100, + IS_ISO_200, + IS_ISO_400, + IS_ISO_800, + IS_ISO_1600, + IS_ISO_MAX +}; + +#define V4L2_CID_IS_CAMERA_CONTRAST (V4L2_CID_FIMC_IS_BASE + 406) +enum is_contrast { + IS_CONTRAST_AUTO, + IS_CONTRAST_MINUS_2, + IS_CONTRAST_MINUS_1, + IS_CONTRAST_DEFAULT, + IS_CONTRAST_PLUS_1, + IS_CONTRAST_PLUS_2, + IS_CONTRAST_MAX +}; + +#define V4L2_CID_IS_CAMERA_SATURATION (V4L2_CID_FIMC_IS_BASE + 407) +enum is_saturation { + IS_SATURATION_MINUS_2, + IS_SATURATION_MINUS_1, + 
IS_SATURATION_DEFAULT, + IS_SATURATION_PLUS_1, + IS_SATURATION_PLUS_2, + IS_SATURATION_MAX +}; + +#define V4L2_CID_IS_CAMERA_SHARPNESS (V4L2_CID_FIMC_IS_BASE + 408) +enum is_sharpness { + IS_SHARPNESS_MINUS_2, + IS_SHARPNESS_MINUS_1, + IS_SHARPNESS_DEFAULT, + IS_SHARPNESS_PLUS_1, + IS_SHARPNESS_PLUS_2, + IS_SHARPNESS_MAX +}; + +#define V4L2_CID_IS_CAMERA_EXPOSURE (V4L2_CID_FIMC_IS_BASE + 409) +enum is_exposure { + IS_EXPOSURE_MINUS_4, + IS_EXPOSURE_MINUS_3, + IS_EXPOSURE_MINUS_2, + IS_EXPOSURE_MINUS_1, + IS_EXPOSURE_DEFAULT, + IS_EXPOSURE_PLUS_1, + IS_EXPOSURE_PLUS_2, + IS_EXPOSURE_PLUS_3, + IS_EXPOSURE_PLUS_4, + IS_EXPOSURE_MAX +}; + +#define V4L2_CID_IS_CAMERA_BRIGHTNESS (V4L2_CID_FIMC_IS_BASE + 410) +enum is_brightness { + IS_BRIGHTNESS_MINUS_2, + IS_BRIGHTNESS_MINUS_1, + IS_BRIGHTNESS_DEFAULT, + IS_BRIGHTNESS_PLUS_1, + IS_BRIGHTNESS_PLUS_2, + IS_BRIGHTNESS_MAX +}; + +#define V4L2_CID_IS_CAMERA_HUE (V4L2_CID_FIMC_IS_BASE + 411) +enum is_hue { + IS_HUE_MINUS_2, + IS_HUE_MINUS_1, + IS_HUE_DEFAULT, + IS_HUE_PLUS_1, + IS_HUE_PLUS_2, + IS_HUE_MAX +}; + +#define V4L2_CID_IS_CAMERA_METERING (V4L2_CID_FIMC_IS_BASE + 412) +enum is_metering { + IS_METERING_AVERAGE, + IS_METERING_SPOT, + IS_METERING_MATRIX, + IS_METERING_CENTER, + IS_METERING_MAX +}; +#define V4L2_CID_IS_CAMERA_METERING_POSITION_X (V4L2_CID_FIMC_IS_BASE + 500) +#define V4L2_CID_IS_CAMERA_METERING_POSITION_Y (V4L2_CID_FIMC_IS_BASE + 501) +#define V4L2_CID_IS_CAMERA_METERING_WINDOW_X (V4L2_CID_FIMC_IS_BASE + 502) +#define V4L2_CID_IS_CAMERA_METERING_WINDOW_Y (V4L2_CID_FIMC_IS_BASE + 503) + +#define V4L2_CID_IS_CAMERA_AFC_MODE (V4L2_CID_FIMC_IS_BASE + 413) +enum is_afc_mode { + IS_AFC_DISABLE, + IS_AFC_AUTO, + IS_AFC_MANUAL_50HZ, + IS_AFC_MANUAL_60HZ, + IS_AFC_MAX +}; + +#define V4L2_CID_IS_AWB_LOCK_UNLOCK (V4L2_CID_FIMC_IS_BASE + 496) +enum is_awb_lock_unlock { + IS_AWB_LOCK, + IS_AWB_UNLOCK, + IS_AWB_LOCK_UNLOCK_MAX +}; + +#define V4L2_CID_IS_AE_LOCK_UNLOCK (V4L2_CID_FIMC_IS_BASE + 497) +enum is_ae_lock_unlock { + IS_AE_LOCK, + IS_AE_UNLOCK, + IS_AE_LOCK_UNLOCK_MAX +}; + + +#define V4L2_CID_IS_FD_GET_FACE_COUNT (V4L2_CID_FIMC_IS_BASE + 600) +#define V4L2_CID_IS_FD_GET_FACE_FRAME_NUMBER (V4L2_CID_FIMC_IS_BASE + 601) +#define V4L2_CID_IS_FD_GET_FACE_CONFIDENCE (V4L2_CID_FIMC_IS_BASE + 602) +#define V4L2_CID_IS_FD_GET_FACE_SMILE_LEVEL (V4L2_CID_FIMC_IS_BASE + 603) +#define V4L2_CID_IS_FD_GET_FACE_BLINK_LEVEL (V4L2_CID_FIMC_IS_BASE + 604) +#define V4L2_CID_IS_FD_GET_FACE_TOPLEFT_X (V4L2_CID_FIMC_IS_BASE + 605) +#define V4L2_CID_IS_FD_GET_FACE_TOPLEFT_Y (V4L2_CID_FIMC_IS_BASE + 606) +#define V4L2_CID_IS_FD_GET_FACE_BOTTOMRIGHT_X (V4L2_CID_FIMC_IS_BASE + 607) +#define V4L2_CID_IS_FD_GET_FACE_BOTTOMRIGHT_Y (V4L2_CID_FIMC_IS_BASE + 608) +#define V4L2_CID_IS_FD_GET_LEFT_EYE_TOPLEFT_X (V4L2_CID_FIMC_IS_BASE + 609) +#define V4L2_CID_IS_FD_GET_LEFT_EYE_TOPLEFT_Y (V4L2_CID_FIMC_IS_BASE + 610) +#define V4L2_CID_IS_FD_GET_LEFT_EYE_BOTTOMRIGHT_X \ + (V4L2_CID_FIMC_IS_BASE + 611) +#define V4L2_CID_IS_FD_GET_LEFT_EYE_BOTTOMRIGHT_Y \ + (V4L2_CID_FIMC_IS_BASE + 612) +#define V4L2_CID_IS_FD_GET_RIGHT_EYE_TOPLEFT_X \ + (V4L2_CID_FIMC_IS_BASE + 613) +#define V4L2_CID_IS_FD_GET_RIGHT_EYE_TOPLEFT_Y \ + (V4L2_CID_FIMC_IS_BASE + 614) +#define V4L2_CID_IS_FD_GET_RIGHT_EYE_BOTTOMRIGHT_X \ + (V4L2_CID_FIMC_IS_BASE + 615) +#define V4L2_CID_IS_FD_GET_RIGHT_EYE_BOTTOMRIGHT_Y \ + (V4L2_CID_FIMC_IS_BASE + 616) +#define V4L2_CID_IS_FD_GET_MOUTH_TOPLEFT_X \ + (V4L2_CID_FIMC_IS_BASE + 617) +#define V4L2_CID_IS_FD_GET_MOUTH_TOPLEFT_Y \ + (V4L2_CID_FIMC_IS_BASE + 618) 
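
For reference, the V4L2_CID_IS_FD_GET_* IDs above expose the FIMC-IS face-detection results (face count, confidence, bounding boxes, eye and mouth coordinates) as ordinary V4L2 controls on the IS video node. A minimal user-space sketch of reading one of them follows; the helper name, the already-opened device fd, and the error handling are assumptions for illustration, and inside the camera HAL such reads typically go through the exynos_v4l2 wrappers declared in include/exynos_v4l2.h rather than a raw ioctl:

#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Hypothetical helper: query how many faces the FIMC-IS firmware currently reports. */
static int is_fd_get_face_count(int fd, int *count)
{
    struct v4l2_control ctrl;

    ctrl.id = V4L2_CID_IS_FD_GET_FACE_COUNT;   /* defined earlier in this header */
    ctrl.value = 0;

    if (ioctl(fd, VIDIOC_G_CTRL, &ctrl) < 0)
        return -1;                             /* errno carries the failure reason */

    *count = ctrl.value;
    return 0;
}

The per-face getters (V4L2_CID_IS_FD_GET_FACE_TOPLEFT_X and friends) follow the same VIDIOC_G_CTRL pattern, one control read per field.
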
+#define V4L2_CID_IS_FD_GET_MOUTH_BOTTOMRIGHT_X \ + (V4L2_CID_FIMC_IS_BASE + 619) +#define V4L2_CID_IS_FD_GET_MOUTH_BOTTOMRIGHT_Y \ + (V4L2_CID_FIMC_IS_BASE + 620) +#define V4L2_CID_IS_FD_GET_ANGLE (V4L2_CID_FIMC_IS_BASE + 621) +#define V4L2_CID_IS_FD_GET_YAW_ANGLE (V4L2_CID_FIMC_IS_BASE + 622) +#define V4L2_CID_IS_FD_GET_NEXT (V4L2_CID_FIMC_IS_BASE + 623) +#define V4L2_CID_IS_FD_GET_DATA (V4L2_CID_FIMC_IS_BASE + 624) + +#define V4L2_CID_IS_FD_SET_MAX_FACE_NUMBER (V4L2_CID_FIMC_IS_BASE + 650) +#define V4L2_CID_IS_FD_SET_ROLL_ANGLE (V4L2_CID_FIMC_IS_BASE + 651) + +#define V4L2_CID_IS_AE_AWB_LOCK_UNLOCK (V4L2_CID_FIMC_IS_BASE + 665) +enum is_ae_awb_lock_unlock { + IS_AE_UNLOCK_AWB_UNLOCK, + IS_AE_LOCK_AWB_UNLOCK, + IS_AE_UNLOCK_AWB_LOCK, + IS_AE_LOCK_AWB_LOCK, + IS_AE_AWB_LOCK_UNLOCK_MAX +}; + +#define V4L2_CID_IS_CAMERA_FOCUS_START_STOP (V4L2_CID_FIMC_IS_BASE + 670) +enum is_focus_start_stop { + IS_FOCUS_STOP, + IS_FOCUS_START, + IS_FOCUS_MAX +}; + +enum is_fd_roll_angle { + /* 0, 45, 0, -45 */ + IS_FD_ROLL_ANGLE_BASIC = 0, + /* 0, 30, 0, -30, 0, 45, 0, -45 */ + IS_FD_ROLL_ANGLE_PRECISE_BASIC = 1, + /* 0, 90, 0, -90 */ + IS_FD_ROLL_ANGLE_SIDES = 2, + /* 0, 90, 0, -90 0, 45, 0, -45 */ + IS_FD_ROLL_ANGLE_PRECISE_SIDES = 3, + /* 0, 90, 0, -90, 0, 180 */ + IS_FD_ROLL_ANGLE_FULL = 4, + /* 0, 90, 0, -90, 0, 180, 0, 135, 0, -135 */ + IS_FD_ROLL_ANGLE_PRECISE_FULL = 5, +}; + +#define V4L2_CID_IS_FD_SET_YAW_ANGLE (V4L2_CID_FIMC_IS_BASE + 652) +enum is_fd_yaw_angle { + IS_FD_YAW_ANGLE_0 = 0, + IS_FD_YAW_ANGLE_45 = 1, + IS_FD_YAW_ANGLE_90 = 2, + IS_FD_YAW_ANGLE_45_90 = 3, +}; + +#define V4L2_CID_IS_FD_SET_SMILE_MODE (V4L2_CID_FIMC_IS_BASE + 653) +enum is_fd_smile_mode { + IS_FD_SMILE_MODE_DISABLE = 0, + IS_FD_SMILE_MODE_ENABLE = 1, +}; + +#define V4L2_CID_IS_FD_SET_BLINK_MODE (V4L2_CID_FIMC_IS_BASE + 654) +enum is_fd_blink_mode { + IS_FD_BLINK_MODE_DISABLE = 0, + IS_FD_BLINK_MODE_ENABLE = 1, +}; + +#define V4L2_CID_IS_FD_SET_EYE_DETECT_MODE (V4L2_CID_FIMC_IS_BASE + 655) +enum is_fd_eye_detect_mode { + IS_FD_EYE_DETECT_DISABLE = 0, + IS_FD_EYE_DETECT_ENABLE = 1, +}; + +#define V4L2_CID_IS_FD_SET_MOUTH_DETECT_MODE (V4L2_CID_FIMC_IS_BASE + 656) +enum is_fd_mouth_detect_mode { + IS_FD_MOUTH_DETECT_DISABLE = 0, + IS_FD_MOUTH_DETECT_ENABLE = 1, +}; + +#define V4L2_CID_IS_FD_SET_ORIENTATION_MODE (V4L2_CID_FIMC_IS_BASE + 657) +enum is_fd_orientation_mode { + IS_FD_ORIENTATION_DISABLE = 0, + IS_FD_ORIENTATION_ENABLE = 1, +}; + +#define V4L2_CID_IS_FD_SET_ORIENTATION (V4L2_CID_FIMC_IS_BASE + 658) +#define V4L2_CID_IS_FD_SET_DATA_ADDRESS (V4L2_CID_FIMC_IS_BASE + 659) + +#define V4L2_CID_IS_SET_ISP (V4L2_CID_FIMC_IS_BASE + 440) +enum is_isp_bypass_mode { + IS_ISP_BYPASS_DISABLE, + IS_ISP_BYPASS_ENABLE, + IS_ISP_BYPASS_MAX +}; + +#define V4L2_CID_IS_SET_DRC (V4L2_CID_FIMC_IS_BASE + 441) +enum is_drc_bypass_mode { + IS_DRC_BYPASS_DISABLE, + IS_DRC_BYPASS_ENABLE, + IS_DRC_BYPASS_MAX +}; + +#define V4L2_CID_IS_SET_FD (V4L2_CID_FIMC_IS_BASE + 442) +enum is_fd_bypass_mode { + IS_FD_BYPASS_DISABLE, + IS_FD_BYPASS_ENABLE, + IS_FD_BYPASS_MAX +}; + +#define V4L2_CID_IS_SET_ODC (V4L2_CID_FIMC_IS_BASE + 443) +enum is_odc_bypass_mode { + IS_ODC_BYPASS_DISABLE, + IS_ODC_BYPASS_ENABLE, + IS_ODC_BYPASS_MAX +}; + +#define V4L2_CID_IS_SET_DIS (V4L2_CID_FIMC_IS_BASE + 444) +enum is_dis_bypass_mode { + IS_DIS_BYPASS_DISABLE, + IS_DIS_BYPASS_ENABLE, + IS_DIS_BYPASS_MAX +}; + +#define V4L2_CID_IS_SET_3DNR (V4L2_CID_FIMC_IS_BASE + 445) +enum is_tdnr_bypass_mode { + IS_TDNR_BYPASS_DISABLE, + IS_TDNR_BYPASS_ENABLE, + IS_TDNR_BYPASS_MAX 
+}; + +#define V4L2_CID_IS_SET_SCALERC (V4L2_CID_FIMC_IS_BASE + 446) +enum is_scalerc_bypass_mode { + IS_SCALERC_BYPASS_DISABLE, + IS_SCALERC_BYPASS_ENABLE, + IS_SCALERC_BYPASS_MAX +}; + +#define V4L2_CID_IS_SET_SCALERP (V4L2_CID_FIMC_IS_BASE + 446) +enum is_scalerp_bypass_mode { + IS_SCALERP_BYPASS_DISABLE, + IS_SCALERP_BYPASS_ENABLE, + IS_SCALERP_BYPASS_MAX +}; + +#define V4L2_CID_IS_ROTATION_MODE (V4L2_CID_FIMC_IS_BASE + 450) +enum is_rotation_mode { + IS_ROTATION_0, + IS_ROTATION_90, + IS_ROTATION_180, + IS_ROTATION_270, + IS_ROTATION_MAX +}; + +#define V4L2_CID_IS_3DNR_1ST_FRAME_MODE (V4L2_CID_FIMC_IS_BASE + 451) +enum is_tdnr_1st_frame_mode { + IS_TDNR_1ST_FRAME_NOPROCESSING, + IS_TDNR_1ST_FRAME_2DNR, + IS_TDNR_MAX +}; + +#define V4L2_CID_IS_CAMERA_OBJECT_POSITION_X (V4L2_CID_FIMC_IS_BASE + 452) +#define V4L2_CID_IS_CAMERA_OBJECT_POSITION_Y (V4L2_CID_FIMC_IS_BASE + 453) +#define V4L2_CID_IS_CAMERA_WINDOW_SIZE_X (V4L2_CID_FIMC_IS_BASE + 454) +#define V4L2_CID_IS_CAMERA_WINDOW_SIZE_Y (V4L2_CID_FIMC_IS_BASE + 455) + +#define V4L2_CID_IS_CAMERA_EXIF_EXPTIME (V4L2_CID_FIMC_IS_BASE + 456) +#define V4L2_CID_IS_CAMERA_EXIF_FLASH (V4L2_CID_FIMC_IS_BASE + 457) +#define V4L2_CID_IS_CAMERA_EXIF_ISO (V4L2_CID_FIMC_IS_BASE + 458) +#define V4L2_CID_IS_CAMERA_EXIF_SHUTTERSPEED (V4L2_CID_FIMC_IS_BASE + 459) +#define V4L2_CID_IS_CAMERA_EXIF_BRIGHTNESS (V4L2_CID_FIMC_IS_BASE + 460) + +#define V4L2_CID_IS_CAMERA_ISP_SEL_INPUT (V4L2_CID_FIMC_IS_BASE + 461) +enum is_isp_sel_input { + IS_ISP_INPUT_OTF, + IS_ISP_INPUT_DMA1, + IS_ISP_INPUT_DMA2, + IS_ISP_INPUT_DMA12, + IS_ISP_INPUT_MAX +}; + +#define V4L2_CID_IS_CAMERA_ISP_SEL_OUTPUT (V4L2_CID_FIMC_IS_BASE + 462) +enum is_isp_sel_output { + IS_ISP_OUTPUT_OTF, + IS_ISP_OUTPUT_DMA1, + IS_ISP_OUTPUT_DMA2, + IS_ISP_OUTPUT_DMA12, + IS_ISP_OUTPUT_OTF_DMA1, + IS_ISP_OUTPUT_OTF_DMA2, + IS_ISP_OUTPUT_OTF_DMA12, + IS_ISP_OUTPUT_MAX +}; + +#define V4L2_CID_IS_CAMERA_DRC_SEL_INPUT (V4L2_CID_FIMC_IS_BASE + 463) +enum is_drc_sel_input { + IS_DRC_INPUT_OTF, + IS_DRC_INPUT_DMA, + IS_DRC_INPUT_MAX +}; + +#define V4L2_CID_IS_CAMERA_FD_SEL_INPUT (V4L2_CID_FIMC_IS_BASE + 464) +enum is_fd_sel_input { + IS_FD_INPUT_OTF, + IS_FD_INPUT_DMA, + IS_FD_INPUT_MAX +}; + +#define V4L2_CID_IS_CAMERA_INIT_WIDTH (V4L2_CID_FIMC_IS_BASE + 465) +#define V4L2_CID_IS_CAMERA_INIT_HEIGHT (V4L2_CID_FIMC_IS_BASE + 466) + +#define V4L2_CID_IS_CMD_ISP (V4L2_CID_FIMC_IS_BASE + 467) +enum is_isp_cmd_mode { + IS_ISP_COMMAND_STOP, + IS_ISP_COMMAND_START, + IS_ISP_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_DRC (V4L2_CID_FIMC_IS_BASE + 468) +enum is_drc_cmd_mode { + IS_DRC_COMMAND_STOP, + IS_DRC_COMMAND_START, + IS_DRC_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_FD (V4L2_CID_FIMC_IS_BASE + 469) +enum is_fd_cmd_mode { + IS_FD_COMMAND_STOP, + IS_FD_COMMAND_START, + IS_FD_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_ODC (V4L2_CID_FIMC_IS_BASE + 470) +enum is_odc_cmd_mode { + IS_ODC_COMMAND_STOP, + IS_ODC_COMMAND_START, + IS_ODC_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_DIS (V4L2_CID_FIMC_IS_BASE + 471) +enum is_dis_cmd_mode { + IS_DIS_COMMAND_STOP, + IS_DIS_COMMAND_START, + IS_DIS_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_TDNR (V4L2_CID_FIMC_IS_BASE + 472) +enum is_tdnr_cmd_mode { + IS_TDNR_COMMAND_STOP, + IS_TDNR_COMMAND_START, + IS_TDNR_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_SCALERC (V4L2_CID_FIMC_IS_BASE + 473) +enum is_scalerc_cmd_mode { + IS_SCALERC_COMMAND_STOP, + IS_SCALERC_COMMAND_START, + IS_SCALERC_COMMAND_MAX +}; + +#define V4L2_CID_IS_CMD_SCALERP (V4L2_CID_FIMC_IS_BASE + 474) +enum 
is_scalerp_cmd_mode { + IS_SCALERP_COMMAND_STOP, + IS_SCALERP_COMMAND_START, + IS_SCALERP_COMMAND_MAX +}; + +#define V4L2_CID_IS_GET_SENSOR_OFFSET_X (V4L2_CID_FIMC_IS_BASE + 480) +#define V4L2_CID_IS_GET_SENSOR_OFFSET_Y (V4L2_CID_FIMC_IS_BASE + 481) +#define V4L2_CID_IS_GET_SENSOR_WIDTH (V4L2_CID_FIMC_IS_BASE + 482) +#define V4L2_CID_IS_GET_SENSOR_HEIGHT (V4L2_CID_FIMC_IS_BASE + 483) + +#define V4L2_CID_IS_GET_FRAME_VALID (V4L2_CID_FIMC_IS_BASE + 484) +#define V4L2_CID_IS_SET_FRAME_VALID (V4L2_CID_FIMC_IS_BASE + 485) +#define V4L2_CID_IS_GET_FRAME_BADMARK (V4L2_CID_FIMC_IS_BASE + 486) +#define V4L2_CID_IS_SET_FRAME_BADMARK (V4L2_CID_FIMC_IS_BASE + 487) +#define V4L2_CID_IS_GET_FRAME_CAPTURED (V4L2_CID_FIMC_IS_BASE + 488) +#define V4L2_CID_IS_SET_FRAME_CAPTURED (V4L2_CID_FIMC_IS_BASE + 489) +#define V4L2_CID_IS_SET_FRAME_NUMBER (V4L2_CID_FIMC_IS_BASE + 490) +#define V4L2_CID_IS_GET_FRAME_NUMBER (V4L2_CID_FIMC_IS_BASE + 491) +#define V4L2_CID_IS_CLEAR_FRAME_NUMBER (V4L2_CID_FIMC_IS_BASE + 492) +#define V4L2_CID_IS_GET_LOSTED_FRAME_NUMBER (V4L2_CID_FIMC_IS_BASE + 493) +#define V4L2_CID_IS_ISP_DMA_BUFFER_NUM (V4L2_CID_FIMC_IS_BASE + 494) +#define V4L2_CID_IS_ISP_DMA_BUFFER_ADDRESS (V4L2_CID_FIMC_IS_BASE + 495) + +#define V4L2_CID_IS_ZOOM_STATE (V4L2_CID_FIMC_IS_BASE + 660) +#define V4L2_CID_IS_ZOOM_MAX_LEVEL (V4L2_CID_FIMC_IS_BASE + 661) +#define V4L2_CID_IS_ZOOM (V4L2_CID_FIMC_IS_BASE + 662) +#define V4L2_CID_IS_FW_DEBUG_REGION_ADDR (V4L2_CID_FIMC_IS_BASE + 663) +#define V4L2_CID_IS_FRAME_RATE (V4L2_CID_FIMC_IS_BASE + 664) +#define V4L2_CID_IS_SCENE_MODE (V4L2_CID_FIMC_IS_BASE + 667) +enum is_scene_mode { + IS_SCENE_MODE_BASE, + IS_SCENE_MODE_NONE, + IS_SCENE_MODE_PORTRAIT, + IS_SCENE_MODE_NIGHTSHOT, + IS_SCENE_MODE_BACK_LIGHT, + IS_SCENE_MODE_LANDSCAPE, + IS_SCENE_MODE_SPORTS, + IS_SCENE_MODE_PARTY_INDOOR, + IS_SCENE_MODE_BEACH_SNOW, + IS_SCENE_MODE_SUNSET, + IS_SCENE_MODE_DUSK_DAWN, + IS_SCENE_MODE_FALL_COLOR, + IS_SCENE_MODE_FIREWORKS, + IS_SCENE_MODE_TEXT, + IS_SCENE_MODE_CANDLE_LIGHT, + IS_SCENE_MODE_MAX, +}; + +#define V4L2_CID_IS_VT_MODE (V4L2_CID_FIMC_IS_BASE + 668) +#define V4L2_CID_IS_CAMERA_AUTO_FOCUS_RESULT (V4L2_CID_FIMC_IS_BASE + 669) + +#define V4L2_CID_IS_TUNE_SEL_ENTRY (V4L2_CID_FIMC_IS_TUNE_BASE) +#define V4L2_CID_IS_TUNE_SENSOR_EXPOSURE (V4L2_CID_FIMC_IS_TUNE_BASE + 1) +#define V4L2_CID_IS_TUNE_SENSOR_ANALOG_GAIN (V4L2_CID_FIMC_IS_TUNE_BASE + 2) +#define V4L2_CID_IS_TUNE_SENSOR_FRAME_RATE (V4L2_CID_FIMC_IS_TUNE_BASE + 3) +#define V4L2_CID_IS_TUNE_SENSOR_ACTUATOR_POS (V4L2_CID_FIMC_IS_TUNE_BASE + 4) + + +enum v4l2_blur { + BLUR_LEVEL_0 = 0, + BLUR_LEVEL_1, + BLUR_LEVEL_2, + BLUR_LEVEL_3, + BLUR_LEVEL_MAX, +}; + +#define V4L2_CID_SENSOR_SET_AE_TARGET (V4L2_CID_SENSOR_BASE + 1) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_1x1_2 (V4L2_CID_SENSOR_BASE + 2) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_1x3_4 (V4L2_CID_SENSOR_BASE + 3) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_2x1_2 (V4L2_CID_SENSOR_BASE + 4) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_2x3_4 (V4L2_CID_SENSOR_BASE + 5) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_3x1_2 (V4L2_CID_SENSOR_BASE + 6) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_3x3_4 (V4L2_CID_SENSOR_BASE + 7) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_4x1_2 (V4L2_CID_SENSOR_BASE + 8) +#define V4L2_CID_SENSOR_SET_AE_WEIGHT_4x3_4 (V4L2_CID_SENSOR_BASE + 9) +#define V4L2_CID_SENSOR_SET_RG_WEIGHT (V4L2_CID_SENSOR_BASE + 10) +#define V4L2_CID_SENSOR_SET_AE_SPEED (V4L2_CID_SENSOR_BASE + 11) +#define V4L2_CID_SENSOR_SET_SHUTTER (V4L2_CID_SENSOR_BASE + 12) +#define V4L2_CID_SENSOR_SET_GAIN 
(V4L2_CID_SENSOR_BASE + 13) +#define V4L2_CID_SENSOR_SET_BIT_CONVERTING (V4L2_CID_SENSOR_BASE + 14) +#define V4L2_CID_SENSOR_SET_AUTO_EXPOSURE (V4L2_CID_SENSOR_BASE + 15) +#define V4L2_CID_SENSOR_SET_FRAME_RATE (V4L2_CID_SENSOR_BASE + 16) + +#if 1 +#define V4L2_CID_CAMERA_SCENE_MODE (V4L2_CID_PRIVATE_BASE+70) + +#define V4L2_CID_CAMERA_FLASH_MODE (V4L2_CID_PRIVATE_BASE+71) +enum v4l2_flash_mode { + FLASH_MODE_BASE, + FLASH_MODE_OFF, + FLASH_MODE_AUTO, + FLASH_MODE_ON, + FLASH_MODE_TORCH, + FLASH_MODE_MAX, +}; + +#define V4L2_CID_CAMERA_BRIGHTNESS (V4L2_CID_PRIVATE_BASE+72) +enum v4l2_ev_mode { + EV_MINUS_4 = -4, + EV_MINUS_3 = -3, + EV_MINUS_2 = -2, + EV_MINUS_1 = -1, + EV_DEFAULT = 0, + EV_PLUS_1 = 1, + EV_PLUS_2 = 2, + EV_PLUS_3 = 3, + EV_PLUS_4 = 4, + EV_MAX, +}; + +#define V4L2_CID_CAMERA_WHITE_BALANCE (V4L2_CID_PRIVATE_BASE+73) +enum v4l2_wb_mode { + WHITE_BALANCE_BASE = 0, + WHITE_BALANCE_AUTO, + WHITE_BALANCE_SUNNY, + WHITE_BALANCE_CLOUDY, + WHITE_BALANCE_TUNGSTEN, + WHITE_BALANCE_FLUORESCENT, + WHITE_BALANCE_MAX, +}; + +#define V4L2_CID_CAMERA_EFFECT (V4L2_CID_PRIVATE_BASE+74) +enum v4l2_effect_mode { + IMAGE_EFFECT_BASE = 0, + IMAGE_EFFECT_NONE, + IMAGE_EFFECT_BNW, + IMAGE_EFFECT_SEPIA, + IMAGE_EFFECT_AQUA, + IMAGE_EFFECT_ANTIQUE, + IMAGE_EFFECT_NEGATIVE, + IMAGE_EFFECT_SHARPEN, + IMAGE_EFFECT_MAX, +}; + +#define V4L2_CID_CAMERA_ISO (V4L2_CID_PRIVATE_BASE+75) +enum v4l2_iso_mode { + ISO_AUTO = 0, + ISO_50, + ISO_100, + ISO_200, + ISO_400, + ISO_800, + ISO_1600, + ISO_SPORTS, + ISO_NIGHT, + ISO_MOVIE, + ISO_MAX, +}; + +#define V4L2_CID_CAMERA_METERING (V4L2_CID_PRIVATE_BASE+76) +enum v4l2_metering_mode { + METERING_BASE = 0, + METERING_MATRIX, + METERING_CENTER, + METERING_SPOT, + METERING_MAX, +}; + +#define V4L2_CID_CAMERA_CONTRAST (V4L2_CID_PRIVATE_BASE+77) +enum v4l2_contrast_mode { + CONTRAST_MINUS_2 = 0, + CONTRAST_MINUS_1, + CONTRAST_DEFAULT, + CONTRAST_PLUS_1, + CONTRAST_PLUS_2, + CONTRAST_MAX, +}; + +#define V4L2_CID_CAMERA_SATURATION (V4L2_CID_PRIVATE_BASE+78) +enum v4l2_saturation_mode { + SATURATION_MINUS_2 = 0, + SATURATION_MINUS_1, + SATURATION_DEFAULT, + SATURATION_PLUS_1, + SATURATION_PLUS_2, + SATURATION_MAX, +}; + +#define V4L2_CID_CAMERA_SHARPNESS (V4L2_CID_PRIVATE_BASE+79) +enum v4l2_sharpness_mode { + SHARPNESS_MINUS_2 = 0, + SHARPNESS_MINUS_1, + SHARPNESS_DEFAULT, + SHARPNESS_PLUS_1, + SHARPNESS_PLUS_2, + SHARPNESS_MAX, +}; + +#define V4L2_CID_CAMERA_WDR (V4L2_CID_PRIVATE_BASE+80) +enum v4l2_wdr_mode { + WDR_OFF, + WDR_ON, + WDR_MAX, +}; + +#define V4L2_CID_CAMERA_ANTI_SHAKE (V4L2_CID_PRIVATE_BASE+81) +enum v4l2_anti_shake_mode { + ANTI_SHAKE_OFF, + ANTI_SHAKE_STILL_ON, + ANTI_SHAKE_MOVIE_ON, + ANTI_SHAKE_MAX, +}; + +#define V4L2_CID_CAMERA_TOUCH_AF_START_STOP (V4L2_CID_PRIVATE_BASE+82) +enum v4l2_touch_af { + TOUCH_AF_STOP = 0, + TOUCH_AF_START, + TOUCH_AF_MAX, +}; + +#define V4L2_CID_CAMERA_SMART_AUTO (V4L2_CID_PRIVATE_BASE+83) +enum v4l2_smart_auto { + SMART_AUTO_OFF = 0, + SMART_AUTO_ON, + SMART_AUTO_MAX, +}; + +#define V4L2_CID_CAMERA_VINTAGE_MODE (V4L2_CID_PRIVATE_BASE+84) +enum v4l2_vintage_mode { + VINTAGE_MODE_BASE, + VINTAGE_MODE_OFF, + VINTAGE_MODE_NORMAL, + VINTAGE_MODE_WARM, + VINTAGE_MODE_COOL, + VINTAGE_MODE_BNW, + VINTAGE_MODE_MAX, +}; + +#define V4L2_CID_CAMERA_JPEG_QUALITY (V4L2_CID_PRIVATE_BASE+85) +/* (V4L2_CID_PRIVATE_BASE+86) */ +#define V4L2_CID_CAMERA_GPS_LATITUDE (V4L2_CID_CAMERA_CLASS_BASE+30) +/* (V4L2_CID_PRIVATE_BASE+87) */ +#define V4L2_CID_CAMERA_GPS_LONGITUDE (V4L2_CID_CAMERA_CLASS_BASE + 31) +/* (V4L2_CID_PRIVATE_BASE+88) */ 
+#define V4L2_CID_CAMERA_GPS_TIMESTAMP (V4L2_CID_CAMERA_CLASS_BASE + 32) +/* (V4L2_CID_PRIVATE_BASE+89)*/ +#define V4L2_CID_CAMERA_GPS_ALTITUDE (V4L2_CID_CAMERA_CLASS_BASE + 33) +#define V4L2_CID_CAMERA_EXIF_TIME_INFO (V4L2_CID_CAMERA_CLASS_BASE + 34) +#define V4L2_CID_CAMERA_GPS_PROCESSINGMETHOD (V4L2_CID_CAMERA_CLASS_BASE+35) + +#define V4L2_CID_FOCUS_AUTO_MODE (V4L2_CID_CAMERA_CLASS_BASE+36) +enum v4l2_focus_mode_type { + V4L2_FOCUS_AUTO_NORMAL = 0, + V4L2_FOCUS_AUTO_MACRO, + V4L2_FOCUS_AUTO_CONTINUOUS, + V4L2_FOCUS_AUTO_FACE_DETECTION, + V4L2_FOCUS_AUTO_RECTANGLE, + V4L2_FOCUS_AUTO_MAX, +}; +#define V4L2_CID_FOCUS_AUTO_RECTANGLE_LEFT (V4L2_CID_CAMERA_CLASS_BASE+37) +#define V4L2_CID_FOCUS_AUTO_RECTANGLE_TOP (V4L2_CID_CAMERA_CLASS_BASE+38) +#define V4L2_CID_FOCUS_AUTO_RECTANGLE_WIDTH (V4L2_CID_CAMERA_CLASS_BASE+39) +#define V4L2_CID_FOCUS_AUTO_RECTANGLE_HEIGHT (V4L2_CID_CAMERA_CLASS_BASE+40) +#define V4L2_CID_CAMERA_ZOOM (V4L2_CID_PRIVATE_BASE+90) +enum v4l2_zoom_level { + ZOOM_LEVEL_0 = 0, + ZOOM_LEVEL_1, + ZOOM_LEVEL_2, + ZOOM_LEVEL_3, + ZOOM_LEVEL_4, + ZOOM_LEVEL_5, + ZOOM_LEVEL_6, + ZOOM_LEVEL_7, + ZOOM_LEVEL_8, + ZOOM_LEVEL_9, + ZOOM_LEVEL_10, + ZOOM_LEVEL_11, + ZOOM_LEVEL_12, + ZOOM_LEVEL_MAX = 31, + ZOOM_LEVEL_X8_MAX = 100, + ZOOM_LEVEL_X4_400STEP_MAX = 400, + ZOOM_LEVEL_X8_800STEP_MAX = 800, +}; + +#define V4L2_CID_CAMERA_FACE_DETECTION (V4L2_CID_PRIVATE_BASE+91) +enum v4l2_face_detection { + FACE_DETECTION_OFF = 0, + FACE_DETECTION_ON, + FACE_DETECTION_NOLINE, + FACE_DETECTION_ON_BEAUTY, + FACE_DETECTION_MAX, +}; + +#define V4L2_CID_CAMERA_SMART_AUTO_STATUS (V4L2_CID_PRIVATE_BASE+92) +enum v4l2_smart_auto_status { + SMART_AUTO_STATUS_AUTO = 0, + SMART_AUTO_STATUS_LANDSCAPE, + SMART_AUTO_STATUS_PORTRAIT, + SMART_AUTO_STATUS_MACRO, + SMART_AUTO_STATUS_NIGHT, + SMART_AUTO_STATUS_PORTRAIT_NIGHT, + SMART_AUTO_STATUS_BACKLIT, + SMART_AUTO_STATUS_PORTRAIT_BACKLIT, + SMART_AUTO_STATUS_ANTISHAKE, + SMART_AUTO_STATUS_PORTRAIT_ANTISHAKE, + SMART_AUTO_STATUS_MAX, +}; + +#define V4L2_CID_CAMERA_SET_AUTO_FOCUS (V4L2_CID_PRIVATE_BASE+93) +enum v4l2_auto_focus { + AUTO_FOCUS_OFF = 0, + AUTO_FOCUS_ON, + AUTO_FOCUS_MAX, +}; + +#define V4L2_CID_CAMERA_BEAUTY_SHOT (V4L2_CID_PRIVATE_BASE+94) +enum v4l2_beauty_shot { + BEAUTY_SHOT_OFF = 0, + BEAUTY_SHOT_ON, + BEAUTY_SHOT_MAX, +}; + +#define V4L2_CID_CAMERA_AEAWB_LOCK_UNLOCK (V4L2_CID_PRIVATE_BASE+95) +enum v4l2_ae_awb_lockunlock { + AE_UNLOCK_AWB_UNLOCK = 0, + AE_LOCK_AWB_UNLOCK, + AE_UNLOCK_AWB_LOCK, + AE_LOCK_AWB_LOCK, + AE_AWB_MAX +}; + +#define V4L2_CID_CAMERA_FACEDETECT_LOCKUNLOCK (V4L2_CID_PRIVATE_BASE+96) +enum v4l2_face_lock { + FACE_LOCK_OFF = 0, + FACE_LOCK_ON, + FIRST_FACE_TRACKING, + FACE_LOCK_MAX, +}; + +#define V4L2_CID_CAMERA_OBJECT_POSITION_X (V4L2_CID_PRIVATE_BASE+97) +#define V4L2_CID_CAMERA_OBJECT_POSITION_Y (V4L2_CID_PRIVATE_BASE+98) +#define V4L2_CID_CAMERA_FOCUS_MODE (V4L2_CID_PRIVATE_BASE+99) +enum v4l2_focusmode { + FOCUS_MODE_AUTO = 0, + FOCUS_MODE_MACRO, + FOCUS_MODE_FACEDETECT, + FOCUS_MODE_AUTO_DEFAULT, + FOCUS_MODE_MACRO_DEFAULT, + FOCUS_MODE_FACEDETECT_DEFAULT, + FOCUS_MODE_INFINITY, + FOCUS_MODE_FIXED, + FOCUS_MODE_CONTINUOUS, + FOCUS_MODE_CONTINUOUS_PICTURE, + FOCUS_MODE_CONTINUOUS_PICTURE_MACRO, + FOCUS_MODE_CONTINUOUS_VIDEO, + FOCUS_MODE_TOUCH, + FOCUS_MODE_MAX, + FOCUS_MODE_DEFAULT = (1 << 8), +}; + +#define V4L2_CID_CAMERA_OBJ_TRACKING_STATUS (V4L2_CID_PRIVATE_BASE+100) +enum v4l2_obj_tracking_status { + OBJECT_TRACKING_STATUS_BASE, + OBJECT_TRACKING_STATUS_PROGRESSING, + OBJECT_TRACKING_STATUS_SUCCESS, + 
OBJECT_TRACKING_STATUS_FAIL, + OBJECT_TRACKING_STATUS_MISSING, + OBJECT_TRACKING_STATUS_MAX, +}; + +#define V4L2_CID_CAMERA_OBJ_TRACKING_START_STOP (V4L2_CID_PRIVATE_BASE+101) +enum v4l2_ot_start_stop { + OT_STOP = 0, + OT_START, + OT_MAX, +}; + +#define V4L2_CID_CAMERA_CAF_START_STOP (V4L2_CID_PRIVATE_BASE+102) +enum v4l2_caf_start_stop { + CAF_STOP = 0, + CAF_START, + CAF_MAX, +}; + +#define V4L2_CID_CAMERA_AUTO_FOCUS_RESULT (V4L2_CID_PRIVATE_BASE+103) +enum v4l2_af_status { + CAMERA_AF_STATUS_IN_PROGRESS = 0, + CAMERA_AF_STATUS_SUCCESS, + CAMERA_AF_STATUS_FAIL, + CAMERA_AF_STATUS_1ST_SUCCESS, + CAMERA_AF_STATUS_MAX, +}; +#define V4L2_CID_CAMERA_FRAME_RATE (V4L2_CID_PRIVATE_BASE+104) +enum v4l2_frame_rate { + FRAME_RATE_AUTO = 0, + FRAME_RATE_7 = 7, + FRAME_RATE_15 = 15, + FRAME_RATE_20 = 20, + FRAME_RATE_30 = 30, + FRAME_RATE_60 = 60, + FRAME_RATE_120 = 120, + FRAME_RATE_240 = 240, + FRAME_RATE_MAX +}; + +#define V4L2_CID_CAMERA_ANTI_BANDING (V4L2_CID_PRIVATE_BASE+105) +enum v4l2_anti_banding { + ANTI_BANDING_AUTO = 0, + ANTI_BANDING_50HZ = 1, + ANTI_BANDING_60HZ = 2, + ANTI_BANDING_OFF = 3, +}; + +#define V4L2_CID_CAMERA_SET_GAMMA (V4L2_CID_PRIVATE_BASE+106) +enum v4l2_gamma_mode { + GAMMA_OFF = 0, + GAMMA_ON = 1, + GAMMA_MAX, +}; + +#define V4L2_CID_CAMERA_SET_SLOW_AE (V4L2_CID_PRIVATE_BASE+107) +enum v4l2_slow_ae_mode { + SLOW_AE_OFF, + SLOW_AE_ON, + SLOW_AE_MAX, +}; + +#define V4L2_CID_CAMERA_BATCH_REFLECTION (V4L2_CID_PRIVATE_BASE+108) +#define V4L2_CID_CAMERA_EXIF_ORIENTATION (V4L2_CID_PRIVATE_BASE+109) + +/* s1_camera [ Defense process by ESD input ] */ +#define V4L2_CID_CAMERA_RESET (V4L2_CID_PRIVATE_BASE+111) +#define V4L2_CID_CAMERA_CHECK_DATALINE (V4L2_CID_PRIVATE_BASE+112) +#define V4L2_CID_CAMERA_CHECK_DATALINE_STOP (V4L2_CID_PRIVATE_BASE+113) + +#endif + +/* Modify NTTS1 */ +#if defined(CONFIG_ARIES_NTT) +#define V4L2_CID_CAMERA_AE_AWB_DISABLE_LOCK (V4L2_CID_PRIVATE_BASE+114) +#endif +#define V4L2_CID_CAMERA_THUMBNAIL_NULL (V4L2_CID_PRIVATE_BASE+115) +#define V4L2_CID_CAMERA_SENSOR_MODE (V4L2_CID_PRIVATE_BASE+116) +enum v4l2_sensor_mode { + SENSOR_CAMERA, + SENSOR_MOVIE, +}; + +enum stream_mode_t { + STREAM_MODE_CAM_OFF, + STREAM_MODE_CAM_ON, + STREAM_MODE_MOVIE_OFF, + STREAM_MODE_MOVIE_ON, +}; + +#define V4L2_CID_CAMERA_EXIF_EXPTIME (V4L2_CID_PRIVATE_BASE+117) +#define V4L2_CID_CAMERA_EXIF_FLASH (V4L2_CID_PRIVATE_BASE+118) +#define V4L2_CID_CAMERA_EXIF_ISO (V4L2_CID_PRIVATE_BASE+119) +#define V4L2_CID_CAMERA_EXIF_TV (V4L2_CID_PRIVATE_BASE+120) +#define V4L2_CID_CAMERA_EXIF_BV (V4L2_CID_PRIVATE_BASE+121) +#define V4L2_CID_CAMERA_EXIF_EBV (V4L2_CID_PRIVATE_BASE+122) +#define V4L2_CID_CAMERA_CHECK_ESD (V4L2_CID_PRIVATE_BASE+123) +#define V4L2_CID_CAMERA_APP_CHECK (V4L2_CID_PRIVATE_BASE+124) + +#define V4L2_CID_CAMERA_FACE_ZOOM (V4L2_CID_PRIVATE_BASE + 132) +enum v4l2_face_zoom { + FACE_ZOOM_STOP = 0, + FACE_ZOOM_START +}; +/* control for post processing block in ISP */ +#define V4L2_CID_CAMERA_SET_ODC (V4L2_CID_PRIVATE_BASE+127) +enum set_odc_mode { + CAMERA_ODC_ON, + CAMERA_ODC_OFF +}; + +#define V4L2_CID_CAMERA_SET_DIS (V4L2_CID_PRIVATE_BASE+128) +enum set_dis_mode { + CAMERA_DIS_ON, + CAMERA_DIS_OFF +}; + +#define V4L2_CID_CAMERA_SET_3DNR (V4L2_CID_PRIVATE_BASE+129) +enum set_3dnr_mode { + CAMERA_3DNR_ON, + CAMERA_3DNR_OFF +}; + +#define V4L2_CID_EMBEDDEDDATA_ENABLE (V4L2_CID_PRIVATE_BASE+130) + +#define V4L2_CID_CAMERA_CHECK_SENSOR_STATUS (V4L2_CID_PRIVATE_BASE+150) +#define V4L2_CID_CAMERA_DEFAULT_FOCUS_POSITION (V4L2_CID_PRIVATE_BASE+151) +#define 
V4L2_CID_CAMERA_OIS_SINE_MODE (V4L2_CID_PRIVATE_BASE+152) +/* Pixel format FOURCC depth Description */ +enum v4l2_pix_format_mode { + V4L2_PIX_FMT_MODE_PREVIEW, + V4L2_PIX_FMT_MODE_CAPTURE, + V4L2_PIX_FMT_MODE_HDR, + V4L2_PIX_FMT_MODE_VT_MIRROR, + V4L2_PIX_FMT_MODE_VT_NONMIRROR, +}; + +#define V4L2_CID_CAMERA_FADE_IN (V4L2_CID_PRIVATE_BASE+153) + +/* 12 Y/CbCr 4:2:0 64x32 macroblocks */ +#define V4L2_PIX_FMT_NV12T v4l2_fourcc('T', 'V', '1', '2') +#define V4L2_PIX_FMT_NV21T v4l2_fourcc('T', 'V', '2', '1') +#define V4L2_PIX_FMT_INTERLEAVED v4l2_fourcc('I', 'T', 'L', 'V') + + +/* + * * V4L2 extention for digital camera + * */ +/* Strobe flash light */ +enum v4l2_strobe_control { + /* turn off the flash light */ + V4L2_STROBE_CONTROL_OFF = 0, + /* turn on the flash light */ + V4L2_STROBE_CONTROL_ON = 1, + /* act guide light before splash */ + V4L2_STROBE_CONTROL_AFGUIDE = 2, + /* charge the flash light */ + V4L2_STROBE_CONTROL_CHARGE = 3, +}; + +enum v4l2_strobe_conf { + V4L2_STROBE_OFF = 0, /* Always off */ + V4L2_STROBE_ON = 1, /* Always splashes */ + /* Auto control presets */ + V4L2_STROBE_AUTO = 2, + V4L2_STROBE_REDEYE_REDUCTION = 3, + V4L2_STROBE_SLOW_SYNC = 4, + V4L2_STROBE_FRONT_CURTAIN = 5, + V4L2_STROBE_REAR_CURTAIN = 6, + /* Extra manual control presets */ + /* keep turned on until turning off */ + V4L2_STROBE_PERMANENT = 7, + V4L2_STROBE_EXTERNAL = 8, +}; + +enum v4l2_strobe_status { + V4L2_STROBE_STATUS_OFF = 0, + /* while processing configurations */ + V4L2_STROBE_STATUS_BUSY = 1, + V4L2_STROBE_STATUS_ERR = 2, + V4L2_STROBE_STATUS_CHARGING = 3, + V4L2_STROBE_STATUS_CHARGED = 4, +}; + +/* capabilities field */ +/* No strobe supported */ +#define V4L2_STROBE_CAP_NONE 0x0000 +/* Always flash off mode */ +#define V4L2_STROBE_CAP_OFF 0x0001 +/* Always use flash light mode */ +#define V4L2_STROBE_CAP_ON 0x0002 +/* Flashlight works automatic */ +#define V4L2_STROBE_CAP_AUTO 0x0004 +/* Red-eye reduction */ +#define V4L2_STROBE_CAP_REDEYE 0x0008 +/* Slow sync */ +#define V4L2_STROBE_CAP_SLOWSYNC 0x0010 +/* Front curtain */ +#define V4L2_STROBE_CAP_FRONT_CURTAIN 0x0020 +/* Rear curtain */ +#define V4L2_STROBE_CAP_REAR_CURTAIN 0x0040 +/* keep turned on until turning off */ +#define V4L2_STROBE_CAP_PERMANENT 0x0080 +/* use external strobe */ +#define V4L2_STROBE_CAP_EXTERNAL 0x0100 + +/* Set mode and Get status */ +struct v4l2_strobe { + /* off/on/charge:0/1/2 */ + enum v4l2_strobe_control control; + /* supported strobe capabilities */ + __u32 capabilities; + enum v4l2_strobe_conf mode; + enum v4l2_strobe_status status; /* read only */ + /* default is 0 and range of value varies from each models */ + __u32 flash_ev; + __u32 reserved[4]; +}; + +#define VIDIOC_S_STROBE _IOWR('V', 83, struct v4l2_strobe) +#define VIDIOC_G_STROBE _IOR('V', 84, struct v4l2_strobe) + +/* Object recognition and collateral actions */ +enum v4l2_recog_mode { + V4L2_RECOGNITION_MODE_OFF = 0, + V4L2_RECOGNITION_MODE_ON = 1, + V4L2_RECOGNITION_MODE_LOCK = 2, +}; + +enum v4l2_recog_action { + V4L2_RECOGNITION_ACTION_NONE = 0, /* only recognition */ + V4L2_RECOGNITION_ACTION_BLINK = 1, /* Capture on blinking */ + V4L2_RECOGNITION_ACTION_SMILE = 2, /* Capture on smiling */ +}; + +enum v4l2_recog_pattern { + V4L2_RECOG_PATTERN_FACE = 0, /* Face */ + V4L2_RECOG_PATTERN_HUMAN = 1, /* Human */ + V4L2_RECOG_PATTERN_CHAR = 2, /* Character */ +}; + +struct v4l2_recog_rect { + enum v4l2_recog_pattern p; /* detected pattern */ + struct v4l2_rect o; /* detected area */ + __u32 reserved[4]; +}; + +struct v4l2_recog_data { + __u8 
detect_cnt; /* detected object counter */ + struct v4l2_rect o; /* detected area */ + __u32 reserved[4]; +}; + +struct v4l2_recognition { + enum v4l2_recog_mode mode; + + /* Which pattern to detect */ + enum v4l2_recog_pattern pattern; + + /* How many object to detect */ + __u8 obj_num; + + /* select detected object */ + __u32 detect_idx; + + /* read only :Get object coordination */ + struct v4l2_recog_data data; + + enum v4l2_recog_action action; + __u32 reserved[4]; +}; + +#define VIDIOC_S_RECOGNITION _IOWR('V', 85, struct v4l2_recognition) +#define VIDIOC_G_RECOGNITION _IOR('V', 86, struct v4l2_recognition) + +#endif /* __LINUX_VIDEODEV2_EXYNOS_CAMERA_H */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.cpp b/libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.cpp new file mode 100644 index 0000000..5818a7f --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.cpp @@ -0,0 +1,385 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityAutofocus" +#include + +#include "ExynosCameraActivityAutofocus.h" + +namespace android { + +#define WAIT_COUNT_FAIL_STATE (7) +#define AUTOFOCUS_WAIT_COUNT_STEP_REQUEST (3) + +#define AUTOFOCUS_WAIT_COUNT_FRAME_COUNT_NUM (3) /* n + x frame count */ +#define AUTOFOCUS_WATING_TIME_LOCK_AF (10000) /* 10msec */ +#define AUTOFOCUS_TOTAL_WATING_TIME_LOCK_AF (300000) /* 300msec */ +#define AUTOFOCUS_SKIP_FRAME_LOCK_AF (6) /* == NUM_BAYER_BUFFERS */ + +#define SET_BIT(x) (1 << x) + +ExynosCameraActivityAutofocus::ExynosCameraActivityAutofocus() +{ + m_flagAutofocusStart = false; + m_flagAutofocusLock = false; + + /* first Lens position is infinity */ + /* m_autoFocusMode = AUTOFOCUS_MODE_BASE; */ + m_autoFocusMode = AUTOFOCUS_MODE_INFINITY; + m_interenalAutoFocusMode = AUTOFOCUS_MODE_BASE; + + m_focusWeight = 0; + /* first AF operation is trigger infinity mode */ + /* m_autofocusStep = AUTOFOCUS_STEP_STOP; */ + m_autofocusStep = AUTOFOCUS_STEP_REQUEST; + m_aaAfState = ::AA_AFSTATE_INACTIVE; + m_afState = AUTOFOCUS_STATE_NONE; + m_aaAFMode = ::AA_AFMODE_OFF; + m_metaCtlAFMode = -1; + m_waitCountFailState = 0; + m_stepRequestCount = 0; + m_frameCount = 0; + + m_recordingHint = false; + m_flagFaceDetection = false; + m_macroPosition = AUTOFOCUS_MACRO_POSITION_BASE; + m_fpsValue = 0; + m_samsungCamera = false; + m_afInMotionResult = false; + + m_af_mode_info = 0; + m_af_pan_focus_info = 0; + m_af_typical_macro_info = 0; + m_af_module_version_info = 0; + m_af_state_info = 0; + m_af_cur_pos_info = 0; + m_af_time_info = 0; + m_af_factory_info = 0; + m_paf_from_info = 0; + m_paf_error_code = 0; +} + +ExynosCameraActivityAutofocus::~ExynosCameraActivityAutofocus() +{ +} + +int ExynosCameraActivityAutofocus::t_funcNull(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcSensorBefore(__unused void *args) +{ + return 1; +} + +int 
ExynosCameraActivityAutofocus::t_funcSensorAfter(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcISPBefore(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcISPAfter(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_func3ABeforeHAL3(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_func3AAfterHAL3(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcSCPBefore(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcSCPAfter(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcSCCBefore(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::t_funcSCCAfter(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityAutofocus::getAutofocusMode(void) +{ + return m_autoFocusMode; +} + +bool ExynosCameraActivityAutofocus::getRecordingHint(void) +{ + return m_recordingHint; +} + +bool ExynosCameraActivityAutofocus::setFocusAreas(ExynosRect2 rect, int weight) +{ + m_focusArea = rect; + m_focusWeight = weight; + + return true; +} + +bool ExynosCameraActivityAutofocus::getFocusAreas(ExynosRect2 *rect, int *weight) +{ + *rect = m_focusArea; + *weight = m_focusWeight; + + return true; +} + +bool ExynosCameraActivityAutofocus::startAutofocus(void) +{ + ALOGI("INFO(%s[%d]):m_autoFocusMode(%d)", __FUNCTION__, __LINE__, m_autoFocusMode); + + m_autofocusStep = AUTOFOCUS_STEP_REQUEST; + m_flagAutofocusStart = true; + + return true; +} + +bool ExynosCameraActivityAutofocus::stopAutofocus(void) +{ + ALOGI("INFO(%s[%d]):m_autoFocusMode(%d)", __FUNCTION__, __LINE__, m_autoFocusMode); + + m_autofocusStep = AUTOFOCUS_STEP_STOP; + m_flagAutofocusStart = false; + + return true; +} + +bool ExynosCameraActivityAutofocus::flagAutofocusStart(void) +{ + return m_flagAutofocusStart; +} + +bool ExynosCameraActivityAutofocus::lockAutofocus() +{ + ALOGI("INFO(%s[%d]):m_autoFocusMode(%d)", __FUNCTION__, __LINE__, m_autoFocusMode); + + if(m_autofocusStep != AUTOFOCUS_STEP_TRIGGER_START) { + m_autofocusStep = AUTOFOCUS_STEP_TRIGGER_START; + + ALOGI("INFO(%s): request locked state of Focus. 
: m_autofocusStep(%d), m_aaAfState(%d)", + __FUNCTION__, m_autofocusStep, m_aaAfState); + } + + m_flagAutofocusStart = false; + + if (m_aaAfState == AA_AFSTATE_INACTIVE || + m_aaAfState == AA_AFSTATE_PASSIVE_SCAN || + m_aaAfState == AA_AFSTATE_ACTIVE_SCAN) { + /* + * hold, until + 3 Frame + * n (lockFrameCount) : n - 1's state + * n + 1 : adjust on f/w + * n + 2 : adjust on sensor + * n + 3 : result + */ + int lockFrameCount = m_frameCount; + unsigned int i = 0; + bool flagScanningDetected = false; + int scanningDetectedFrameCount = 0; + + for (i = 0; i < AUTOFOCUS_TOTAL_WATING_TIME_LOCK_AF; i += AUTOFOCUS_WATING_TIME_LOCK_AF) { + if (lockFrameCount + AUTOFOCUS_WAIT_COUNT_FRAME_COUNT_NUM <= m_frameCount) { + ALOGD("DEBUG(%s):find lockFrameCount(%d) + %d, m_frameCount(%d), m_aaAfState(%d)", + __FUNCTION__, lockFrameCount, AUTOFOCUS_WAIT_COUNT_FRAME_COUNT_NUM, m_frameCount, m_aaAfState); + break; + } + + if (flagScanningDetected == false) { + if (m_aaAfState == AA_AFSTATE_PASSIVE_SCAN || + m_aaAfState == AA_AFSTATE_ACTIVE_SCAN) { + flagScanningDetected = true; + scanningDetectedFrameCount = m_frameCount; + } + } + + usleep(AUTOFOCUS_WATING_TIME_LOCK_AF); + } + + if (AUTOFOCUS_TOTAL_WATING_TIME_LOCK_AF <= i) { + ALOGW("WARN(%s):AF lock time out (%d)msec", __FUNCTION__, i / 1000); + } else { + /* skip bayer frame when scanning detected */ + if (flagScanningDetected == true) { + for (i = 0; i < AUTOFOCUS_TOTAL_WATING_TIME_LOCK_AF; i += AUTOFOCUS_WATING_TIME_LOCK_AF) { + if (scanningDetectedFrameCount + AUTOFOCUS_SKIP_FRAME_LOCK_AF <= m_frameCount) { + ALOGD("DEBUG(%s):kcoolsw find scanningDetectedFrameCount(%d) + %d, m_frameCount(%d), m_aaAfState(%d)", + __FUNCTION__, scanningDetectedFrameCount, AUTOFOCUS_SKIP_FRAME_LOCK_AF, m_frameCount, m_aaAfState); + break; + } + + usleep(AUTOFOCUS_WATING_TIME_LOCK_AF); + } + + if (AUTOFOCUS_TOTAL_WATING_TIME_LOCK_AF <= i) + ALOGW("WARN(%s):kcoolsw scanningDectected skip time out (%d)msec", __FUNCTION__, i / 1000); + } + } + } + + m_flagAutofocusLock = true; + + return true; +} + +bool ExynosCameraActivityAutofocus::unlockAutofocus() +{ + ALOGI("INFO(%s[%d]):m_autoFocusMode(%d)", __FUNCTION__, __LINE__, m_autoFocusMode); + + /* + * With the 3.2 metadata interface, + * unlockAutofocus() triggers the new AF scanning. 
+ */ + m_flagAutofocusStart = true; + m_autofocusStep = AUTOFOCUS_STEP_REQUEST; + + return true; +} + +bool ExynosCameraActivityAutofocus::flagLockAutofocus(void) +{ + return m_flagAutofocusLock; +} + +int ExynosCameraActivityAutofocus::getCurrentState(void) +{ + int state = AUTOFOCUS_STATE_NONE; + + if (m_flagAutofocusStart == false) { + state = m_afState; + goto done; + } + + switch (m_aaAfState) { + case ::AA_AFSTATE_INACTIVE: + state = AUTOFOCUS_STATE_NONE; + break; + case ::AA_AFSTATE_PASSIVE_SCAN: + case ::AA_AFSTATE_ACTIVE_SCAN: + state = AUTOFOCUS_STATE_SCANNING; + break; + case ::AA_AFSTATE_PASSIVE_FOCUSED: + case ::AA_AFSTATE_FOCUSED_LOCKED: + state = AUTOFOCUS_STATE_SUCCEESS; + break; + case ::AA_AFSTATE_NOT_FOCUSED_LOCKED: + case ::AA_AFSTATE_PASSIVE_UNFOCUSED: + state = AUTOFOCUS_STATE_FAIL; + break; + default: + state = AUTOFOCUS_STATE_NONE; + break; + } + +done: + m_afState = state; + + return state; +} + +bool ExynosCameraActivityAutofocus::setRecordingHint(bool hint) +{ + ALOGI("INFO(%s[%d]):hint(%d)", __FUNCTION__, __LINE__, hint); + + m_recordingHint = hint; + return true; +} + +bool ExynosCameraActivityAutofocus::setFaceDetection(bool toggle) +{ + ALOGI("INFO(%s[%d]):toggle(%d)", __FUNCTION__, __LINE__, toggle); + + m_flagFaceDetection = toggle; + return true; +} + +bool ExynosCameraActivityAutofocus::setMacroPosition(int macroPosition) +{ + ALOGI("INFO(%s[%d]):macroPosition(%d)", __FUNCTION__, __LINE__, macroPosition); + + m_macroPosition = macroPosition; + return true; +} + +void ExynosCameraActivityAutofocus::setFpsValue(int fpsValue) +{ + m_fpsValue = fpsValue; +} + +int ExynosCameraActivityAutofocus::getFpsValue() +{ + return m_fpsValue; +} + +void ExynosCameraActivityAutofocus::setSamsungCamera(int flags) +{ + m_samsungCamera = flags; +} + +void ExynosCameraActivityAutofocus::setAfInMotionResult(bool afInMotion) +{ + m_afInMotionResult = afInMotion; +} + +bool ExynosCameraActivityAutofocus::getAfInMotionResult(void) +{ + return m_afInMotionResult; +} + +void ExynosCameraActivityAutofocus::displayAFInfo(void) +{ + ALOGD("(%s):==================================================", "CMGEFL"); + ALOGD("(%s):0x%x", "CMGEFL", m_af_mode_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_pan_focus_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_typical_macro_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_module_version_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_state_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_cur_pos_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_time_info); + ALOGD("(%s):0x%x", "CMGEFL", m_af_factory_info); + ALOGD("(%s):0x%x", "CMGEFL", m_paf_from_info); + ALOGD("(%s):0x%x", "CMGEFL", m_paf_error_code); + ALOGD("(%s):==================================================", "CMGEFL"); + return ; +} + +void ExynosCameraActivityAutofocus::displayAFStatus(void) +{ + ALOGD("(%s):0x%x / 0x%x / 0x%x", "CMGEFL", + m_af_state_info, m_af_cur_pos_info, m_af_time_info); + return ; +} +} /* namespace android */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.h b/libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.h new file mode 100644 index 0000000..d3133dd --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityAutofocus.h @@ -0,0 +1,267 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file ExynosCameraActivityAutofocus.h + * \brief hearder file for ExynosCameraActivityAutofocus + * \author Sangowoo Park(sw5771.park@samsung.com) + * \date 2012/03/07 + * + */ + +#ifndef EXYNOS_CAMERA_ACTIVITY_AUTOFOCUS_H__ +#define EXYNOS_CAMERA_ACTIVITY_AUTOFOCUS_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include "cutils/properties.h" + +#include "exynos_format.h" +#include "ExynosBuffer.h" +#include "ExynosRect.h" +#include "exynos_v4l2.h" +#include "ExynosCameraActivityBase.h" + +#include "fimc-is-metadata.h" + +#define UniPluginFocusData_t int + +#define AUTOFOCUS_WATING_TIME (10000) /* 10msec */ +#define AUTOFOCUS_TOTAL_WATING_TIME (3000000) /* 3000msec */ + +#define MAX_FRAME_AF_DEBUG 50 +#define WAIT_COUNT_FAIL_STATE (7) +#define AUTOFOCUS_WAIT_COUNT_STEP_REQUEST (3) + +#define AUTOFOCUS_WAIT_COUNT_FRAME_COUNT_NUM (3) /* n + x frame count */ +#define AUTOFOCUS_WATING_TIME_LOCK_AF (10000) /* 10msec */ +#define AUTOFOCUS_TOTAL_WATING_TIME_LOCK_AF (300000) /* 300msec */ +#define AUTOFOCUS_SKIP_FRAME_LOCK_AF (6) /* == NUM_BAYER_BUFFERS */ + +#define SET_BIT(x) (1 << x) + +struct camera2_af_debug_info { + uint16_t CurrPos; + uint64_t FilterValue; +}; + +struct camera2_af_exif_info { + uint16_t INDICATOR1; // AFAF + uint16_t INDICATOR2; // AFAF + + uint16_t AF_MODE; + uint16_t AF_PAN_FOCUS; + uint16_t AF_TYPICAL_MACRO; + uint16_t AF_MODULE_VERSION; + uint16_t AF_STATE; + uint16_t AF_CUR_POS; + uint16_t AF_TIME; // unit: (ms) + uint16_t FACTORY_INFO; + uint32_t PAF_FROM_INFO; // first 4 bytes of PDAF cal region(date) + int32_t APEX_BV; + float GYRO_INFO_X; + float GYRO_INFO_Y; + float GYRO_INFO_Z; + + uint16_t INDICATOR3; // AFAF + uint16_t INDICATOR4; // AFAF + + uint16_t TotalDebugInfo; + camera2_af_debug_info DebugInfo[MAX_FRAME_AF_DEBUG]; + + uint16_t INDICATOR5; // AFAF + uint16_t INDICATOR6; // AFAF +}; + +namespace android { + /* Moved from SecCameraParameters.h */ + enum { + FOCUS_RESULT_FAIL = 0, + FOCUS_RESULT_SUCCESS, + FOCUS_RESULT_CANCEL, + FOCUS_RESULT_FOCUSING, + FOCUS_RESULT_RESTART, + }; + +class ExynosCameraActivityAutofocus : public ExynosCameraActivityBase { +public: + enum AUTOFOCUS_MODE { + AUTOFOCUS_MODE_BASE = (0), + AUTOFOCUS_MODE_AUTO = (1 << 0), + AUTOFOCUS_MODE_INFINITY = (1 << 1), + AUTOFOCUS_MODE_MACRO = (1 << 2), + AUTOFOCUS_MODE_FIXED = (1 << 3), + AUTOFOCUS_MODE_EDOF = (1 << 4), + AUTOFOCUS_MODE_CONTINUOUS_VIDEO = (1 << 5), + AUTOFOCUS_MODE_CONTINUOUS_PICTURE = (1 << 6), + AUTOFOCUS_MODE_TOUCH = (1 << 7), + AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO = (1 << 8), + }; + + enum AUTOFOCUS_MACRO_POSITION { + AUTOFOCUS_MACRO_POSITION_BASE = (0), + AUTOFOCUS_MACRO_POSITION_CENTER = (1 << 0), + AUTOFOCUS_MACRO_POSITION_CENTER_UP = (1 << 1), + }; + + enum AUTOFOCUS_STATE { + AUTOFOCUS_STATE_NONE = 0, + AUTOFOCUS_STATE_SCANNING, + AUTOFOCUS_STATE_SUCCEESS, + AUTOFOCUS_STATE_FAIL, + }; + + enum { + AUTOFOCUS_RESULT_FAIL = 0, + AUTOFOCUS_RESULT_SUCCESS, + 
AUTOFOCUS_RESULT_CANCEL, + AUTOFOCUS_RESULT_FOCUSING, + AUTOFOCUS_RESULT_RESTART, + }; + +public: + ExynosCameraActivityAutofocus(); + virtual ~ExynosCameraActivityAutofocus(); + +protected: + int t_funcNull(void *args); + int t_funcSensorBefore(void *args); + int t_funcSensorAfter(void *args); + int t_func3ABefore(void *args); + int t_func3AAfter(void *args); + int t_func3ABeforeHAL3(void *args); + int t_func3AAfterHAL3(void *args); + int t_funcISPBefore(void *args); + int t_funcISPAfter(void *args); + int t_funcSCPBefore(void *args); + int t_funcSCPAfter(void *args); + int t_funcSCCBefore(void *args); + int t_funcSCCAfter(void *args); + +public: + bool setAutofocusMode(int autoFocusMode); + int getAutofocusMode(void); + + bool setFocusAreas(ExynosRect2 rect, int weight); + bool getFocusAreas(ExynosRect2 *rect, int *weight); + + bool startAutofocus(void); + bool stopAutofocus(void); + bool flagAutofocusStart(void); + + bool lockAutofocus(void); + bool unlockAutofocus(void); + bool flagLockAutofocus(void); + + bool getAutofocusResult(bool flagLockFocus = false, bool flagStartFaceDetection = false, int numOfFace = 0); + int getCAFResult(void); + int getCurrentState(void); + + bool setRecordingHint(bool hint); + bool getRecordingHint(void); + + bool setFaceDetection(bool toggle); + bool setMacroPosition(int macroPosition); + + void setAfInMotionResult(bool afInMotion); + bool getAfInMotionResult(void); + + void displayAFInfo(void); + void displayAFStatus(void); + + static AUTOFOCUS_STATE afState2AUTOFOCUS_STATE(enum aa_afstate aaAfState); + + /* Sets FPS Value */ + void setFpsValue(int fpsValue); + void setSamsungCamera(int flags); + int getFpsValue(); + +private: + enum AUTOFOCUS_STEP { + AUTOFOCUS_STEP_BASE = 0, + AUTOFOCUS_STEP_STOP, + AUTOFOCUS_STEP_DELAYED_STOP, + AUTOFOCUS_STEP_REQUEST, + AUTOFOCUS_STEP_START, + AUTOFOCUS_STEP_START_SCANNING, + AUTOFOCUS_STEP_SCANNING, + AUTOFOCUS_STEP_DONE, + AUTOFOCUS_STEP_TRIGGER_START, + }; + + bool m_flagAutofocusStart; + bool m_flagAutofocusLock; + + int m_autoFocusMode; /* set by user */ + int m_interenalAutoFocusMode; /* set by this */ + + ExynosRect2 m_focusArea; + int m_focusWeight; + + int m_autofocusStep; + int m_aaAfState; + int m_afState; + int m_aaAFMode; /* set on h/w */ + int m_metaCtlAFMode; + int m_waitCountFailState; + int m_stepRequestCount; + int m_frameCount; + + bool m_recordingHint; + bool m_flagFaceDetection; + int m_macroPosition; + + uint16_t m_af_mode_info; + uint16_t m_af_pan_focus_info; + uint16_t m_af_typical_macro_info; + uint16_t m_af_module_version_info; + uint16_t m_af_state_info; + uint16_t m_af_cur_pos_info; + uint16_t m_af_time_info; + uint16_t m_af_factory_info; + uint32_t m_paf_from_info; + uint32_t m_paf_error_code; + + unsigned int m_fpsValue; + bool m_samsungCamera; + bool m_afInMotionResult; + + void m_AUTOFOCUS_MODE2AA_AFMODE(int autoFocusMode, camera2_shot_ext *shot_ext); +}; +} + +#endif /* EXYNOS_CAMERA_ACTIVITY_AUTOFOCUS_H__ */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityBase.cpp b/libcamera/common_v2/Activities/ExynosCameraActivityBase.cpp new file mode 100644 index 0000000..5347789 --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityBase.cpp @@ -0,0 +1,84 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
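Aside, not part of the patch: the AUTOFOCUS_STATE values declared above are what getCurrentState() earlier in this chunk reduces the firmware's aa_afstate reports to. A minimal self-contained sketch of that mapping, using simplified stand-in enums rather than the real ones from fimc-is-metadata.h:

// Illustrative sketch only: mirrors the switch in
// ExynosCameraActivityAutofocus::getCurrentState(), with stand-in enums.
enum class AaAfState { Inactive, PassiveScan, ActiveScan, PassiveFocused,
                       FocusedLocked, NotFocusedLocked, PassiveUnfocused };
enum class AutofocusState { None, Scanning, Success, Fail };

static AutofocusState mapAfState(AaAfState fw)
{
    switch (fw) {
    case AaAfState::PassiveScan:
    case AaAfState::ActiveScan:
        return AutofocusState::Scanning;   // lens is still sweeping
    case AaAfState::PassiveFocused:
    case AaAfState::FocusedLocked:
        return AutofocusState::Success;    // focus reached or locked
    case AaAfState::NotFocusedLocked:
    case AaAfState::PassiveUnfocused:
        return AutofocusState::Fail;       // scan finished without focus
    case AaAfState::Inactive:
    default:
        return AutofocusState::None;
    }
}

The scanning states collapse to SCANNING, the focused/locked states to SUCCESS, and the unfocused states to FAIL, which is exactly the grouping the original switch applies before caching the result in m_afState.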
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityBase" +#include + +#include "ExynosCameraActivityBase.h" + +namespace android { + +ExynosCameraActivityBase::ExynosCameraActivityBase() +{ + t_isExclusiveReq = false; + t_isActivated = false; + t_reqNum = 0; + t_reqStatus = 0; + pFunc = NULL; +} + +ExynosCameraActivityBase::~ExynosCameraActivityBase() +{ +} + +int ExynosCameraActivityBase::execFunction(CALLBACK_TYPE callbackType, void *args) +{ + switch (callbackType) { + case CALLBACK_TYPE_SENSOR_BEFORE: + pFunc = &ExynosCameraActivityBase::t_funcSensorBefore; + break; + case CALLBACK_TYPE_SENSOR_AFTER: + pFunc = &ExynosCameraActivityBase::t_funcSensorAfter; + break; + case CALLBACK_TYPE_3A_BEFORE: + pFunc = &ExynosCameraActivityBase::t_func3ABefore; + break; + case CALLBACK_TYPE_3A_AFTER: + pFunc = &ExynosCameraActivityBase::t_func3AAfter; + break; + case CALLBACK_TYPE_3A_BEFORE_HAL3: + pFunc = &ExynosCameraActivityBase::t_func3ABeforeHAL3; + break; + case CALLBACK_TYPE_3A_AFTER_HAL3: + pFunc = &ExynosCameraActivityBase::t_func3AAfterHAL3; + break; + case CALLBACK_TYPE_ISP_BEFORE: + pFunc = &ExynosCameraActivityBase::t_funcISPBefore; + break; + case CALLBACK_TYPE_ISP_AFTER: + pFunc = &ExynosCameraActivityBase::t_funcISPAfter; + break; + case CALLBACK_TYPE_SCC_BEFORE: + pFunc = &ExynosCameraActivityBase::t_funcSCCBefore; + break; + case CALLBACK_TYPE_SCC_AFTER: + pFunc = &ExynosCameraActivityBase::t_funcSCCAfter; + break; + case CALLBACK_TYPE_SCP_BEFORE: + pFunc = &ExynosCameraActivityBase::t_funcSCPBefore; + break; + case CALLBACK_TYPE_SCP_AFTER: + pFunc = &ExynosCameraActivityBase::t_funcSCPAfter; + break; + default: + break; + } + + return (this->*pFunc)(args); +} + +} /* namespace android */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityBase.h b/libcamera/common_v2/Activities/ExynosCameraActivityBase.h new file mode 100644 index 0000000..ec5f917 --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityBase.h @@ -0,0 +1,114 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file ExynosCameraActivityBase.h + * \brief hearder file for CAMERA HAL MODULE + * \author Pilsun Jang(pilsun.jang@samsung.com) + * \date 2012/12/19 + * + */ + +#ifndef EXYNOS_CAMERA_ACTIVITY_BASE_H__ +#define EXYNOS_CAMERA_ACTIVITY_BASE_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include "cutils/properties.h" + +#include "exynos_format.h" +#include "ExynosBuffer.h" +#include "ExynosRect.h" +#include "ExynosExif.h" +#include "exynos_v4l2.h" + +#include "ExynosJpegEncoderForCamera.h" +#include "ExynosCameraBuffer.h" +#include "ExynosCameraUtils.h" +#include "fimc-is-metadata.h" + +namespace android { + +class ExynosCameraActivityBase { +public: +enum CALLBACK_TYPE { + CALLBACK_TYPE_SENSOR_BEFORE, + CALLBACK_TYPE_SENSOR_AFTER, + CALLBACK_TYPE_3A_BEFORE, + CALLBACK_TYPE_3A_AFTER, + CALLBACK_TYPE_3A_BEFORE_HAL3, + CALLBACK_TYPE_3A_AFTER_HAL3, + CALLBACK_TYPE_ISP_BEFORE, + CALLBACK_TYPE_ISP_AFTER, + CALLBACK_TYPE_SCC_BEFORE, + CALLBACK_TYPE_SCC_AFTER, + CALLBACK_TYPE_SCP_BEFORE, + CALLBACK_TYPE_SCP_AFTER, + CALLBACK_TYPE_END +}; + +public: + ExynosCameraActivityBase(); + virtual ~ExynosCameraActivityBase(); + + int execFunction(CALLBACK_TYPE callbackType, void *args); + +protected: + virtual int t_funcNull(void *args) = 0; + virtual int t_funcSensorBefore(void *args) = 0; + virtual int t_funcSensorAfter(void *args) = 0; + virtual int t_func3ABefore(void *args) = 0; + virtual int t_func3AAfter(void *args) = 0; + virtual int t_func3ABeforeHAL3(void *args) = 0; + virtual int t_func3AAfterHAL3(void *args) = 0; + virtual int t_funcISPBefore(void *args) = 0; + virtual int t_funcISPAfter(void *args) = 0; + virtual int t_funcSCPBefore(void *args) = 0; + virtual int t_funcSCPAfter(void *args) = 0; + virtual int t_funcSCCBefore(void *args) = 0; + virtual int t_funcSCCAfter(void *args) = 0; + +protected: + int (ExynosCameraActivityBase::*pFunc)(void *args); + bool t_isExclusiveReq; + bool t_isActivated; + int t_reqNum; + int t_reqStatus; +}; +} + +#endif /* EXYNOS_CAMERA_ACTIVITY_BASE_H__ */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityFlash.cpp b/libcamera/common_v2/Activities/ExynosCameraActivityFlash.cpp new file mode 100644 index 0000000..2c2b64e --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityFlash.cpp @@ -0,0 +1,1154 @@ +/* +** +** Copyright 2012, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
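Aside, not part of the patch: ExynosCameraActivityBase::execFunction() above selects a pointer-to-member from the callback type and invokes it on the current object, so each derived activity only implements the t_func* hooks it cares about. A condensed sketch of the same pattern with stand-in names:

// Illustrative sketch of the callback dispatch in execFunction(); the class
// and hook names here are simplified stand-ins, not the patch's real types.
#include <cstdio>

class ActivityBase {
public:
    enum CallbackType { SENSOR_BEFORE, SENSOR_AFTER, CALLBACK_END };

    int exec(CallbackType type, void *args)
    {
        int (ActivityBase::*fn)(void *) = &ActivityBase::funcNull;
        switch (type) {
        case SENSOR_BEFORE: fn = &ActivityBase::funcSensorBefore; break;
        case SENSOR_AFTER:  fn = &ActivityBase::funcSensorAfter;  break;
        default:            break;   // fall back to the no-op hook
        }
        return (this->*fn)(args);    // virtual dispatch still applies
    }

protected:
    virtual int funcNull(void *)         { return 1; }
    virtual int funcSensorBefore(void *) { std::puts("sensor before"); return 1; }
    virtual int funcSensorAfter(void *)  { std::puts("sensor after");  return 1; }
};

One difference worth noting: the sketch falls back to the no-op hook for an unrecognized type, whereas the original's default branch leaves pFunc unchanged, so a first call with an unknown callbackType would invoke the NULL member pointer set in the constructor.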
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityFlash" +#include + + +#include "ExynosCameraActivityFlash.h" + +namespace android { + +class ExynosCamera; + +ExynosCameraActivityFlash::ExynosCameraActivityFlash() +{ + t_isExclusiveReq = false; + t_isActivated = false; + t_reqNum = 0x1F; + t_reqStatus = 0; + + m_isNeedFlash = false; + m_isNeedCaptureFlash = true; + m_isNeedFlashOffDelay = false; + m_flashTriggerStep = 0; + + m_flashStepErrorCount = -1; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + + m_waitingCount = -1; + m_isCapture = false; + m_isMainFlashFiring = false; + m_timeoutCount = 0; + m_aeWaitMaxCount = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_flashReq = FLASH_REQ_OFF; + m_flashStep = FLASH_STEP_OFF; + m_overrideFlashControl = false; + m_ShotFcount = 0; + + m_flashPreStatus = FLASH_STATUS_OFF; + m_aePreState = AE_STATE_INACTIVE; + m_flashTrigger = FLASH_TRIGGER_OFF; + m_mainWaitCount = 0; + + m_aeflashMode = AA_FLASHMODE_OFF; + m_checkFlashStepCancel = false; + m_checkFlashWaitCancel = false; + m_mainCaptureRcount = 0; + m_mainCaptureFcount = 0; + m_isRecording = false; + m_isFlashOff = false; + m_flashMode = CAM2_FLASH_MODE_OFF; + m_currentIspInputFcount = 0; + m_awbMode = AA_AWBMODE_OFF; + m_aeState = AE_STATE_INACTIVE; + m_aeMode = AA_AEMODE_OFF; + m_isPreFlash = false; + m_curAeState = AE_STATE_INACTIVE; + m_aeLock = false; + m_awbLock = false; + m_fpsValue = 1; + m_manualExposureTime = 0; + m_needAfTrigger = false; +} + +ExynosCameraActivityFlash::~ExynosCameraActivityFlash() +{ + m_checkFlashWaitCancel = false; +} + +int ExynosCameraActivityFlash::t_funcNull(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + return 1; +} + +int ExynosCameraActivityFlash::t_funcSensorBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + m_reqBuf = *buf; + + return 1; +} + +int ExynosCameraActivityFlash::t_funcSensorAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + if (m_checkMainCaptureFcount == true) { + /* Update m_waitingCount */ + m_waitingCount = checkMainCaptureFcount(shot_ext->shot.dm.request.frameCount); + ALOGV("INFO(%s[%d]):m_waitingCount=0x%x", __FUNCTION__, __LINE__, m_waitingCount); + } + + return 1; +} + +int ExynosCameraActivityFlash::t_funcISPBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivityFlash::t_funcISPAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivityFlash::t_func3ABeforeHAL3(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + enum flash_mode tempFlashMode = CAM2_FLASH_MODE_NONE; + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + m_currentIspInputFcount = shot_ext->shot.dm.request.frameCount; + + ALOGV("INFO(%s[%d]):m_flashReq=%d, m_flashStatus=%d, m_flashStep=%d", + __FUNCTION__, __LINE__, (int)m_flashReq, (int)m_flashStatus, (int)m_flashStep); + + if (m_flashPreStatus != m_flashStatus) { + ALOGD("DEBUG(%s[%d]):m_flashReq=%d, m_flashStatus=%d, m_flashStep=%d", + __FUNCTION__, __LINE__, + (int)m_flashReq, (int)m_flashStatus, (int)m_flashStep); + + 
m_flashPreStatus = m_flashStatus; + } + + if (m_aePreState != m_aeState) { + ALOGV("INFO(%s[%d]):m_aeState=%d", __FUNCTION__, __LINE__, (int)m_aeState); + m_aePreState = m_aeState; + } + + if (m_overrideFlashControl == false) { + switch (shot_ext->shot.ctl.flash.flashMode) { + case CAM2_FLASH_MODE_OFF: + this->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_OFF); + break; + case CAM2_FLASH_MODE_TORCH: + this->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_TORCH); + break; + case CAM2_FLASH_MODE_SINGLE: + this->setFlashReq(ExynosCameraActivityFlash::FLASH_REQ_SINGLE); + break; + default: + break; + } + } + + tempFlashMode = shot_ext->shot.ctl.flash.flashMode; + + if (m_flashStep == FLASH_STEP_CANCEL && m_checkFlashStepCancel == true) { + ALOGV("DEBUG(%s[%d]): Flash step is CANCEL", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CANCEL; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_flashStepErrorCount = -1; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + m_checkFlashStepCancel = false; + /* m_checkFlashWaitCancel = false; */ + m_isCapture = false; + + goto done; + } + + if (m_flashReq == FLASH_REQ_OFF) { + ALOGV("DEBUG(%s[%d]): Flash request is OFF", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + if (m_aeflashMode == AA_FLASHMODE_ON_ALWAYS) + m_isNeedFlashOffDelay = true; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_flashStepErrorCount = -1; + m_flashStep = FLASH_STEP_OFF; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + + goto done; + } else if (m_flashReq == FLASH_REQ_SINGLE) { + ALOGV("DEBUG(%s[%d]): Flash request is SINGLE", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + + goto done; + } else if (m_flashReq == FLASH_REQ_TORCH) { + ALOGV("DEBUG(%s[%d]): Flash request is TORCH", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON_ALWAYS; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + + goto done; + } else if (m_flashReq == FLASH_REQ_ON) { + ALOGV("DEBUG(%s[%d]): Flash request is ON", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + if (m_flashStatus == FLASH_STATUS_OFF || m_flashStatus == FLASH_STATUS_PRE_CHECK) { + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_PRE_READY; + } else if (m_flashStatus == FLASH_STATUS_PRE_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE READY", __FUNCTION__, __LINE__); + + if (m_flashStep == FLASH_STEP_PRE_START) { + ALOGV("DEBUG(%s[%d]): Flash step is PRE START", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_START; + 
shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + if (shot_ext->shot.ctl.aa.afMode == AA_AFMODE_AUTO + || shot_ext->shot.ctl.aa.afMode == AA_AFMODE_MACRO) + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL; + else + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_ON) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE ON", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } else if (m_flashStatus == FLASH_STATUS_PRE_AE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AE DONE", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AE_DONE; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_PRE_AF) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AF", __FUNCTION__, __LINE__); + + if (m_needAfTrigger == true) { + m_needAfTrigger = false; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + } else { + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + } + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AF; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_PRE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE DONE", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_AUTO; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN READY", __FUNCTION__, __LINE__); + m_ShotFcount = 0; + + if (m_flashStep == FLASH_STEP_MAIN_START) { + ALOGD("DEBUG(%s[%d]): Flash step is MAIN START (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_ON) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN ON (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = 
AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_WAIT) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN WAIT (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_WAIT; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN DONE (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + m_isCapture = false; + } + } else if (m_flashReq == FLASH_REQ_AUTO) { + ALOGV("DEBUG(%s[%d]): Flash request is AUTO", __FUNCTION__, __LINE__); + + if (m_aeState == AE_STATE_INACTIVE) { + ALOGV("DEBUG(%s[%d]): AE state is INACTIVE", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_flashStep = FLASH_STEP_OFF; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + m_waitingCount = -1; + + goto done; + } else if (m_aeState == AE_STATE_CONVERGED || m_aeState == AE_STATE_LOCKED_CONVERGED) { + ALOGV("DEBUG(%s[%d]): AE state is CONVERGED", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_flashStep = FLASH_STEP_OFF; + + m_isCapture = false; + m_isPreFlash = false; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + m_waitingCount = -1; + + goto done; + } else if (m_aeState == AE_STATE_FLASH_REQUIRED + || m_aeState == AE_STATE_LOCKED_FLASH_REQUIRED + || m_aeState == AE_STATE_SEARCHING_FLASH_REQUIRED) { + ALOGV("DEBUG(%s[%d]): AE state is FLASH REQUIRED", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + if (m_flashStatus == FLASH_STATUS_OFF || m_flashStatus == FLASH_STATUS_PRE_CHECK) { + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_PRE_READY; + } else if (m_flashStatus == FLASH_STATUS_PRE_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE READY", __FUNCTION__, __LINE__); + + if (m_flashStep == FLASH_STEP_PRE_START) { + ALOGV("DEBUG(%s[%d]): Flash step is PRE START", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_START; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + 
shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + if (shot_ext->shot.ctl.aa.afMode == AA_AFMODE_AUTO + || shot_ext->shot.ctl.aa.afMode == AA_AFMODE_MACRO) + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL; + else + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_ON) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE ON", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } else if (m_flashStatus == FLASH_STATUS_PRE_AE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AE DONE", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_PRE_AF) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AF", __FUNCTION__, __LINE__); + + if (m_needAfTrigger == true) { + m_needAfTrigger = false; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + } else { + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + } + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AF; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_PRE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE DONE", __FUNCTION__, __LINE__); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_AUTO; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN READY", __FUNCTION__, __LINE__); + m_ShotFcount = 0; + + if (m_flashStep == FLASH_STEP_MAIN_START) { + ALOGV("DEBUG(%s[%d]): Flash step is MAIN START (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_ON) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN ON (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + 
shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_WAIT) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN WAIT (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_WAIT; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN DONE (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + m_isCapture = false; + } + } + } + + if (0 < m_flashStepErrorCount) + m_flashStepErrorCount++; + + ALOGV("INFO(%s[%d]):aeflashMode=%d", __FUNCTION__, __LINE__, (int)shot_ext->shot.ctl.aa.vendor_aeflashMode); + +done: + shot_ext->shot.ctl.flash.flashMode = tempFlashMode; + + if(shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + + ALOGV("INFO(%s[%d]):aeflashMode(%d) ctl flashMode(%d)", + __FUNCTION__, __LINE__, + (int)shot_ext->shot.ctl.aa.vendor_aeflashMode, + (int)shot_ext->shot.ctl.flash.flashMode); + + return 1; +} + +int ExynosCameraActivityFlash::t_func3AAfterHAL3(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + shot_ext->shot.dm.aa.aePrecaptureTrigger = shot_ext->shot.ctl.aa.aePrecaptureTrigger; + + if (m_isCapture == false) + m_aeState = shot_ext->shot.dm.aa.aeState; + + m_curAeState = shot_ext->shot.dm.aa.aeState; + + /* Convert aeState for Locked */ + if (shot_ext->shot.dm.aa.aeState == AE_STATE_LOCKED_CONVERGED || + shot_ext->shot.dm.aa.aeState == AE_STATE_LOCKED_FLASH_REQUIRED) { + shot_ext->shot.dm.aa.aeState = AE_STATE_LOCKED; + } + + if (shot_ext->shot.dm.aa.aeState == AE_STATE_SEARCHING_FLASH_REQUIRED) + shot_ext->shot.dm.aa.aeState = AE_STATE_SEARCHING; + + if (m_flashStep == FLASH_STEP_CANCEL && + m_checkFlashStepCancel == false) { + m_flashStep = FLASH_STEP_OFF; + m_flashStatus = FLASH_STATUS_OFF; + + goto done; + } + + if (m_flashStep == FLASH_STEP_OFF) { + if (shot_ext->shot.dm.aa.vendor_aeflashMode == AA_FLASHMODE_ON_ALWAYS) { + shot_ext->shot.dm.flash.flashMode = CAM2_FLASH_MODE_TORCH; + } else if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_SINGLE) { + shot_ext->shot.dm.flash.flashMode = CAM2_FLASH_MODE_SINGLE; + shot_ext->shot.dm.flash.flashState = FLASH_STATE_FIRED; + } + } + + if (m_flashReq == FLASH_REQ_OFF) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 3) { + ALOGV("DEBUG(%s[%d]): flashReady = 3 frameCount %d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + m_isFlashOff = true; + } + } + + if (m_flashStatus == FLASH_STATUS_PRE_CHECK) { + if (shot_ext->shot.dm.flash.vendor_decision == 2 || + FLASH_TIMEOUT_COUNT < 
m_timeoutCount) { + m_flashStatus = FLASH_STATUS_PRE_READY; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_ON) { + shot_ext->shot.dm.aa.aeState = AE_STATE_PRECAPTURE; + if (shot_ext->shot.dm.flash.vendor_flashReady == 1 || + FLASH_AE_TIMEOUT_COUNT < m_timeoutCount) { + if (FLASH_AE_TIMEOUT_COUNT < m_timeoutCount) + ALOGD("DEBUG(%s[%d]):auto exposure timeoutCount %d", + __FUNCTION__, __LINE__, m_timeoutCount); + m_flashStatus = FLASH_STATUS_PRE_AE_DONE; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_AE_DONE) { + shot_ext->shot.dm.aa.aeState = AE_STATE_PRECAPTURE; + + if (shot_ext->shot.dm.aa.afState == AA_AFSTATE_PASSIVE_FOCUSED || + shot_ext->shot.dm.aa.afState == AA_AFSTATE_PASSIVE_UNFOCUSED || + shot_ext->shot.dm.aa.afState == AA_AFSTATE_INACTIVE) { + m_needAfTrigger = true; + m_flashStatus = FLASH_STATUS_PRE_AF; + } else { + ALOGD("DEBUG(%s[%d]):FLASH_STATUS_PRE_AE_DONE aeState(%d) afState(%d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.aa.aeState, shot_ext->shot.dm.aa.afState); + m_flashStatus = FLASH_STATUS_PRE_AE_DONE; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_AF) { + shot_ext->shot.dm.aa.aeState = AE_STATE_PRECAPTURE; + + if (shot_ext->shot.dm.aa.afState == AA_AFSTATE_FOCUSED_LOCKED || + shot_ext->shot.dm.aa.afState == AA_AFSTATE_NOT_FOCUSED_LOCKED || + FLASH_AF_TIMEOUT_COUNT < m_timeoutCount) { + if (FLASH_AF_TIMEOUT_COUNT < m_timeoutCount) { + ALOGD("DEBUG(%s[%d]):auto focus timeoutCount %d", + __FUNCTION__, __LINE__, m_timeoutCount); + } + m_flashStatus = FLASH_STATUS_PRE_DONE; + m_timeoutCount = 0; + } else { + if (shot_ext->shot.ctl.aa.afMode == AA_AFMODE_OFF) { + m_flashStatus = FLASH_STATUS_PRE_DONE; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } + } else if (m_flashStatus == FLASH_STATUS_PRE_DONE) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 2 || + FLASH_TIMEOUT_COUNT < m_timeoutCount) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 2) { + ALOGD("DEBUG(%s[%d]):flashReady == 2 frameCount %d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + } else if (FLASH_MAIN_TIMEOUT_COUNT < m_timeoutCount) { + ALOGD("DEBUG(%s[%d]):m_timeoutCount %d", + __FUNCTION__, __LINE__, m_timeoutCount); + } + + m_flashStatus = FLASH_STATUS_MAIN_READY; + m_timeoutCount = 0; + } else { + shot_ext->shot.dm.aa.aeState = AE_STATE_PRECAPTURE; + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_ON) { + if ((shot_ext->shot.dm.flash.vendor_flashOffReady == 2) || + (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_FLASH) || + FLASH_MAIN_TIMEOUT_COUNT < m_timeoutCount) { + + if (shot_ext->shot.dm.flash.vendor_flashOffReady == 2) { + ALOGD("DEBUG(%s[%d]):flashOffReady %d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.flash.vendor_flashOffReady); + } else if (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_FLASH) { + m_ShotFcount = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_ShotFcount %u", __FUNCTION__, __LINE__, m_ShotFcount); + } else if (FLASH_MAIN_TIMEOUT_COUNT < m_timeoutCount) { + ALOGD("DEBUG(%s[%d]):m_timeoutCount %d", __FUNCTION__, __LINE__, m_timeoutCount); + } + ALOGD("DEBUG(%s[%d]):frameCount %d" , + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + m_flashStatus = FLASH_STATUS_MAIN_DONE; + m_timeoutCount = 0; + m_mainWaitCount = 0; + + m_waitingCount--; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_WAIT) { + /* 1 
frame is used translate status MAIN_ON to MAIN_WAIT */ + if (m_mainWaitCount < FLASH_MAIN_WAIT_COUNT -1) { + ALOGD("DEBUG(%s[%d]):frameCount=%d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + m_mainWaitCount++; + } else { + ALOGD("DEBUG(%s[%d]):frameCount=%d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + m_mainWaitCount = 0; + m_waitingCount = -1; + m_flashStatus = FLASH_STATUS_MAIN_DONE; + } + } + + m_aeflashMode = shot_ext->shot.dm.aa.vendor_aeflashMode; + + ALOGV("INFO(%s[%d]):(m_aeState %d)(m_flashStatus %d)", __FUNCTION__, __LINE__, + (int)m_aeState, (int)m_flashStatus); + ALOGV("INFO(%s[%d]):(decision %d flashReady %d flashOffReady %d firingStable %d)", __FUNCTION__, __LINE__, + (int)shot_ext->shot.dm.flash.vendor_decision, + (int)shot_ext->shot.dm.flash.vendor_flashReady, + (int)shot_ext->shot.dm.flash.vendor_flashOffReady, + (int)shot_ext->shot.dm.flash.vendor_firingStable); + ALOGV("INFO(%s[%d]):(aeState %d)(aeflashMode %d)(dm flashMode %d)(flashState %d)(ctl flashMode %d)", + __FUNCTION__, __LINE__, + (int)shot_ext->shot.dm.aa.aeState, (int)shot_ext->shot.dm.aa.vendor_aeflashMode, + (int)shot_ext->shot.dm.flash.flashMode, (int)shot_ext->shot.dm.flash.flashState, + (int)shot_ext->shot.ctl.flash.flashMode); + +done: + return 1; +} + +int ExynosCameraActivityFlash::t_funcSCPBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivityFlash::t_funcSCPAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivityFlash::t_funcSCCBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivityFlash::t_funcSCCAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +bool ExynosCameraActivityFlash::setFlashReq(enum FLASH_REQ flashReqVal) +{ + if (m_flashReq != flashReqVal) { + m_flashReq = flashReqVal; + setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_OFF); + if (m_isRecording == false) + m_isNeedCaptureFlash = true; + ALOGD("DEBUG(%s[%d]):flashReq=%d", __FUNCTION__, __LINE__, (int)m_flashReq); + } + + if (m_flashReq == FLASH_REQ_ON) + m_isNeedFlash = true; + if (m_flashReq == FLASH_REQ_TORCH) + m_isNeedCaptureFlash = false; + if (m_flashReq == FLASH_REQ_OFF) { + m_isNeedCaptureFlash = false; + m_isFlashOff = false; + } + + return true; +} + +bool ExynosCameraActivityFlash::setFlashReq(enum FLASH_REQ flashReqVal, bool overrideFlashControl) +{ + m_overrideFlashControl = overrideFlashControl; + + if (m_overrideFlashControl == true) { + return this->setFlashReq(flashReqVal); + } + + return true; +} + +enum ExynosCameraActivityFlash::FLASH_REQ ExynosCameraActivityFlash::getFlashReq(void) +{ + return m_flashReq; +} + +bool ExynosCameraActivityFlash::setFlashStatus(enum FLASH_STATUS flashStatusVal) +{ + m_flashStatus = flashStatusVal; + ALOGV("DEBUG(%s[%d]):flashStatus=%d", __FUNCTION__, __LINE__, (int)m_flashStatus); + return true; +} + +int ExynosCameraActivityFlash::getFlashStatus() +{ + return m_aeflashMode; +} + +bool ExynosCameraActivityFlash::setFlashExposure(enum aa_aemode aeModeVal) +{ + m_aeMode = aeModeVal; + ALOGV("DEBUG(%s[%d]):aeMode=%d", __FUNCTION__, __LINE__, (int)m_aeMode); + return true; +} + +bool ExynosCameraActivityFlash::setFlashWhiteBalance(enum aa_awbmode wbModeVal) +{ + m_awbMode = wbModeVal; + ALOGV("DEBUG(%s[%d]):awbMode=%d", __FUNCTION__, __LINE__, (int)m_awbMode); + return true; +} + +void 
ExynosCameraActivityFlash::setAeLock(bool aeLock) +{ + m_aeLock = aeLock; + ALOGV("DEBUG(%s[%d]):aeLock=%d", __FUNCTION__, __LINE__, (int)m_aeLock); +} + +void ExynosCameraActivityFlash::setAwbLock(bool awbLock) +{ + m_awbLock = awbLock; + ALOGV("DEBUG(%s[%d]):awbLock=%d", __FUNCTION__, __LINE__, (int)m_awbLock); +} + +bool ExynosCameraActivityFlash::getFlashStep(enum FLASH_STEP *flashStepVal) +{ + *flashStepVal = m_flashStep; + + return true; +} + +bool ExynosCameraActivityFlash::setFlashTrigerPath(enum FLASH_TRIGGER flashTriggerVal) +{ + m_flashTrigger = flashTriggerVal; + + ALOGD("DEBUG(%s[%d]):flashTriggerVal=%d", __FUNCTION__, __LINE__, (int)flashTriggerVal); + + return true; +} + +bool ExynosCameraActivityFlash::getFlashTrigerPath(enum FLASH_TRIGGER *flashTriggerVal) +{ + *flashTriggerVal = m_flashTrigger; + + return true; +} + +bool ExynosCameraActivityFlash::setShouldCheckedRcount(int rcount) +{ + m_mainCaptureRcount = rcount; + ALOGV("DEBUG(%s[%d]):mainCaptureRcount=%d", __FUNCTION__, __LINE__, m_mainCaptureRcount); + + return true; +} + +bool ExynosCameraActivityFlash::waitAeDone(void) +{ + bool ret = true; + + int status = 0; + unsigned int totalWaitingTime = 0; + + while (status == 0 && + totalWaitingTime <= FLASH_MAX_AEDONE_WAITING_TIME && + m_checkFlashWaitCancel == false) { + if (m_flashStatus == FLASH_STATUS_PRE_ON || m_flashStep == FLASH_STEP_PRE_START) { + if ((m_aeWaitMaxCount <= 0) || (m_flashStatus == FLASH_STATUS_PRE_AE_DONE)) { + status = 1; + break; + } else { + ALOGV("DEBUG(%s[%d]):aeWaitMaxCount=%d", __FUNCTION__, __LINE__, m_aeWaitMaxCount); + status = 0; + } + } else { + status = 1; + break; + } + + usleep(FLASH_WAITING_SLEEP_TIME); + totalWaitingTime += FLASH_WAITING_SLEEP_TIME; + } + + if (status == 0 || FLASH_MAX_AEDONE_WAITING_TIME < totalWaitingTime) { + ALOGW("DEBUG(%s):waiting too much (%d msec)", __FUNCTION__, totalWaitingTime); + ret = false; + } + + return ret; +} + +bool ExynosCameraActivityFlash::waitMainReady(void) +{ + bool ret = true; + unsigned int totalWaitingTime = 0; + unsigned int waitTimeoutFpsValue = 0; + + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + if (getFpsValue() > 0) { + waitTimeoutFpsValue = 30 / getFpsValue(); + } + if (waitTimeoutFpsValue < 1) + waitTimeoutFpsValue = 1; + + ALOGI("INFO(%s[%d]):waitTimeoutFpsValue(%d) , getFpsValue(%d)", + __FUNCTION__, __LINE__, waitTimeoutFpsValue, getFpsValue()); + + while (m_flashStatus < FLASH_STATUS_MAIN_READY && + totalWaitingTime <= FLASH_MAX_PRE_DONE_WAITING_TIME * waitTimeoutFpsValue && + m_checkFlashWaitCancel == false && m_isPreFlash == true) { + ALOGV("DEBUG(%s[%d]):(%d)(%d)(%d)", __FUNCTION__, __LINE__, m_flashStatus, totalWaitingTime, m_checkFlashWaitCancel); + + usleep(FLASH_WAITING_SLEEP_TIME); + totalWaitingTime += FLASH_WAITING_SLEEP_TIME; + } + + if (m_flashStatus < FLASH_STATUS_MAIN_READY && FLASH_MAX_PRE_DONE_WAITING_TIME * waitTimeoutFpsValue < totalWaitingTime) { + ALOGW("DEBUG(%s)::waiting too much (%d msec)", __FUNCTION__, totalWaitingTime); + m_flashStatus = FLASH_STATUS_MAIN_READY; + ret = false; + } + + return ret; +} + +int ExynosCameraActivityFlash::checkMainCaptureRcount(int rcount) +{ + if (rcount == m_mainCaptureRcount) + return 0; + else if (rcount < m_mainCaptureRcount) + return 1; + else + return -1; +} + +bool ExynosCameraActivityFlash::setShouldCheckedFcount(int fcount) +{ + m_mainCaptureFcount = fcount; + ALOGV("DEBUG(%s[%d]):mainCaptureFcount=%d", __FUNCTION__, __LINE__, m_mainCaptureFcount); + + return true; +} + +int 
ExynosCameraActivityFlash::checkMainCaptureFcount(int fcount) +{ + ALOGV("DEBUG(%s[%d]):mainCaptureFcount=%d, fcount=%d", + __FUNCTION__, __LINE__, m_mainCaptureFcount, fcount); + + if (fcount < m_mainCaptureFcount) + return (m_mainCaptureFcount - fcount); /* positive count */ + else + return 0; +} + +int ExynosCameraActivityFlash::getWaitingCount() +{ + return m_waitingCount; +} + +bool ExynosCameraActivityFlash::getNeedFlash() +{ + ALOGD("DEBUG(%s[%d]):m_isNeedFlash=%d", __FUNCTION__, __LINE__, m_isNeedFlash); + return m_isNeedFlash; +} + +bool ExynosCameraActivityFlash::getNeedCaptureFlash() +{ + if (m_isNeedFlash == true) + return m_isNeedCaptureFlash; + + return false; +} + +unsigned int ExynosCameraActivityFlash::getShotFcount() +{ + return m_ShotFcount; +} +void ExynosCameraActivityFlash::resetShotFcount(void) +{ + m_ShotFcount = 0; +} +void ExynosCameraActivityFlash::setCaptureStatus(bool isCapture) +{ + m_isCapture = isCapture; +} + +bool ExynosCameraActivityFlash::setRecordingHint(bool hint) +{ + m_isRecording = hint; + if ((m_isRecording == true) || (m_flashReq == FLASH_REQ_TORCH)) + m_isNeedCaptureFlash = false; + else + m_isNeedCaptureFlash = true; + + return true; +} + +bool ExynosCameraActivityFlash::checkPreFlash() +{ + return m_isPreFlash; +} + +bool ExynosCameraActivityFlash::checkFlashOff() +{ + return m_isFlashOff; +} + +bool ExynosCameraActivityFlash::updateAeState() +{ + m_aePreState = m_aeState; + m_aeState = m_curAeState; + + if (m_aePreState != m_aeState) { + if ((m_aeState == AE_STATE_CONVERGED || m_aeState == AE_STATE_LOCKED_CONVERGED) && + m_flashReq != FLASH_REQ_ON) + m_isNeedFlash = false; + else if (m_aeState == AE_STATE_FLASH_REQUIRED || m_aeState == AE_STATE_LOCKED_FLASH_REQUIRED) + m_isNeedFlash = true; + } + + ALOGD("DEBUG(%s[%d]): aeState %d", __FUNCTION__, __LINE__, (int)m_aeState); + + return true; +} + +void ExynosCameraActivityFlash::setFlashWaitCancel(bool cancel) +{ + m_checkFlashWaitCancel = cancel; +} + +bool ExynosCameraActivityFlash::getFlashWaitCancel(void) +{ + return m_checkFlashWaitCancel; +} + +status_t ExynosCameraActivityFlash::setNeedFlashOffDelay(bool delay) +{ + m_isNeedFlashOffDelay = delay; + return NO_ERROR; +} + +bool ExynosCameraActivityFlash::getNeedFlashOffDelay(void) +{ + return m_isNeedFlashOffDelay; +} + +void ExynosCameraActivityFlash::setFpsValue(int fpsValue) +{ + m_fpsValue = fpsValue; +} + +int ExynosCameraActivityFlash::getFpsValue() +{ + return m_fpsValue; +} + +void ExynosCameraActivityFlash::notifyAfResult(void) +{ + if (m_flashStatus == FLASH_STATUS_PRE_AF) { + setFlashStep(FLASH_STEP_PRE_DONE); + ALOGD("DEBUG(%s[%d]): AF DONE (for flash)", __FUNCTION__, __LINE__); + } +} + +void ExynosCameraActivityFlash::notifyAfResultHAL3(void) +{ + if (m_flashStatus == FLASH_STATUS_PRE_AF) { + setFlashStep(FLASH_STEP_PRE_AF_DONE); + ALOGD("DEBUG(%s[%d]): AF DONE (for flash)", __FUNCTION__, __LINE__); + } +} + +void ExynosCameraActivityFlash::notifyAeResult(void) +{ + if (m_flashStatus == FLASH_STATUS_PRE_ON) { + setFlashStep(FLASH_STEP_PRE_DONE); + ALOGD("DEBUG(%s[%d]): AE DONE (for flash)", __FUNCTION__, __LINE__); + } +} + +void ExynosCameraActivityFlash::setMainFlashFiring(bool isMainFlashFiring) +{ + m_isMainFlashFiring = isMainFlashFiring; +} + +void ExynosCameraActivityFlash::setManualExposureTime(uint64_t exposureTime) +{ + ALOGD("DEBUG(%s[%d]):exposureTime(%lld)", __FUNCTION__, __LINE__, exposureTime); + m_manualExposureTime = exposureTime; +} + +}/* namespace android */ + diff --git 
a/libcamera/common_v2/Activities/ExynosCameraActivityFlash.h b/libcamera/common_v2/Activities/ExynosCameraActivityFlash.h new file mode 100644 index 0000000..7293bc3 --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityFlash.h @@ -0,0 +1,253 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file ExynosCameraActivityFlash.h + * \brief hearder file for CAMERA HAL MODULE + * \author Pilsun Jang(pilsun.jang@samsung.com) + * \date 2012/12/19 + * + */ + +#ifndef EXYNOS_CAMERA_ACTIVITY_FLASH_H__ +#define EXYNOS_CAMERA_ACTIVITY_FLASH_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include "cutils/properties.h" + +#include "exynos_format.h" +#include "ExynosBuffer.h" +#include "ExynosRect.h" +#include "ExynosJpegEncoderForCamera.h" +#include "ExynosExif.h" +#include "exynos_v4l2.h" +#include "ExynosCameraActivityBase.h" + +#include "fimc-is-metadata.h" + +#define CAPTURE_SKIP_COUNT (1) + +#define FLASH_WAITING_SLEEP_TIME (15000) /* 15 msec */ +#define FLASH_MAX_WAITING_TIME (2000000) /* 2 sec */ +#define FLASH_MAX_AEDONE_WAITING_TIME (3000000) /* 3 sec */ +#define FLASH_MAX_PRE_DONE_WAITING_TIME (5000000) /* 5 sec */ +#define FLASH_OFF_MAX_WATING_TIME (500000) /* 500 ms */ +#define FLASH_CAPTURE_WAITING_TIME (200000) /* 200 ms */ +#define FLASH_TIMEOUT_COUNT (30) /* 30 fps * 1 sec */ +#define FLASH_AF_TIMEOUT_COUNT (60) /*33ms * 60 */ +#define FLASH_AE_TIMEOUT_COUNT (60) /*33ms * 60 */ +#define FLASH_MAIN_TIMEOUT_COUNT (15) /* 33ms * 15 */ +#define FLASH_SHOT_MAX_WAITING_TIME (100000) /* 100 msec */ +#define FLASH_MAIN_WAIT_COUNT (2) + +namespace android { + +class ExynosCameraActivityFlash : public ExynosCameraActivityBase { +public: + enum FLASH_REQ { + FLASH_REQ_OFF, + FLASH_REQ_AUTO, + FLASH_REQ_ON, + FLASH_REQ_RED_EYE, + FLASH_REQ_SINGLE, + FLASH_REQ_TORCH, + FLASH_REQ_END + }; + + enum FLASH_STATUS { + FLASH_STATUS_OFF, + FLASH_STATUS_NEED_FLASH, + FLASH_STATUS_PRE_CHECK, + FLASH_STATUS_PRE_READY, + FLASH_STATUS_PRE_ON, + FLASH_STATUS_PRE_AE_DONE, /* 5 */ + FLASH_STATUS_PRE_AF, + FLASH_STATUS_PRE_AF_DONE, + FLASH_STATUS_PRE_DONE, + FLASH_STATUS_MAIN_READY, + FLASH_STATUS_MAIN_ON, + FLASH_STATUS_MAIN_WAIT, + FLASH_STATUS_MAIN_DONE, + FLASH_STATUS_END + }; + + enum FLASH_STEP { + FLASH_STEP_OFF, + FLASH_STEP_PRE_AF_START, + FLASH_STEP_PRE_AF_DONE, + FLASH_STEP_PRE_START, + FLASH_STEP_PRE_DONE, + FLASH_STEP_MAIN_START, + FLASH_STEP_MAIN_DONE, + FLASH_STEP_CANCEL, + FLASH_STEP_PRE_LCD_ON, + FLASH_STEP_LCD_ON, + FLASH_STEP_LCD_OFF, + FLASH_STEP_END + }; + + enum FLASH_TRIGGER { + FLASH_TRIGGER_OFF, + FLASH_TRIGGER_TOUCH_DISPLAY, + FLASH_TRIGGER_SHORT_BUTTON, + FLASH_TRIGGER_LONG_BUTTON, + FLASH_TRIGGER_END + }; + +public: + ExynosCameraActivityFlash(); + virtual ~ExynosCameraActivityFlash(); + +protected: + int t_funcNull(void *args); + 
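Aside, not part of the patch: the FLASH_* timeout constants above bound the blocking helpers waitAeDone() and waitMainReady() seen earlier in this chunk, which poll the flash state machine in FLASH_WAITING_SLEEP_TIME slices until the wanted status is reached or the deadline expires. A generic sketch of that wait-with-deadline pattern (the predicate and the constants are stand-ins, not the real members):

// Illustrative sketch of the polling loop used by waitAeDone()/waitMainReady().
#include <unistd.h>
#include <functional>

static bool waitWithTimeout(const std::function<bool()> &done,
                            unsigned sleepUs   /* e.g. 15000   (15 ms) */,
                            unsigned maxWaitUs /* e.g. 3000000 (3 s)  */)
{
    unsigned waitedUs = 0;
    while (!done()) {
        if (waitedUs > maxWaitUs)
            return false;        // timed out; the caller degrades gracefully
        usleep(sleepUs);
        waitedUs += sleepUs;
    }
    return true;
}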
int t_funcSensorBefore(void *args); + int t_funcSensorAfter(void *args); + int t_func3ABefore(void *args); + int t_func3AAfter(void *args); + int t_func3ABeforeHAL3(void *args); + int t_func3AAfterHAL3(void *args); + int t_funcISPBefore(void *args); + int t_funcISPAfter(void *args); + int t_funcSCPBefore(void *args); + int t_funcSCPAfter(void *args); + int t_funcSCCBefore(void *args); + int t_funcSCCAfter(void *args); + +public: + bool setFlashReq(enum FLASH_REQ flashReqVal); + bool setFlashReq(enum FLASH_REQ flashReqVal, bool overrideFlashControl); + enum ExynosCameraActivityFlash::FLASH_REQ getFlashReq(void); + + bool setFlashStep(enum FLASH_STEP flashStepVal); + + bool setFlashStatus(enum FLASH_STATUS flashStatusVal); + + status_t setNeedFlashOffDelay(bool delay); + bool setFlashTrigerPath(enum FLASH_TRIGGER flashTriggerVal); + bool setFlashWhiteBalance(enum aa_awbmode wbModeVal); + bool setFlashExposure(enum aa_aemode aeModeVal); + bool setShouldCheckedRcount(int rcount); + int checkMainCaptureRcount(int rcount); + + bool setShouldCheckedFcount(int rcount); + int checkMainCaptureFcount(int rcount); + + int getWaitingCount(void); + bool getNeedFlash(void); + + bool waitAeDone(void); + bool waitMainReady(void); + void setCaptureStatus(bool isCapture); + unsigned int getShotFcount(); + void resetShotFcount(void); + bool getFlashTrigerPath(enum FLASH_TRIGGER *flashTriggerVal); + bool getFlashStep(enum FLASH_STEP *flashStepVal); + int getFlashStatus(void); + bool getNeedCaptureFlash(void); + bool getNeedFlashOffDelay(void); + bool setRecordingHint(bool hint); + bool checkPreFlash(void); + bool checkFlashOff(void); + bool updateAeState(void); + void setAeLock(bool aeLock); + void setAwbLock(bool awbLock); + void setFlashWaitCancel(bool cancel); + bool getFlashWaitCancel(void); + void setFpsValue(int fpsValue); + int getFpsValue(); + void notifyAfResult(void); + void notifyAfResultHAL3(void); + void notifyAeResult(void); + void setMainFlashFiring(bool isMainFlashFiring); + void setManualExposureTime(uint64_t exposureTime); + +private: + bool m_isNeedFlash; + bool m_isNeedCaptureFlash; + /* TODO: If need another delay, add variables */ + bool m_isNeedFlashOffDelay; + int m_flashTriggerStep; + + int m_flashStepErrorCount; + + int m_mainCaptureRcount; + bool m_checkMainCaptureRcount; + int m_mainCaptureFcount; + int m_currentIspInputFcount; + bool m_checkMainCaptureFcount; + int m_waitingCount; + + bool m_isMainFlashFiring; + bool m_isCapture; + bool m_isRecording; + bool m_isFlashOff; + bool m_needAfTrigger; + + int m_timeoutCount; + int m_aeWaitMaxCount; + int m_mainWaitCount; + + bool m_checkFlashStepCancel; + bool m_checkFlashWaitCancel; + bool m_isPreFlash; + bool m_aeLock; + bool m_awbLock; + unsigned int m_fpsValue; + uint64_t m_manualExposureTime; + + enum flash_mode m_flashMode; + enum aa_ae_flashmode m_aeflashMode; + enum aa_aemode m_aeMode; + enum aa_awbmode m_awbMode; + enum ae_state m_aeState; + + enum FLASH_STATUS m_flashStatus; + enum FLASH_STEP m_flashStep; + enum FLASH_TRIGGER m_flashTrigger; + enum FLASH_REQ m_flashReq; + bool m_overrideFlashControl; + + ExynosCameraBuffer m_reqBuf; + unsigned int m_ShotFcount; + enum FLASH_STATUS m_flashPreStatus; + enum ae_state m_aePreState; + enum ae_state m_curAeState; +}; +} + +#endif /* EXYNOS_CAMERA_ACTIVITY_FLASH_H__ */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.cpp b/libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.cpp new file mode 100644 index 0000000..5f539a4 --- /dev/null +++ 
b/libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.cpp @@ -0,0 +1,496 @@ +/* +** +** Copyright 2012, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivitySpecialCapture" +#include + +#include "ExynosCameraActivitySpecialCapture.h" +//#include "ExynosCamera.h" + +#define TIME_CHECK 1 + +namespace android { + +class ExynosCamera; + +ExynosCameraActivitySpecialCapture::ExynosCameraActivitySpecialCapture() +{ + t_isExclusiveReq = false; + t_isActivated = false; + t_reqNum = 0x1F; + t_reqStatus = 0; + + m_hdrFcount = 0; + m_currentInputFcount = 0; + m_backupAeExpCompensation = 0; + m_hdrStartFcount[0] = 0; + m_hdrStartFcount[1] = 0; + m_hdrStartFcount[2] = 0; + m_hdrDropFcount[0] = 0; + m_hdrDropFcount[1] = 0; + m_hdrDropFcount[2] = 0; + m_delay = 0; + m_specialCaptureMode = SCAPTURE_MODE_NONE; + m_check = false; + m_specialCaptureStep = SCAPTURE_STEP_OFF; + m_backupSceneMode = AA_SCENE_MODE_DISABLED; + m_backupAaMode = AA_CONTROL_OFF; + m_backupAeLock = AA_AE_LOCK_OFF; + memset(m_backupAeTargetFpsRange, 0x00, sizeof(m_backupAeTargetFpsRange)); + m_backupFrameDuration = 0L; +#ifdef RAWDUMP_CAPTURE + m_RawCaptureFcount = 0; +#endif + memset(m_hdrBuffer, 0x00, sizeof(m_hdrBuffer)); + +} + +ExynosCameraActivitySpecialCapture::~ExynosCameraActivitySpecialCapture() +{ + t_isExclusiveReq = false; + t_isActivated = false; + t_reqNum = 0x1F; + t_reqStatus = 0; + + m_hdrFcount = 0; + m_currentInputFcount = 0; + m_backupAeExpCompensation = 0; + m_hdrStartFcount[0] = 0; + m_hdrStartFcount[1] = 0; + m_hdrStartFcount[2] = 0; + m_hdrDropFcount[0] = 0; + m_hdrDropFcount[1] = 0; + m_hdrDropFcount[2] = 0; + m_delay = 0; + m_specialCaptureMode = SCAPTURE_MODE_NONE; + m_check = false; +} + +int ExynosCameraActivitySpecialCapture::t_funcNull(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_funcSensorBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_funcSensorAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + int ret = 1; + + if (shot_ext != NULL && m_specialCaptureMode == SCAPTURE_MODE_HDR) { + if (m_hdrDropFcount[2] + 1 == shot_ext->shot.dm.request.frameCount) { + ret = 2; + + ALOGD("DEBUG(%s[%d]):(%d / B_LOCK %d)", __FUNCTION__, __LINE__, m_hdrStartFcount[0], shot_ext->shot.dm.request.frameCount); + } + + if (m_hdrDropFcount[2] + 2 == shot_ext->shot.dm.request.frameCount) { + ret = 3; + + ALOGD("DEBUG(%s[%d]):(%d / B_LOCK %d)", __FUNCTION__, __LINE__, m_hdrStartFcount[1], shot_ext->shot.dm.request.frameCount); + } + + if (m_hdrDropFcount[2] + 3 == shot_ext->shot.dm.request.frameCount) { + ret = 4; + + ALOGD("DEBUG(%s[%d]):(%d / B_LOCK %d)", 
__FUNCTION__, __LINE__, m_hdrStartFcount[2], shot_ext->shot.dm.request.frameCount); + } + } + + return ret; +} + +int ExynosCameraActivitySpecialCapture::t_funcISPBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + +done: + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_funcISPAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_func3ABefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext != NULL && m_specialCaptureMode == SCAPTURE_MODE_HDR) { + m_currentInputFcount = shot_ext->shot.dm.request.frameCount; + + /* HACK UNLOCK AE */ +#ifndef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; +#endif + + if (m_specialCaptureStep == SCAPTURE_STEP_START) { + m_backupAeExpCompensation = shot_ext->shot.ctl.aa.aeExpCompensation; + m_backupAaMode = shot_ext->shot.ctl.aa.mode; + m_backupAeLock = shot_ext->shot.ctl.aa.aeLock; + m_backupSceneMode = shot_ext->shot.ctl.aa.sceneMode; + + m_specialCaptureStep = SCAPTURE_STEP_MINUS_SET; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_START", __FUNCTION__, __LINE__); + } else if (m_specialCaptureStep == SCAPTURE_STEP_MINUS_SET) { + shot_ext->shot.ctl.aa.mode = AA_CONTROL_USE_SCENE_MODE; + shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_HDR; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; +#ifdef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; +#endif + m_specialCaptureStep = SCAPTURE_STEP_ZERO_SET; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_MINUS_SET", __FUNCTION__, __LINE__); + } else if (m_specialCaptureStep == SCAPTURE_STEP_ZERO_SET) { + m_delay = 0; + shot_ext->shot.ctl.aa.mode = AA_CONTROL_USE_SCENE_MODE; + shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_HDR; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; +#ifdef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; +#endif + m_specialCaptureStep = SCAPTURE_STEP_PLUS_SET; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_ZERO_SET", __FUNCTION__, __LINE__); + } else if (m_specialCaptureStep == SCAPTURE_STEP_PLUS_SET) { + shot_ext->shot.ctl.aa.mode = AA_CONTROL_USE_SCENE_MODE; + shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_HDR; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; +#ifdef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; +#endif + + m_specialCaptureStep = SCAPTURE_STEP_RESTORE; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_PLUS_SET", __FUNCTION__, __LINE__); + } else if (m_specialCaptureStep == SCAPTURE_STEP_RESTORE) { + shot_ext->shot.ctl.aa.mode = AA_CONTROL_USE_SCENE_MODE; + shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_HDR; + shot_ext->shot.ctl.aa.aeLock = m_backupAeLock; + shot_ext->shot.ctl.aa.aeExpCompensation = m_backupAeExpCompensation; +#ifdef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; +#endif + + m_specialCaptureStep = SCAPTURE_STEP_WAIT_CAPTURE_DELAY; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_RESTORE", __FUNCTION__, __LINE__); + } else if (m_specialCaptureStep == SCAPTURE_STEP_WAIT_CAPTURE_DELAY) { + shot_ext->shot.ctl.aa.sceneMode = m_backupSceneMode; + shot_ext->shot.ctl.aa.mode = m_backupAaMode; + shot_ext->shot.ctl.aa.aeLock = m_backupAeLock; + shot_ext->shot.ctl.aa.aeExpCompensation = m_backupAeExpCompensation; +#ifdef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = 
AA_AEMODE_CENTER; +#endif + + m_specialCaptureStep = SCAPTURE_STEP_WAIT_CAPTURE; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_WAIT_CAPTURE_DELAY", __FUNCTION__, __LINE__); + } else if (m_specialCaptureStep == SCAPTURE_STEP_WAIT_CAPTURE) { + m_specialCaptureStep = SCAPTURE_STEP_WAIT_CAPTURE; + shot_ext->shot.ctl.aa.sceneMode = m_backupSceneMode; + shot_ext->shot.ctl.aa.mode = m_backupAaMode; + shot_ext->shot.ctl.aa.aeLock = m_backupAeLock; + shot_ext->shot.ctl.aa.aeExpCompensation = m_backupAeExpCompensation; +#ifdef USE_LSI_3A + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; +#endif + } else { + m_specialCaptureStep = SCAPTURE_STEP_OFF; + m_delay = 0; + m_check = false; + } + } +#ifdef RAWDUMP_CAPTURE + else if (shot_ext != NULL && m_specialCaptureMode == SCAPTURE_MODE_RAW) { + switch (m_specialCaptureStep) { + case SCAPTURE_STEP_START: + shot_ext->shot.ctl.aa.captureIntent = AA_CAPTURE_INTENT_PREVIEW; + m_specialCaptureStep = SCAPTURE_STEP_OFF; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_START", __FUNCTION__, __LINE__); + break; + case SCAPTURE_STEP_OFF: + shot_ext->shot.ctl.aa.captureIntent = AA_CAPTURE_INTENT_PREVIEW; + ALOGD("DEBUG(%s[%d]):SCAPTURE_STEP_OFF", __FUNCTION__, __LINE__); + break; + default: + m_specialCaptureStep = SCAPTURE_STEP_OFF; + m_specialCaptureMode = SCAPTURE_MODE_NONE; + break; + } + } +#endif + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_func3AAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + if (shot_ext != NULL && m_specialCaptureMode == SCAPTURE_MODE_HDR) { + if (shot_ext->shot.dm.flash.vendor_firingStable == 2) { + m_hdrStartFcount[0] = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_hdrStartFcount[0] (%d / %d)", + __FUNCTION__, __LINE__, m_hdrStartFcount[0], shot_ext->shot.dm.request.frameCount); + } + + if (shot_ext->shot.dm.flash.vendor_firingStable == 3) { + m_hdrStartFcount[1] = shot_ext->shot.dm.request.frameCount; + m_check = true; + ALOGD("DEBUG(%s[%d]):m_hdrStartFcount[1] (%d / %d)", + __FUNCTION__, __LINE__, m_hdrStartFcount[1], shot_ext->shot.dm.request.frameCount); + } + + if (shot_ext->shot.dm.flash.vendor_firingStable == 4) { + m_hdrStartFcount[2] = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_hdrStartFcount[2] (%d / %d)", + __FUNCTION__, __LINE__, m_hdrStartFcount[2], shot_ext->shot.dm.request.frameCount); + } + + if (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_HDR_DARK) { + m_hdrDropFcount[0] = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_hdrDropFcount[0] (%d / %d)", + __FUNCTION__, __LINE__, m_hdrDropFcount[0], shot_ext->shot.dm.request.frameCount); + } + + if (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_HDR_NORMAL) { + m_hdrDropFcount[1] = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_hdrDropFcount[1] (%d / %d)", + __FUNCTION__, __LINE__, m_hdrDropFcount[1], shot_ext->shot.dm.request.frameCount); + } + + if (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_HDR_BRIGHT) { + m_hdrDropFcount[2] = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_hdrDropFcount[2] (%d / %d)", + __FUNCTION__, __LINE__, m_hdrDropFcount[2], shot_ext->shot.dm.request.frameCount); + } + } +#ifdef RAWDUMP_CAPTURE + else if (shot_ext != NULL && m_specialCaptureMode == SCAPTURE_MODE_RAW) { + if (m_RawCaptureFcount == 0 && shot_ext->shot.dm.flash.vendor_firingStable == 
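/* [Editor's note: illustrative sketch, not part of the imported BSP source.]
 * t_func3AAfter() above uses dm.flash.vendor_firingStable as a side channel:
 * the values 2, 3 and 4 latch the frame counts of the three HDR exposures
 * into m_hdrStartFcount[0..2] (value 3 also sets m_check), while the
 * CAPTURE_STATE_HDR_DARK/NORMAL/BRIGHT codes latch m_hdrDropFcount[0..2].
 * The same latching, condensed into a hypothetical helper (field names as in
 * camera2_shot_ext; the helper itself is not part of the source):
 *
 *   static void latchHdrFcounts(const struct camera2_shot_ext *shot,
 *                               unsigned int startFcount[3],
 *                               unsigned int dropFcount[3])
 *   {
 *       unsigned int fs = shot->shot.dm.flash.vendor_firingStable;
 *       unsigned int fc = shot->shot.dm.request.frameCount;
 *
 *       if (2 <= fs && fs <= 4)             startFcount[fs - 2] = fc;
 *       if (fs == CAPTURE_STATE_HDR_DARK)   dropFcount[0] = fc;
 *       if (fs == CAPTURE_STATE_HDR_NORMAL) dropFcount[1] = fc;
 *       if (fs == CAPTURE_STATE_HDR_BRIGHT) dropFcount[2] = fc;
 *   }
 */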
CAPTURE_STATE_RAW_CAPTURE) + { + m_RawCaptureFcount = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_RawCaptureFcount (%d / %d)", + __FUNCTION__, __LINE__, m_RawCaptureFcount, shot_ext->shot.dm.request.frameCount); + m_specialCaptureMode = SCAPTURE_MODE_NONE; + } + ALOGV("DEBUG(%s[%d]):m_RawCaptureFcount (%d / %d) firingStable(%d)", + __FUNCTION__, __LINE__, m_RawCaptureFcount, + shot_ext->shot.dm.request.frameCount, shot_ext->shot.dm.flash.vendor_firingStable); + } +#endif + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_func3ABeforeHAL3(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_func3AAfterHAL3(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_funcSCPBefore(__unused void *args) +{ + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 1; +} + + +int ExynosCameraActivitySpecialCapture::t_funcSCPAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_stream *shot_stream = (struct camera2_stream *)(buf->addr[2]); + + ALOGV("INFO(%s[%d]):(%d)(%d)(%d)", __FUNCTION__, __LINE__, shot_stream->fvalid, shot_stream->fcount, m_hdrDropFcount[0]); + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + if (shot_stream != NULL && m_specialCaptureMode == SCAPTURE_MODE_HDR) { +#if 0 + if ((m_hdrDropFcount[2] == shot_stream->fcount) || + (m_hdrDropFcount[2] + 1 == shot_stream->fcount) || + (m_hdrStartFcount[0] == shot_stream->fcount) || + (m_hdrStartFcount[1] == shot_stream->fcount) || + (m_hdrStartFcount[2] == shot_stream->fcount) || + (m_hdrStartFcount[2] + 1 == shot_stream->fcount)) { + shot_stream->fvalid = false; + + ALOGV("DEBUG(%s[%d]):drop fcount %d [%d %d %d][%d %d %d]", __FUNCTION__, __LINE__, shot_stream->fcount, + m_hdrStartFcount[0] , m_hdrStartFcount[1] , m_hdrStartFcount[2], + m_hdrDropFcount[0] , m_hdrDropFcount[1] , m_hdrDropFcount[2]); + } +#else + + if (m_hdrDropFcount[0] + 3 == shot_stream->fcount) { + shot_stream->fvalid = false; + + ALOGV("DEBUG(%s[%d]):drop fcount %d [%d %d %d][%d %d %d]", __FUNCTION__, __LINE__, shot_stream->fcount, + m_hdrStartFcount[0] , m_hdrStartFcount[1] , m_hdrStartFcount[2], + m_hdrDropFcount[0] , m_hdrDropFcount[1] , m_hdrDropFcount[2]); + } +#endif + } + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_funcSCCBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 1; +} + +int ExynosCameraActivitySpecialCapture::t_funcSCCAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 1; +} + +int ExynosCameraActivitySpecialCapture::setCaptureMode(enum SCAPTURE_MODE sCaptureModeVal) +{ + m_specialCaptureMode = sCaptureModeVal; + + ALOGD("DEBUG(%s[%d]):(%d)", __FUNCTION__, __LINE__, m_specialCaptureMode); + + return 1; +} + +int ExynosCameraActivitySpecialCapture::getIsHdr() +{ + if (m_specialCaptureMode == SCAPTURE_MODE_HDR) + return true; + else + return false; +} + +int ExynosCameraActivitySpecialCapture::setCaptureStep(enum SCAPTURE_STEP sCaptureStepVal) +{ + m_specialCaptureStep = sCaptureStepVal; + + if (m_specialCaptureStep == SCAPTURE_STEP_OFF) { + m_hdrFcount = 0; + m_currentInputFcount = 0; + m_backupAeExpCompensation = 0; + m_hdrStartFcount[0] = 0; + m_check = false; + + m_hdrStartFcount[0] = 0; + m_hdrStartFcount[1] = 0; + m_hdrStartFcount[2] = 0; + m_hdrDropFcount[0] = 0; + m_hdrDropFcount[1] = 0; + m_hdrDropFcount[2] = 0; + + /* 
dealloc buffers */ + } + + if (m_specialCaptureStep == SCAPTURE_STEP_START) { + /* alloc buffers */ + } + + ALOGD("DEBUG(%s[%d]):(%d)", __FUNCTION__, __LINE__, m_specialCaptureStep); + + return 1; +} + +unsigned int ExynosCameraActivitySpecialCapture::getHdrStartFcount(int index) +{ + return m_hdrStartFcount[index]; +} + +unsigned int ExynosCameraActivitySpecialCapture::getHdrDropFcount(void) +{ + return m_hdrDropFcount[0]; +} + +int ExynosCameraActivitySpecialCapture::resetHdrStartFcount() +{ + ALOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + m_hdrStartFcount[0] = m_hdrStartFcount[1] = m_hdrStartFcount[2] = 0; + + return 1; +} + +int ExynosCameraActivitySpecialCapture::getHdrWaitFcount() +{ + return HDR_WAIT_COUNT; +} + +void ExynosCameraActivitySpecialCapture::setHdrBuffer(ExynosCameraBuffer *secondBuffer, ExynosCameraBuffer *thirdBuffer) +{ + m_hdrBuffer[0] = secondBuffer; + m_hdrBuffer[1] = thirdBuffer; + + ALOGD("DEBUG(%s[%d]):(%p / %p)", __FUNCTION__, __LINE__, m_hdrBuffer[0], secondBuffer); + ALOGD("DEBUG(%s[%d]):(%p / %p)", __FUNCTION__, __LINE__, m_hdrBuffer[1], thirdBuffer); + ALOGD("DEBUG(%s[%d]):(%d) (%d)", __FUNCTION__, __LINE__, m_hdrBuffer[0]->size[0], m_hdrBuffer[0]->size[1]); + ALOGD("DEBUG(%s[%d]):(%d) (%d)", __FUNCTION__, __LINE__, m_hdrBuffer[1]->size[0], m_hdrBuffer[1]->size[1]); + + return; +} + +ExynosCameraBuffer *ExynosCameraActivitySpecialCapture::getHdrBuffer(int index) +{ + ALOGD("DEBUG(%s[%d]):(%d)", __FUNCTION__, __LINE__, index); + + return (m_hdrBuffer[index]); +} + +#ifdef RAWDUMP_CAPTURE +unsigned int ExynosCameraActivitySpecialCapture::getRawCaptureFcount(void) +{ + return m_RawCaptureFcount; +} + +void ExynosCameraActivitySpecialCapture::resetRawCaptureFcount() +{ + ALOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + m_RawCaptureFcount = 0; +} +#endif +} /* namespace android */ + diff --git a/libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.h b/libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.h new file mode 100644 index 0000000..5ded41c --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivitySpecialCapture.h @@ -0,0 +1,169 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file ExynosCameraActivitySpecialCapture.h + * \brief hearder file for CAMERA HAL MODULE + * \author Pilsun Jang(pilsun.jang@samsung.com) + * \date 2012/12/19 + * + */ + +#ifndef EXYNOS_CAMERA_ACTIVITY_SPECIAL_CAPTURE_H__ +#define EXYNOS_CAMERA_ACTIVITY_SPECIAL_CAPTURE_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include "cutils/properties.h" + +#include "exynos_format.h" +#include "ExynosBuffer.h" +#include "ExynosRect.h" +#include "ExynosJpegEncoderForCamera.h" +#include "ExynosExif.h" +#include "exynos_v4l2.h" +#include "ExynosCameraActivityBase.h" + +#include "fimc-is-metadata.h" + +#define CAPTURE_SKIP_COUNT (1) + +#define HDR_WAITING_SLEEP_TIME (15000) /* 15 msec */ +#define HDR_MAX_WAITING_TIME (1000000) +#define HDR_TIMEOUT_COUNT (30) /* 30 fps * 1 sec */ +#define HDR_BESTSHOT_MAX_WAITING_TIME (100000) /* 100 msec */ + +/* #define HDR_WAIT_COUNT (3) */ +#define HDR_WAIT_COUNT (0) +#define HDR_FRAME_COUNT (4) +#define BAYER_LOCK (2) +#define HDR_REPROCESSING_COUNT (3) + +namespace android { + +class ExynosCameraActivitySpecialCapture : public ExynosCameraActivityBase { +public: + enum SCAPTURE_DUMMY { + SCAPTURE_DUMMY, + SCAPTURE_DUMMY1 + }; + + enum SCAPTURE_MODE { + SCAPTURE_MODE_NONE, + SCAPTURE_MODE_HDR, + SCAPTURE_MODE_LLL, + SCAPTURE_MODE_OIS, + SCAPTURE_MODE_RAW, + SCAPTURE_MODE_END + }; + + enum SCAPTURE_STEP { + SCAPTURE_STEP_OFF, + SCAPTURE_STEP_START, + SCAPTURE_STEP_MINUS_SET, + SCAPTURE_STEP_ZERO_DELAY_SET, + SCAPTURE_STEP_ZERO_SET, + SCAPTURE_STEP_PLUS_SET, + SCAPTURE_STEP_RESTORE, + SCAPTURE_STEP_WAIT_CAPTURE_DELAY, + SCAPTURE_STEP_WAIT_CAPTURE, + SCAPTURE_STEP_END + }; + +public: + ExynosCameraActivitySpecialCapture(); + virtual ~ExynosCameraActivitySpecialCapture(); + +protected: + int t_funcNull(void *args); + int t_funcSensorBefore(void *args); + int t_funcSensorAfter(void *args); + int t_func3ABefore(void *args); + int t_func3AAfter(void *args); + int t_func3ABeforeHAL3(void *args); + int t_func3AAfterHAL3(void *args); + int t_funcISPBefore(void *args); + int t_funcISPAfter(void *args); + int t_funcSCPBefore(void *args); + int t_funcSCPAfter(void *args); + int t_funcSCCBefore(void *args); + int t_funcSCCAfter(void *args); + +public: + int setCaptureMode(enum SCAPTURE_MODE sCaptureModeVal); + int setCaptureStep(enum SCAPTURE_STEP sCaptureStepVal); + int getIsHdr(); + unsigned int getHdrStartFcount(int index); + unsigned int getHdrDropFcount(void); + int resetHdrStartFcount(); + int getHdrWaitFcount(); + void setHdrBuffer(ExynosCameraBuffer *secondBuffer, ExynosCameraBuffer *thirdBuffer); + +#ifdef RAWDUMP_CAPTURE + unsigned int getRawCaptureFcount(void); + void resetRawCaptureFcount(); +#endif + + ExynosCameraBuffer *getHdrBuffer(int index); + +private: + enum SCAPTURE_MODE m_specialCaptureMode; + enum SCAPTURE_STEP m_specialCaptureStep; + + unsigned int m_hdrFcount; + unsigned int m_currentInputFcount; + unsigned int m_hdrStartFcount[3]; + unsigned int m_hdrDropFcount[3]; + + int m_backupAeExpCompensation; + enum aa_scene_mode m_backupSceneMode; + enum aa_mode m_backupAaMode; + enum aa_ae_lock m_backupAeLock; + int m_backupAeTargetFpsRange[2]; + long m_backupFrameDuration; + int m_delay; + bool m_check; + + ExynosCameraBuffer m_reqBuf; + ExynosCameraBuffer *m_hdrBuffer[2]; +#ifdef RAWDUMP_CAPTURE + unsigned int m_RawCaptureFcount; +#endif +}; +} + +#endif /* 
EXYNOS_CAMERA_ACTIVITY_SPECIAL_CAPTURE_H__ */ diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityUCTL.cpp b/libcamera/common_v2/Activities/ExynosCameraActivityUCTL.cpp new file mode 100644 index 0000000..3f4aaf5 --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityUCTL.cpp @@ -0,0 +1,148 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityUCTL" +#include + +#include "ExynosCameraActivityUCTL.h" +//#include "ExynosCamera.h" + +namespace android { + +class ExynosCamera; + +ExynosCameraActivityUCTL::ExynosCameraActivityUCTL() +{ + m_rotation = 0; +} + +ExynosCameraActivityUCTL::~ExynosCameraActivityUCTL() +{ +} + +int ExynosCameraActivityUCTL::t_funcNull(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + return 1; +} + +int ExynosCameraActivityUCTL::t_funcSensorBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivityUCTL::t_funcSensorAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivityUCTL::t_funcISPBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivityUCTL::t_funcISPAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivityUCTL::t_func3ABefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext != NULL) { +#ifdef FD_ROTATION + shot_ext->shot.uctl.scalerUd.orientation = m_rotation; +#endif + } + + return 1; +} + +int ExynosCameraActivityUCTL::t_func3AAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + return 1; +} + +int ExynosCameraActivityUCTL::t_func3ABeforeHAL3(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityUCTL::t_func3AAfterHAL3(__unused void *args) +{ + return 1; +} + +int ExynosCameraActivityUCTL::t_funcSCPBefore(__unused void *args) +{ + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 1; +} + + +int ExynosCameraActivityUCTL::t_funcSCPAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_stream *shot_stream = (struct camera2_stream *)(buf->addr[2]); + + return 1; +} + +int ExynosCameraActivityUCTL::t_funcSCCBefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 1; +} + +int 
ExynosCameraActivityUCTL::t_funcSCCAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + ALOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 1; +} + +void ExynosCameraActivityUCTL::setDeviceRotation(int rotation) +{ + m_rotation = rotation; +} +} /* namespace android */ + diff --git a/libcamera/common_v2/Activities/ExynosCameraActivityUCTL.h b/libcamera/common_v2/Activities/ExynosCameraActivityUCTL.h new file mode 100644 index 0000000..0ac4fd1 --- /dev/null +++ b/libcamera/common_v2/Activities/ExynosCameraActivityUCTL.h @@ -0,0 +1,93 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file ExynosCameraActivityUCTL.h + * \brief hearder file for CAMERA HAL MODULE + * \author Pilsun Jang(pilsun.jang@samsung.com) + * \date 2013/12/16 + * + */ + +#ifndef EXYNOS_CAMERA_ACTIVITY_UCTL_H__ +#define EXYNOS_CAMERA_ACTIVITY_UCTL_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include "cutils/properties.h" + +#include "exynos_format.h" +#include "ExynosBuffer.h" +#include "ExynosRect.h" +#include "ExynosJpegEncoderForCamera.h" +#include "ExynosExif.h" +#include "exynos_v4l2.h" +#include "ExynosCameraActivityBase.h" + +#include "fimc-is-metadata.h" + +namespace android { + +class ExynosCameraActivityUCTL : public ExynosCameraActivityBase { +public: + ExynosCameraActivityUCTL(); + virtual ~ExynosCameraActivityUCTL(); + +protected: + int t_funcNull(void *args); + int t_funcSensorBefore(void *args); + int t_funcSensorAfter(void *args); + int t_func3ABefore(void *args); + int t_func3AAfter(void *args); + int t_func3ABeforeHAL3(void *args); + int t_func3AAfterHAL3(void *args); + int t_funcISPBefore(void *args); + int t_funcISPAfter(void *args); + int t_funcSCPBefore(void *args); + int t_funcSCPAfter(void *args); + int t_funcSCCBefore(void *args); + int t_funcSCCAfter(void *args); + +public: + void setDeviceRotation(int rotation); + +private: + int m_rotation; +}; +} + +#endif /* EXYNOS_CAMERA_ACTIVITY_UCTL_H__ */ diff --git a/libcamera/common_v2/Buffers/ExynosCameraBuffer.h b/libcamera/common_v2/Buffers/ExynosCameraBuffer.h new file mode 100644 index 0000000..9783440 --- /dev/null +++ b/libcamera/common_v2/Buffers/ExynosCameraBuffer.h @@ -0,0 +1,215 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file ExynosCameraBuffer.h + * \brief hearder file for ExynosCameraBuffer + * \author Sunmi Lee(carrotsm.lee@samsung.com) + * \date 2013/07/17 + * + */ + +#ifndef EXYNOS_CAMERA_BUFFER_H__ +#define EXYNOS_CAMERA_BUFFER_H__ + + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "gralloc_priv.h" + +#include "ExynosCameraMemory.h" +#include "fimc-is-metadata.h" + +namespace android { + +/* metadata plane : non-cached buffer */ +/* image plane (default) : non-cached buffer */ +#define EXYNOS_CAMERA_BUFFER_1MB (1024*1024) +#define EXYNOS_CAMERA_BUFFER_WARNING_TIME_MARGIN (100) /* 0.1ms per 1MB */ + +#define EXYNOS_CAMERA_BUFFER_ION_MASK_NONCACHED (ION_HEAP_SYSTEM_MASK) +#define EXYNOS_CAMERA_BUFFER_ION_FLAG_NONCACHED (0) +#define EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_NONCACHED (1600 + EXYNOS_CAMERA_BUFFER_WARNING_TIME_MARGIN) /* 1.6ms per 1MB */ + +#define EXYNOS_CAMERA_BUFFER_ION_MASK_CACHED (ION_HEAP_SYSTEM_MASK) +#define EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED (ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC) +#define EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED_SYNC_FORCE (ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC | ION_FLAG_SYNC_FORCE) +#define EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_CACHED (670 + EXYNOS_CAMERA_BUFFER_WARNING_TIME_MARGIN) /* 0.67ms per 1MB */ + +#ifdef ION_RESERVED_FLAG_FOR_JUNGFRAU +#define EXYNOS_CAMERA_BUFFER_ION_MASK_RESERVED (EXYNOS_ION_HEAP_CAMERA) +#define EXYNOS_CAMERA_BUFFER_ION_FLAG_RESERVED (0) +#else +#define EXYNOS_CAMERA_BUFFER_ION_MASK_RESERVED (ION_HEAP_EXYNOS_CONTIG_MASK) +#define EXYNOS_CAMERA_BUFFER_ION_FLAG_RESERVED (ION_EXYNOS_VIDEO_MASK) +#endif +#define EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_RESERVED (50) /* 0.05ms */ + +#define EXYNOS_CAMERA_BUFFER_GRALLOC_WARNING_TIME (3300 + EXYNOS_CAMERA_BUFFER_WARNING_TIME_MARGIN) /* 3.3ms per 1MB */ + +typedef enum exynos_camera_buffer_type { + EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE = 0, + EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE = 1, + EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE = 2, + EXYNOS_CAMERA_BUFFER_ION_NONCACHED_RESERVED_TYPE = EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE, + EXYNOS_CAMERA_BUFFER_ION_CACHED_RESERVED_TYPE = 3, + EXYNOS_CAMERA_BUFFER_ION_CACHED_SYNC_FORCE_TYPE = 4, + EXYNOS_CAMERA_BUFFER_INVALID_TYPE, +} exynos_camera_buffer_type_t; + +enum EXYNOS_CAMERA_BUFFER_POSITION { + EXYNOS_CAMERA_BUFFER_POSITION_NONE = 0, + EXYNOS_CAMERA_BUFFER_POSITION_IN_DRIVER, + EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, + EXYNOS_CAMERA_BUFFER_POSITION_IN_SERVICE, + EXYNOS_CAMERA_BUFFER_POSITION_MAX +}; + +enum EXYNOS_CAMERA_BUFFER_PERMISSION { + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE = 0, + EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE, + EXYNOS_CAMERA_BUFFER_PERMISSION_IN_PROCESS, + EXYNOS_CAMERA_BUFFER_PERMISSION_MAX +}; + +struct ExynosCameraBufferStatus { + int driverReturnValue; + enum EXYNOS_CAMERA_BUFFER_POSITION position; + enum EXYNOS_CAMERA_BUFFER_PERMISSION permission; +#ifdef __cplusplus + ExynosCameraBufferStatus() { + driverReturnValue = 0; + position = EXYNOS_CAMERA_BUFFER_POSITION_NONE; + permission = EXYNOS_CAMERA_BUFFER_PERMISSION_NONE; + } + + ExynosCameraBufferStatus& operator =(const ExynosCameraBufferStatus &other) { + driverReturnValue = other.driverReturnValue; + position = other.position; + permission = other.permission; + + return *this; + } + + bool operator ==(const ExynosCameraBufferStatus &other) const { + bool ret = true; + + if 
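/* [Editor's note: illustrative sketch, not part of the imported BSP source.]
 * Each exynos_camera_buffer_type_t above boils down to an ION heap mask plus
 * allocation flags (and an expected per-MB allocation-time budget): system
 * heap with or without caching, and an optional reserved/contiguous heap.
 * ExynosCameraBufferManager::m_defaultAlloc() further down applies this
 * mapping per buffer; a reduced version, ignoring the reserved-count
 * fallback to the system heap (the cached-reserved type additionally ORs in
 * EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED; the helper name is hypothetical):
 *
 *   static void ionParamsForType(exynos_camera_buffer_type_t type,
 *                                int *mask, int *flags)
 *   {
 *       switch (type) {
 *       case EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE:
 *           *mask  = EXYNOS_CAMERA_BUFFER_ION_MASK_CACHED;
 *           *flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED;
 *           break;
 *       case EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE:
 *           *mask  = EXYNOS_CAMERA_BUFFER_ION_MASK_RESERVED;
 *           *flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_RESERVED;
 *           break;
 *       case EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE:
 *       default:
 *           *mask  = EXYNOS_CAMERA_BUFFER_ION_MASK_NONCACHED;
 *           *flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_NONCACHED;
 *           break;
 *       }
 *   }
 */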
(driverReturnValue != other.driverReturnValue + || position != other.position + || permission != other.permission) { + ret = false; + } + + return ret; + } + + bool operator !=(const ExynosCameraBufferStatus &other) const { + return !(*this == other); + } +#endif +}; + +struct ExynosCameraBuffer { + int index; + int planeCount; + int fd[EXYNOS_CAMERA_BUFFER_MAX_PLANES]; + unsigned int size[EXYNOS_CAMERA_BUFFER_MAX_PLANES]; + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES]; + char *addr[EXYNOS_CAMERA_BUFFER_MAX_PLANES]; + struct ExynosCameraBufferStatus status; + exynos_camera_buffer_type_t type; /* this value in effect exclude metadataPlane*/ + + int acquireFence; + int releaseFence; + +#ifdef __cplusplus + ExynosCameraBuffer() { + index = -1; + planeCount = 0; + for (int planeIndex = 0; planeIndex < EXYNOS_CAMERA_BUFFER_MAX_PLANES; planeIndex++) { + fd[planeIndex] = -1; + size[planeIndex] = 0; + bytesPerLine[planeIndex] = 0; + addr[planeIndex] = NULL; + } + status.driverReturnValue = 0; + status.position = EXYNOS_CAMERA_BUFFER_POSITION_NONE; + status.permission = EXYNOS_CAMERA_BUFFER_PERMISSION_NONE; + type = EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE; + + acquireFence = -1; + releaseFence = -1; + } + + ExynosCameraBuffer& operator =(const ExynosCameraBuffer &other) { + index = other.index; + planeCount = other.planeCount; + for (int i = 0; i < EXYNOS_CAMERA_BUFFER_MAX_PLANES; i++) { + fd[i] = other.fd[i]; + size[i] = other.size[i]; + bytesPerLine[i] = other.bytesPerLine[i]; + addr[i] = other.addr[i]; + } + status = other.status; + type = other.type; + + acquireFence = other.acquireFence; + releaseFence = other.releaseFence; + + return *this; + } + + bool operator ==(const ExynosCameraBuffer &other) const { + bool ret = true; + + if (index != other.index + || planeCount != other.planeCount + || status != other.status + || type != other.type + || acquireFence != other.acquireFence + || releaseFence != other.releaseFence) { + ret = false; + } + + for (int i = 0; i < EXYNOS_CAMERA_BUFFER_MAX_PLANES; i++) { + if (fd[i] != other.fd[i] + || size[i] != other.size[i] + || bytesPerLine[i] != other.bytesPerLine[i] + || addr[i] != other.addr[i]) { + ret = false; + break; + } + } + + return ret; + } + + bool operator !=(const ExynosCameraBuffer &other) const { + return !(*this == other); + } +#endif +}; +} +#endif diff --git a/libcamera/common_v2/Buffers/ExynosCameraBufferLocker.cpp b/libcamera/common_v2/Buffers/ExynosCameraBufferLocker.cpp new file mode 100644 index 0000000..0486c38 --- /dev/null +++ b/libcamera/common_v2/Buffers/ExynosCameraBufferLocker.cpp @@ -0,0 +1,226 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * file ExynosCameraBufferLocker.h + * brief hearder file for ExynosCameraBufferLocker + * author Pilsun Jang(pilsun.jang@samsung.com) + * date 2013/08/20 + * + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraBufferLocker" + +#include "ExynosCameraBufferLocker.h" + +namespace android { +ExynosCameraBufferLocker::ExynosCameraBufferLocker() +{ + m_flagCreated = true; + memset(m_bufferLockState, 0x00, sizeof(buffer_lock_state_t) * NUM_BAYER_BUFFERS); + m_QNum = 0; + printWhoAmI(); +} + +ExynosCameraBufferLocker::~ExynosCameraBufferLocker() +{ +} + +void ExynosCameraBufferLocker::init(void) +{ + EXYNOS_CAMERA_BUFFER_LOCKER_IN(); + + buffer_lock_state_t initLockState; + /* search bayer index */ + for (int i = 0; i < NUM_BAYER_BUFFERS; i++) { + m_bufferLockState[i].bufferFcount = 0; + m_bufferLockState[i].bufferLockState = EXYNOS_CAMERA_BUFFER_LOCKER_UNLOCKED; + } + m_QNum= 0; + + EXYNOS_CAMERA_BUFFER_LOCKER_OUT(); +} + +void ExynosCameraBufferLocker::deinit(void) +{ + EXYNOS_CAMERA_BUFFER_LOCKER_IN(); + + m_indexQ.clear(); + m_QNum= 0; + + EXYNOS_CAMERA_BUFFER_LOCKER_OUT(); +} + + +status_t ExynosCameraBufferLocker::setBufferLockByIndex(int index, bool setLock) +{ + ALOGD("[%s], (%d) index %d setLock %d", __FUNCTION__, __LINE__, index, setLock); + + if (setLock) + m_bufferLockState[index].bufferLockState = EXYNOS_CAMERA_BUFFER_LOCKER_LOCKED; + else + m_bufferLockState[index].bufferLockState = EXYNOS_CAMERA_BUFFER_LOCKER_UNLOCKED; + + return NO_ERROR; +} + +status_t ExynosCameraBufferLocker::setBufferLockByFcount(unsigned int fcount, bool setLock) +{ + ALOGV("[%s] (%d) fcount %d", __FUNCTION__, __LINE__, fcount); + + for (int i = 0; i < NUM_BAYER_BUFFERS; i++) { + if (m_bufferLockState[i].bufferFcount == fcount) { + setBufferLockByIndex(i, setLock); + + return NO_ERROR; + } + } + + return INVALID_OPERATION; +} + +status_t ExynosCameraBufferLocker::getBufferLockStateByIndex(int index, bool *lockState) +{ + EXYNOS_CAMERA_BUFFER_LOCKER_IN(); + + if (m_bufferLockState[index].bufferLockState == EXYNOS_CAMERA_BUFFER_LOCKER_LOCKED) + *lockState = true; + else + *lockState = false; + + + EXYNOS_CAMERA_BUFFER_LOCKER_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraBufferLocker::setBufferFcount(int index, unsigned int fcount) +{ + m_bufferLockState[index].bufferFcount = fcount; + + return NO_ERROR; +} + +status_t ExynosCameraBufferLocker::getBufferFcount(int index, unsigned int *fcount) +{ + *fcount = m_bufferLockState[index].bufferFcount; + + return NO_ERROR; +} + +status_t ExynosCameraBufferLocker::putBufferToManageQ(int index) +{ + EXYNOS_CAMERA_BUFFER_LOCKER_IN(); + + Mutex::Autolock lock(m_bufferStateLock); + + m_indexQ.push_back(index); + + EXYNOS_CAMERA_BUFFER_LOCKER_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraBufferLocker::getBufferToManageQ(int *index) +{ + List::iterator token; + + EXYNOS_CAMERA_BUFFER_LOCKER_IN(); + + Mutex::Autolock lock(m_bufferStateLock); + + if (m_indexQ.size() == 0) + return INVALID_OPERATION; + + token = m_indexQ.begin()++; + *index = *token; + m_indexQ.erase(token); + + EXYNOS_CAMERA_BUFFER_LOCKER_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraBufferLocker::getBufferSizeQ(int *size) +{ + Mutex::Autolock lock(m_bufferStateLock); + + *size = m_indexQ.size(); + + return NO_ERROR; +} + +int ExynosCameraBufferLocker::getQnum(void) +{ + return m_QNum; +} + +void ExynosCameraBufferLocker::incQnum(void) +{ + Mutex::Autolock lock(m_QNumLock); + + m_QNum ++; + + return; +} + +void ExynosCameraBufferLocker::decQnum(void) +{ + 
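/* [Editor's note: usage sketch, not part of the imported BSP source.]
 * ExynosCameraBufferLocker pairs a per-index lock/fcount table with a small
 * index queue guarded by m_bufferStateLock: producers push a bayer buffer
 * index with putBufferToManageQ(), consumers pop the oldest one with
 * getBufferToManageQ(), which returns INVALID_OPERATION when the queue is
 * empty. A hypothetical caller:
 *
 *   ExynosCameraBufferLocker locker;
 *   locker.init();
 *
 *   locker.setBufferFcount(0, 101);        // bayer index 0 carries fcount 101
 *   locker.setBufferLockByIndex(0, true);  // mark it locked
 *   locker.putBufferToManageQ(0);          // hand it to the consumer side
 *
 *   int index = -1;
 *   if (locker.getBufferToManageQ(&index) == NO_ERROR) {
 *       bool locked = false;
 *       locker.getBufferLockStateByIndex(index, &locked);
 *       // ... consume the bayer buffer, then ...
 *       locker.setBufferLockByIndex(index, false);
 *   }
 */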
Mutex::Autolock lock(m_QNumLock); + + m_QNum --; + + return; +} + +void ExynosCameraBufferLocker::printWhoAmI() +{ + ALOGD("(%s, %d)", __FUNCTION__, __LINE__); + + return; +} + +void ExynosCameraBufferLocker::printBufferState() +{ + for (int i = 0; i < NUM_BAYER_BUFFERS; i++) { + ALOGD("(%s, %d): [%d].bufferLockState : %d" , __FUNCTION__, __LINE__, i, m_bufferLockState[i].bufferLockState); + ALOGD("(%s, %d): [%d].bufferFcount : %d" , __FUNCTION__, __LINE__, i, m_bufferLockState[i].bufferFcount); + } + + return; +} + +void ExynosCameraBufferLocker::printBufferQ() +{ + List::iterator token; + int index; + + if (m_indexQ.size() == 0) { + ALOGW("(%s, %d) no entry", __FUNCTION__, __LINE__); + return; + } + + ALOGD("(%s, %d) m_indexQ.size() = %zu", __FUNCTION__, __LINE__, m_indexQ.size()); + for (token = m_indexQ.begin(); token != m_indexQ.end(); token++) { + index = *token; + ALOGD("(%s, %d) index = %d", __FUNCTION__, __LINE__, index); + } + + return; +} + +} diff --git a/libcamera/common_v2/Buffers/ExynosCameraBufferLocker.h b/libcamera/common_v2/Buffers/ExynosCameraBufferLocker.h new file mode 100644 index 0000000..2070f83 --- /dev/null +++ b/libcamera/common_v2/Buffers/ExynosCameraBufferLocker.h @@ -0,0 +1,94 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * file ExynosCameraBufferLocker.h + * brief hearder file for ExynosCameraBufferLocker + * author Pilsun Jang(pilsun.jang@samsung.com) + * date 2013/08/20 + * + */ + +#ifndef EXYNOS_CAMERA_BUFFER_LOCKER_H__ +#define EXYNOS_CAMERA_BUFFER_LOCKER_H__ + +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "fimc-is-metadata.h" +#include "ExynosCameraBuffer.h" + + /* #define EXYNOS_CAMERA_BUFFER_LOCKER_TRACE */ + +namespace android { + +#ifdef EXYNOS_CAMERA_BUFFER_LOCKER_TRACE +#define EXYNOS_CAMERA_BUFFER_LOCKER_IN() ALOGD("(%s, %d):IN.." 
, __FUNCTION__, __LINE__) +#define EXYNOS_CAMERA_BUFFER_LOCKER_OUT() ALOGD("(%s, %d):OUT..", __FUNCTION__, __LINE__) +#else +#define EXYNOS_CAMERA_BUFFER_LOCKER_IN() ((void *)0) +#define EXYNOS_CAMERA_BUFFER_LOCKER_OUT() ((void *)0) +#endif + +#define EXYNOS_CAMERA_BUFFER_LOCKER_LOCKED 1 +#define EXYNOS_CAMERA_BUFFER_LOCKER_UNLOCKED 2 + +typedef struct exynos_camera_buffer_lock_state { + int bufferLockState; + unsigned int bufferFcount; +} buffer_lock_state_t; + +class ExynosCameraBufferLocker { + public: + ExynosCameraBufferLocker(); + virtual ~ExynosCameraBufferLocker(); + void init(void); + void deinit(void); + + status_t setBufferLockByIndex(int index, bool setLock); + status_t setBufferLockByFcount(unsigned int fcount, bool setLock); + status_t getBufferLockStateByIndex(int index, bool *lockState); + status_t setBufferFcount(int index, unsigned int fcount); + status_t getBufferFcount(int index, unsigned int *fcount); + status_t putBufferToManageQ(int index); + status_t getBufferToManageQ(int *index); + status_t getBufferSizeQ(int *size); + int getQnum(void); + void incQnum(void); + void decQnum(void); + + void printWhoAmI(void); + void printBufferState(void); + void printBufferQ(void); + + private: + mutable Mutex m_bufferStateLock; + List m_indexQ; + bool m_flagCreated; + buffer_lock_state_t m_bufferLockState[NUM_BAYER_BUFFERS]; + int m_QNum; + mutable Mutex m_QNumLock; +}; +} +#endif + diff --git a/libcamera/common_v2/Buffers/ExynosCameraBufferManager.cpp b/libcamera/common_v2/Buffers/ExynosCameraBufferManager.cpp new file mode 100644 index 0000000..1afa52d --- /dev/null +++ b/libcamera/common_v2/Buffers/ExynosCameraBufferManager.cpp @@ -0,0 +1,3453 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "ExynosCameraBufferManager" +#include "ExynosCameraBufferManager.h" + +namespace android { + +ExynosCameraBufferManager::ExynosCameraBufferManager() +{ + m_isDestructor = false; + m_cameraId = 0; + + init(); + + m_allocationThread = new allocThread(this, &ExynosCameraBufferManager::m_allocationThreadFunc, "allocationThreadFunc"); +} + +ExynosCameraBufferManager::~ExynosCameraBufferManager() +{ + m_isDestructor = true; +} + +ExynosCameraBufferManager *ExynosCameraBufferManager::createBufferManager(buffer_manager_type_t type) +{ + switch (type) { + case BUFFER_MANAGER_ION_TYPE: + return (ExynosCameraBufferManager *)new InternalExynosCameraBufferManager(); + break; + case BUFFER_MANAGER_HEAP_BASE_TYPE: + return (ExynosCameraBufferManager *)new MHBExynosCameraBufferManager(); + break; + case BUFFER_MANAGER_GRALLOC_TYPE: + return (ExynosCameraBufferManager *)new GrallocExynosCameraBufferManager(); + break; + case BUFFER_MANAGER_SERVICE_GRALLOC_TYPE: + return (ExynosCameraBufferManager *)new ServiceExynosCameraBufferManager(); + break; + case BUFFER_MANAGER_INVALID_TYPE: + ALOGE("ERR(%s[%d]):Unknown bufferManager type(%d)", __FUNCTION__, __LINE__, (int)type); + default: + break; + } + + return NULL; +} + +status_t ExynosCameraBufferManager::create(const char *name, int cameraId, void *defaultAllocator) +{ + Mutex::Autolock lock(m_lock); + + status_t ret = NO_ERROR; + + m_cameraId = cameraId; + strncpy(m_name, name, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + if (defaultAllocator == NULL) { + if (m_createDefaultAllocator(false) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_createDefaultAllocator failed", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } else { + if (m_setDefaultAllocator(defaultAllocator) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setDefaultAllocator failed", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + return ret; +} + +status_t ExynosCameraBufferManager::create(const char *name, void *defaultAllocator) +{ + return create(name, 0, defaultAllocator); +} + +void ExynosCameraBufferManager::init(void) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + m_flagAllocated = false; + m_reservedMemoryCount = 0; + m_reqBufCount = 0; + m_allocatedBufCount = 0; + m_allowedMaxBufCount = 0; + m_defaultAllocator = NULL; + m_isCreateDefaultAllocator = false; + memset((void *)m_buffer, 0, (VIDEO_MAX_FRAME) * sizeof(struct ExynosCameraBuffer)); + for (int bufIndex = 0; bufIndex < VIDEO_MAX_FRAME; bufIndex++) { + for (int planeIndex = 0; planeIndex < EXYNOS_CAMERA_BUFFER_MAX_PLANES; planeIndex++) { + m_buffer[bufIndex].fd[planeIndex] = -1; + } + } + m_hasMetaPlane = false; + memset(m_name, 0x00, sizeof(m_name)); + strncpy(m_name, "none", EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_flagSkipAllocation = false; + m_flagNeedMmap = false; + m_allocMode = BUFFER_MANAGER_ALLOCATION_ATONCE; + m_indexOffset = 0; + + m_graphicBufferAllocator.init(); + + EXYNOS_CAMERA_BUFFER_OUT(); +} + +void ExynosCameraBufferManager::deinit(void) +{ + CLOGD("DEBUG(%s[%d]):IN.." , __FUNCTION__, __LINE__); + + if (m_flagAllocated == false) { + CLOGI("INFO(%s[%d]:OUT.. 
Buffer is not allocated", __FUNCTION__, __LINE__); + return; + } + + if (m_allocMode == BUFFER_MANAGER_ALLOCATION_SILENT) { + m_allocationThread->join(); + CLOGI("INFO(%s[%d]):allocationThread is finished", __FUNCTION__, __LINE__); + } + + for (int bufIndex = 0; bufIndex < m_allocatedBufCount; bufIndex++) + cancelBuffer(bufIndex); + + if (m_free() != NO_ERROR) + CLOGE("ERR(%s[%d])::free failed", __FUNCTION__, __LINE__); + + if (m_defaultAllocator != NULL && m_isCreateDefaultAllocator == true) { + delete m_defaultAllocator; + m_defaultAllocator = NULL; + } + + m_reservedMemoryCount = 0; + m_flagSkipAllocation = false; + CLOGD("DEBUG(%s[%d]):OUT..", __FUNCTION__, __LINE__); +} + +status_t ExynosCameraBufferManager::resetBuffers(void) +{ + /* same as deinit */ + /* clear buffers except releasing the memory */ + CLOGD("DEBUG(%s[%d]):IN.." , __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_flagAllocated == false) { + CLOGI("INFO(%s[%d]:OUT.. Buffer is not allocated", __FUNCTION__, __LINE__); + return ret; + } + + if (m_allocMode == BUFFER_MANAGER_ALLOCATION_SILENT) { + m_allocationThread->join(); + CLOGI("INFO(%s[%d]):allocationThread is finished", __FUNCTION__, __LINE__); + } + + for (int bufIndex = m_indexOffset; bufIndex < m_allocatedBufCount + m_indexOffset; bufIndex++) + cancelBuffer(bufIndex); + + m_resetSequenceQ(); + m_flagSkipAllocation = true; + + return ret; +} + +status_t ExynosCameraBufferManager::setAllocator(void *allocator) +{ + Mutex::Autolock lock(m_lock); + + if (allocator == NULL) { + CLOGE("ERR(%s[%d]):m_allocator equals NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + return m_setAllocator(allocator); +} + +status_t ExynosCameraBufferManager::alloc(void) +{ + EXYNOS_CAMERA_BUFFER_IN(); + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + Mutex::Autolock lock(m_lock); + + status_t ret = NO_ERROR; + + if (m_flagSkipAllocation == true) { + CLOGI("INFO(%s[%d]):skip to allocate memory (m_flagSkipAllocation=%d)", + __FUNCTION__, __LINE__, (int)m_flagSkipAllocation); + goto func_exit; + } + + if (m_checkInfoForAlloc() == false) { + CLOGE("ERR(%s[%d]):m_checkInfoForAlloc failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_hasMetaPlane == true) { + if (m_defaultAlloc(m_indexOffset, m_reqBufCount + m_indexOffset, m_hasMetaPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultAlloc failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + + /* allocate image buffer */ + if (m_alloc(m_indexOffset, m_reqBufCount + m_indexOffset) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_alloc failed", __FUNCTION__, __LINE__); + + if (m_hasMetaPlane == true) { + CLOGD("DEBUG(%s[%d]):Free metadata plane. 
bufferCount %d", + __FUNCTION__, __LINE__, m_reqBufCount); + if (m_defaultFree(m_indexOffset, m_reqBufCount + m_indexOffset, m_hasMetaPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultFree failed", __FUNCTION__, __LINE__); + } + } + + ret = INVALID_OPERATION; + goto func_exit; + } + + m_allocatedBufCount = m_reqBufCount; + m_resetSequenceQ(); + m_flagAllocated = true; + + CLOGD("DEBUG(%s[%d]):Allocate the buffer succeeded " + "(m_allocatedBufCount=%d, m_reqBufCount=%d, m_allowedMaxBufCount=%d) --- dumpBufferInfo ---", + __FUNCTION__, __LINE__, + m_allocatedBufCount, m_reqBufCount, m_allowedMaxBufCount); + dumpBufferInfo(); + CLOGD("DEBUG(%s[%d]):------------------------------------------------------------------", + __FUNCTION__, __LINE__); + + if (m_allocMode == BUFFER_MANAGER_ALLOCATION_SILENT) { + /* run the allocationThread */ + m_allocationThread->run(PRIORITY_DEFAULT); + CLOGI("INFO(%s[%d]):allocationThread is started", __FUNCTION__, __LINE__); + } + +func_exit: + + m_flagSkipAllocation = false; + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ExynosCameraBufferManager::m_free(void) +{ + EXYNOS_CAMERA_BUFFER_IN(); + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + Mutex::Autolock lock(m_lock); + + CLOGD("DEBUG(%s[%d]):Free the buffer (m_allocatedBufCount=%d) --- dumpBufferInfo ---", + __FUNCTION__, __LINE__, m_allocatedBufCount); + dumpBufferInfo(); + CLOGD("DEBUG(%s[%d]):------------------------------------------------------", + __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (m_flagAllocated != false) { + if (m_free(m_indexOffset, m_allocatedBufCount + m_indexOffset) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_free failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_hasMetaPlane == true) { + if (m_defaultFree(m_indexOffset, m_allocatedBufCount + m_indexOffset, m_hasMetaPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultFree failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + m_availableBufferIndexQLock.lock(); + m_availableBufferIndexQ.clear(); + m_availableBufferIndexQLock.unlock(); + m_allocatedBufCount = 0; + m_allowedMaxBufCount = 0; + m_flagAllocated = false; + } + + CLOGD("DEBUG(%s[%d]):Free the buffer succeeded (m_allocatedBufCount=%d)", + __FUNCTION__, __LINE__, m_allocatedBufCount); + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +void ExynosCameraBufferManager::m_resetSequenceQ() +{ + Mutex::Autolock lock(m_availableBufferIndexQLock); + m_availableBufferIndexQ.clear(); + + for (int bufIndex = m_indexOffset; bufIndex < m_allocatedBufCount + m_indexOffset; bufIndex++) + m_availableBufferIndexQ.push_back(m_buffer[bufIndex].index); + + return; +} + +void ExynosCameraBufferManager::setContigBufCount(int reservedMemoryCount) +{ + ALOGI("INFO(%s[%d]):reservedMemoryCount(%d)", __FUNCTION__, __LINE__, reservedMemoryCount); + m_reservedMemoryCount = reservedMemoryCount; + return; +} + +int ExynosCameraBufferManager::getContigBufCount(void) +{ + return m_reservedMemoryCount; +} + +/* If Image buffer color format equals YV12, and buffer has MetaDataPlane.. 
+ + planeCount = 4 (set by user) + size[0] : Image buffer plane Y size + size[1] : Image buffer plane Cr size + size[2] : Image buffer plane Cb size + + if (createMetaPlane == true) + size[3] = EXYNOS_CAMERA_META_PLANE_SIZE; (set by BufferManager, internally) +*/ +status_t ExynosCameraBufferManager::setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int reqBufCount, + bool createMetaPlane, + bool needMmap) +{ + status_t ret = NO_ERROR; + + ret = setInfo( + planeCount, + size, + bytePerLine, + 0, + reqBufCount, + reqBufCount, + EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE, + BUFFER_MANAGER_ALLOCATION_ATONCE, + createMetaPlane, + needMmap); + if (ret < 0) + ALOGE("ERR(%s[%d]):setInfo fail", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCameraBufferManager::setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int reqBufCount, + int allowedMaxBufCount, + exynos_camera_buffer_type_t type, + bool createMetaPlane, + bool needMmap) +{ + status_t ret = NO_ERROR; + + ret = setInfo( + planeCount, + size, + bytePerLine, + 0, + reqBufCount, + allowedMaxBufCount, + type, + BUFFER_MANAGER_ALLOCATION_ONDEMAND, + createMetaPlane, + needMmap); + if (ret < 0) + ALOGE("ERR(%s[%d]):setInfo fail", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCameraBufferManager::setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int startBufIndex, + int reqBufCount, + bool createMetaPlane, + bool needMmap) +{ + status_t ret = NO_ERROR; + + ret = setInfo( + planeCount, + size, + bytePerLine, + startBufIndex, + reqBufCount, + reqBufCount, + EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE, + BUFFER_MANAGER_ALLOCATION_ATONCE, + createMetaPlane, + needMmap); + if (ret < 0) + ALOGE("ERR(%s[%d]):setInfo fail", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCameraBufferManager::setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int startBufIndex, + int reqBufCount, + int allowedMaxBufCount, + exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + bool needMmap) +{ + EXYNOS_CAMERA_BUFFER_IN(); + Mutex::Autolock lock(m_lock); + + status_t ret = NO_ERROR; + + if (m_indexOffset > 0) { + CLOGD("DEBUG(%s[%d]):buffer indexOffset(%d), Index[0 - %d] not used", + __FUNCTION__, __LINE__, m_indexOffset, m_indexOffset); + } + m_indexOffset = startBufIndex; + + if (createMetaPlane == true) { + size[planeCount-1] = EXYNOS_CAMERA_META_PLANE_SIZE; + m_hasMetaPlane = true; + } + + if (allowedMaxBufCount < reqBufCount) { + CLOGW("WARN(%s[%d]):abnormal value [reqBufCount=%d, allowedMaxBufCount=%d]", + __FUNCTION__, __LINE__, reqBufCount, allowedMaxBufCount); + allowedMaxBufCount = reqBufCount; + } + + if (reqBufCount < 0 || VIDEO_MAX_FRAME < reqBufCount) { + CLOGE("ERR(%s[%d]):abnormal value [reqBufCount=%d]", + __FUNCTION__, __LINE__, reqBufCount); + ret = BAD_VALUE; + goto func_exit; + } + + if (planeCount < 0 || EXYNOS_CAMERA_BUFFER_MAX_PLANES <= planeCount) { + CLOGE("ERR(%s[%d]):abnormal value [planeCount=%d]", + __FUNCTION__, __LINE__, planeCount); + ret = BAD_VALUE; + goto func_exit; + } + + for (int bufIndex = m_indexOffset; bufIndex < allowedMaxBufCount + m_indexOffset; bufIndex++) { + for (int planeIndex = 0; planeIndex < planeCount; planeIndex++) { + if (size[planeIndex] == 0) { + CLOGE("ERR(%s[%d]):abnormal value [size=%d]", + __FUNCTION__, __LINE__, size[planeIndex]); + ret = BAD_VALUE; + goto func_exit; + } + 
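/* [Editor's note: call sketch, not part of the imported BSP source.]
 * As the comment above setInfo() notes, when createMetaPlane is true the
 * caller passes a planeCount that already includes the metadata plane but
 * only fills in the image-plane sizes; setInfo() overwrites the last entry
 * with EXYNOS_CAMERA_META_PLANE_SIZE itself. A hypothetical call for a
 * 1920x1080 YV12 buffer plus metadata plane (bufMgr and the sizes are made
 * up for illustration):
 *
 *   unsigned int size[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0,};
 *   unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0,};
 *
 *   size[0] = 1920 * 1080;       // Y
 *   size[1] = 1920 * 1080 / 4;   // Cr
 *   size[2] = 1920 * 1080 / 4;   // Cb
 *   // size[3] is filled with EXYNOS_CAMERA_META_PLANE_SIZE inside setInfo()
 *
 *   status_t ret = bufMgr->setInfo(4,        // planeCount incl. meta plane
 *                                  size, bytesPerLine,
 *                                  8,        // reqBufCount
 *                                  true,     // createMetaPlane
 *                                  false);   // needMmap
 */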
m_buffer[bufIndex].size[planeIndex] = size[planeIndex]; + m_buffer[bufIndex].bytesPerLine[planeIndex] = bytePerLine[planeIndex]; + } + m_buffer[bufIndex].planeCount = planeCount; + m_buffer[bufIndex].type = type; + } + m_allowedMaxBufCount = allowedMaxBufCount + startBufIndex; + m_reqBufCount = reqBufCount; + m_flagNeedMmap = needMmap; + m_allocMode = allocMode; +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ExynosCameraBufferManager::setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int startBufIndex, + int reqBufCount, + __unused int allowedMaxBufCount, + __unused exynos_camera_buffer_type_t type, + __unused buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + int width, + int height, + int stride, + int pixelFormat, + bool needMmap) +{ + status_t ret = NO_ERROR; + + ret = setInfo( + planeCount, + size, + bytePerLine, + startBufIndex, + reqBufCount, + reqBufCount, + type, + allocMode, + createMetaPlane, + needMmap); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setInfo fail", __FUNCTION__, __LINE__); + return ret; + } + + ret = m_graphicBufferAllocator.setSize(width, height, stride); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_graphicBufferAllocator.setSiz(%d, %d, %d) fail", + __FUNCTION__, __LINE__, width, height, stride); + return ret; + } + + ret = m_graphicBufferAllocator.setHalPixelFormat(pixelFormat); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_graphicBufferAllocator.setHalPixelFormat(%d) fail", + __FUNCTION__, __LINE__, pixelFormat); + return ret; + } + + return ret; +} + +bool ExynosCameraBufferManager::m_allocationThreadFunc(void) +{ + status_t ret = NO_ERROR; + int increaseCount = 1; + + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + CLOGI("INFO(%s[%d]:increase buffer silently - start - " + "(m_allowedMaxBufCount=%d, m_allocatedBufCount=%d, m_reqBufCount=%d)", + __FUNCTION__, __LINE__, + m_allowedMaxBufCount, m_allocatedBufCount, m_reqBufCount); + + increaseCount = m_allowedMaxBufCount - m_reqBufCount; + + /* increase buffer*/ + for (int count = 0; count < increaseCount; count++) { + ret = m_increase(1); + if (ret < 0) { + CLOGE("ERR(%s[%d]):increase the buffer failed", __FUNCTION__, __LINE__); + } else { + m_lock.lock(); + m_availableBufferIndexQ.push_back(m_buffer[m_allocatedBufCount + m_indexOffset].index); + m_allocatedBufCount++; + m_lock.unlock(); + } + + } + dumpBufferInfo(); + CLOGI("INFO(%s[%d]:increase buffer silently - end - (increaseCount=%d)" + "(m_allowedMaxBufCount=%d, m_allocatedBufCount=%d, m_reqBufCount=%d)", + __FUNCTION__, __LINE__, increaseCount, + m_allowedMaxBufCount, m_allocatedBufCount, m_reqBufCount); + + /* false : Thread run once */ + return false; +} + +status_t ExynosCameraBufferManager::registerBuffer( + __unused int frameCount, + __unused buffer_handle_t *handle, + __unused int acquireFence, + __unused int releaseFence, + __unused enum EXYNOS_CAMERA_BUFFER_POSITION position) +{ + return NO_ERROR; +} + +status_t ExynosCameraBufferManager::putBuffer( + int bufIndex, + enum EXYNOS_CAMERA_BUFFER_POSITION position) +{ + EXYNOS_CAMERA_BUFFER_IN(); + Mutex::Autolock lock(m_lock); + + status_t ret = NO_ERROR; + List::iterator r; + bool found = false; + enum EXYNOS_CAMERA_BUFFER_PERMISSION permission; + + permission = EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE; + + if (bufIndex < 0 || m_allocatedBufCount + m_indexOffset <= bufIndex) { + CLOGE("ERR(%s[%d]):buffer Index in out of bound [bufIndex=%d], allocatedBufCount(%d)", + __FUNCTION__, __LINE__, bufIndex, 
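/* [Editor's note: illustrative sketch, not part of the imported BSP source.]
 * Three allocation modes are visible here: ATONCE allocates all
 * m_reqBufCount buffers inside alloc(), ONDEMAND grows the pool one buffer
 * at a time inside getBuffer() when the free list is empty, and SILENT lets
 * m_allocationThreadFunc() above grow from m_reqBufCount up to
 * m_allowedMaxBufCount in the background. Among the overloads shown here,
 * SILENT has to be requested through the full setInfo() overload, e.g.
 * (bufMgr and the counts are made up for illustration):
 *
 *   ret = bufMgr->setInfo(planeCount, size, bytesPerLine,
 *                         0,       // startBufIndex
 *                         8,       // reqBufCount
 *                         12,      // allowedMaxBufCount
 *                         EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE,
 *                         BUFFER_MANAGER_ALLOCATION_SILENT,
 *                         true,    // createMetaPlane
 *                         false);  // needMmap
 */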
m_allocatedBufCount); + ret = INVALID_OPERATION; + goto func_exit; + } + + m_availableBufferIndexQLock.lock(); + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + if (bufIndex == *r) { + found = true; + break; + } + } + m_availableBufferIndexQLock.unlock(); + + if (found == true) { + CLOGI("INFO(%s[%d]):bufIndex=%d is already in (available state)", + __FUNCTION__, __LINE__, bufIndex); + goto func_exit; + } + + if (m_putBuffer(bufIndex) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_putBuffer failed [bufIndex=%d, position=%d, permission=%d]", + __FUNCTION__, __LINE__, bufIndex, (int)position, (int)permission); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (updateStatus(bufIndex, 0, position, permission) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bufIndex=%d, position=%d, permission=%d]", + __FUNCTION__, __LINE__, bufIndex, (int)position, (int)permission); + ret = INVALID_OPERATION; + goto func_exit; + } + + m_availableBufferIndexQLock.lock(); + m_availableBufferIndexQ.push_back(m_buffer[bufIndex].index); + m_availableBufferIndexQLock.unlock(); + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +/* User Process need to check the index of buffer returned from "getBuffer()" */ +status_t ExynosCameraBufferManager::getBuffer( + int *reqBufIndex, + enum EXYNOS_CAMERA_BUFFER_POSITION position, + struct ExynosCameraBuffer *buffer) +{ + EXYNOS_CAMERA_BUFFER_IN(); + Mutex::Autolock lock(m_lock); + + status_t ret = NO_ERROR; + List::iterator r; + + int bufferIndex; + enum EXYNOS_CAMERA_BUFFER_PERMISSION permission; + int acquireFence = -1; + int releaseFence = -1; + + bufferIndex = *reqBufIndex; + permission = EXYNOS_CAMERA_BUFFER_PERMISSION_NONE; + + if (m_allocatedBufCount == 0) { + CLOGE("ERR(%s[%d]):m_allocatedBufCount equals zero", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_getBuffer(&bufferIndex, &acquireFence, &releaseFence) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_getBuffer failed [bufferIndex=%d, position=%d, permission=%d]", + __FUNCTION__, __LINE__, bufferIndex, (int)position, (int)permission); + ret = INVALID_OPERATION; + goto func_exit; + } + +reDo: + + if (bufferIndex < 0 || m_allocatedBufCount + m_indexOffset <= bufferIndex) { + /* find availableBuffer */ + m_availableBufferIndexQLock.lock(); + if (m_availableBufferIndexQ.empty() == false) { + r = m_availableBufferIndexQ.begin(); + bufferIndex = *r; + m_availableBufferIndexQ.erase(r); +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGI("INFO(%s[%d]):available buffer [index=%d]...", + __FUNCTION__, __LINE__, bufferIndex); +#endif + } + m_availableBufferIndexQLock.unlock(); + } else { + m_availableBufferIndexQLock.lock(); + /* get the Buffer of requested */ + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + if (bufferIndex == *r) { + m_availableBufferIndexQ.erase(r); + break; + } + } + m_availableBufferIndexQLock.unlock(); + } + + if (0 <= bufferIndex && bufferIndex < m_allocatedBufCount + m_indexOffset) { + /* found buffer */ + if (isAvaliable(bufferIndex) == false) { + CLOGE("ERR(%s[%d]):isAvaliable failed [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); + ret = BAD_VALUE; + goto func_exit; + } + + permission = EXYNOS_CAMERA_BUFFER_PERMISSION_IN_PROCESS; + + if (updateStatus(bufferIndex, 0, position, permission) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=%d, permission=%d]", + __FUNCTION__, __LINE__, bufferIndex, (int)position, (int)permission); + ret = 
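/* [Editor's note: caller-side sketch, not part of the imported BSP source.]
 * getBuffer() and putBuffer() form the ownership cycle of this manager:
 * getBuffer() pops an index from m_availableBufferIndexQ (or grows the pool
 * in ONDEMAND mode), marks it PERMISSION_IN_PROCESS with the caller's
 * position and copies out m_buffer[index]; putBuffer() marks the index
 * PERMISSION_AVAILABLE again and pushes it back onto the queue. A
 * hypothetical caller (bufMgr is assumed to be created and allocated):
 *
 *   ExynosCameraBuffer buffer;
 *   int bufIndex = -1;   // a negative index lets the manager pick a free one
 *
 *   if (bufMgr->getBuffer(&bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL,
 *                         &buffer) == NO_ERROR) {
 *       // ... hand buffer.fd[] and buffer.addr[] to the driver and wait ...
 *       bufMgr->putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE);
 *   }
 */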
INVALID_OPERATION; + goto func_exit; + } + } else { + /* do not find buffer */ + if (m_allocMode == BUFFER_MANAGER_ALLOCATION_ONDEMAND) { + /* increase buffer*/ + ret = m_increase(1); + if (ret < 0) { + CLOGE("ERR(%s[%d]):increase the buffer failed, m_allocatedBufCount %d, bufferIndex %d", + __FUNCTION__, __LINE__, m_allocatedBufCount, bufferIndex); + } else { + m_availableBufferIndexQLock.lock(); + m_availableBufferIndexQ.push_back(m_allocatedBufCount + m_indexOffset); + m_availableBufferIndexQLock.unlock(); + bufferIndex = m_allocatedBufCount + m_indexOffset; + m_allocatedBufCount++; + + dumpBufferInfo(); + CLOGI("INFO(%s[%d]):increase the buffer succeeded (bufferIndex=%d)", + __FUNCTION__, __LINE__, bufferIndex); + goto reDo; + } + } else { + if (m_allocatedBufCount == 1) + bufferIndex = 0; + else + ret = INVALID_OPERATION; + } + + if (ret < 0) { +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):find free buffer... failed --- dump ---", + __FUNCTION__, __LINE__); + dump(); + CLOGD("DEBUG(%s[%d]):----------------------------------------", + __FUNCTION__, __LINE__); + CLOGD("DEBUG(%s[%d]):buffer Index in out of bound [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); +#endif + ret = BAD_VALUE; + goto func_exit; + } + } + + m_buffer[bufferIndex].index = bufferIndex; + + m_buffer[bufferIndex].acquireFence = acquireFence; + m_buffer[bufferIndex].releaseFence = releaseFence; + + *reqBufIndex = bufferIndex; + *buffer = m_buffer[bufferIndex]; + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ExynosCameraBufferManager::increase(int increaseCount) +{ + CLOGD("DEBUG(%s[%d]):increaseCount(%d) function invalid. Do nothing", __FUNCTION__, __LINE__, increaseCount); + return NO_ERROR; +} + +#ifdef USE_GRALLOC_REUSE_SUPPORT +status_t ExynosCameraBufferManager::cancelBuffer(int bufIndex, bool isReuse) +#else +status_t ExynosCameraBufferManager::cancelBuffer(int bufIndex) +#endif +{ + int ret = NO_ERROR; +#ifdef USE_GRALLOC_REUSE_SUPPORT + if (isReuse == true) { + CLOGD("DEBUG(%s[%d]):cancelBuffer bufIndex(%d) isReuse(true) function invalid, put buffer", __FUNCTION__, __LINE__, bufIndex); + } +#endif + { + ret = putBuffer(bufIndex, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + } + return ret; +} + +int ExynosCameraBufferManager::getBufStride(void) +{ + return 0; +} + +status_t ExynosCameraBufferManager::updateStatus( + int bufIndex, + int driverValue, + enum EXYNOS_CAMERA_BUFFER_POSITION position, + enum EXYNOS_CAMERA_BUFFER_PERMISSION permission) +{ + if (bufIndex < 0) { + CLOGE("ERR(%s[%d]):Invalid buffer index %d", + __FUNCTION__, __LINE__, bufIndex); + return BAD_VALUE; + } + + m_buffer[bufIndex].index = bufIndex; + m_buffer[bufIndex].status.driverReturnValue = driverValue; + m_buffer[bufIndex].status.position = position; + m_buffer[bufIndex].status.permission = permission; + + return NO_ERROR; +} + +status_t ExynosCameraBufferManager::getStatus( + int bufIndex, + struct ExynosCameraBufferStatus *bufStatus) +{ + *bufStatus = m_buffer[bufIndex].status; + + return NO_ERROR; +} + +status_t ExynosCameraBufferManager::getIndexByHandle(__unused buffer_handle_t *handle, __unused int *index) +{ + return NO_ERROR; +} + +status_t ExynosCameraBufferManager::getHandleByIndex(__unused buffer_handle_t **handle, __unused int index) +{ + return NO_ERROR; +} + +sp ExynosCameraBufferManager::getGraphicBuffer(int index) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + sp graphicBuffer; + int planeCount = 0; + + if ((index < 0) || (index >= m_allowedMaxBufCount)) { + CLOGE("ERR(%s[%d]):Buffer index 
error (%d/%d)", __FUNCTION__, __LINE__, index, m_allowedMaxBufCount); + goto done; + } + + planeCount = m_buffer[index].planeCount; + + if (m_hasMetaPlane == true) { + planeCount--; + } + + graphicBuffer = m_graphicBufferAllocator.alloc(index, planeCount, m_buffer[index].fd, m_buffer[index].addr, m_buffer[index].size); + if (graphicBuffer == 0) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.alloc(%d) fail", __FUNCTION__, __LINE__, index); + goto done; + } + +done: + EXYNOS_CAMERA_BUFFER_OUT(); + + return graphicBuffer; +} + +bool ExynosCameraBufferManager::isAllocated(void) +{ + return m_flagAllocated; +} + +bool ExynosCameraBufferManager::isAvaliable(int bufIndex) +{ + bool ret = false; + + switch (m_buffer[bufIndex].status.permission) { + case EXYNOS_CAMERA_BUFFER_PERMISSION_NONE: + case EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE: + ret = true; + break; + + case EXYNOS_CAMERA_BUFFER_PERMISSION_IN_PROCESS: + default: +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):buffer is not available", __FUNCTION__, __LINE__); + dump(); +#endif + ret = false; + break; + } + + return ret; +} + +status_t ExynosCameraBufferManager::m_setDefaultAllocator(void *allocator) +{ + m_defaultAllocator = (ExynosCameraIonAllocator *)allocator; + + return NO_ERROR; +} + +status_t ExynosCameraBufferManager::m_defaultAlloc(int bIndex, int eIndex, bool isMetaPlane) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + int planeIndexStart = 0; + int planeIndexEnd = 0; + bool mapNeeded = false; +#ifdef DEBUG_RAWDUMP + char enableRawDump[PROP_VALUE_MAX]; +#endif /* DEBUG_RAWDUMP */ + + int mask = EXYNOS_CAMERA_BUFFER_ION_MASK_NONCACHED; + int flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_NONCACHED; + + ExynosCameraDurationTimer m_timer; + long long durationTime = 0; + long long durationTimeSum = 0; + unsigned int estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_NONCACHED; + unsigned int estimatedTime = 0; + unsigned int bufferSize = 0; + int reservedMaxCount = 0; + int bufIndex = 0; + if (m_defaultAllocator == NULL) { + CLOGE("ERR(%s[%d]):m_defaultAllocator equals NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (bIndex < 0 || eIndex < 0) { + CLOGE("ERR(%s[%d]):Invalid index parameters. bIndex %d eIndex %d", + __FUNCTION__, __LINE__, + bIndex, eIndex); + ret = BAD_VALUE; + goto func_exit; + } + + if (isMetaPlane == true) { + mapNeeded = true; + } else { +#ifdef DEBUG_RAWDUMP + mapNeeded = true; +#else + mapNeeded = m_flagNeedMmap; +#endif + } + + for (bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + if (isMetaPlane == true) { + planeIndexStart = m_buffer[bufIndex].planeCount-1; + planeIndexEnd = m_buffer[bufIndex].planeCount; + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_NONCACHED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_NONCACHED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_NONCACHED; + } else { + planeIndexStart = 0; + planeIndexEnd = (m_hasMetaPlane ? 
+ m_buffer[bufIndex].planeCount-1 : m_buffer[bufIndex].planeCount); + switch (m_buffer[bufIndex].type) { + case EXYNOS_CAMERA_BUFFER_ION_NONCACHED_TYPE: + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_NONCACHED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_NONCACHED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_NONCACHED; + break; + case EXYNOS_CAMERA_BUFFER_ION_CACHED_TYPE: + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_CACHED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_CACHED; + break; + case EXYNOS_CAMERA_BUFFER_ION_RESERVED_TYPE: + /* case EXYNOS_CAMERA_BUFFER_ION_NONCACHED_RESERVED_TYPE: */ +#ifdef RESERVED_MEMORY_ENABLE + reservedMaxCount = (m_reservedMemoryCount > 0 ? m_reservedMemoryCount : RESERVED_BUFFER_COUNT_MAX); +#else + reservedMaxCount = 0; +#endif + if (bufIndex < reservedMaxCount) { + CLOGI("INFO(%s[%d]):bufIndex(%d) < reservedMaxCount(%d) , m_reservedMemoryCount(%d), non-cached", + __FUNCTION__, __LINE__, bufIndex, reservedMaxCount, m_reservedMemoryCount); + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_RESERVED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_RESERVED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_RESERVED; + } else { + CLOGI("INFO(%s[%d]):bufIndex(%d) >= reservedMaxCount(%d) , m_reservedMemoryCount(%d), non-cached. so, alloc ion memory instead of reserved memory", + __FUNCTION__, __LINE__, bufIndex, reservedMaxCount, m_reservedMemoryCount); + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_NONCACHED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_NONCACHED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_NONCACHED; + } + break; + case EXYNOS_CAMERA_BUFFER_ION_CACHED_RESERVED_TYPE: +#ifdef RESERVED_MEMORY_ENABLE + reservedMaxCount = (m_reservedMemoryCount > 0 ? m_reservedMemoryCount : RESERVED_BUFFER_COUNT_MAX); +#else + reservedMaxCount = 0; +#endif + if (bufIndex < reservedMaxCount) { + CLOGI("INFO(%s[%d]):bufIndex(%d) < reservedMaxCount(%d) , m_reservedMemoryCount(%d), cached", + __FUNCTION__, __LINE__, bufIndex, reservedMaxCount, m_reservedMemoryCount); + + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_RESERVED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_RESERVED | EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_RESERVED; + } else { + CLOGI("INFO(%s[%d]):bufIndex(%d) >= reservedMaxCount(%d) , m_reservedMemoryCount(%d), cached. 
so, alloc ion memory instead of reserved memory", + __FUNCTION__, __LINE__, bufIndex, reservedMaxCount, m_reservedMemoryCount); + + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_CACHED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_CACHED; + } + break; + case EXYNOS_CAMERA_BUFFER_ION_CACHED_SYNC_FORCE_TYPE: + ALOGD("DEBUG(%s[%d]):SYNC_FORCE_CACHED", __FUNCTION__, __LINE__); + mask = EXYNOS_CAMERA_BUFFER_ION_MASK_CACHED; + flags = EXYNOS_CAMERA_BUFFER_ION_FLAG_CACHED_SYNC_FORCE; + estimatedBase = EXYNOS_CAMERA_BUFFER_ION_WARNING_TIME_CACHED; + break; + case EXYNOS_CAMERA_BUFFER_INVALID_TYPE: + default: + CLOGE("ERR(%s[%d]):buffer type is invaild (%d)", __FUNCTION__, __LINE__, (int)m_buffer[bufIndex].type); + break; + } + } + + if (isMetaPlane == false) { + m_timer.start(); + bufferSize = 0; + } + + for (int planeIndex = planeIndexStart; planeIndex < planeIndexEnd; planeIndex++) { + if (m_buffer[bufIndex].fd[planeIndex] >= 0) { + CLOGE("ERR(%s[%d]):buffer[%d].fd[%d] = %d already allocated", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_buffer[bufIndex].fd[planeIndex]); + continue; + } + + if (m_defaultAllocator->alloc( + m_buffer[bufIndex].size[planeIndex], + &(m_buffer[bufIndex].fd[planeIndex]), + &(m_buffer[bufIndex].addr[planeIndex]), + mask, + flags, + mapNeeded) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultAllocator->alloc(bufIndex=%d, planeIndex=%d, planeIndex=%d) failed", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_buffer[bufIndex].size[planeIndex]); + ret = INVALID_OPERATION; + goto func_exit; + } +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + printBufferInfo(__FUNCTION__, __LINE__, bufIndex, planeIndex); +#endif + if (isMetaPlane == false) + bufferSize = bufferSize + m_buffer[bufIndex].size[planeIndex]; + } + if (isMetaPlane == false) { + m_timer.stop(); + durationTime = m_timer.durationMsecs(); + durationTimeSum += durationTime; + CLOGD("DEBUG(%s[%d]):duration time(%5d msec):(type=%d, bufIndex=%d, size=%.2f)", + __FUNCTION__, __LINE__, (int)durationTime, m_buffer[bufIndex].type, bufIndex, (float)bufferSize / (float)(1024 * 1024)); + + estimatedTime = estimatedBase * bufferSize / EXYNOS_CAMERA_BUFFER_1MB; + if (estimatedTime < durationTime) { + CLOGW("WARN(%s[%d]):estimated time(%5d msec):(type=%d, bufIndex=%d, size=%d)", + __FUNCTION__, __LINE__, (int)estimatedTime, m_buffer[bufIndex].type, bufIndex, (int)bufferSize); + } + } + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + } + CLOGD("DEBUG(%s[%d]):Duration time of buffer allocation(%5d msec)", __FUNCTION__, __LINE__, (int)durationTimeSum); + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; + +func_exit: + EXYNOS_CAMERA_BUFFER_OUT(); + + if (bufIndex < eIndex) { + if (m_defaultFree(0, bufIndex, isMetaPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultFree failed", __FUNCTION__, __LINE__); + } + } + return ret; +} + +status_t ExynosCameraBufferManager::m_defaultFree(int bIndex, int eIndex, bool isMetaPlane) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + int planeIndexStart = 0; + int planeIndexEnd = 0; + bool mapNeeded = false; +#ifdef DEBUG_RAWDUMP + char enableRawDump[PROP_VALUE_MAX]; +#endif /* DEBUG_RAWDUMP */ + + if (isMetaPlane == true) { + mapNeeded = true; + } else { +#ifdef DEBUG_RAWDUMP + mapNeeded = true; 
+#else + mapNeeded = m_flagNeedMmap; +#endif + } + + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + if (isAvaliable(bufIndex) == false) { + CLOGE("ERR(%s[%d]):buffer [bufIndex=%d] in InProcess state", + __FUNCTION__, __LINE__, bufIndex); + if (m_isDestructor == false) { + ret = BAD_VALUE; + continue; + } else { + CLOGE("ERR(%s[%d]):buffer [bufIndex=%d] in InProcess state, but try to forcedly free", + __FUNCTION__, __LINE__, bufIndex); + } + } + + if (isMetaPlane == true) { + planeIndexStart = m_buffer[bufIndex].planeCount-1; + planeIndexEnd = m_buffer[bufIndex].planeCount; + } else { + planeIndexStart = 0; + planeIndexEnd = (m_hasMetaPlane ? + m_buffer[bufIndex].planeCount-1 : m_buffer[bufIndex].planeCount); + } + + for (int planeIndex = planeIndexStart; planeIndex < planeIndexEnd; planeIndex++) { + if (m_defaultAllocator->free( + m_buffer[bufIndex].size[planeIndex], + &(m_buffer[bufIndex].fd[planeIndex]), + &(m_buffer[bufIndex].addr[planeIndex]), + mapNeeded) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultAllocator->free for Imagedata Plane failed", + __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_graphicBufferAllocator.free(bufIndex) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.free(%d) fail", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + } + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +bool ExynosCameraBufferManager::m_checkInfoForAlloc(void) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + bool ret = true; + + if (m_reqBufCount < 0 || VIDEO_MAX_FRAME < m_reqBufCount) { + CLOGE("ERR(%s[%d]):buffer Count in out of bound [m_reqBufCount=%d]", + __FUNCTION__, __LINE__, m_reqBufCount); + ret = false; + goto func_exit; + } + + for (int bufIndex = m_indexOffset; bufIndex < m_reqBufCount + m_indexOffset; bufIndex++) { + if (m_buffer[bufIndex].planeCount < 0 + || VIDEO_MAX_PLANES <= m_buffer[bufIndex].planeCount) { + CLOGE("ERR(%s[%d]):plane Count in out of bound [m_buffer[bIndex].planeCount=%d]", + __FUNCTION__, __LINE__, m_buffer[bufIndex].planeCount); + ret = false; + goto func_exit; + } + + for (int planeIndex = 0; planeIndex < m_buffer[bufIndex].planeCount; planeIndex++) { + if (m_buffer[bufIndex].size[planeIndex] == 0) { + CLOGE("ERR(%s[%d]):size is empty [m_buffer[%d].size[%d]=%d]", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_buffer[bufIndex].size[planeIndex]); + ret = false; + goto func_exit; + } + } + } + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ExynosCameraBufferManager::m_createDefaultAllocator(bool isCached) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + m_defaultAllocator = new ExynosCameraIonAllocator(); + m_isCreateDefaultAllocator = true; + if (m_defaultAllocator->init(isCached) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultAllocator->init failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +int ExynosCameraBufferManager::getAllocatedBufferCount(void) +{ + return m_allocatedBufCount; +} + +int ExynosCameraBufferManager::getAvailableIncreaseBufferCount(void) +{ + CLOGI("INFO(%s[%d]):this function only applied to 
ONDEMAND mode (%d)", __FUNCTION__, __LINE__, m_allocMode); + int numAvailable = 0; + + if (m_allocMode == BUFFER_MANAGER_ALLOCATION_ONDEMAND) + numAvailable += (m_allowedMaxBufCount - m_allocatedBufCount); + + CLOGI("INFO(%s[%d]):m_allowedMaxBufCount(%d), m_allocatedBufCount(%d), ret(%d)", + __FUNCTION__, __LINE__, m_allowedMaxBufCount, m_allocatedBufCount, numAvailable); + return numAvailable; +} + +int ExynosCameraBufferManager::getNumOfAvailableBuffer(void) +{ + int numAvailable = 0; + + for (int i = m_indexOffset; i < m_allocatedBufCount + m_indexOffset; i++) { + if (m_buffer[i].status.permission == EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE) + numAvailable++; + } + + if (m_allocMode == BUFFER_MANAGER_ALLOCATION_ONDEMAND) + numAvailable += (m_allowedMaxBufCount - m_allocatedBufCount); + + return numAvailable; +} + +int ExynosCameraBufferManager::getNumOfAvailableAndNoneBuffer(void) +{ + int numAvailable = 0; + + for (int i = m_indexOffset; i < m_allocatedBufCount + m_indexOffset; i++) { + if (m_buffer[i].status.permission == EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE || + m_buffer[i].status.permission == EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) + numAvailable++; + } + + return numAvailable; +} + +void ExynosCameraBufferManager::printBufferState(void) +{ + for (int i = m_indexOffset; i < m_allocatedBufCount + m_indexOffset; i++) { + CLOGI("INFO(%s[%d]):m_buffer[%d].fd[0]=%d, position=%d, permission=%d]", + __FUNCTION__, __LINE__, i, m_buffer[i].fd[0], + m_buffer[i].status.position, m_buffer[i].status.permission); + } + + return; +} + +void ExynosCameraBufferManager::printBufferState(int bufIndex, int planeIndex) +{ + CLOGI("INFO(%s[%d]):m_buffer[%d].fd[%d]=%d, .status.permission=%d]", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_buffer[bufIndex].fd[planeIndex], + m_buffer[bufIndex].status.permission); + + return; +} + +void ExynosCameraBufferManager::printBufferQState() +{ + List::iterator r; + int bufferIndex; + + Mutex::Autolock lock(m_availableBufferIndexQLock); + + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + bufferIndex = *r; + CLOGV("DEBUG(%s[%d]):bufferIndex=%d", __FUNCTION__, __LINE__, bufferIndex); + } + + return; +} + +void ExynosCameraBufferManager::printBufferInfo( + const char *funcName, + const int lineNum, + int bufIndex, + int planeIndex) +{ + CLOGI("INFO(%s[%d]):[m_buffer[%d].fd[%d]=%d] .addr=%p .size=%d]", + funcName, lineNum, bufIndex, planeIndex, + m_buffer[bufIndex].fd[planeIndex], + m_buffer[bufIndex].addr[planeIndex], + m_buffer[bufIndex].size[planeIndex]); + + return; +} + +void ExynosCameraBufferManager::dump(void) +{ + printBufferState(); + printBufferQState(); + + return; +} + +void ExynosCameraBufferManager::dumpBufferInfo(void) +{ + for (int bufIndex = m_indexOffset; bufIndex < m_allocatedBufCount + m_indexOffset; bufIndex++) + for (int planeIndex = 0; planeIndex < m_buffer[bufIndex].planeCount; planeIndex++) { + CLOGI("INFO(%s[%d]):[m_buffer[%d].fd[%d]=%d] .addr=%p .size=%d .position=%d .permission=%d]", + __FUNCTION__, __LINE__, m_buffer[bufIndex].index, planeIndex, + m_buffer[bufIndex].fd[planeIndex], + m_buffer[bufIndex].addr[planeIndex], + m_buffer[bufIndex].size[planeIndex], + m_buffer[bufIndex].status.position, + m_buffer[bufIndex].status.permission); + } + printBufferQState(); + + return; +} + +status_t ExynosCameraBufferManager::setBufferCount(__unused int bufferCount) +{ + CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +int ExynosCameraBufferManager::getBufferCount(void) +{ + 
CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return 0; +} + +InternalExynosCameraBufferManager::InternalExynosCameraBufferManager() +{ + ExynosCameraBufferManager::init(); +} + +InternalExynosCameraBufferManager::~InternalExynosCameraBufferManager() +{ + ExynosCameraBufferManager::deinit(); +} + +status_t InternalExynosCameraBufferManager::m_setAllocator(void *allocator) +{ + return m_setDefaultAllocator(allocator); +} + +status_t InternalExynosCameraBufferManager::m_alloc(int bIndex, int eIndex) +{ + return m_defaultAlloc(bIndex, eIndex, false); +} + +status_t InternalExynosCameraBufferManager::m_free(int bIndex, int eIndex) +{ + return m_defaultFree(bIndex, eIndex, false); +} + +status_t InternalExynosCameraBufferManager::m_increase(int increaseCount) +{ + CLOGD("DEBUG(%s[%d]):IN.." , __FUNCTION__, __LINE__); + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = NO_ERROR; + + if (m_allowedMaxBufCount <= m_allocatedBufCount) { + CLOGD("DEBUG(%s[%d]):BufferManager can't increase the buffer " + "(m_reqBufCount=%d, m_allowedMaxBufCount=%d <= m_allocatedBufCount=%d)", + __FUNCTION__, __LINE__, + m_reqBufCount, m_allowedMaxBufCount, m_allocatedBufCount); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_allowedMaxBufCount < m_allocatedBufCount + increaseCount) { + CLOGI("INFO(%s[%d]):change the increaseCount (%d->%d) --- " + "(m_reqBufCount=%d, m_allowedMaxBufCount=%d <= m_allocatedBufCount=%d + increaseCount=%d)", + __FUNCTION__, __LINE__, increaseCount, m_allowedMaxBufCount - m_allocatedBufCount, + m_reqBufCount, m_allowedMaxBufCount, m_allocatedBufCount, increaseCount); + increaseCount = m_allowedMaxBufCount - m_allocatedBufCount; + } + + /* set the buffer information */ + for (int bufIndex = m_allocatedBufCount + m_indexOffset; bufIndex < m_allocatedBufCount + m_indexOffset + increaseCount; bufIndex++) { + for (int planeIndex = 0; planeIndex < m_buffer[0].planeCount; planeIndex++) { + if (m_buffer[0].size[planeIndex] == 0) { + CLOGE("ERR(%s[%d]):abnormal value [size=%d]", + __FUNCTION__, __LINE__, m_buffer[0].size[planeIndex]); + ret = BAD_VALUE; + goto func_exit; + } + m_buffer[bufIndex].size[planeIndex] = m_buffer[0].size[planeIndex]; + m_buffer[bufIndex].bytesPerLine[planeIndex] = m_buffer[0].bytesPerLine[planeIndex]; + } + m_buffer[bufIndex].planeCount = m_buffer[0].planeCount; + m_buffer[bufIndex].type = m_buffer[0].type; + } + + if (m_alloc(m_allocatedBufCount + m_indexOffset, m_allocatedBufCount + m_indexOffset + increaseCount) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_alloc failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_hasMetaPlane == true) { + if (m_defaultAlloc(m_allocatedBufCount + m_indexOffset, m_allocatedBufCount + m_indexOffset + increaseCount, m_hasMetaPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultAlloc failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + + CLOGD("DEBUG(%s[%d]):Increase the buffer succeeded (m_allocatedBufCount=%d, increaseCount=%d)", + __FUNCTION__, __LINE__, m_allocatedBufCount + m_indexOffset, increaseCount); + +func_exit: + + CLOGD("DEBUG(%s[%d]):OUT.." , __FUNCTION__, __LINE__); + + return ret; +} + +status_t InternalExynosCameraBufferManager::m_decrease(void) +{ + CLOGD("DEBUG(%s[%d]):IN.." 
, __FUNCTION__, __LINE__); + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + status_t ret = true; + List::iterator r; + + int bufferIndex = -1; + + if (m_allocatedBufCount <= m_reqBufCount) { + CLOGD("DEBUG(%s[%d]):BufferManager can't decrease the buffer " + "(m_allowedMaxBufCount=%d, m_allocatedBufCount=%d <= m_reqBufCount=%d)", + __FUNCTION__, __LINE__, + m_allowedMaxBufCount, m_allocatedBufCount, m_reqBufCount); + ret = INVALID_OPERATION; + goto func_exit; + } + bufferIndex = m_allocatedBufCount; + + if (m_free(bufferIndex-1 + m_indexOffset, bufferIndex + m_indexOffset) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_free failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_hasMetaPlane == true) { + if (m_defaultFree(bufferIndex-1 + m_indexOffset, bufferIndex + m_indexOffset, m_hasMetaPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultFree failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + + m_availableBufferIndexQLock.lock(); + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + if ((bufferIndex + m_indexOffset) == *r) { + m_availableBufferIndexQ.erase(r); + break; + } + } + m_availableBufferIndexQLock.unlock(); + m_allocatedBufCount--; + + CLOGD("DEBUG(%s[%d]):Decrease the buffer succeeded (m_allocatedBufCount=%d)" , + __FUNCTION__, __LINE__, m_allocatedBufCount); + +func_exit: + + CLOGD("DEBUG(%s[%d]):OUT.." , __FUNCTION__, __LINE__); + + return ret; +} + +status_t InternalExynosCameraBufferManager::m_putBuffer(__unused int bufIndex) +{ + return NO_ERROR; +} + +status_t InternalExynosCameraBufferManager::m_getBuffer(__unused int *bufIndex, __unused int *acquireFence, __unused int *releaseFence) +{ + return NO_ERROR; +} + +status_t InternalExynosCameraBufferManager::increase(int increaseCount) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + Mutex::Autolock lock(m_lock); + status_t ret = NO_ERROR; + + CLOGI("INFO(%s[%d]):m_allocatedBufCount(%d), m_allowedMaxBufCount(%d), increaseCount(%d)", + __FUNCTION__, __LINE__, m_allocatedBufCount, m_allowedMaxBufCount, increaseCount); + + /* increase buffer*/ + ret = m_increase(increaseCount); + if (ret < 0) { + CLOGE("ERR(%s[%d]):increase the buffer failed, m_allocatedBufCount(%d), m_allowedMaxBufCount(%d), increaseCount(%d)", + __FUNCTION__, __LINE__, m_allocatedBufCount, m_allowedMaxBufCount, increaseCount); + } else { + for (int bufferIndex = m_allocatedBufCount + m_indexOffset; bufferIndex < m_allocatedBufCount + m_indexOffset + increaseCount; bufferIndex++) { + m_availableBufferIndexQLock.lock(); + m_availableBufferIndexQ.push_back(bufferIndex); + m_availableBufferIndexQLock.unlock(); + } + m_allocatedBufCount += increaseCount; + + dumpBufferInfo(); + CLOGI("INFO(%s[%d]):increase the buffer succeeded (increaseCount(%d))", + __FUNCTION__, __LINE__, increaseCount); + } + +func_exit: + + return ret; +} + +MHBExynosCameraBufferManager::MHBExynosCameraBufferManager() +{ + ExynosCameraBufferManager::init(); + + m_allocator = NULL; + m_numBufsHeap = 1; + + for (int bufIndex = m_indexOffset; bufIndex < VIDEO_MAX_FRAME; bufIndex++) { + for (int planeIndex = 0; planeIndex < EXYNOS_CAMERA_BUFFER_MAX_PLANES; planeIndex++) { + m_heap[bufIndex][planeIndex] = NULL; + } + } +} + +MHBExynosCameraBufferManager::~MHBExynosCameraBufferManager() +{ + ExynosCameraBufferManager::deinit(); +} + +status_t MHBExynosCameraBufferManager::m_setAllocator(void *allocator) +{ + m_allocator = (ExynosCameraMHBAllocator *)allocator; + + return NO_ERROR; +} + 
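Editor's sketch (not part of the imported BSP sources): the on-demand path shown above (BUFFER_MANAGER_ALLOCATION_ONDEMAND) grows the pool lazily. When no free index is queued, getBuffer() calls m_increase(1), which clones the plane sizes, bytes-per-line and buffer type from m_buffer[0] into the next slot, allocates it, and pushes the new index onto m_availableBufferIndexQ. The standalone C++ sketch below models only that growth pattern; SimpleBufferPool, Slot and kMaxSlots are hypothetical simplifications and do not appear in this patch.

// Illustrative sketch of on-demand pool growth (hypothetical types, not HAL code).
#include <cstdio>
#include <deque>
#include <vector>

struct Slot {
    std::vector<int> planeSizes;   // cloned from slot 0, like m_buffer[0].size[]
    bool allocated = false;
};

class SimpleBufferPool {
public:
    SimpleBufferPool(std::vector<int> planeSizes, int maxSlots)
        : m_maxSlots(maxSlots) {
        // Slot 0 acts as the template, mirroring m_buffer[0] in the HAL code.
        m_slots.push_back({std::move(planeSizes), true});
        m_freeIndexQ.push_back(0);
    }

    // Returns a buffer index, growing the pool by one slot when the free
    // queue is empty -- the BUFFER_MANAGER_ALLOCATION_ONDEMAND behaviour.
    int getBuffer() {
        if (m_freeIndexQ.empty()) {
            if ((int)m_slots.size() >= m_maxSlots)
                return -1;                            // allowed maximum reached
            Slot grown;
            grown.planeSizes = m_slots[0].planeSizes; // clone plane layout from slot 0
            grown.allocated = true;
            m_slots.push_back(grown);
            m_freeIndexQ.push_back((int)m_slots.size() - 1);
            std::printf("grew pool to %zu slots\n", m_slots.size());
        }
        int index = m_freeIndexQ.front();
        m_freeIndexQ.pop_front();
        return index;
    }

    void putBuffer(int index) { m_freeIndexQ.push_back(index); }

private:
    std::vector<Slot> m_slots;
    std::deque<int> m_freeIndexQ;   // stands in for m_availableBufferIndexQ
    int m_maxSlots;
};

int main() {
    SimpleBufferPool pool({1024 * 1024, 256}, /*maxSlots=*/4);
    int a = pool.getBuffer();       // slot 0 comes from the free queue
    int b = pool.getBuffer();       // queue empty -> pool grows to slot 1
    std::printf("indices: %d %d\n", a, b);
    pool.putBuffer(a);
    pool.putBuffer(b);
    return 0;
}

In the real manager the newly cloned slot is additionally backed by an ION allocation (plus a metadata plane when m_hasMetaPlane is set) before its index is queued, and the queue itself is guarded by m_availableBufferIndexQLock.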
+status_t MHBExynosCameraBufferManager::m_alloc(int bIndex, int eIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + int planeCount = 0; + + if (m_allocator == NULL) { + CLOGE("ERR(%s[%d]):m_allocator equals NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + planeCount = (m_hasMetaPlane ? + m_buffer[bufIndex].planeCount-1 : m_buffer[bufIndex].planeCount); + + for (int planeIndex = 0; planeIndex < planeCount; planeIndex++) { + if (m_allocator->alloc( + m_buffer[bufIndex].size[planeIndex], + &(m_buffer[bufIndex].fd[planeIndex]), + &(m_buffer[bufIndex].addr[planeIndex]), + m_numBufsHeap, + &(m_heap[bufIndex][planeIndex])) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_allocator->alloc(bufIndex=%d, planeIndex=%d, planeIndex=%d) failed", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_buffer[bufIndex].size[planeIndex]); + return INVALID_OPERATION; + } +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + printBufferInfo(__FUNCTION__, __LINE__, bufIndex, planeIndex); + CLOGI("INFO(%s[%d]):[m_buffer[%d][%d].heap=%p]", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_heap[bufIndex][planeIndex]); +#endif + } + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + return INVALID_OPERATION; + } + } + + EXYNOS_CAMERA_BUFFER_OUT(); + + return NO_ERROR; +} + +status_t MHBExynosCameraBufferManager::m_free(int bIndex, int eIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + int planeCount = 0; + + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + if (isAvaliable(bufIndex) == false) { + CLOGE("ERR(%s[%d]):buffer [bufIndex=%d] in InProcess state", + __FUNCTION__, __LINE__, bufIndex); + if (m_isDestructor == false) { + ret = BAD_VALUE; + continue; + } else { + CLOGE("ERR(%s[%d]):buffer [bufIndex=%d] in InProcess state, but try to forcedly free", + __FUNCTION__, __LINE__, bufIndex); + } + } + + planeCount = (m_hasMetaPlane ? + m_buffer[bufIndex].planeCount-1 : m_buffer[bufIndex].planeCount); + + for (int planeIndex = 0; planeIndex < planeCount; planeIndex++) { + if (m_allocator->free( + m_buffer[bufIndex].size[planeIndex], + &(m_buffer[bufIndex].fd[planeIndex]), + &(m_buffer[bufIndex].addr[planeIndex]), + &(m_heap[bufIndex][planeIndex])) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_defaultAllocator->free for Imagedata Plane failed", + __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + m_heap[bufIndex][planeIndex] = 0; + } + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_graphicBufferAllocator.free(bufIndex) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.free(%d) fail", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + } + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t MHBExynosCameraBufferManager::m_increase(__unused int increaseCount) +{ + CLOGD("DEBUG(%s[%d]):allocMode(%d) is invalid. 
Do nothing", __FUNCTION__, __LINE__, m_allocMode); + return INVALID_OPERATION; +} + +status_t MHBExynosCameraBufferManager::m_decrease(void) +{ + return INVALID_OPERATION; +} + +status_t MHBExynosCameraBufferManager::m_putBuffer(__unused int bufIndex) +{ + return NO_ERROR; +} + +status_t MHBExynosCameraBufferManager::m_getBuffer(__unused int *bufIndex, __unused int *acquireFence, __unused int *releaseFence) +{ + return NO_ERROR; +} + +status_t MHBExynosCameraBufferManager::allocMulti() +{ + m_numBufsHeap = m_reqBufCount; + m_reqBufCount = 1; + + return alloc(); +} + +status_t MHBExynosCameraBufferManager::getHeapMemory( + int bufIndex, + int planeIndex, + camera_memory_t **heap) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + if (m_buffer[bufIndex].status.position != EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL) { + CLOGE("ERR(%s[%d]):buffer position not in IN_HAL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (m_heap[bufIndex][planeIndex] == NULL) { + CLOGE("ERR(%s[%d]):m_heap equals NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + *heap = m_heap[bufIndex][planeIndex]; +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGI("INFO(%s[%d]):heap=%p", __FUNCTION__, __LINE__, *heap); +#endif + + EXYNOS_CAMERA_BUFFER_OUT(); + + return NO_ERROR; +} + +GrallocExynosCameraBufferManager::GrallocExynosCameraBufferManager() +{ + ExynosCameraBufferManager::init(); + + m_allocator = NULL; + m_dequeuedBufCount = 0; + m_minUndequeuedBufCount = 0; + m_bufStride = 0; + m_bufferCount = 0; + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + m_collectedBufferCount = 0; + + m_stopBufferCollector = false; + m_bufferCollector = new grallocBufferThread(this, &GrallocExynosCameraBufferManager::m_bufferCollectorThreadFunc, "GrallocBufferCollector", PRIORITY_DEFAULT); +#endif + + for (int bufIndex = 0; bufIndex < VIDEO_MAX_FRAME; bufIndex++) { + m_handle[bufIndex] = NULL; + m_handleIsLocked[bufIndex] = false; + } +} + +GrallocExynosCameraBufferManager::~GrallocExynosCameraBufferManager() +{ + ExynosCameraBufferManager::deinit(); +} + +status_t GrallocExynosCameraBufferManager::m_setAllocator(void *allocator) +{ + m_allocator = (ExynosCameraGrallocAllocator *)allocator; + + return NO_ERROR; +} + +status_t GrallocExynosCameraBufferManager::m_alloc(int bIndex, int eIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + ExynosCameraDurationTimer m_timer; + long long durationTime = 0; + long long durationTimeSum = 0; + unsigned int estimatedBase = EXYNOS_CAMERA_BUFFER_GRALLOC_WARNING_TIME; + unsigned int estimatedTime = 0; + unsigned int bufferSize = 0; + int planeIndexEnd = 0; + + if (m_allocator == NULL) { + CLOGE("ERR(%s[%d]):m_allocator equals NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (getBufferCount() == 0) { + CLOGE("ERR(%s[%d]):m_reqBufCount(%d)", __FUNCTION__, __LINE__, m_reqBufCount); + setBufferCount(m_reqBufCount); + } + + m_minUndequeuedBufCount = m_allocator->getMinUndequeueBuffer(); + if (m_minUndequeuedBufCount < 0 ) { + CLOGE("ERR(%s[%d]):m_minUndequeuedBufCount=%d..", + __FUNCTION__, __LINE__, m_minUndequeuedBufCount); + ret = INVALID_OPERATION; + goto func_exit; + } +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGI("INFO(%s[%d]):before dequeueBuffer m_reqBufCount=%d, m_minUndequeuedBufCount=%d", + __FUNCTION__, __LINE__, m_reqBufCount, m_minUndequeuedBufCount); +#endif + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + m_timer.start(); + if (m_allocator->alloc( + &m_handle[bufIndex], + m_buffer[bufIndex].fd, + m_buffer[bufIndex].addr, + &m_bufStride, + 
&m_handleIsLocked[bufIndex]) != NO_ERROR) { + CLOGE("ERR(%s[%d]):alloc failed [bufIndex=%d]", __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + planeIndexEnd = m_buffer[bufIndex].planeCount; + + if (m_hasMetaPlane == true) + planeIndexEnd--; + + bufferSize = 0; + for (int planeIndex = 0; planeIndex < planeIndexEnd; planeIndex++) + bufferSize = bufferSize + m_buffer[bufIndex].size[planeIndex]; + + m_timer.stop(); + durationTime = m_timer.durationMsecs(); + durationTimeSum += durationTime; + CLOGD("DEBUG(%s[%d]):duration time(%5d msec):(type=%d, bufIndex=%d, size=%.2f)", + __FUNCTION__, __LINE__, (int)durationTime, m_buffer[bufIndex].type, bufIndex, (float)bufferSize / (float)(1024 * 1024)); + + estimatedTime = estimatedBase * bufferSize / EXYNOS_CAMERA_BUFFER_1MB; + if (estimatedTime < durationTime) { + CLOGW("WARN(%s[%d]):estimated time(%5d msec):(type=%d, bufIndex=%d, size=%d)", + __FUNCTION__, __LINE__, (int)estimatedTime, m_buffer[bufIndex].type, bufIndex, (int)bufferSize); + } + +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):-- dump buffer status --", __FUNCTION__, __LINE__); + dump(); +#endif + m_dequeuedBufCount++; + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bufIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + } + CLOGD("DEBUG(%s[%d]):Duration time of buffer allocation(%5d msec)", __FUNCTION__, __LINE__, (int)durationTimeSum); + + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { +#ifdef USE_GRALLOC_REUSE_SUPPORT + m_cancelReuseBuffer(bufIndex, true); +#else +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):-- dump buffer status --", __FUNCTION__, __LINE__); + dump(); +#endif + if (m_allocator->cancelBuffer(m_handle[bufIndex]) != 0) { + CLOGE("ERR(%s[%d]):could not free [bufIndex=%d]", __FUNCTION__, __LINE__, bufIndex); + goto func_exit; + } + m_dequeuedBufCount--; + m_handleIsLocked[bufIndex] = false; + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bufIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } +#endif + } + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + m_bufferCollector->run(); +#endif +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGI("INFO(%s[%d]):before exit m_alloc m_dequeuedBufCount=%d, m_minUndequeuedBufCount=%d", + __FUNCTION__, __LINE__, m_dequeuedBufCount, m_minUndequeuedBufCount); +#endif + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t GrallocExynosCameraBufferManager::m_free(int bIndex, int eIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + CLOGD("DEBUG(%s[%d]):IN -- dump buffer status --", __FUNCTION__, __LINE__); + dump(); + + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + if (m_handleIsLocked[bufIndex] == false) { + CLOGD("DEBUG(%s[%d]):buffer [bufIndex=%d] already free", __FUNCTION__, __LINE__, bufIndex); + continue; + } + + if (m_allocator->free(m_handle[bufIndex], m_handleIsLocked[bufIndex]) != 0) { + CLOGE("ERR(%s[%d]):could not free [bufIndex=%d]", __FUNCTION__, __LINE__, bufIndex); + goto func_exit; + } + m_dequeuedBufCount--; + m_handle[bufIndex] = NULL; + m_handleIsLocked[bufIndex] = false; + + if (updateStatus( + bufIndex, + 
0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_graphicBufferAllocator.free(bufIndex) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.free(%d) fail", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + } + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t GrallocExynosCameraBufferManager::m_increase(__unused int increaseCount) +{ + CLOGD("DEBUG(%s[%d]):allocMode(%d) is invalid. Do nothing", __FUNCTION__, __LINE__, m_allocMode); + return INVALID_OPERATION; +} + +status_t GrallocExynosCameraBufferManager::m_decrease(void) +{ + return INVALID_OPERATION; +} + +status_t GrallocExynosCameraBufferManager::m_putBuffer(int bufIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + if (m_handle[bufIndex] != NULL && + m_buffer[bufIndex].status.position == EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL) { + if (m_allocator->enqueueBuffer(m_handle[bufIndex]) != 0) { + CLOGE("ERR(%s[%d]):could not enqueue_buffer [bufIndex=%d]", + __FUNCTION__, __LINE__, bufIndex); + CLOGD("DEBUG(%s[%d]):dump buffer status", __FUNCTION__, __LINE__); + dump(); + goto func_exit; + } + m_dequeuedBufCount--; + m_handleIsLocked[bufIndex] = false; + } + m_buffer[bufIndex].status.position = EXYNOS_CAMERA_BUFFER_POSITION_IN_SERVICE; + +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):dump buffer status", __FUNCTION__, __LINE__); + dump(); +#endif + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t GrallocExynosCameraBufferManager::m_getBuffer(int *bufIndex, __unused int *acquireFence, __unused int *releaseFence) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + buffer_handle_t *bufHandle = NULL; + int bufferFd[3] = {0}; + void *bufferAddr[3] = {NULL}; + + int stride = 0; + int bufferIndex = -1; + + const private_handle_t *priv_handle; + bool isExistedBuffer = false; + bool isLocked = false; + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + /* Error return check is done by callee */ + ret = m_getCollectedBuffer(bufIndex); +#else /* USE_GRALLOC_BUFFER_COLLECTOR */ + m_minUndequeuedBufCount = m_allocator->getMinUndequeueBuffer(); + + if (m_minUndequeuedBufCount < 0 ) { + CLOGE("ERR(%s[%d]):m_minUndequeuedBufCount=%d..", + __FUNCTION__, __LINE__, m_minUndequeuedBufCount); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_dequeuedBufCount == m_reqBufCount - m_minUndequeuedBufCount) { + CLOGI("INFO(%s[%d]):skip allocation... 
", __FUNCTION__, __LINE__); + CLOGI("INFO(%s[%d]):m_dequeuedBufCount(%d) == m_reqBufCount(%d) - m_minUndequeuedBufCount(%d)", + __FUNCTION__, __LINE__, m_dequeuedBufCount, m_reqBufCount, m_minUndequeuedBufCount); + CLOGD("DEBUG(%s[%d]):-- dump buffer status --", __FUNCTION__, __LINE__); + dump(); + ret = INVALID_OPERATION; + + goto func_exit; + } +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):before dequeueBuffer() " + "m_reqBufCount=%d, m_dequeuedBufCount=%d, m_minUndequeuedBufCount=%d", + __FUNCTION__, __LINE__, + m_reqBufCount, m_dequeuedBufCount, m_minUndequeuedBufCount); +#endif + + if (m_allocator->dequeueBuffer( + &bufHandle, + bufferFd, + (char **)bufferAddr, + &isLocked) != NO_ERROR) { + CLOGE("ERR(%s[%d]):dequeueBuffer failed", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_indexOffset < 0 || VIDEO_MAX_FRAME < (m_reqBufCount + m_indexOffset)) { + CLOGE("ERR(%s[%d]):abnormal value [m_indexOffset=%d, m_reqBufCount=%d]", + __FUNCTION__, __LINE__, m_indexOffset, m_reqBufCount); + ret = BAD_VALUE; + goto func_exit; + } + + for (int index = m_indexOffset; index < m_reqBufCount + m_indexOffset; index++) { + if (m_buffer[index].addr[0] != bufferAddr[0]) { + continue; + } else { + bufferIndex = index; + isExistedBuffer = true; +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGI("INFO(%s[%d]):bufferIndex(%d) found!", __FUNCTION__, __LINE__, bufferIndex); +#endif + break; + } + } + + if (isExistedBuffer == false) { + CLOGI("INFO(%s[%d]):not existedBuffer!", __FUNCTION__, __LINE__); + if (m_allocator->cancelBuffer(bufHandle) != 0) { + CLOGE("ERR(%s[%d]):could not cancelBuffer [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); + } + ret = BAD_VALUE; + goto func_exit; + } + + if (bufferIndex < 0 || VIDEO_MAX_FRAME <= bufferIndex) { + CLOGE("ERR(%s[%d]):abnormal value [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); + ret = BAD_VALUE; + goto func_exit; + } + + priv_handle = private_handle_t::dynamicCast(*bufHandle); + + if (m_hasMetaPlane == true) { + switch (m_buffer[bufferIndex].planeCount) { + case 3: + m_buffer[bufferIndex].fd[1] = priv_handle->fd1; + m_buffer[bufferIndex].addr[1] = (char *)bufferAddr[1]; + /* no break; */ + case 2: + m_buffer[bufferIndex].fd[0] = priv_handle->fd; + m_buffer[bufferIndex].addr[0] = (char *)bufferAddr[0]; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):invalid m_buffer[%d].planeCount(%d) with metaPlane, assert!!!!", + __FUNCTION__, __LINE__, bufferIndex, m_buffer[bufferIndex].planeCount); + break; + } + } else { + switch (m_buffer[bufferIndex].planeCount) { + case 3: + m_buffer[bufferIndex].fd[2] = priv_handle->fd2; + m_buffer[bufferIndex].addr[2] = (char *)bufferAddr[2]; + /* no break; */ + case 2: + m_buffer[bufferIndex].fd[1] = priv_handle->fd1; + m_buffer[bufferIndex].addr[1] = (char *)bufferAddr[1]; + /* no break; */ + case 1: + m_buffer[bufferIndex].fd[0] = priv_handle->fd; + m_buffer[bufferIndex].addr[0] = (char *)bufferAddr[0]; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):invalid m_buffer[%d].planeCount(%d) without metaPlane, assert!!!!", + __FUNCTION__, __LINE__, bufferIndex, m_buffer[bufferIndex].planeCount); + } + } + + m_handleIsLocked[bufferIndex] = isLocked; + + *bufIndex = bufferIndex; + m_handle[bufferIndex] = bufHandle; + m_dequeuedBufCount++; + m_buffer[bufferIndex].status.position = EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL; + m_buffer[bufferIndex].status.permission = EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE; +#endif /* NOT 
USE_GRALLOC_BUFFER_COLLECTOR */ + +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):-- dump buffer status --", __FUNCTION__, __LINE__); + dump(); + CLOGI("INFO(%s[%d]):-- OUT -- m_dequeuedBufCount=%d, m_minUndequeuedBufCount=%d", + __FUNCTION__, __LINE__, m_dequeuedBufCount, m_minUndequeuedBufCount); +#endif + +func_exit: + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +#ifdef USE_GRALLOC_REUSE_SUPPORT +bool GrallocExynosCameraBufferManager::m_cancelReuseBuffer(int bufIndex, bool isReuse) +{ + bool ret = false; + bool found = false; + List::iterator r; + int item = -1; + int maxCount = -1; + + if (isReuse == true) { + m_availableBufferIndexQLock.lock(); + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + if (bufIndex == *r) { + found = true; + break; + } + } + + if (found == true) { + CLOGI("INFO(%s[%d]):bufIndex=%d is already in (available state)", + __FUNCTION__, __LINE__, bufIndex); + m_availableBufferIndexQLock.unlock(); + CLOGI("INFO(%s[%d]):cancelReuse not available buffer is founded [bufIndex=%d]", __FUNCTION__, __LINE__, bufIndex); + ret = true; + return ret; + } else { + m_availableBufferIndexQ.push_back(m_buffer[bufIndex].index); + } + m_availableBufferIndexQLock.unlock(); + + m_handleIsLocked[bufIndex] = true; + m_buffer[bufIndex].status.position = EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL; + m_buffer[bufIndex].status.permission = EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE; + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + m_collectedBufferCount++; +#endif + ret = true; + } + + return ret; +} +#endif + +#ifdef USE_GRALLOC_REUSE_SUPPORT +status_t GrallocExynosCameraBufferManager::cancelBuffer(int bufIndex, bool isReuse) +#else +status_t GrallocExynosCameraBufferManager::cancelBuffer(int bufIndex) +#endif +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + Mutex::Autolock lock(m_lock); + + List::iterator r; + bool found = false; +#ifdef USE_GRALLOC_REUSE_SUPPORT + bool reuseRet = false; +#endif + + if (bufIndex < 0 || m_reqBufCount + m_indexOffset <= bufIndex) { + CLOGE("ERR(%s[%d]):buffer Index in out of bound [bufIndex=%d]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_handleIsLocked[bufIndex] == false) { + CLOGD("DEBUG(%s[%d]):buffer [bufIndex=%d] already free", __FUNCTION__, __LINE__, bufIndex); + return ret; + } + +#ifdef USE_GRALLOC_REUSE_SUPPORT + reuseRet = m_cancelReuseBuffer(bufIndex, isReuse); + if (reuseRet == true) { + goto func_exit; + } +#endif + + if (m_allocator->cancelBuffer(m_handle[bufIndex]) != 0) { + CLOGE("ERR(%s[%d]):could not cancel buffer [bufIndex=%d]", __FUNCTION__, __LINE__, bufIndex); + goto func_exit; + } + m_dequeuedBufCount--; + m_handle[bufIndex] = NULL; + m_handleIsLocked[bufIndex] = false; + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + m_availableBufferIndexQLock.lock(); + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + if (bufIndex == *r) { + found = true; + break; + } + } + + if (found == true) { + CLOGI("INFO(%s[%d]):bufIndex=%d is already in (available state)", + __FUNCTION__, __LINE__, bufIndex); + m_availableBufferIndexQLock.unlock(); + goto func_exit; + } + m_availableBufferIndexQ.push_back(m_buffer[bufIndex].index); + 
m_availableBufferIndexQLock.unlock(); + +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):-- dump buffer status --", __FUNCTION__, __LINE__); + dump(); +#endif + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +void GrallocExynosCameraBufferManager::deinit(void) +{ + CLOGD("DEBUG(%s[%d]):IN.." , __FUNCTION__, __LINE__); + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + /* declare thread will stop */ + m_stopBufferCollector = true; +#endif + + ExynosCameraBufferManager::deinit(); + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + /* thread stop */ + m_bufferCollector->requestExitAndWait(); + + /* after thread end, reset m_stopBufferCollector as default */ + m_stopBufferCollector = false; +#endif + + CLOGD("DEBUG(%s[%d]):OUT..", __FUNCTION__, __LINE__); +} + +status_t GrallocExynosCameraBufferManager::resetBuffers(void) +{ + CLOGD("DEBUG(%s[%d]):IN.." , __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + /* declare thread will stop */ + m_stopBufferCollector = true; +#endif + + ret = ExynosCameraBufferManager::resetBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosCameraBufferManager::resetBuffers()", __FUNCTION__, __LINE__); + } + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + /* thread stop */ + m_bufferCollector->requestExitAndWait(); + + /* after thread end, reset m_stopBufferCollector as default */ + m_stopBufferCollector = false; +#endif + + CLOGD("DEBUG(%s[%d]):OUT..", __FUNCTION__, __LINE__); + + return ret; +} + +status_t GrallocExynosCameraBufferManager::setBufferCount(int bufferCount) +{ + CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = resetBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):resetBuffers() failed", __FUNCTION__, __LINE__); + goto func_exit; + } + + ret = m_allocator->setBufferCount(bufferCount); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_allocator->setBufferCount(m_bufferCount(%d) -> %d)", + __FUNCTION__, __LINE__, m_bufferCount, bufferCount); + goto func_exit; + } + + m_bufferCount = bufferCount; + +func_exit: + + return ret; +} + +int GrallocExynosCameraBufferManager::getBufferCount(void) +{ + CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + return m_bufferCount; +} + +int GrallocExynosCameraBufferManager::getBufStride(void) +{ + ALOGI("INFO(%s):bufStride=%d", __FUNCTION__, m_bufStride); + return m_bufStride; +} + +void GrallocExynosCameraBufferManager::printBufferState(void) +{ + for (int i = 0; i < m_allocatedBufCount; i++) { + CLOGI("INFO(%s[%d]):m_buffer[%d].fd[0]=%d, position=%d, permission=%d, lock=%d]", + __FUNCTION__, __LINE__, i, m_buffer[i].fd[0], + m_buffer[i].status.position, m_buffer[i].status.permission, m_handleIsLocked[i]); + } + + return; +} + +void GrallocExynosCameraBufferManager::printBufferState(int bufIndex, int planeIndex) +{ + CLOGI("INFO(%s[%d]):m_buffer[%d].fd[%d]=%d, .status.permission=%d, lock=%d]", + __FUNCTION__, __LINE__, bufIndex, planeIndex, m_buffer[bufIndex].fd[planeIndex], + m_buffer[bufIndex].status.permission, m_handleIsLocked[bufIndex]); + + return; +} + +sp GrallocExynosCameraBufferManager::getGraphicBuffer(int index) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + sp graphicBuffer; + + int width, height, stride; + + int planeCount = 0; + + if ((index < 0) || (index >= m_allowedMaxBufCount)) { + CLOGE("ERR(%s[%d]):Buffer index error (%d/%d)", __FUNCTION__, __LINE__, index, m_allowedMaxBufCount); + goto done; + } + + if (m_graphicBufferAllocator.getSize(&width, &height, &stride) != NO_ERROR) { + 
CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.getSize(%d) fail", __FUNCTION__, __LINE__, index); + goto done; + } + + if (m_graphicBufferAllocator.setSize(width, height, m_bufStride) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.setSize(%d) fail", __FUNCTION__, __LINE__, index); + goto done; + } + + if (m_graphicBufferAllocator.setGrallocUsage(m_allocator->getGrallocUsage()) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.setGrallocUsage(%d) fail", __FUNCTION__, __LINE__, index); + goto done; + } + + planeCount = m_buffer[index].planeCount; + + if (m_hasMetaPlane == true) { + planeCount--; + } + + graphicBuffer = m_graphicBufferAllocator.alloc(index, planeCount, m_buffer[index].fd, m_buffer[index].addr, m_buffer[index].size); + if (graphicBuffer == 0) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.alloc(%d) fail", __FUNCTION__, __LINE__, index); + goto done; + } + +done: + EXYNOS_CAMERA_BUFFER_OUT(); + + return graphicBuffer; +} + +#ifdef USE_GRALLOC_BUFFER_COLLECTOR +bool GrallocExynosCameraBufferManager::m_bufferCollectorThreadFunc(void) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + buffer_handle_t *bufHandle = NULL; + int bufferFd[3] = {0}; + void *bufferAddr[3] = {NULL}; + + int stride = 0; + int bufferIndex = -1; + + const private_handle_t *priv_handle; + bool isExistedBuffer = false; + bool isLocked = false; + uint8_t tryCount = 0; + + if (m_stopBufferCollector == true) { + CLOGD("DEBUG(%s[%d]):m_stopBufferCollector == true. so, just return. m_collectedBufferCount(%d)", + __FUNCTION__, __LINE__, m_collectedBufferCount); + return false; + } + + if (m_collectedBufferCount >= MAX_BUFFER_COLLECT_COUNT || + m_dequeuedBufCount >= (m_allowedMaxBufCount - m_indexOffset - m_minUndequeuedBufCount) + ) { +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]): bufferCollector just return. m_collectedBufferCount(%d) m_dequeuedBufCount(%d)", + __FUNCTION__, __LINE__, m_collectedBufferCount, m_dequeuedBufCount); +#endif + goto EXIT; + } + + /* Blocking Function : + If the Gralloc buffer queue can not give the available buffer, + it will be blocked until the buffer which is being rendered + is released. 
+ */ + ret = m_allocator->dequeueBuffer(&bufHandle, + bufferFd, + (char **) bufferAddr, + &isLocked); + if (ret == NO_INIT) { + CLOGW("WARN(%s[%d]):BufferQueue is abandoned!", __FUNCTION__, __LINE__); + return false; + } else if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):dequeueBuffer failed, dequeue(%d), collected(%d)", + __FUNCTION__, __LINE__, m_dequeuedBufCount, m_collectedBufferCount); + goto EXIT; + } else if (bufHandle == NULL) { + CLOGE("ERR(%s[%d]):Buffer handle is NULL, dequeue(%d), collected(%d)", + __FUNCTION__, __LINE__, m_dequeuedBufCount, m_collectedBufferCount); + goto EXIT; + } + + if (m_indexOffset < 0 + || VIDEO_MAX_FRAME < (m_reqBufCount + m_indexOffset)) { + CLOGE("ERR(%s[%d]):abnormal value [m_indexOffset=%d, m_reqBufCount=%d]", + __FUNCTION__, __LINE__, m_indexOffset, m_reqBufCount); + goto EXIT; + } + + for (int index = m_indexOffset; index < m_reqBufCount + m_indexOffset; index++) { + if (m_buffer[index].addr[0] != bufferAddr[0]) { + continue; + } else { + bufferIndex = index; + isExistedBuffer = true; +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGI("INFO(%s[%d]):bufferIndex(%d) found!", __FUNCTION__, __LINE__, bufferIndex); +#endif + break; + } + } + + if (isExistedBuffer == false) { + CLOGI("INFO(%s[%d]):not existedBuffer!", __FUNCTION__, __LINE__); + if (m_allocator->cancelBuffer(bufHandle) != 0) { + CLOGE("ERR(%s[%d]):could not cancelBuffer [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); + } + goto EXIT; + } + + if (m_stopBufferCollector == true) { + CLOGD("DEBUG(%s[%d]):m_stopBufferCollector == true. so, just cancel. m_collectedBufferCount(%d)", + __FUNCTION__, __LINE__, m_collectedBufferCount); + + if (m_allocator->cancelBuffer(bufHandle) != 0) { + CLOGE("ERR(%s[%d]):could not cancelBuffer [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); + } + return false; + } + + if (bufferIndex < 0 || VIDEO_MAX_FRAME <= bufferIndex) { + CLOGE("ERR(%s[%d]):abnormal value [bufferIndex=%d]", + __FUNCTION__, __LINE__, bufferIndex); + goto EXIT; + } + + priv_handle = private_handle_t::dynamicCast(*bufHandle); + + { + /* + * this is mutex for race-condition with cancelBuffer() + * problem scenario. + * a. cancelBuffer() : cancelBuffer done -> context switch -> + * b. m_preDequeueThreadFunc(): dequeueBuffer done -> m_handleIsLocked[bufferIndex] = true -> context switch -> + * c. cancelBuffer() : m_handleIsLocked[bufferIndex] = false + * d. and next cancelBuffer() is fail, and lost that buffer forever. 
+ */ + + m_lock.lock(); + + if (m_hasMetaPlane == true) { + switch (m_buffer[bufferIndex].planeCount) { + case 3: + m_buffer[bufferIndex].fd[1] = priv_handle->fd1; + m_buffer[bufferIndex].addr[1] = (char *)bufferAddr[1]; + /* no break; */ + case 2: + m_buffer[bufferIndex].fd[0] = priv_handle->fd; + m_buffer[bufferIndex].addr[0] = (char *)bufferAddr[0]; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):invalid m_buffer[%d].planeCount(%d) with metaPlane, assert!!!!", + __FUNCTION__, __LINE__, bufferIndex, m_buffer[bufferIndex].planeCount); + break; + } + } else { + switch (m_buffer[bufferIndex].planeCount) { + case 3: + m_buffer[bufferIndex].fd[2] = priv_handle->fd2; + m_buffer[bufferIndex].addr[2] = (char *)bufferAddr[2]; + /* no break; */ + case 2: + m_buffer[bufferIndex].fd[1] = priv_handle->fd1; + m_buffer[bufferIndex].addr[1] = (char *)bufferAddr[1]; + /* no break; */ + case 1: + m_buffer[bufferIndex].fd[0] = priv_handle->fd; + m_buffer[bufferIndex].addr[0] = (char *)bufferAddr[0]; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):invalid m_buffer[%d].planeCount(%d) without metaPlane, assert!!!!", + __FUNCTION__, __LINE__, bufferIndex, m_buffer[bufferIndex].planeCount); + } + } + + m_handleIsLocked[bufferIndex] = isLocked; + + m_handle[bufferIndex] = bufHandle; + m_dequeuedBufCount++; + m_buffer[bufferIndex].status.position = EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL; + m_buffer[bufferIndex].status.permission = EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE; + +#ifdef EXYNOS_CAMERA_BUFFER_TRACE + CLOGD("DEBUG(%s[%d]):-- dump buffer status --", __FUNCTION__, __LINE__); + dump(); + CLOGI("INFO(%s[%d]):-- OUT -- m_dequeuedBufCount=%d, m_minUndequeuedBufCount=%d", + __FUNCTION__, __LINE__, m_dequeuedBufCount, m_minUndequeuedBufCount); + CLOGD("DEBUG(%s[%d]):Pre-dequeue buffer(%d)", + __FUNCTION__, __LINE__, bufferIndex); +#endif + + m_collectedBufferCount ++; + + m_lock.unlock(); + } + +EXIT: + while ((m_collectedBufferCount >= MAX_BUFFER_COLLECT_COUNT + || m_dequeuedBufCount >= (m_allowedMaxBufCount - m_indexOffset - m_minUndequeuedBufCount)) + && m_stopBufferCollector == false) { + usleep(BUFFER_COLLECTOR_WAITING_TIME); + } + + return true; +} + +status_t GrallocExynosCameraBufferManager::m_getCollectedBuffer(int *bufIndex) +{ + status_t ret = NO_ERROR; + List::iterator r; + int currentBufferIndex = -1; + + if (m_collectedBufferCount < 1) { + CLOGE("ERR(%s[%d]):Gralloc buffer collector has no Buffer", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + } else { + m_availableBufferIndexQLock.lock(); + + for (r = m_availableBufferIndexQ.begin(); r != m_availableBufferIndexQ.end(); r++) { + /* Found the collected buffer */ + if (m_isCollectedBuffer(*r) == true) { + currentBufferIndex = *r; + break; + } + } + + m_availableBufferIndexQLock.unlock(); + } + + if (currentBufferIndex > -1) { + *bufIndex = currentBufferIndex; + m_collectedBufferCount --; + CLOGV("INFO(%s[%d]):Get buffer(%d) from gralloc buffer collector, available count(%d)", + __FUNCTION__, __LINE__, currentBufferIndex, m_collectedBufferCount); + } else { + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):Failed to get available gralloc buffer from buffer collector, available count(%d)", + __FUNCTION__, __LINE__, m_collectedBufferCount); + } + + return ret; +} + +bool GrallocExynosCameraBufferManager::m_isCollectedBuffer(int bufferIndex) +{ + bool ret = false; + bool isValidPosition = false; + bool isValidPermission = false; + + switch (m_buffer[bufferIndex].status.position) { + case 
EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL: + isValidPosition = true; + break; + case EXYNOS_CAMERA_BUFFER_POSITION_NONE: + case EXYNOS_CAMERA_BUFFER_POSITION_IN_DRIVER: + case EXYNOS_CAMERA_BUFFER_POSITION_IN_SERVICE: + default: + isValidPosition = false; + break; + } + + switch (m_buffer[bufferIndex].status.permission) { + case EXYNOS_CAMERA_BUFFER_PERMISSION_AVAILABLE: + isValidPermission = true; + break; + case EXYNOS_CAMERA_BUFFER_PERMISSION_NONE: + case EXYNOS_CAMERA_BUFFER_PERMISSION_IN_PROCESS: + default: + isValidPermission = false; + break; + } + + ret = isValidPosition && isValidPermission; + return ret; +} +#endif /* USE_GRALLOC_BUFFER_COLLECTOR */ +ExynosCameraFence::ExynosCameraFence() +{ + m_fenceType = EXYNOS_CAMERA_FENCE_TYPE_BASE; + + m_frameCount = -1; + m_index = -1; + + m_acquireFence = -1; + m_releaseFence = -1; + + m_fence = 0; + + m_flagSwfence = false; +} + +ExynosCameraFence::ExynosCameraFence( + enum EXYNOS_CAMERA_FENCE_TYPE fenceType, + int frameCount, + int index, + int acquireFence, + int releaseFence) +{ + /* default setting */ + m_fenceType = EXYNOS_CAMERA_FENCE_TYPE_BASE; + + m_frameCount = -1; + m_index = -1; + + m_acquireFence = -1; + m_releaseFence = -1; + + m_fence = 0; + m_flagSwfence = false; + + /* we will set from here */ + if (fenceType <= EXYNOS_CAMERA_FENCE_TYPE_BASE || + EXYNOS_CAMERA_FENCE_TYPE_MAX <= fenceType) { + ALOGE("ERR(%s[%d]):invalid fenceType(%d), frameCount(%d), index(%d)", + __FUNCTION__, __LINE__, fenceType, frameCount, index); + return; + } + + m_fenceType = fenceType; + + m_frameCount = frameCount; + m_index = index; + + m_acquireFence = acquireFence; + m_releaseFence = releaseFence; + + if (0 <= m_acquireFence || 0 <= m_releaseFence) { + ALOGV("DEBUG(%s[%d]):fence(%d):m_acquireFence(%d), m_releaseFence(%d)", + __FUNCTION__, __LINE__, m_frameCount, m_acquireFence, m_releaseFence); + } + +#ifdef USE_SW_FENCE + m_flagSwfence = true; +#endif + + if (m_flagSwfence == true) { + switch (m_fenceType) { + case EXYNOS_CAMERA_FENCE_TYPE_ACQUIRE: + m_fence = new Fence(acquireFence); + break; + case EXYNOS_CAMERA_FENCE_TYPE_RELEASE: + m_fence = new Fence(releaseFence); + break; + default: + ALOGE("ERR(%s[%d]):invalid m_fenceType(%d), m_frameCount(%d), m_index(%d)", + __FUNCTION__, __LINE__, m_fenceType, m_frameCount, m_index); + break; + } + } +} + +ExynosCameraFence::~ExynosCameraFence() +{ + /* delete sp addr */ + m_fence = 0; +#ifdef FORCE_CLOSE_ACQUIRE_FD + static uint64_t closeCnt = 0; + if(m_acquireFence >= FORCE_CLOSE_ACQUIRE_FD_THRESHOLD) { + if(closeCnt++ % 1000 == 0) { + ALOGW("ERR(%s[%d]):Attempt to close acquireFence[%d], %d th close.", + __FUNCTION__, __LINE__, m_acquireFence, closeCnt); + } + ::close(m_acquireFence); + } +#endif +} + +int ExynosCameraFence::getFenceType(void) +{ + return m_fenceType; +} + +int ExynosCameraFence::getFrameCount(void) +{ + return m_frameCount; +} + +int ExynosCameraFence::getIndex(void) +{ + return m_index; +} + +int ExynosCameraFence::getAcquireFence(void) +{ + return m_acquireFence; +} + +int ExynosCameraFence::getReleaseFence(void) +{ + return m_releaseFence; +} + +bool ExynosCameraFence::isValid(void) +{ + bool ret = false; + + if (m_flagSwfence == true) { + if (m_fence == NULL) { + ALOGE("ERR(%s[%d]):m_fence == NULL. 
so, fail", __FUNCTION__, __LINE__); + ret = false; + } else { + ret = m_fence->isValid(); + } + } else { + switch (m_fenceType) { + case EXYNOS_CAMERA_FENCE_TYPE_ACQUIRE: + if (0 <= m_acquireFence) + ret = true; + break; + case EXYNOS_CAMERA_FENCE_TYPE_RELEASE: + if (0 <= m_releaseFence) + ret = true; + break; + default: + ALOGE("ERR(%s[%d]):invalid m_fenceType(%d), m_frameCount(%d), m_index(%d)", + __FUNCTION__, __LINE__, m_fenceType, m_frameCount, m_index); + break; + } + } + + return ret; +} + +status_t ExynosCameraFence::wait(int time) +{ + status_t ret = NO_ERROR; + + if (this->isValid() == false) { + ALOGE("ERR(%s[%d]):this->isValid() == false. so, fail!! frameCount(%d), index(%d), fencType(%d)", + __FUNCTION__, __LINE__, m_frameCount, m_index, m_fenceType); + return INVALID_OPERATION; + } + + if (m_flagSwfence == false) { + ALOGW("WARN(%s[%d]):m_flagSwfence == false. so, fail!! frameCount(%d), index(%d), fencType(%d)", + __FUNCTION__, __LINE__, m_frameCount, m_index, m_fenceType); + + return INVALID_OPERATION; + } + + int waitTime = time; + if (waitTime < 0) + waitTime = 1000; /* wait 1 sec */ + + int fenceFd = -1; + + switch (m_fenceType) { + case EXYNOS_CAMERA_FENCE_TYPE_ACQUIRE: + fenceFd = m_acquireFence; + break; + case EXYNOS_CAMERA_FENCE_TYPE_RELEASE: + fenceFd = m_releaseFence; + break; + default: + ALOGE("ERR(%s[%d]):invalid m_fenceType(%d), m_frameCount(%d), m_index(%d)", + __FUNCTION__, __LINE__, m_fenceType, m_frameCount, m_index); + break; + } + + ret = m_fence->wait(waitTime); + if (ret == TIMED_OUT) { + ALOGE("ERR(%s[%d]):Fence timeout. so, fail!! fenceFd(%d), frameCount(%d), index(%d), fencType(%d)", + __FUNCTION__, __LINE__, fenceFd, m_frameCount, m_index, m_fenceType); + + return INVALID_OPERATION; + } else if (ret != OK) { + ALOGE("ERR(%s[%d]):Fence wait error. so, fail!! fenceFd(%d), frameCount(%d), index(%d), fencType(%d)", + __FUNCTION__, __LINE__, fenceFd, m_frameCount, m_index, m_fenceType); + + return INVALID_OPERATION; + } + + return ret; +} + +ServiceExynosCameraBufferManager::ServiceExynosCameraBufferManager() +{ + ExynosCameraBufferManager::init(); + + m_allocator = new ExynosCameraStreamAllocator(); + + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + for (int bufIndex = 0; bufIndex < VIDEO_MAX_FRAME; bufIndex++) { + m_handle[bufIndex] = NULL; + m_handleIsLocked[bufIndex] = false; + } +} + +ServiceExynosCameraBufferManager::~ServiceExynosCameraBufferManager() +{ + ExynosCameraBufferManager::deinit(); +} + +status_t ServiceExynosCameraBufferManager::registerBuffer( + int frameCount, + buffer_handle_t *handle, + int acquireFence, + int releaseFence, + enum EXYNOS_CAMERA_BUFFER_POSITION position) +{ + EXYNOS_CAMERA_BUFFER_IN(); + /* + * this->putBuffer(index, position) has same lock. + * so, don't lock here + */ + //Mutex::Autolock lock(m_lock); + + status_t ret = NO_ERROR; + ExynosCameraFence *fence = NULL; + + int index = -1; + + ret = getIndexByHandle(handle, &index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getIndexByHandle error (%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + ret = putBuffer(index, position); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):putBuffer(%d) error (%d)", __FUNCTION__, __LINE__, index, ret); + goto func_exit; + } + + m_lock.lock(); + ret = m_registerBuffer(&handle, index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_setBuffer fail", __FUNCTION__, __LINE__); + } + + /* + * Wait release fence, before give buffer to h/w. + * Save acquire fence, this will give to h/w. 
+ */ + fence = new ExynosCameraFence( + ExynosCameraFence::EXYNOS_CAMERA_FENCE_TYPE_ACQUIRE, + frameCount, + index, + acquireFence, + releaseFence); + + m_pushFence(&m_fenceList, fence); + m_lock.unlock(); + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ServiceExynosCameraBufferManager::getIndexByHandle(buffer_handle_t *handle, int *index) +{ + EXYNOS_CAMERA_BUFFER_IN(); + status_t ret = OK; + int emptyIndex = -2; + int bufIndex = -2; + bool flag = false; + + if (handle == NULL) + return BAD_VALUE; + + if (m_indexOffset < 0 || VIDEO_MAX_FRAME < (m_reqBufCount + m_indexOffset)) { + CLOGE("ERR(%s[%d]):abnormal value [m_indexOffset=%d, m_reqBufCount=%d]", + __FUNCTION__, __LINE__, m_indexOffset, m_reqBufCount); + return BAD_VALUE; + } + + for (int i = m_indexOffset; i < m_reqBufCount + m_indexOffset; i++) { + if (handle == m_handle[i]) { + emptyIndex = i; + CLOGV("DEBUG(%s[%d]):index(%d), (%p/%p)", __FUNCTION__, __LINE__, emptyIndex, handle , m_handle[i]); + flag = true; + break; + } + if (m_handle[i] == NULL) { + emptyIndex = i; + } + + } + bufIndex = emptyIndex; + + if (flag == false && bufIndex >= 0) { + CLOGD("DEBUG(%s[%d]): assigned new buffer handle(%p) Index(%d)", __FUNCTION__, __LINE__, m_handle[bufIndex], bufIndex); + } + + if ((bufIndex < 0) || (bufIndex >= m_allowedMaxBufCount + m_indexOffset)) { + CLOGE("ERR(%s[%d]):Buffer index error (%d/%d)", __FUNCTION__, __LINE__, bufIndex, m_allowedMaxBufCount); + return BAD_VALUE; + } + + *index = bufIndex; + + EXYNOS_CAMERA_BUFFER_OUT(); + return ret; +} + +status_t ServiceExynosCameraBufferManager::getHandleByIndex(buffer_handle_t **handle, int index) +{ + EXYNOS_CAMERA_BUFFER_IN(); + if ((index < 0) || (index >= m_allowedMaxBufCount)) { + CLOGE("ERR(%s[%d]):Buffer index error (%d/%d)", __FUNCTION__, __LINE__, index, m_allowedMaxBufCount); + return BAD_VALUE; + } + + if (m_handle[index] == NULL) + CLOGE("ERR(%s):m_handle[%d] is NULL!!", __FUNCTION__, index); + + *handle = m_handle[index]; + + EXYNOS_CAMERA_BUFFER_OUT(); + return OK; +} + +void ServiceExynosCameraBufferManager::m_resetSequenceQ() +{ + Mutex::Autolock lock(m_availableBufferIndexQLock); + m_availableBufferIndexQ.clear(); + + /* + * service buffer is given by service. + * so, initial state is all un-available. + */ + /* + for (int bufIndex = 0; bufIndex < m_allocatedBufCount; bufIndex++) + m_availableBufferIndexQ.push_back(m_buffer[bufIndex].index); + */ +} + +status_t ServiceExynosCameraBufferManager::m_setAllocator(void *allocator) +{ + if (m_allocator == NULL) { + ALOGE("ERR(%s[%d]):m_allocator equals NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + m_allocator->init((camera3_stream_t *)allocator); + +func_exit: + return NO_ERROR; +} + +status_t ServiceExynosCameraBufferManager::m_compareFdOfBufferHandle(const buffer_handle_t* handle, const ExynosCameraBuffer* exynosBuf) { + bool fdCmp = true; + const private_handle_t * privHandle = NULL; + int fdCmpPlaneNum = 0; + + if (handle == NULL) + return BAD_VALUE; + if (exynosBuf == NULL) + return BAD_VALUE; + + privHandle = private_handle_t::dynamicCast(*handle); + if (privHandle == NULL) + return BAD_VALUE; + + fdCmpPlaneNum = (m_hasMetaPlane) ? 
exynosBuf->planeCount - 1 : exynosBuf->planeCount;
+ switch(fdCmpPlaneNum) {
+ /* Compare each plane's DMA fd */
+ case 3:
+ fdCmp = fdCmp && (exynosBuf->fd[2] == privHandle->fd2);
+ case 2:
+ fdCmp = fdCmp && (exynosBuf->fd[1] == privHandle->fd1);
+ case 1:
+ fdCmp = fdCmp && (exynosBuf->fd[0] == privHandle->fd);
+ break;
+ default:
+ CLOGE("ERR(%s[%d]):Invalid plane count [m_buffer.planeCount=%d, m_hasMetaPlane=%d]",
+ __FUNCTION__, __LINE__, exynosBuf->planeCount, m_hasMetaPlane);
+ return INVALID_OPERATION;
+ }
+
+ if(fdCmp == true) {
+ return NO_ERROR;
+ } else {
+ CLOGI("INFO(%s[%d]): same handle but different FD : index[%d] handleFd[%d/%d/%d] - bufFd[%d/%d/%d]"
+ , __FUNCTION__, __LINE__
+ , exynosBuf->index
+ , privHandle->fd, privHandle->fd1, privHandle->fd2
+ , exynosBuf->fd[0], exynosBuf->fd[1], exynosBuf->fd[2]);
+ return NAME_NOT_FOUND;
+ }
+}
+
+status_t ServiceExynosCameraBufferManager::m_registerBuffer(buffer_handle_t **handle, int index)
+{
+ status_t ret = OK;
+ int planeCount = 0;
+
+ if (handle == NULL)
+ return BAD_VALUE;
+
+ const private_handle_t * privHandle = private_handle_t::dynamicCast(**handle);
+ CLOGV("DEBUG(%s[%d]): register handle[%d/%d/%d] - buf[index:%d][%d/%d/%d]"
+ , __FUNCTION__, __LINE__
+ , privHandle->fd, privHandle->fd1, privHandle->fd2
+ , index, m_buffer[index].fd[0], m_buffer[index].fd[1], m_buffer[index].fd[2]);
+
+ if (m_handleIsLocked[index] == true) {
+ /* Check the contents of buffer_handle_t */
+ if(m_compareFdOfBufferHandle(*handle, &m_buffer[index]) == NO_ERROR) {
+ return NO_ERROR;
+ }
+ /* Otherwise, the DMA fd should be updated in the code below. */
+ }
+
+ m_handle[index] = *handle;
+
+ planeCount = m_buffer[index].planeCount;
+
+ if (m_hasMetaPlane == true)
+ planeCount--;
+
+ ret = m_allocator->lock(
+ handle,
+ m_buffer[index].fd,
+ m_buffer[index].addr,
+ &m_handleIsLocked[index], planeCount);
+ if (ret != 0)
+ CLOGE("ERR(%s[%d]):m_allocator->lock failed.. ", __FUNCTION__, __LINE__);
+
+ return ret;
+}
+
+status_t ServiceExynosCameraBufferManager::m_alloc(int bIndex, int eIndex)
+{
+ EXYNOS_CAMERA_BUFFER_IN();
+
+ status_t ret = OK;
+ CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__);
+
+ for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) {
+ if (updateStatus(
+ bufIndex,
+ 0,
+ EXYNOS_CAMERA_BUFFER_POSITION_IN_SERVICE,
+ EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) {
+ CLOGE("ERR(%s[%d]):setStatus failed [bufIndex=%d, position=SERVICE, permission=NONE]",
+ __FUNCTION__, __LINE__, bufIndex);
+ ret = INVALID_OPERATION;
+ break;
+ }
+ }
+
+ /*
+ * Service buffers are provided by the camera service,
+ * so every buffer starts out un-available.
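+ * (m_alloc() therefore only marks each index as POSITION_IN_SERVICE /
+ * PERMISSION_NONE and clears the available-index queue; an index only becomes
+ * usable again once the service hands a buffer back through registerBuffer().)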
+ */ + m_availableBufferIndexQLock.lock(); + m_availableBufferIndexQ.clear(); + m_availableBufferIndexQLock.unlock(); + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ServiceExynosCameraBufferManager::m_free(int bIndex, int eIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + CLOGD("DEBUG(%s[%d]):IN -- dump buffer status --", __FUNCTION__, __LINE__); + dump(); + + for (int bufIndex = bIndex; bufIndex < eIndex; bufIndex++) { + if (m_handleIsLocked[bufIndex] == false) { + CLOGD("DEBUG(%s[%d]):buffer [bufIndex=%d] already free", __FUNCTION__, __LINE__, bufIndex); + continue; + } + + m_removeFence(&m_fenceList, bufIndex); + + m_handle[bufIndex] = NULL; + m_handleIsLocked[bufIndex] = false; + + if (updateStatus( + bufIndex, + 0, + EXYNOS_CAMERA_BUFFER_POSITION_NONE, + EXYNOS_CAMERA_BUFFER_PERMISSION_NONE) != NO_ERROR) { + CLOGE("ERR(%s[%d]):setStatus failed [bIndex=%d, position=NONE, permission=NONE]", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_graphicBufferAllocator.free(bufIndex) != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_graphicBufferAllocator.free(%d) fail", + __FUNCTION__, __LINE__, bufIndex); + ret = INVALID_OPERATION; + goto func_exit; + } + } + +func_exit: + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ServiceExynosCameraBufferManager::m_increase(__unused int increaseCount) +{ + CLOGD("DEBUG(%s[%d]):allocMode(%d) is invalid. Do nothing", __FUNCTION__, __LINE__, m_allocMode); + return INVALID_OPERATION; +} + +status_t ServiceExynosCameraBufferManager::m_decrease(void) +{ + return INVALID_OPERATION; +} + +status_t ServiceExynosCameraBufferManager::m_putBuffer(__unused int bufIndex) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + CLOGV("DEBUG(%s[%d]):no effect" , __FUNCTION__, __LINE__); + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ServiceExynosCameraBufferManager::m_getBuffer(int *bufIndex, int *acquireFence, int *releaseFence) +{ + EXYNOS_CAMERA_BUFFER_IN(); + + status_t ret = NO_ERROR; + + ExynosCameraFence *ptrFence = NULL; + int frameCount = -1; + + /* this is default value */ + *acquireFence = -1; + *releaseFence = -1; + + ptrFence = m_popFence(&m_fenceList); + if (ptrFence == NULL) { + CLOGE("DEBUG(%s[%d]):m_popFence() fail", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto done; + } + + *bufIndex = ptrFence->getIndex(); + if (*bufIndex < 0) { + CLOGE("ERR(%s[%d]):*bufIndex(%d) < 0", __FUNCTION__, __LINE__, *bufIndex); + ret = BAD_VALUE; + goto done; + } + + frameCount = ptrFence->getFrameCount(); + if (frameCount < 0) { + CLOGE("ERR(%s[%d]):frameCount(%d) < 0", __FUNCTION__, __LINE__, frameCount); + ret = BAD_VALUE; + goto done; + } + +#ifdef USE_CAMERA2_USE_FENCE + if (ptrFence != NULL && ret == NO_ERROR) { +#ifdef USE_SW_FENCE + /* wait before give buffer to hardware */ + if (ptrFence->isValid() == true) { + ret = m_waitFence(ptrFence); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_waitFence() fail", __FUNCTION__, __LINE__); + goto done; + } + } else { + CLOGV("DEBUG(%s[%d]):Fence is invalid. 
framecount=%d", __FUNCTION__, __LINE__, frameCount); + } +#else + /* give fence to H/W */ + *acquireFence = ptrFence->getAcquireFence(); + *releaseFence = ptrFence->getReleaseFence(); +#endif + } +#endif + + if (m_allocator->unlock(m_handle[*bufIndex]) != 0) { + ALOGE("ERR(%s):grallocHal->unlock failed", __FUNCTION__); + return INVALID_OPERATION; + } + +done: + if (ptrFence != NULL) { + delete ptrFence; + ptrFence = NULL; + } + + EXYNOS_CAMERA_BUFFER_OUT(); + + return ret; +} + +status_t ServiceExynosCameraBufferManager::m_waitFence(ExynosCameraFence *fence) +{ + status_t ret = NO_ERROR; + +#if 0 + /* reference code */ + sp bufferAcquireFence = new Fence(buffer->acquire_fence); + ret = bufferAcquireFence->wait(1000); /* 1 sec */ + if (ret == TIMED_OUT) { + ALOGE("ERR(%s[%d]):Fence timeout(%d)!!", __FUNCTION__, __LINE__, request->frame_number); + return INVALID_OPERATION; + } else if (ret != OK) { + CLOGE("ERR(%s[%d]):Waiting on Fence error(%d)!!", __FUNCTION__, __LINE__, request->frame_number); + return INVALID_OPERATION; + } +#endif + + if (fence == NULL) { + CLOGE("DEBUG(%s[%d]):fence == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (fence->isValid() == false) { + CLOGE("ERR(%s[%d]):fence(%d)->isValid() == false. so, fail", + __FUNCTION__, __LINE__, fence->getFrameCount()); + return INVALID_OPERATION; + } + + CLOGV("DEBUG(%s[%d]):Valid fence on frameCount(%d)", + __FUNCTION__, __LINE__, fence->getFrameCount()); + + + ret = fence->wait(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):fence(frameCount : %d)->wait() fail", + __FUNCTION__, __LINE__, fence->getFrameCount()); + return INVALID_OPERATION; + } else { + CLOGV("DEBUG(%s[%d]):fence(frameCount : %d)->wait() succeed", + __FUNCTION__, __LINE__, fence->getFrameCount()); + } + + return ret; +} + +void ServiceExynosCameraBufferManager::m_pushFence(List *list, ExynosCameraFence *fence) +{ + Mutex::Autolock l(m_fenceListLock); + + list->push_back(fence); +} + +ExynosCameraFence *ServiceExynosCameraBufferManager::m_popFence(List *list) +{ + ExynosCameraFence *curFence = NULL; + List::iterator r; + + Mutex::Autolock l(m_fenceListLock); + + if (list->empty()) { + CLOGE("ERR(%s[%d]):list is empty", __FUNCTION__, __LINE__); + return NULL; + } + + r = list->begin()++; + curFence = *r; + list->erase(r); + + return curFence; +} + +status_t ServiceExynosCameraBufferManager::m_removeFence(List *list, int index) +{ + ExynosCameraFence *curFence = NULL; + List::iterator r; + + Mutex::Autolock l(m_fenceListLock); + + if (list->empty()) { + CLOGD("DEBUG(%s[%d]):list is empty", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + r = list->begin()++; + + do { + curFence = *r; + if (curFence == NULL) { + ALOGE("ERR(%s):curFence is empty", __FUNCTION__); + return INVALID_OPERATION; + } + + if (curFence->getIndex() == index) { + CLOGV("DEBUG(%s[%d]):remove Fence(%d), frameCount(%d)", + __FUNCTION__, __LINE__, index, curFence->getFrameCount()); + + list->erase(r); + delete curFence; + curFence = NULL; + return NO_ERROR; + } + r++; + } while (r != list->end()); + + CLOGD("DEBUG(%s[%d]):Cannot find index(%d)", __FUNCTION__, __LINE__, index); + + return INVALID_OPERATION; +} +} // namespace android diff --git a/libcamera/common_v2/Buffers/ExynosCameraBufferManager.h b/libcamera/common_v2/Buffers/ExynosCameraBufferManager.h new file mode 100644 index 0000000..c537cea --- /dev/null +++ b/libcamera/common_v2/Buffers/ExynosCameraBufferManager.h @@ -0,0 +1,509 @@ +/* + * Copyright 2013, Samsung Electronics Co. 
LTD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*!
+ * \file ExynosCameraBufferManager.h
+ * \brief header file for ExynosCameraBufferManager
+ * \author Sunmi Lee(carrotsm.lee@samsung.com)
+ * \date 2013/07/17
+ *
+ */
+
+#ifndef EXYNOS_CAMERA_BUFFER_MANAGER_H__
+#define EXYNOS_CAMERA_BUFFER_MANAGER_H__
+
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+#include
+#include
+#include
+#include
+#include
+
+#include "gralloc_priv.h"
+
+#include "ExynosCameraConfig.h"
+#include "fimc-is-metadata.h"
+
+#include "ExynosCameraList.h"
+#include "ExynosCameraAutoTimer.h"
+#include "ExynosCameraBuffer.h"
+#include "ExynosCameraMemory.h"
+#include "ExynosCameraThread.h"
+
+namespace android {
+
+/* #define DUMP_2_FILE */
+/* #define EXYNOS_CAMERA_BUFFER_TRACE */
+
+#ifdef EXYNOS_CAMERA_BUFFER_TRACE
+#define EXYNOS_CAMERA_BUFFER_IN() CLOGD("DEBUG(%s[%d]):IN.." , __FUNCTION__, __LINE__)
+#define EXYNOS_CAMERA_BUFFER_OUT() CLOGD("DEBUG(%s[%d]):OUT..", __FUNCTION__, __LINE__)
+#else
+#define EXYNOS_CAMERA_BUFFER_IN() ((void *)0)
+#define EXYNOS_CAMERA_BUFFER_OUT() ((void *)0)
+#endif
+
+#ifdef USE_GRALLOC_BUFFER_COLLECTOR
+#define MAX_BUFFER_COLLECT_COUNT (6)
+#define BUFFER_COLLECTOR_WAITING_TIME (10000) /* 10 msec */
+#endif
+
+// Hack: Close the fence FD if the fd is larger than the specified threshold
+// Currently, Joon's fence FD is not closed properly
+#define FORCE_CLOSE_ACQUIRE_FD
+#define FORCE_CLOSE_ACQUIRE_FD_THRESHOLD 700
+
+
+typedef enum buffer_manager_type {
+ BUFFER_MANAGER_ION_TYPE = 0,
+ BUFFER_MANAGER_HEAP_BASE_TYPE = 1,
+ BUFFER_MANAGER_GRALLOC_TYPE = 2,
+ BUFFER_MANAGER_SERVICE_GRALLOC_TYPE = 3,
+ BUFFER_MANAGER_INVALID_TYPE,
+} buffer_manager_type_t;
+
+typedef enum buffer_manager_allocation_mode {
+ BUFFER_MANAGER_ALLOCATION_ATONCE = 0, /* alloc() : allocate all buffers at once */
+ BUFFER_MANAGER_ALLOCATION_ONDEMAND = 1, /* alloc() : allocate reqCount buffers, getBuffer() : increase buffers within limits */
+ BUFFER_MANAGER_ALLOCATION_SILENT = 2, /* alloc() : same as ONDEMAND, increase buffers in background */
+ BUFFER_MANAGER_ALLOCATION_INVALID_MODE,
+} buffer_manager_allocation_mode_t;
+
+typedef ExynosCameraList reuseBufList_t;
+
+class ExynosCameraBufferManager {
+protected:
+ ExynosCameraBufferManager();
+
+public:
+ static ExynosCameraBufferManager *createBufferManager(buffer_manager_type_t type);
+ virtual ~ExynosCameraBufferManager();
+
+ status_t create(const char *name, void *defaultAllocator);
+ status_t create(const char *name, int cameraId, void *defaultAllocator);
+
+ void init(void);
+ virtual void deinit(void);
+ virtual status_t resetBuffers(void);
+
+ status_t setAllocator(void *allocator);
+
+ status_t alloc(void);
+
+ void setContigBufCount(int reservedMemoryCount);
+ int getContigBufCount(void);
+ status_t setInfo(
+ int planeCount,
+ unsigned int size[],
+ unsigned int bytePerLine[],
+ int reqBufCount,
+ bool createMetaPlane,
+ bool needMmap = false);
+ status_t setInfo(
+ int planeCount,
+ unsigned int size[], +
unsigned int bytePerLine[], + int reqBufCount, + int allowedMaxBufCount, + exynos_camera_buffer_type_t type, + bool createMetaPlane, + bool needMmap = false); + status_t setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int startBufIndex, + int reqBufCount, + bool createMetaPlane, + bool needMmap = false); + status_t setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int startBufIndex, + int reqBufCount, + int allowedMaxBufCount, + exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + bool needMmap = false); + + status_t setInfo( + int planeCount, + unsigned int size[], + unsigned int bytePerLine[], + int startBufIndex, + int reqBufCount, + int allowedMaxBufCount, + exynos_camera_buffer_type_t type, + buffer_manager_allocation_mode_t allocMode, + bool createMetaPlane, + int width, + int height, + int stride, + int pixelFormat, + bool needMmap = false); + + status_t putBuffer( + int bufIndex, + enum EXYNOS_CAMERA_BUFFER_POSITION position); + status_t getBuffer( + int *reqBufIndex, + enum EXYNOS_CAMERA_BUFFER_POSITION position, + struct ExynosCameraBuffer *buffer); + + status_t updateStatus( + int bufIndex, + int driverValue, + enum EXYNOS_CAMERA_BUFFER_POSITION position, + enum EXYNOS_CAMERA_BUFFER_PERMISSION permission); + status_t getStatus( + int bufIndex, + struct ExynosCameraBufferStatus *bufStatus); + + virtual status_t getIndexByHandle(buffer_handle_t *handle, int *index); + virtual status_t getHandleByIndex(buffer_handle_t **handle, int index); + virtual sp getGraphicBuffer(int index); + + virtual status_t registerBuffer( + int frameCount, + buffer_handle_t *handle, + int acquireFence, + int releaseFence, + enum EXYNOS_CAMERA_BUFFER_POSITION position); + + bool isAllocated(void); + bool isAvaliable(int bufIndex); + + void dump(void); + void dumpBufferInfo(void); + int getAllocatedBufferCount(void); + int getAvailableIncreaseBufferCount(void); + int getNumOfAvailableBuffer(void); + int getNumOfAvailableAndNoneBuffer(void); + void printBufferInfo( + const char *funcName, + const int lineNum, + int bufIndex, + int planeIndex); + void printBufferQState(void); + virtual void printBufferState(void); + virtual void printBufferState(int bufIndex, int planeIndex); + + virtual status_t increase(int increaseCount); +#ifdef USE_GRALLOC_REUSE_SUPPORT + virtual status_t cancelBuffer(int bufIndex, bool isReuse = false); +#else + virtual status_t cancelBuffer(int bufIndex); +#endif + virtual status_t setBufferCount(int bufferCount); + virtual int getBufferCount(void); + virtual int getBufStride(void); + +protected: + status_t m_free(void); + + status_t m_setDefaultAllocator(void *allocator); + status_t m_defaultAlloc(int bIndex, int eIndex, bool isMetaPlane); + status_t m_defaultFree(int bIndex, int eIndex, bool isMetaPlane); + + bool m_checkInfoForAlloc(void); + status_t m_createDefaultAllocator(bool isCached = false); + + virtual void m_resetSequenceQ(void); + + virtual status_t m_setAllocator(void *allocator) = 0; + virtual status_t m_alloc(int bIndex, int eIndex) = 0; + virtual status_t m_free(int bIndex, int eIndex) = 0; + + virtual status_t m_increase(int increaseCount) = 0; + virtual status_t m_decrease(void) = 0; + + virtual status_t m_putBuffer(int bufIndex) = 0; + virtual status_t m_getBuffer(int *bufIndex, int *acquireFence, int *releaseFence) = 0; + +protected: + bool m_flagAllocated; + int m_reservedMemoryCount; + int m_reqBufCount; + int m_allocatedBufCount; + int 
m_allowedMaxBufCount; + bool m_flagSkipAllocation; + bool m_isDestructor; + mutable Mutex m_lock; + bool m_flagNeedMmap; + + bool m_hasMetaPlane; + /* using internal allocator (ION) for MetaData plane */ + ExynosCameraIonAllocator *m_defaultAllocator; + bool m_isCreateDefaultAllocator; + struct ExynosCameraBuffer m_buffer[VIDEO_MAX_FRAME]; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + int m_cameraId; + List m_availableBufferIndexQ; + mutable Mutex m_availableBufferIndexQLock; + + buffer_manager_allocation_mode_t m_allocMode; + int m_indexOffset; + + ExynosCameraGraphicBufferAllocator m_graphicBufferAllocator; + +private: + typedef ExynosCameraThread allocThread; + + sp m_allocationThread; + bool m_allocationThreadFunc(void); +}; + +class InternalExynosCameraBufferManager : public ExynosCameraBufferManager { +public: + InternalExynosCameraBufferManager(); + virtual ~InternalExynosCameraBufferManager(); + + status_t increase(int increaseCount); + +protected: + status_t m_setAllocator(void *allocator); + + status_t m_alloc(int bIndex, int eIndex); + status_t m_free(int bIndex, int eIndex); + + status_t m_increase(int increaseCount); + status_t m_decrease(void); + + status_t m_putBuffer(int bufIndex); + status_t m_getBuffer(int *bufIndex, int *acquireFence, int *releaseFence); +}; + +class MHBExynosCameraBufferManager : public ExynosCameraBufferManager { +/* do not use! deprecated class */ +public: + MHBExynosCameraBufferManager(); + virtual ~MHBExynosCameraBufferManager(); + + status_t allocMulti(); + status_t getHeapMemory( + int bufIndex, + int planeIndex, + camera_memory_t **heap); + +protected: + status_t m_setAllocator(void *allocator); + + status_t m_alloc(int bIndex, int eIndex); + status_t m_free(int bIndex, int eIndex); + + status_t m_increase(int increaseCount); + status_t m_decrease(void); + + status_t m_putBuffer(int bufIndex); + status_t m_getBuffer(int *bufIndex, int *acquireFence, int *releaseFence); + +private: + ExynosCameraMHBAllocator *m_allocator; + camera_memory_t *m_heap[VIDEO_MAX_FRAME][EXYNOS_CAMERA_BUFFER_MAX_PLANES]; + int m_numBufsHeap; +}; + +class GrallocExynosCameraBufferManager : public ExynosCameraBufferManager { +public: + GrallocExynosCameraBufferManager(); + virtual ~GrallocExynosCameraBufferManager(); + + void deinit(void); + status_t resetBuffers(void); +#ifdef USE_GRALLOC_REUSE_SUPPORT + status_t cancelBuffer(int bufIndex, bool isReuse = false); +#else + status_t cancelBuffer(int bufIndex); +#endif + status_t setBufferCount(int bufferCount); + int getBufferCount(void); + int getBufStride(void); + void printBufferState(void); + void printBufferState(int bufIndex, int planeIndex); + + sp getGraphicBuffer(int index); + +protected: + status_t m_setAllocator(void *allocator); + + status_t m_alloc(int bIndex, int eIndex); + status_t m_free(int bIndex, int eIndex); + + status_t m_increase(int increaseCount); + status_t m_decrease(void); + + status_t m_putBuffer(int bufIndex); + status_t m_getBuffer(int *bufIndex, int *acquireFence, int *releaseFence); + +#ifdef USE_GRALLOC_REUSE_SUPPORT + bool m_cancelReuseBuffer(int bufIndex, bool isReuse); +#endif + +private: + ExynosCameraGrallocAllocator *m_allocator; + buffer_handle_t *m_handle[VIDEO_MAX_FRAME]; + bool m_handleIsLocked[VIDEO_MAX_FRAME]; + int m_dequeuedBufCount; + int m_minUndequeuedBufCount; + int m_bufferCount; + int m_bufStride; +#ifdef USE_GRALLOC_BUFFER_COLLECTOR + typedef ExynosCameraThread grallocBufferThread; + + int m_collectedBufferCount; + bool m_stopBufferCollector; + + sp m_bufferCollector; + 
bool m_bufferCollectorThreadFunc(void);
+
+ status_t m_getCollectedBuffer(int *bufIndex);
+ bool m_isCollectedBuffer(int bufferIndex);
+#endif
+};
+
+class ExynosCameraFence {
+public:
+ enum EXYNOS_CAMERA_FENCE_TYPE {
+ EXYNOS_CAMERA_FENCE_TYPE_BASE = 0,
+ EXYNOS_CAMERA_FENCE_TYPE_ACQUIRE,
+ EXYNOS_CAMERA_FENCE_TYPE_RELEASE,
+ EXYNOS_CAMERA_FENCE_TYPE_MAX,
+ };
+
+private:
+ ExynosCameraFence();
+
+public:
+ ExynosCameraFence(
+ enum EXYNOS_CAMERA_FENCE_TYPE fenceType,
+ int frameCount,
+ int index,
+ int acquireFence,
+ int releaseFence);
+
+ virtual ~ExynosCameraFence();
+
+ int getFenceType(void);
+ int getFrameCount(void);
+ int getIndex(void);
+ int getAcquireFence(void);
+ int getReleaseFence(void);
+
+ bool isValid(void);
+ status_t wait(int time = -1);
+
+private:
+ enum EXYNOS_CAMERA_FENCE_TYPE m_fenceType;
+
+ int m_frameCount;
+ int m_index;
+
+ int m_acquireFence;
+ int m_releaseFence;
+
+ sp m_fence;
+
+ bool m_flagSwfence;
+};
+
+class ServiceExynosCameraBufferManager : public ExynosCameraBufferManager {
+public:
+ ServiceExynosCameraBufferManager();
+ virtual ~ServiceExynosCameraBufferManager();
+
+ status_t getIndexByHandle(buffer_handle_t *handle, int *index);
+ status_t getHandleByIndex(buffer_handle_t **handle, int index);
+
+ /*
+ * The H/W fence sequence is:
+ * 1. On putBuffer() (called by processCaptureRequest()),
+ * save the acquire_fence and release_fence values.
+ * S/W fence : create a Fence object from the acquire_fence of the output_buffer.
+ *
+ * 2. On getBuffer(),
+ * H/W fence : save acquire_fence and release_fence into the ExynosCameraBuffer.
+ * S/W fence : wait on the Fence object created at step 1.
+ *
+ * (steps 3 ~ 4 happen in ExynosCameraNode::m_qBuf())
+ * 3. During H/W qBuf,
+ * give ExynosCameraBuffer.acquireFence (obtained at step 2) to v4l2:
+ * v4l2_buffer.flags = V4L2_BUF_FLAG_USE_SYNC;
+ * v4l2_buffer.reserved = ExynosCameraBuffer.acquireFence;
+ * 4. After H/W qBuf,
+ * v4l2_buffer.reserved has been changed to the release_fence value, so
+ * ExynosCameraBuffer.releaseFence = static_cast(v4l2_buffer.reserved)
+ *
+ * 5. (step 5 happens in ExynosCamera3::m_setResultBufferInfo())
+ * After H/W dqBuf we reach handlePreview() and can set the final values:
+ * result_buffer_info_t.acquire_fence = -1 (NOT the original ExynosCameraBuffer.acquireFence)
+ * result_buffer_info_t.release_fence = ExynosCameraBuffer.releaseFence (obtained from the driver at step 3)
+ * (the service will look at this release_fence.)
+ *
+ * 6. Skip bufferManager::putBuffer().
+ * (putBuffer() is not needed because the buffer came from the service.)
+ *
+ * 7. Repeat from step 1.
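+ *
+ * As a rough sketch of steps 3 ~ 4 (illustrative only; the variable names are
+ * made up and the real code lives in ExynosCameraNode::m_qBuf()):
+ *
+ * struct v4l2_buffer buf; // filled in by the node as usual
+ * buf.flags |= V4L2_BUF_FLAG_USE_SYNC;
+ * buf.reserved = exynosBuffer.acquireFence; // step 3
+ * ioctl(videoFd, VIDIOC_QBUF, &buf); // driver rewrites 'reserved'
+ * exynosBuffer.releaseFence = (int)buf.reserved; // step 4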
+ */ + status_t registerBuffer( + int frameCount, + buffer_handle_t *handle, + int acquireFence, + int releaseFence, + enum EXYNOS_CAMERA_BUFFER_POSITION position); + +protected: + virtual void m_resetSequenceQ(void); + + status_t m_setAllocator(void *allocator); + status_t m_alloc(int bIndex, int eIndex); + status_t m_free(int bIndex, int eIndex); + + status_t m_increase(int increaseCount); + status_t m_decrease(void); + + status_t m_registerBuffer(buffer_handle_t **handle, int index); + + status_t m_putBuffer(int bufIndex); + status_t m_getBuffer(int *bufIndex, int *acquireFence, int *releaseFence); + + virtual status_t m_waitFence(ExynosCameraFence *fence); + + virtual void m_pushFence(List *list, ExynosCameraFence *fence); + virtual ExynosCameraFence *m_popFence(List *list); + virtual status_t m_removeFence(List *list, int index); + virtual status_t m_compareFdOfBufferHandle(const buffer_handle_t* handle, const ExynosCameraBuffer* exynosBuf); + +private: + ExynosCameraStreamAllocator *m_allocator; + buffer_handle_t *m_handle[VIDEO_MAX_FRAME]; + bool m_handleIsLocked[VIDEO_MAX_FRAME]; + + List m_fenceList; + mutable Mutex m_fenceListLock; +}; +} +#endif diff --git a/libcamera/common_v2/ExynosCamera1MetadataConverter.cpp b/libcamera/common_v2/ExynosCamera1MetadataConverter.cpp new file mode 100644 index 0000000..821da54 --- /dev/null +++ b/libcamera/common_v2/ExynosCamera1MetadataConverter.cpp @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ExynosCameraMetadataConverter" + +#include "ExynosCamera1MetadataConverter.h" + +namespace android { +#define SET_BIT(x) (1 << x) + +ExynosCamera1MetadataConverter::ExynosCamera1MetadataConverter(int cameraId, __unused ExynosCameraParameters *parameters) +{ + ExynosCameraActivityControl *activityControl = NULL; + + m_cameraId = cameraId; +} + +ExynosCamera1MetadataConverter::~ExynosCamera1MetadataConverter() +{ +} + +status_t ExynosCamera1MetadataConverter::constructStaticInfo(int cameraId, camera_metadata_t **cameraInfo) +{ + status_t ret = NO_ERROR; + uint8_t flashAvailable = 0x0; + + ALOGD("DEBUG(%s[%d]):ID(%d)", __FUNCTION__, __LINE__, cameraId); + struct ExynosSensorInfoBase *sensorStaticInfo = NULL; + CameraMetadata info; + + sensorStaticInfo = createExynosCamera1SensorInfo(cameraId); + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]): sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* andorid.flash static attributes */ + if (sensorStaticInfo->flashModeList & FLASH_MODE_TORCH) { + flashAvailable = 0x1; + } else { + flashAvailable = 0x0; + } + + ret = info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_FLASH_INFO_AVAILABLE update failed(%d)", __FUNCTION__, ret); + + *cameraInfo = info.release(); + + return OK; +} +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCamera1MetadataConverter.h b/libcamera/common_v2/ExynosCamera1MetadataConverter.h new file mode 100644 index 0000000..24f48f9 --- /dev/null +++ b/libcamera/common_v2/ExynosCamera1MetadataConverter.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef EXYNOS_CAMERA_METADATA_CONVERTER_H__ +#define EXYNOS_CAMERA_METADATA_CONVERTER_H__ + +#include +#include +#include +#include + +#include "ExynosCameraParameters.h" + +#include "ExynosCameraSensorInfo.h" + +namespace android { +enum rectangle_index { + X1, + Y1, + X2, + Y2, + RECTANGLE_MAX_INDEX, +}; + +class ExynosCameraMetadataConverter : public virtual RefBase { +public: + ExynosCameraMetadataConverter(){}; + ~ExynosCameraMetadataConverter(){}; +}; + +class ExynosCamera1MetadataConverter : public virtual ExynosCameraMetadataConverter { +public: + ExynosCamera1MetadataConverter(int cameraId, ExynosCameraParameters *parameters); + ~ExynosCamera1MetadataConverter(); + static status_t constructStaticInfo(int cameraId, camera_metadata_t **info); +private: + int m_cameraId; +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCamera3Interface.cpp b/libcamera/common_v2/ExynosCamera3Interface.cpp new file mode 100644 index 0000000..763bf5b --- /dev/null +++ b/libcamera/common_v2/ExynosCamera3Interface.cpp @@ -0,0 +1,1177 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. 
LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3Interface" +#include + +#include "ExynosCamera3Interface.h" +#include "ExynosCameraAutoTimer.h" + +namespace android { + +static int HAL3_camera_device_open(const struct hw_module_t* module, + const char *id, + struct hw_device_t** device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(id); + enum CAMERA_STATE state; + FILE *fp = NULL; + int ret = 0; + + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + bool hasFlash = false; + char flashFilePath[100] = {'\0',}; + + /* Validation check */ + ALOGI("INFO(%s[%d]):camera(%d) in ======", __FUNCTION__, __LINE__, cameraId); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); + return -EINVAL; + } + + /* Check init thread state */ + if (g_thread) { + ret = pthread_join(g_thread, NULL); + if (ret != 0) { + ALOGE("ERR(%s[%d]):pthread_join failed with error code %d", __FUNCTION__, __LINE__, ret); + } + g_thread = 0; + } + + /* Setting status and check current status */ + state = CAMERA_OPENED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s[%d]):camera(%d) state(%d) is INVALID", __FUNCTION__, __LINE__, cameraId, state); + return -EUSERS; + } + + /* Create camera device */ + if (g_cam_device3[cameraId]) { + ALOGE("ERR(%s[%d]):returning existing camera ID(%d)", __FUNCTION__, __LINE__, cameraId); + *device = (hw_device_t *)g_cam_device3[cameraId]; + goto done; + } + + g_cam_device3[cameraId] = (camera3_device_t *)malloc(sizeof(camera3_device_t)); + if (!g_cam_device3[cameraId]) + return -ENOMEM; + + g_cam_openLock[cameraId].lock(); + g_cam_device3[cameraId]->common.tag = HARDWARE_DEVICE_TAG; + g_cam_device3[cameraId]->common.version = CAMERA_DEVICE_API_VERSION_3_3; + g_cam_device3[cameraId]->common.module = const_cast(module); + g_cam_device3[cameraId]->common.close = HAL3_camera_device_close; + g_cam_device3[cameraId]->ops = &camera_device3_ops; + + ALOGV("DEBUG(%s[%d]):open camera(%d)", __FUNCTION__, __LINE__, cameraId); + g_cam_device3[cameraId]->priv = new ExynosCamera3(cameraId, &g_cam_info[cameraId]); + *device = (hw_device_t *)g_cam_device3[cameraId]; + + ALOGI("INFO(%s[%d]):camera(%d) out from new g_cam_device3[%d]->priv()", + __FUNCTION__, __LINE__, cameraId, cameraId); + + g_cam_openLock[cameraId].unlock(); + +done: + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + + if (g_cam_info[cameraId]) { + metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + ALOGV("INFO(%s[%d]): cameraId(%d), flashAvailable.count(%d), flashAvailable.data.u8[0](%d)", + __FUNCTION__, cameraId, flashAvailable.count, flashAvailable.data.u8[0]); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + hasFlash = true; + } else { + hasFlash = false; + } + } + + /* Turn off torch and update torch status 
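+ * (the code below writes "0" to the torch sysfs node and clears g_cam_torchEnabled,
+ * then torch_mode_status_change() reports TORCH_MODE_STATUS_NOT_AVAILABLE so the
+ * framework does not treat the torch as usable while the camera is open)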
*/ + if(hasFlash && g_cam_torchEnabled[cameraId]) { + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open fail, ret(%d)", __FUNCTION__, __LINE__, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + fflush(fp); + fclose(fp); + + g_cam_torchEnabled[cameraId] = false; + } + } + + g_callbacks->torch_mode_status_change(g_callbacks, id, TORCH_MODE_STATUS_NOT_AVAILABLE); + + ALOGI("INFO(%s[%d]):camera(%d) out =====", __FUNCTION__, __LINE__, cameraId); + + return 0; +} + +static int HAL3_camera_device_close(struct hw_device_t* device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId; + int ret = OK; + enum CAMERA_STATE state; + char camid[10]; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + if (device) { + camera3_device_t *cam_device = (camera3_device_t *)device; + cameraId = obj(cam_device)->getCameraId(); + + ALOGV("DEBUG(%s[%d]):close camera(%d)", __FUNCTION__, __LINE__, cameraId); + + ret = obj(cam_device)->releaseDevice(); + if (ret) { + ALOGE("ERR(%s[%d]):initialize error!!", __FUNCTION__, __LINE__); + ret = BAD_VALUE; + } + + state = CAMERA_CLOSED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s[%d]):camera(%d) state(%d) is INVALID", + __FUNCTION__, __LINE__, cameraId, state); + return -1; + } + + g_cam_openLock[cameraId].lock(); + ALOGV("INFO(%s[%d]):camera(%d) open locked..", __FUNCTION__, __LINE__, cameraId); + g_cam_device3[cameraId] = NULL; + g_cam_openLock[cameraId].unlock(); + ALOGV("INFO(%s[%d]):camera(%d) open unlocked..", __FUNCTION__, __LINE__, cameraId); + + delete static_cast(cam_device->priv); + free(cam_device); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):close camera(%d)", __FUNCTION__, __LINE__, cameraId); + } + + /* Update torch status */ + g_cam_torchEnabled[cameraId] = false; + snprintf(camid, sizeof(camid), "%d\n", cameraId); + g_callbacks->torch_mode_status_change(g_callbacks, camid, + TORCH_MODE_STATUS_AVAILABLE_OFF); + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return ret; +} + +static int HAL3_camera_device_initialize(const struct camera3_device *dev, + const camera3_callback_ops_t *callback_ops) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int ret = OK; + uint32_t cameraId = obj(dev)->getCameraId(); + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + g_cam_configLock[cameraId].lock(); + + ALOGE("INFO(%s[%d]): dual cam_state[0](%d)", __FUNCTION__, __LINE__, cam_state[0]); + +#ifdef DUAL_CAMERA_SUPPORTED + if (cameraId != 0 && g_cam_device3[0] != NULL + && cam_state[0] != CAMERA_NONE && cam_state[0] != CAMERA_CLOSED) { + ret = obj(dev)->setDualMode(true); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):camera(%d) set dual mode fail, ret(%d)", + __FUNCTION__, __LINE__, cameraId, ret); + } else { + ALOGI("INFO(%s[%d]):camera(%d) set dual mode)", + __FUNCTION__, __LINE__, cameraId); + } + } +#endif + + ret = obj(dev)->initilizeDevice(callback_ops); + if (ret) { + ALOGE("ERR(%s[%d]):initialize error!!", __FUNCTION__, __LINE__); + ret = BAD_VALUE; + } + g_cam_configLock[cameraId].unlock(); + + ALOGV("DEBUG(%s):set callback ops - %p", __FUNCTION__, callback_ops); + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return ret; +} + +static int 
HAL3_camera_device_configure_streams(const struct camera3_device *dev, + camera3_stream_configuration_t *stream_list) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int ret = OK; + uint32_t cameraId = obj(dev)->getCameraId(); + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + g_cam_configLock[cameraId].lock(); + ret = obj(dev)->configureStreams(stream_list); + if (ret) { + ALOGE("ERR(%s[%d]):configure_streams error!!", __FUNCTION__, __LINE__); + ret = BAD_VALUE; + } + g_cam_configLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return ret; +} + +static int HAL3_camera_device_register_stream_buffers(const struct camera3_device *dev, + const camera3_stream_buffer_set_t *buffer_set) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int ret = OK; + uint32_t cameraId = obj(dev)->getCameraId(); + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + g_cam_configLock[cameraId].lock(); + ret = obj(dev)->registerStreamBuffers(buffer_set); + if (ret) { + ALOGE("ERR(%s[%d]):register_stream_buffers error!!", __FUNCTION__, __LINE__); + ret = BAD_VALUE; + } + g_cam_configLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return ret; +} + +static const camera_metadata_t* HAL3_camera_device_construct_default_request_settings( + const struct camera3_device *dev, + int type) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + camera_metadata_t *request = NULL; + status_t res; + uint32_t cameraId = obj(dev)->getCameraId(); + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + g_cam_configLock[cameraId].lock(); + res = obj(dev)->construct_default_request_settings(&request, type); + if (res) { + ALOGE("ERR(%s[%d]):constructDefaultRequestSettings error!!", __FUNCTION__, __LINE__); + g_cam_configLock[cameraId].unlock(); + return NULL; + } + g_cam_configLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return request; +} + +static int HAL3_camera_device_process_capture_request(const struct camera3_device *dev, + camera3_capture_request_t *request) +{ + /* ExynosCameraAutoTimer autoTimer(__FUNCTION__); */ + + int ret = OK; + uint32_t cameraId = obj(dev)->getCameraId(); + ALOGV("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + g_cam_configLock[cameraId].lock(); + ret = obj(dev)->processCaptureRequest(request); + if (ret) { + ALOGE("ERR(%s[%d]):process_capture_request error(%d)!!", __FUNCTION__, __LINE__, ret); + ret = BAD_VALUE; + } + g_cam_configLock[cameraId].unlock(); + ALOGV("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return ret; +} + +static int HAL3_camera_device_flush(const struct camera3_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int ret = 0; + uint32_t cameraId = obj(dev)->getCameraId(); + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + g_cam_configLock[cameraId].lock(); + ret = obj(dev)->flush(); + if (ret) { + ALOGE("ERR(%s[%d]):flush error(%d)!!", __FUNCTION__, __LINE__, ret); + ret = BAD_VALUE; + } + g_cam_configLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + return ret; +} + +static void HAL3_camera_device_get_metadata_vendor_tag_ops(const struct camera3_device *dev, + vendor_tag_query_ops_t* ops) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + if (dev == NULL) + ALOGE("ERR(%s[%d]):dev is NULL", __FUNCTION__, __LINE__); + + if (ops == NULL) + ALOGE("ERR(%s[%d]):ops is NULL", __FUNCTION__, __LINE__); + + 
ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); +} + +static void HAL3_camera_device_dump(const struct camera3_device *dev, int fd) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + if (dev == NULL) + ALOGE("ERR(%s[%d]):dev is NULL", __FUNCTION__, __LINE__); + + if (fd < 0) + ALOGE("ERR(%s[%d]):fd is Negative Value", __FUNCTION__, __LINE__); + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); +} + +/*************************************************************************** + * FUNCTION : get_camera_info + * + * DESCRIPTION: static function to query the numner of cameras + * + * PARAMETERS : none + * + * RETURN : the number of cameras pre-defined + ***************************************************************************/ +static int HAL_getNumberOfCameras() +{ + /* ExynosCameraAutoTimer autoTimer(__FUNCTION__); */ + int getNumOfCamera = sizeof(sCameraInfo) / sizeof(sCameraInfo[0]); + ALOGV("DEBUG(%s[%d]):Number of cameras(%d)", __FUNCTION__, __LINE__, getNumOfCamera); + return getNumOfCamera; +} + +static int HAL_getCameraInfo(int cameraId, struct camera_info *info) +{ + /* ExynosCameraAutoTimer autoTimer(__FUNCTION__); */ + status_t ret = NO_ERROR; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s[%d]):Invalid camera ID %d", __FUNCTION__, __LINE__, cameraId); + return -ENODEV; + } + + /* set facing and orientation */ + memcpy(info, &sCameraInfo[cameraId], sizeof(CameraInfo)); + + /* set device API version */ + info->device_version = CAMERA_DEVICE_API_VERSION_3_3; + + /* set camera_metadata_t if needed */ + if (info->device_version >= HARDWARE_DEVICE_API_VERSION(2, 0)) { + if (g_cam_info[cameraId] == NULL) { + ALOGV("DEBUG(%s[%d]):Return static information (%d)", __FUNCTION__, __LINE__, cameraId); + ret = ExynosCamera3MetadataConverter::constructStaticInfo(cameraId, &g_cam_info[cameraId]); + if (ret != 0) { + ALOGE("ERR(%s[%d]): static information is NULL", __FUNCTION__, __LINE__); + return -EINVAL; + } + info->static_camera_characteristics = g_cam_info[cameraId]; + } else { + ALOGV("DEBUG(%s[%d]):Reuse Return static information (%d)", __FUNCTION__, __LINE__, cameraId); + info->static_camera_characteristics = g_cam_info[cameraId]; + } + } + + /* set service arbitration (resource_cost, conflicting_devices, conflicting_devices_length */ + info->resource_cost = sCameraConfigInfo[cameraId].resource_cost; + info->conflicting_devices = sCameraConfigInfo[cameraId].conflicting_devices; + info->conflicting_devices_length = sCameraConfigInfo[cameraId].conflicting_devices_length; + ALOGV("INFO(%s info->resource_cost = %d ", __FUNCTION__, info->resource_cost); + if (info->conflicting_devices_length) { + for (size_t i = 0; i < info->conflicting_devices_length; i++) { + ALOGV("INFO(%s info->conflicting_devices = %s ", __FUNCTION__, info->conflicting_devices[i]); + } + } else { + ALOGV("INFO(%s info->conflicting_devices_length is zero ", __FUNCTION__); + } + + return NO_ERROR; +} + +static int HAL_set_callbacks(const camera_module_callbacks_t *callbacks) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + if (callbacks == NULL) + ALOGE("ERR(%s[%d]):dev is NULL", __FUNCTION__, __LINE__); + + g_callbacks = callbacks; + + return OK; +} + +static int HAL_open_legacy(const struct hw_module_t* module, const char* id, + uint32_t halVersion, struct hw_device_t** device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + int ret = 0; 
+ + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + if (module == NULL) { + ALOGE("ERR(%s[%d]):module is NULL", __FUNCTION__, __LINE__); + ret = -EINVAL; + } + + if (id == NULL) { + ALOGE("ERR(%s[%d]):id is NULL", __FUNCTION__, __LINE__); + ret = -EINVAL; + } + + if (device == NULL) { + ALOGE("ERR(%s[%d]):device is NULL", __FUNCTION__, __LINE__); + ret = -EINVAL; + } + + if (halVersion == 0) + ALOGE("ERR(%s[%d]):halVersion is Zero", __FUNCTION__, __LINE__); + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + +#ifdef USE_ONE_INTERFACE_FILE + if (!ret) + return HAL_camera_device_open(module, id, device); + else + return ret; +#else + return NO_ERROR; +#endif +} + +static int HAL_set_torch_mode(const char* camera_id, bool enabled) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(camera_id); + FILE *fp = NULL; + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + int ret = 0; + char flashFilePath[100] = {'\0',}; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s[%d]):Invalid camera ID %d", __FUNCTION__, __LINE__, cameraId); + return -EINVAL; + } + + /* Check the android.flash.info.available */ + /* If this camera device does not support flash, It have to return -ENOSYS */ + metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + ALOGV("DEBUG(%s[%d]): Flash metadata exist", __FUNCTION__, __LINE__); + } else { + ALOGE("ERR(%s[%d]): Can not find flash metadata", __FUNCTION__, __LINE__); + return -ENOSYS; + } + + ALOGI("INFO(%s[%d]): Current Camera State (state = %d)", __FUNCTION__, __LINE__, cam_state[cameraId]); + + /* Add the check the camera state that camera in use or not */ + if (cam_state[cameraId] > CAMERA_CLOSED) { + ALOGE("ERR(%s[%d]): Camera Device is busy (state = %d)", __FUNCTION__, __LINE__, cam_state[cameraId]); + g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_OFF); + return -EBUSY; + } + + /* Add the sysfs file read (sys/class/camera/flash/torch_flash) then set 0 or 1 */ + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open(%s) fail, ret(%d)", + __FUNCTION__, __LINE__, flashFilePath, fp); + return -ENOSYS; + } + + if (enabled) { + fwrite("1", sizeof(char), 1, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + } + + fflush(fp); + + ret = fclose(fp); + if (ret != 0) { + ALOGE("ERR(%s[%d]): file close failed(%d)", __FUNCTION__, __LINE__, ret); + } + + if (enabled) { + g_cam_torchEnabled[cameraId] = true; + g_callbacks->torch_mode_status_change(g_callbacks, + camera_id, TORCH_MODE_STATUS_AVAILABLE_ON); + } else { + g_cam_torchEnabled[cameraId] = false; + g_callbacks->torch_mode_status_change(g_callbacks, + camera_id, TORCH_MODE_STATUS_AVAILABLE_OFF); + } + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +void *init_func(__unused void *data) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + FILE *fp = NULL; + char name[64]; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + fp = fopen(INIT_MODULE_PATH, "r"); + if (fp == NULL) { + ALOGI("INFO(%s[%d]):module init file open fail, ret(%d)", 
__FUNCTION__, __LINE__, fp); + return NULL; + } + + if (fgets(name, sizeof(name), fp) == NULL) { + ALOGI("INFO(%s[%d]):failed to read init sysfs", __FUNCTION__, __LINE__); + } + + fclose(fp); + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + + return NULL; +} + +static int HAL_init() +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int ret = 0; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + ret = pthread_create(&g_thread, NULL, init_func, NULL); + if (ret) { + ALOGE("ERR(%s[%d]):pthread_create failed with error code %d", __FUNCTION__, __LINE__, ret); + } + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + + return OK; +} + +#ifdef USE_ONE_INTERFACE_FILE +static int HAL_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t** device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(id); + FILE *fp = NULL; + + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + bool hasFlash; + char flashFilePath[100] = {'\0',}; + +#ifdef BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA + if (cameraId == 0) { + return HAL_ext_camera_device_open_wrapper(module, id, device); + } +#endif + +#ifdef BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA + if (cameraId == 1) { + return HAL_ext_camera_device_open_wrapper(module, id, device); + } +#endif + +#if (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) +#else + enum CAMERA_STATE state; + int ret = 0; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); + return -EINVAL; + } + + /* Check init thread state */ + if (g_thread) { + ret = pthread_join(g_thread, NULL); + if (ret != 0) { + ALOGE("ERR(%s[%d]):pthread_join failed with error code %d", __FUNCTION__, __LINE__, ret); + } + g_thread = 0; + } + + state = CAMERA_OPENED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return -1; + } + + if ((unsigned int)cameraId < (sizeof(sCameraInfo) / sizeof(sCameraInfo[0]))) { + if (g_cam_device[cameraId]) { + ALOGE("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); + *device = (hw_device_t *)g_cam_device[cameraId]; + goto done; + } + + g_cam_device[cameraId] = (camera_device_t *)malloc(sizeof(camera_device_t)); + if (!g_cam_device[cameraId]) + return -ENOMEM; + + g_cam_openLock[cameraId].lock(); + g_cam_device[cameraId]->common.tag = HARDWARE_DEVICE_TAG; + g_cam_device[cameraId]->common.version = 1; + g_cam_device[cameraId]->common.module = const_cast(module); + g_cam_device[cameraId]->common.close = HAL_camera_device_close; + + g_cam_device[cameraId]->ops = &camera_device_ops; + + ALOGD("DEBUG(%s):open camera %s", __FUNCTION__, id); + g_cam_device[cameraId]->priv = new ExynosCamera(cameraId, g_cam_device[cameraId]); + *device = (hw_device_t *)g_cam_device[cameraId]; + ALOGI("INFO(%s[%d]):camera(%d) out from new g_cam_device[%d]->priv()", + __FUNCTION__, __LINE__, cameraId, cameraId); + + g_cam_openLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + } else { + ALOGE("DEBUG(%s):camera(%s) open fail - must front camera open first", + __FUNCTION__, id); + return -EINVAL; + } + +done: + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + + if (g_cam_info[cameraId]) { + 
metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + ALOGV("INFO(%s[%d]): cameraId(%d), flashAvailable.count(%d), flashAvailable.data.u8[0](%d)", + __FUNCTION__, cameraId, flashAvailable.count, flashAvailable.data.u8[0]); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + hasFlash = true; + } else { + hasFlash = false; + } + } + + if(hasFlash && g_cam_torchEnabled[cameraId]) { + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open fail, ret(%d)", __FUNCTION__, __LINE__, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + fflush(fp); + fclose(fp); + + g_cam_torchEnabled[cameraId] = false; + g_callbacks->torch_mode_status_change(g_callbacks, id, TORCH_MODE_STATUS_AVAILABLE_OFF); + } + } + + g_callbacks->torch_mode_status_change(g_callbacks, id, TORCH_MODE_STATUS_NOT_AVAILABLE); + + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +#endif /* (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) */ + + return 0; +} + +static int HAL_camera_device_close(struct hw_device_t* device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId; + enum CAMERA_STATE state; + +#if (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) +#else + ALOGI("INFO(%s[%d]): in", __FUNCTION__, __LINE__); + + if (device) { + camera_device_t *cam_device = (camera_device_t *)device; + cameraId = obj(cam_device)->getCameraId(); + + ALOGI("INFO(%s[%d]):camera(%d)", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_CLOSED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + g_cam_openLock[cameraId].lock(); + ALOGI("INFO(%s[%d]):camera(%d) locked..", __FUNCTION__, __LINE__, cameraId); + g_cam_device[cameraId] = NULL; + g_cam_openLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + delete static_cast(cam_device->priv); + free(cam_device); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d)", __FUNCTION__, __LINE__, cameraId); + } + + ALOGI("INFO(%s[%d]): out", __FUNCTION__, __LINE__); +#endif /* (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) */ + + return 0; +} + +static int HAL_camera_device_set_preview_window( + struct camera_device *dev, + struct preview_stream_ops *buf) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + ret = obj(dev)->setPreviewWindow(buf); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); + return ret; +} + +static void HAL_camera_device_set_callbacks(struct camera_device *dev, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void* user) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->setCallbacks(notify_cb, data_cb, data_cb_timestamp, + 
get_memory, + user); +} + +static void HAL_camera_device_enable_msg_type( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->enableMsgType(msg_type); +} + +static void HAL_camera_device_disable_msg_type( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->disableMsgType(msg_type); +} + +static int HAL_camera_device_msg_type_enabled( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->msgTypeEnabled(msg_type); +} + +static int HAL_camera_device_start_preview(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_PREVIEW; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + g_cam_previewLock[cameraId].lock(); + +#ifdef DUAL_CAMERA_SUPPORTED + if (cameraId != 0 && g_cam_device[0] != NULL + && cam_state[0] != CAMERA_NONE && cam_state[0] != CAMERA_CLOSED) { + ret = obj(dev)->setDualMode(true); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):camera(%d) set dual mode fail, ret(%d)", + __FUNCTION__, __LINE__, cameraId, ret); + } else { + ALOGI("INFO(%s[%d]):camera(%d) set dual mode)", + __FUNCTION__, __LINE__, cameraId); + } + } +#endif + + ret = obj(dev)->startPreview(); + ALOGV("INFO(%s[%d]):camera(%d) out from startPreview()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_previewLock[cameraId].unlock(); + + ALOGV("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + if (ret == OK) { + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out (startPreview succeeded)", + __FUNCTION__, __LINE__, cameraId); + } else { + ALOGI("INFO(%s[%d]):camera(%d) out (startPreview FAILED)", + __FUNCTION__, __LINE__, cameraId); + } + return ret; +} + +static void HAL_camera_device_stop_preview(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); +/* HACK : If camera in recording state, */ +/* CameraService have to call the stop_recording before the stop_preview */ +#if 1 + if (cam_state[cameraId] == CAMERA_RECORDING) { + ALOGE("ERR(%s[%d]):camera(%d) in RECORDING RUNNING state ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + ALOGE("ERR(%s[%d]):camera(%d) The stop_recording must be called " + "before the stop_preview ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + HAL_camera_device_stop_recording(dev); + ALOGE("ERR(%s[%d]):cameraId=%d out from stop_recording ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + + for (int i=0; i<30; i++) { + ALOGE("ERR(%s[%d]):camera(%d) The stop_recording must be called " + "before the stop_preview ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + } + ALOGE("ERR(%s[%d]):camera(%d) sleep 500ms for ---- INVALID ---- state", + __FUNCTION__, __LINE__, cameraId); + usleep(500000); /* to notify, sleep 500ms */ + } +#endif + state = CAMERA_PREVIEWSTOPPED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return; + } + + 
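+    /* From here the flow mirrors start_preview: take the per-camera preview lock,
+     * ask the ExynosCamera object to stop, and only then record the new state in
+     * cam_state[] under cam_stateLock. */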
g_cam_previewLock[cameraId].lock(); + + obj(dev)->stopPreview(); + ALOGI("INFO(%s[%d]):camera(%d) out from stopPreview()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_previewLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_preview_enabled(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->previewEnabled(); +} + +static int HAL_camera_device_store_meta_data_in_buffers( + struct camera_device *dev, + int enable) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->storeMetaDataInBuffers(enable); +} + +static int HAL_camera_device_start_recording(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RECORDING; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + g_cam_recordingLock[cameraId].lock(); + + ret = obj(dev)->startRecording(); + ALOGI("INFO(%s[%d]):camera(%d) out from startRecording()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_recordingLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + if (ret == OK) { + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out (startRecording succeeded)", + __FUNCTION__, __LINE__, cameraId); + } else { + ALOGI("INFO(%s[%d]):camera(%d) out (startRecording FAILED)", + __FUNCTION__, __LINE__, cameraId); + } + return ret; +} + +static void HAL_camera_device_stop_recording(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RECORDINGSTOPPED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return; + } + + g_cam_recordingLock[cameraId].lock(); + + obj(dev)->stopRecording(); + ALOGI("INFO(%s[%d]):camera(%d) out from stopRecording()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_recordingLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_recording_enabled(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->recordingEnabled(); +} + +static void HAL_camera_device_release_recording_frame(struct camera_device *dev, + const void *opaque) +{ + /* ExynosCameraAutoTimer autoTimer(__FUNCTION__); */ + + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->releaseRecordingFrame(opaque); +} + +static int HAL_camera_device_auto_focus(struct camera_device *dev) +{ + 
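+    /* Like the other simple entry points in this file (cancel_auto_focus, take_picture,
+     * cancel_picture, send_command, ...), this is a thin trampoline: obj(dev) recovers
+     * the ExynosCamera instance from dev->priv and the call is forwarded to it. */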
ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->autoFocus(); +} + +static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->cancelAutoFocus(); +} + +static int HAL_camera_device_take_picture(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->takePicture(); +} + +static int HAL_camera_device_cancel_picture(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->cancelPicture(); +} + +static int HAL_camera_device_set_parameters( + struct camera_device *dev, + const char *parms) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + String8 str(parms); + CameraParameters p(str); + return obj(dev)->setParameters(p); +} + +char *HAL_camera_device_get_parameters(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + String8 str; + +/* HACK : to avoid compile error */ +#if (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) + ALOGE("ERR(%s[%d]):invalid opertion on external camera", __FUNCTION__, __LINE__); +#else + CameraParameters parms = obj(dev)->getParameters(); + str = parms.flatten(); +#endif + return strdup(str.string()); +} + +static void HAL_camera_device_put_parameters( + struct camera_device *dev, + char *parms) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + free(parms); +} + +static int HAL_camera_device_send_command( + struct camera_device *dev, + int32_t cmd, + int32_t arg1, + int32_t arg2) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->sendCommand(cmd, arg1, arg2); +} + +static void HAL_camera_device_release(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RELEASED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return; + } + + g_cam_openLock[cameraId].lock(); + + obj(dev)->release(); + ALOGV("INFO(%s[%d]):camera(%d) out from release()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_openLock[cameraId].unlock(); + + ALOGV("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_dump(struct camera_device *dev, int fd) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->dump(fd); +} +#endif + +static void HAL_get_vendor_tag_ops(__unused vendor_tag_ops_t* ops) +{ + ALOGW("WARN(%s[%d]):empty operation", __FUNCTION__, __LINE__); +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCamera3Interface.h b/libcamera/common_v2/ExynosCamera3Interface.h new file mode 100644 index 0000000..fd3c8b3 --- /dev/null +++ b/libcamera/common_v2/ExynosCamera3Interface.h @@ -0,0 +1,493 @@ +/* + * Copyright (C) 2014, 
Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef EXYNOS_CAMERA3_SERVICE_INTERFACE_H +#define EXYNOS_CAMERA3_SERVICE_INTERFACE_H + + +#define USE_ONE_INTERFACE_FILE + +#include + +#include "hardware/camera3.h" +#include "system/camera_metadata.h" + +#ifdef USE_ONE_INTERFACE_FILE +#include "ExynosCameraFrameFactory.h" +#include "ExynosCamera.h" +#endif +#include "ExynosCamera3.h" +#include "ExynosCameraInterfaceState.h" +#include "pthread.h" + +#ifdef USE_ONE_INTERFACE_FILE +#define SET_METHOD(m) m : HAL_camera_device_##m +#endif +#define SET_METHOD3(m) m : HAL3_camera_device_##m + +#define MAX_NUM_OF_CAMERA 2 + +/* init camera module */ +#define INIT_MODULE_PATH "/sys/class/camera/rear/fw_update" + +namespace android { + +static CameraInfo sCameraInfo[] = { +#if !defined(BOARD_FRONT_CAMERA_ONLY_USE) + { + CAMERA_FACING_BACK, + BACK_ROTATION, /* orientation */ + }, +#endif + { + CAMERA_FACING_FRONT, + FRONT_ROTATION, /* orientation */ + } +}; + +/* This struct used in device3.3 service arbitration */ +struct CameraConfigInfo { + int resource_cost; + char** conflicting_devices; + size_t conflicting_devices_length; +}; + +const CameraConfigInfo sCameraConfigInfo[] = { +#if !defined(BOARD_FRONT_CAMERA_ONLY_USE) + { + 51, /* resoruce_cost : [0 , 100] */ + NULL, /* conflicting_devices : NULL, (char *[]){"1"}, (char *[]){"0", "1"} */ + 0, /* conflicting_devices_lenght : The length of the array in the conflicting_devices field */ + }, +#endif + { + 51, /* resoruce_cost : [0, 100] */ + NULL, /* conflicting_devices : NULL, (char *[]){"0"}, (char *[]){"0", "1"} */ + 0, /* conflicting_devices_lenght : The length of the array in the conflicting_devices field */ + } +}; + +#ifdef USE_ONE_INTERFACE_FILE +static camera_device_t *g_cam_device[MAX_NUM_OF_CAMERA]; +#endif +static camera3_device_t *g_cam_device3[MAX_NUM_OF_CAMERA]; +static camera_metadata_t *g_cam_info[MAX_NUM_OF_CAMERA] = {NULL, NULL}; +static const camera_module_callbacks_t *g_callbacks = NULL; + +//ExynosCameraRequestManager *m_exynosCameraRequestManager[MAX_NUM_OF_CAMERA]; +//const camera3_callback_ops_t *callbackOps; + +static Mutex g_cam_openLock[MAX_NUM_OF_CAMERA]; +static Mutex g_cam_configLock[MAX_NUM_OF_CAMERA]; +#ifdef USE_ONE_INTERFACE_FILE +static Mutex g_cam_previewLock[MAX_NUM_OF_CAMERA]; +static Mutex g_cam_recordingLock[MAX_NUM_OF_CAMERA]; +#endif +static bool g_cam_torchEnabled[MAX_NUM_OF_CAMERA] = {false, false}; +pthread_t g_thread; + +static inline ExynosCamera3 *obj(const struct camera3_device *dev) +{ + return reinterpret_cast(dev->priv); +}; + +/** + * Open camera device + */ +static int HAL3_camera_device_open(const struct hw_module_t* module, + const char *id, + struct hw_device_t** device); + +/** + * Close camera device + */ +static int HAL3_camera_device_close(struct hw_device_t* device); + +/** + * initialize + * One-time initialization to pass framework callback function pointers to the HAL. 
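+ * In the usual camera3 sequence this is the first call after a successful
+ * HAL3_camera_device_open(); streams are then set up via configure_streams()
+ * and per-capture work arrives through process_capture_request().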
+ */ +static int HAL3_camera_device_initialize(const struct camera3_device *dev, + const camera3_callback_ops_t *callback_ops); + +/** + * configure_streams + * Reset the HAL camera device processing pipeline and set up new input and output streams. + */ +static int HAL3_camera_device_configure_streams(const struct camera3_device *dev, + camera3_stream_configuration_t *stream_list); + +/** + * register_stream_buffers + * Register buffers for a given stream with the HAL device. + */ +static int HAL3_camera_device_register_stream_buffers(const struct camera3_device *dev, + const camera3_stream_buffer_set_t *buffer_set); + +/** + * construct_default_request_settings + * Create capture settings for standard camera use cases. + */ +static const camera_metadata_t* HAL3_camera_device_construct_default_request_settings( + const struct camera3_device *dev, + int type); + +/** + * process_capture_request + * Send a new capture request to the HAL. + */ +static int HAL3_camera_device_process_capture_request(const struct camera3_device *dev, + camera3_capture_request_t *request); + +/** + * flush + * Flush all currently in-process captures and all buffers in the pipeline on the given device. + */ +static int HAL3_camera_device_flush(const struct camera3_device *dev); + +/** + * get_metadata_vendor_tag_ops + * Get methods to query for vendor extension metadata tag information. + */ +static void HAL3_camera_device_get_metadata_vendor_tag_ops(const struct camera3_device *dev, + vendor_tag_query_ops_t* ops); + +/** + * dump + * Print out debugging state for the camera device. + */ +static void HAL3_camera_device_dump(const struct camera3_device *dev, int fd); + +/** + * Retrun the camera hardware info + */ +static int HAL_getCameraInfo(int cameraId, struct camera_info *info); + +/** + * Provide callback function pointers to the HAL module to inform framework + * of asynchronous camera module events. + */ +static int HAL_set_callbacks(const camera_module_callbacks_t *callbacks); + +/** + * Return number of the camera hardware + */ +static int HAL_getNumberOfCameras(); + +#ifdef USE_ONE_INTERFACE_FILE +static inline ExynosCamera *obj(struct camera_device *dev) +{ + return reinterpret_cast(dev->priv); +}; + +/** + * Open camera device + */ +static int HAL_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t** device); + +/** + * Close camera device + */ +static int HAL_camera_device_close(struct hw_device_t* device); + +/** + * Set the preview_stream_ops to which preview frames are sent + */ +static int HAL_camera_device_set_preview_window( + struct camera_device *dev, + struct preview_stream_ops *buf); + +/** + * Set the notification and data callbacks + */ +static void HAL_camera_device_set_callbacks( + struct camera_device *dev, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void* user); + +/** + * The following three functions all take a msg_type, which is a bitmask of + * the messages defined in include/ui/Camera.h + */ + +/** + * Enable a message, or set of messages. + */ +static void HAL_camera_device_enable_msg_type( + struct camera_device *dev, + int32_t msg_type); + +/** + * Disable a message, or a set of messages. 
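+ * Since msg_type is a bitmask, several messages can be cleared in one call,
+ * e.g. disableMsgType(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_VIDEO_FRAME).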
+ * + * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera + * HAL should not rely on its client to call releaseRecordingFrame() to + * release video recording frames sent out by the cameral HAL before and + * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL + * clients must not modify/access any video recording frame after calling + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). + */ +static void HAL_camera_device_disable_msg_type( + struct camera_device *dev, + int32_t msg_type); + +/** + * Query whether a message, or a set of messages, is enabled. Note that + * this is operates as an AND, if any of the messages queried are off, this + * will return false. + */ +static int HAL_camera_device_msg_type_enabled( + struct camera_device *dev, + int32_t msg_type); + +/** + * Start preview mode. + */ +static int HAL_camera_device_start_preview(struct camera_device *dev); + +/** + * Stop a previously started preview. + */ +static void HAL_camera_device_stop_preview(struct camera_device *dev); + +/** + * Returns true if preview is enabled. + */ +static int HAL_camera_device_preview_enabled(struct camera_device *dev); + +/** + * Request the camera HAL to store meta data or real YUV data in the video + * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If + * it is not called, the default camera HAL behavior is to store real YUV + * data in the video buffers. + * + * This method should be called before startRecording() in order to be + * effective. + * + * If meta data is stored in the video buffers, it is up to the receiver of + * the video buffers to interpret the contents and to find the actual frame + * data with the help of the meta data in the buffer. How this is done is + * outside of the scope of this method. + * + * Some camera HALs may not support storing meta data in the video buffers, + * but all camera HALs should support storing real YUV data in the video + * buffers. If the camera HAL does not support storing the meta data in the + * video buffers when it is requested to do do, INVALID_OPERATION must be + * returned. It is very useful for the camera HAL to pass meta data rather + * than the actual frame data directly to the video encoder, since the + * amount of the uncompressed frame data can be very large if video size is + * large. + * + * @param enable if true to instruct the camera HAL to store + * meta data in the video buffers; false to instruct + * the camera HAL to store real YUV data in the video + * buffers. + * + * @return OK on success. + */ +static int HAL_camera_device_store_meta_data_in_buffers( + struct camera_device *dev, + int enable); + +/** + * Start record mode. When a record image is available, a + * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding + * frame. Every record frame must be released by a camera HAL client via + * releaseRecordingFrame() before the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames, + * and the client must not modify/access any video recording frames. + */ +static int HAL_camera_device_start_recording(struct camera_device *dev); + +/** + * Stop a previously started recording. + */ +static void HAL_camera_device_stop_recording(struct camera_device *dev); + +/** + * Returns true if recording is enabled. 
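+ * In this HAL the wrapper simply forwards to ExynosCamera::recordingEnabled()
+ * (see HAL_camera_device_recording_enabled() in ExynosCamera3Interface.cpp).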
+ */ +static int HAL_camera_device_recording_enabled(struct camera_device *dev); + +/** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + * + * It is camera HAL client's responsibility to release video recording + * frames sent out by the camera HAL before the camera HAL receives a call + * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames. + */ +static void HAL_camera_device_release_recording_frame( + struct camera_device *dev, + const void *opaque); + +/** + * Start auto focus, the notification callback routine is called with + * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be + * called again if another auto focus is needed. + */ +static int HAL_camera_device_auto_focus(struct camera_device *dev); + +/** + * Cancels auto-focus function. If the auto-focus is still in progress, + * this function will cancel it. Whether the auto-focus is in progress or + * not, this function will return the focus position to the default. If + * the camera does not support auto-focus, this is a no-op. + */ +static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev); + +/** + * Take a picture. + */ +static int HAL_camera_device_take_picture(struct camera_device *dev); + +/** + * Cancel a picture that was started with takePicture. Calling this method + * when no picture is being taken is a no-op. + */ +static int HAL_camera_device_cancel_picture(struct camera_device *dev); + +/** + * Set the camera parameters. This returns BAD_VALUE if any parameter is + * invalid or not supported. + */ +static int HAL_camera_device_set_parameters( + struct camera_device *dev, + const char *parms); + +/** + * Return the camera parameters. + */ +char *HAL_camera_device_get_parameters(struct camera_device *dev); + +/** + * Release buffer that used by the camera parameters. + */ +static void HAL_camera_device_put_parameters( + struct camera_device *dev, + char *parms); + +/** + * Send command to camera driver. + */ +static int HAL_camera_device_send_command( + struct camera_device *dev, + int32_t cmd, + int32_t arg1, + int32_t arg2); + +/** + * Release the hardware resources owned by this object. Note that this is + * *not* done in the destructor. 
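+ * The framework is still expected to call the hw_device close() entry afterwards;
+ * in this HAL, HAL_camera_device_close() is what deletes the ExynosCamera instance
+ * and clears its g_cam_device[] slot.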
+ */ +static void HAL_camera_device_release(struct camera_device *dev); + +/** + * Dump state of the camera hardware + */ +static int HAL_camera_device_dump(struct camera_device *dev, int fd); +#endif + +static void HAL_get_vendor_tag_ops(vendor_tag_ops_t* ops); + +static int HAL_open_legacy(const struct hw_module_t* module, const char* id, uint32_t halVersion, struct hw_device_t** device); + +static int HAL_set_torch_mode(const char* camera_id, bool enabled); + +static int HAL_init(void); + +#ifdef USE_ONE_INTERFACE_FILE +static camera_device_ops_t camera_device_ops = { + SET_METHOD(set_preview_window), + SET_METHOD(set_callbacks), + SET_METHOD(enable_msg_type), + SET_METHOD(disable_msg_type), + SET_METHOD(msg_type_enabled), + SET_METHOD(start_preview), + SET_METHOD(stop_preview), + SET_METHOD(preview_enabled), + SET_METHOD(store_meta_data_in_buffers), + SET_METHOD(start_recording), + SET_METHOD(stop_recording), + SET_METHOD(recording_enabled), + SET_METHOD(release_recording_frame), + SET_METHOD(auto_focus), + SET_METHOD(cancel_auto_focus), + SET_METHOD(take_picture), + SET_METHOD(cancel_picture), + SET_METHOD(set_parameters), + SET_METHOD(get_parameters), + SET_METHOD(put_parameters), + SET_METHOD(send_command), + SET_METHOD(release), + SET_METHOD(dump), +}; +#endif + +static camera3_device_ops_t camera_device3_ops = { + SET_METHOD3(initialize), + SET_METHOD3(configure_streams), + NULL, + SET_METHOD3(construct_default_request_settings), + SET_METHOD3(process_capture_request), + SET_METHOD3(get_metadata_vendor_tag_ops), + SET_METHOD3(dump), + SET_METHOD3(flush), + {0} /* reserved for future use */ +}; + +static hw_module_methods_t mCameraHwModuleMethods = { + open : HAL3_camera_device_open +}; + +/* + * Required HAL header. + */ +extern "C" { + camera_module_t HAL_MODULE_INFO_SYM = { + common : { + tag : HARDWARE_MODULE_TAG, + module_api_version : CAMERA_MODULE_API_VERSION_2_4, + hal_api_version : HARDWARE_HAL_API_VERSION, + id : CAMERA_HARDWARE_MODULE_ID, + name : "Exynos Camera HAL3", + author : "Samsung Electronics Inc", + methods : &mCameraHwModuleMethods, + dso : NULL, + reserved : {0}, + }, + get_number_of_cameras : HAL_getNumberOfCameras, + get_camera_info : HAL_getCameraInfo, + set_callbacks : HAL_set_callbacks, + get_vendor_tag_ops : HAL_get_vendor_tag_ops, + open_legacy : HAL_open_legacy, + set_torch_mode : HAL_set_torch_mode, + init : HAL_init, + reserved : {0}, + }; +} + +}; // namespace android +#endif diff --git a/libcamera/common_v2/ExynosCameraAutoTimer.h b/libcamera/common_v2/ExynosCameraAutoTimer.h new file mode 100644 index 0000000..d13115c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraAutoTimer.h @@ -0,0 +1,136 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/*! 
+ * \file ExynosCameraAutoTimer.h + * \brief hearder file for ExynosCameraAutoTimer + * \author Sangwoo.Park(sw5771.park@samsung.com) + * \date 2013/06/18 + * + * Revision History: + * - 2013/06/18 : Sangwoo.Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef EXYNOS_CAMERA_AUTO_TIMER_H +#define EXYNOS_CAMERA_AUTO_TIMER_H + +#include +#include +#include + +#include +#include + +namespace android { + +class ExynosCameraDurationTimer { +public: + ExynosCameraDurationTimer() + { + memset(&m_startTime, 0x00, sizeof(struct timeval)); + memset(&m_stopTime, 0x00, sizeof(struct timeval)); + } + ~ExynosCameraDurationTimer() {} + + void start() + { + gettimeofday(&m_startTime, NULL); + }; + + void stop() + { + gettimeofday(&m_stopTime, NULL); + }; + + uint64_t durationMsecs() const + { + nsecs_t stop = ((nsecs_t)m_stopTime.tv_sec) * 1000LL + ((nsecs_t)m_stopTime.tv_usec) / 1000LL; + nsecs_t start = ((nsecs_t)m_startTime.tv_sec) * 1000LL + ((nsecs_t)m_startTime.tv_usec) / 1000LL; + + return stop - start; + }; + + uint64_t durationUsecs() const + { + nsecs_t stop = ((nsecs_t)m_stopTime.tv_sec) * 1000000LL + ((nsecs_t)m_stopTime.tv_usec); + nsecs_t start = ((nsecs_t)m_startTime.tv_sec) * 1000000LL + ((nsecs_t)m_startTime.tv_usec); + + return stop - start; + }; + +private: + struct timeval m_startTime; + struct timeval m_stopTime; +}; + +class ExynosCameraAutoTimer { +private: + ExynosCameraAutoTimer(void) + {} + +public: + inline ExynosCameraAutoTimer(char *strLog) + { + if (m_create(strLog) == false) + ALOGE("ERR(%s):m_create() fail", __func__); + } + + inline ExynosCameraAutoTimer(const char *strLog) + { + char *strTemp = (char*)strLog; + + if (m_create(strTemp) == false) + ALOGE("ERR(%s):m_create() fail", __func__); + } + + inline virtual ~ExynosCameraAutoTimer() + { + uint64_t durationTime; + + m_timer.stop(); + + durationTime = m_timer.durationMsecs(); + + if (m_logStr) { + ALOGD("DEBUG:duration time(%5d msec):(%s)", + (int)durationTime, m_logStr); + } else { + ALOGD("DEBUG:duration time(%5d msec):(NULL)", + (int)durationTime); + } + } + +private: + bool m_create(char *strLog) + { + m_logStr = strLog; + + m_timer.start(); + + return true; + } + +private: + ExynosCameraDurationTimer m_timer; + char *m_logStr; +}; + +}; /* namespace android */ + +#endif /* EXYNOS_CAMERA_AUTO_TIMER_H */ diff --git a/libcamera/common_v2/ExynosCameraCommonDefine.h b/libcamera/common_v2/ExynosCameraCommonDefine.h new file mode 100644 index 0000000..d588740 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraCommonDefine.h @@ -0,0 +1,97 @@ +#ifndef EXYNOS_CAMERA_COMMON_CONFIG_H__ +#define EXYNOS_CAMERA_COMMON_CONFIG_H__ + +#include + +#include + +#define BUILD_DATE() ALOGE("Build Date is (%s) (%s) ", __DATE__, __TIME__) +#define WHERE_AM_I() ALOGE("[(%s)%d] ", __FUNCTION__, __LINE__) +#define LOG_DELAY() usleep(100000) + +#define TARGET_ANDROID_VER_MAJ 4 +#define TARGET_ANDROID_VER_MIN 4 + +/* ---------------------------------------------------------- */ +/* log */ +#define XPaste(s) s +#define Paste2(a, b) XPaste(a)b +#define ID "[CAM_ID(%d)][%s]-" +#define ID_PARM m_cameraId, m_name +#define LOCATION_ID "(%s[%d]):" +#define LOCATION_ID_PARM __FUNCTION__, __LINE__ + +#define CLOGD(fmt, ...) \ + ALOGD(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGV(fmt, ...) \ + ALOGV(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGW(fmt, ...) \ + ALOGW(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGE(fmt, ...) \ + ALOGE(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGI(fmt, ...) 
\ + ALOGI(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGT(cnt, fmt, ...) \ + if (cnt != 0) CLOGI(Paste2("#TRACE#", fmt), ##__VA_ARGS__) \ + +#define CLOG_ASSERT(fmt, ...) \ + android_printAssert(NULL, LOG_TAG, Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__); + +#define CLOGD2(fmt, ...) \ + CLOGD(Paste2("DEBUG" LOCATION_ID, fmt), LOCATION_ID_PARM, ##__VA_ARGS__) + +#define CLOGV2(fmt, ...) \ + CLOGV(Paste2("VERB" LOCATION_ID, fmt), LOCATION_ID_PARM, ##__VA_ARGS__) + +#define CLOGW2(fmt, ...) \ + CLOGW(Paste2("WARN" LOCATION_ID, fmt), LOCATION_ID_PARM, ##__VA_ARGS__) + +#define CLOGE2(fmt, ...) \ + CLOGE(Paste2("ERR" LOCATION_ID, fmt), LOCATION_ID_PARM, ##__VA_ARGS__) + +#define CLOGI2(fmt, ...) \ + CLOGI(Paste2("INFO" LOCATION_ID, fmt), LOCATION_ID_PARM, ##__VA_ARGS__) + +/* ---------------------------------------------------------- */ +/* Debug Timer */ +#define DEBUG_TIMER_INIT \ + ExynosCameraDurationTimer debugPPPTimer; +#define DEBUG_TIMER_START \ + debugPPPTimer.start(); +#define DEBUG_TIMER_STOP \ + debugPPPTimer.stop(); CLOGD("DEBUG(%s[%d]): DurationTimer #0 (%lld usec)", __FUNCTION__, __LINE__, debugPPPTimer.durationUsecs()); + +/* ---------------------------------------------------------- */ +/* Align */ +#define ROUND_UP(x, a) (((x) + ((a)-1)) / (a) * (a)) +#define ROUND_OFF_HALF(x, dig) ((float)(floor((x) * pow(10.0f, dig) + 0.5) / pow(10.0f, dig))) +#define ROUND_OFF_DIGIT(x, dig) ((uint32_t)(floor(((double)x)/((double)dig) + 0.5f) * dig)) + +/* Image processing */ +#define SATURATING_ADD(a, b) (((a) > (0x3FF - (b))) ? 0x3FF : ((a) + (b))) +#define COMBINE(a, b) ((((a<<20)&0xFFF00000)|((b<<8)&0x000FFF00))) +#define COMBINE_P0(a, b) ((((a)&0x00FF)|((b<<8)&0x0F00))) +#define COMBINE_P1(a, b) ((((a>>4)&0x000F)|((b<<4)&0x0FF0))) +#define COMBINE_P3(a, b) ((((a>>8)&0x000F)|((b<<4)&0x00F0))) +#define COMBINE_P4(a, b) ((((a<<8)&0x0F00)|((b)&0x00FF))) + +/* ---------------------------------------------------------- */ +/* Node Prefix */ +#define NODE_PREFIX "/dev/video" + +/* ---------------------------------------------------------- */ +/* Max Camera Name Size */ +#define EXYNOS_CAMERA_NAME_STR_SIZE (256) + +/* ---------------------------------------------------------- */ +#define SIZE_RATIO(w, h) ((w) * 10 / (h)) + +/* ---------------------------------------------------------- */ + +#endif /* EXYNOS_CAMERA_COMMON_CONFIG_H__ */ + diff --git a/libcamera/common_v2/ExynosCameraCommonEnum.h b/libcamera/common_v2/ExynosCameraCommonEnum.h new file mode 100644 index 0000000..71bd8d6 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraCommonEnum.h @@ -0,0 +1,77 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_COMMON_CONFIG_H +#define EXYNOS_CAMERA_COMMON_CONFIG_H + +typedef enum +{ + SENSOR_NAME_NOTHING = 0, + SENSOR_NAME_S5K3H2 = 1, // 1 ~ 100, SLSI sensors + SENSOR_NAME_S5K6A3 = 2, + SENSOR_NAME_S5K3H5 = 3, + SENSOR_NAME_S5K3H7 = 4, + SENSOR_NAME_S5K3H7_SUNNY = 5, + SENSOR_NAME_S5K3H7_SUNNY_2M = 6, + SENSOR_NAME_S5K6B2 = 7, + SENSOR_NAME_S5K3L2 = 8, + SENSOR_NAME_S5K4E5 = 9, + SENSOR_NAME_S5K2P2 = 10, + SENSOR_NAME_S5K8B1 = 11, + SENSOR_NAME_S5K1P2 = 12, + SENSOR_NAME_S5K4H5 = 13, + SENSOR_NAME_S5K3M2 = 14, + SENSOR_NAME_S5K2P2_12M = 15, + SENSOR_NAME_S5K6D1 = 16, + SENSOR_NAME_S5K5E3 = 17, + SENSOR_NAME_S5K2T2 = 18, + SENSOR_NAME_S5K2P3 = 19, + SENSOR_NAME_S5K2P8 = 20, + SENSOR_NAME_S5K4E6 = 21, + SENSOR_NAME_S5K5E2 = 22, + SENSOR_NAME_S5K3P3 = 23, + SENSOR_NAME_S5K4H5YC = 24, + SENSOR_NAME_S5K3L8_MASTER = 25, + SENSOR_NAME_S5K3L8_SLAVE = 26, + SENSOR_NAME_S5K4H8 = 27, + SENSOR_NAME_S5K2L1 = 29, + SENSOR_NAME_S5K5E8 = 30, + + SENSOR_NAME_IMX135 = 101, // 101 ~ 200 Sony sensors + SENSOR_NAME_IMX134 = 102, + SENSOR_NAME_IMX175 = 103, + SENSOR_NAME_IMX240 = 104, + SENSOR_NAME_IMX228 = 106, + SENSOR_NAME_IMX219 = 107, + SENSOR_NAME_IMX260 = 109, + + SENSOR_NAME_SR261 = 201, // 201 ~ 300 Other vendor sensors + SENSOR_NAME_OV5693 = 202, + SENSOR_NAME_SR544 = 203, + SENSOR_NAME_OV5670 = 204, + SENSOR_NAME_OV8865 = 205, + SENSOR_NAME_SR259 = 206, + + SENSOR_NAME_CUSTOM = 301, + SENSOR_NAME_SR200 = 302, // SoC Module + SENSOR_NAME_SR352 = 303, + SENSOR_NAME_SR130PC20 = 304, + SENSOR_NAME_END, + +}IS_SensorNameEnum; + +#endif /* EXYNOS_CAMERA_COMMON_CONFIG_H */ diff --git a/libcamera/common_v2/ExynosCameraCounter.h b/libcamera/common_v2/ExynosCameraCounter.h new file mode 100644 index 0000000..c265d02 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraCounter.h @@ -0,0 +1,81 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_COUNTER_H +#define EXYNOS_CAMERA_COUNTER_H + +namespace android { + +class ExynosCameraCounter { +public: + /* Constructor */ + ExynosCameraCounter() + { + m_count = 0; + m_compensation = 0; + }; + + void setCount(int cnt) + { + Mutex::Autolock lock(m_lock); + m_count = cnt + m_compensation; + }; + + void setCompensation(int compensation) + { + Mutex::Autolock lock(m_lock); + m_compensation = compensation; + }; + + int getCompensation(void) + { + Mutex::Autolock lock(m_lock); + return m_compensation; + }; + + + void clearCount(void) + { + Mutex::Autolock lock(m_lock); + m_count = 0; + m_compensation = 0; + }; + + int getCount() + { + Mutex::Autolock lock(m_lock); + return m_count; + }; + + void decCount() + { + Mutex::Autolock lock(m_lock); + if (m_count > 0) + m_count--; + else + m_count = 0; + }; + +private: + mutable Mutex m_lock; + int m_count; + int m_compensation; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/ExynosCameraDualFrameSelector.cpp b/libcamera/common_v2/ExynosCameraDualFrameSelector.cpp new file mode 100644 index 0000000..5baa447 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraDualFrameSelector.cpp @@ -0,0 +1,1299 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +#define LOG_TAG "ExynosCameraDualFrameSelector" + +#include "ExynosCameraDualFrameSelector.h" + +//#define EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + +#define EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MAX_OBJ (100) +#define EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MIN_OBJ (1) +#define EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_CALIB_TIME (2) // 2msec + +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG +#define DUAL_FRAME_SELECTOR_LOG ALOGD +#else +#define DUAL_FRAME_SELECTOR_LOG ALOGV +#endif + +ExynosCameraDualFrameSelector::SyncObj::SyncObj() +{ + m_cameraId = -1; + m_frame = NULL; + m_pipeID = -1; + m_isSrc = false; + m_dstPos = -1; + +#ifdef USE_FRAMEMANAGER + m_frameMgr = NULL; +#endif + m_bufMgr = NULL; + + m_timeStamp = 0; +} + +ExynosCameraDualFrameSelector::SyncObj::~SyncObj() +{} + +status_t ExynosCameraDualFrameSelector::SyncObj::create(int cameraId, + ExynosCameraFrame *frame, + int pipeID, + bool isSrc, + int dstPos, +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *frameMgr, +#endif + ExynosCameraBufferManager *bufMgr) +{ + status_t ret = NO_ERROR; + + m_cameraId = cameraId; + m_frame = frame; + m_pipeID = pipeID; + m_isSrc = isSrc; + m_dstPos = dstPos; + +#ifdef USE_FRAMEMANAGER + m_frameMgr = frameMgr; +#endif + + m_bufMgr = bufMgr; + + m_timeStamp = (int)(ns2ms(m_frame->getTimeStamp())); + + return ret; +} + +status_t ExynosCameraDualFrameSelector::SyncObj::destroy(void) +{ + status_t ret = NO_ERROR; + + ExynosCameraBuffer buffer; + + if (m_bufMgr == NULL) { + ALOGV("DEBUG(%s[%d]):m_bufMgr == NULL, ignore frames's buffer(it will delete by caller)", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + // from ExynosCameraFrameSelector + ret = m_getBufferFromFrame(&buffer); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s)", + __FUNCTION__, __LINE__, m_pipeID, (m_isSrc)?"Src":"Dst"); + return ret; + } + + ret = m_bufMgr->putBuffer(buffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret < 0) { + ALOGE("ERR(%s[%d]):putIndex is %d", __FUNCTION__, __LINE__, buffer.index); + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):delete frame ([%d]:%d)", __FUNCTION__, __LINE__, m_cameraId, getTimeStamp()); + + m_lockedFrameComplete(); + + return NO_ERROR; +} + +int ExynosCameraDualFrameSelector::SyncObj::getCameraId(void) +{ + return m_cameraId; +} + +ExynosCameraFrame *ExynosCameraDualFrameSelector::SyncObj::getFrame(void) +{ + return m_frame; +} + +int ExynosCameraDualFrameSelector::SyncObj::getPipeID(void) +{ + return m_pipeID; +} + +ExynosCameraBufferManager *ExynosCameraDualFrameSelector::SyncObj::getBufferManager(void) +{ + return m_bufMgr; +} + +/* + * Check complete flag of the Frame and deallocate it if it is completed. + * This function ignores lock flag of the frame(Lock flag is usually set to protect + * the frame from deallocation), so please use with caution. + * This function is required to remove a frame from frameHoldingList. 
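+ * With USE_FRAME_REFERENCE_COUNT the frame is released unconditionally via
+ * decRef() and frameMgr->deleteFrame(); otherwise it is deleted only once
+ * isComplete() reports true, and a locked frame just logs a warning first.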
+ */ +// from ExynosCameraFrameSelector +status_t ExynosCameraDualFrameSelector::SyncObj::m_lockedFrameComplete(void) +#ifdef USE_FRAMEMANAGER +{ + status_t ret = NO_ERROR; + +#ifdef USE_FRAME_REFERENCE_COUNT + if (m_frameMgr == NULL) { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", __FUNCTION__, __LINE__, m_frame->getFrameCount()); + return INVALID_OPERATION; + } + + m_frame->decRef(); + + m_frameMgr->deleteFrame(m_frame); + + m_frame = NULL; +#else + if (m_frame->isComplete() == true) { + if (m_frame->getFrameLockState() == true) { + ALOGW("WARN(%s[%d]):Deallocating locked frame, count(%d)", __FUNCTION__, __LINE__, m_frame->getFrameCount()); + } + + if (m_frameMgr != NULL) { + m_frameMgr->deleteFrame(m_frame); + } + m_frame = NULL; + } +#endif + + return ret; +} +#else // USE_FRAMEMANAGER +{ + status_t ret = NO_ERROR; + + if (m_frame == NULL) { + ALOGE("ERR(%s[%d]):m_frame == NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (m_frame->isComplete() == true) { + if (m_frame->getFrameLockState() == true) { + ALOGW("WARN(%s[%d]):Deallocating locked frame, count(%d)", __FUNCTION__, __LINE__, m_frame->getFrameCount()); + ALOGW("WARN(%s[%d]):frame is locked : isComplete(%d) count(%d) LockState(%d)", __FUNCTION__, __LINE__, + m_frame->isComplete(), + m_frame->getFrameCount(), + m_frame->getFrameLockState()); + } + delete m_frame; + m_frame = NULL; + } + return ret; +} +#endif // USE_FRAMEMANAGER + +status_t ExynosCameraDualFrameSelector::SyncObj::m_getBufferFromFrame(ExynosCameraBuffer *outBuffer) +{ + status_t ret = NO_ERROR; + ExynosCameraBuffer selectedBuffer; + + if (m_frame == NULL) { + ALOGE("ERR(%s[%d]):m_frame == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (outBuffer == NULL) { + ALOGE("ERR(%s[%d]):outBuffer == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (m_isSrc) { + ret = m_frame->getSrcBuffer(m_pipeID, &selectedBuffer); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):getSrcBuffer(pipeID %d) fail", __FUNCTION__, __LINE__, m_pipeID); + } else { + if (m_dstPos < 0) { + ret = m_frame->getDstBuffer(m_pipeID, &selectedBuffer); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):getDstBuffer(pipeID %d) fail", __FUNCTION__, __LINE__, m_pipeID); + } else { + ret = m_frame->getDstBuffer(m_pipeID, &selectedBuffer, m_dstPos); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):getDstBuffer(pipeID %d, dstPos %d) fail", __FUNCTION__, __LINE__, m_pipeID, m_dstPos); + } + } + *outBuffer = selectedBuffer; + return ret; +} + +int ExynosCameraDualFrameSelector::SyncObj::getTimeStamp(void) +{ + //return (int)(ns2ms(m_frame->getTimeStamp())); + return m_timeStamp; +} + +bool ExynosCameraDualFrameSelector::SyncObj::isSimilarTimeStamp(SyncObj *other) +{ + bool ret = false; + + int calibTime = EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_CALIB_TIME; + +#if 1 + // if this is close other than calib time + /* + * sysnObj's time - 16msec < other's time <= sysnObj's time -> consider sync + * sysnObj's time <= other's time < sysnObj's time + 16msec -> consider sync + */ + if (other->getTimeStamp() < this->getTimeStamp()) { + if (this->getTimeStamp() - calibTime <= other->getTimeStamp()) + ret = true; + } else if (this->getTimeStamp() < other->getTimeStamp()) { + if (other->getTimeStamp() <= this->getTimeStamp() + calibTime) + ret = true; + } else { // time is same. + ret = true; + } +#else + // if this is more latest time than other + if (other->getTimeStamp() <= this->getTimeStamp()) { + ret = true; + + /* if two time is too long. 
+ * ex : + * other's time : 100msec / this's time : 110msec + * 100msec + 16msec >= 110msec -> consider sync + * + * other's time : 100msec / this's time : 190msec + * 100msec + 16msec < 190msec -> consider not sync + */ + if (other->getTimeStamp() + calibTime < this->getTimeStamp()) { + ret = false; + } + } +#endif + + return ret; +} + +ExynosCameraDualFrameSelector::ExynosCameraDualFrameSelector() +{ + ALOGD("DEBUG(%s[%d]):new %s object allocated", __FUNCTION__, __LINE__, __FUNCTION__); + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + m_outputList[i] = NULL; + m_frameMgr[i] = NULL; + m_holdCount[i] = 1; + m_flagValidCameraId[i] = false; + } +} + +ExynosCameraDualFrameSelector::~ExynosCameraDualFrameSelector() +{ + status_t ret = NO_ERROR; + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + ret = clear(i); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):clear(%d) fail", __FUNCTION__, __LINE__, i); + } + } +} + +status_t ExynosCameraDualFrameSelector::setInfo(int cameraId, +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *frameMgr, +#endif + int holdCount) +{ + status_t ret = NO_ERROR; + + /* we must protect list of various camera. */ + Mutex::Autolock lock(m_lock); + + if (CAMERA_ID_MAX <= cameraId) { + ALOGE("ERR(%s[%d]):invalid cameraId(%d). so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + +#ifdef USE_FRAMEMANAGER + if (frameMgr == NULL) { + ALOGE("ERR(%s[%d]):frameMgr == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } +#endif + + m_frameMgr[cameraId] = frameMgr; + m_holdCount[cameraId] = holdCount; + + m_flagValidCameraId[cameraId] = true; + + return ret; +} + +status_t ExynosCameraDualFrameSelector::manageNormalFrameHoldList(int cameraId, + ExynosCameraList *outputList, + ExynosCameraFrame *frame, + int pipeID, bool isSrc, int32_t dstPos, ExynosCameraBufferManager *bufMgr) +{ +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + //ExynosCameraAutoTimer autoTimer(__func__); +#endif + + status_t ret = NO_ERROR; + + if (CAMERA_ID_MAX <= cameraId) { + ALOGE("ERR(%s[%d]):invalid cameraId(%d). so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (outputList == NULL) { + ALOGE("ERR(%s[%d]):outputList == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (frame == NULL) { + ALOGE("ERR(%s[%d]):frame == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (bufMgr == NULL) { + ALOGE("ERR(%s[%d]):bufMgr == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* we must protect list of various camera. */ + Mutex::Autolock lock(m_lock); + + if (EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MAX_OBJ < m_noSyncObjList[cameraId].size()) { + ALOGE("ERR(%s[%d]):EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MAX_OBJ < m_noSyncObjList[%d].size(). 
so, just return true", + __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + bool flagAllCameraSync = false; + SyncObj syncObj; + SyncObj syncObjArr[CAMERA_ID_MAX]; + + ret = syncObj.create(cameraId, frame, pipeID, isSrc, dstPos, +#ifdef USE_FRAMEMANAGER + m_frameMgr[cameraId], +#endif + bufMgr); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):syncObj.create([%d]:%d)) fail", __FUNCTION__, __LINE__, cameraId, m_getTimeStamp(frame)); + return ret; + } + + m_outputList[cameraId] = outputList; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):=== start with cameraId(%d) frameCount(%d), timeStamp(%d) addr(%p)===", + __FUNCTION__, __LINE__, cameraId, frame->getFrameCount(), syncObj.getTimeStamp(), frame); + +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + for (int i = 0; i < CAMERA_ID_MAX; i++) { + m_printList(&m_noSyncObjList[i], i); + } +#endif + + // check all list are sync. just assume this is synced. + flagAllCameraSync = true; + + int validCameraCnt = 0; + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (i == cameraId) { + validCameraCnt++; + continue; + } + + if (m_flagValidCameraId[i] == false) + continue; + + validCameraCnt++; + + // search sync obj on another camera's list. + // compare other list. + if (m_checkSyncObjOnList(&syncObj, i, &m_noSyncObjList[i], &syncObjArr[i]) == false) { + flagAllCameraSync = false; + } + } + + /* when smaller than 2, single stream is can be assume not synced frame. */ + if (validCameraCnt < 2) { + flagAllCameraSync = false; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):validCameraCnt(%d) < 2. so, flagAllCameraSync = false", + __FUNCTION__, __LINE__, validCameraCnt); + } + + syncObjArr[cameraId] = syncObj; + + // if found the all sync + if (flagAllCameraSync == true) { + /* + * if all sybcObj are matched, + * remove all old sybcObj of each synObjList. + * then, pushQ(the last matched frames) to all synObjList. + */ + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + syncObj = syncObjArr[i]; + + // pop the previous list and same time listfm_captureObjList + ret = m_popListUntilTimeStamp(&m_noSyncObjList[i], &syncObj); + if (ret != NO_ERROR){ + ALOGE("ERR(%s[%d]):m_popListUntilTimeStamp([%d]:%d)), fail", + __FUNCTION__, __LINE__, i, syncObj.getTimeStamp()); + continue; + } + + // move sync obj to syncObjList. + ret = m_pushList(&m_syncObjList[i], &syncObj); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_pushList(m_syncObjList [%d]:%d)) fail", + __FUNCTION__, __LINE__, i, syncObj.getTimeStamp()); + continue; + } + + if (m_holdCount[i] < (int)m_syncObjList[i].size() + m_outputList[i]->getSizeOfProcessQ()) { + + int newHoldCount = m_holdCount[i] - m_outputList[i]->getSizeOfProcessQ(); + + if (newHoldCount < 0) + newHoldCount = 0; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):m_holdCount[%d](%d) < m_syncObjList[%d].size()(%d) + m_outputList[%d]->getSizeOfProcessQ()(%d). 
so pop and remove until holdCount(%d)", + __FUNCTION__, __LINE__, i, m_holdCount[i], i, m_syncObjList[i].size(), i, m_outputList[i]->getSizeOfProcessQ(), newHoldCount); + + // remove sync object until min number + ret = m_popListUntilNumber(&m_syncObjList[i], newHoldCount); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popListUntilNumber([%d], %d)) fail", + __FUNCTION__, __LINE__, i, newHoldCount); + continue; + } + } + } + } else { + // remove sync object until min number + ret = m_popListUntilNumber(&m_noSyncObjList[cameraId], EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MIN_OBJ, &syncObj); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popListUntilNumber([%d], %d, %d)) fail", + __FUNCTION__, __LINE__, cameraId, EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MIN_OBJ, syncObj.getTimeStamp()); + } + + // to remember the not sync obj + ret = m_pushList(&m_noSyncObjList[cameraId], &syncObj); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_pushList(m_noSyncObjList [%d]:%d)) fail", + __FUNCTION__, __LINE__, cameraId, syncObj.getTimeStamp()); + return ret; + } + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):==========================================", __FUNCTION__, __LINE__); + + return ret; +} + +ExynosCameraFrame* ExynosCameraDualFrameSelector::selectFrames(int cameraId) +{ +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + //ExynosCameraAutoTimer autoTimer(__func__); +#endif + + status_t ret = NO_ERROR; + + /* we must protect list of various camera. */ + Mutex::Autolock lock(m_lock); + + SyncObj syncObj; + SyncObj nullSyncObj; + ExynosCameraFrame *selectedFrame = NULL; + + /* + * if captureObjList is ready by other camera, use m_outputList. + * else just pop on m_syncObjList, and push m_outputList. + * purpose is... be assure, selectFrames() can give same timeStamp on back and front. + */ + if (0 < m_outputList[cameraId]->getSizeOfProcessQ()) { + goto done; + } + + // search sync obj on another camera's list. + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + syncObj = nullSyncObj; + + // if m_outputList is full, assert. + if (m_holdCount[i] <= m_outputList[i]->getSizeOfProcessQ()) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):m_holdCount[%d](%d) <= m_outputList[%d].size()(%d). because, capture happened on other side only, assert!!!!", + __FUNCTION__, __LINE__, i, m_holdCount[i], i, m_outputList[i]->getSizeOfProcessQ()); + } + + // pop from sync obj and push to capture objectList + if (m_popList(&m_syncObjList[i], &syncObj) != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popList(m_syncObjList[%d]) fail", __FUNCTION__, __LINE__, i); + continue; + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):m_popList(m_syncObjList([%d]:%d)) and m_pushList(m_outputList([%d]:%d))", + __FUNCTION__, __LINE__, i, syncObj.getTimeStamp(), i, syncObj.getTimeStamp()); + + ret = m_pushQ(m_outputList[i], syncObj.getFrame(), i); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_pushQ(m_outputList [%d]:%d)) fail", + __FUNCTION__, __LINE__, i, syncObj.getTimeStamp()); + continue; + } + } + +done: + /* + * if pop on caller, we don't need the below code. 
+ * but, until now, we will pop here to get frame + */ + if (m_popQ(m_outputList[cameraId], &selectedFrame, cameraId) != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popQ(m_outputList[%d]) fail", __FUNCTION__, __LINE__, cameraId); + } + + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):selectedFrame == NULL([%d]:%d) fail", __FUNCTION__, __LINE__, cameraId, syncObj.getTimeStamp()); + } else { + ALOGD("DEBUG(%s[%d]):selectedFrame is [%d]:frameCount(%d), timeStamp(%d)", + __FUNCTION__, __LINE__, cameraId, selectedFrame->getFrameCount(), m_getTimeStamp(selectedFrame)); + } + + return selectedFrame; +} + +status_t ExynosCameraDualFrameSelector::releaseFrames(int cameraId, ExynosCameraFrame *frame) +{ + status_t ret = NO_ERROR; + + // force release(== putBuffer() to bufferManger) sync frame + // when error case, find frame on my and other's m_outputList() by frame'timeStamp. + // then, release(== putBuffer() to bufferManger) sync frame of all cameras + ALOGD("DEBUG(%s[%d]):releaseFrames([%d]:%d) called", __FUNCTION__, __LINE__, cameraId, m_getTimeStamp(frame)); + + return ret; +} + +status_t ExynosCameraDualFrameSelector::clear(int cameraId) +{ + status_t ret = NO_ERROR; + + /* we must protect list of various camera. */ + Mutex::Autolock lock(m_lock); + + ret = m_clearList(&m_syncObjList[cameraId]); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_clearList(m_syncObjList[%d]:All) fail", __FUNCTION__, __LINE__, cameraId); + //return ret; + } + + ret = m_clearList(&m_noSyncObjList[cameraId]); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_clearList(m_noSyncObjList[%d]:All) fail", __FUNCTION__, __LINE__, cameraId); + //return ret; + } + + return ret; +} + +bool ExynosCameraDualFrameSelector::m_checkSyncObjOnList(SyncObj *syncObj, + int otherCameraId, + List *list, + SyncObj *resultSyncObj) +{ + List::iterator r; + + bool ret = false; + SyncObj curSyncObj; + + if (list->empty()) { + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):when checking [%d]:%d on [%d]'s list. 
list is empty", + __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp(), otherCameraId); + return false; + } + + r = list->begin()++; + + do { + curSyncObj = *r; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):compare [%d]:%d with [%d]:%d", + __FUNCTION__, __LINE__, + syncObj->getCameraId(), syncObj->getTimeStamp(), + curSyncObj.getCameraId(), curSyncObj.getTimeStamp()); + + if (syncObj->isSimilarTimeStamp(&curSyncObj) == true) { + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):[%d]:%d match with [%d]:%d", + __FUNCTION__, __LINE__, + syncObj->getCameraId(), syncObj->getTimeStamp(), + curSyncObj.getCameraId(), curSyncObj.getTimeStamp()); + + *resultSyncObj = curSyncObj; + + return true; + } + r++; + } while (r != list->end()); + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):[%d]:%d does not match with [%d]'s list", + __FUNCTION__, __LINE__, + syncObj->getCameraId(), syncObj->getTimeStamp(), + otherCameraId); + + return false; +} + +status_t ExynosCameraDualFrameSelector::m_destroySyncObj(SyncObj *syncObj, ExynosCameraList *outputList) +{ + status_t ret = NO_ERROR; + + if (outputList) { + ExynosCameraFrame *frame = syncObj->getFrame(); + int pipeID = syncObj->getPipeID(); + ExynosCameraFrameEntity *entity = frame->searchEntityByPipeId(pipeID); + + if (entity == NULL) { + ALOGE("ERR(%s[%d]):frame([%d]:%d)'s entity == NULL, fail", __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + return INVALID_OPERATION; + } + + ret = entity->setDstBufState(ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):frame([%d]:%d)'s entity->setDstBufState(ENTITY_BUFFER_STATE_ERROR) fail", + __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + } + + ret = frame->setEntityState(pipeID, ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):frame([%d]:%d)'s setEntityState(%d, ENTITY_STATE_FRAME_DONE) fail", + __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + } + + /* + * this will goto outputList. 
+ * frame is dropped + */ + ret = m_pushQ(outputList, frame, syncObj->getCameraId()); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_pushQ([%d]:%d) fail", __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + } + } else { + ret = syncObj->destroy(); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):syncObj->destroy();([%d]:%d) fail", __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + } + } + + return ret; +} + +status_t ExynosCameraDualFrameSelector::m_pushList(List *list, SyncObj *syncObj) +{ + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):push([%d]:%d)", __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + + list->push_back(*syncObj); + + return NO_ERROR; +} + +status_t ExynosCameraDualFrameSelector::m_popList(List *list, SyncObj *syncObj) +{ + List::iterator r; + + if (syncObj == NULL) { + ALOGE("ERR(%s[%d]):syncObj == NULL, so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (list->empty()) + return TIMED_OUT; + + r = list->begin()++; + *syncObj = *r; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):pop([%d]:%d)", + __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + + list->erase(r); + + return NO_ERROR; +}; + +status_t ExynosCameraDualFrameSelector::m_popListUntilTimeStamp(List *list, SyncObj *syncObj, + ExynosCameraList *outputList) +{ + status_t ret = NO_ERROR; + + List::iterator r; + + SyncObj curSyncObj; + bool flagFound = false; + + if (list->empty()) { + ALOGV("DEBUG(%s[%d]):list is empty, when [%d]:%d", __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp()); + return NO_ERROR; + } + + r = list->begin()++; + + do { + curSyncObj = *r; + + if (syncObj->getTimeStamp() <= curSyncObj.getTimeStamp()) { + flagFound = true; + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):pop([%d]:%d), by [%d]:%d", + __FUNCTION__, __LINE__, curSyncObj.getCameraId(), curSyncObj.getTimeStamp(), syncObj->getCameraId(), syncObj->getTimeStamp()); + + list->erase(r); + r++; + + if (flagFound == true) { + break; + } else { + ret = m_destroySyncObj(&curSyncObj, outputList); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_destroySyncObj(([%d]:%d)) fail", __FUNCTION__, __LINE__, curSyncObj.getCameraId(), curSyncObj.getTimeStamp()); + } + } + } while (r != list->end()); + + return NO_ERROR; +} + +status_t ExynosCameraDualFrameSelector::m_popListUntilNumber(List *list, int minNum, SyncObj *syncObj, + ExynosCameraList *outputList) +{ + status_t ret = NO_ERROR; + + List::iterator r; + + SyncObj curSyncObj; + bool flagFound = false; + + if (list->empty()) { + if (syncObj == NULL) { + ALOGV("DEBUG(%s[%d]):list is empty, when remove by [%d], %d, %d", __FUNCTION__, __LINE__, syncObj->getCameraId(), minNum, syncObj->getTimeStamp()); + } else { + ALOGV("DEBUG(%s[%d]):list is empty, when remove by %d", __FUNCTION__, __LINE__, minNum); + } + + return NO_ERROR; + } + + int size = list->size(); + + if (size <= minNum) { + return NO_ERROR; + } + + r = list->begin()++; + + do { + curSyncObj = *r; + + if (size <= minNum) + break; + + if (syncObj) { + if (syncObj->getTimeStamp() <= curSyncObj.getTimeStamp()) { + ALOGE("ERR(%s[%d]):[%d]:%d <= [%d]:%d. weird. 
minNum(%d), size(%d)", + __FUNCTION__, __LINE__, syncObj->getCameraId(), syncObj->getTimeStamp(), curSyncObj.getCameraId(), curSyncObj.getTimeStamp(), minNum, size); + flagFound = true; + } + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):pop([%d]:%d), by minNum(%d) <= size(%d)", + __FUNCTION__, __LINE__, curSyncObj.getCameraId(), curSyncObj.getTimeStamp(), minNum, size); + + list->erase(r); + r++; + + if (flagFound == true) { + break; + } else { + ret = m_destroySyncObj(&curSyncObj, outputList); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_destroySyncObj(([%d]:%d)) fail", __FUNCTION__, __LINE__, curSyncObj.getCameraId(), curSyncObj.getTimeStamp()); + } + } + + size--; + } while (r != list->end()); + + return NO_ERROR; +} + +status_t ExynosCameraDualFrameSelector::m_clearList(List *list, + ExynosCameraList *outputList) +{ + status_t ret = NO_ERROR; + + List::iterator r; + + SyncObj curSyncObj; + + ALOGD("DEBUG(%s[%d]):remaining list size(%d), we remove them all", __FUNCTION__, __LINE__, list->size()); + + while (!list->empty()) { + r = list->begin()++; + curSyncObj = *r; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):pop([%d]:%d)", + __FUNCTION__, __LINE__, curSyncObj.getCameraId(), curSyncObj.getTimeStamp()); + + ret = m_destroySyncObj(&curSyncObj, outputList); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_destroySyncObj(([%d]:%d)) fail", __FUNCTION__, __LINE__, curSyncObj.getCameraId(), curSyncObj.getTimeStamp()); + } + + list->erase(r); + } + + ALOGD("DEBUG(%s[%d]):EXIT", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +void ExynosCameraDualFrameSelector::m_printList(List *list, int cameraId) +{ + List::iterator r; + + SyncObj curSyncObj; + + if (m_flagValidCameraId[cameraId] == false) { + ALOGD("DEBUG(%s[%d]):=== [%d] is not valid sensor ===", __FUNCTION__, __LINE__, cameraId); + goto done; + } + + ALOGD("DEBUG(%s[%d]):=== [%d]'s size(%d) ===", __FUNCTION__, __LINE__, cameraId, list->size()); + if (list->empty()) { + goto done; + } + + r = list->begin()++; + + do { + curSyncObj = *r; + ALOGD("DEBUG(%s[%d]):[%d]%d frameCount(%d), addr(%p)", __FUNCTION__, __LINE__, cameraId, curSyncObj.getTimeStamp(), curSyncObj.getFrame()->getFrameCount(), curSyncObj.getFrame()); + + r++; + } while (r != list->end()); + +done: + ALOGD("DEBUG(%s[%d]):======================", __FUNCTION__, __LINE__); +} + +status_t ExynosCameraDualFrameSelector::m_pushQ(ExynosCameraList *list, ExynosCameraFrame* frame, int cameraId) +{ + status_t ret = NO_ERROR; + + if (list == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):list == NULL, assert!!!!", __FUNCTION__, __LINE__); + } + + if (frame == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):frame == NULL, assert!!!!", __FUNCTION__, __LINE__); + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):pushProcessQ([%d]:%d)", __FUNCTION__, __LINE__, cameraId, m_getTimeStamp(frame)); + + list->pushProcessQ(&frame); + + return ret; +} + +status_t ExynosCameraDualFrameSelector::m_popQ(ExynosCameraList *list, ExynosCameraFrame** outframe, int cameraId) +{ + status_t ret = NO_ERROR; + +#if 0 +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + ALOGD("DEBUG(%s[%d]):========== print list (%d:size(%d) ==========", __FUNCTION__, __LINE__, cameraId, list->getSizeOfProcessQ()); + if (syncObj->list->empty() == false) { + ExynosCameraList::iterator t; + ExynosCameraFrame *frame; + + t = list->begin()++; + + do { + frame = *t; + ALOGD("DEBUG(%s[%d]):list[%d]:%d fCount(%d)", + __FUNCTION__, __LINE__, cameraId, m_getTimeStamp(frame), frame->getFrameCount()); + + t++; + } 
while (t != list->end()); + } + + ALOGD("DEBUG(%s[%d]):====================================", __FUNCTION__, __LINE__); + /* + * you can debug about what item is in obj->list. + * you need put this method on class ExynosCameraList + iterator begin(void) { return m_processQ.begin(); }; + iterator end(void) { return m_processQ.end(); }; + bool empty(void) { return m_processQ.empty(); }; + */ +#endif +#endif + + int iter = 0; + int tryCount = 1; + + do { + ret = list->popProcessQ(outframe); + if (ret < 0) { + if (ret == TIMED_OUT) { + ALOGD("DEBUG(%s[%d]):PopQ Time out -> retry[max cur](%d %d)", __FUNCTION__, __LINE__, tryCount, iter); + iter++; + continue; + } + } + } while (ret != OK && iter < tryCount); + + if (ret != OK) { + ALOGE("ERR(%s[%d]):popQ fail(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (*outframe == NULL) { + ALOGE("ERR(%s[%d]):popQ frame = NULL frame(%p)", __FUNCTION__, __LINE__, *outframe); + return INVALID_OPERATION; + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):popProcessQ([%d]:%d). so, remain size(%d)", + __FUNCTION__, __LINE__, cameraId, m_getTimeStamp(*outframe), list->getSizeOfProcessQ()); + + return ret; +} + +int ExynosCameraDualFrameSelector::m_getTimeStamp(ExynosCameraFrame *frame) +{ + return (int)(ns2ms(frame->getTimeStamp())); +} + +ExynosCameraDualPreviewFrameSelector::ExynosCameraDualPreviewFrameSelector() +{ + ALOGD("DEBUG(%s[%d]):new %s object allocated", __FUNCTION__, __LINE__, __FUNCTION__); + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + m_dof[i] = NULL; + } +} + +ExynosCameraDualPreviewFrameSelector::~ExynosCameraDualPreviewFrameSelector() +{ +} + +status_t ExynosCameraDualPreviewFrameSelector::setInfo(int cameraId, + int holdCount, + DOF *dof) +{ + status_t ret = NO_ERROR; + + if (CAMERA_ID_MAX <= cameraId) { + ALOGE("ERR(%s[%d]):invalid cameraId(%d). so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (dof == NULL) { + ALOGE("ERR(%s[%d]):dof == NULL. so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (m_holdCount[cameraId] != holdCount) { + ALOGD("DEBUG(%s[%d]):m_holdCount[%d](%d) changed to %d", __FUNCTION__, __LINE__, cameraId, m_holdCount[cameraId], holdCount); + m_holdCount[cameraId] = holdCount; + } + + m_dof[cameraId] = dof; + + m_flagValidCameraId[cameraId] = true; + + return ret; +} + +DOF *ExynosCameraDualPreviewFrameSelector::getDOF(int cameraId) +{ + return m_dof[cameraId]; +} + +status_t ExynosCameraDualPreviewFrameSelector::managePreviewFrameHoldList(int cameraId, + ExynosCameraList *outputList, + ExynosCameraFrame *frame, + int pipeID, bool isSrc, int32_t dstPos, ExynosCameraBufferManager *bufMgr) +{ +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + //ExynosCameraAutoTimer autoTimer(__func__); +#endif + + status_t ret = NO_ERROR; + + if (CAMERA_ID_MAX <= cameraId) { + ALOGE("ERR(%s[%d]):invalid cameraId(%d). so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (outputList == NULL) { + ALOGE("ERR(%s[%d]):outputList == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (frame == NULL) { + ALOGE("ERR(%s[%d]):frame == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (bufMgr == NULL) { + ALOGE("ERR(%s[%d]):bufMgr == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* we must protect list of various camera. 
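+       (m_lock serializes every camera's access to m_noSyncObjList, m_syncObjList and m_outputList below)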
*/ + Mutex::Autolock lock(m_lock); + + if (EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MAX_OBJ < m_noSyncObjList[cameraId].size()) { + ALOGE("ERR(%s[%d]):EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MAX_OBJ < m_noSyncObjList[%d].size(). so, just return true", + __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + bool flagAllCameraSync = false; + SyncObj syncObj; + SyncObj syncObjArr[CAMERA_ID_MAX]; + + ret = syncObj.create(cameraId, frame, pipeID, isSrc, dstPos, +#ifdef USE_FRAMEMANAGER + NULL, +#endif + bufMgr); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):syncObj.create([%d]:%d)) fail", __FUNCTION__, __LINE__, cameraId, m_getTimeStamp(frame)); + return ret; + } + + m_outputList[cameraId] = outputList; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):=== start with cameraId(%d) syncObj(%d) ===", __FUNCTION__, __LINE__, cameraId, syncObj.getTimeStamp()); + +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + for (int i = 0; i < CAMERA_ID_MAX; i++) { + m_printList(&m_noSyncObjList[i], i); + } +#endif + + // check all list are sync. just assume this is synced. + flagAllCameraSync = true; + + int validCameraCnt = 0; + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (i == cameraId) { + validCameraCnt++; + continue; + } + + if (m_flagValidCameraId[i] == false) + continue; + + validCameraCnt++; + + // search sync obj on another camera's list. + // compare other list. + if (m_checkSyncObjOnList(&syncObj, i, &m_noSyncObjList[i], &syncObjArr[i]) == false) { + flagAllCameraSync = false; + } + } + + /* when smaller than 2, single stream is can be assume not synced frame. */ + if (validCameraCnt < 2) { + flagAllCameraSync = false; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):validCameraCnt(%d) < 2. so, flagAllCameraSync = false", + __FUNCTION__, __LINE__, validCameraCnt); + } + + syncObjArr[cameraId] = syncObj; + + // if found the all sync + if (flagAllCameraSync == true) { + /* + * if all sybcObj are matched, + * remove all old sybcObj of each synObjList. + * then, pushQ(the last matched frames) to all synObjList. + */ + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + syncObj = syncObjArr[i]; + + // pop the previous list and same time listfm_captureObjList + ret = m_popListUntilTimeStamp(&m_noSyncObjList[i], &syncObj, m_outputList[i]); + if (ret != NO_ERROR){ + ALOGE("ERR(%s[%d]):m_popListUntilTimeStamp([%d]:%d)), fail", + __FUNCTION__, __LINE__, i, syncObj.getTimeStamp()); + continue; + } + + // move sync obj to syncObjList. + ret = m_pushList(&m_syncObjList[i], &syncObj); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_pushList(m_syncObjList [%d]:%d)) fail", + __FUNCTION__, __LINE__, i, syncObj.getTimeStamp()); + continue; + } + + if (m_holdCount[i] < (int)m_syncObjList[i].size()) { + + int newHoldCount = m_holdCount[i]; + + if (newHoldCount < 0) + newHoldCount = 0; + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):m_holdCount[%d](%d) < m_syncObjList[%d].size()(%d). 
so pop and remove until holdCount(%d)", + __FUNCTION__, __LINE__, i, m_holdCount[i], i, m_syncObjList[i].size(), newHoldCount); + + // remove sync object until min number + ret = m_popListUntilNumber(&m_syncObjList[i], newHoldCount, NULL, m_outputList[i]); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popListUntilNumber([%d], %d)) fail", + __FUNCTION__, __LINE__, i, newHoldCount); + continue; + } + } + } + } else { + // remove sync object until min number + ret = m_popListUntilNumber(&m_noSyncObjList[cameraId], EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MIN_OBJ, &syncObj, m_outputList[cameraId]); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popListUntilNumber([%d], %d, %d)) fail", + __FUNCTION__, __LINE__, cameraId, EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_MIN_OBJ, syncObj.getTimeStamp()); + } + + // to remember the not sync obj + ret = m_pushList(&m_noSyncObjList[cameraId], &syncObj); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_pushList(m_noSyncObjList [%d]:%d)) fail", + __FUNCTION__, __LINE__, cameraId, syncObj.getTimeStamp()); + return ret; + } + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):==========================================", __FUNCTION__, __LINE__); + + return ret; +} + +bool ExynosCameraDualPreviewFrameSelector::selectFrames(int cameraId, + ExynosCameraFrame **frame0, ExynosCameraList **outputList0, ExynosCameraBufferManager **bufManager0, + ExynosCameraFrame **frame1, ExynosCameraList **outputList1, ExynosCameraBufferManager **bufManager1) +{ +#ifdef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_DEBUG + //ExynosCameraAutoTimer autoTimer(__func__); +#endif + + bool flagSynced = false; + status_t ret = NO_ERROR; + + /* we must protect list of various camera. */ + Mutex::Autolock lock(m_lock); + + SyncObj syncObj; + SyncObj nullSyncObj; + ExynosCameraFrame *selectedFrame = NULL; + int frameSyncCount = 0; + + /* + * if m_syncObjList is <= 0, just return false; + */ + if (m_syncObjList[cameraId].size() <= 0) { + return false; + } + + // search sync obj on another camera's list. + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + syncObj = nullSyncObj; + + // if m_outputList is full, assert. + if (m_holdCount[i] < (int)m_syncObjList[i].size()) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):m_holdCount[%d](%d) <= m_syncObjList[%d].size()(%d). 
weird situation, assert!!!!", + __FUNCTION__, __LINE__, i, m_holdCount[i], i, m_syncObjList[i].size()); + } + + // pop from sync obj and push to capture objectList + if (m_popList(&m_syncObjList[i], &syncObj) != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_popList(m_syncObjList[%d]) fail", __FUNCTION__, __LINE__, i); + continue; + } + + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):m_popList(m_syncObjList([%d]:%d))", __FUNCTION__, __LINE__, i, syncObj.getTimeStamp()); + + selectedFrame = syncObj.getFrame(); + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):selectedFrame == NULL([%d]:%d) fail", __FUNCTION__, __LINE__, cameraId, syncObj.getTimeStamp()); + continue; + } else { + DUAL_FRAME_SELECTOR_LOG("DEBUG(%s[%d]):selectedFrame is [%d]:frameCount(%d), timeStamp(%d)", + __FUNCTION__, __LINE__, cameraId, selectedFrame->getFrameCount(), m_getTimeStamp(selectedFrame)); + } + + if (frameSyncCount == 0) { + *frame0 = selectedFrame; + *outputList0 = m_outputList[i]; + *bufManager0 = syncObj.getBufferManager(); + } else if (frameSyncCount == 1) { + *frame1 = selectedFrame; + *outputList1 = m_outputList[i]; + *bufManager1 = syncObj.getBufferManager(); + } else { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):invalid i(%d), frameSyncCount(%d), assert!!!!", __FUNCTION__, __LINE__, i, frameSyncCount); + break; + } + + frameSyncCount++; + + flagSynced = true; + } + +done: + return flagSynced; +} + +status_t ExynosCameraDualPreviewFrameSelector::clear(int cameraId) +{ + status_t ret = NO_ERROR; + + /* we must protect list of various camera. */ + Mutex::Autolock lock(m_lock); + + ret = m_clearList(&m_syncObjList[cameraId], m_outputList[cameraId]); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_clearList(m_syncObjList[%d]:All) fail", __FUNCTION__, __LINE__, cameraId); + //return ret; + } + + ret = m_clearList(&m_noSyncObjList[cameraId], m_outputList[cameraId]); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_clearList(m_noSyncObjList[%d]:All) fail", __FUNCTION__, __LINE__, cameraId); + //return ret; + } + + return ret; +} diff --git a/libcamera/common_v2/ExynosCameraDualFrameSelector.h b/libcamera/common_v2/ExynosCameraDualFrameSelector.h new file mode 100644 index 0000000..6479d33 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraDualFrameSelector.h @@ -0,0 +1,403 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosCameraDualFrameSelector.h + * \brief header file for ExynosCameraDualFrameSelector + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2014/10/08 + * + * Revision History: + * - 2014/10/08 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_H +#define EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_H + +#include "string.h" +#include +#include + +#include "ExynosCameraSingleton.h" +#include "ExynosCameraFrameSelector.h" +#include "ExynosCameraFusionInclude.h" + +using namespace android; + +/* Class declaration */ +//! 
ExynosCameraDualFrameSelector is sync logic to, get sync frame, among asynchronously coming frames from multiple camera. +/*! + * \ingroup ExynosCamera + */ +class ExynosCameraDualFrameSelector +{ +protected: + /* Class declaration */ + //! SyncObj is the object to have frame + /*! + * \ingroup ExynosCamera + */ + class SyncObj { + public: + //! Constructor + SyncObj(); + //! Destructor + ~SyncObj(); + + //! create + /*! + \param cameraId + \param frame + \param pipeID + caller's pipeID + \param isSrc + to check srcBuffer or dstBuffer + \param dstPos + when dstBuffer, it need dstPosition to get buffer + \remarks + create the initiate syncObj + */ + status_t create(int cameraId, + ExynosCameraFrame *frame, + int pipeID, + bool isSrc, + int dstPos, +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *frameMgr, +#endif + ExynosCameraBufferManager *bufMgr); + + //! destroy + /*! + \remarks + destroy obj (this leads frame's buffer release) + */ + status_t destroy(void); + + //! getCameraId + /*! + \remarks + get cameraId of frame + */ + int getCameraId(void); + + //! getFrame + ExynosCameraFrame *getFrame(void); + + //! getPipeId + /*! + \remarks + get pipeId of frame + */ + int getPipeID(void); + + //! getBufferManager + /*! + \remarks + get bufferManager of frame + */ + ExynosCameraBufferManager *getBufferManager(void); + + //! getTimeStamp + int getTimeStamp(void); + + //! isSimilarTimeStamp + /*! + \param other + other SyncObj to compare timeStamp + \remarks + compare my timeStamp and other's timeStamp + if similar(sync) frames, return true; + else, return false; + */ + bool isSimilarTimeStamp(SyncObj *other); + + private: + status_t m_lockedFrameComplete(void); + status_t m_getBufferFromFrame(ExynosCameraBuffer *outBuffer); + + public: + SyncObj& operator =(const SyncObj &other) { + m_cameraId = other.m_cameraId; + m_frame = other.m_frame; + m_pipeID = other.m_pipeID; + m_isSrc = other.m_isSrc; + m_dstPos = other.m_dstPos; +#ifdef USE_FRAMEMANAGER + m_frameMgr = other.m_frameMgr; +#endif + m_bufMgr = other.m_bufMgr; + + m_timeStamp = other.m_timeStamp; + + return *this; + } + + bool operator ==(const SyncObj &other) const { + bool ret = true; + + if (m_cameraId != other.m_cameraId || + m_frame != other.m_frame || + m_pipeID != other.m_pipeID || + m_isSrc != other.m_isSrc || + m_dstPos != other.m_dstPos || +#ifdef USE_FRAMEMANAGER + m_frameMgr != other.m_frameMgr || +#endif + m_bufMgr != other.m_bufMgr || + m_timeStamp != other.m_timeStamp) { + ret = false; + } + + return ret; + } + + bool operator !=(const SyncObj &other) const { + return !(*this == other); + } + + private: + int m_cameraId; + ExynosCameraFrame *m_frame; + int m_pipeID; + bool m_isSrc; + int m_dstPos; + +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *m_frameMgr; +#endif + ExynosCameraBufferManager *m_bufMgr; + + int m_timeStamp; + }; + +protected: + friend class ExynosCameraSingleton; + + //! Constructor + ExynosCameraDualFrameSelector(); + + //! Destructor + virtual ~ExynosCameraDualFrameSelector(); + +public: + //! setInfo + /*! + \param cameraId + \param frameMgr + frameManager to free frame. + \param holdCount + synced buffer count to hold on. + ex) if hold Count is 1, it maintain only last 1 synced frame. + \remarks + setting information of cameraId's stream + */ + status_t setInfo(int cameraId, +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *frameMgr, +#endif + int holdCount); + + //! manageNormalFrameHoldList + /*! + \param cameraId + \param outputList + when frame is synced. 
the target list that frame move. + \param frame + \param pipeID + caller's pipeID + \param isSrc + to check srcBuffer or dstBuffer + \param dstPos + when dstBuffer, it need dstPosition to get buffer + \param bufMgr + bufferManager to get and free frame's buffer. + \remarks + trigger sync logic with a frame. + when this function call, this class try to compare to find sync frame. + If logic find synced frame, push frame to list which is from argument. + This list is type of FIFO not stack. + and, you can get sync frame by selectFrames(); + */ + status_t manageNormalFrameHoldList(int cameraId, + ExynosCameraList *outputList, + ExynosCameraFrame *frame, + int pipeID, bool isSrc, int32_t dstPos, ExynosCameraBufferManager *bufMgr); + + //! selectFrames + /*! + \param cameraId + \remarks + get synced frame of cameraId + this return one sync frame. + */ + ExynosCameraFrame* selectFrames(int cameraId); + + //! releaseFrames + /*! + \param cameraId + \remarks + when error case, find frame on my and other's m_outputObjList() by frame's timeStamp. + then, force release(== putBuffer() to bufferManger) sync frame of all cameras + */ + status_t releaseFrames(int cameraId, ExynosCameraFrame *frame); + + //! clear + /*! + \param cameraId + \remarks + This API make class reset. + reset means + release buffer to bufferManager. + and, release frame in all sync and not-sync frame to frameManager. + */ + status_t clear(int cameraId); + +protected: + bool m_checkSyncObjOnList (SyncObj *syncObj, + int otherCameraId, + List *list, + SyncObj *resultSyncObj); + status_t m_destroySyncObj (SyncObj *syncObj, ExynosCameraList *outlist); + + status_t m_pushList (List *list, SyncObj *syncObj); + status_t m_popList (List *list, SyncObj *syncObj); + status_t m_popListUntilTimeStamp(List *list, SyncObj *syncObj, ExynosCameraList *outputList = NULL); + status_t m_popListUntilNumber (List *list, int minNum, SyncObj *syncObj = NULL, ExynosCameraList *outputList = NULL); + status_t m_clearList (List *list, ExynosCameraList *outputList = NULL); + void m_printList (List *list, int cameraId); + + int m_getTimeStamp(ExynosCameraFrame *frame); + + status_t m_pushQ(ExynosCameraList *list, ExynosCameraFrame* frame, int cameraId); + status_t m_popQ (ExynosCameraList *list, ExynosCameraFrame** outframe, int cameraId); + +protected: + // not yet sync obj list + List m_noSyncObjList[CAMERA_ID_MAX]; + + // sync obj list + List m_syncObjList[CAMERA_ID_MAX]; + + // output candidate obj list + ExynosCameraList *m_outputList[CAMERA_ID_MAX]; + +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *m_frameMgr[CAMERA_ID_MAX]; +#endif + + int m_holdCount[CAMERA_ID_MAX]; + bool m_flagValidCameraId[CAMERA_ID_MAX]; + + Mutex m_lock; +}; + +//! ExynosCameraDualFrameSelector +/*! + * \ingroup ExynosCamera + */ +class ExynosCameraDualPreviewFrameSelector : public ExynosCameraDualFrameSelector +{ +protected: + friend class ExynosCameraSingleton; + + //! Constructor + ExynosCameraDualPreviewFrameSelector(); + + //! Destructor + virtual ~ExynosCameraDualPreviewFrameSelector(); + +public: + /*! + \param cameraId + \param holdCount + synced buffer count to hold on. + ex) if hold Count is 1, it maintain only last 1 synced frame. + \remarks + setting information of cameraId's stream + */ + status_t setInfo(int cameraId, + int holdCount, + DOF *dof); + + /*! + \param cameraId + \remarks + return cameraId's DOF + */ + DOF *getDOF(int cameraId); + + //! managePreviewFrameHoldList + /*! + \param cameraId + \param outputList + when frame is not synced. 
the frame is move to outputList. + the owner of outputList can detect like this. + if (entity->getDstBufState() == ENTITY_BUFFER_STATE_ERROR) + \param frame + \param pipeID + caller's pipeID + \param isSrc + to check srcBuffer or dstBuffer + \param dstPos + when dstBuffer, it need dstPosition to get buffer + \param bufMgr + bufferManager to get and free frame's buffer. + \remarks + trigger sync logic with a frame. + when this function call, this class try to compare to find sync frame. + then, maintain sync frame. + and, you can get sync frame by selectFrames(); + */ + status_t managePreviewFrameHoldList(int cameraId, + ExynosCameraList *outputList, + ExynosCameraFrame *frame, + int pipeID, bool isSrc, int32_t dstPos, ExynosCameraBufferManager *bufMgr); + + //! selectFrames + /*! + \param cameraId + \param frame0 + synced wide's frame + \param frame1 + synced tele's frame + \remarks + get synced frame of cameraId + if synced : return true and give two frames. + else not synced : return false. + */ + bool selectFrames(int cameraId, + ExynosCameraFrame **frame0, ExynosCameraList **outputList0, ExynosCameraBufferManager **bufManager0, + ExynosCameraFrame **frame1, ExynosCameraList **outputList1, ExynosCameraBufferManager **bufManager1); + + //! clear + /*! + \param cameraId + \remarks + This API make class reset. + reset means + send the all sync and non-sync frames to caller. + then, caller will free the frames. + */ + status_t clear(int cameraId); + +protected: + DOF* m_dof[CAMERA_ID_MAX]; +}; + +#endif //EXYNOS_CAMERA_DUAL_FRAME_SELECTOR_H diff --git a/libcamera/common_v2/ExynosCameraFrame.cpp b/libcamera/common_v2/ExynosCameraFrame.cpp new file mode 100644 index 0000000..f996424 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraFrame.cpp @@ -0,0 +1,2024 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrame" +#include + +#include "ExynosCameraFrame.h" + +namespace android { + +#ifdef DEBUG_FRAME_MEMORY_LEAK +unsigned long long ExynosCameraFrame::m_checkLeakCount; +Mutex ExynosCameraFrame::m_countLock; +#endif + +ExynosCameraFrame::ExynosCameraFrame( + ExynosCameraParameters *obj_param, + uint32_t frameCount, + uint32_t frameType) +{ + ALOGV("DEBUG(%s[%d]): create frame type(%d), frameCount(%d)", __FUNCTION__, __LINE__, frameType, frameCount); + m_parameters = obj_param; + m_frameCount = frameCount; + m_frameType = frameType; + + m_init(); +} + +ExynosCameraFrame::ExynosCameraFrame() +{ + m_parameters = NULL; + m_frameCount = 0; + m_frameType = FRAME_TYPE_OTHERS; + m_init(); +} + +ExynosCameraFrame::~ExynosCameraFrame() +{ + m_deinit(); +} + +#ifdef DEBUG_FRAME_MEMORY_LEAK +long long int ExynosCameraFrame::getCheckLeakCount() +{ + return m_privateCheckLeakCount; +} +#endif + +status_t ExynosCameraFrame::addSiblingEntity( + __unused ExynosCameraFrameEntity *curEntity, + ExynosCameraFrameEntity *newEntity) +{ + Mutex::Autolock l(m_linkageLock); + m_linkageList.push_back(newEntity); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::addChildEntity( + ExynosCameraFrameEntity *parentEntity, + ExynosCameraFrameEntity *newEntity) +{ + status_t ret = NO_ERROR; + + if (parentEntity == NULL) { + ALOGE("ERR(%s):parentEntity is NULL", __FUNCTION__); + return BAD_VALUE; + } + + /* TODO: This is not suit in case of newEntity->next != NULL */ + ExynosCameraFrameEntity *tmpEntity; + + tmpEntity = parentEntity->getNextEntity(); + ret = parentEntity->setNextEntity(newEntity); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setNextEntity fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + newEntity->setNextEntity(tmpEntity); + + return ret; +} + +status_t ExynosCameraFrame::addChildEntity( + ExynosCameraFrameEntity *parentEntity, + ExynosCameraFrameEntity *newEntity, + int parentPipeId) +{ + status_t ret = NO_ERROR; + + if (parentEntity == NULL) { + ALOGE("ERR(%s):parentEntity is NULL", __FUNCTION__); + return BAD_VALUE; + } + + /* TODO: This is not suit in case of newEntity->next != NULL */ + ExynosCameraFrameEntity *tmpEntity; + + tmpEntity = parentEntity->getNextEntity(); + ret = parentEntity->setNextEntity(newEntity); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setNextEntity fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (0 <= parentPipeId) { + ret = newEntity->setParentPipeId((enum pipeline)parentPipeId); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setParentPipeId(%d) fail, ret(%d)", __FUNCTION__, __LINE__, parentPipeId, ret); + return ret; + } + } else { + ALOGW("WARN(%s[%d]):parentPipeId(%d) < 0. 
you may set parentPipeId which connect between parent(%d) and child(%d)", + __FUNCTION__, __LINE__, parentPipeId, parentEntity->getPipeId(), newEntity->getPipeId()); + } + + newEntity->setNextEntity(tmpEntity); + return ret; +} + +ExynosCameraFrameEntity *ExynosCameraFrame::getFirstEntity(void) +{ + List::iterator r; + ExynosCameraFrameEntity *firstEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + firstEntity = NULL; + return firstEntity; + } + + r = m_linkageList.begin()++; + m_currentEntity = r; + firstEntity = *r; + + return firstEntity; +} + +ExynosCameraFrameEntity *ExynosCameraFrame::getNextEntity(void) +{ + ExynosCameraFrameEntity *nextEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + m_currentEntity++; + + if (m_currentEntity == m_linkageList.end()) { + return nextEntity; + } + + nextEntity = *m_currentEntity; + + return nextEntity; +} +/* Unused, but useful */ +/* +ExynosCameraFrameEntity *ExynosCameraFrame::getChildEntity(ExynosCameraFrameEntity *parentEntity) +{ + ExynosCameraFrameEntity *childEntity = NULL; + + if (parentEntity == NULL) { + ALOGE("ERR(%s):parentEntity is NULL", __FUNCTION__); + return childEntity; + } + + childEntity = parentEntity->getNextEntity(); + + return childEntity; +} +*/ + +ExynosCameraFrameEntity *ExynosCameraFrame::searchEntityByPipeId(uint32_t pipeId) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + int listSize = 0; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return NULL; + } + + listSize = m_linkageList.size(); + r = m_linkageList.begin(); + + for (int i = 0; i < listSize; i++) { + curEntity = *r; + if (curEntity == NULL) { + ALOGE("ERR(%s):curEntity is NULL, index(%d), linkageList size(%d)", + __FUNCTION__, i, listSize); + return NULL; + } + + while (curEntity != NULL) { + if (curEntity->getPipeId() == pipeId) + return curEntity; + curEntity = curEntity->getNextEntity(); + } + r++; + } + + ALOGD("DEBUG(%s):Cannot find matched entity, frameCount(%d), pipeId(%d)", __FUNCTION__, getFrameCount(), pipeId); + + return NULL; +} + +status_t ExynosCameraFrame::setSrcBuffer(uint32_t pipeId, + ExynosCameraBuffer srcBuf) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setSrcBuf(srcBuf); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set src buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::setDstBuffer(uint32_t pipeId, + ExynosCameraBuffer dstBuf, + uint32_t nodeIndex) +{ + status_t ret = NO_ERROR; + + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setDstBuf(dstBuf, nodeIndex); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + /* TODO: set buffer to child node's source */ + entity = entity->getNextEntity(); + if (entity != NULL) { + ret = entity->setSrcBuf(dstBuf); + if (ret 
!= NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + return ret; +} + +status_t ExynosCameraFrame::setDstBuffer(uint32_t pipeId, + ExynosCameraBuffer dstBuf, + uint32_t nodeIndex, + int parentPipeId) +{ + status_t ret = NO_ERROR; + + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setDstBuf(dstBuf, nodeIndex); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + /* TODO: set buffer to child node's source */ + entity = entity->getNextEntity(); + if (entity != NULL) { + bool flagSetChildEntity = false; + + /* + * it will set child's source buffer + * when no specific parent set. (for backward compatibility) + * when specific parent only. (for MCPipe) + */ + if (entity->flagSpecficParent() == true) { + if (parentPipeId == entity->getParentPipeId()) { + flagSetChildEntity = true; + } else { + ALOGV("DEBUG(%s[%d]):parentPipeId(%d) != entity->getParentPipeId()(%d). so skip setting child src Buf", + __FUNCTION__, __LINE__, parentPipeId, entity->getParentPipeId()); + } + } else { + /* this is for backward compatiblity */ + flagSetChildEntity = true; + } + + /* child mode need to setting next */ + if (flagSetChildEntity == true) { + ret = entity->setSrcBuf(dstBuf); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraFrame::getSrcBuffer(uint32_t pipeId, + ExynosCameraBuffer *srcBuf) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->getSrcBuf(srcBuf); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not get src buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::getDstBuffer(uint32_t pipeId, + ExynosCameraBuffer *dstBuf, + uint32_t nodeIndex) +{ + status_t ret = NO_ERROR; + + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->getDstBuf(dstBuf, nodeIndex); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not get dst buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::setSrcRect(uint32_t pipeId, ExynosRect srcRect) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setSrcRect(srcRect); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set src rect, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t 
ExynosCameraFrame::setDstRect(uint32_t pipeId, ExynosRect dstRect) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setDstRect(dstRect); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst rect, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + /* TODO: set buffer to child node's source */ + entity = entity->getNextEntity(); + if (entity != NULL) { + ret = entity->setSrcRect(dstRect); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst rect, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + return ret; +} + +status_t ExynosCameraFrame::getSrcRect(uint32_t pipeId, ExynosRect *srcRect) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->getSrcRect(srcRect); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not get src rect, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::getDstRect(uint32_t pipeId, ExynosRect *dstRect) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->getDstRect(dstRect); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not get dst rect, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::getSrcBufferState(uint32_t pipeId, + entity_buffer_state_t *state) +{ + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + *state = entity->getSrcBufState(); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getDstBufferState(uint32_t pipeId, + entity_buffer_state_t *state, + uint32_t nodeIndex) +{ + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + *state = entity->getDstBufState(nodeIndex); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::setSrcBufferState(uint32_t pipeId, + entity_buffer_state_t state) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setSrcBufState(state); + + return ret; +} + +status_t ExynosCameraFrame::setDstBufferState(uint32_t pipeId, + entity_buffer_state_t state, + uint32_t nodeIndex) +{ + status_t ret = NO_ERROR; + + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", 
__FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setDstBufState(state, nodeIndex); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set dst buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + /* Set buffer to child node's source */ + entity = entity->getNextEntity(); + if (entity != NULL) { + ret = entity->setSrcBufState(state); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Could not set src buffer, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + return ret; +} + +status_t ExynosCameraFrame::ensureSrcBufferState(uint32_t pipeId, + entity_buffer_state_t state) +{ + status_t ret = NO_ERROR; + int retry = 0; + entity_buffer_state_t curState; + + do { + ret = getSrcBufferState(pipeId, &curState); + if (ret != NO_ERROR) + continue; + + if (state == curState) { + ret = OK; + break; + } else { + ret = BAD_VALUE; + usleep(100); + } + + retry++; + if (retry == 10) + ret = TIMED_OUT; + } while (ret != OK && retry < 100); + + ALOGV("DEBUG(%s[%d]): retry count %d", __FUNCTION__, __LINE__, retry); + + return ret; +} + +status_t ExynosCameraFrame::ensureDstBufferState(uint32_t pipeId, + entity_buffer_state_t state) +{ + status_t ret = NO_ERROR; + int retry = 0; + entity_buffer_state_t curState; + + do { + ret = getDstBufferState(pipeId, &curState); + if (ret != NO_ERROR) + continue; + + if (state == curState) { + ret = OK; + break; + } else { + ret = BAD_VALUE; + usleep(100); + } + + retry++; + if (retry == 10) + ret = TIMED_OUT; + } while (ret != OK && retry < 100); + + ALOGV("DEBUG(%s[%d]): retry count %d", __FUNCTION__, __LINE__, retry); + + return ret; +} + +status_t ExynosCameraFrame::setEntityState(uint32_t pipeId, + entity_state_t state) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_entityLock); +#endif + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + if (entity->getEntityState() == ENTITY_STATE_COMPLETE && + state != ENTITY_STATE_REWORK) { + return NO_ERROR; + } + + if (state == ENTITY_STATE_COMPLETE) { + m_numCompletePipe++; + if (m_numCompletePipe >= m_numRequestPipe) + setFrameState(FRAME_STATE_COMPLETE); + } + + entity->setEntityState(state); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getEntityState(uint32_t pipeId, + entity_state_t *state) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_entityLock); +#endif + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + *state = entity->getEntityState(); + return NO_ERROR; +} + +status_t ExynosCameraFrame::getEntityBufferType(uint32_t pipeId, + entity_buffer_type_t *type) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_entityLock); +#endif + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + *type = entity->getBufType(); + return NO_ERROR; +} + +uint32_t ExynosCameraFrame::getFrameCount(void) +{ + return m_frameCount; +} + +status_t ExynosCameraFrame::setNumRequestPipe(uint32_t num) +{ + m_numRequestPipe = num; + return NO_ERROR; +} + +uint32_t ExynosCameraFrame::getNumRequestPipe(void) +{ + return m_numRequestPipe; +} + +bool ExynosCameraFrame::isComplete(void) +{ + 
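+    /*
+     * Completion is driven by setEntityState() above: every ENTITY_STATE_COMPLETE
+     * increments m_numCompletePipe, and once m_numCompletePipe >= m_numRequestPipe
+     * the frame state flips to FRAME_STATE_COMPLETE. Illustrative caller-side
+     * sketch (pipe ids are examples only; entities for them must already exist):
+     *
+     *   frame->setNumRequestPipe(2);
+     *   frame->setEntityState(PIPE_ISP, ENTITY_STATE_COMPLETE);
+     *   frame->setEntityState(PIPE_SCP, ENTITY_STATE_COMPLETE);
+     *   // isComplete() returns true from here on
+     */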
return checkFrameState(FRAME_STATE_COMPLETE); +} + +ExynosCameraFrameEntity *ExynosCameraFrame::getFrameDoneEntity(void) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return NULL; + } + + r = m_linkageList.begin()++; + curEntity = *r; + + while (r != m_linkageList.end()) { + if (curEntity != NULL) { + switch (curEntity->getEntityState()) { + case ENTITY_STATE_FRAME_SKIP: + case ENTITY_STATE_REWORK: + case ENTITY_STATE_FRAME_DONE: + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + return curEntity; + break; + default: + break; + } + } + r++; + curEntity = *r; + } + + return NULL; +} + +ExynosCameraFrameEntity *ExynosCameraFrame::getFrameDoneEntity(uint32_t pipeID) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return NULL; + } + + r = m_linkageList.begin()++; + curEntity = *r; + + while (r != m_linkageList.end()) { + if (curEntity != NULL && pipeID == curEntity->getPipeId()) { + switch (curEntity->getEntityState()) { + case ENTITY_STATE_FRAME_SKIP: + case ENTITY_STATE_REWORK: + case ENTITY_STATE_FRAME_DONE: + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + return curEntity; + break; + default: + break; + } + } + r++; + curEntity = *r; + } + + return NULL; +} + +ExynosCameraFrameEntity *ExynosCameraFrame::getFrameDoneFirstEntity(void) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return NULL; + } + + r = m_linkageList.begin()++; + curEntity = *r; + + while (r != m_linkageList.end()) { + if (curEntity != NULL) { + switch (curEntity->getEntityState()) { + case ENTITY_STATE_REWORK: + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + return curEntity; + break; + case ENTITY_STATE_FRAME_DONE: + return curEntity; + break; + case ENTITY_STATE_FRAME_SKIP: + case ENTITY_STATE_COMPLETE: + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + break; + default: + break; + } + } + r++; + curEntity = *r; + } + + return NULL; +} + +ExynosCameraFrameEntity *ExynosCameraFrame::getFrameDoneFirstEntity(uint32_t pipeID) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return NULL; + } + + r = m_linkageList.begin()++; + curEntity = *r; + + while (r != m_linkageList.end()) { + if (curEntity != NULL) { + switch (curEntity->getEntityState()) { + case ENTITY_STATE_REWORK: + if (curEntity->getPipeId() == pipeID) + return curEntity; + + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + break; + case ENTITY_STATE_FRAME_DONE: + if (curEntity->getPipeId() == pipeID) + return curEntity; + + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + break; + case ENTITY_STATE_FRAME_SKIP: + case ENTITY_STATE_COMPLETE: + if (curEntity->getNextEntity() != NULL) { + curEntity = curEntity->getNextEntity(); + continue; + } + break; + default: + 
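+                /* any other state: this chain is not done yet; move on to the next sibling in m_linkageList */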
break; + } + } + r++; + curEntity = *r; + } + + return NULL; +} + +status_t ExynosCameraFrame::skipFrame(void) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameStateLock); +#endif + m_frameState = FRAME_STATE_SKIPPED; + + return NO_ERROR; +} + +void ExynosCameraFrame::setFrameState(frame_status_t state) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameStateLock); +#endif + /* TODO: We need state machine */ + if (state > FRAME_STATE_INVALID) + m_frameState = FRAME_STATE_INVALID; + else + m_frameState = state; +} + +frame_status_t ExynosCameraFrame::getFrameState(void) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameStateLock); +#endif + return m_frameState; +} + +bool ExynosCameraFrame::checkFrameState(frame_status_t state) +{ +#ifdef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameStateLock); +#endif + return (m_frameState == state) ? true : false; +} + +void ExynosCameraFrame::printEntity(void) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + int listSize = 0; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return; + } + + listSize = m_linkageList.size(); + r = m_linkageList.begin(); + + ALOGD("DEBUG(%s): FrameCount(%d), request(%d), complete(%d)", __FUNCTION__, getFrameCount(), m_numRequestPipe, m_numCompletePipe); + + for (int i = 0; i < listSize; i++) { + curEntity = *r; + if (curEntity == NULL) { + ALOGE("ERR(%s):curEntity is NULL, index(%d)", __FUNCTION__, i); + return; + } + + ALOGD("DEBUG(%s):sibling id(%d), state(%d)", + __FUNCTION__, curEntity->getPipeId(), curEntity->getEntityState()); + + while (curEntity != NULL) { + ALOGD("DEBUG(%s):----- Child id(%d), state(%d)", + __FUNCTION__, curEntity->getPipeId(), curEntity->getEntityState()); + curEntity = curEntity->getNextEntity(); + } + r++; + } + + return; +} + +void ExynosCameraFrame::printNotDoneEntity(void) +{ + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + int listSize = 0; + + Mutex::Autolock l(m_linkageLock); + if (m_linkageList.empty()) { + ALOGE("ERR(%s):m_linkageList is empty", __FUNCTION__); + return; + } + + listSize = m_linkageList.size(); + r = m_linkageList.begin(); + + ALOGD("DEBUG(%s): FrameCount(%d), request(%d), complete(%d)", + __FUNCTION__, getFrameCount(), m_numRequestPipe, m_numCompletePipe); + + for (int i = 0; i < listSize; i++) { + curEntity = *r; + if (curEntity == NULL) { + ALOGE("ERR(%s):curEntity is NULL, index(%d)", __FUNCTION__, i); + return; + } + + if (curEntity->getEntityState() != ENTITY_STATE_COMPLETE) { + ALOGD("DEBUG(%s):sibling id(%d), state(%d)", + __FUNCTION__, curEntity->getPipeId(), curEntity->getEntityState()); + } + + while (curEntity != NULL) { + if (curEntity->getEntityState() != ENTITY_STATE_COMPLETE) { + ALOGD("DEBUG(%s):----- Child id(%d), state(%d)", + __FUNCTION__, curEntity->getPipeId(), curEntity->getEntityState()); + } + curEntity = curEntity->getNextEntity(); + } + r++; + } + + return; +} + +void ExynosCameraFrame::dump(void) +{ + printEntity(); + + for (int i = 0; i < MAX_NUM_PIPES; i ++) { + if (m_request[INDEX(i)] == true) + ALOGI("INFO(%s[%d]):pipeId(%d)'s request is ture", __FUNCTION__, __LINE__, i); + } +} + +void ExynosCameraFrame::frameLock(void) +{ +#ifndef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameLock); +#endif + m_frameLocked = true; +} + +void ExynosCameraFrame::frameUnlock(void) +{ +#ifndef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameLock); +#endif + 
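+    /* clears the protection set by frameLock(); callers are presumably expected to check getFrameLockState() before completing or freeing a frame (assumption: the checking code lives outside this file) */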
m_frameLocked = false; +} + +bool ExynosCameraFrame::getFrameLockState(void) +{ +#ifndef USE_FRAME_REFERENCE_COUNT + Mutex::Autolock lock(m_frameLock); +#endif + return m_frameLocked; +} + +status_t ExynosCameraFrame::initMetaData(struct camera2_shot_ext *shot) +{ + status_t ret = NO_ERROR; + + if (shot != NULL) { + ALOGV("DEBUG(%s[%d]): initialize shot_ext", __FUNCTION__, __LINE__); + memcpy(&m_metaData, shot, sizeof(struct camera2_shot_ext)); + } + + ret = m_parameters->duplicateCtrlMetadata(&m_metaData); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + return ret; +} + +status_t ExynosCameraFrame::getMetaData(struct camera2_shot_ext *shot) +{ + if (shot == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(shot, &m_metaData, sizeof(struct camera2_shot_ext)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::setMetaData(struct camera2_shot_ext *shot) +{ + if (shot == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(&m_metaData, shot, sizeof(struct camera2_shot_ext)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::storeDynamicMeta(struct camera2_shot_ext *shot) +{ + if (shot == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (getMetaDmRequestFrameCount(shot) == 0) + ALOGW("WRN(%s[%d]): DM Frame count is ZERO", __FUNCTION__, __LINE__); + + memcpy(&m_metaData.shot.dm, &shot->shot.dm, sizeof(struct camera2_dm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::storeDynamicMeta(struct camera2_dm *dm) +{ + if (dm == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (getMetaDmRequestFrameCount(dm) == 0) + ALOGW("WRN(%s[%d]): DM Frame count is ZERO", __FUNCTION__, __LINE__); + + memcpy(&m_metaData.shot.dm, dm, sizeof(struct camera2_dm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::storeUserDynamicMeta(struct camera2_shot_ext *shot) +{ + if (shot == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(&m_metaData.shot.udm, &shot->shot.udm, sizeof(struct camera2_udm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::storeUserDynamicMeta(struct camera2_udm *udm) +{ + if (udm == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(&m_metaData.shot.udm, udm, sizeof(struct camera2_udm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getDynamicMeta(struct camera2_shot_ext *shot) +{ + if (shot == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(&shot->shot.dm, &m_metaData.shot.dm, sizeof(struct camera2_dm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getDynamicMeta(struct camera2_dm *dm) +{ + if (dm == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(dm, &m_metaData.shot.dm, sizeof(struct camera2_dm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getUserDynamicMeta(struct camera2_shot_ext *shot) +{ + if (shot == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(&shot->shot.udm, &m_metaData.shot.udm, sizeof(struct camera2_udm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getUserDynamicMeta(struct camera2_udm *udm) +{ + if (udm == NULL) { + 
ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(udm, &m_metaData.shot.udm, sizeof(struct camera2_udm)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::setMetaDataEnable(bool flag) +{ + m_metaDataEnable = flag; + return NO_ERROR; +} + +bool ExynosCameraFrame::getMetaDataEnable() +{ + long count = 0; + + while (count < DM_WAITING_COUNT) { + if (m_metaDataEnable == true) { + if (0 < count) + ALOGD("DEBUG(%s[%d]): metadata enable count(%ld) ", __FUNCTION__, __LINE__, count); + + break; + } + + count++; + usleep(WAITING_TIME); + } + + return m_metaDataEnable; +} + +status_t ExynosCameraFrame::getNodeGroupInfo(struct camera2_node_group *node_group, int index) +{ + if (node_group == NULL) { + ALOGE("ERR(%s[%d]): node_group is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (index >= PERFRAME_NODE_GROUP_MAX) { + ALOGE("ERR(%s[%d]): index is bigger than PERFRAME_NODE_GROUP_MAX", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(node_group, &m_node_gorup[index], sizeof(struct camera2_node_group)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::storeNodeGroupInfo(struct camera2_node_group *node_group, int index) +{ + if (node_group == NULL) { + ALOGE("ERR(%s[%d]): node_group is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (index >= PERFRAME_NODE_GROUP_MAX) { + ALOGE("ERR(%s[%d]): index is bigger than PERFRAME_NODE_GROUP_MAX", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + memcpy(&m_node_gorup[index], node_group, sizeof(struct camera2_node_group)); + + return NO_ERROR; +} + +status_t ExynosCameraFrame::getNodeGroupInfo(struct camera2_node_group *node_group, int index, int *zoom) +{ + getNodeGroupInfo(node_group, index); + *zoom = m_zoom; + + return NO_ERROR; +} + +status_t ExynosCameraFrame::storeNodeGroupInfo(struct camera2_node_group *node_group, int index, int zoom) +{ + storeNodeGroupInfo(node_group, index); + m_zoom = zoom; + + return NO_ERROR; +} + +void ExynosCameraFrame::dumpNodeGroupInfo(const char *name) +{ + if (name != NULL) + ALOGD("INFO(%s[%d]):(%s)++++++++++++++++++++ frameCount(%d)", __FUNCTION__, __LINE__, name, m_frameCount); + else + ALOGD("INFO(%s[%d]):()++++++++++++++++++++ frameCount(%d)", __FUNCTION__, __LINE__, m_frameCount); + + for (int i = 0; i < PERFRAME_NODE_GROUP_MAX; i ++) { + ALOGI("INFO(%s[%d]):Leader[%d] (%d, %d, %d, %d)(%d, %d, %d, %d)(%d %d)", + __FUNCTION__, __LINE__, + i, + m_node_gorup[i].leader.input.cropRegion[0], + m_node_gorup[i].leader.input.cropRegion[1], + m_node_gorup[i].leader.input.cropRegion[2], + m_node_gorup[i].leader.input.cropRegion[3], + m_node_gorup[i].leader.output.cropRegion[0], + m_node_gorup[i].leader.output.cropRegion[1], + m_node_gorup[i].leader.output.cropRegion[2], + m_node_gorup[i].leader.output.cropRegion[3], + m_node_gorup[i].leader.request, + m_node_gorup[i].leader.vid); + + for (int j = 0; j < CAPTURE_NODE_MAX; j ++) { + ALOGI("INFO(%s[%d]):Capture[%d][%d] (%d, %d, %d, %d)(%d, %d, %d, %d)(%d, %d)", + __FUNCTION__, __LINE__, + i, + j, + m_node_gorup[i].capture[j].input.cropRegion[0], + m_node_gorup[i].capture[j].input.cropRegion[1], + m_node_gorup[i].capture[j].input.cropRegion[2], + m_node_gorup[i].capture[j].input.cropRegion[3], + m_node_gorup[i].capture[j].output.cropRegion[0], + m_node_gorup[i].capture[j].output.cropRegion[1], + m_node_gorup[i].capture[j].output.cropRegion[2], + m_node_gorup[i].capture[j].output.cropRegion[3], + m_node_gorup[i].capture[j].request, + m_node_gorup[i].capture[j].vid); + } + + if (name != 
NULL) + ALOGD("INFO(%s[%d]):(%s)------------------------ ", __FUNCTION__, __LINE__, name); + else + ALOGD("INFO(%s[%d]):()------------------------ ", __FUNCTION__, __LINE__); + } + + if (name != NULL) + ALOGD("INFO(%s[%d]):(%s)++++++++++++++++++++", __FUNCTION__, __LINE__, name); + else + ALOGD("INFO(%s[%d]):()++++++++++++++++++++", __FUNCTION__, __LINE__); + + return; +} + +void ExynosCameraFrame::setJpegSize(int size) +{ + m_jpegSize = size; +} + +int ExynosCameraFrame::getJpegSize(void) +{ + return m_jpegSize; +} + +int64_t ExynosCameraFrame::getTimeStamp(void) +{ + return (int64_t)getMetaDmSensorTimeStamp(&m_metaData); +} + +void ExynosCameraFrame::getFpsRange(uint32_t *min, uint32_t *max) +{ + getMetaCtlAeTargetFpsRange(&m_metaData, min, max); +} + +void ExynosCameraFrame::setRequest(bool tap, + bool tac, + bool isp, + bool scc, + bool dis, + bool scp) +{ + m_request[PIPE_3AP] = tap; + m_request[PIPE_3AC] = tac; + m_request[PIPE_ISP] = isp; + m_request[PIPE_SCC] = scc; + m_request[PIPE_DIS] = dis; + m_request[PIPE_SCP] = scp; +} + +void ExynosCameraFrame::setRequest(bool tap, + bool tac, + bool isp, + bool ispp, + bool ispc, + bool scc, + bool dis, + bool scp) +{ + setRequest(tap, + tac, + isp, + scc, + dis, + scp); + + m_request[PIPE_ISPP] = ispp; + m_request[PIPE_ISPC] = ispc; +} + +void ExynosCameraFrame::setRequest(uint32_t pipeId, bool val) +{ + switch (pipeId) { + case PIPE_3AC_FRONT: + case PIPE_3AC_REPROCESSING: + pipeId = PIPE_3AC; + break; + case PIPE_3AP_FRONT: + case PIPE_3AP_REPROCESSING: + pipeId = PIPE_3AP; + break; + case PIPE_ISP_FRONT: + pipeId = PIPE_ISP; + break; + case PIPE_ISPC_FRONT: + case PIPE_ISPC_REPROCESSING: + pipeId = PIPE_ISPC; + break; + case PIPE_SCC_FRONT: + case PIPE_SCC_REPROCESSING: + pipeId = PIPE_SCC; + break; + case PIPE_SCP_FRONT: + case PIPE_SCP_REPROCESSING: + pipeId = PIPE_SCP; + break; + default: + break; + } + + if ((pipeId % 100) >= MAX_NUM_PIPES) + ALOGW("WRN(%s[%d]):Invalid pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + else + m_request[INDEX(pipeId)] = val; +} + +bool ExynosCameraFrame::getRequest(uint32_t pipeId) +{ + bool request = false; + + switch (pipeId) { + case PIPE_3AC_FRONT: + case PIPE_3AC_REPROCESSING: + pipeId = PIPE_3AC; + break; + case PIPE_3AP_FRONT: + case PIPE_3AP_REPROCESSING: + pipeId = PIPE_3AP; + break; + case PIPE_ISP_FRONT: + pipeId = PIPE_ISP; + break; + case PIPE_ISPC_FRONT: + case PIPE_ISPC_REPROCESSING: + pipeId = PIPE_ISPC; + break; + case PIPE_SCC_FRONT: + case PIPE_SCC_REPROCESSING: + pipeId = PIPE_SCC; + break; + case PIPE_SCP_FRONT: + case PIPE_SCP_REPROCESSING: + pipeId = PIPE_SCP; + break; + default: + break; + } + + if ((pipeId % 100) >= MAX_NUM_PIPES) + ALOGW("WRN(%s[%d]):Invalid pipeId(%d)", __FUNCTION__, __LINE__, pipeId); + else + request = m_request[INDEX(pipeId)]; + + return request; +} + +bool ExynosCameraFrame::getIspDone(void) +{ + return m_ispDoneFlag; +} + +void ExynosCameraFrame::setIspDone(bool done) +{ + m_ispDoneFlag = done; +} + +bool ExynosCameraFrame::get3aaDrop() +{ + return m_3aaDropFlag; +} + +void ExynosCameraFrame::set3aaDrop(bool flag) +{ + m_3aaDropFlag = flag; +} + +void ExynosCameraFrame::setIspcDrop(bool flag) +{ + m_ispcDropFlag = flag; +} + +bool ExynosCameraFrame::getIspcDrop(void) +{ + return m_ispcDropFlag; +} + +void ExynosCameraFrame::setDisDrop(bool flag) +{ + m_disDropFlag = flag; +} + +bool ExynosCameraFrame::getDisDrop(void) +{ + return m_disDropFlag; +} + +bool ExynosCameraFrame::getScpDrop() +{ + return m_scpDropFlag; +} + +void 
ExynosCameraFrame::setScpDrop(bool flag) +{ + m_scpDropFlag = flag; +} + +bool ExynosCameraFrame::getSccDrop() +{ + return m_sccDropFlag; +} + +void ExynosCameraFrame::setSccDrop(bool flag) +{ + m_sccDropFlag = flag; +} + +uint32_t ExynosCameraFrame::getUniqueKey(void) +{ + return m_uniqueKey; +} + +status_t ExynosCameraFrame::setUniqueKey(uint32_t uniqueKey) +{ + m_uniqueKey = uniqueKey; + return NO_ERROR; +} + +#ifdef USE_FRAME_REFERENCE_COUNT +int32_t ExynosCameraFrame::incRef() +{ + Mutex::Autolock lock(m_refCountLock); + m_refCount++; + return m_refCount; +} + +int32_t ExynosCameraFrame::decRef() +{ + Mutex::Autolock lock(m_refCountLock); + m_refCount--; + if (m_refCount < 0) + ALOGE("ERR(%s[%d]):reference count must not be negative, m_refCount(%d)", __FUNCTION__, __LINE__, m_refCount); + return m_refCount; +} + +int32_t ExynosCameraFrame::getRef() +{ + Mutex::Autolock lock(m_refCountLock); + return m_refCount; +} +#endif + +status_t ExynosCameraFrame::setFrameInfo(ExynosCameraParameters *obj_param, uint32_t frameCount, uint32_t frameType) +{ + status_t ret = NO_ERROR; + + m_parameters = obj_param; + m_frameCount = frameCount; + m_frameType = frameType; + return ret; +} + +uint32_t ExynosCameraFrame::getFrameType() +{ + return m_frameType; +} + +status_t ExynosCameraFrame::m_init() +{ + m_numRequestPipe = 0; + m_numCompletePipe = 0; + m_frameState = FRAME_STATE_READY; + m_frameLocked = false; + m_metaDataEnable = false; + m_zoom = 0; + memset(&m_metaData, 0x0, sizeof(struct camera2_shot_ext)); + m_jpegSize = 0; + m_ispDoneFlag = false; + m_3aaDropFlag = false; + m_ispcDropFlag = false; + m_disDropFlag = false; + m_scpDropFlag = false; + m_sccDropFlag = false; + + for (int i = 0; i < MAX_NUM_PIPES; i++) + m_request[i] = false; + + m_uniqueKey = 0; + m_capture = 0; + m_recording = false; + m_preview = false; + m_previewCb = false; + m_serviceBayer = false; + m_zsl = false; + +#ifdef USE_FRAME_REFERENCE_COUNT + m_refCount = 1; +#endif + + for (int i = 0; i < PERFRAME_NODE_GROUP_MAX; i++) + memset(&m_node_gorup[i], 0x0, sizeof(struct camera2_node_group)); + ALOGV("DEBUG(%s[%d]): Generate frame type(%d), frameCount(%d)", __FUNCTION__, __LINE__, m_frameType, m_frameCount); + +#ifdef DEBUG_FRAME_MEMORY_LEAK + m_privateCheckLeakCount = 0; + m_countLock.lock(); + m_checkLeakCount ++; + m_privateCheckLeakCount = m_checkLeakCount; + ALOGE("CONSTRUCTOR (%lld)", m_privateCheckLeakCount); + m_countLock.unlock(); +#endif + + m_dupBufferInfo.streamID = 0; + m_dupBufferInfo.extScalerPipeID = 0; + + return NO_ERROR; +} + +status_t ExynosCameraFrame::m_deinit() +{ + ALOGV("DEBUG(%s[%d]): Delete frame type(%d), frameCount(%d)", __FUNCTION__, __LINE__, m_frameType, m_frameCount); +#ifdef DEBUG_FRAME_MEMORY_LEAK + ALOGI("DESTRUCTOR (%lld)", m_privateCheckLeakCount); +#endif + + List::iterator r; + ExynosCameraFrameEntity *curEntity = NULL; + ExynosCameraFrameEntity *tmpEntity = NULL; + + Mutex::Autolock l(m_linkageLock); + while (!m_linkageList.empty()) { + r = m_linkageList.begin()++; + if (*r) { + curEntity = *r; + + while (curEntity != NULL) { + tmpEntity = curEntity->getNextEntity(); + ALOGV("DEBUG(%s[%d]):delete entity, pipeId(%d)", __FUNCTION__, __LINE__, curEntity->getPipeId()); + + delete curEntity; + curEntity = tmpEntity; + } + + } + m_linkageList.erase(r); + } + + return NO_ERROR; +} + +status_t ExynosCameraFrame::setRotation(uint32_t pipeId, int rotation) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, 
pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setRotation(rotation); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):pipeId(%d)->setRotation(%d) fail", __FUNCTION__, __LINE__, pipeId, rotation); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::getRotation(uint32_t pipeId) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + return entity->getRotation(); +} + +#ifdef PERFRAME_CONTROL_FOR_FLIP +status_t ExynosCameraFrame::setFlipHorizontal(uint32_t pipeId, int flipHorizontal) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setFlipHorizontal(flipHorizontal); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):pipeId(%d)->setRotation(%d) fail", __FUNCTION__, __LINE__, pipeId, flipHorizontal); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::getFlipHorizontal(uint32_t pipeId) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + return entity->getFlipHorizontal(); +} + +status_t ExynosCameraFrame::setFlipVertical(uint32_t pipeId, int flipVertical) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + ret = entity->setFlipVertical(flipVertical); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):pipeId(%d)->setRotation(%d) fail", __FUNCTION__, __LINE__, pipeId, flipVertical); + return ret; + } + + return ret; +} + +status_t ExynosCameraFrame::getFlipVertical(uint32_t pipeId) +{ + status_t ret = NO_ERROR; + ExynosCameraFrameEntity *entity = searchEntityByPipeId(pipeId); + if (entity == NULL) { + ALOGE("ERR(%s[%d]):Could not find entity, pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + return entity->getFlipVertical(); +} +#endif + +/* + * ExynosCameraFrameEntity class + */ + +ExynosCameraFrameEntity::ExynosCameraFrameEntity( + uint32_t pipeId, + entity_type_t type, + entity_buffer_type_t bufType) +{ + m_pipeId = pipeId; + + if (m_setEntityType(type) != NO_ERROR) + ALOGE("ERR(%s[%d]):setEntityType fail, pipeId(%d), type(%d)", __FUNCTION__, __LINE__, pipeId, type); + + m_bufferType = bufType; + m_entityState = ENTITY_STATE_READY; + + m_prevEntity = NULL; + m_nextEntity = NULL; + + m_flagSpecificParent = false; + m_parentPipeId = -1; + + m_rotation = 0; + +#ifdef PERFRAME_CONTROL_FOR_FLIP + m_flipHorizontal = 0; + m_flipVertical = 0; +#endif +} + +status_t ExynosCameraFrameEntity::m_setEntityType(entity_type_t type) +{ + status_t ret = NO_ERROR; + + m_EntityType = type; + + switch (type) { + case ENTITY_TYPE_INPUT_ONLY: + m_srcBufState = ENTITY_BUFFER_STATE_REQUESTED; + m_dstBufState[DST_BUFFER_DEFAULT] = ENTITY_BUFFER_STATE_NOREQ; + break; + case ENTITY_TYPE_OUTPUT_ONLY: + m_srcBufState = ENTITY_BUFFER_STATE_NOREQ; + m_dstBufState[DST_BUFFER_DEFAULT] = ENTITY_BUFFER_STATE_REQUESTED; + break; + case ENTITY_TYPE_INPUT_OUTPUT: + m_srcBufState = 
ENTITY_BUFFER_STATE_REQUESTED; + m_dstBufState[DST_BUFFER_DEFAULT] = ENTITY_BUFFER_STATE_REQUESTED; + break; + default: + m_srcBufState = ENTITY_BUFFER_STATE_NOREQ; + m_dstBufState[DST_BUFFER_DEFAULT] = ENTITY_BUFFER_STATE_NOREQ; + m_EntityType = ENTITY_TYPE_INVALID; + ret = BAD_VALUE; + break; + } + + return ret; +} + +uint32_t ExynosCameraFrameEntity::getPipeId(void) +{ + return m_pipeId; +} + +status_t ExynosCameraFrameEntity::setSrcBuf(ExynosCameraBuffer buf) +{ + status_t ret = NO_ERROR; + + if (m_srcBufState == ENTITY_BUFFER_STATE_COMPLETE) { + ALOGV("WRN(%s[%d]):Buffer completed, state(%d)", __FUNCTION__, __LINE__, m_srcBufState); + return NO_ERROR; + } + + if (m_bufferType != ENTITY_BUFFER_DELIVERY && + m_srcBufState != ENTITY_BUFFER_STATE_REQUESTED) { + ALOGE("ERR(%s[%d]):Invalid buffer state(%d)", __FUNCTION__, __LINE__, m_srcBufState); + return INVALID_OPERATION; + } + + this->m_srcBuf = buf; + + ret = setSrcBufState(ENTITY_BUFFER_STATE_READY); + + return ret; +} + +status_t ExynosCameraFrameEntity::setDstBuf(ExynosCameraBuffer buf, uint32_t nodeIndex) +{ + status_t ret = NO_ERROR; + + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + if (m_bufferType != ENTITY_BUFFER_DELIVERY && + m_dstBufState[nodeIndex] != ENTITY_BUFFER_STATE_REQUESTED) { + ALOGE("ERR(%s[%d]):Invalid buffer state(%d)", __FUNCTION__, __LINE__, m_dstBufState[nodeIndex]); + return INVALID_OPERATION; + } + + this->m_dstBuf[nodeIndex] = buf; + ret = setDstBufState(ENTITY_BUFFER_STATE_READY, nodeIndex); + +#ifndef SUPPORT_DEPTH_MAP + /* HACK: Combine with old pipe */ + if (nodeIndex != DST_BUFFER_DEFAULT) + this->m_dstBuf[DST_BUFFER_DEFAULT] = buf; +#endif + + return ret; +} + +status_t ExynosCameraFrameEntity::getSrcBuf(ExynosCameraBuffer *buf) +{ + *buf = this->m_srcBuf; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::getDstBuf(ExynosCameraBuffer *buf, uint32_t nodeIndex) +{ + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + /* Comment out: It was collide with ExynosCamera's dirty dynamic bayer handling routine. + * (make error log, but no side effect) + * This code added for block human error. + * I will add this code after check the side effect closely. 
+ */ + /* + if (this->m_dstBuf[nodeIndex].index == -1) { + ALOGE("ERR(%s[%d]):Invalid buffer index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + */ + + *buf = this->m_dstBuf[nodeIndex]; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::setSrcRect(ExynosRect rect) +{ + this->m_srcRect = rect; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::setDstRect(ExynosRect rect) +{ + this->m_dstRect = rect; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::getSrcRect(ExynosRect *rect) +{ + *rect = this->m_srcRect; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::getDstRect(ExynosRect *rect) +{ + *rect = this->m_dstRect; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::setSrcBufState(entity_buffer_state_t state) +{ + if (m_srcBufState == ENTITY_BUFFER_STATE_COMPLETE) { + ALOGV("WRN(%s[%d]):Buffer completed, state(%d)", __FUNCTION__, __LINE__, m_srcBufState); + return NO_ERROR; + } + + m_srcBufState = state; + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::setDstBufState(entity_buffer_state_t state, uint32_t nodeIndex) +{ + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return BAD_VALUE; + } + + m_dstBufState[nodeIndex] = state; + +#ifndef SUPPORT_DEPTH_MAP + /* HACK: Combine with old pipe */ + if (nodeIndex != DST_BUFFER_DEFAULT) + m_dstBufState[DST_BUFFER_DEFAULT] = state; +#endif + + return NO_ERROR; +} + +entity_buffer_state_t ExynosCameraFrameEntity::getSrcBufState(void) +{ + return m_srcBufState; +} + +entity_buffer_state_t ExynosCameraFrameEntity::getDstBufState(uint32_t nodeIndex) +{ + if (nodeIndex >= DST_BUFFER_COUNT_MAX) { + ALOGE("ERR(%s[%d]):Invalid buffer index, index(%d)", __FUNCTION__, __LINE__, nodeIndex); + return ENTITY_BUFFER_STATE_INVALID; + } + + return m_dstBufState[nodeIndex]; +} + +entity_buffer_type_t ExynosCameraFrameEntity::getBufType(void) +{ + return m_bufferType; +} + +status_t ExynosCameraFrameEntity::setEntityState(entity_state_t state) +{ + this->m_entityState = state; + + return NO_ERROR; +} + +entity_state_t ExynosCameraFrameEntity::getEntityState(void) +{ + return this->m_entityState; +} + +ExynosCameraFrameEntity *ExynosCameraFrameEntity::getPrevEntity(void) +{ + return this->m_prevEntity; +} + +ExynosCameraFrameEntity *ExynosCameraFrameEntity::getNextEntity(void) +{ + return this->m_nextEntity; +} + +status_t ExynosCameraFrameEntity::setPrevEntity(ExynosCameraFrameEntity *entity) +{ + this->m_prevEntity = entity; + + return NO_ERROR; +} + +status_t ExynosCameraFrameEntity::setNextEntity(ExynosCameraFrameEntity *entity) +{ + this->m_nextEntity = entity; + + return NO_ERROR; +} + +bool ExynosCameraFrameEntity::flagSpecficParent(void) +{ + return m_flagSpecificParent; +} + +status_t ExynosCameraFrameEntity::setParentPipeId(enum pipeline parentPipeId) +{ + if (0 <= m_parentPipeId) { + ALOGE("ERR(%s[%d]):m_parentPipeId(%d) is already set. 
parentPipeId(%d)", + __FUNCTION__, __LINE__, m_parentPipeId, parentPipeId); + return BAD_VALUE; + } + + m_flagSpecificParent = true; + m_parentPipeId = parentPipeId; + + return NO_ERROR; +} + +int ExynosCameraFrameEntity::getParentPipeId(void) +{ + return m_parentPipeId; +} + +status_t ExynosCameraFrameEntity::setRotation(int rotation) +{ + m_rotation = rotation; + + return NO_ERROR; +} + +int ExynosCameraFrameEntity::getRotation(void) +{ + return m_rotation; +} + +#ifdef PERFRAME_CONTROL_FOR_FLIP +status_t ExynosCameraFrameEntity::setFlipHorizontal(int flipHorizontal) +{ + m_flipHorizontal = flipHorizontal; + + return NO_ERROR; +} + +int ExynosCameraFrameEntity::getFlipHorizontal(void) +{ + return m_flipHorizontal; +} + +status_t ExynosCameraFrameEntity::setFlipVertical(int flipVertical) +{ + m_flipVertical = flipVertical; + + return NO_ERROR; +} + +int ExynosCameraFrameEntity::getFlipVertical(void) +{ + return m_flipVertical; +} +#endif + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraFrame.h b/libcamera/common_v2/ExynosCameraFrame.h new file mode 100644 index 0000000..448bc22 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraFrame.h @@ -0,0 +1,528 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_FRAME_H +#define EXYNOS_CAMERA_FRAME_H + +#include + +#include "ExynosCameraParameters.h" +#include "ExynosCameraSensorInfo.h" +#include "ExynosCameraBuffer.h" + +/* #define DEBUG_FRAME_MEMORY_LEAK */ + +#ifndef MAX_NUM_PIPES +#define MAX_NUM_PIPES \ + GET_MAX_NUM((MAX_PIPE_NUM % 100),(MAX_PIPE_NUM_FRONT % 100),(MAX_PIPE_NUM_REPROCESSING % 100)) +#endif + +#ifndef INDEX +#define INDEX(x) ((x % 100) % MAX_NUM_PIPES) +#endif + +namespace android { + +/* Frame type for debugging */ +enum FRAME_TYPE { + FRAME_TYPE_OTHERS = 0, + FRAME_TYPE_PREVIEW = 1, + FRAME_TYPE_PREVIEW_FRONT = 2, + FRAME_TYPE_REPROCESSING = 3, + FRAME_TYPE_INTERNAL = 4, +}; + +typedef enum entity_type { + ENTITY_TYPE_INPUT_ONLY = 0, /* Need input buffer only */ + ENTITY_TYPE_OUTPUT_ONLY = 1, /* Need output buffer only */ + ENTITY_TYPE_INPUT_OUTPUT = 2, /* Need input/output both */ + ENTITY_TYPE_INVALID, +} entity_type_t; + +/* Entity state define */ +typedef enum entity_state { + ENTITY_STATE_NOP = 0, /* Do not need operation */ + ENTITY_STATE_READY = 1, /* Ready to operation */ + ENTITY_STATE_PROCESSING = 2, /* Processing stage */ + ENTITY_STATE_FRAME_DONE = 3, /* Pipe has been done HW operation. */ + ENTITY_STATE_COMPLETE = 4, /* Complete frame, done for all SW work. 
*/ + ENTITY_STATE_REWORK = 5, /* After COMPLETE, but the entity needs re-work */ + ENTITY_STATE_FRAME_SKIP = 6, /* Entity has been skipped */ +} entity_state_t; + +typedef enum entity_buffer_type { + ENTITY_BUFFER_FIXED = 0, /* Buffer never changes */ + ENTITY_BUFFER_DELIVERY = 1, /* Buffer change is possible */ + ENTITY_BUFFER_INVALID +} entity_buffer_type_t; + +typedef enum entity_buffer_state { + ENTITY_BUFFER_STATE_NOREQ = 0, /* This buffer is not used */ + ENTITY_BUFFER_STATE_REQUESTED = 1, /* Buffer is requested, not yet set */ + ENTITY_BUFFER_STATE_READY = 2, /* Buffer is ready */ + ENTITY_BUFFER_STATE_PROCESSING = 3, /* Buffer is being processed */ + ENTITY_BUFFER_STATE_COMPLETE = 4, /* Buffer is complete */ + ENTITY_BUFFER_STATE_ERROR = 5, /* Buffer error */ + ENTITY_BUFFER_STATE_INVALID, +} entity_buffer_state_t; + +enum DST_BUFFER_COUNT +{ + DST_BUFFER_DEFAULT = 0, + DST_BUFFER_COUNT_MAX = 18, /* this must be the same as MAX_NODE */ +}; + +class ExynosCameraFrameEntity { +public: + ExynosCameraFrameEntity( + uint32_t pipeId, + entity_type_t type, + entity_buffer_type_t bufType); + uint32_t getPipeId(void); + + status_t setSrcBuf(ExynosCameraBuffer buf); + status_t setDstBuf(ExynosCameraBuffer buf, uint32_t nodeIndex = 0); + + status_t getSrcBuf(ExynosCameraBuffer *buf); + status_t getDstBuf(ExynosCameraBuffer *buf, uint32_t nodeIndex = 0); + + status_t setSrcRect(ExynosRect rect); + status_t setDstRect(ExynosRect rect); + + status_t getSrcRect(ExynosRect *rect); + status_t getDstRect(ExynosRect *rect); + + status_t setSrcBufState(entity_buffer_state_t state); + status_t setDstBufState(entity_buffer_state_t state, uint32_t nodeIndex = 0); + + entity_buffer_state_t getSrcBufState(void); + entity_buffer_state_t getDstBufState(uint32_t nodeIndex = 0); + + entity_buffer_type_t getBufType(void); + + status_t setEntityState(entity_state_t state); + entity_state_t getEntityState(void); + + ExynosCameraFrameEntity *getPrevEntity(void); + ExynosCameraFrameEntity *getNextEntity(void); + + status_t setPrevEntity(ExynosCameraFrameEntity *entity); + status_t setNextEntity(ExynosCameraFrameEntity *entity); + + bool flagSpecficParent(void); + status_t setParentPipeId(enum pipeline parentPipeId); + int getParentPipeId(void); + + status_t setRotation(int rotation); + int getRotation(void); +#ifdef PERFRAME_CONTROL_FOR_FLIP + status_t setFlipHorizontal(int flipHorizontal); + int getFlipHorizontal(void); + status_t setFlipVertical(int flipVertical); + int getFlipVertical(void); +#endif +private: + status_t m_setEntityType(entity_type_t type); + +private: + uint32_t m_pipeId; + ExynosCameraBuffer m_srcBuf; + ExynosCameraBuffer m_dstBuf[DST_BUFFER_COUNT_MAX]; + + ExynosRect m_srcRect; + ExynosRect m_dstRect; + + entity_type_t m_EntityType; + entity_buffer_type_t m_bufferType; + + entity_buffer_state_t m_srcBufState; + entity_buffer_state_t m_dstBufState[DST_BUFFER_COUNT_MAX]; + entity_state_t m_entityState; + + ExynosCameraFrameEntity *m_prevEntity; + ExynosCameraFrameEntity *m_nextEntity; + + bool m_flagSpecificParent; + int m_parentPipeId; + + int m_rotation; +#ifdef PERFRAME_CONTROL_FOR_FLIP + int m_flipHorizontal; + int m_flipVertical; +#endif +}; + +/* Frame state define */ +typedef enum frame_status { + FRAME_STATE_READY = 0, /* Ready for operation */ + FRAME_STATE_RUNNING = 1, /* Frame is running */ + FRAME_STATE_COMPLETE = 2, /* Complete frame. */ + FRAME_STATE_SKIPPED = 3, /* This Frame has been skipped. 
*/ + FRAME_STATE_INVALID = 4, /* Invalid state */ +} frame_status_t; + +typedef struct ExynosCameraPerFrameInfo { + bool perFrameControlNode; + int perFrameNodeIndex; + int perFrameNodeVideID; +} camera_per_fream_into_t; + +typedef struct ExynosCameraDupBufferInfo { + int streamID; + int extScalerPipeID; +} dup_buffer_info_t; + +class ExynosCameraFrame { + +#ifdef USE_FRAMEMANAGER + friend class FrameWorker; + friend class CreateWorker; + friend class DeleteWorker; + friend class ExynosCameraFrameManager; +private: +#else +public: +#endif + + ExynosCameraFrame( + ExynosCameraParameters *obj_param, + uint32_t frameCount, + uint32_t frameType = 0); + ExynosCameraFrame(); + + ~ExynosCameraFrame(); + +public: + /* If curEntity is NULL, newEntity is added to m_linkageList */ + status_t addSiblingEntity( + ExynosCameraFrameEntity *curEntity, + ExynosCameraFrameEntity *newEntity); + status_t addChildEntity( + ExynosCameraFrameEntity *parentEntity, + ExynosCameraFrameEntity *newEntity); + status_t addChildEntity( + ExynosCameraFrameEntity *parentEntity, + ExynosCameraFrameEntity *newEntity, + int parentPipeId); + + ExynosCameraFrameEntity *getFirstEntity(void); + ExynosCameraFrameEntity *getNextEntity(void); + /* Unused, but useful */ + /* ExynosCameraFrameEntity *getChildEntity(ExynosCameraFrameEntity *parentEntity); */ + + ExynosCameraFrameEntity *searchEntityByPipeId(uint32_t pipeId); + + status_t setSrcBuffer( + uint32_t pipeId, + ExynosCameraBuffer srcBuf); + status_t setDstBuffer( + uint32_t pipeId, + ExynosCameraBuffer dstBuf, + uint32_t nodeIndex = 0); + status_t setDstBuffer( + uint32_t pipeId, + ExynosCameraBuffer dstBuf, + uint32_t nodeIndex, + int parentPipeId); + + status_t getSrcBuffer( + uint32_t pipeId, + ExynosCameraBuffer *srcBuf); + status_t getDstBuffer( + uint32_t pipeId, + ExynosCameraBuffer *dstBuf, + uint32_t nodeIndex = 0); + + status_t setSrcRect( + uint32_t pipeId, + ExynosRect srcRect); + status_t setDstRect( + uint32_t pipeId, + ExynosRect dstRect); + + status_t getSrcRect( + uint32_t pipeId, + ExynosRect *srcRect); + status_t getDstRect( + uint32_t pipeId, + ExynosRect *dstRect); + + status_t getSrcBufferState( + uint32_t pipeId, + entity_buffer_state_t *state); + status_t getDstBufferState( + uint32_t pipeId, + entity_buffer_state_t *state, + uint32_t nodeIndex = 0); + + status_t setSrcBufferState( + uint32_t pipeId, + entity_buffer_state_t state); + status_t setDstBufferState( + uint32_t pipeId, + entity_buffer_state_t state, + uint32_t nodeIndex = 0); + + status_t ensureSrcBufferState( + uint32_t pipeId, + entity_buffer_state_t state); + + status_t ensureDstBufferState( + uint32_t pipeId, + entity_buffer_state_t state); + + status_t setEntityState( + uint32_t pipeId, + entity_state_t state); + status_t getEntityState( + uint32_t pipeId, + entity_state_t *state); + + status_t getEntityBufferType( + uint32_t pipeId, + entity_buffer_type_t *type); + + void setRequest(bool tap, + bool tac, + bool isp, + bool scc, + bool dis, + bool scp); + + void setRequest(bool tap, + bool tac, + bool isp, + bool ispp, + bool ispc, + bool scc, + bool dis, + bool scp); + + void setRequest(uint32_t pipeId, bool val); + bool getRequest(uint32_t pipeId); + + uint32_t getFrameCount(void); + status_t setNumRequestPipe(uint32_t num); + uint32_t getNumRequestPipe(void); + + bool isComplete(void); + ExynosCameraFrameEntity *getFrameDoneEntity(void); + ExynosCameraFrameEntity *getFrameDoneEntity(uint32_t pipeID); + ExynosCameraFrameEntity *getFrameDoneFirstEntity(void); + 
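+    /* The pipeID overload below is assumed to narrow the lookup to entities of that pipe; its definition is earlier in ExynosCameraFrame.cpp. */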
ExynosCameraFrameEntity *getFrameDoneFirstEntity(uint32_t pipeID); + + status_t skipFrame(void); + + void setFrameState(frame_status_t state); + frame_status_t getFrameState(void); + bool checkFrameState(frame_status_t state); + + void printEntity(void); + void printNotDoneEntity(void); + void dump(void); + + void frameLock(void); + void frameUnlock(void); + bool getFrameLockState(void); + + status_t initMetaData(struct camera2_shot_ext *shot); + status_t getMetaData(struct camera2_shot_ext *shot); + status_t setMetaData(struct camera2_shot_ext *shot); + + status_t storeDynamicMeta(struct camera2_shot_ext *shot); + status_t storeDynamicMeta(struct camera2_dm *dm); + status_t storeUserDynamicMeta(struct camera2_shot_ext *shot); + status_t storeUserDynamicMeta(struct camera2_udm *udm); + + status_t getDynamicMeta(struct camera2_shot_ext *shot); + status_t getDynamicMeta(struct camera2_dm *dm); + status_t getUserDynamicMeta(struct camera2_shot_ext *shot); + status_t getUserDynamicMeta(struct camera2_udm *udm); + + status_t setMetaDataEnable(bool flag); + bool getMetaDataEnable(); + + status_t getNodeGroupInfo(struct camera2_node_group *node_group, int index); + status_t storeNodeGroupInfo(struct camera2_node_group *node_group, int index); + status_t getNodeGroupInfo(struct camera2_node_group *node_group, int index, int *zoom); + status_t storeNodeGroupInfo(struct camera2_node_group *node_group, int index, int zoom); + void dumpNodeGroupInfo(const char *name); + + void setJpegSize(int size); + int getJpegSize(void); + + int64_t getTimeStamp(void); + void getFpsRange(uint32_t *min, uint32_t *max); + + void setIspDone(bool done); + void set3aaDrop(bool flag); + void setIspcDrop(bool flag); + void setDisDrop(bool flag); + void setScpDrop(bool flag); + void setSccDrop(bool flag); + bool getIspDone(void); + bool get3aaDrop(void); + bool getIspcDrop(void); + bool getScpDrop(void); + bool getSccDrop(void); + bool getDisDrop(void); + + uint32_t getUniqueKey(void); + status_t setUniqueKey(uint32_t uniqueKey); + status_t setFrameInfo(ExynosCameraParameters *obj_param, uint32_t frameCount, uint32_t frameType); + uint32_t getFrameType(); + + void setFrameCapture(bool flag) { + m_capture = flag; + } + + bool getFrameCapture(void) { + return m_capture; + } + + void setFrameRecording(bool flag) { + m_recording = flag; + } + + bool getFrameRecording(void) { + return m_recording; + } + + void setFramePreview(bool flag) { + m_preview = flag; + } + + bool getFramePreview(void) { + return m_preview; + } + + void setFramePreviewCb(bool flag) { + m_previewCb = flag; + } + + bool getFramePreviewCb(void) { + return m_previewCb; + } + + void setFrameServiceBayer(bool flag) { + m_serviceBayer = flag; + } + + bool getFrameServiceBayer(void) { + return m_serviceBayer; + } + + void setFrameZsl(bool flag) { + m_zsl = flag; + } + + bool getFrameZsl(void) { + return m_zsl; + } + + dup_buffer_info_t getDupBufferInfo() { + return m_dupBufferInfo; + } + + void setDupBufferInfo(dup_buffer_info_t dupBufferInfo) { + m_dupBufferInfo = dupBufferInfo; + return; + } + +#ifdef USE_FRAME_REFERENCE_COUNT + int32_t incRef(void); + int32_t decRef(); + int32_t getRef(); +#endif + +#ifdef DEBUG_FRAME_MEMORY_LEAK + long long int getCheckLeakCount(); +#endif + + status_t setRotation(uint32_t pipeId, int rotation); + int getRotation(uint32_t pipeId); +#ifdef PERFRAME_CONTROL_FOR_FLIP + status_t setFlipHorizontal(uint32_t pipeId, int flipHorizontal); + int getFlipHorizontal(uint32_t pipeId); + status_t setFlipVertical(uint32_t pipeId, int 
flipVertical); + int getFlipVertical(uint32_t pipeId); +#endif + +private: + status_t m_init(); + status_t m_deinit(); + +private: + mutable Mutex m_linkageLock; + List m_linkageList; + List::iterator m_currentEntity; + + ExynosCameraParameters *m_parameters; + uint32_t m_frameCount; + uint32_t m_frameType; + + frame_status_t m_frameState; + mutable Mutex m_frameStateLock; + mutable Mutex m_frameLock; + + uint32_t m_numRequestPipe; + uint32_t m_numCompletePipe; + + bool m_frameLocked; + + bool m_metaDataEnable; + struct camera2_shot_ext m_metaData; + struct camera2_node_group m_node_gorup[PERFRAME_NODE_GROUP_MAX]; + int m_zoom; + + int m_jpegSize; + + bool m_request[MAX_NUM_PIPES]; + + bool m_ispDoneFlag; + bool m_3aaDropFlag; + bool m_ispcDropFlag; + bool m_disDropFlag; + bool m_sccDropFlag; + bool m_scpDropFlag; + + static unsigned int m_totalLeakCount; + uint32_t m_uniqueKey; + bool m_capture; + bool m_recording; + bool m_preview; + bool m_previewCb; + bool m_serviceBayer; + bool m_zsl; + +#ifdef USE_FRAME_REFERENCE_COUNT + mutable Mutex m_refCountLock; + int32_t m_refCount; + mutable Mutex m_entityLock; +#endif + +#ifdef DEBUG_FRAME_MEMORY_LEAK + static Mutex m_countLock; + static unsigned long long m_checkLeakCount; + unsigned long long m_privateCheckLeakCount; +#endif + dup_buffer_info_t m_dupBufferInfo; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/ExynosCameraFrameManager.cpp b/libcamera/common_v2/ExynosCameraFrameManager.cpp new file mode 100644 index 0000000..8ba7392 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraFrameManager.cpp @@ -0,0 +1,1118 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameManager" +#include + +#include "ExynosCameraFrameManager.h" + +namespace android { + +FrameWorker::FrameWorker(const char* name, int cameraid, FRAMEMGR_OPER::MODE operMode) +{ + m_cameraId = cameraid; + strncpy(m_name, name, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_operMode = operMode; + m_init(); +} + +FrameWorker::~FrameWorker() +{ + m_deinit(); +} + +status_t FrameWorker::m_init() +{ + m_key = 0; + m_enable = false; + return 0; +} + +status_t FrameWorker::m_deinit() +{ + return 0; +} + +uint32_t FrameWorker::getKey() +{ + return m_key; +} + +status_t FrameWorker::setKey(uint32_t key) +{ + m_key = key; + return 0; +} + +status_t FrameWorker::m_setEnable(bool enable) +{ + Mutex::Autolock lock(m_enableLock); + status_t ret = FRAMEMGR_ERRCODE::OK; + m_enable = enable; + return ret; +} + +bool FrameWorker::m_getEnable() +{ + Mutex::Autolock lock(m_enableLock); + return m_enable; +} + +status_t FrameWorker::m_delete(ExynosCameraFrame *frame) +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + delete frame; + frame = NULL; + + return ret; +} + +CreateWorker::CreateWorker(const char* name, + int cameraid, + FRAMEMGR_OPER::MODE operMode, + int32_t margin):FrameWorker(name, cameraid, operMode) +{ + CLOGD("DEBUG(%s): Worker CREATE(name(%s) cameraid(%d) mode(%d) margin(%d) ", + __FUNCTION__, name, cameraid, operMode, margin); + m_init(); + m_setMargin(margin); +} +CreateWorker::~CreateWorker() +{ + CLOGD("DEBUG(%s): Worker DELETE(name(%s) cameraid(%d) mode(%d) ", + __FUNCTION__, m_name, m_cameraId, m_operMode); + m_deinit(); +} + +status_t CreateWorker::m_deinit() +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + ExynosCameraFrame *frame = NULL; + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_setEnable(false); + if (m_worklist != NULL) { + delete m_worklist; + m_worklist = NULL; + } + + if (m_lock != NULL) { + delete m_lock; + m_lock = NULL; + } + + break; + case FRAMEMGR_OPER::SLIENT: + m_setEnable(false); + m_thread->requestExitAndWait(); + m_setMargin(0); + if (m_worklist != NULL) { + while (m_worklist->getSizeOfProcessQ() > 0) { + m_worklist->popProcessQ(&frame); + m_delete(frame); + frame = NULL; + } + + delete m_worklist; + m_worklist = NULL; + } + + if (m_lock != NULL) { + delete m_lock; + m_lock = NULL; + } + + break; + case FRAMEMGR_OPER::NONE: + default: + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + + return ret; +} + +Mutex* CreateWorker::getLock() +{ + return m_lock; +} + +status_t CreateWorker::execute(__unused ExynosCameraFrame* inframe, ExynosCameraFrame** outframe) +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + if (m_getEnable() == false) { + CLOGE("ERR(%s[%d]): invalid state, Need to start Worker before execute", __FUNCTION__, __LINE__); + ret = FRAMEMGR_ERRCODE::ERR; + return ret; + } + + *outframe = m_execute(); + if (*outframe == NULL) { + CLOGE("ERR(%s[%d]): m_execute is invalid (outframe = NULL)", __FUNCTION__, __LINE__); + ret = FRAMEMGR_ERRCODE::ERR; + } + return ret; +} + +status_t CreateWorker::start() +{ + if (m_worklist->getSizeOfProcessQ() > 0) { + CLOGD("DEBUG(%s[%d]):previous worklist size(%d)", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + m_worklist->release(); + } + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_setEnable(true); + break; + case FRAMEMGR_OPER::SLIENT: + m_setEnable(true); + m_thread->run(); + + if (m_thread->isRunning() == false) + m_thread->run(); + break; + case FRAMEMGR_OPER::NONE: + default: + 
CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + return 0; +} + +status_t CreateWorker::stop() +{ + ExynosCameraFrame *frame = NULL; + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_setEnable(false); + break; + case FRAMEMGR_OPER::SLIENT: + m_setEnable(false); + m_thread->requestExitAndWait(); + CLOGD("DEBUG(%s[%d]): worker stopped remove ths remained frames(%d)", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + + while (m_worklist->getSizeOfProcessQ() > 0) { + m_worklist->popProcessQ(&frame); + m_delete(frame); + frame = NULL; + } + break; + case FRAMEMGR_OPER::NONE: + default: + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + + return 0; +} + +status_t CreateWorker::m_setMargin(int32_t numOfMargin) +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + m_numOfMargin = numOfMargin; + return ret; +} + +int32_t CreateWorker::m_getMargin() +{ + return m_numOfMargin; +} + + +ExynosCameraFrame* CreateWorker::m_execute() +{ + ExynosCameraFrame *frame = NULL; + int32_t ret = NO_ERROR; + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + frame = new ExynosCameraFrame(); + break; + case FRAMEMGR_OPER::SLIENT: + m_worklist->popProcessQ(&frame); + if (frame == NULL) { + ret = m_thread->run(); + m_worklist->waitAndPopProcessQ(&frame); + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]): getframe failed, processQ size (%d)", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + CLOGE("ERR(%s[%d]): Thread return (%d) Thread Run Status (%d)", + __FUNCTION__, __LINE__, ret, m_thread->isRunning()); + } + + m_thread->run(); + break; + case FRAMEMGR_OPER::NONE: + default: + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + return frame; +} + +status_t CreateWorker::m_init() +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_worklist = new frame_queue_t; + m_lock = new Mutex(); + break; + case FRAMEMGR_OPER::SLIENT: + m_thread = new FrameManagerThread(this, + static_cast(&CreateWorker::workerMain), + "Create Frame Thread", + PRIORITY_URGENT_DISPLAY); + m_worklist = new frame_queue_t; + m_lock = new Mutex(); + break; + case FRAMEMGR_OPER::NONE: + default: + m_worklist = NULL; + m_lock = NULL; + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + ret = FRAMEMGR_ERRCODE::ERR; + break; + } + + return ret; +} + +bool CreateWorker::workerMain() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + int32_t ret = FRAMEMGR_ERRCODE::OK; + bool loop = true; + ExynosCameraFrame *frame = NULL; + + if (m_getEnable() == false) { + loop = false; + CLOGD("DEBUG(%s[%d]): Create worker stopped delete current frame(%d)", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + + while (m_worklist->getSizeOfProcessQ() > 0) { + frame = NULL; + ret = m_worklist->waitAndPopProcessQ(&frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + continue; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + continue; + } + m_delete(frame); + } + + return loop; + } + + while (m_worklist->getSizeOfProcessQ() < m_getMargin()) { + frame = NULL; + frame = new ExynosCameraFrame(); + m_worklist->pushProcessQ(&frame); + } + + // HACK, 2014.10.04 + // This sleep is added to prevent high CPU laod as busy wating. 
+ // But this sleep must be deleted. + usleep(1000); + + if (m_worklist->getSizeOfProcessQ() < m_getMargin() ) + loop = true; + + return loop; + +} + +DeleteWorker::DeleteWorker(const char* name, + int cameraid, + FRAMEMGR_OPER::MODE operMode):FrameWorker(name, + cameraid, + operMode) +{ + CLOGD("DEBUG(%s): Worker CREATE(name(%s) cameraid(%d) mode(%d) ", + __FUNCTION__, name, cameraid, operMode); + m_operMode = operMode; + m_init(); +} + +DeleteWorker::~DeleteWorker() +{ + CLOGD("DEBUG(%s): Worker DELETE(name(%s) cameraid(%d) mode(%d) ", + __FUNCTION__, m_name, m_cameraId, m_operMode); + m_deinit(); +} + +status_t DeleteWorker::m_deinit() +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + ExynosCameraFrame *frame = NULL; + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_setEnable(false); + if (m_worklist != NULL) { + delete m_worklist; + m_worklist = NULL; + } + + if (m_lock != NULL) { + delete m_lock; + m_lock = NULL; + } + break; + case FRAMEMGR_OPER::SLIENT: + m_setEnable(false); + m_thread->requestExitAndWait(); + + if (m_worklist != NULL) { + while (m_worklist->getSizeOfProcessQ() > 0) { + m_worklist->popProcessQ(&frame); + m_delete(frame); + } + + delete m_worklist; + m_worklist = NULL; + } + + if (m_lock != NULL) { + delete m_lock; + m_lock = NULL; + } + break; + case FRAMEMGR_OPER::NONE: + default: + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + + return ret; +} + +Mutex* DeleteWorker::getLock() +{ + return m_lock; +} + +status_t DeleteWorker::execute(ExynosCameraFrame* inframe, __unused ExynosCameraFrame** outframe) +{ + status_t ret = FRAMEMGR_ERRCODE::OK; + if (m_getEnable() == false) { + CLOGE("ERR(%s[%d]): invalid state, Need to start Worker before execute", __FUNCTION__, __LINE__); + ret = FRAMEMGR_ERRCODE::ERR; + return ret; + } + + ret = m_execute(inframe); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_execute is invalid ret(%d)", __FUNCTION__, __LINE__, ret); + ret = FRAMEMGR_ERRCODE::ERR; + } + + return ret; +} + +status_t DeleteWorker::start() +{ + if (m_worklist->getSizeOfProcessQ() > 0) { + CLOGD("DEBUG(%s[%d]):previous worklist size(%d), clear worklist", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + m_worklist->release(); + } + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_setEnable(true); + break; + case FRAMEMGR_OPER::SLIENT: + m_setEnable(true); + + if (m_thread->isRunning() == false) + m_thread->run(); + break; + case FRAMEMGR_OPER::NONE: + default: + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + return 0; +} + +status_t DeleteWorker::stop() +{ + ExynosCameraFrame *frame = NULL; + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_setEnable(false); + break; + case FRAMEMGR_OPER::SLIENT: + m_setEnable(false); + m_thread->requestExitAndWait(); + CLOGD("DEBUG(%s[%d]): worker stopped remove ths remained frames(%d)", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + + while (m_worklist->getSizeOfProcessQ() > 0) { + m_worklist->popProcessQ(&frame); + m_delete(frame); + frame = NULL; + } + break; + case FRAMEMGR_OPER::NONE: + default: + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + + return 0; +} + +status_t DeleteWorker::m_execute(ExynosCameraFrame* frame) +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_delete(frame); + break; + case FRAMEMGR_OPER::SLIENT: + m_worklist->pushProcessQ(&frame); 
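+            /* The frame is only queued here; the delete thread kicked below reaps and frees it asynchronously in workerMain(). */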
+ m_thread->run(); + break; + case FRAMEMGR_OPER::NONE: + default: + ret = FRAMEMGR_ERRCODE::ERR; + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + return ret; +} + +status_t DeleteWorker::m_init() +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + m_worklist = new frame_queue_t; + m_lock = new Mutex(); + break; + case FRAMEMGR_OPER::SLIENT: + m_thread = new FrameManagerThread(this, + static_cast(&DeleteWorker::workerMain), + "Delete Frame Thread", + PRIORITY_URGENT_DISPLAY); + m_worklist = new frame_queue_t; + m_lock = new Mutex(); + break; + case FRAMEMGR_OPER::NONE: + default: + m_worklist = NULL; + m_lock = NULL; + ret = FRAMEMGR_ERRCODE::ERR; + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + + return ret; +} + +bool DeleteWorker::workerMain() +{ +#ifdef DEBUG + ExynosCameraAutoTimer autoTimer(__FUNCTION__); +#endif + int32_t ret = FRAMEMGR_ERRCODE::OK; + bool loop = true; + ExynosCameraFrame *frame = NULL; + + if (m_getEnable() == false) { + loop = false; + CLOGD("DEBUG(%s[%d]): Delete worker stopped, delete remaining frames(%d)", + __FUNCTION__, __LINE__, m_worklist->getSizeOfProcessQ()); + + while (m_worklist->getSizeOfProcessQ() > 0) { + frame = NULL; + ret = m_worklist->waitAndPopProcessQ(&frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + m_delete(frame); + } + return loop; + } + + while (m_worklist->getSizeOfProcessQ() > 0) { + frame = NULL; + ret = m_worklist->waitAndPopProcessQ(&frame); + if (ret < 0) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is NULL", __FUNCTION__, __LINE__); + goto func_exit; + } + + m_delete(frame); + } +func_exit: + + // HACK, 2014.10.04 + // This sleep is added to prevent high CPU load from busy waiting. + // But this sleep must be deleted. 
+ usleep(2000); + + if (m_worklist->getSizeOfProcessQ() > 0) + loop = true; + + return loop; + +} + +ExynosCameraFrameManager::ExynosCameraFrameManager(const char* name, + int cameraid, + FRAMEMGR_OPER::MODE operMode, + uint32_t dumpmargin, + uint32_t dumpmarginCount) +{ + ALOGD("DEBUG(%s): FrameManager CREATE(name(%s) cameraid(%d) mode(%d)", + __FUNCTION__, name, cameraid, operMode); + + m_cameraId = cameraid; + m_dumpMargin = dumpmargin; + m_dumpMarginCount = dumpmarginCount; + strncpy(m_name, name, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + m_setOperMode(operMode); + m_init(); +} + +ExynosCameraFrameManager::~ExynosCameraFrameManager() +{ + CLOGD("DEBUG(%s): FrameManager DELETE(name(%s) cameraid(%d) mode(%d)", + __FUNCTION__, m_name, m_cameraId, m_operMode); + m_deinit(); +} + +ExynosCameraFrame* ExynosCameraFrameManager::createFrame(ExynosCameraParameters *param, uint32_t framecnt, uint32_t frametype) +{ + return m_createFrame(param, framecnt, frametype); +} + +status_t ExynosCameraFrameManager::deleteFrame(ExynosCameraFrame* frame) +{ + return m_deleteFrame(frame); +} + +status_t ExynosCameraFrameManager::setKeybox(sp keybox) +{ + Mutex::Autolock lock(m_stateLock); + m_keybox = keybox; + return 0; +} + +sp ExynosCameraFrameManager::getKeybox() +{ + Mutex::Autolock lock(m_stateLock); + return m_keybox; +} + +status_t ExynosCameraFrameManager::setWorker(int key, sp worker) +{ + Mutex::Autolock lock(m_stateLock); + map >::iterator iter; + pair >::iterator,bool> workerRet; + pair::iterator,bool> mutexRet; + + Mutex* locker = NULL; + + iter = m_workerList.find(key); + if (iter != m_workerList.end()) { + CLOGE("ERR(%s[%d]): already worker is EXIST(%d)", __FUNCTION__, __LINE__, key); + } else { + m_setupWorkerInfo(key, worker); + } + + return 0; +} + +status_t ExynosCameraFrameManager::setOperMode(FRAMEMGR_OPER::MODE mode) +{ + return m_setOperMode(mode); +} + +int ExynosCameraFrameManager::getOperMode() +{ + return m_getOperMode(); +} + +status_t ExynosCameraFrameManager::m_init() +{ + int ret = FRAMEMGR_ERRCODE::OK; + + m_keybox = NULL; + + m_lock = new Mutex(); + m_setEnable(false); + return ret; +} + +status_t ExynosCameraFrameManager::m_setupWorkerInfo(int key, sp worker) +{ + int ret = FRAMEMGR_ERRCODE::OK; + pair >::iterator,bool> workerRet; + pair::iterator,bool> mutexRet; + Mutex* locker = NULL; + + workerRet = m_workerList.insert( pair >(key, worker)); + if (workerRet.second == false) { + ret = FRAMEMGR_ERRCODE::ERR; + CLOGE("ERR(%s[%d]): work insert failed(%d)", __FUNCTION__, __LINE__, key); + } + return ret; +} + +sp ExynosCameraFrameManager::getWorker(int key) +{ + Mutex::Autolock lock(m_stateLock); + sp ret; + map >::iterator iter; + + iter = m_workerList.find(key); + if (iter != m_workerList.end()) { + ret = iter->second; + } else { + CLOGE("ERR(%s[%d]): worker is not EXIST(%d)", __FUNCTION__, __LINE__, key); + ret = NULL; + } + + return ret; +} + +sp ExynosCameraFrameManager::eraseWorker(int key) +{ + Mutex::Autolock lock(m_stateLock); + sp ret; + map >::iterator iter; + + iter = m_workerList.find(key); + if (iter != m_workerList.end()) { + ret = iter->second; + m_workerList.erase(iter); + } else { + CLOGE("ERR(%s[%d]): worker is not EXIST(%d)", __FUNCTION__, __LINE__, key); + ret = NULL; + } + return ret; +} + +status_t ExynosCameraFrameManager::start() +{ + Mutex::Autolock lock(m_stateLock); + status_t ret = FRAMEMGR_ERRCODE::OK; + sp worker = NULL; + + + if (m_getEnable() == true) { + CLOGD("DEBUG(%s[%d]):frameManager already start!!", __FUNCTION__, __LINE__); + return ret; + } + 
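+    /* Scoped block below: clear any frames left in m_runningFrameList from a previous session while m_lock is held, then start every registered worker. */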
+ { + Mutex::Autolock _l(m_lock); + if (m_runningFrameList.size() > 0) { + CLOGD("DEBUG(%s[%d]):previous runningFrameList size(%d), clear list", + __FUNCTION__, __LINE__, m_runningFrameList.size()); + m_runningFrameList.clear(); + } + } + + map >::iterator iter; + for (iter = m_workerList.begin() ; iter != m_workerList.end() ; ++iter) { + worker = iter->second; + ret = worker->start(); + } + + m_setEnable(true); + + return ret; +} + +status_t ExynosCameraFrameManager::stop() +{ + Mutex::Autolock lock(m_stateLock); + status_t ret = FRAMEMGR_ERRCODE::OK; + sp worker; + + if (m_getEnable() == false) { + CLOGD("DEBUG(%s[%d]):frameManager already stop!!", __FUNCTION__, __LINE__); + return ret; + } + + m_setEnable(false); + + map >::iterator iter; + for (iter = m_workerList.begin() ; iter != m_workerList.end() ; ++iter) { + worker = iter->second; + ret = worker->stop(); + } + + return ret; +} + +int ExynosCameraFrameManager::deleteAllFrame() +{ + Mutex::Autolock lock(m_stateLock); + status_t ret = NO_ERROR; + ExynosCameraFrame *frame = NULL; + + if (m_getEnable() == true) { + CLOGE("ERR(%s[%d]): invalid state, module state enabled, state must disabled", __FUNCTION__, __LINE__); + return ret; + } + + map::iterator frameIter; + + m_lock->lock(); + + int runningFrameListSize = m_runningFrameList.size(); + + if (runningFrameListSize == 0) { + CLOGD("DEBUG(%s[%d]):No memory leak detected m_runningFrameList.size()(%d)", + __FUNCTION__, __LINE__, runningFrameListSize); + } else { + CLOGW("WARN(%s[%d]):%d memory leak detected m_runningFrameList.size()(%d)", + __FUNCTION__, __LINE__, runningFrameListSize, runningFrameListSize); + } + + for (frameIter = m_runningFrameList.begin() ; frameIter != m_runningFrameList.end() ;) { + frame = frameIter->second; + + CLOGW("WARN(%s[%d]):delete memory leak detected FrameKey(%d) HAL-FrameCnt(%d) FrameType(%u)", + __FUNCTION__, __LINE__, frame->getUniqueKey(), frame->getFrameCount(), frame->getFrameType()); + + m_runningFrameList.erase(frameIter++); + SAFE_DELETE(frame); + } + + m_runningFrameList.clear(); + + m_lock->unlock(); + + return ret; +} + +ExynosCameraFrame* ExynosCameraFrameManager::m_createFrame(ExynosCameraParameters *param, + uint32_t framecnt, + uint32_t frametype) +{ + int ret = FRAMEMGR_ERRCODE::OK; + uint32_t key = 0; + ExynosCameraFrame *frame = NULL; + map >::iterator iter; + sp worker = NULL; + if (m_getEnable() == false) { + CLOGE("ERR(%s[%d]): module state disabled", __FUNCTION__, __LINE__); + return frame; + } + + iter = m_workerList.find(FRAMEMGR_WORKER::CREATE); + worker = iter->second; + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + case FRAMEMGR_OPER::SLIENT: + worker->execute(NULL, &frame); + if (frame == NULL) { + CLOGE("ERR(%s[%d]): Frame is NULL", __FUNCTION__, __LINE__); + return frame; + } + frame->setUniqueKey(m_keybox->createKey()); + frame->setFrameInfo(param ,framecnt, frametype); + ret = m_insertFrame(frame, &m_runningFrameList, m_lock); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_insertFrame is Failed ", __FUNCTION__, __LINE__); + return NULL; + } + m_dumpFrameMargin(); + break; + } + + return frame; +} + +status_t ExynosCameraFrameManager::m_deleteFrame(ExynosCameraFrame* frame) +{ + int ret = FRAMEMGR_ERRCODE::OK; + ExynosCameraFrame* removeFrame = NULL; + map >::iterator iter; + sp worker; + + if (m_getEnable() == false) { + CLOGE("ERR(%s[%d]): module state disabled", __FUNCTION__, __LINE__); + ret = FRAMEMGR_ERRCODE::ERR; + return ret; + } + + iter = m_workerList.find(FRAMEMGR_WORKER::DELETE); + worker = 
iter->second; + + switch (m_operMode) { + case FRAMEMGR_OPER::ONDEMAND: + case FRAMEMGR_OPER::SLIENT: + removeFrame = m_removeFrame(frame, &m_runningFrameList, m_lock); + if (removeFrame == NULL) { +#ifndef USE_FRAME_REFERENCE_COUNT + CLOGE("ERR(%s[%d]): Frame is NULL", __FUNCTION__, __LINE__); +#else + /* in case of ths USE_FRAME_REFERENCE_COUNT function */ + /* The frame must delete that refcount is ZERO */ +#endif + ret = FRAMEMGR_ERRCODE::ERR; + + return ret; + } + worker->execute(frame, NULL); + m_dumpFrameMargin(); + break; + case FRAMEMGR_OPER::NONE: + default: + ret = FRAMEMGR_ERRCODE::ERR; + break; + } + + return ret; +} + +status_t ExynosCameraFrameManager::dump() +{ + status_t ret = FRAMEMGR_ERRCODE::OK; + m_dumpFrame(); + return ret; +} + +void ExynosCameraFrameManager::m_dumpFrameMargin() +{ + int32_t frameCount = 0; + static uint32_t count = 0; ; + + + frameCount = m_runningFrameList.size(); + + if (m_dumpMargin < frameCount) + { + count++; + if (count % m_dumpMarginCount == 0) { + CLOGW("WARN(%s[%d]):Suspect memory leak of Frame. m_dumpMargin(%d) m_dumpMarginCount(%d) m_runningFrameList.size()(%zu)", + __FUNCTION__, __LINE__, m_dumpMargin, m_dumpMarginCount, m_runningFrameList.size()); + + m_dumpFrame(); + } + } + +} + +status_t ExynosCameraFrameManager::m_dumpFrame() +{ + status_t ret = FRAMEMGR_ERRCODE::OK; + map::iterator frameIter; + ExynosCameraFrame *frame = NULL; + + m_lock->lock(); + CLOGD("DEBUG(%s[%d]):(%s) ++++++++++++++++++++", __FUNCTION__, __LINE__, m_name); + + CLOGD("DEBUG(%s[%d]):m_dumpMargin(%d) m_dumpMarginCount(%d) m_runningFrameList.size()(%zu)", + __FUNCTION__, __LINE__, m_dumpMargin, m_dumpMarginCount, m_runningFrameList.size()); + int testCount = 0; + for (frameIter = m_runningFrameList.begin() ; frameIter != m_runningFrameList.end() ; ++frameIter) { + frame = frameIter->second; + CLOGD("DEBUG(%s[%d]): RunningFrame UniqueKey(%d)", __FUNCTION__, __LINE__, frame->getUniqueKey()); + if (++testCount >= 5) + break; + } + + CLOGD("DEBUG(%s[%d]):(%s) ------------------------------", __FUNCTION__, __LINE__, m_name); + m_lock->unlock(); + + /* HACK FOR DEBUGGING : ANR DETECTION */ + if (m_runningFrameList.size() > 300) { +#ifdef AVOID_ASSERT_FRAME + CLOGE("ERR(%s[%d]): too many frames - m_runningFrameList.size(%d)", + __FUNCTION__, __LINE__, m_runningFrameList.size()); +#else + android_printAssert(NULL, LOG_TAG, "HACK For ANR DEBUGGING"); +#endif + } + + return ret; +} + +status_t ExynosCameraFrameManager::m_deinit() +{ + status_t ret = FRAMEMGR_ERRCODE::OK; + ExynosCameraFrame *frame = NULL; + sp worker; + + map >::iterator iter; + for (iter = m_workerList.begin() ; iter != m_workerList.end() ; ++iter) { + worker = iter->second; + m_workerList.erase(iter++); + worker = NULL; + } + + m_workerList.clear(); + + map::iterator frameIter; + + if (m_lock == NULL) { + CLOGE("ERR(%s[%d]): lock is NULL", __FUNCTION__, __LINE__); + ret = FRAMEMGR_ERRCODE::ERR; + return ret; + } + + m_lock->lock(); + for (frameIter = m_runningFrameList.begin() ; frameIter != m_runningFrameList.end() ;) { + frame = frameIter->second; + m_runningFrameList.erase(frameIter++); + delete frame; + frame = NULL; + } + m_runningFrameList.clear(); + m_lock->unlock(); + + CLOGD("DEBUG(%s[%d]): delete m_lock", __FUNCTION__, __LINE__); + delete m_lock; + m_lock = NULL; + + if (m_keybox != NULL) { + CLOGD("DEBUG(%s[%d]): delete m_keybox", __FUNCTION__, __LINE__); + m_keybox = NULL; + } + + return ret; +} + +status_t ExynosCameraFrameManager::m_insertFrame(ExynosCameraFrame* frame, + map *list, + Mutex 
*lock) +{ + lock->lock(); + pair::iterator,bool> listRet; + int32_t ret = FRAMEMGR_ERRCODE::OK; + + listRet = list->insert( pair(frame->getUniqueKey(), frame)); + if (listRet.second == false) { + ret = FRAMEMGR_ERRCODE::ERR; + CLOGE("ERR(%s[%d]): insertFrame fail frame already exist!! HAL frameCnt( %d ) UniqueKey ( %u ) ", + __FUNCTION__, __LINE__, frame->getFrameCount(), frame->getUniqueKey()); + } + lock->unlock(); + return ret; +} + +ExynosCameraFrame* ExynosCameraFrameManager::m_removeFrame(ExynosCameraFrame* frame, + map *list, + Mutex *lock) +{ + map::iterator iter; + pair::iterator,bool> listRet; + ExynosCameraFrame *ret = NULL; + + lock->lock(); + iter = list->find(frame->getUniqueKey()); + if (iter != list->end()) { + ret = iter->second; +#ifdef USE_FRAME_REFERENCE_COUNT + if (ret->getRef() != 0) { + lock->unlock(); + return NULL; + } +#endif + list->erase( iter ); + } else { + CLOGE("ERR(%s[%d]): frame is not EXIST HAL-FrameCnt(%d) UniqueKey(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount(), frame->getUniqueKey()); + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraFrameManager::m_setOperMode(FRAMEMGR_OPER::MODE mode) +{ + int32_t ret = FRAMEMGR_ERRCODE::OK; + switch (mode) { + case FRAMEMGR_OPER::ONDEMAND: + case FRAMEMGR_OPER::SLIENT: + m_operMode = mode; + break; + default: + m_operMode = -1; + ret = FRAMEMGR_ERRCODE::ERR; + CLOGE("ERR(%s[%d]): operMode is invalid operMode(%d)", __FUNCTION__, __LINE__, m_operMode); + break; + } + + return ret; +} + +int ExynosCameraFrameManager::m_getOperMode() +{ + return m_operMode; +} + +status_t ExynosCameraFrameManager::m_setEnable(bool enable) +{ + Mutex::Autolock lock(m_enableLock); + status_t ret = FRAMEMGR_ERRCODE::OK; + m_enable = enable; + return ret; +} + +bool ExynosCameraFrameManager::m_getEnable() +{ + Mutex::Autolock lock(m_enableLock); + return m_enable; +} + + + + +//************FrameManager END *************************// + + + + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraFrameManager.h b/libcamera/common_v2/ExynosCameraFrameManager.h new file mode 100644 index 0000000..d7f605b --- /dev/null +++ b/libcamera/common_v2/ExynosCameraFrameManager.h @@ -0,0 +1,321 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/*! 
+ * \file ExynosCameraFrameManager.h + * \brief header file for ExynosCameraFrameManager + * \author suyoung lee(suyoung80.lee@samsung.com) + * \date 2014/05/08 +*/ + +#ifndef EXYNOS_CAMERA_FRAME_MANAGER_H +#define EXYNOS_CAMERA_FRAME_MANAGER_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include "csc.h" + +#include "ExynosCameraParameters.h" +#include "ExynosCameraThread.h" +#include "ExynosCameraFrame.h" +#include "ExynosCameraList.h" + +namespace android { + +using namespace std; + +class FrameWorker; + +typedef ExynosCameraList frame_queue_t; +typedef ExynosCameraThread FrameManagerThread; + +namespace FRAMEMGR_ERRCODE { + enum STATUS { + OK = 0, + ERR = -1, + ERR_STATE_ + }; +}; + +namespace FRAMEMGR_OPER { + enum MODE { + NONE = 0, + ONDEMAND = 1, + SLIENT = 2 + }; +}; + +namespace FRAMEMGR_WORKER { + enum TYPE { + NONE = 0, + CREATE = 1, + DELETE = 2 + }; +}; + +namespace DEBUG_LEVEL { + enum STATUS { + ERROR = 1, + DEBUG = 2, + TRACE = 3, + DEFAULT = DEBUG + }; +}; + + + +class KeyBox : public virtual RefBase{ +public: + KeyBox(const char* name, int cameraid) { + + m_cameraId = cameraid; + strncpy(m_name, name, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + init(); + } + virtual ~KeyBox() { } + + status_t init() { + m_uniqueKey = 0; + return 0; + } + + uint32_t createKey() { + Mutex::Autolock lock(m_lock); + + return m_uniqueKey++; + } + + uint32_t getKey() { + Mutex::Autolock lock(m_lock); + + return m_uniqueKey; + } + status_t setKey(uint32_t key){ + Mutex::Autolock lock(m_lock); + + m_uniqueKey = key; + return 0; + } + +private: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + uint32_t m_uniqueKey; + Mutex m_lock; + +}; + + +class FrameWorker : public virtual RefBase { + +public: + FrameWorker() {}; + FrameWorker(const char* name, int cameraid, FRAMEMGR_OPER::MODE m_operMode); + virtual ~FrameWorker(); + + virtual uint32_t getKey(); + virtual status_t setKey(uint32_t key); + virtual Mutex* getLock() = 0; + + virtual status_t execute(ExynosCameraFrame* inframe, ExynosCameraFrame** outframe) = 0; + virtual status_t start() = 0; + virtual status_t stop() = 0; + + +protected: + virtual status_t m_deinit(); + virtual status_t m_init(); + + virtual status_t m_delete(ExynosCameraFrame *frame); + + virtual status_t m_setEnable(bool enable); + virtual bool m_getEnable(); + virtual bool workerMain() = 0; + +private: + + +protected: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + sp m_thread; + FRAMEMGR_OPER::MODE m_operMode; + +private: + uint32_t m_key; + bool m_enable; + Mutex m_enableLock; + +}; + +typedef bool (FrameWorker::*func_ptr_t_)(); + + + +class CreateWorker : public FrameWorker{ + +public: + CreateWorker() {}; + CreateWorker(const char* name, int cameraid, FRAMEMGR_OPER::MODE operMode, int32_t margin); + virtual ~CreateWorker(); + + virtual Mutex* getLock(); + + virtual status_t execute(ExynosCameraFrame* inframe, ExynosCameraFrame** outframe); + virtual status_t start(); + virtual status_t stop(); + +protected: + virtual bool workerMain(); + virtual status_t m_init(); + virtual status_t m_deinit(); + +private: + virtual status_t m_setMargin(int32_t numOfMargin); + virtual int32_t m_getMargin(); + virtual ExynosCameraFrame* m_execute(); + + +private: + frame_queue_t *m_worklist; + Mutex *m_lock; + int32_t m_numOfMargin; + +}; + +class DeleteWorker : public FrameWorker{ + +public: + DeleteWorker() {}; + DeleteWorker(const char* name, int cameraid, 
FRAMEMGR_OPER::MODE operMode); + virtual ~DeleteWorker(); + + virtual Mutex* getLock(); + + virtual status_t execute(ExynosCameraFrame* inframe, ExynosCameraFrame** outframe); + virtual status_t start(); + virtual status_t stop(); + +protected: + virtual bool workerMain(); + virtual status_t m_deinit(); + virtual status_t m_init(); + +private: + virtual status_t m_execute(ExynosCameraFrame* frame); + +private: + frame_queue_t *m_worklist; + Mutex *m_lock; + +}; + +class ExynosCameraFrameManager { + +public: + ExynosCameraFrameManager() {}; + ExynosCameraFrameManager(const char* name, int cameraid, FRAMEMGR_OPER::MODE operMode, uint32_t dumpmargin, uint32_t dumpmarginCount); + virtual ~ExynosCameraFrameManager(); + + ExynosCameraFrame* createFrame(ExynosCameraParameters *param, uint32_t framecnt, uint32_t frametype = 0); + + status_t deleteFrame(ExynosCameraFrame* frame); + status_t setKeybox(sp keybox); + sp getKeybox(); + status_t setWorker(int key, sp worker); + sp getWorker(int key); + sp eraseWorker(int key); + + status_t setOperMode(FRAMEMGR_OPER::MODE mode); + int getOperMode(); + + status_t start(); + status_t stop(); + + int deleteAllFrame(); + + status_t dump(); + +private: + status_t m_init(); + + status_t m_deinit(); + status_t m_setupWorkerInfo(int key, sp worker); + + ExynosCameraFrame* m_createFrame(ExynosCameraParameters *param, uint32_t framecnt, uint32_t frametype = 0); + status_t m_deleteFrame(ExynosCameraFrame* frame); + + + status_t m_insertFrame(ExynosCameraFrame* frame, map *list, Mutex *lock); + ExynosCameraFrame* m_removeFrame(ExynosCameraFrame* frame, map *list, Mutex *lock); + status_t m_delete(ExynosCameraFrame* frame); + + status_t m_setOperMode(FRAMEMGR_OPER::MODE mode); + int m_getOperMode(); + + status_t m_setEnable(bool enable); + bool m_getEnable(); + + status_t m_incRef(ExynosCameraFrame* frame); + status_t m_decRef(ExynosCameraFrame* frame); + + void m_dumpFrameMargin(); + status_t m_dumpFrame(); + status_t m_dumpRef(); + + +public: + + +private: + int m_cameraId; + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + map m_runningFrameList; + Mutex *m_lock; + sp m_keybox; + map > m_workerList; + + int m_operMode; + bool m_enable; + Mutex m_enableLock; + int32_t m_dumpMargin; + int32_t m_dumpMarginCount; + Mutex m_stateLock; + + + +}; + + + + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCameraFrameSelector.cpp b/libcamera/common_v2/ExynosCameraFrameSelector.cpp new file mode 100644 index 0000000..5fb602d --- /dev/null +++ b/libcamera/common_v2/ExynosCameraFrameSelector.cpp @@ -0,0 +1,853 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameSelector" + +#include "ExynosCameraFrameSelector.h" + +#define FLASHED_LLS_COUNT 4 + +namespace android { + +#ifdef USE_FRAMEMANAGER +ExynosCameraFrameSelector::ExynosCameraFrameSelector(ExynosCameraParameters *param, + ExynosCameraBufferManager *bufMgr, + ExynosCameraFrameManager *manager +#ifdef SUPPORT_DEPTH_MAP + , depth_callback_queue_t *depthCallbackQ + , ExynosCameraBufferManager *depthMapbufMgr +#endif + ) +#else + ExynosCameraFrameSelector::ExynosCameraFrameSelector(ExynosCameraParameters *param, + ExynosCameraBufferManager *bufMgr) +#endif +{ +#ifdef USE_FRAMEMANAGER + m_frameMgr = manager; +#endif + m_parameters = param; + m_bufMgr= bufMgr; + m_activityControl = m_parameters->getActivityControl(); + + m_frameHoldList.setWaitTime(2000000000); +#ifdef RAWDUMP_CAPTURE + m_RawFrameHoldList.setWaitTime(2000000000); +#endif + + m_reprocessingCount = 0; + m_frameHoldCount = 1; + m_isFirstFrame = true; + isCanceled = false; + + m_CaptureCount = 0; +#ifdef SUPPORT_DEPTH_MAP + m_depthCallbackQ = depthCallbackQ; + m_depthMapbufMgr = depthMapbufMgr; +#endif +} + +ExynosCameraFrameSelector::~ExynosCameraFrameSelector() +{ +} + +status_t ExynosCameraFrameSelector::m_release(ExynosCameraList *list) +{ + int ret = 0; + ExynosCameraFrame *frame = NULL; + while (list->getSizeOfProcessQ() > 0) { + ret = m_popQ(list, &frame, true, 1); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):getBufferToManageQ fail", __FUNCTION__, __LINE__); + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } else { + m_frameComplete(frame, true); + } + } + return ret; +} + +status_t ExynosCameraFrameSelector::manageFrameHoldListForDynamicBayer(ExynosCameraFrame *frame) +{ + int ret = 0; + + if (frame == NULL) { + ALOGE("ERR(%s[%d]): frame is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } +#ifdef USE_FRAME_REFERENCE_COUNT + frame->incRef(); +#endif + m_pushQ(&m_frameHoldList, frame, true); + ALOGI("INFO(%s[%d]): frameCount(%d) m_frameHoldList size(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount(), m_frameHoldList.getSizeOfProcessQ()); + + return ret; +} + +status_t ExynosCameraFrameSelector::m_manageHdrFrameHoldList(ExynosCameraFrame *frame, + int pipeID, + bool isSrc, + int32_t dstPos) +{ + int ret = 0; + ExynosCameraBuffer buffer; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraActivitySpecialCapture *m_sCaptureMgr = NULL; + unsigned int hdrFcount = 0; + unsigned int fliteFcount = 0; + newFrame = frame; + + m_sCaptureMgr = m_activityControl->getSpecialCaptureMgr(); + hdrFcount = m_sCaptureMgr->getHdrDropFcount(); + hdrFcount += m_parameters->getHDRDelay(); + + ret = m_getBufferFromFrame(newFrame, pipeID, isSrc, &buffer, dstPos); + if( ret != NO_ERROR ) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s)", + __FUNCTION__, __LINE__, pipeID, (isSrc)?"Src":"Dst"); + } + + if (m_parameters->getUsePureBayerReprocessing() == true) { + camera2_shot_ext *shot_ext = NULL; + shot_ext = (camera2_shot_ext *)(buffer.addr[1]); + if (shot_ext != NULL) + fliteFcount = shot_ext->shot.dm.request.frameCount; + else + ALOGE("ERR(%s[%d]):fliteReprocessingBuffer is null", __FUNCTION__, __LINE__); + } else { + camera2_stream *shot_stream = NULL; + shot_stream = (camera2_stream *)(buffer.addr[1]); + if (shot_stream != NULL) + fliteFcount = shot_stream->fcount; + else + ALOGE("ERR(%s[%d]):fliteReprocessingBuffer is null", __FUNCTION__, __LINE__); + } + + if (hdrFcount + 1 == fliteFcount || hdrFcount + 2 == fliteFcount || 
hdrFcount + 3 == fliteFcount) { + ALOGI("INFO(%s[%d]):hdrFcount %d, fliteFcount %d", __FUNCTION__, __LINE__, hdrFcount, fliteFcount); + m_pushQ(&m_hdrFrameHoldList, newFrame, true); + } else { + m_frameComplete(newFrame, false, pipeID, isSrc, dstPos, true); + newFrame = NULL; + } + + return ret; +} + +/* It's for dynamic bayer */ +ExynosCameraFrame* ExynosCameraFrameSelector::selectDynamicFrames(__unused int count, + int pipeID, + bool isSrc, + int tryCount, + int32_t dstPos) +{ + return m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); +} + +ExynosCameraFrame* ExynosCameraFrameSelector::selectCaptureFrames(int count, + uint32_t frameCount, + int pipeID, + bool isSrc, + int tryCount, + int32_t dstPos) +{ + ExynosCameraFrame* selectedFrame = NULL; + ExynosCameraActivityFlash *m_flashMgr = NULL; + + m_reprocessingCount = count; + m_flashMgr = m_activityControl->getFlashMgr(); + + if (m_flashMgr->getNeedCaptureFlash() == true) { + selectedFrame = m_selectFlashFrameV2(pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):Failed to selectFlashFrame", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } else { + selectedFrame = m_selectCaptureFrame(frameCount, pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):Failed to selectCaptureFrame", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } + + return selectedFrame; +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectFocusedFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame* selectedFrame = NULL; + struct camera2_shot_ext shot_ext; + memset(&shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + + for (int i = 0; i < CAPTURE_WAITING_COUNT; i++) { + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):selectedFrame is NULL", __FUNCTION__, __LINE__); + break; + } + + /* get dynamic metadata for af state */ + if (selectedFrame->getMetaDataEnable() == false) + ALOGD("DEBUG(%s[%d]):Waiting for update metadata failed (%d) ", __FUNCTION__, __LINE__, ret); + selectedFrame->getDynamicMeta(&shot_ext); + + /* Skip focusing frame */ + if (m_activityControl->flagFocusing(&shot_ext, m_parameters->getFocusMode()) == true) { + ALOGD("DEBUG(%s[%d]):skip focusing frame(count %d)", + __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return NULL; + } else { + m_frameComplete(selectedFrame, false, pipeID, isSrc, dstPos, true); + selectedFrame = NULL; + } + } else { + ALOGD("DEBUG(%s[%d]):focusing complete (count %d)", + __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + break; + } + + usleep(DM_WAITING_TIME); + } + + return selectedFrame; +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectFlashFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame* selectedFrame = NULL; + ExynosCameraBuffer selectedBuffer; + int bufferFcount = 0; + int waitFcount = 0; + int totalWaitingCount = 0; + + /* Choose bayerBuffer to process reprocessing */ + while (totalWaitingCount <= (FLASH_MAIN_TIMEOUT_COUNT + m_parameters->getReprocessingBayerHoldCount())) { + /* Start main flash & Get best frame count for flash */ + if (waitFcount == 0) { + waitFcount = m_activityControl->startMainFlash() + 1; + ALOGD("DEBUG(%s):best frame count for 
flash capture : %d", __FUNCTION__, waitFcount); + } + + ret = m_waitAndpopQ(&m_frameHoldList, &selectedFrame, false, tryCount); + if (ret < 0 || selectedFrame == NULL) { + ALOGD("DEBUG(%s[%d]):getFrame Fail ret(%d)", __FUNCTION__, __LINE__, ret); + return NULL; + } else if (isCanceled == true) { + ALOGD("DEBUG(%s[%d]):isCanceled", __FUNCTION__, __LINE__); + if (selectedFrame != NULL) { + m_LockedFrameComplete(selectedFrame, pipeID, isSrc, dstPos); + } + return NULL; + } + + ALOGD("DEBUG(%s[%d]):Frame Count(%d)", __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + + /* Handling exception cases : like preflash is not processed */ + if (waitFcount < 0) { + ALOGW("WARN(%s[%d]):waitFcount is negative : preflash is not processed", __FUNCTION__, __LINE__); + + return NULL; + } + + if (isCanceled == true) { + ALOGD("DEBUG(%s[%d]):isCanceled", __FUNCTION__, __LINE__); + + return NULL; + } + + ret = m_getBufferFromFrame(selectedFrame, pipeID, isSrc, &selectedBuffer, dstPos); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s) bufferPtr(%p)", + __FUNCTION__, __LINE__, pipeID, (isSrc)?"Src":"Dst", selectedBuffer); + } + + if (m_isFrameMetaTypeShotExt() == true) { + camera2_shot_ext *shot_ext = NULL; + shot_ext = (camera2_shot_ext *)(selectedBuffer.addr[1]); + if (shot_ext != NULL) + bufferFcount = shot_ext->shot.dm.request.frameCount; + else + ALOGE("ERR(%s[%d]):selectedBuffer is null", __FUNCTION__, __LINE__); + } else { + camera2_stream *shot_stream = NULL; + shot_stream = (camera2_stream *)(selectedBuffer.addr[1]); + if (shot_stream != NULL) + bufferFcount = shot_stream->fcount; + else + ALOGE("ERR(%s[%d]):selectedBuffer is null", __FUNCTION__, __LINE__); + } + + /* Put mismatched buffer */ + if (waitFcount != bufferFcount) { + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return NULL; + } else { + m_frameComplete(selectedFrame, false, pipeID, isSrc, dstPos, true); + selectedFrame = NULL; + } + } + + if (waitFcount <= bufferFcount) { + break; + } + + totalWaitingCount++; + ALOGD("DEBUG(%s[%d]) (totalWaitingCount %d)", __FUNCTION__, __LINE__, totalWaitingCount); + } + + if (totalWaitingCount > FLASH_MAIN_TIMEOUT_COUNT) { + ALOGW("WARN(%s[%d]):fail to get bayer frame count for flash capture (totalWaitingCount %d)", + __FUNCTION__, __LINE__, totalWaitingCount); + } + + ALOGD("DEBUG(%s[%d]):waitFcount : %d, bufferFcount : %d", + __FUNCTION__, __LINE__, waitFcount, bufferFcount); + + /* Stop main flash */ + m_activityControl->stopMainFlash(); + + return selectedFrame; + +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectHdrFrame(__unused int pipeID, __unused bool isSrc, int tryCount, __unused int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame* selectedFrame = NULL; + + ret = m_waitAndpopQ(&m_hdrFrameHoldList, &selectedFrame, false, tryCount); + if( ret < 0 || selectedFrame == NULL ) { + ALOGD("DEBUG(%s[%d]):getFrame Fail ret(%d)", __FUNCTION__, __LINE__, ret); + return NULL; + } + + return selectedFrame; + +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectBurstFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame* selectedFrame = NULL; + + ExynosCameraActivityFlash *m_flashMgr = m_activityControl->getFlashMgr(); + + for (int i = 0; i < TOTAL_WAITING_TIME; i += DM_WAITING_TIME) { + if (m_isFirstFrame == true) { + selectedFrame = m_selectFocusedFrame(pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) { + 
ALOGE("ERR(%s[%d]):selectedFrame is NULL", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } else { + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + + /* Skip flash frame */ + if (m_flashMgr->checkPreFlash() == true) { + if (m_flashMgr->checkFlashOff() == false) { + ALOGD("DEBUG(%s[%d]):skip flash frame(count %d)", + __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return NULL; + } else { + m_frameComplete(selectedFrame, false, pipeID, isSrc, dstPos, true); + selectedFrame = NULL; + } + } else { + ALOGD("DEBUG(%s[%d]):flash off done (count %d)", + __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + break; + } + } else { + ALOGD("DEBUG(%s[%d]):pre-flash off", __FUNCTION__, __LINE__); + break; + } + usleep(DM_WAITING_TIME); + } + + return selectedFrame; +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectCaptureFrame(uint32_t frameCount, int pipeID, bool isSrc, int tryCount, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame *selectedFrame = NULL; + + for (int i = 0; i < CAPTURE_WAITING_COUNT; i++) { + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):selectedFrame is NULL", __FUNCTION__, __LINE__); + break; + } + + if (selectedFrame->getFrameCount() < frameCount) { + ALOGD("DEBUG(%s[%d]):skip capture frame(count %d), waiting frame(count %d)", + __FUNCTION__, __LINE__, selectedFrame->getFrameCount(), frameCount); + + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return NULL; + } else { + m_frameComplete(selectedFrame, false, pipeID, isSrc, dstPos, true); + selectedFrame = NULL; + } + } else { + ALOGD("DEBUG(%s[%d]):capture frame (count %d)", + __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + break; + } + } + + return selectedFrame; +} + +status_t ExynosCameraFrameSelector::m_getBufferFromFrame(ExynosCameraFrame *frame, + int pipeID, + bool isSrc, + ExynosCameraBuffer *outBuffer, + int32_t dstPos) +{ + status_t ret = NO_ERROR; + ExynosCameraBuffer selectedBuffer; + + if (frame == NULL) { + ALOGE("ERR(%s[%d]):frame == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (outBuffer == NULL) { + ALOGE("ERR(%s[%d]):outBuffer == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (isSrc) { + ret = frame->getSrcBuffer(pipeID, &selectedBuffer); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):getSrcBuffer(pipeID %d) fail", __FUNCTION__, __LINE__, pipeID); + } else { + if (dstPos < 0) { + ret = frame->getDstBuffer(pipeID, &selectedBuffer); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):getDstBuffer(pipeID %d) fail", __FUNCTION__, __LINE__, pipeID); + } else { + ret = frame->getDstBuffer(pipeID, &selectedBuffer, dstPos); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):getDstBuffer(pipeID %d, dstPos %d) fail", __FUNCTION__, __LINE__, pipeID, dstPos); + } + } + + *outBuffer = selectedBuffer; + return ret; +} + +status_t ExynosCameraFrameSelector::m_pushQ(ExynosCameraList *list, + ExynosCameraFrame* inframe, + bool lockflag) +{ + status_t ret = NO_ERROR; + if( lockflag ) { + inframe->frameLock(); + } + list->pushProcessQ(&inframe); + return ret; +} +status_t ExynosCameraFrameSelector::m_popQ(ExynosCameraList *list, + ExynosCameraFrame** outframe, + bool unlockflag, + int tryCount) +{ + status_t ret = NO_ERROR; + int iter = 0; + + do { + ret = 
list->popProcessQ(outframe); + if( ret < 0 ) { + if( ret == TIMED_OUT ) { + ALOGD("DEBUG(%s[%d]):PopQ Time out -> retry[max cur](%d %d)", + __FUNCTION__, __LINE__, tryCount, iter); + + iter++; + continue; + } + } + } while (ret != OK && tryCount > iter); + + if( ret != OK ) { + ALOGE("ERR(%s[%d]):popQ fail(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if( *outframe == NULL ) { + ALOGE("ERR(%s[%d]):popQ frame = NULL frame(%p)", __FUNCTION__, __LINE__, *outframe); + return ret; + } + + if( unlockflag ) { + (*outframe)->frameUnlock(); + } + return ret; +} + +#ifdef USE_FRAMEMANAGER +status_t ExynosCameraFrameSelector::m_frameComplete(ExynosCameraFrame *frame, bool isForcelyDelete, + int pipeID, bool isSrc, int32_t dstPos, bool flagReleaseBuf) +{ + int ret = OK; + + if(flagReleaseBuf) { + m_releaseBuffer(frame, pipeID, isSrc, dstPos); + } + + if (isForcelyDelete == true) { + ALOGD("DEBUG(%s[%d]):frame deleted forcely : isComplete(%d) count(%d) LockState(%d)", + __FUNCTION__, __LINE__, + frame->isComplete(), + frame->getFrameCount(), + frame->getFrameLockState()); + + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); +#endif + m_frameMgr->deleteFrame(frame); + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } + frame = NULL; + } else { + ALOGV("DEBUG(%s[%d]):frame complete, count(%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); +#ifndef USE_FRAME_REFERENCE_COUNT + if (frame->isComplete() == true) { + if (frame->getFrameLockState() == false) { + ALOGV("DEBUG(%s[%d]):frame complete, count(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); +#endif + m_frameMgr->deleteFrame(frame); + + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + } + frame = NULL; + } else { + ALOGV("DEBUG(%s[%d]):frame is locked : isComplete(%d) count(%d) LockState(%d)", + __FUNCTION__, __LINE__, + frame->isComplete(), + frame->getFrameCount(), + frame->getFrameLockState()); + } + } else { + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); + ALOGE("ERR(%s[%d]):frame deleted : key(%d) refCnt(%d)", + __FUNCTION__, __LINE__, frame->getUniqueKey(), frame->getRef()); +#endif + m_frameMgr->deleteFrame(frame); + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + } + frame = NULL; + } +#else /* USE_FRAME_REFERENCE_COUNT */ + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); +#endif + m_frameMgr->deleteFrame(frame); + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } +#endif + frame = NULL; + } + + return ret; +} +#else /* USE_FRAMEMANAGER */ +status_t ExynosCameraFrameSelector::m_frameComplete(ExynosCameraFrame *frame, bool isForcelyDelete, int pipeID, + int pipeID, bool isSrc, int32_t dstPos, bool flagReleaseBuf) +{ + int ret = OK; + + if(flagReleaseBuf) { + m_releaseBuffer(frame, pipeID, isSrc, dstPos); + } + + if (isForcelyDelete == true) { + ALOGD("DEBUG(%s[%d]):frame deleted forcely : isComplete(%d) count(%d) LockState(%d)", + __FUNCTION__, __LINE__, + frame->isComplete(), + frame->getFrameCount(), + frame->getFrameLockState()); + delete frame; + frame = NULL; + } else { + if (frame->isComplete() == true) { + if (frame->getFrameLockState() == false) { + ALOGV("DEBUG(%s[%d]):frame complete, count(%d)", + 
__FUNCTION__, __LINE__, frame->getFrameCount()); + + delete frame; + frame = NULL; + } else { + ALOGV("DEBUG(%s[%d]):frame is locked : isComplete(%d) count(%d) LockState(%d)", + __FUNCTION__, __LINE__, + frame->isComplete(), + frame->getFrameCount(), + frame->getFrameLockState()); + } + } else { + ALOGV("DEBUG(%s[%d]):frame is not completed : isComplete(%d) count(%d) LockState(%d)", + __FUNCTION__, __LINE__, + frame->isComplete(), + frame->getFrameCount(), + frame->getFrameLockState()); + } + } + return ret; +} +#endif + +/* + * Check complete flag of the Frame and deallocate it if it is completed. + * This function ignores lock flag of the frame(Lock flag is usually set to protect + * the frame from deallocation), so please use with caution. + * This function is required to remove a frame from frameHoldingList. + */ +#ifdef USE_FRAMEMANAGER +status_t ExynosCameraFrameSelector::m_LockedFrameComplete(ExynosCameraFrame *frame, int pipeID, + bool isSrc, int32_t dstPos) +{ + int ret = OK; + + m_releaseBuffer(frame, pipeID, isSrc, dstPos); + +#ifndef USE_FRAME_REFERENCE_COUNT + if (frame->isComplete() == true) { + if (frame->getFrameLockState() == true) + { + ALOGV("DEBUG(%s[%d]):Deallocating locked frame, count(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + } + + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); +#endif + m_frameMgr->deleteFrame(frame); + } + frame = NULL; + } +#else + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); +#endif + m_frameMgr->deleteFrame(frame); + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } +#endif + + return ret; +} +#else +status_t ExynosCameraFrameSelector::m_LockedFrameComplete(ExynosCameraFrame *frame, int pipeID, + bool isSrc, int32_t dstPos) +{ + int ret = OK; + + m_releaseBuffer(frame, pipeID, isSrc, dstPos); + + if (frame->isComplete() == true) { + if (frame->getFrameLockState() == true) + { + ALOGV("DEBUG(%s[%d]):Deallocating locked frame, count(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); + ALOGV("DEBUG(%s[%d]):frame is locked : isComplete(%d) count(%d) LockState(%d)", + __FUNCTION__, __LINE__, + frame->isComplete(), + frame->getFrameCount(), + frame->getFrameLockState()); + } + delete frame; + frame = NULL; + } + return ret; +} +#endif + +status_t ExynosCameraFrameSelector::wakeupQ(void) +{ + m_frameHoldList.sendCmd(WAKE_UP); + + return NO_ERROR; +} + +status_t ExynosCameraFrameSelector::cancelPicture(bool flagCancel) +{ + isCanceled = flagCancel; + + return NO_ERROR; +} + +status_t ExynosCameraFrameSelector::m_clearList(ExynosCameraList *list,int pipeID, bool isSrc, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame *frame = NULL; + ExynosCameraBuffer buffer; + + while (list->getSizeOfProcessQ() > 0) { + if( m_popQ(list, &frame, false, 1) != NO_ERROR ) { + ALOGE("ERR(%s[%d]):getBufferToManageQ fail", __FUNCTION__, __LINE__); + + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } else { + ret = m_getBufferFromFrame(frame, pipeID, isSrc, &buffer, dstPos); + if( ret != NO_ERROR ) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s)", + __FUNCTION__, __LINE__, pipeID, (isSrc)?"Src":"Dst"); + } + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else { + if (buffer.index >= 0) + ret = m_bufMgr->putBuffer(buffer.index, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + if (ret < 0) { + ALOGE("ERR(%s[%d]):putIndex 
is %d", __FUNCTION__, __LINE__, buffer.index); + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } + /* + Frames in m_frameHoldList and m_hdrFrameHoldList are locked when they are inserted + on the list. So we need to use m_LockedFrameComplete() to remove those frames. + Please beware that the frame might be deleted in elsewhere, epically on erroneous + conditions. So if the program encounters memory fault here, please check the other + frame deallocation(delete) routines. + */ + + /* Rather than blindly deleting frame in m_LockedFrameComplete(), we do following: + * 1. Check whether frame is complete. Delete the frame if it is complete. + * 2. If the frame is not complete, unlock it. mainThread will delete this frame. + */ + + //m_LockedFrameComplete(frame); + +#ifdef USE_FRAMEMANAGER +#ifndef USE_FRAME_REFERENCE_COUNT + if (frame->isComplete() == true) { + if (frame->getFrameLockState() == true) + ALOGV("DEBUG(%s[%d]):Deallocating locked frame, count(%d)", + __FUNCTION__, __LINE__, frame->getFrameCount()); +#else + { +#endif + if (m_frameMgr != NULL) { +#ifdef USE_FRAME_REFERENCE_COUNT + frame->decRef(); +#endif + m_frameMgr->deleteFrame(frame); + } else { + ALOGE("ERR(%s[%d]):m_frameMgr is NULL (%d)", __FUNCTION__, __LINE__, frame->getFrameCount()); + } + } +#else + if (frame->isComplete() == true) { + delete frame; + frame = NULL; + } else { + if (frame->getFrameLockState() == true) + frame->frameUnlock(); + } +#endif + } + } + } + return ret; +} + +status_t ExynosCameraFrameSelector::setFrameHoldCount(int32_t count) +{ + if (count < 0) { + ALOGE("ERR(%s[%d]):frame hold count cannot be negative value, current value(%d)", + __FUNCTION__, __LINE__, count); + return BAD_VALUE; + } + + m_frameHoldCount = count; + + return NO_ERROR; +} + +bool ExynosCameraFrameSelector::m_isFrameMetaTypeShotExt(void) +{ + bool isShotExt = true; + + if (m_parameters->isSccCapture() == true) { + if (m_parameters->isReprocessing() == true) + isShotExt = true; + else + isShotExt = false; + } else { + if (m_parameters->getUsePureBayerReprocessing() == false) + isShotExt = false; + } + + return isShotExt; +} + +void ExynosCameraFrameSelector::setWaitTime(uint64_t waitTime) +{ + m_frameHoldList.setWaitTime(waitTime); +} + +void ExynosCameraFrameSelector::setIsFirstFrame(bool isFirstFrame) +{ + m_isFirstFrame = isFirstFrame; +} + +bool ExynosCameraFrameSelector::getIsFirstFrame() +{ + return m_isFirstFrame; +} + +void ExynosCameraFrameSelector::wakeselectDynamicFrames(void) +{ + isCanceled = true; + m_frameHoldList.wakeupAll(); +} +} diff --git a/libcamera/common_v2/ExynosCameraFrameSelector.h b/libcamera/common_v2/ExynosCameraFrameSelector.h new file mode 100644 index 0000000..c3e1088 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraFrameSelector.h @@ -0,0 +1,116 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_BAYER_SELECTOR_H +#define EXYNOS_CAMERA_BAYER_SELECTOR_H + +#include "ExynosCameraParameters.h" +#include "ExynosCameraBuffer.h" +#include "ExynosCameraBufferManager.h" +#include "ExynosCameraList.h" +#include "ExynosCameraActivityControl.h" +#include "ExynosCameraFrame.h" +#ifdef USE_FRAMEMANAGER +#include "ExynosCameraFrameManager.h" +#endif +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA +#include "ExynosCameraDualFrameSelector.h" +#endif + +namespace android{ +class ExynosCameraFrameSelector { +public: +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameSelector (ExynosCameraParameters *param, + ExynosCameraBufferManager *bufMgr, ExynosCameraFrameManager *manager = NULL + ); +#else + ExynosCameraFrameSelector (ExynosCameraParameters *param, + ExynosCameraBufferManager *bufMgr); +#endif + ~ExynosCameraFrameSelector(); + status_t release(void); + status_t manageFrameHoldList(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos = 0); + status_t manageFrameHoldListForDynamicBayer(ExynosCameraFrame *frame); + ExynosCameraFrame* selectFrames(int count, int pipeID, bool isSrc, int tryCount, int32_t dstPos = 0); + ExynosCameraFrame* selectDynamicFrames(int count, int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* selectCaptureFrames(int count, uint32_t frameCount, int pipeID, bool isSrc, int tryCount, int32_t dstPos = 0); + status_t clearList(int pipeID = -1 , bool isSrc = false, int32_t dstPos = 0); + int getHoldCount(void) { return m_frameHoldList.getSizeOfProcessQ(); }; + status_t setFrameHoldCount(int32_t count); + status_t cancelPicture(bool flagCancel = true); + status_t wakeupQ(void); + void setWaitTime(uint64_t waitTime); + void setIsFirstFrame(bool isFirstFrame); + bool getIsFirstFrame(); + + void wakeselectDynamicFrames(void); +private: + status_t m_manageNormalFrameHoldList(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos); + status_t m_manageHdrFrameHoldList(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos); +#ifdef RAWDUMP_CAPTURE + status_t m_manageRawFrameHoldList(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos); + ExynosCameraFrame* m_selectRawNormalFrame(int pipeID, bool isSrc, int tryCount); + status_t m_list_release(ExynosCameraList *list, int pipeID, bool isSrc, int32_t dstPos); + int removeFlags; +#endif + + ExynosCameraFrame* m_selectNormalFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* m_selectFlashFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* m_selectFlashFrameV2(int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* m_selectFocusedFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* m_selectHdrFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* m_selectBurstFrame(int pipeID, bool isSrc, int tryCount, int32_t dstPos); + ExynosCameraFrame* m_selectCaptureFrame(uint32_t frameCount, int pipeID, bool isSrc, int tryCount, int32_t dstPos); + status_t m_getBufferFromFrame(ExynosCameraFrame *frame, int pipeID, bool isSrc, ExynosCameraBuffer *outBuffer, int32_t dstPos); + status_t m_pushQ(ExynosCameraList *list, ExynosCameraFrame* inframe, bool lockflag); + status_t m_popQ(ExynosCameraList *list, ExynosCameraFrame** outframe, bool unlockflag, int tryCount); + status_t m_waitAndpopQ(ExynosCameraList *list, ExynosCameraFrame** outframe, bool unlockflag, int tryCount); + status_t m_frameComplete(ExynosCameraFrame *frame, bool 
isForcelyDelete = false, + int pipeID = 0, bool isSrc = false, int32_t dstPos = 0, bool flagReleaseBuf = false); + status_t m_LockedFrameComplete(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos); + status_t m_clearList(ExynosCameraList *list, int pipeID, bool isSrc, int32_t dstPos); + status_t m_release(ExynosCameraList *list); + status_t m_releaseBuffer(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos); + + bool m_isFrameMetaTypeShotExt(void); + +private: + ExynosCameraList m_frameHoldList; + ExynosCameraList m_hdrFrameHoldList; +#ifdef RAWDUMP_CAPTURE + ExynosCameraList m_RawFrameHoldList; +#endif +#ifdef USE_FRAMEMANAGER + ExynosCameraFrameManager *m_frameMgr; +#endif + ExynosCameraParameters *m_parameters; + ExynosCameraBufferManager *m_bufMgr; + ExynosCameraActivityControl *m_activityControl; + + int m_reprocessingCount; + + mutable Mutex m_listLock; + int32_t m_frameHoldCount; + bool isCanceled; + int32_t m_CaptureCount; + bool m_isFirstFrame; +}; +} + +#endif + diff --git a/libcamera/common_v2/ExynosCameraInterface.cpp b/libcamera/common_v2/ExynosCameraInterface.cpp new file mode 100644 index 0000000..76e01b8 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraInterface.cpp @@ -0,0 +1,802 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraInterface" +#include + +#include "ExynosCameraInterface.h" +#include "ExynosCameraAutoTimer.h" + +namespace android { + +static int HAL_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t** device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(id); + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + bool hasFlash = false; + FILE *fp = NULL; + int ret = 0; + char flashFilePath[100] = {'\0',}; + +#ifdef BOARD_FRONT_CAMERA_ONLY_USE + cameraId += 1; +#endif + +#ifdef BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA + if (cameraId == 0) { + return HAL_ext_camera_device_open_wrapper(module, id, device); + } +#endif + +#ifdef BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA + if (cameraId == 1) { + return HAL_ext_camera_device_open_wrapper(module, id, device); + } +#endif + +#if (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) +#else + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + +#ifdef BOARD_FRONT_CAMERA_ONLY_USE + if (cameraId < 1 || cameraId > HAL_getNumberOfCameras()) +#else + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) +#endif + { + ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); + return -EINVAL; + } + +#ifdef FRONT_1_CAMERA_SENSOR_NAME + /* + * if it use dual camera. + * just calibrate it as proper cameraId_1. 
+ */ + int cameraId_0 = -1; + int cameraId_1 = -1; + + getDualCameraId(&cameraId_0, &cameraId_1); + + if (0 < cameraId_1) { + if (CAMERA_ID_BACK_1 <= cameraId) { + ALOGD("DEBUG(%s[%d]):calibrate camera ID(%d) -> camera ID(%d) for dual camera", + __FUNCTION__, __LINE__, cameraId, cameraId_1); + cameraId = cameraId_1; + } + } +#endif + + state = CAMERA_OPENED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return -EUSERS; + } + +#ifdef BOARD_FRONT_CAMERA_ONLY_USE + if ((unsigned int)cameraId <= (sizeof(sCameraInfo) / sizeof(sCameraInfo[0]))) +#else + if ((unsigned int)cameraId < (sizeof(sCameraInfo) / sizeof(sCameraInfo[0]))) +#endif + { + if (g_cam_device[cameraId]) { + ALOGE("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); + *device = (hw_device_t *)g_cam_device[cameraId]; + goto done; + } + + g_cam_device[cameraId] = (camera_device_t *)malloc(sizeof(camera_device_t)); + if (!g_cam_device[cameraId]) + return -ENOMEM; + + g_cam_openLock[cameraId].lock(); + g_cam_device[cameraId]->common.tag = HARDWARE_DEVICE_TAG; + g_cam_device[cameraId]->common.version = 1; + g_cam_device[cameraId]->common.module = const_cast(module); + g_cam_device[cameraId]->common.close = HAL_camera_device_close; + + g_cam_device[cameraId]->ops = &camera_device_ops; + + ALOGD("DEBUG(%s):open camera %s", __FUNCTION__, id); + g_cam_device[cameraId]->priv = new ExynosCamera(cameraId, g_cam_device[cameraId]); + *device = (hw_device_t *)g_cam_device[cameraId]; + ALOGI("INFO(%s[%d]):camera(%d) out from new g_cam_device[%d]->priv()", + __FUNCTION__, __LINE__, cameraId, cameraId); + + g_cam_openLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + } else { + ALOGE("DEBUG(%s):camera(%s) open fail - must front camera open first", + __FUNCTION__, id); + return -EINVAL; + } + +done: + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + + if (g_cam_info[cameraId]) { + metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + hasFlash = true; + } else { + hasFlash = false; + } + } + + if(hasFlash && g_cam_torchEnabled[cameraId]) { + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open(%s) fail, ret(%d)", + __FUNCTION__, __LINE__, flashFilePath, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + fflush(fp); + fclose(fp); + + g_cam_torchEnabled[cameraId] = false; + } + } + + g_callbacks->torch_mode_status_change(g_callbacks, id, TORCH_MODE_STATUS_NOT_AVAILABLE); + + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +#endif /* (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) */ + + return 0; +} + +static int HAL_camera_device_close(struct hw_device_t* device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId; + enum CAMERA_STATE state; + char camid[10]; + +#if (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) +#else + ALOGI("INFO(%s[%d]): in", __FUNCTION__, __LINE__); + + if (device) { + camera_device_t *cam_device = (camera_device_t 
*)device; + cameraId = obj(cam_device)->getCameraId(); + + ALOGI("INFO(%s[%d]):camera(%d)", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_CLOSED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + g_cam_openLock[cameraId].lock(); + ALOGI("INFO(%s[%d]):camera(%d) locked..", __FUNCTION__, __LINE__, cameraId); + g_cam_device[cameraId] = NULL; + g_cam_openLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + delete static_cast(cam_device->priv); + free(cam_device); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d)", __FUNCTION__, __LINE__, cameraId); + } + + /* Update torch status */ + g_cam_torchEnabled[cameraId] = false; + snprintf(camid, sizeof(camid), "%d\n", cameraId); + g_callbacks->torch_mode_status_change(g_callbacks, camid, TORCH_MODE_STATUS_AVAILABLE_OFF); + + ALOGI("INFO(%s[%d]): out", __FUNCTION__, __LINE__); +#endif /* (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) */ + + return 0; +} + +static int HAL_camera_device_set_preview_window( + struct camera_device *dev, + struct preview_stream_ops *buf) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + ret = obj(dev)->setPreviewWindow(buf); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); + return ret; +} + +static void HAL_camera_device_set_callbacks(struct camera_device *dev, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void* user) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->setCallbacks(notify_cb, data_cb, data_cb_timestamp, + get_memory, + user); +} + +static void HAL_camera_device_enable_msg_type( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->enableMsgType(msg_type); +} + +static void HAL_camera_device_disable_msg_type( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->disableMsgType(msg_type); +} + +static int HAL_camera_device_msg_type_enabled( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->msgTypeEnabled(msg_type); +} + +static int HAL_camera_device_start_preview(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_PREVIEW; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + g_cam_previewLock[cameraId].lock(); + +#ifdef DUAL_CAMERA_SUPPORTED + if (cameraId != 0 && g_cam_device[0] != NULL + && cam_state[0] != CAMERA_NONE && cam_state[0] != CAMERA_CLOSED) { + ret = obj(dev)->setDualMode(true); + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):camera(%d) set dual mode fail, ret(%d)", + __FUNCTION__, __LINE__, cameraId, ret); + else + ALOGI("INFO(%s[%d]):camera(%d) set dual mode)", + __FUNCTION__, __LINE__, 
cameraId); + } +#endif + + ret = obj(dev)->startPreview(); + ALOGI("INFO(%s[%d]):camera(%d) out from startPreview()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_previewLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + if (ret == OK) { + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out (startPreview succeeded)", + __FUNCTION__, __LINE__, cameraId); + } else { + ALOGI("INFO(%s[%d]):camera(%d) out (startPreview FAILED)", + __FUNCTION__, __LINE__, cameraId); + } + return ret; +} + +static void HAL_camera_device_stop_preview(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); +/* HACK : If camera in recording state, */ +/* CameraService have to call the stop_recording before the stop_preview */ +#if 1 + if (cam_state[cameraId] == CAMERA_RECORDING) { + ALOGE("ERR(%s[%d]):camera(%d) in RECORDING RUNNING state ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + ALOGE("ERR(%s[%d]):camera(%d) The stop_recording must be called " + "before the stop_preview ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + HAL_camera_device_stop_recording(dev); + ALOGE("ERR(%s[%d]):cameraId=%d out from stop_recording ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + + for (int i=0; i<30; i++) { + ALOGE("ERR(%s[%d]):camera(%d) The stop_recording must be called " + "before the stop_preview ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + } + ALOGE("ERR(%s[%d]):camera(%d) sleep 500ms for ---- INVALID ---- state", + __FUNCTION__, __LINE__, cameraId); + usleep(500000); /* to notify, sleep 500ms */ + } +#endif + state = CAMERA_PREVIEWSTOPPED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return; + } + + g_cam_previewLock[cameraId].lock(); + + obj(dev)->stopPreview(); + ALOGI("INFO(%s[%d]):camera(%d) out from stopPreview()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_previewLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_preview_enabled(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->previewEnabled(); +} + +static int HAL_camera_device_store_meta_data_in_buffers( + struct camera_device *dev, + int enable) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->storeMetaDataInBuffers(enable); +} + +static int HAL_camera_device_start_recording(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RECORDING; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + g_cam_recordingLock[cameraId].lock(); + + ret = obj(dev)->startRecording(); + 
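    /* Same pattern as start_preview()/stop_preview(): the per-camera recording lock only
     * serializes the call into ExynosCamera; cam_state is promoted to CAMERA_RECORDING
     * further down, under cam_stateLock, and only when startRecording() returns OK. */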
ALOGI("INFO(%s[%d]):camera(%d) out from startRecording()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_recordingLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + if (ret == OK) { + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out (startRecording succeeded)", + __FUNCTION__, __LINE__, cameraId); + } else { + ALOGI("INFO(%s[%d]):camera(%d) out (startRecording FAILED)", + __FUNCTION__, __LINE__, cameraId); + } + return ret; +} + +static void HAL_camera_device_stop_recording(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RECORDINGSTOPPED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return; + } + + g_cam_recordingLock[cameraId].lock(); + + obj(dev)->stopRecording(); + ALOGI("INFO(%s[%d]):camera(%d) out from stopRecording()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_recordingLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_recording_enabled(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->recordingEnabled(); +} + +static void HAL_camera_device_release_recording_frame(struct camera_device *dev, + const void *opaque) +{ + /* ExynosCameraAutoTimer autoTimer(__FUNCTION__); */ + + ALOGV("DEBUG(%s):", __FUNCTION__); + obj(dev)->releaseRecordingFrame(opaque); +} + +static int HAL_camera_device_auto_focus(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->autoFocus(); +} + +static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->cancelAutoFocus(); +} + +static int HAL_camera_device_take_picture(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->takePicture(); +} + +static int HAL_camera_device_cancel_picture(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->cancelPicture(); +} + +static int HAL_camera_device_set_parameters( + struct camera_device *dev, + const char *parms) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + String8 str(parms); + CameraParameters p(str); + return obj(dev)->setParameters(p); +} + +char *HAL_camera_device_get_parameters(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + String8 str; + +/* HACK : to avoid compile error */ +#if (defined BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) && (defined BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA) + ALOGE("ERR(%s[%d]):invalid opertion on external camera", __FUNCTION__, __LINE__); +#else + CameraParameters parms = obj(dev)->getParameters(); + str = 
parms.flatten(); +#endif + return strdup(str.string()); +} + +static void HAL_camera_device_put_parameters( + __unused struct camera_device *dev, + char *parms) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + free(parms); +} + +static int HAL_camera_device_send_command( + struct camera_device *dev, + int32_t cmd, + int32_t arg1, + int32_t arg2) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->sendCommand(cmd, arg1, arg2); +} + +static void HAL_camera_device_release(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RELEASED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return; + } + + g_cam_openLock[cameraId].lock(); + + obj(dev)->release(); + ALOGI("INFO(%s[%d]):camera(%d) out from release()", + __FUNCTION__, __LINE__, cameraId); + + g_cam_openLock[cameraId].unlock(); + + ALOGI("INFO(%s[%d]):camera(%d) unlocked..", __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_dump(struct camera_device *dev, int fd) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->dump(fd); +} + +static int HAL_getNumberOfCameras() +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return sizeof(sCameraInfo) / sizeof(sCameraInfo[0]); +} + +static int HAL_set_callbacks(const camera_module_callbacks_t *callbacks) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + if (callbacks == NULL) + ALOGE("ERR(%s[%d]):dev is NULL", __FUNCTION__, __LINE__); + + g_callbacks = callbacks; + + return 0; +} + +static int HAL_getCameraInfo(int cameraId, struct camera_info *info) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + status_t ret = NO_ERROR; + + ALOGV("DEBUG(%s):", __FUNCTION__); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s):Invalid camera ID %d", __FUNCTION__, cameraId); + return -EINVAL; + } + + memcpy(info, &sCameraInfo[cameraId], sizeof(CameraInfo)); + info->device_version = HARDWARE_DEVICE_API_VERSION(1, 0); + + if (g_cam_info[cameraId] == NULL) { + ALOGD("DEBUG(%s[%d]):Return static information (%d)", __FUNCTION__, __LINE__, cameraId); + ret = ExynosCamera1MetadataConverter::constructStaticInfo(cameraId, &g_cam_info[cameraId]); + if (ret != 0) { + ALOGE("ERR(%s[%d]): static information is NULL", __FUNCTION__, __LINE__); + return -EINVAL; + } + info->static_camera_characteristics = g_cam_info[cameraId]; + } else { + ALOGD("DEBUG(%s[%d]):Reuse!! 
Return static information (%d)", __FUNCTION__, __LINE__, cameraId); + info->static_camera_characteristics = g_cam_info[cameraId]; + } + + /* set service arbitration (resource_cost, conflicting_devices, conflicting_devices_length */ + info->resource_cost = sCameraConfigInfo[cameraId].resource_cost; + info->conflicting_devices = sCameraConfigInfo[cameraId].conflicting_devices; + info->conflicting_devices_length = sCameraConfigInfo[cameraId].conflicting_devices_length; + ALOGD("INFO(%s info->resource_cost = %d ", __FUNCTION__, info->resource_cost); + if (info->conflicting_devices_length) { + for (size_t i = 0; i < info->conflicting_devices_length; i++) { + ALOGD("INFO(%s info->conflicting_devices = %s ", __FUNCTION__, info->conflicting_devices[i]); + } + } else { + ALOGD("INFO(%s info->conflicting_devices_length is zero ", __FUNCTION__); + } + + return NO_ERROR; +} + +static int HAL_set_torch_mode(const char* camera_id, bool enabled) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(camera_id); + FILE *fp = NULL; + char flashFilePath[100] = {'\0',}; + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + int ret = 0; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s[%d]):Invalid camera ID %d", __FUNCTION__, __LINE__, cameraId); + return -EINVAL; + } + + /* Check the android.flash.info.available */ + /* If this camera device does not support flash, It have to return -ENOSYS */ + metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + ALOGV("DEBUG(%s[%d]): Flash metadata exist", __FUNCTION__, __LINE__); + } else { + ALOGE("ERR(%s[%d]): Can not find flash metadata", __FUNCTION__, __LINE__); + return -ENOSYS; + } + + ALOGI("INFO(%s[%d]): Current Camera State (state = %d)", __FUNCTION__, __LINE__, cam_state[cameraId]); + + /* Add the check the camera state that camera in use or not */ + if (cam_state[cameraId] > CAMERA_CLOSED) { + ALOGE("ERR(%s[%d]): Camera Device is busy (state = %d)", __FUNCTION__, __LINE__, cam_state[cameraId]); + if (g_callbacks) { + g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_OFF); + ALOGI("INFO(%s[%d]):camera(%d) TORCH_MODE_STATUS_AVAILABLE_OFF", __FUNCTION__, __LINE__, cameraId); + } + return -EBUSY; + } + + /* Add the sysfs file read (sys/class/camera/flash/torch_flash) then set 0 or 1 */ + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open(%s) fail, ret(%d)", + __FUNCTION__, __LINE__, flashFilePath, fp); + return -ENOSYS; + } + + if (enabled) { + fwrite("1", sizeof(char), 1, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + } + + fflush(fp); + + ret = fclose(fp); + if (ret != 0) { + ALOGE("ERR(%s[%d]): file close failed(%d)", __FUNCTION__, __LINE__, ret); + } + + if (enabled) { + g_cam_torchEnabled[cameraId] = true; + if (g_callbacks) { + g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_ON); + ALOGI("INFO(%s[%d]):camera(%d) TORCH_MODE_STATUS_AVAILABLE_ON", __FUNCTION__, __LINE__, cameraId); + } + } else { + g_cam_torchEnabled[cameraId] = false; + if (g_callbacks) { + 
g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_OFF); + ALOGI("INFO(%s[%d]):camera(%d) TORCH_MODE_STATUS_AVAILABLE_OFF", __FUNCTION__, __LINE__, cameraId); + } + } + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +static int HAL_init() +{ + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + return OK; +} + +static int HAL_open_legacy(__unused const struct hw_module_t* module, __unused const char* id, + __unused uint32_t halVersion, __unused struct hw_device_t** device) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + return NO_ERROR; +} + +static void HAL_get_vendor_tag_ops(__unused vendor_tag_ops_t* ops) +{ + ALOGV("INFO(%s):", __FUNCTION__); +/* + SecCameraVendorTags::Ops = ops; + + ops->get_all_tags = SecCameraVendorTags::get_ext_all_tags; + ops->get_tag_count = SecCameraVendorTags::get_ext_tag_count; + ops->get_tag_type = SecCameraVendorTags::get_ext_tag_type; + ops->get_tag_name = SecCameraVendorTags::get_ext_tag_name; + ops->get_section_name = SecCameraVendorTags::get_ext_section_name; + ops->reserved[0] = NULL; +*/ +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraInterface.h b/libcamera/common_v2/ExynosCameraInterface.h new file mode 100644 index 0000000..3a6bd76 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraInterface.h @@ -0,0 +1,400 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/
+
+#ifndef EXYNOS_CAMERA_SERVICE_INTERFACE_H
+#define EXYNOS_CAMERA_SERVICE_INTERFACE_H
+
+#include
+
+#include "ExynosCamera.h"
+#include "ExynosCameraInterfaceState.h"
+#include "ExynosCamera1MetadataConverter.h"
+
+#if defined(BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA) || defined(BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA)
+#include "SecCameraInterface.h"
+#endif
+
+#ifndef CAMERA_MODULE_VERSION
+#define CAMERA_MODULE_VERSION CAMERA_MODULE_API_VERSION_2_4
+#endif
+
+#define SET_METHOD(m) m : HAL_camera_device_##m
+
+#define MAX_NUM_OF_CAMERA 4
+
+namespace android {
+
+static CameraInfo sCameraInfo[] = {
+#if !defined(BOARD_FRONT_CAMERA_ONLY_USE)
+    {
+        CAMERA_FACING_BACK,
+        BACK_ROTATION /* orientation */
+    },
+#endif
+    {
+        CAMERA_FACING_FRONT,
+        FRONT_ROTATION /* orientation */
+    },
+#if defined(MAIN_1_CAMERA_SENSOR_NAME)
+    {
+        CAMERA_FACING_BACK,
+        BACK_ROTATION /* orientation */
+    },
+#endif
+#if defined(FRONT_1_CAMERA_SENSOR_NAME)
+    {
+        CAMERA_FACING_FRONT,
+        FRONT_ROTATION /* orientation */
+    }
+#endif
+};
+
+/* This struct is used in the device 3.3 service arbitration */
+struct CameraConfigInfo {
+    int resource_cost;
+    char** conflicting_devices;
+    size_t conflicting_devices_length;
+};
+
+const CameraConfigInfo sCameraConfigInfo[] = {
+#if !defined(BOARD_FRONT_CAMERA_ONLY_USE)
+    {
+        51,   /* resource_cost : [0, 100] */
+        NULL, /* conflicting_devices : NULL, (char *[]){"1"}, (char *[]){"0", "1"} */
+        0,    /* conflicting_devices_length : The length of the array in the conflicting_devices field */
+    },
+#endif
+    {
+        51,   /* resource_cost : [0, 100] */
+        NULL, /* conflicting_devices : NULL, (char *[]){"0"}, (char *[]){"0", "1"} */
+        0,    /* conflicting_devices_length : The length of the array in the conflicting_devices field */
+    }
+};
+
+static camera_metadata_t *g_cam_info[MAX_NUM_OF_CAMERA] = {NULL, NULL};
+static const camera_module_callbacks_t *g_callbacks = NULL;
+
+static camera_device_t *g_cam_device[MAX_NUM_OF_CAMERA];
+
+static Mutex g_cam_openLock[MAX_NUM_OF_CAMERA];
+static Mutex g_cam_previewLock[MAX_NUM_OF_CAMERA];
+static Mutex g_cam_recordingLock[MAX_NUM_OF_CAMERA];
+static bool g_cam_torchEnabled[MAX_NUM_OF_CAMERA] = {false, false};
+
+static inline ExynosCamera *obj(struct camera_device *dev)
+{
+    return reinterpret_cast<ExynosCamera *>(dev->priv);
+};
+
+/**
+ * Open camera device
+ */
+static int HAL_camera_device_open(
+        const struct hw_module_t* module,
+        const char *id,
+        struct hw_device_t** device);
+
+/**
+ * Close camera device
+ */
+static int HAL_camera_device_close(struct hw_device_t* device);
+
+/**
+ * Set the preview_stream_ops to which preview frames are sent
+ */
+static int HAL_camera_device_set_preview_window(
+        struct camera_device *dev,
+        struct preview_stream_ops *buf);
+
+/**
+ * Set the notification and data callbacks
+ */
+static void HAL_camera_device_set_callbacks(
+        struct camera_device *dev,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void* user);
+
+/**
+ * The following three functions all take a msg_type, which is a bitmask of
+ * the messages defined in include/ui/Camera.h
+ */
+
+/**
+ * Enable a message, or set of messages.
+ */
+static void HAL_camera_device_enable_msg_type(
+        struct camera_device *dev,
+        int32_t msg_type);
+
+/**
+ * Disable a message, or a set of messages.
+ *
+ * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the camera
+ * HAL should not rely on its client to call releaseRecordingFrame() to
+ * release video recording frames sent out by the camera HAL before and
+ * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
+ * clients must not modify/access any video recording frame after calling
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+ */
+static void HAL_camera_device_disable_msg_type(
+        struct camera_device *dev,
+        int32_t msg_type);
+
+/**
+ * Query whether a message, or a set of messages, is enabled. Note that
+ * this operates as an AND: if any of the messages queried are off, this
+ * will return false.
+ */
+static int HAL_camera_device_msg_type_enabled(
+        struct camera_device *dev,
+        int32_t msg_type);
+
+/**
+ * Start preview mode.
+ */
+static int HAL_camera_device_start_preview(struct camera_device *dev);
+
+/**
+ * Stop a previously started preview.
+ */
+static void HAL_camera_device_stop_preview(struct camera_device *dev);
+
+/**
+ * Returns true if preview is enabled.
+ */
+static int HAL_camera_device_preview_enabled(struct camera_device *dev);
+
+/**
+ * Request the camera HAL to store meta data or real YUV data in the video
+ * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
+ * it is not called, the default camera HAL behavior is to store real YUV
+ * data in the video buffers.
+ *
+ * This method should be called before startRecording() in order to be
+ * effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the receiver of
+ * the video buffers to interpret the contents and to find the actual frame
+ * data with the help of the meta data in the buffer. How this is done is
+ * outside of the scope of this method.
+ *
+ * Some camera HALs may not support storing meta data in the video buffers,
+ * but all camera HALs should support storing real YUV data in the video
+ * buffers. If the camera HAL does not support storing the meta data in the
+ * video buffers when it is requested to do so, INVALID_OPERATION must be
+ * returned. It is very useful for the camera HAL to pass meta data rather
+ * than the actual frame data directly to the video encoder, since the
+ * amount of the uncompressed frame data can be very large if video size is
+ * large.
+ *
+ * @param enable if true to instruct the camera HAL to store
+ *        meta data in the video buffers; false to instruct
+ *        the camera HAL to store real YUV data in the video
+ *        buffers.
+ *
+ * @return OK on success.
+ */
+static int HAL_camera_device_store_meta_data_in_buffers(
+        struct camera_device *dev,
+        int enable);
+
+/**
+ * Start record mode. When a record image is available, a
+ * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
+ * frame. Every record frame must be released by a camera HAL client via
+ * releaseRecordingFrame() before the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames,
+ * and the client must not modify/access any video recording frames.
+ */
+static int HAL_camera_device_start_recording(struct camera_device *dev);
+
+/**
+ * Stop a previously started recording.
+ */
+static void HAL_camera_device_stop_recording(struct camera_device *dev);
+
+/**
+ * Returns true if recording is enabled.
+ */ +static int HAL_camera_device_recording_enabled(struct camera_device *dev); + +/** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + * + * It is camera HAL client's responsibility to release video recording + * frames sent out by the camera HAL before the camera HAL receives a call + * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames. + */ +static void HAL_camera_device_release_recording_frame( + struct camera_device *dev, + const void *opaque); + +/** + * Start auto focus, the notification callback routine is called with + * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be + * called again if another auto focus is needed. + */ +static int HAL_camera_device_auto_focus(struct camera_device *dev); + +/** + * Cancels auto-focus function. If the auto-focus is still in progress, + * this function will cancel it. Whether the auto-focus is in progress or + * not, this function will return the focus position to the default. If + * the camera does not support auto-focus, this is a no-op. + */ +static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev); + +/** + * Take a picture. + */ +static int HAL_camera_device_take_picture(struct camera_device *dev); + +/** + * Cancel a picture that was started with takePicture. Calling this method + * when no picture is being taken is a no-op. + */ +static int HAL_camera_device_cancel_picture(struct camera_device *dev); + +/** + * Set the camera parameters. This returns BAD_VALUE if any parameter is + * invalid or not supported. + */ +static int HAL_camera_device_set_parameters( + struct camera_device *dev, + const char *parms); + +/** + * Return the camera parameters. + */ +char *HAL_camera_device_get_parameters(struct camera_device *dev); + +/** + * Release buffer that used by the camera parameters. + */ +static void HAL_camera_device_put_parameters( + struct camera_device *dev, + char *parms); + +/** + * Send command to camera driver. + */ +static int HAL_camera_device_send_command( + struct camera_device *dev, + int32_t cmd, + int32_t arg1, + int32_t arg2); + +/** + * Release the hardware resources owned by this object. Note that this is + * *not* done in the destructor. + */ +static void HAL_camera_device_release(struct camera_device *dev); + +/** + * Dump state of the camera hardware + */ +static int HAL_camera_device_dump(struct camera_device *dev, int fd); + +/** + * Callback functions for the camera HAL module to use to inform the framework + * of changes to the camera subsystem. These are called only by HAL modules + * implementing version CAMERA_MODULE_API_VERSION_2_1 or higher of the HAL + * module API interface. 
+ */ +static int HAL_set_callbacks(const camera_module_callbacks_t *callbacks); + +/** + * Retrun the camera hardware info + */ +static int HAL_getCameraInfo(int cameraId, struct camera_info *info); + +/** + * Return number of the camera hardware + */ +static int HAL_getNumberOfCameras(); + +static int HAL_open_legacy(const struct hw_module_t* module, const char* id, uint32_t halVersion, struct hw_device_t** device); + +static void HAL_get_vendor_tag_ops(vendor_tag_ops_t* ops); +static int HAL_set_torch_mode(const char* camera_id, bool enabled); +static int HAL_init(); + +static camera_device_ops_t camera_device_ops = { + SET_METHOD(set_preview_window), + SET_METHOD(set_callbacks), + SET_METHOD(enable_msg_type), + SET_METHOD(disable_msg_type), + SET_METHOD(msg_type_enabled), + SET_METHOD(start_preview), + SET_METHOD(stop_preview), + SET_METHOD(preview_enabled), + SET_METHOD(store_meta_data_in_buffers), + SET_METHOD(start_recording), + SET_METHOD(stop_recording), + SET_METHOD(recording_enabled), + SET_METHOD(release_recording_frame), + SET_METHOD(auto_focus), + SET_METHOD(cancel_auto_focus), + SET_METHOD(take_picture), + SET_METHOD(cancel_picture), + SET_METHOD(set_parameters), + SET_METHOD(get_parameters), + SET_METHOD(put_parameters), + SET_METHOD(send_command), + SET_METHOD(release), + SET_METHOD(dump), +}; + +static hw_module_methods_t camera_module_methods = { + open : HAL_camera_device_open +}; + +extern "C" { + struct camera_module HAL_MODULE_INFO_SYM = { + common : { + tag : HARDWARE_MODULE_TAG, + module_api_version : CAMERA_MODULE_VERSION, + hal_api_version : HARDWARE_HAL_API_VERSION, + id : CAMERA_HARDWARE_MODULE_ID, + name : "Exynos Camera HAL1", + author : "Samsung Corporation", + methods : &camera_module_methods, + dso : NULL, + reserved : {0}, + }, + get_number_of_cameras : HAL_getNumberOfCameras, + get_camera_info : HAL_getCameraInfo, + set_callbacks : HAL_set_callbacks, +#if (TARGET_ANDROID_VER_MAJ >= 4 && TARGET_ANDROID_VER_MIN >= 4) + get_vendor_tag_ops : HAL_get_vendor_tag_ops, + open_legacy : HAL_open_legacy, + set_torch_mode : HAL_set_torch_mode, + init : HAL_init, + reserved : {0} +#endif + }; +} + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCameraInterfaceState.h b/libcamera/common_v2/ExynosCameraInterfaceState.h new file mode 100644 index 0000000..971edbd --- /dev/null +++ b/libcamera/common_v2/ExynosCameraInterfaceState.h @@ -0,0 +1,130 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#include "ExynosCameraConfig.h" + +namespace android { + +enum CAMERA_STATE { + CAMERA_NONE, + CAMERA_OPENED, + CAMERA_RELEASED, + CAMERA_CLOSED, + CAMERA_PREVIEW, + CAMERA_PREVIEWSTOPPED, + CAMERA_RECORDING, + CAMERA_RECORDINGSTOPPED, +}; + +static const char *camera_state_enum2str[40] = { + "NONE", + "OPENED", + "RELEASED", + "CLOSED", + "PREVIEW_RUNNING", + "PREVIEW_STOPPED", + "RECORDING_RUNNING", + "RECORDING_STOPPED" +}; + +static CAMERA_STATE cam_state[4]; +static Mutex cam_stateLock[4]; + +static int check_camera_state(CAMERA_STATE state, int cameraId) +{ + bool ret = false; + cam_stateLock[cameraId].lock(); + + ALOGD("DEBUG(%s):camera(%d) state(%d) checking...", __FUNCTION__, cameraId, state); + +#ifndef DUAL_CAMERA_SUPPORTED + /* Assuming that only 2 cameras are present */ + if ((state == CAMERA_OPENED) && (cam_state[(cameraId + 1) % 2] != CAMERA_NONE) && + (cam_state[(cameraId + 1) % 2] != CAMERA_CLOSED)) { + ALOGE("ERR(%s):camera(%d) DUAL camera not supported\n", __FUNCTION__, cameraId); + cam_stateLock[cameraId].unlock(); + return ret; + } +#endif + switch (state) { + case CAMERA_NONE: + ret = true; + break; + case CAMERA_OPENED: + if (cam_state[cameraId] == CAMERA_NONE || + cam_state[cameraId] == CAMERA_CLOSED) + ret = true; + break; + case CAMERA_RELEASED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEWSTOPPED) + ret = true; + break; + case CAMERA_CLOSED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEWSTOPPED || + cam_state[cameraId] == CAMERA_RELEASED) + ret = true; + break; + case CAMERA_PREVIEW: + if (cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEWSTOPPED) + ret = true; + break; + case CAMERA_PREVIEWSTOPPED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEW || + cam_state[cameraId] == CAMERA_RECORDINGSTOPPED) + ret = true; + break; + case CAMERA_RECORDING: + if (cam_state[cameraId] == CAMERA_PREVIEW || + cam_state[cameraId] == CAMERA_RECORDINGSTOPPED) + ret = true; + break; + case CAMERA_RECORDINGSTOPPED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_RECORDING) + ret = true; + break; + default: + ALOGE("ERR(%s):camera(%d) state(%s) is unknown value", + __FUNCTION__, cameraId, camera_state_enum2str[state]); + ret = false; + break; + } + + if (ret == true) { + ALOGD("DEBUG(%s):camera(%d) state(%d:%s->%d:%s) is valid", + __FUNCTION__, cameraId, + cam_state[cameraId], camera_state_enum2str[cam_state[cameraId]], + state, camera_state_enum2str[state]); + } else { + ALOGE("ERR(%s):camera(%d) state(%d:%s->%d:%s) is INVALID", + __FUNCTION__, cameraId, + cam_state[cameraId], camera_state_enum2str[cam_state[cameraId]], + state, camera_state_enum2str[state]); + } + + cam_stateLock[cameraId].unlock(); + return ret; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraList.h b/libcamera/common_v2/ExynosCameraList.h new file mode 100644 index 0000000..159a8f4 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraList.h @@ -0,0 +1,241 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file ExynosCameraList.h + * \brief hearder file for CAMERA HAL MODULE + * \author Hyeonmyeong Choi(hyeon.choi@samsung.com) + * \date 2013/03/05 + * + */ + +#ifndef EXYNOS_CAMERA_LIST_H__ +#define EXYNOS_CAMERA_LIST_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include "cutils/properties.h" + +#define WAIT_TIME (150 * 1000000) + +using namespace android; + +enum LIST_CMD { + WAKE_UP = 1, +}; + +template +class ExynosCameraList { +public: + typedef List listType; + typedef typename listType::iterator iterator; + + ExynosCameraList() + { + m_statusException = NO_ERROR; + m_waitProcessQ = false; + m_waitTime = WAIT_TIME; + m_thread = NULL; + } + + ExynosCameraList(sp thread) + { + m_statusException = NO_ERROR; + m_waitProcessQ = false; + m_waitTime = WAIT_TIME; + m_thread = NULL; + + m_thread = thread; + } + + ~ExynosCameraList() + { + release(); + } + + void setup(sp thread) + { + m_processQMutex.lock(); + m_thread = thread; + m_processQMutex.unlock(); + } + + void wakeupAll(void) + { + setStatusException(TIMED_OUT); + if (m_waitProcessQ) + m_processQCondition.signal(); + } + + void sendCmd(uint32_t cmd) + { + switch (cmd) { + case WAKE_UP: + wakeupAll(); + break; + default: + ALOGE("ERR(%s):unknown cmd(%d)", __FUNCTION__, cmd); + break; + } + } + + void setStatusException(status_t exception) + { + Mutex::Autolock lock(m_flagMutex); + m_statusException = exception; + } + + status_t getStatusException(void) + { + Mutex::Autolock lock(m_flagMutex); + return m_statusException; + } + + /* Process Queue */ + void pushProcessQ(T *buf) + { + Mutex::Autolock lock(m_processQMutex); + m_processQ.push_back(*buf); + + if (m_waitProcessQ) + m_processQCondition.signal(); + else if (m_thread != NULL && m_thread->isRunning() == false) + m_thread->run(); + }; + + status_t popProcessQ(T *buf) + { + iterator r; + + Mutex::Autolock lock(m_processQMutex); + if (m_processQ.empty()) + return TIMED_OUT; + + r = m_processQ.begin()++; + *buf = *r; + m_processQ.erase(r); + + return OK; + }; + + status_t waitAndPopProcessQ(T *buf) + { + iterator r; + + status_t ret; + m_processQMutex.lock(); + if (m_processQ.empty()) { + m_waitProcessQ = true; + + setStatusException(NO_ERROR); + ret = m_processQCondition.waitRelative(m_processQMutex, m_waitTime); + m_waitProcessQ = false; + + if (ret < 0) { + if (ret == TIMED_OUT) + ALOGV("DEBUG(%s):Time out, Skip to pop process Q", __FUNCTION__); + else + ALOGE("ERR(%s):Fail to pop processQ", __FUNCTION__); + + m_processQMutex.unlock(); + return ret; + } + + ret = getStatusException(); + if (ret != NO_ERROR) { + if (ret == TIMED_OUT) { + ALOGV("DEBUG(%s):return CAM_ECANCELED.(%d).", __FUNCTION__, ret); + } else { + ALOGW("WARN(%s[%d]): Exception status(%d)", __FUNCTION__, __LINE__, ret); + } + m_processQMutex.unlock(); + return ret; + } + } + + if (m_processQ.empty()) { + ALOGE("ERR(%s[%d]): processQ is empty, invalid state", __FUNCTION__, __LINE__); + m_processQMutex.unlock(); + return INVALID_OPERATION; + } + + r 
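+        /*
+         * Pop the element that was queued first (FIFO order).  A minimal usage
+         * sketch of this queue, assuming an ExynosCameraBuffer payload and a
+         * consumer loop (names are illustrative only):
+         *
+         *   ExynosCameraList<ExynosCameraBuffer> bufferQ;
+         *
+         *   // producer side
+         *   ExynosCameraBuffer buf;
+         *   bufferQ.pushProcessQ(&buf);
+         *
+         *   // consumer side
+         *   ExynosCameraBuffer out;
+         *   status_t ret = bufferQ.waitAndPopProcessQ(&out);
+         *   if (ret != NO_ERROR)
+         *       return;   // 150 ms timeout, or woken by wakeupAll()/sendCmd(WAKE_UP)
+         */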
= m_processQ.begin(); + *buf = *r; + m_processQ.erase(r); + + m_processQMutex.unlock(); + return OK; + }; + + int getSizeOfProcessQ(void) + { + Mutex::Autolock lock(m_processQMutex); + return m_processQ.size(); + }; + + /* release both Queue */ + void release(void) + { + setStatusException(TIMED_OUT); + + m_processQMutex.lock(); + if (m_waitProcessQ) + m_processQCondition.signal(); + + m_processQ.clear(); + m_processQMutex.unlock(); + }; + + void setWaitTime(uint64_t waitTime) + { + m_waitTime = waitTime; + ALOGV("DEBUG(%s):m_waitTime : %llu", __FUNCTION__, m_waitTime); + } + + bool isWaiting(void) { + Mutex::Autolock lock(m_processQMutex); + return m_waitProcessQ; + } + +private: + List m_processQ; + Mutex m_processQMutex; + Mutex m_flagMutex; + mutable Condition m_processQCondition; + bool m_waitProcessQ; + status_t m_statusException; + uint64_t m_waitTime; + + sp m_thread; +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraMemory.cpp b/libcamera/common_v2/ExynosCameraMemory.cpp new file mode 100644 index 0000000..66bd7fe --- /dev/null +++ b/libcamera/common_v2/ExynosCameraMemory.cpp @@ -0,0 +1,1026 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "ExynosCameraMemoryAllocator" +#include "ExynosCameraMemory.h" + +namespace android { + + +gralloc_module_t const *ExynosCameraGrallocAllocator::m_grallocHal; +gralloc_module_t const *ExynosCameraStreamAllocator::m_grallocHal; + +ExynosCameraGraphicBufferAllocator::ExynosCameraGraphicBufferAllocator() +{ +} + +ExynosCameraGraphicBufferAllocator::~ExynosCameraGraphicBufferAllocator() +{ +} + +status_t ExynosCameraGraphicBufferAllocator::init(void) +{ + m_width = 0; + m_height = 0; + m_stride = 0; + m_halPixelFormat = 0; + m_grallocUsage = GRALLOC_SET_USAGE_FOR_CAMERA; + + for (int i = 0; i < VIDEO_MAX_FRAME; i++) { + m_flagGraphicBufferAlloc[i] = false; + } + + return NO_ERROR; +} + +status_t ExynosCameraGraphicBufferAllocator::setSize(int width, int height, int stride) +{ + m_width = width; + m_height = height; + m_stride = stride; + + return NO_ERROR; +} + +status_t ExynosCameraGraphicBufferAllocator::getSize(int *width, int *height, int *stride) +{ + *width = m_width; + *height = m_height; + *stride = m_stride; + + return NO_ERROR; +} + +status_t ExynosCameraGraphicBufferAllocator::setHalPixelFormat(int halPixelFormat) +{ + m_halPixelFormat = halPixelFormat; + + return NO_ERROR; +} + +int ExynosCameraGraphicBufferAllocator::getHalPixelFormat(void) +{ + return m_halPixelFormat; +} + +status_t ExynosCameraGraphicBufferAllocator::setGrallocUsage(int grallocUsage) +{ + m_grallocUsage = grallocUsage; + + return NO_ERROR; +} + +int ExynosCameraGraphicBufferAllocator::getGrallocUsage(void) +{ + return m_grallocUsage; +} + +sp ExynosCameraGraphicBufferAllocator::alloc(int index, int planeCount, int fdArr[], char *bufAddr[], unsigned int bufSize[]) +{ + if ((index < 0) || (index >= VIDEO_MAX_FRAME)) { + android_printAssert(NULL, LOG_TAG, 
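+        /*
+         * Indices outside [0, VIDEO_MAX_FRAME) abort here because this wrapper
+         * keeps one private_handle_t/GraphicBuffer slot per buffer index.
+         * A minimal usage sketch of the allocator (the ION fd, mapped address
+         * and sizes are illustrative, assumed to come from an ION allocation):
+         *
+         *   ExynosCameraGraphicBufferAllocator gbAllocator;
+         *   gbAllocator.init();
+         *   gbAllocator.setSize(1920, 1080, 1920);
+         *   gbAllocator.setHalPixelFormat(HAL_PIXEL_FORMAT_YCrCb_420_SP);
+         *
+         *   int fds[1] = { ionFd };                 // hypothetical ION fd
+         *   char *addrs[1] = { mappedAddr };        // hypothetical mapping
+         *   unsigned int sizes[1] = { bufSize };
+         *   sp<GraphicBuffer> gb = gbAllocator.alloc(0, 1, fds, addrs, sizes);
+         */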
"ASSERT(%s[%d]):Buffer index error (%d/%d), assert!!!!", + __FUNCTION__, __LINE__, index, VIDEO_MAX_FRAME); + } + + sp graphicBuffer; + + if (m_flagGraphicBufferAlloc[index] == false) { + graphicBuffer = m_alloc(index, planeCount, fdArr, bufAddr, bufSize, m_width, m_height, m_halPixelFormat, m_grallocUsage, m_stride); + if (graphicBuffer == 0) { + ALOGE("ERR(%s[%d]):m_alloc() fail", __FUNCTION__, __LINE__); + goto done; + } + } else { + graphicBuffer = m_graphicBuffer[index]; + if (graphicBuffer == 0) { + ALOGE("ERR(%s[%d]):m_graphicBuffer[%d] == 0. so, fail", __FUNCTION__, __LINE__, index); + goto done; + } + } + +done: + return graphicBuffer; +} + +status_t ExynosCameraGraphicBufferAllocator::free(int index) +{ + int ret = 0; + + if (m_flagGraphicBufferAlloc[index] == false) + return NO_ERROR; + + m_flagGraphicBufferAlloc[index] = false; + + delete m_privateHandle[index]; + m_privateHandle[index] = NULL; + + m_graphicBuffer[index] = 0; + + return NO_ERROR; +} + +sp ExynosCameraGraphicBufferAllocator::m_alloc(int index, + int planeCount, + int fdArr[], + char *bufAddr[], + unsigned int bufSize[], + int width, + int height, + int halPixelFormat, + int grallocUsage, + int stride) +{ + if (m_flagGraphicBufferAlloc[index] == true) { + ALOGE("ERR(%s[%d]):%d is already allocated. so, fail!!", + __FUNCTION__, __LINE__, index); + goto done; + } + + if (planeCount <= 0) { + ALOGE("ERR(%s[%d]):invalid value : planeCount(%d). so, fail!!", + __FUNCTION__, __LINE__, planeCount); + goto done; + } + + if (width == 0 || height == 0 || halPixelFormat == 0 || grallocUsage <= 0 || stride <= 0) { + ALOGE("ERR(%s[%d]):invalid value : width(%d), height(%d), halPixelFormat(%d), grallocUsage(%d), stride(%d). so, fail!!", + __FUNCTION__, __LINE__, width, height, halPixelFormat, grallocUsage, stride); + goto done; + } + + if (planeCount == 1) { + m_privateHandle[index] = new private_handle_t(fdArr[0], bufSize[0], grallocUsage, width, height, + halPixelFormat, halPixelFormat, halPixelFormat, width, height, 0); + + m_privateHandle[index]->base = (uint64_t)bufAddr[0]; + m_privateHandle[index]->offset = 0; + } else { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):planeCount(%d) is not yet support, assert!!!!", + __FUNCTION__, __LINE__, planeCount); + } + + ALOGV("DEBUG(%s[%d]):new GraphicBuffer(bufAddr(%p), width(%d), height(%d), halPixelFormat(%d), grallocUsage(%d), stride(%d), m_privateHandle[%d](%p), false)", + __FUNCTION__, __LINE__, bufAddr[0], width, height, halPixelFormat, grallocUsage, stride, index, m_privateHandle[index]); + + m_graphicBuffer[index] = new GraphicBuffer(width, height, halPixelFormat, grallocUsage, stride, (native_handle_t*)m_privateHandle[index], false); + + m_flagGraphicBufferAlloc[index] = true; + +done: + return m_graphicBuffer[index]; +} + +ExynosCameraIonAllocator::ExynosCameraIonAllocator() +{ + m_ionClient = 0; + m_ionAlign = 0; + m_ionHeapMask = 0; + m_ionFlags = 0; +} + +ExynosCameraIonAllocator::~ExynosCameraIonAllocator() +{ + ion_close(m_ionClient); +} + +status_t ExynosCameraIonAllocator::init(bool isCached) +{ + status_t ret = NO_ERROR; + + if (m_ionClient == 0) { + m_ionClient = ion_open(); + + if (m_ionClient < 0) { + ALOGE("ERR(%s):ion_open failed", __FUNCTION__); + ret = BAD_VALUE; + goto func_exit; + } + } + + m_ionAlign = 0; + m_ionHeapMask = ION_HEAP_SYSTEM_MASK; + m_ionFlags = (isCached == true ? 
+ (ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC ) : 0); + +func_exit: + + return ret; +} + +status_t ExynosCameraIonAllocator::alloc( + int size, + int *fd, + char **addr, + bool mapNeeded) +{ + status_t ret = NO_ERROR; + int ionFd = 0; + char *ionAddr = NULL; + + if (m_ionClient == 0) { + ALOGE("ERR(%s):allocator is not yet created", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (size == 0) { + ALOGE("ERR(%s):size equals zero", __FUNCTION__); + ret = BAD_VALUE; + goto func_exit; + } + + ret = ion_alloc_fd(m_ionClient, size, m_ionAlign, m_ionHeapMask, m_ionFlags, &ionFd); + + if (ret < 0) { + ALOGE("ERR(%s):ion_alloc_fd(fd=%d) failed(%s)", __FUNCTION__, ionFd, strerror(errno)); + ionFd = -1; + ret = INVALID_OPERATION; + goto func_exit; + } + + if (mapNeeded == true) { + if (map(size, ionFd, &ionAddr) != NO_ERROR) { + ALOGE("ERR(%s):map failed", __FUNCTION__); + } + } + +func_exit: + + *fd = ionFd; + *addr = ionAddr; + + return ret; +} + +status_t ExynosCameraIonAllocator::alloc( + int size, + int *fd, + char **addr, + int mask, + int flags, + bool mapNeeded) +{ + status_t ret = NO_ERROR; + int ionFd = 0; + char *ionAddr = NULL; + + if (m_ionClient == 0) { + ALOGE("ERR(%s):allocator is not yet created", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (size == 0) { + ALOGE("ERR(%s):size equals zero", __FUNCTION__); + ret = BAD_VALUE; + goto func_exit; + } + + ret = ion_alloc_fd(m_ionClient, size, m_ionAlign, mask, flags, &ionFd); + + if (ret < 0) { + ALOGE("ERR(%s):ion_alloc_fd(fd=%d) failed(%s)", __FUNCTION__, ionFd, strerror(errno)); + ionFd = -1; + ret = INVALID_OPERATION; + goto func_exit; + } + + if (mapNeeded == true) { + if (map(size, ionFd, &ionAddr) != NO_ERROR) { + ALOGE("ERR(%s):map failed", __FUNCTION__); + } + } + +func_exit: + + *fd = ionFd; + *addr = ionAddr; + + return ret; +} + +status_t ExynosCameraIonAllocator::free( + __unused int size, + int *fd, + char **addr, + bool mapNeeded) +{ + status_t ret = NO_ERROR; + int ionFd = *fd; + char *ionAddr = *addr; + + if (ionFd < 0) { + ALOGE("ERR(%s):ion_fd is lower than zero", __FUNCTION__); + ret = BAD_VALUE; + goto func_exit; + } + + if (mapNeeded == true) { + if (ionAddr == NULL) { + ALOGE("ERR(%s):ion_addr equals NULL", __FUNCTION__); + ret = BAD_VALUE; + goto func_exit; + } + + if (munmap(ionAddr, size) < 0) { + ALOGE("ERR(%s):munmap failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + + ion_close(ionFd); + + ionFd = -1; + ionAddr = NULL; + +func_exit: + + *fd = ionFd; + *addr = ionAddr; + + return ret; +} + +status_t ExynosCameraIonAllocator::map(int size, int fd, char **addr) +{ + status_t ret = NO_ERROR; + char *ionAddr = NULL; + + if (size == 0) { + ALOGE("ERR(%s):size equals zero", __FUNCTION__); + ret = BAD_VALUE; + goto func_exit; + } + + if (fd <= 0) { + ALOGE("ERR(%s):fd=%d failed", __FUNCTION__, fd); + ret = BAD_VALUE; + goto func_exit; + } + + ionAddr = (char *)mmap(NULL, size, PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0); + + if (ionAddr == (char *)MAP_FAILED || ionAddr == NULL) { + ALOGE("ERR(%s):ion_map(size=%d) failed, (fd=%d), (%s)", __FUNCTION__, size, fd, strerror(errno)); + close(fd); + ionAddr = NULL; + ret = INVALID_OPERATION; + goto func_exit; + } + +func_exit: + + *addr = ionAddr; + + return ret; +} + +void ExynosCameraIonAllocator::setIonHeapMask(int mask) +{ + m_ionHeapMask |= mask; +} + +void ExynosCameraIonAllocator::setIonFlags(int flags) +{ + m_ionFlags |= flags; +} + +ExynosCameraMHBAllocator::ExynosCameraMHBAllocator() +{ + 
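+    /*
+     * This wrapper allocates camera_memory_t heaps through the
+     * camera_request_memory callback handed over by the framework in
+     * set_callbacks().  A minimal usage sketch (the callback variable and the
+     * size are hypothetical, normally cached from HAL_camera_device_set_callbacks):
+     *
+     *   ExynosCameraMHBAllocator mhbAllocator;
+     *   mhbAllocator.init(getMemoryCb);              // camera_request_memory
+     *
+     *   int fd = -1;
+     *   char *addr = NULL;
+     *   camera_memory_t *heap = NULL;
+     *   mhbAllocator.alloc(previewFrameSize, &fd, &addr, 1, &heap);
+     *   ...
+     *   mhbAllocator.free(previewFrameSize, &fd, &addr, &heap);
+     */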
m_allocator = NULL; +} + +ExynosCameraMHBAllocator::~ExynosCameraMHBAllocator() +{ +} + +status_t ExynosCameraMHBAllocator::init(camera_request_memory allocator) +{ + m_allocator = allocator; + + return NO_ERROR; +} + +status_t ExynosCameraMHBAllocator::alloc( + int size, + int *fd, + char **addr, + int numBufs, + camera_memory_t **heap) +{ + status_t ret = NO_ERROR; + camera_memory_t *heap_ptr = NULL; + int heapFd = 0; + char *heapAddr = NULL; + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + heap_ptr = m_allocator(-1, size, numBufs, &heapFd); + + if (heap_ptr == NULL || heapFd < 0) { + ALOGE("ERR(%s):heap_alloc(size=%d) failed", __FUNCTION__, size); + heap_ptr = NULL; + heapFd = -1; + ret = BAD_VALUE; + goto func_exit; + } + + heapAddr = (char *)heap_ptr->data; + +func_exit: + + *fd = heapFd; + *addr = heapAddr; + *heap = heap_ptr; + +#ifdef EXYNOS_CAMERA_MEMORY_TRACE + ALOGI("INFO(%s[%d]):[heap.fd=%d] .addr=%p .heap=%p]", + __FUNCTION__, __LINE__, heapFd, heapAddr, heap_ptr); +#endif + + return ret; +} + +status_t ExynosCameraMHBAllocator::free( + __unused int size, + int *fd, + char **addr, + camera_memory_t **heap) +{ + status_t ret = NO_ERROR; + camera_memory_t *heap_ptr = *heap; + int heapFd = *fd; + char *heapAddr = *addr; + + if (heap_ptr == NULL) { + ALOGE("ERR(%s):heap_ptr equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + heap_ptr->release(heap_ptr); + heapAddr = NULL; + heapFd = -1; + heap_ptr = 0; + +func_exit: + + *fd = heapFd; + *addr = heapAddr; + *heap = heap_ptr; + + return ret; +} + +ExynosCameraGrallocAllocator::ExynosCameraGrallocAllocator() +{ + m_allocator = NULL; + m_minUndequeueBufferMargin = 0; + if (hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal)) + ALOGE("ERR(%s):Loading gralloc HAL failed", __FUNCTION__); + + m_grallocUsage = GRALLOC_SET_USAGE_FOR_CAMERA; +} + +ExynosCameraGrallocAllocator::~ExynosCameraGrallocAllocator() +{ + m_minUndequeueBufferMargin = 0; +} + +status_t ExynosCameraGrallocAllocator::init( + preview_stream_ops *allocator, + int bufCount, + int minUndequeueBufferCount) +{ + status_t ret = NO_ERROR; + + ret = init(allocator, bufCount, minUndequeueBufferCount, GRALLOC_SET_USAGE_FOR_CAMERA); + return ret; +} + +status_t ExynosCameraGrallocAllocator::init( + preview_stream_ops *allocator, + int bufCount, + int minUndequeueBufferCount, + int grallocUsage) +{ + status_t ret = NO_ERROR; + + m_allocator = allocator; + if( minUndequeueBufferCount < 0 ) { + m_minUndequeueBufferMargin = 0; + } else { + m_minUndequeueBufferMargin = minUndequeueBufferCount; + } + + if (setBufferCount(bufCount) != 0) { + ALOGE("ERR(%s):setBufferCount failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_allocator->set_usage(m_allocator, grallocUsage) != 0) { + ALOGE("ERR(%s):set_usage failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + m_grallocUsage = grallocUsage; + + if (m_grallocHal == NULL) { + if (hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal)) + ALOGE("ERR(%s):Loading gralloc HAL failed", __FUNCTION__); + } + +func_exit: + + return ret; +} + +status_t ExynosCameraGrallocAllocator::alloc( + buffer_handle_t **bufHandle, + int fd[], + char *addr[], + int *bufStride, + bool *isLocked) +{ + status_t 
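+    /*
+     * alloc() dequeues a window buffer (with up to five retries) and maps it
+     * through gralloc.  A minimal end-to-end sketch of this allocator against
+     * a preview window (the window pointer, counts and geometry are
+     * illustrative only):
+     *
+     *   ExynosCameraGrallocAllocator grallocAllocator;
+     *   grallocAllocator.init(previewWindow, 8, 2);      // bufCount, min undequeued
+     *   grallocAllocator.setBuffersGeometry(1920, 1080, HAL_PIXEL_FORMAT_YV12);
+     *
+     *   buffer_handle_t *handle = NULL;
+     *   int fds[2] = {-1, -1};
+     *   char *addrs[2] = {NULL, NULL};
+     *   bool locked = false;
+     *   if (grallocAllocator.dequeueBuffer(&handle, fds, addrs, &locked) == NO_ERROR)
+     *       grallocAllocator.enqueueBuffer(handle);      // hand the frame to the display
+     */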
ret = NO_ERROR; + int width = 0; + int height = 0; + void *grallocAddr[3] = {NULL}; + int grallocFd[3] = {0}; + const private_handle_t *priv_handle = NULL; + int retryCount = 5; + ExynosCameraDurationTimer dequeuebufferTimer; + ExynosCameraDurationTimer lockbufferTimer; + + for (int retryCount = 5; retryCount > 0; retryCount--) { +#ifdef EXYNOS_CAMERA_MEMORY_TRACE + ALOGI("INFO(%s[%d]):dequeue_buffer retryCount=%d", + __FUNCTION__, __LINE__, retryCount); +#endif + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + dequeuebufferTimer.start(); + ret = m_allocator->dequeue_buffer(m_allocator, bufHandle, bufStride); + dequeuebufferTimer.stop(); + +#if defined (EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE) + ALOGD("DEBUG(%s[%d]):Check dequeue buffer performance, duration(%lld usec)", + __FUNCTION__, __LINE__, dequeuebufferTimer.durationUsecs()); +#else + if (dequeuebufferTimer.durationMsecs() > GRALLOC_WARNING_DURATION_MSEC) + ALOGW("WRN(%s[%d]):dequeue_buffer() duration(%lld msec)", + __FUNCTION__, __LINE__, dequeuebufferTimer.durationMsecs()); +#endif + + if (ret == NO_INIT) { + ALOGW("WARN(%s):BufferQueue is abandoned", __FUNCTION__); + return ret; + } else if (ret != 0) { + ALOGE("ERR(%s):dequeue_buffer failed", __FUNCTION__); + continue; + } + + if (bufHandle == NULL) { + ALOGE("ERR(%s):bufHandle == NULL failed, retry(%d)", __FUNCTION__, retryCount); + continue; + } + + lockbufferTimer.start(); + ret = m_allocator->lock_buffer(m_allocator, *bufHandle); + lockbufferTimer.stop(); + if (ret != 0) + ALOGE("ERR(%s):lock_buffer failed, but go on to the next step ...", __FUNCTION__); + +#if defined (EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE) + ALOGD("DEBUG(%s[%d]):Check lock buffer performance, duration(%lld usec)", + __FUNCTION__, __LINE__, lockbufferTimer.durationUsecs()); +#else + if (lockbufferTimer.durationMsecs() > GRALLOC_WARNING_DURATION_MSEC) + ALOGW("WRN(%s[%d]):lock_buffer() duration(%lld msec)", + __FUNCTION__, __LINE__, lockbufferTimer.durationMsecs()); +#endif + + if (*isLocked == false) { + lockbufferTimer.start(); + ret = m_grallocHal->lock(m_grallocHal, **bufHandle, GRALLOC_LOCK_FOR_CAMERA, + 0, 0,/* left, top */ width, height, grallocAddr); + lockbufferTimer.stop(); + +#if defined (EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE) + ALOGD("DEBUG(%s[%d]):Check grallocHAL lock performance, duration(%lld usec)", + __FUNCTION__, __LINE__, lockbufferTimer.durationUsecs()); +#else + if (lockbufferTimer.durationMsecs() > GRALLOC_WARNING_DURATION_MSEC) + ALOGW("WRN(%s[%d]):grallocHAL->lock() duration(%lld msec)", + __FUNCTION__, __LINE__, lockbufferTimer.durationMsecs()); +#endif + + if (ret != 0) { + ALOGE("ERR(%s):grallocHal->lock failed.. 
retry", __FUNCTION__); + + if (m_allocator->cancel_buffer(m_allocator, *bufHandle) != 0) + ALOGE("ERR(%s):cancel_buffer failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + break; + } + } + + if (bufHandle == NULL) { + ALOGE("ERR(%s):bufHandle == NULL failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (*bufHandle == NULL) { + ALOGE("@@@@ERR(%s):*bufHandle == NULL failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + priv_handle = private_handle_t::dynamicCast(**bufHandle); + + grallocFd[0] = priv_handle->fd; + grallocFd[1] = priv_handle->fd1; + *isLocked = true; + +func_exit: + + fd[0] = grallocFd[0]; + fd[1] = grallocFd[1]; + addr[0] = (char *)grallocAddr[0]; + addr[1] = (char *)grallocAddr[1]; + + return ret; +} + +status_t ExynosCameraGrallocAllocator::free(buffer_handle_t *bufHandle, bool isLocked) +{ + status_t ret = NO_ERROR; + + if (bufHandle == NULL) { + ALOGE("ERR(%s):bufHandle equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (isLocked == true) { + if (m_grallocHal->unlock(m_grallocHal, *bufHandle) != 0) { + ALOGE("ERR(%s):grallocHal->unlock failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + } + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + if (m_allocator->cancel_buffer(m_allocator, bufHandle) != 0) { + ALOGE("ERR(%s):cancel_buffer failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + +func_exit: + + return ret; +} + +status_t ExynosCameraGrallocAllocator::setBufferCount(int bufCount) +{ + status_t ret = NO_ERROR; + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + } + + if (m_allocator->set_buffer_count(m_allocator, bufCount) != 0) { + ALOGE("ERR(%s):set_buffer_count failed [bufCount=%d]", __FUNCTION__, bufCount); + ret = INVALID_OPERATION; + } + + return ret; +} +status_t ExynosCameraGrallocAllocator::setBuffersGeometry( + int width, + int height, + int halPixelFormat) +{ + status_t ret = NO_ERROR; + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + } + + if (m_allocator->set_buffers_geometry( + m_allocator, + width, height, + halPixelFormat) != 0) { + ALOGE("ERR(%s):set_buffers_geometry failed", __FUNCTION__); + ret = INVALID_OPERATION; + } + + return ret; +} + +int ExynosCameraGrallocAllocator::getGrallocUsage(void) +{ + return m_grallocUsage; +} + +status_t ExynosCameraGrallocAllocator::getAllocator(preview_stream_ops **allocator) +{ + *allocator = m_allocator; + + return NO_ERROR; +} + +int ExynosCameraGrallocAllocator::getMinUndequeueBuffer() +{ + int minUndeqBufCount = 0; + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + if (m_allocator->get_min_undequeued_buffer_count(m_allocator, &minUndeqBufCount) != 0) { + ALOGE("ERR(%s):enqueue_buffer failed", __FUNCTION__); + return INVALID_OPERATION; + } + + return minUndeqBufCount < 2 ? 
(minUndeqBufCount + m_minUndequeueBufferMargin) : minUndeqBufCount; +} + +status_t ExynosCameraGrallocAllocator::dequeueBuffer( + buffer_handle_t **bufHandle, + int fd[], + char *addr[], + bool *isLocked) +{ + int bufStride = 0; + status_t ret = NO_ERROR; + + ret = alloc(bufHandle, fd, addr, &bufStride, isLocked); + if (ret == NO_INIT) { + ALOGW("WARN(%s):BufferQueue is abandoned", __FUNCTION__); + return ret; + } else if (ret != NO_ERROR) { + ALOGE("ERR(%s):alloc failed", __FUNCTION__); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraGrallocAllocator::enqueueBuffer(buffer_handle_t *handle) +{ + status_t ret = NO_ERROR; + ExynosCameraDurationTimer enqueuebufferTimer; + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + enqueuebufferTimer.start(); + ret = m_allocator->enqueue_buffer(m_allocator, handle); + enqueuebufferTimer.stop(); + +#if defined (EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE) + ALOGD("DEBUG(%s[%d]):Check enqueue buffer performance, duration(%lld usec)", + __FUNCTION__, __LINE__, enqueuebufferTimer.durationUsecs()); +#else + if (enqueuebufferTimer.durationMsecs() > GRALLOC_WARNING_DURATION_MSEC) + ALOGW("WRN(%s[%d]):enqueue_buffer() duration(%lld msec)", + __FUNCTION__, __LINE__, enqueuebufferTimer.durationMsecs()); +#endif + + if (ret != 0) { + ALOGE("ERR(%s):enqueue_buffer failed", __FUNCTION__); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraGrallocAllocator::cancelBuffer(buffer_handle_t *handle) +{ + status_t ret = NO_ERROR; + ExynosCameraDurationTimer cancelbufferTimer; + + if (m_allocator == NULL) { + ALOGE("ERR(%s):m_allocator equals NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + if (m_grallocHal->unlock(m_grallocHal, *handle) != 0) { + ALOGE("ERR(%s):grallocHal->unlock failed", __FUNCTION__); + return INVALID_OPERATION; + } + + cancelbufferTimer.start(); + ret = m_allocator->cancel_buffer(m_allocator, handle); + cancelbufferTimer.stop(); + +#if defined (EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE) + ALOGD("DEBUG(%s[%d]):Check cancel buffer performance, duration(%lld usec)", + __FUNCTION__, __LINE__, cancelbufferTimer.durationUsecs()); +#else + if (cancelbufferTimer.durationMsecs() > GRALLOC_WARNING_DURATION_MSEC) + ALOGW("WRN(%s[%d]):cancel_buffer() duration(%lld msec)", + __FUNCTION__, __LINE__, cancelbufferTimer.durationMsecs()); +#endif + + if (ret != 0) { + ALOGE("ERR(%s):cancel_buffer failed", __FUNCTION__); + return INVALID_OPERATION; + } + return NO_ERROR; +} + +ExynosCameraStreamAllocator::ExynosCameraStreamAllocator() +{ + m_allocator = NULL; + if (hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal)) + ALOGE("ERR(%s):Loading gralloc HAL failed", __FUNCTION__); +} + +ExynosCameraStreamAllocator::~ExynosCameraStreamAllocator() +{ +} + +status_t ExynosCameraStreamAllocator::init(camera3_stream_t *allocator) +{ + status_t ret = NO_ERROR; + + m_allocator = allocator; + + return ret; +} + +int ExynosCameraStreamAllocator::lock( + buffer_handle_t **bufHandle, + int fd[], + char *addr[], + bool *isLocked, + int planeCount) +{ + int ret = 0; + uint32_t width = 0; + uint32_t height = 0; + uint32_t usage = 0; + uint32_t format = 0; + void *grallocAddr[3] = {NULL}; + const private_handle_t *priv_handle = NULL; + int grallocFd[3] = {0}; + ExynosCameraDurationTimer lockbufferTimer; + + if (bufHandle == NULL) { + ALOGE("ERR(%s):bufHandle equals NULL, failed", __FUNCTION__); + ret = 
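+        /*
+         * A NULL handle pointer cannot be locked.  A minimal sketch of using
+         * this helper on a camera3 output buffer (the stream, buffer and plane
+         * count are illustrative and depend on the stream format):
+         *
+         *   ExynosCameraStreamAllocator streamAllocator;
+         *   streamAllocator.init(stream);                 // camera3_stream_t *
+         *
+         *   int fds[3] = {-1, -1, -1};
+         *   char *addrs[3] = {NULL, NULL, NULL};
+         *   bool locked = false;
+         *   streamAllocator.lock(&streamBuffer->buffer, fds, addrs, &locked, 2);
+         *   ...
+         *   streamAllocator.unlock(streamBuffer->buffer);
+         */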
INVALID_OPERATION; + goto func_exit; + } + + if (*bufHandle == NULL) { + ALOGE("ERR(%s):*bufHandle == NULL, failed", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + width = m_allocator->width; + height = m_allocator->height; + usage = m_allocator->usage; + format = m_allocator->format; + + switch (format) { + case HAL_PIXEL_FORMAT_YCbCr_420_888: + android_ycbcr ycbcr; + lockbufferTimer.start(); + ret = m_grallocHal->lock_ycbcr( + m_grallocHal, + **bufHandle, + usage, + 0, 0, /* left, top */ + width, height, + &ycbcr); + lockbufferTimer.stop(); + grallocAddr[0] = ycbcr.y; + break; + default: + lockbufferTimer.start(); + ret = m_grallocHal->lock( + m_grallocHal, + **bufHandle, + usage, + 0, 0, /* left, top */ + width, height, + grallocAddr); + lockbufferTimer.stop(); + break; + } + +#if defined (EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE) + ALOGD("DEBUG(%s[%d]):Check grallocHAL lock performance, duration(%lld usec)", + __FUNCTION__, __LINE__, lockbufferTimer.durationUsecs()); +#else + if (lockbufferTimer.durationMsecs() > GRALLOC_WARNING_DURATION_MSEC) + ALOGW("WRN(%s[%d]):grallocHAL->lock() duration(%lld msec)", + __FUNCTION__, __LINE__, lockbufferTimer.durationMsecs()); +#endif + + if (ret != 0) { + ALOGE("ERR(%s):grallocHal->lock failed.. ", __FUNCTION__); + goto func_exit; + } + + priv_handle = private_handle_t::dynamicCast(**bufHandle); + + switch (planeCount) { + case 3: + grallocFd[2] = priv_handle->fd2; + case 2: + grallocFd[1] = priv_handle->fd1; + case 1: + grallocFd[0] = priv_handle->fd; + break; + default: + break; + } + + *isLocked = true; + +func_exit: + switch (planeCount) { + case 3: + fd[2] = grallocFd[2]; + addr[2] = (char *)grallocAddr[2]; + case 2: + fd[1] = grallocFd[1]; + addr[1] = (char *)grallocAddr[1]; + case 1: + fd[0] = grallocFd[0]; + addr[0] = (char *)grallocAddr[0]; + break; + default: + break; + } + + return ret; +} + +int ExynosCameraStreamAllocator::unlock(buffer_handle_t *bufHandle) +{ + int ret = 0; + + if (bufHandle == NULL) { + ALOGE("ERR(%s):bufHandle equals NULL", __FUNCTION__); + ret = INVALID_OPERATION; + goto func_exit; + } + + ret = m_grallocHal->unlock(m_grallocHal, *bufHandle); + + if (ret != 0) + ALOGE("ERR(%s):grallocHal->unlock failed", __FUNCTION__); + +func_exit: + return ret; +} +} diff --git a/libcamera/common_v2/ExynosCameraMemory.h b/libcamera/common_v2/ExynosCameraMemory.h new file mode 100644 index 0000000..5e0da03 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraMemory.h @@ -0,0 +1,234 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file ExynosCameraMemory.h + * \brief header file for ExynosCameraMemory + * \author Sunmi Lee(carrotsm.lee@samsung.com) + * \date 2013/07/22 + * + */ + +#ifndef EXYNOS_CAMERA_MEMORY_H__ +#define EXYNOS_CAMERA_MEMORY_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "gralloc_priv.h" +#include "exynos_format.h" + +#include "ExynosCameraConfig.h" + +#include "fimc-is-metadata.h" +#include "ExynosCameraAutoTimer.h" + +namespace android { + +/* #define EXYNOS_CAMERA_MEMORY_TRACE */ + +/* EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE define is log of gralloc function(xxxxx) duration. + * If you want check function duration, enable this define. + */ +/* #define EXYNOS_CAMERA_MEMORY_TRACE_GRALLOC_PERFORMANCE */ +#define GRALLOC_WARNING_DURATION_MSEC (180) /* 180ms */ + +class ExynosCameraGraphicBufferAllocator { +public: + ExynosCameraGraphicBufferAllocator(); + virtual ~ExynosCameraGraphicBufferAllocator(); + + status_t init(void); + + status_t setSize(int width, int height, int stride); + status_t getSize(int *width, int *height, int *stride); + + status_t setHalPixelFormat(int halPixelFormat); + int getHalPixelFormat(void); + + status_t setGrallocUsage(int grallocUsage); + int getGrallocUsage(void); + + sp alloc(int index, int planeCount, int fdArr[], char *bufAddr[], unsigned int bufSize[]); + status_t free(int index); + +private: + sp m_alloc( + int index, + int planeCount, + int fdArr[], + char *bufAddr[], + unsigned int bufSize[], + int width, + int height, + int halPixelFormat, + int grallocUsage, + int stride); + +private: + int m_width; + int m_height; + int m_stride; + int m_halPixelFormat; + int m_grallocUsage; + + private_handle_t *m_privateHandle[VIDEO_MAX_FRAME]; + sp m_graphicBuffer[VIDEO_MAX_FRAME]; + bool m_flagGraphicBufferAlloc[VIDEO_MAX_FRAME]; +}; + +class ExynosCameraIonAllocator { +public: + ExynosCameraIonAllocator(); + virtual ~ExynosCameraIonAllocator(); + + status_t init(bool isCached); + status_t alloc( + int size, + int *fd, + char **addr, + bool mapNeeded); + status_t alloc( + int size, + int *fd, + char **addr, + int mask, + int flags, + bool mapNeeded); + status_t free( + int size, + int *fd, + char **addr, + bool mapNeeded); + status_t map(int size, int fd, char **addr); + void setIonHeapMask(int mask); + void setIonFlags(int flags); + +private: + int m_ionClient; + size_t m_ionAlign; + unsigned int m_ionHeapMask; + unsigned int m_ionFlags; + + sp m_graphicBuffer[VIDEO_MAX_FRAME]; + bool m_flagGraphicBufferAlloc[VIDEO_MAX_FRAME]; +}; + +class ExynosCameraMHBAllocator { +/* do not use! 
deprecated class */ +public: + ExynosCameraMHBAllocator(); + virtual ~ExynosCameraMHBAllocator(); + + status_t init(camera_request_memory allocator); + status_t alloc( + int size, + int *fd, + char **addr, + int numBufs, + camera_memory_t **heap); + status_t free( + int size, + int *fd, + char **addr, + camera_memory_t **heap); + +private: + camera_request_memory m_allocator; +}; + +class ExynosCameraGrallocAllocator { +public: + ExynosCameraGrallocAllocator(); + virtual ~ExynosCameraGrallocAllocator(); + + status_t init( + preview_stream_ops *allocator, + int bufCount, + int minUndequeueBufferMargin = -1); + status_t init( + preview_stream_ops *allocator, + int bufCount, + int minUndequeueBufferMargin, + int grallocUsage); + status_t alloc( + buffer_handle_t **bufHandle, + int fd[], + char *addr[], + int *bufStride, + bool *isLocked); + status_t free(buffer_handle_t *bufHandle, bool isLocked); + + status_t setBufferCount(int bufCount); + status_t setBuffersGeometry( + int width, + int height, + int halPixelFormat); + + /* + * setGrallocUsage is happen on init() api. + * default grallocUsage is GRALLOC_SET_USAGE_FOR_CAMERA + */ + int getGrallocUsage(void); + + status_t getAllocator(preview_stream_ops **allocator); + int getMinUndequeueBuffer(); + status_t dequeueBuffer( + buffer_handle_t **bufHandle, + int fd[], + char *addr[], + bool *isLocked); + status_t enqueueBuffer(buffer_handle_t *bufHandle); + status_t cancelBuffer(buffer_handle_t *bufHandle); + +private: + preview_stream_ops *m_allocator; + static gralloc_module_t const *m_grallocHal; + int32_t m_minUndequeueBufferMargin; + + int m_grallocUsage; +}; + +class ExynosCameraStreamAllocator { +public: + ExynosCameraStreamAllocator(); + virtual ~ExynosCameraStreamAllocator(); + + status_t init(camera3_stream_t *allocator); + int lock( + buffer_handle_t **bufHandle, + int fd[], + char *addr[], + bool *isLocked, + int planeCount); + + int unlock(buffer_handle_t *bufHandle); + +private: + camera3_stream_t *m_allocator; + static gralloc_module_t const *m_grallocHal; +}; +} +#endif diff --git a/libcamera/common_v2/ExynosCameraMetadataConverter.cpp b/libcamera/common_v2/ExynosCameraMetadataConverter.cpp new file mode 100644 index 0000000..dbb838c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraMetadataConverter.cpp @@ -0,0 +1,4313 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ExynosCameraMetadataConverter" + +#include "ExynosCameraMetadataConverter.h" +#include "ExynosCameraRequestManager.h" + +namespace android { +#define SET_BIT(x) (1 << x) + +ExynosCamera3MetadataConverter::ExynosCamera3MetadataConverter(int cameraId, ExynosCameraParameters *parameters) +{ + ExynosCameraActivityControl *activityControl = NULL; + + m_cameraId = cameraId; + m_parameters = parameters; + activityControl = m_parameters->getActivityControl(); + m_flashMgr = activityControl->getFlashMgr(); + m_sensorStaticInfo = m_parameters->getSensorStaticInfo(); + m_preCaptureTriggerOn = false; + m_isManualAeControl = false; + + m_blackLevelLockOn = false; + m_faceDetectModeOn = false; + + m_lockVendorIsoValue = 0; + m_lockExposureTime = 0; + + m_afMode = AA_AFMODE_CONTINUOUS_PICTURE; + m_preAfMode = AA_AFMODE_CONTINUOUS_PICTURE; + + m_focusDistance = -1; + + m_maxFps = 30; + m_overrideFlashControl= false; + memset(m_gpsProcessingMethod, 0x00, sizeof(m_gpsProcessingMethod)); +} + +ExynosCamera3MetadataConverter::~ExynosCamera3MetadataConverter() +{ + m_defaultRequestSetting.release(); +} + +status_t ExynosCamera3MetadataConverter::constructDefaultRequestSettings(int type, camera_metadata_t **request) +{ + Mutex::Autolock l(m_requestLock); + + ALOGD("DEBUG(%s[%d]):Type(%d), cameraId(%d)", __FUNCTION__, __LINE__, type, m_cameraId); + + CameraMetadata settings; + m_preExposureTime = 0; + const int64_t USEC = 1000LL; + const int64_t MSEC = USEC * 1000LL; + const int64_t SEC = MSEC * 1000LL; + /** android.request */ + /* request type */ + const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; + settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); + + /* meta data mode */ + const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL; + settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1); + + /* id */ + const int32_t id = 0; + settings.update(ANDROID_REQUEST_ID, &id, 1); + + /* frame count */ + const int32_t frameCount = 0; + settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1); + + /** android.control */ + /* control intent */ + uint8_t controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; + switch (type) { + case CAMERA3_TEMPLATE_PREVIEW: + controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; + ALOGD("DEBUG(%s[%d]):type is ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW", __FUNCTION__, __LINE__); + break; + case CAMERA3_TEMPLATE_STILL_CAPTURE: + controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; + ALOGD("DEBUG(%s[%d]):type is CAMERA3_TEMPLATE_STILL_CAPTURE", __FUNCTION__, __LINE__); + break; + case CAMERA3_TEMPLATE_VIDEO_RECORD: + controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; + ALOGD("DEBUG(%s[%d]):type is CAMERA3_TEMPLATE_VIDEO_RECORD", __FUNCTION__, __LINE__); + break; + case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: + controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; + ALOGD("DEBUG(%s[%d]):type is CAMERA3_TEMPLATE_VIDEO_SNAPSHOT", __FUNCTION__, __LINE__); + break; + case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: + controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; + ALOGD("DEBUG(%s[%d]):type is CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG", __FUNCTION__, __LINE__); + break; + case CAMERA3_TEMPLATE_MANUAL: + controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL; + ALOGD("DEBUG(%s[%d]):type is CAMERA3_TEMPLATE_MANUAL", __FUNCTION__, __LINE__); + break; + default: + ALOGD("ERR(%s[%d]):Custom intent type is selected for setting control intent(%d)", __FUNCTION__, __LINE__, type); + controlIntent = 
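+        /*
+         * Unknown template types fall back to the CUSTOM capture intent.  A
+         * minimal sketch of how a camera3 client consumes these defaults
+         * (the device pointer is hypothetical):
+         *
+         *   const camera_metadata_t *defaults =
+         *       device->ops->construct_default_request_settings(device,
+         *                                                        CAMERA3_TEMPLATE_PREVIEW);
+         *   camera_metadata_ro_entry_t entry;
+         *   if (find_camera_metadata_ro_entry(defaults,
+         *           ANDROID_CONTROL_CAPTURE_INTENT, &entry) == 0)
+         *       ALOGV("capture intent %d", entry.data.u8[0]);
+         */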
ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; + break; + } + settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); + + /* 3AA control */ + uint8_t controlMode = ANDROID_CONTROL_MODE_OFF; + uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF; + uint8_t aeMode = ANDROID_CONTROL_AE_MODE_OFF; + uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_OFF; + int32_t aeTargetFpsRange[2] = {15, 30}; + switch (type) { + case CAMERA3_TEMPLATE_PREVIEW: + controlMode = ANDROID_CONTROL_MODE_AUTO; + if (m_cameraId == CAMERA_ID_BACK) + afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; + aeMode = ANDROID_CONTROL_AE_MODE_ON; + awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + break; + case CAMERA3_TEMPLATE_STILL_CAPTURE: + controlMode = ANDROID_CONTROL_MODE_AUTO; + if (m_cameraId == CAMERA_ID_BACK) + afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; + aeMode = ANDROID_CONTROL_AE_MODE_ON; + awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + break; + case CAMERA3_TEMPLATE_VIDEO_RECORD: + controlMode = ANDROID_CONTROL_MODE_AUTO; + if (m_cameraId == CAMERA_ID_BACK) + afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; + aeMode = ANDROID_CONTROL_AE_MODE_ON; + awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + /* Fix FPS for Recording */ + aeTargetFpsRange[0] = 30; + aeTargetFpsRange[1] = 30; + break; + case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: + controlMode = ANDROID_CONTROL_MODE_AUTO; + if (m_cameraId == CAMERA_ID_BACK) + afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO; + aeMode = ANDROID_CONTROL_AE_MODE_ON; + awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + break; + case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: + controlMode = ANDROID_CONTROL_MODE_AUTO; + if (m_cameraId == CAMERA_ID_BACK) + afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE; + aeMode = ANDROID_CONTROL_AE_MODE_ON; + awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; + break; + case CAMERA3_TEMPLATE_MANUAL: + controlMode = ANDROID_CONTROL_MODE_OFF; + afMode = ANDROID_CONTROL_AF_MODE_OFF; + aeMode = ANDROID_CONTROL_AE_MODE_OFF; + awbMode = ANDROID_CONTROL_AWB_MODE_OFF; + break; + default: + ALOGD("ERR(%s[%d]):Custom intent type is selected for setting 3AA control(%d)", __FUNCTION__, __LINE__, type); + break; + } + settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); + settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); + settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); + settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); + settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2); + + const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; + settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); + + const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; + settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); + + /* effect mode */ + const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; + settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); + + /* scene mode */ + const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; + settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); + + /* ae lock mode */ + const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; + settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); + + /* ae region */ + int w,h; + m_parameters->getMaxSensorSize(&w, &h); + const int32_t controlRegions[5] = { + 0, 0, w, h, 0 + }; + if (m_cameraId == CAMERA_ID_BACK) { + settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5); + settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5); + settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5); + } + + /* 
exposure compensation */ + const int32_t aeExpCompensation = 0; + settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1); + + /* anti-banding mode */ + const uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; + settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1); + + /* awb lock */ + const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; + settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); + + /* video stabilization mode */ + const uint8_t vstabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1); + + /** android.lens */ + const float focusDistance = -1.0f; + settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); + settings.update(ANDROID_LENS_FOCAL_LENGTH, &(m_sensorStaticInfo->focalLength), 1); + settings.update(ANDROID_LENS_APERTURE, &(m_sensorStaticInfo->fNumber), 1); // ExifInterface : TAG_APERTURE = "FNumber"; + const float filterDensity = 0.0f; + settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1); + const uint8_t opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opticalStabilizationMode, 1); + + /** android.sensor */ + const int64_t exposureTime = 0 * MSEC; + settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); + const int64_t frameDuration = 33333333L; /* 1/30 s */ + settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); + const int32_t sensitivity = 400; + settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); + + /** android.flash */ + const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; + settings.update(ANDROID_FLASH_MODE, &flashMode, 1); + const uint8_t firingPower = 0; + settings.update(ANDROID_FLASH_FIRING_POWER, &firingPower, 1); + const int64_t firingTime = 0; + settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); + + /** android.noise_reduction */ + const uint8_t noiseStrength = 5; + settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1); + + /** android.color_correction */ + const camera_metadata_rational_t colorTransform[9] = { + {1,1}, {0,1}, {0,1}, + {0,1}, {1,1}, {0,1}, + {0,1}, {0,1}, {1,1} + }; + settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); + + /** android.tonemap */ + const float tonemapCurve[4] = { + 0.0f, 0.0f, + 1.0f, 1.0f + }; + settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4); + settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4); + settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4); + + /** android.edge */ + const uint8_t edgeStrength = 5; + settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1); + + /** android.scaler */ + const int32_t cropRegion[4] = { + 0, 0, w, h + }; + settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4); + + /** android.jpeg */ + const uint8_t jpegQuality = 96; + settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1); + const int32_t thumbnailSize[2] = { + 512, 384 + }; + settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); + const uint8_t thumbnailQuality = 100; + settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); + const double gpsCoordinates[3] = { + 0, 0 + }; + settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); + const uint8_t gpsProcessingMethod[32] = "None"; + settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32); + const int64_t gpsTimestamp = 0; + settings.update(ANDROID_JPEG_GPS_TIMESTAMP, 
&gpsTimestamp, 1); + const int32_t jpegOrientation = 0; + settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); + + /** android.stats */ + const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; + settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); + const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF; + settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); + const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF; + settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); + const uint8_t hotPixelMapMode = 0; + settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1); + + /** android.blacklevel */ + const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF; + settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1); + + /** Processing block modes */ + uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_OFF; + uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST; + uint8_t noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_OFF; + uint8_t shadingMode = ANDROID_SHADING_MODE_OFF; + uint8_t colorCorrectionMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX; + uint8_t tonemapMode = ANDROID_TONEMAP_MODE_CONTRAST_CURVE; + uint8_t edgeMode = ANDROID_EDGE_MODE_OFF; + uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; + switch (type) { + case CAMERA3_TEMPLATE_STILL_CAPTURE: + if (m_cameraId == CAMERA_ID_BACK) + lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON; + hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; + noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; + shadingMode = ANDROID_SHADING_MODE_FAST; + colorCorrectionMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; + tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY; + edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY; + break; + case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: + hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; + noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_FAST; + shadingMode = ANDROID_SHADING_MODE_FAST; + colorCorrectionMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; + tonemapMode = ANDROID_TONEMAP_MODE_FAST; + edgeMode = ANDROID_EDGE_MODE_FAST; + break; + case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: + hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY; + noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG; + shadingMode = ANDROID_SHADING_MODE_FAST; + colorCorrectionMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY; + tonemapMode = ANDROID_TONEMAP_MODE_FAST; + edgeMode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG; + break; + case CAMERA3_TEMPLATE_PREVIEW: + case CAMERA3_TEMPLATE_VIDEO_RECORD: + default: + hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST; + noiseReductionMode = ANDROID_NOISE_REDUCTION_MODE_FAST; + shadingMode = ANDROID_SHADING_MODE_FAST; + colorCorrectionMode = ANDROID_COLOR_CORRECTION_MODE_FAST; + tonemapMode = ANDROID_TONEMAP_MODE_FAST; + edgeMode = ANDROID_EDGE_MODE_FAST; + break; + } + settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); + settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1); + settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1); + settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); + settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorCorrectionMode, 1); + settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1); + settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); + settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); + + *request = 
settings.release(); + m_defaultRequestSetting = *request; + + ALOGD("DEBUG(%s[%d]):Registered default request template(%d)", __FUNCTION__, __LINE__, type); + return OK; +} + +status_t ExynosCamera3MetadataConverter::constructStaticInfo(int cameraId, camera_metadata_t **cameraInfo) +{ + status_t ret = NO_ERROR; + + ALOGD("DEBUG(%s[%d]):ID(%d)", __FUNCTION__, __LINE__, cameraId); + struct ExynosSensorInfoBase *sensorStaticInfo = NULL; + CameraMetadata info; + Vector i64Vector; + Vector i32Vector; + + sensorStaticInfo = createExynosCamera3SensorInfo(cameraId); + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]): sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* android.colorCorrection static attributes */ + if (sensorStaticInfo->colorAberrationModes != NULL) { + ret = info.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES, + sensorStaticInfo->colorAberrationModes, + sensorStaticInfo->colorAberrationModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_COLOR_CORRECTION_ABERRATION_MODE update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):colorAberrationModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* andorid.control static attributes */ + if (sensorStaticInfo->antiBandingModes != NULL) { + ret = info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, + sensorStaticInfo->antiBandingModes, + sensorStaticInfo->antiBandingModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):antiBandingModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->aeModes != NULL) { + ret = info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, + sensorStaticInfo->aeModes, + sensorStaticInfo->aeModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AE_AVAILABLE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):aeModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + i32Vector.clear(); + m_createAeAvailableFpsRanges(sensorStaticInfo, &i32Vector, cameraId); + ret = info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, + i32Vector.array(), i32Vector.size()); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, + sensorStaticInfo->exposureCompensationRange, + ARRAY_LENGTH(sensorStaticInfo->exposureCompensationRange)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AE_COMPENSATION_RANGE update failed(%d)", __FUNCTION__, ret); + + const camera_metadata_rational exposureCompensationStep = + {(int32_t)((sensorStaticInfo->exposureCompensationStep) * 100.0), 100}; + ret = info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, + &exposureCompensationStep, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AE_COMPENSATION_STEP update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->afModes != NULL) { + ret = info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, + sensorStaticInfo->afModes, + sensorStaticInfo->afModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AF_AVAILABLE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):afModes is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->effectModes != NULL) { + ret = info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, + sensorStaticInfo->effectModes, + sensorStaticInfo->effectModesLength); + if (ret < 0) + 
ALOGD("DEBUG(%s):ANDROID_CONTROL_AVAILABLE_EFFECTS update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):effectModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->sceneModes != NULL) { + ret = info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, + sensorStaticInfo->sceneModes, + sensorStaticInfo->sceneModesLength); + if (ret < 0) + ALOGE("DEBUG(%s):ANDROID_CONTROL_AVAILABLE_SCENE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):sceneModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->videoStabilizationModes != NULL) { + ret = info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, + sensorStaticInfo->videoStabilizationModes, + sensorStaticInfo->videoStabilizationModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):videoStabilizationModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->awbModes != NULL) { + ret = info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, + sensorStaticInfo->awbModes, + sensorStaticInfo->awbModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AWB_AVAILABLE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):awbModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + ret = info.update(ANDROID_CONTROL_MAX_REGIONS, + sensorStaticInfo->max3aRegions, + ARRAY_LENGTH(sensorStaticInfo->max3aRegions)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_MAX_REGIONS update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->sceneModeOverrides != NULL) { + ret = info.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, + sensorStaticInfo->sceneModeOverrides, + sensorStaticInfo->sceneModeOverridesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_SCENE_MODE_OVERRIDES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):sceneModeOverrides at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + i32Vector.clear(); + if ( m_createControlAvailableHighSpeedVideoConfigurations(sensorStaticInfo, &i32Vector, cameraId) == NO_ERROR ) { + ret = info.update(ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, + i32Vector.array(), i32Vector.size()); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS is NULL", __FUNCTION__, __LINE__); + } + + ret = info.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, + &(sensorStaticInfo->aeLockAvailable), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AE_LOCK_AVAILABLE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, + &(sensorStaticInfo->awbLockAvailable), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AWB_LOCK_AVAILABLE update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->controlModes != NULL) { + ret = info.update(ANDROID_CONTROL_AVAILABLE_MODES, + sensorStaticInfo->controlModes, + sensorStaticInfo->controlModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_CONTROL_AVAILABLE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):controlModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* android.edge static attributes */ + if (sensorStaticInfo->edgeModes != NULL) { + ret = 
info.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES, + sensorStaticInfo->edgeModes, + sensorStaticInfo->edgeModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_EDGE_AVAILABLE_EDGE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):edgeModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* andorid.flash static attributes */ + ret = info.update(ANDROID_FLASH_INFO_AVAILABLE, + &(sensorStaticInfo->flashAvailable), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_FLASH_INFO_AVAILABLE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, + &(sensorStaticInfo->chargeDuration), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_FLASH_INFO_CHARGE_DURATION update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_FLASH_COLOR_TEMPERATURE, + &(sensorStaticInfo->colorTemperature), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_FLASH_COLOR_TEMPERATURE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_FLASH_MAX_ENERGY, + &(sensorStaticInfo->maxEnergy), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_FLASH_MAX_ENERGY update failed(%d)", __FUNCTION__, ret); + + /* android.hotPixel static attributes */ + if (sensorStaticInfo->hotPixelModes != NULL) { + ret = info.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES, + sensorStaticInfo->hotPixelModes, + sensorStaticInfo->hotPixelModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):hotPixelModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* andorid.jpeg static attributes */ + i32Vector.clear(); + m_createJpegAvailableThumbnailSizes(sensorStaticInfo, &i32Vector); + ret = info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, i32Vector.array(), i32Vector.size()); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES update failed(%d)", __FUNCTION__, ret); + + const int32_t jpegMaxSize = sensorStaticInfo->maxPictureW * sensorStaticInfo->maxPictureH * 2; + ret = info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_JPEG_MAX_SIZE update failed(%d)", __FUNCTION__, ret); + + + /* android.lens static attributes */ + ret = info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, + &(sensorStaticInfo->fNumber), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_AVAILABLE_APERTURES update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, + &(sensorStaticInfo->filterDensity), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, + &(sensorStaticInfo->focalLength), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->opticalStabilization != NULL + && m_hasTagInList(sensorStaticInfo->requestKeys, + sensorStaticInfo->requestKeysLength, + ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) + { + ret = info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, + sensorStaticInfo->opticalStabilization, + sensorStaticInfo->opticalStabilizationLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):opticalStabilization at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + ret = 
info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, + &(sensorStaticInfo->hyperFocalDistance), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, + &(sensorStaticInfo->minimumFocusDistance), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, + sensorStaticInfo->shadingMapSize, + ARRAY_LENGTH(sensorStaticInfo->shadingMapSize)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_SHADING_MAP_SIZE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, + &(sensorStaticInfo->focusDistanceCalibration), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_LENS_FACING, + &(sensorStaticInfo->lensFacing), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LENS_FACING update failed(%d)", __FUNCTION__, ret); + + /* android.noiseReduction static attributes */ + if (sensorStaticInfo->noiseReductionModes != NULL + && m_hasTagInList(sensorStaticInfo->requestKeys, + sensorStaticInfo->requestKeysLength, + ANDROID_NOISE_REDUCTION_MODE)) + { + ret = info.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES, + sensorStaticInfo->noiseReductionModes, + sensorStaticInfo->noiseReductionModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):noiseReductionModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* android.request static attributes */ + ret = info.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, + sensorStaticInfo->maxNumOutputStreams, + ARRAY_LENGTH(sensorStaticInfo->maxNumOutputStreams)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, + &(sensorStaticInfo->maxNumInputStreams), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, + &(sensorStaticInfo->maxPipelineDepth), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_PIPELINE_MAX_DEPTH update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT, + &(sensorStaticInfo->partialResultCount), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_PARTIAL_RESULT_COUNT update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->capabilities != NULL) { + ret = info.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + sensorStaticInfo->capabilities, + sensorStaticInfo->capabilitiesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_AVAILABLE_CAPABILITIES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):capabilities at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->requestKeys != NULL) { + ret = info.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, + sensorStaticInfo->requestKeys, + sensorStaticInfo->requestKeysLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):requestKeys at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->resultKeys 
!= NULL) { + ret = info.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, + sensorStaticInfo->resultKeys, + sensorStaticInfo->resultKeysLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_AVAILABLE_RESULT_KEYS update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):resultKeys at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->characteristicsKeys != NULL) { + ret = info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, + sensorStaticInfo->characteristicsKeys, + sensorStaticInfo->characteristicsKeysLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):characteristicsKeys at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* android.scaler static attributes */ + const float maxZoom = (sensorStaticInfo->maxZoomRatio / 1000); + ret = info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, &maxZoom, 1); + if (ret < 0) { + ALOGD("DEBUG(%s):ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM update failed(%d)", __FUNCTION__, ret); + } + + /* TODO:implement input/output format map */ +#if 0 + ret = info.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, + , + ); + if (ret < 0) + ALOGE("DEBUG(%s):ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP update failed(%d)", __FUNCTION__, ret); +#endif + + i32Vector.clear(); + if(m_createScalerAvailableInputOutputFormatsMap(sensorStaticInfo, &i32Vector, cameraId) == NO_ERROR) { + /* Update AvailableInputOutputFormatsMap only if private reprocessing is supported */ + ret = info.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, i32Vector.array(), i32Vector.size()); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP update failed(%d)", __FUNCTION__, ret); + } + + i32Vector.clear(); + m_createScalerAvailableStreamConfigurationsOutput(sensorStaticInfo, &i32Vector, cameraId); + ret = info.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, i32Vector.array(), i32Vector.size()); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS update failed(%d)", __FUNCTION__, ret); + + i64Vector.clear(); + m_createScalerAvailableMinFrameDurations(sensorStaticInfo, &i64Vector, cameraId); + ret = info.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, i64Vector.array(), i64Vector.size()); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS update failed(%d)", __FUNCTION__, ret); + + if (m_hasTagInList(sensorStaticInfo->capabilities, sensorStaticInfo->capabilitiesLength, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING) == true) { + /* Set Stall duration for reprocessing */ +#ifdef HAL3_REPROCESSING_MAX_CAPTURE_STALL + int32_t maxCaptureStall = HAL3_REPROCESSING_MAX_CAPTURE_STALL; + ret = info.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &maxCaptureStall, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_REPROCESS_MAX_CAPTURE_STALL update failed(%d)", __FUNCTION__, ret); +#else + ALOGE("ERR(%s):Private reprocessing is supported but ANDROID_REPROCESS_MAX_CAPTURE_STALL has not specified.", __FUNCTION__); +#endif + } + + if (sensorStaticInfo->stallDurations != NULL) { + ret = info.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, + sensorStaticInfo->stallDurations, + sensorStaticInfo->stallDurationsLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SCALER_AVAILABLE_STALL_DURATIONS update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):stallDurations at sensorStaticInfo is NULL", 
__FUNCTION__, __LINE__); + } + + ret = info.update(ANDROID_SCALER_CROPPING_TYPE, + &(sensorStaticInfo->croppingType), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SCALER_CROPPING_TYPE update failed(%d)", __FUNCTION__, ret); + + /* android.sensor static attributes */ + const int32_t kResolution[4] = + {0, 0, sensorStaticInfo->maxSensorW, sensorStaticInfo->maxSensorH}; + ret = info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, kResolution, 4); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, + sensorStaticInfo->sensitivityRange, + ARRAY_LENGTH(sensorStaticInfo->sensitivityRange)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_SENSITIVITY_RANGE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, + &(sensorStaticInfo->colorFilterArrangement), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, + sensorStaticInfo->exposureTimeRange, + ARRAY_LENGTH(sensorStaticInfo->exposureTimeRange)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, + &(sensorStaticInfo->maxFrameDuration), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_MAX_FRAME_DURATION update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, + sensorStaticInfo->sensorPhysicalSize, + ARRAY_LENGTH(sensorStaticInfo->sensorPhysicalSize)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_PHYSICAL_SIZE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, &(kResolution[2]), 2); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, + &(sensorStaticInfo->whiteLevel), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_WHITE_LEVEL update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, + &(sensorStaticInfo->timestampSource), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, + &(sensorStaticInfo->referenceIlluminant1), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_REFERENCE_ILLUMINANT1 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, + &(sensorStaticInfo->referenceIlluminant2), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_REFERENCE_ILLUMINANT2 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, sensorStaticInfo->calibration1, 9); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_CALIBRATION_TRANSFORM2 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, sensorStaticInfo->calibration2, 9); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_CALIBRATION_TRANSFORM2 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_COLOR_TRANSFORM1, sensorStaticInfo->colorTransformMatrix1, 9); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_COLOR_TRANSFORM1 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_COLOR_TRANSFORM2, 
sensorStaticInfo->colorTransformMatrix2, 9); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_COLOR_TRANSFORM2 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_FORWARD_MATRIX1, sensorStaticInfo->forwardMatrix1, 9); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_FORWARD_MATRIX1 update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_FORWARD_MATRIX2, sensorStaticInfo->forwardMatrix2, 9); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_FORWARD_MATRIX2 update failed(%d)", __FUNCTION__, ret); + +#if 0 + ret = info.update(ANDROID_SENSOR_BASE_GAIN_FACTOR, + , + ); + if (ret < 0) + ALOGE("DEBUG(%s):ANDROID_SENSOR_BASE_GAIN_FACTOR update failed(%d)", __FUNCTION__, ret); +#endif + + ret = info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, + sensorStaticInfo->blackLevelPattern, + ARRAY_LENGTH(sensorStaticInfo->blackLevelPattern)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_BLACK_LEVEL_PATTERN update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, + &(sensorStaticInfo->maxAnalogSensitivity), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_ORIENTATION, + &(sensorStaticInfo->orientation), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_ORIENTATION update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS, + sensorStaticInfo->profileHueSatMapDimensions, + ARRAY_LENGTH(sensorStaticInfo->profileHueSatMapDimensions)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->testPatternModes != NULL) { + ret = info.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, + sensorStaticInfo->testPatternModes, + sensorStaticInfo->testPatternModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):testPatternModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* android.statistics static attributes */ + if (sensorStaticInfo->faceDetectModes != NULL) { + ret = info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, + sensorStaticInfo->faceDetectModes, + sensorStaticInfo->faceDetectModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):faceDetectModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + ret = info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, + &(sensorStaticInfo->histogramBucketCount), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, + &sensorStaticInfo->maxNumDetectedFaces, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_MAX_FACE_COUNT update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, + &sensorStaticInfo->maxHistogramCount, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT update failed(%d)", __FUNCTION__, ret); + + ret = info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, + &(sensorStaticInfo->maxSharpnessMapValue), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE update failed(%d)", __FUNCTION__, ret); 
+ + ret = info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, + sensorStaticInfo->sharpnessMapSize, + ARRAY_LENGTH(sensorStaticInfo->sharpnessMapSize)); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->hotPixelMapModes != NULL) { + ret = info.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES, + sensorStaticInfo->hotPixelMapModes, + sensorStaticInfo->hotPixelMapModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):hotPixelMapModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->lensShadingMapModes != NULL) { + ret = info.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, + sensorStaticInfo->lensShadingMapModes, + sensorStaticInfo->lensShadingMapModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES update failed(%d)", + __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):lensShadingMapModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + if (sensorStaticInfo->shadingAvailableModes != NULL) { + ret = info.update(ANDROID_SHADING_AVAILABLE_MODES, + sensorStaticInfo->shadingAvailableModes, + sensorStaticInfo->shadingAvailableModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SHADING_AVAILABLE_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):shadingAvailableModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* andorid.tonemap static attributes */ + ret = info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, + &(sensorStaticInfo->tonemapCurvePoints), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_TONEMAP_MAX_CURVE_POINTS update failed(%d)", __FUNCTION__, ret); + + if (sensorStaticInfo->toneMapModes != NULL) { + ret = info.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, + sensorStaticInfo->toneMapModes, + sensorStaticInfo->toneMapModesLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):toneMapModes at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* android.led static attributes */ + if (sensorStaticInfo->leds != NULL) { + ret = info.update(ANDROID_LED_AVAILABLE_LEDS, + sensorStaticInfo->leds, + sensorStaticInfo->ledsLength); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_LED_AVAILABLE_LEDS update failed(%d)", __FUNCTION__, ret); + } else { + ALOGD("DEBUG(%s[%d]):leds at sensorStaticInfo is NULL", __FUNCTION__, __LINE__); + } + + /* andorid.info static attributes */ + ret = info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, + &(sensorStaticInfo->supportedHwLevel), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL update failed(%d)", __FUNCTION__, ret); + + /* android.sync static attributes */ + ret = info.update(ANDROID_SYNC_MAX_LATENCY, + &(sensorStaticInfo->maxLatency), 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_SYNC_MAX_LATENCY update failed(%d)", __FUNCTION__, ret); + + *cameraInfo = info.release(); + + return OK; +} + +void ExynosCamera3MetadataConverter::setStaticInfo(int camId, camera_metadata_t *info) +{ + if (info == NULL) { + camera_metadata_t *meta; + ALOGW("WARN(%s[%d]):info is null", __FUNCTION__, __LINE__); + ExynosCamera3MetadataConverter::constructStaticInfo(camId, &meta); + m_staticInfo = meta; + } else { + m_staticInfo = info; + } +} + +status_t 
ExynosCamera3MetadataConverter::initShotData(struct camera2_shot_ext *shot_ext) +{ + ALOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + + struct camera2_shot *shot = &shot_ext->shot; + + // TODO: make this from default request settings + /* request */ + shot->ctl.request.id = 0; + shot->ctl.request.metadataMode = METADATA_MODE_FULL; + shot->ctl.request.frameCount = 0; + + /* lens */ + shot->ctl.lens.focusDistance = -1.0f; + shot->ctl.lens.aperture = m_sensorStaticInfo->fNumber; // ExifInterface : TAG_APERTURE = "FNumber"; + shot->ctl.lens.focalLength = m_sensorStaticInfo->focalLength; + shot->ctl.lens.filterDensity = 0.0f; + shot->ctl.lens.opticalStabilizationMode = OPTICAL_STABILIZATION_MODE_OFF; + + shot->uctl.lensUd.pos = 0; + shot->uctl.lensUd.posSize = 0; + shot->ctl.aa.vendor_afState = AA_AFSTATE_INACTIVE; + + int minFps = (m_sensorStaticInfo->minFps == 0) ? 0 : (m_sensorStaticInfo->maxFps / 2); + int maxFps = (m_sensorStaticInfo->maxFps == 0) ? 0 : m_sensorStaticInfo->maxFps; + + /* The min fps can not be '0'. Therefore it is set up default value '15'. */ + if (minFps == 0) { + ALOGW("WRN(%s): Invalid min fps value(%d)", __FUNCTION__, minFps); + minFps = 15; + } + /* The initial fps can not be '0' and bigger than '30'. Therefore it is set up default value '30'. */ + if (maxFps == 0 || 30 < maxFps) { + ALOGW("WRN(%s): Invalid max fps value(%d)", __FUNCTION__, maxFps); + maxFps = 30; + } + + m_maxFps = maxFps; + + /* sensor */ + shot->ctl.sensor.exposureTime = 0; + shot->ctl.sensor.frameDuration = (1000 * 1000 * 1000) / maxFps; + shot->ctl.sensor.sensitivity = 0; + + /* flash */ + shot->ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; + shot->ctl.flash.firingPower = 0; + shot->ctl.flash.firingTime = 0; + m_overrideFlashControl = false; + + /* hotpixel */ + shot->ctl.hotpixel.mode = (enum processing_mode)0; + + /* demosaic */ + shot->ctl.demosaic.mode = (enum demosaic_processing_mode)0; + + /* noise */ + shot->ctl.noise.mode = ::PROCESSING_MODE_OFF; + shot->ctl.noise.strength = 5; + + /* shading */ + shot->ctl.shading.mode = (enum processing_mode)0; + + /* color */ + shot->ctl.color.mode = COLORCORRECTION_MODE_FAST; + static const camera_metadata_rational_t colorTransform[9] = { + {1, 1}, {0, 1}, {0, 1}, + {0, 1}, {1, 1}, {0, 1}, + {0, 1}, {0, 1}, {1, 1}, + }; + memcpy(shot->ctl.color.transform, colorTransform, sizeof(shot->ctl.color.transform)); + + /* tonemap */ + shot->ctl.tonemap.mode = ::TONEMAP_MODE_FAST; + static const float tonemapCurve[4] = { + 0.f, 0.f, + 1.f, 1.f + }; + + int tonemapCurveSize = sizeof(tonemapCurve); + int sizeOfCurve = sizeof(shot->ctl.tonemap.curveRed) / sizeof(shot->ctl.tonemap.curveRed[0]); + + for (int i = 0; i < sizeOfCurve; i += 4) { + memcpy(&(shot->ctl.tonemap.curveRed[i]), tonemapCurve, tonemapCurveSize); + memcpy(&(shot->ctl.tonemap.curveGreen[i]), tonemapCurve, tonemapCurveSize); + memcpy(&(shot->ctl.tonemap.curveBlue[i]), tonemapCurve, tonemapCurveSize); + } + + /* edge */ + shot->ctl.edge.mode = ::PROCESSING_MODE_OFF; + shot->ctl.edge.strength = 5; + + /* scaler */ + float zoomRatio = m_parameters->getZoomRatio(0) / 1000; + if (setMetaCtlCropRegion(shot_ext, 0, + m_sensorStaticInfo->maxSensorW, + m_sensorStaticInfo->maxSensorH, + m_sensorStaticInfo->maxPreviewW, + m_sensorStaticInfo->maxPreviewH, + zoomRatio) != NO_ERROR) { + ALOGE("ERR(%s):m_setZoom() fail", __FUNCTION__); + } + + /* jpeg */ + shot->ctl.jpeg.quality = 96; + shot->ctl.jpeg.thumbnailSize[0] = m_sensorStaticInfo->maxThumbnailW; 
+ shot->ctl.jpeg.thumbnailSize[1] = m_sensorStaticInfo->maxThumbnailH; + shot->ctl.jpeg.thumbnailQuality = 100; + shot->ctl.jpeg.gpsCoordinates[0] = 0; + shot->ctl.jpeg.gpsCoordinates[1] = 0; + shot->ctl.jpeg.gpsCoordinates[2] = 0; + memset(&shot->ctl.jpeg.gpsProcessingMethod, 0x0, + sizeof(shot->ctl.jpeg.gpsProcessingMethod)); + shot->ctl.jpeg.gpsTimestamp = 0L; + shot->ctl.jpeg.orientation = 0L; + + /* stats */ + shot->ctl.stats.faceDetectMode = ::FACEDETECT_MODE_OFF; + shot->ctl.stats.histogramMode = ::STATS_MODE_OFF; + shot->ctl.stats.sharpnessMapMode = ::STATS_MODE_OFF; + + /* aa */ + shot->ctl.aa.captureIntent = ::AA_CAPTURE_INTENT_CUSTOM; + shot->ctl.aa.mode = ::AA_CONTROL_AUTO; + shot->ctl.aa.effectMode = ::AA_EFFECT_OFF; + shot->ctl.aa.sceneMode = ::AA_SCENE_MODE_FACE_PRIORITY; + shot->ctl.aa.videoStabilizationMode = VIDEO_STABILIZATION_MODE_OFF; + + /* default metering is center */ + shot->ctl.aa.aeMode = ::AA_AEMODE_CENTER; + shot->ctl.aa.aeRegions[0] = 0; + shot->ctl.aa.aeRegions[1] = 0; + shot->ctl.aa.aeRegions[2] = 0; + shot->ctl.aa.aeRegions[3] = 0; + shot->ctl.aa.aeRegions[4] = 1000; + shot->ctl.aa.aeExpCompensation = 0; /* 0 is middle */ + shot->ctl.aa.vendor_aeExpCompensationStep = m_sensorStaticInfo->exposureCompensationStep; + shot->ctl.aa.aeLock = ::AA_AE_LOCK_OFF; + + shot->ctl.aa.aeTargetFpsRange[0] = minFps; + shot->ctl.aa.aeTargetFpsRange[1] = maxFps; + + shot->ctl.aa.aeAntibandingMode = ::AA_AE_ANTIBANDING_AUTO; + shot->ctl.aa.vendor_aeflashMode = ::AA_FLASHMODE_OFF; + + shot->ctl.aa.awbMode = ::AA_AWBMODE_WB_AUTO; + shot->ctl.aa.awbLock = ::AA_AWB_LOCK_OFF; + shot->ctl.aa.afMode = ::AA_AFMODE_OFF; + shot->ctl.aa.afRegions[0] = 0; + shot->ctl.aa.afRegions[1] = 0; + shot->ctl.aa.afRegions[2] = 0; + shot->ctl.aa.afRegions[3] = 0; + shot->ctl.aa.afRegions[4] = 1000; + shot->ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + + shot->ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot->ctl.aa.vendor_isoValue = 0; + shot->ctl.aa.vendor_videoMode = AA_VIDEOMODE_OFF; + + /* 2. dm */ + + /* 3. uctl */ +#ifdef USE_FW_OPMODE + shot->uctl.opMode = CAMERA_OP_MODE_HAL3_GED; +#endif + + /* 4. udm */ + + /* 5. magicNumber */ + shot->magicNumber = SHOT_MAGIC_NUMBER; + + /* 6. 
default setfile index */ + setMetaSetfile(shot_ext, ISS_SUB_SCENARIO_STILL_PREVIEW); + + /* user request */ + shot_ext->drc_bypass = 1; + shot_ext->dis_bypass = 1; + shot_ext->dnr_bypass = 1; + shot_ext->fd_bypass = 1; +/* + m_dummyShot.request_taap = 1; + m_dummyShot.request_taac = 0; + m_dummyShot.request_isp = 1; + m_dummyShot.request_scc = 0; + m_dummyShot.request_scp = 1; + m_dummyShot.request_dis = 0; +*/ + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateColorControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { + entry = settings.find(ANDROID_COLOR_CORRECTION_MODE); + dst->ctl.color.mode = (enum colorcorrection_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_COLOR_CORRECTION_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) { + entry = settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM); + for (size_t i = 0; i < entry.count && i < 9; i++) { + /* Convert rational to float */ + dst->ctl.color.transform[i].num = entry.data.r[i].numerator; + dst->ctl.color.transform[i].den = entry.data.r[i].denominator; + } + ALOGV("DEBUG(%s):ANDROID_COLOR_CORRECTION_TRANSFORM(%zu)", __FUNCTION__, entry.count); + } + + if (settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) { + entry = settings.find(ANDROID_COLOR_CORRECTION_GAINS); + for (size_t i = 0; i < entry.count && i < 4; i++) { + dst->ctl.color.gains[i] = entry.data.f[i]; + } + ALOGV("DEBUG(%s):ANDROID_COLOR_CORRECTION_GAINS(%f,%f,%f,%f)", __FUNCTION__, + entry.data.f[0], entry.data.f[1], entry.data.f[2], entry.data.f[3]); + } + + if (settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) { + entry = settings.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE); + dst->ctl.color.aberrationCorrectionMode = (enum processing_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_COLOR_CORRECTION_ABERRATION_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateControlControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + uint32_t bnsRatio = DEFAULT_BNS_RATIO; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (m_flashMgr == NULL) { + ALOGE("ERR(%s[%d]):FlashMgr is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + +#ifdef USE_BNS_PREVIEW + bnsRatio = m_parameters->getBnsScaleRatio()/1000; +#endif + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { + entry = settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE); + dst->ctl.aa.aeAntibandingMode = (enum aa_ae_antibanding_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_COLOR_AE_ANTIBANDING_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { + entry = 
settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION); + dst->ctl.aa.aeExpCompensation = (int32_t) (entry.data.i32[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION(%d)", __FUNCTION__, entry.data.i32[0]); + } + + if (settings.exists(ANDROID_CONTROL_AE_MODE)) { + enum aa_aemode aeMode = AA_AEMODE_OFF; + entry = settings.find(ANDROID_CONTROL_AE_MODE); + aeMode = (enum aa_aemode) FIMC_IS_METADATA(entry.data.u8[0]); + m_flashMgr->setFlashExposure(aeMode); + dst->ctl.aa.aeMode = aeMode; + + enum ExynosCameraActivityFlash::FLASH_REQ flashReq = ExynosCameraActivityFlash::FLASH_REQ_OFF; + switch (aeMode) { + case AA_AEMODE_ON_AUTO_FLASH: + case AA_AEMODE_ON_AUTO_FLASH_REDEYE: + flashReq = ExynosCameraActivityFlash::FLASH_REQ_AUTO; + dst->ctl.aa.aeMode = AA_AEMODE_CENTER; + m_overrideFlashControl = true; + break; + case AA_AEMODE_ON_ALWAYS_FLASH: + flashReq = ExynosCameraActivityFlash::FLASH_REQ_ON; + dst->ctl.aa.aeMode = AA_AEMODE_CENTER; + m_overrideFlashControl = true; + break; + case AA_AEMODE_ON: + dst->ctl.aa.aeMode = AA_AEMODE_CENTER; + case AA_AEMODE_OFF: + default: + m_overrideFlashControl = false; + break; + } + if (m_flashMgr != NULL) { + ALOGV("DEBUG(%s):m_flashMgr(%d)", __FUNCTION__,flashReq); + m_flashMgr->setFlashReq(flashReq, m_overrideFlashControl); + } + ALOGV("DEBUG(%s):ANDROID_CONTROL_AE_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AE_LOCK)) { + entry = settings.find(ANDROID_CONTROL_AE_LOCK); + dst->ctl.aa.aeLock = (enum aa_ae_lock) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_AE_LOCK(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AE_REGIONS)) { + ExynosRect2 aeRegion; + + entry = settings.find(ANDROID_CONTROL_AE_REGIONS); + aeRegion.x1 = entry.data.i32[0]; + aeRegion.y1 = entry.data.i32[1]; + aeRegion.x2 = entry.data.i32[2]; + aeRegion.y2 = entry.data.i32[3]; + dst->ctl.aa.aeRegions[4] = entry.data.i32[4]; + m_convert3AARegion(&aeRegion); + + dst->ctl.aa.aeRegions[0] = aeRegion.x1; + dst->ctl.aa.aeRegions[1] = aeRegion.y1; + dst->ctl.aa.aeRegions[2] = aeRegion.x2; + dst->ctl.aa.aeRegions[3] = aeRegion.y2; + ALOGV("DEBUG(%s):ANDROID_CONTROL_AE_REGIONS(%d,%d,%d,%d,%d)", __FUNCTION__, + entry.data.i32[0], + entry.data.i32[1], + entry.data.i32[2], + entry.data.i32[3], + entry.data.i32[4]); + } + + if (settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { + ExynosRect2 awbRegion; + + /* AWB region value would not be used at the f/w, + because AWB is not related with a specific region */ + entry = settings.find(ANDROID_CONTROL_AWB_REGIONS); + awbRegion.x1 = entry.data.i32[0]; + awbRegion.y1 = entry.data.i32[1]; + awbRegion.x2 = entry.data.i32[2]; + awbRegion.y2 = entry.data.i32[3]; + dst->ctl.aa.awbRegions[4] = entry.data.i32[4]; + m_convert3AARegion(&awbRegion); + + dst->ctl.aa.awbRegions[0] = awbRegion.x1; + dst->ctl.aa.awbRegions[1] = awbRegion.y1; + dst->ctl.aa.awbRegions[2] = awbRegion.x2; + dst->ctl.aa.awbRegions[3] = awbRegion.y2; + ALOGV("DEBUG(%s):ANDROID_CONTROL_AWB_REGIONS(%d,%d,%d,%d,%d)", __FUNCTION__, + entry.data.i32[0], + entry.data.i32[1], + entry.data.i32[2], + entry.data.i32[3], + entry.data.i32[4]); + } + + if (settings.exists(ANDROID_CONTROL_AF_REGIONS)) { + ExynosRect2 afRegion; + + entry = settings.find(ANDROID_CONTROL_AF_REGIONS); + afRegion.x1 = entry.data.i32[0]; + afRegion.y1 = entry.data.i32[1]; + afRegion.x2 = entry.data.i32[2]; + afRegion.y2 = entry.data.i32[3]; + dst->ctl.aa.afRegions[4] = entry.data.i32[4]; + 
m_convert3AARegion(&afRegion); + + dst->ctl.aa.afRegions[0] = afRegion.x1; + dst->ctl.aa.afRegions[1] = afRegion.y1; + dst->ctl.aa.afRegions[2] = afRegion.x2; + dst->ctl.aa.afRegions[3] = afRegion.y2; + ALOGV("DEBUG(%s):ANDROID_CONTROL_AF_REGIONS(%d,%d,%d,%d,%d)", __FUNCTION__, + entry.data.i32[0], + entry.data.i32[1], + entry.data.i32[2], + entry.data.i32[3], + entry.data.i32[4]); + } + + if (settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { + entry = settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE); + for (size_t i = 0; i < entry.count && i < 2; i++) + dst->ctl.aa.aeTargetFpsRange[i] = entry.data.i32[i]; + m_maxFps = dst->ctl.aa.aeTargetFpsRange[1]; + ALOGV("DEBUG(%s):ANDROID_CONTROL_AE_TARGET_FPS_RANGE(%d-%d)", __FUNCTION__, + entry.data.i32[0], entry.data.i32[1]); + } + + if (settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)) { + entry = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER); + dst->ctl.aa.aePrecaptureTrigger = (enum aa_ae_precapture_trigger) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER(%d)", __FUNCTION__, + entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AF_MODE)) { + entry = settings.find(ANDROID_CONTROL_AF_MODE); + dst->ctl.aa.afMode = (enum aa_afmode) FIMC_IS_METADATA(entry.data.u8[0]); + m_preAfMode = m_afMode; + m_afMode = dst->ctl.aa.afMode; + + switch (dst->ctl.aa.afMode) { + case AA_AFMODE_AUTO: + dst->ctl.aa.vendor_afmode_option = 0x00; + break; + case AA_AFMODE_MACRO: + dst->ctl.aa.vendor_afmode_option = 0x00 | SET_BIT(AA_AFMODE_OPTION_BIT_MACRO); + break; + case AA_AFMODE_CONTINUOUS_VIDEO: + dst->ctl.aa.vendor_afmode_option = 0x00 | SET_BIT(AA_AFMODE_OPTION_BIT_VIDEO); + /* The afRegion value should be (0,0,0,0) at the Continuous Video mode */ + dst->ctl.aa.afRegions[0] = 0; + dst->ctl.aa.afRegions[1] = 0; + dst->ctl.aa.afRegions[2] = 0; + dst->ctl.aa.afRegions[3] = 0; + break; + case AA_AFMODE_CONTINUOUS_PICTURE: + dst->ctl.aa.vendor_afmode_option = 0x00; + /* The afRegion value should be (0,0,0,0) at the Continuous Picture mode */ + dst->ctl.aa.afRegions[0] = 0; + dst->ctl.aa.afRegions[1] = 0; + dst->ctl.aa.afRegions[2] = 0; + dst->ctl.aa.afRegions[3] = 0; + break; + case AA_AFMODE_OFF: + default: + dst->ctl.aa.vendor_afmode_option = 0x00; + break; + } + ALOGV("DEBUG(%s):ANDROID_CONTROL_AF_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AF_TRIGGER)) { + entry = settings.find(ANDROID_CONTROL_AF_TRIGGER); + dst->ctl.aa.afTrigger = (enum aa_af_trigger)entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_CONTROL_AF_TRIGGER(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AWB_LOCK)) { + entry = settings.find(ANDROID_CONTROL_AWB_LOCK); + dst->ctl.aa.awbLock = (enum aa_awb_lock) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_AWB_LOCK(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_AWB_MODE)) { + entry = settings.find(ANDROID_CONTROL_AWB_MODE); + dst->ctl.aa.awbMode = (enum aa_awbmode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_AWB_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { + entry = settings.find(ANDROID_CONTROL_CAPTURE_INTENT); + dst->ctl.aa.captureIntent = (enum aa_capture_intent) entry.data.u8[0]; + if (dst->ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD) { + dst->ctl.aa.vendor_videoMode = AA_VIDEOMODE_ON; + setMetaSetfile(dst_ext, ISS_SUB_SCENARIO_VIDEO); + } else { + 
setMetaSetfile(dst_ext, ISS_SUB_SCENARIO_STILL_PREVIEW); + } + ALOGV("DEBUG(%s):ANDROID_CONTROL_CAPTURE_INTENT(%d) setfile(%d)", __FUNCTION__, dst->ctl.aa.captureIntent, dst_ext->setfile); + } + if (settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { + entry = settings.find(ANDROID_CONTROL_EFFECT_MODE); + dst->ctl.aa.effectMode = (enum aa_effect_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_EFFECT_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + ALOGV("DEBUG(%s):dst->ctl.aa.effectMode(%d)", __FUNCTION__, dst->ctl.aa.effectMode); + } + + if (settings.exists(ANDROID_CONTROL_MODE)) { + entry = settings.find(ANDROID_CONTROL_MODE); + dst->ctl.aa.mode = (enum aa_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_CONTROL_SCENE_MODE)) { + entry = settings.find(ANDROID_CONTROL_SCENE_MODE); + /* HACK : Temporary save the Mode info for adjusting value for CTS Test */ + if (entry.data.u8[0] == ANDROID_CONTROL_SCENE_MODE_HDR) + dst->ctl.aa.sceneMode = AA_SCENE_MODE_HDR; + else + dst->ctl.aa.sceneMode = (enum aa_scene_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_CONTROL_SCENE_MODE(%d)", __FUNCTION__, dst->ctl.aa.sceneMode); + } + + if (settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) { + entry = settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE); + dst->ctl.aa.videoStabilizationMode = (enum aa_videostabilization_mode) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_CONTROL_VIDEO_STABILIZATION_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + enum ExynosCameraActivityFlash::FLASH_STEP flashStep = ExynosCameraActivityFlash::FLASH_STEP_OFF; + bool isFlashStepChanged = false; + + /* Check AF Trigger to turn on the pre-flash */ + switch (dst->ctl.aa.afTrigger) { + case AA_AF_TRIGGER_START: + if (m_flashMgr->getNeedCaptureFlash() == true + && m_flashMgr->getFlashStatus() == AA_FLASHMODE_OFF) { + flashStep = ExynosCameraActivityFlash::FLASH_STEP_PRE_START; + m_flashMgr->setCaptureStatus(true); + isFlashStepChanged = true; + } + break; + case AA_AF_TRIGGER_CANCEL: + if (m_flashMgr->getNeedCaptureFlash() == true) { + m_flashMgr->setFlashStep(ExynosCameraActivityFlash::FLASH_STEP_CANCEL); + isFlashStepChanged = true; + } + break; + case AA_AF_TRIGGER_IDLE: + default: + break; + } + /* Check Precapture Trigger to turn on the pre-flash */ + switch (dst->ctl.aa.aePrecaptureTrigger) { + case AA_AE_PRECAPTURE_TRIGGER_START: + if (m_flashMgr->getNeedCaptureFlash() == true + && m_flashMgr->getFlashStatus() == AA_FLASHMODE_OFF) { + flashStep = ExynosCameraActivityFlash::FLASH_STEP_PRE_START; + m_flashMgr->setCaptureStatus(true); + isFlashStepChanged = true; + } + break; + case AA_AE_PRECAPTURE_TRIGGER_CANCEL: + if (m_flashMgr->getNeedCaptureFlash() == true + && m_flashMgr->getFlashStatus() != AA_FLASHMODE_OFF + && m_flashMgr->getFlashStatus() != AA_FLASHMODE_CANCEL) { + flashStep = ExynosCameraActivityFlash::FLASH_STEP_CANCEL; + m_flashMgr->setCaptureStatus(false); + isFlashStepChanged = true; + } + break; + case AA_AE_PRECAPTURE_TRIGGER_IDLE: + default: + break; + } + /* Check Capture Intent to turn on the main-flash */ + switch (dst->ctl.aa.captureIntent) { + case AA_CAPTURE_INTENT_STILL_CAPTURE: + if (m_flashMgr->getNeedCaptureFlash() == true) { + flashStep = ExynosCameraActivityFlash::FLASH_STEP_MAIN_START; + isFlashStepChanged = true; + m_parameters->setMarkingOfExifFlash(1); + } else { + m_parameters->setMarkingOfExifFlash(0); + } + break; + case 
AA_CAPTURE_INTENT_CUSTOM: + case AA_CAPTURE_INTENT_PREVIEW: + case AA_CAPTURE_INTENT_VIDEO_RECORD: + case AA_CAPTURE_INTENT_VIDEO_SNAPSHOT: + case AA_CAPTURE_INTENT_ZERO_SHUTTER_LAG: + case AA_CAPTURE_INTENT_MANUAL: + default: + break; + } + + if (isFlashStepChanged == true && m_flashMgr != NULL) + m_flashMgr->setFlashStep(flashStep); + + /* If aeMode or Mode is NOT Off, Manual AE control can NOT be operated */ + if (dst->ctl.aa.aeMode == AA_AEMODE_OFF + || dst->ctl.aa.mode == AA_CONTROL_OFF) { + m_isManualAeControl = true; + ALOGV("DEBUG(%s):Operate Manual AE Control, aeMode(%d), Mode(%d)", __FUNCTION__, + dst->ctl.aa.aeMode, dst->ctl.aa.mode); + } else { + m_isManualAeControl = false; + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateDemosaicControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_DEMOSAIC_MODE)) { + entry = settings.find(ANDROID_DEMOSAIC_MODE); + dst->ctl.demosaic.mode = (enum demosaic_processing_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_DEMOSAIC_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateEdgeControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_EDGE_STRENGTH)) { + entry = settings.find(ANDROID_EDGE_STRENGTH); + dst->ctl.edge.strength = (uint32_t) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_EDGE_STRENGTH(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_EDGE_MODE)) { + entry = settings.find(ANDROID_EDGE_MODE); + dst->ctl.edge.mode = (enum processing_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_EDGE_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + + switch (dst->ctl.edge.mode) { + case PROCESSING_MODE_HIGH_QUALITY: + dst->ctl.edge.strength = 10; + break; + case PROCESSING_MODE_FAST: + case PROCESSING_MODE_OFF: + case PROCESSING_MODE_MINIMAL: + default: + break; + } + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateFlashControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (m_overrideFlashControl == true) { + return OK; + } + + if (settings.exists(ANDROID_FLASH_FIRING_POWER)) { + entry = settings.find(ANDROID_FLASH_FIRING_POWER); + dst->ctl.flash.firingPower = (uint32_t) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_FLASH_FIRING_POWER(%d)", __FUNCTION__, 
entry.data.u8[0]); + } + + if (settings.exists(ANDROID_FLASH_FIRING_TIME)) { + entry = settings.find(ANDROID_FLASH_FIRING_TIME); + dst->ctl.flash.firingTime = (uint64_t) entry.data.i64[0]; + ALOGV("DEBUG(%s):ANDROID_FLASH_FIRING_TIME(%lld)", __FUNCTION__, entry.data.i64[0]); + } + + if (settings.exists(ANDROID_FLASH_MODE)) { + entry = settings.find(ANDROID_FLASH_MODE); + dst->ctl.flash.flashMode = (enum flash_mode) FIMC_IS_METADATA(entry.data.u8[0]); + + ALOGV("DEBUG(%s):ANDROID_FLASH_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateHotPixelControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_HOT_PIXEL_MODE)) { + entry = settings.find(ANDROID_HOT_PIXEL_MODE); + dst->ctl.hotpixel.mode = (enum processing_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_HOT_PIXEL_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateJpegControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { + entry = settings.find(ANDROID_JPEG_GPS_COORDINATES); + for (size_t i = 0; i < entry.count && i < 3; i++) + dst->ctl.jpeg.gpsCoordinates[i] = entry.data.d[i]; + ALOGV("DEBUG(%s):ANDROID_JPEG_GPS_COORDINATES(%f,%f,%f)", __FUNCTION__, + entry.data.d[0], entry.data.d[1], entry.data.d[2]); + } + + if (settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { + entry = settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD); + + /* HAKC for Exif CTS Test */ +#if 0 + dst->ctl.jpeg.gpsProcessingMethod = entry.data.u8; +#else + if (strcmp((char *)entry.data.u8, "None") != 0) { + strncpy((char *)m_gpsProcessingMethod, (char *)entry.data.u8, entry.count); + strncpy((char *)dst->ctl.jpeg.gpsProcessingMethod, (char *)entry.data.u8, entry.count); + } +#endif + + ALOGV("DEBUG(%s):ANDROID_JPEG_GPS_PROCESSING_METHOD(%s)", __FUNCTION__, + dst->ctl.jpeg.gpsProcessingMethod); + } + + if (settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { + entry = settings.find(ANDROID_JPEG_GPS_TIMESTAMP); + dst->ctl.jpeg.gpsTimestamp = (uint64_t) entry.data.i64[0]; + ALOGV("DEBUG(%s):ANDROID_JPEG_GPS_TIMESTAMP(%lld)", __FUNCTION__, entry.data.i64[0]); + } + + if (settings.exists(ANDROID_JPEG_ORIENTATION)) { + entry = settings.find(ANDROID_JPEG_ORIENTATION); + dst->ctl.jpeg.orientation = (uint32_t) entry.data.i32[0]; + ALOGV("DEBUG(%s):ANDROID_JPEG_ORIENTATION(%d)", __FUNCTION__, entry.data.i32[0]); + } + + if (settings.exists(ANDROID_JPEG_QUALITY)) { + entry = settings.find(ANDROID_JPEG_QUALITY); + dst->ctl.jpeg.quality = (uint32_t) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_JPEG_QUALITY(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if 
(settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) { + entry = settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY); + dst->ctl.jpeg.thumbnailQuality = (uint32_t) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_JPEG_THUMBNAIL_QUALITY(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { + entry = settings.find(ANDROID_JPEG_THUMBNAIL_SIZE); + for (size_t i = 0; i < entry.count && i < 2; i++) + dst->ctl.jpeg.thumbnailSize[i] = (uint32_t) entry.data.i32[i]; + ALOGV("DEBUG(%s):ANDROID_JPEG_THUMBNAIL_SIZE(%d,%d)", __FUNCTION__, + entry.data.i32[0], entry.data.i32[1]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateLensControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_LENS_APERTURE)) { + entry = settings.find(ANDROID_LENS_APERTURE); + dst->ctl.lens.aperture = entry.data.f[0]; + ALOGV("DEBUG(%s):ANDROID_LENS_APERTURE(%f)", __FUNCTION__, entry.data.f[0]); + } + + if (settings.exists(ANDROID_LENS_FILTER_DENSITY)) { + entry = settings.find(ANDROID_LENS_FILTER_DENSITY); + dst->ctl.lens.filterDensity = entry.data.f[0]; + ALOGV("DEBUG(%s):ANDROID_LENS_FILTER_DENSITY(%f)", __FUNCTION__, entry.data.f[0]); + } + + if (settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { + entry = settings.find(ANDROID_LENS_FOCAL_LENGTH); + dst->ctl.lens.focalLength = entry.data.f[0]; + ALOGV("DEBUG(%s):ANDROID_LENS_FOCAL_LENGTH(%f)", __FUNCTION__, entry.data.f[0]); + } + + if (settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { + entry = settings.find(ANDROID_LENS_FOCUS_DISTANCE); + /* should not control afMode and focusDistance at the same time + should not set the same focusDistance continuously + set the -1 to focusDistance if you do not need to change focusDistance + */ + if (m_afMode != AA_AFMODE_OFF || m_afMode != m_preAfMode || m_focusDistance == entry.data.f[0]) { + dst->ctl.lens.focusDistance = -1; + } else { + dst->ctl.lens.focusDistance = entry.data.f[0]; + } + m_focusDistance = dst->ctl.lens.focusDistance; + ALOGV("DEBUG(%s):ANDROID_LENS_FOCUS_DISTANCE(%f)", __FUNCTION__, entry.data.f[0]); + } + + if (settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { + entry = settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE); + dst->ctl.lens.opticalStabilizationMode = (enum optical_stabilization_mode) (entry.data.u8[0]); + + switch ((enum optical_stabilization_mode) entry.data.u8[0]) { + case OPTICAL_STABILIZATION_MODE_ON: + dst->ctl.lens.opticalStabilizationMode = OPTICAL_STABILIZATION_MODE_STILL; + break; + case OPTICAL_STABILIZATION_MODE_OFF: + default: + dst->ctl.lens.opticalStabilizationMode = OPTICAL_STABILIZATION_MODE_CENTERING; + break; + } + ALOGV("DEBUG(%s):ANDROID_LENS_OPTICAL_STABILIZATION_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateNoiseControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is 
NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { + entry = settings.find(ANDROID_NOISE_REDUCTION_STRENGTH); + dst->ctl.noise.strength = (uint32_t) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_NOISE_REDUCTION_STRENGTH(%d)", __FUNCTION__, + dst->ctl.noise.strength); + } + + if (settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { + entry = settings.find(ANDROID_NOISE_REDUCTION_MODE); + dst->ctl.noise.mode = (enum processing_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_NOISE_REDUCTION_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + + switch (dst->ctl.noise.mode) { + case PROCESSING_MODE_HIGH_QUALITY: + dst->ctl.noise.strength = 10; + break; + case PROCESSING_MODE_FAST: + case PROCESSING_MODE_OFF: + case PROCESSING_MODE_MINIMAL: + default: + break; + } + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateRequestControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext, int *reqId) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_REQUEST_ID)) { + entry = settings.find(ANDROID_REQUEST_ID); + dst->ctl.request.id = (uint32_t) entry.data.i32[0]; + ALOGV("DEBUG(%s):ANDROID_REQUEST_ID(%d)", __FUNCTION__, entry.data.i32[0]); + + if (reqId != NULL) + *reqId = dst->ctl.request.id; + } + + if (settings.exists(ANDROID_REQUEST_METADATA_MODE)) { + entry = settings.find(ANDROID_REQUEST_METADATA_MODE); + dst->ctl.request.metadataMode = (enum metadata_mode) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_REQUEST_METADATA_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateScalerControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_SCALER_CROP_REGION)) { + entry = settings.find(ANDROID_SCALER_CROP_REGION); + /* HACK: Temporary save the cropRegion for CTS */ + m_cropRegion.x = entry.data.i32[0]; + m_cropRegion.y = entry.data.i32[1]; + m_cropRegion.w = entry.data.i32[2]; + m_cropRegion.h = entry.data.i32[3]; + for (size_t i = 0; i < entry.count && i < 4; i++) + dst->ctl.scaler.cropRegion[i] = (uint32_t) entry.data.i32[i]; + ALOGV("DEBUG(%s):ANDROID_SCALER_CROP_REGION(%d,%d,%d,%d)", __FUNCTION__, + entry.data.i32[0], entry.data.i32[1], + entry.data.i32[2], entry.data.i32[3]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateSensorControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, 
__LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (m_isManualAeControl == true + && settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { + entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME); + dst->ctl.sensor.exposureTime = (uint64_t) entry.data.i64[0]; + ALOGV("DEBUG(%s):ANDROID_SENSOR_EXPOSURE_TIME(%lld)", __FUNCTION__, entry.data.i64[0]); + } + + if (m_isManualAeControl == true + && settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { + entry = settings.find(ANDROID_SENSOR_FRAME_DURATION); + dst->ctl.sensor.frameDuration = (uint64_t) entry.data.i64[0]; + ALOGV("DEBUG(%s):ANDROID_SENSOR_FRAME_DURATION(%lld)", __FUNCTION__, entry.data.i64[0]); + } else { + /* default value */ + dst->ctl.sensor.frameDuration = (1000 * 1000 * 1000) / m_maxFps; + } + + if (m_isManualAeControl == true + && settings.exists(ANDROID_SENSOR_SENSITIVITY)) { + entry = settings.find(ANDROID_SENSOR_SENSITIVITY); + dst->ctl.aa.vendor_isoMode = AA_ISOMODE_MANUAL; + dst->ctl.sensor.sensitivity = (uint32_t) entry.data.i32[0]; + dst->ctl.aa.vendor_isoValue = (uint32_t) entry.data.i32[0]; + ALOGV("DEBUG(%s):ANDROID_SENSOR_SENSITIVITY(%d)", __FUNCTION__, entry.data.i32[0]); + } else { + dst->ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + dst->ctl.sensor.sensitivity = 0; + dst->ctl.aa.vendor_isoValue = 0; + } + + if (settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) { + entry = settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA); + for (size_t i = 0; i < entry.count && i < 4; i++) + dst->ctl.sensor.testPatternData[i] = entry.data.i32[i]; + ALOGV("DEBUG(%s):ANDROID_SENSOR_TEST_PATTERN_DATA(%d,%d,%d,%d)", __FUNCTION__, + entry.data.i32[0], entry.data.i32[1], entry.data.i32[2], entry.data.i32[3]); + } + + if (settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) { + entry = settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE); + /* TODO : change SENSOR_TEST_PATTERN_MODE_CUSTOM1 from 256 to 267 */ + if (entry.data.i32[0] == ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1) + dst->ctl.sensor.testPatternMode = SENSOR_TEST_PATTERN_MODE_CUSTOM1; + else + dst->ctl.sensor.testPatternMode = (enum sensor_test_pattern_mode) FIMC_IS_METADATA(entry.data.i32[0]); + ALOGV("DEBUG(%s):ANDROID_SENSOR_TEST_PATTERN_MODE(%d)", __FUNCTION__, entry.data.i32[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateShadingControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_SHADING_MODE)) { + entry = settings.find(ANDROID_SHADING_MODE); + dst->ctl.shading.mode = (enum processing_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_SHADING_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if (settings.exists(ANDROID_SHADING_STRENGTH)) { + entry = settings.find(ANDROID_SHADING_STRENGTH); + dst->ctl.shading.strength = (uint32_t) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_SHADING_STRENGTH(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateStatisticsControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if 
(settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { + entry = settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE); + /* HACK : F/W does NOT support FD Off */ + if (entry.data.u8[0] == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { + m_faceDetectModeOn = false; + dst_ext->fd_bypass = 1; + } else { + m_faceDetectModeOn = true; + dst_ext->fd_bypass = 0; + } + dst->ctl.stats.faceDetectMode = (enum facedetect_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_STATISTICS_FACE_DETECT_MODE(%d)", __FUNCTION__, + entry.data.u8[0]); + } + + if (settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { + entry = settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE); + dst->ctl.stats.histogramMode = (enum stats_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_STATISTICS_HISTOGRAM_MODE(%d)", __FUNCTION__, + entry.data.u8[0]); + } + + if (settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { + entry = settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE); + dst->ctl.stats.sharpnessMapMode = (enum stats_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_STATISTICS_SHARPNESS_MAP_MODE(%d)", __FUNCTION__, + entry.data.u8[0]); + } + + if (settings.exists(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE)) { + entry = settings.find(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE); + dst->ctl.stats.hotPixelMapMode = (enum stats_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE(%d)", __FUNCTION__, + entry.data.u8[0]); + } + + if (settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) { + entry = settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE); + dst->ctl.stats.lensShadingMapMode = (enum stats_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_STATISTICS_LENS_SHADING_MAP_MODE(%d)", __FUNCTION__, + entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateTonemapControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_TONEMAP_MODE)) { + entry = settings.find(ANDROID_TONEMAP_MODE); + dst->ctl.tonemap.mode = (enum tonemap_mode) FIMC_IS_METADATA(entry.data.u8[0]); + ALOGV("DEBUG(%s):ANDROID_TONEMAP_MODE(%d)", __FUNCTION__, entry.data.u8[0]); + } + + if(dst->ctl.tonemap.mode == TONEMAP_MODE_CONTRAST_CURVE) { + if (settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) { + float tonemapCurveBlue[64]; + + entry = settings.find(ANDROID_TONEMAP_CURVE_BLUE); + if (entry.count < 64) { + if (entry.count == 4) { + float deltaIn, deltaOut; + + deltaIn = entry.data.f[2] - entry.data.f[0]; + deltaOut = entry.data.f[3] - entry.data.f[1]; + for (size_t i = 0; i < 61; i += 2) { + tonemapCurveBlue[i] = deltaIn * i / 64.0 + entry.data.f[0]; + tonemapCurveBlue[i+1] = deltaOut * i / 64.0 + entry.data.f[1]; + ALOGV("DEBUG(%s):ANDROID_TONEMAP_CURVE_BLUE([%d]:%f)", __FUNCTION__, i, 
tonemapCurveBlue[i]); + } + tonemapCurveBlue[62] = entry.data.f[2]; + tonemapCurveBlue[63] = entry.data.f[3]; + } else if (entry.count == 32) { + size_t i; + for (i = 0; i < 30; i += 2) { + tonemapCurveBlue[2*i] = entry.data.f[i]; + tonemapCurveBlue[2*i+1] = entry.data.f[i+1]; + tonemapCurveBlue[2*i+2] = (entry.data.f[i] + entry.data.f[i+2])/2; + tonemapCurveBlue[2*i+3] = (entry.data.f[i+1] + entry.data.f[i+3])/2; + } + i = 30; + tonemapCurveBlue[2*i] = entry.data.f[i]; + tonemapCurveBlue[2*i+1] = entry.data.f[i+1]; + tonemapCurveBlue[2*i+2] = entry.data.f[i]; + tonemapCurveBlue[2*i+3] = entry.data.f[i+1]; + } else { + ALOGE("ERROR(%s):ANDROID_TONEMAP_CURVE_BLUE( entry count : %d)", __FUNCTION__, entry.count); + } + } else { + for (size_t i = 0; i < entry.count && i < 64; i++) { + tonemapCurveBlue[i] = entry.data.f[i]; + ALOGV("DEBUG(%s):ANDROID_TONEMAP_CURVE_BLUE([%d]:%f)", __FUNCTION__, i, entry.data.f[i]); + } + } + memcpy(&(dst->ctl.tonemap.curveBlue[0]), tonemapCurveBlue, sizeof(float)*64); + } + + if (settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) { + float tonemapCurveGreen[64]; + + entry = settings.find(ANDROID_TONEMAP_CURVE_GREEN); + if (entry.count < 64) { + if (entry.count == 4) { + float deltaIn, deltaOut; + + deltaIn = entry.data.f[2] - entry.data.f[0]; + deltaOut = entry.data.f[3] - entry.data.f[1]; + for (size_t i = 0; i < 61; i += 2) { + tonemapCurveGreen[i] = deltaIn * i / 64.0 + entry.data.f[0]; + tonemapCurveGreen[i+1] = deltaOut * i / 64.0 + entry.data.f[1]; + ALOGV("DEBUG(%s):ANDROID_TONEMAP_CURVE_GREEN([%d]:%f)", __FUNCTION__, i, tonemapCurveGreen[i]); + } + tonemapCurveGreen[62] = entry.data.f[2]; + tonemapCurveGreen[63] = entry.data.f[3]; + } else if (entry.count == 32) { + size_t i; + for (i = 0; i < 30; i += 2) { + tonemapCurveGreen[2*i] = entry.data.f[i]; + tonemapCurveGreen[2*i+1] = entry.data.f[i+1]; + tonemapCurveGreen[2*i+2] = (entry.data.f[i] + entry.data.f[i+2])/2; + tonemapCurveGreen[2*i+3] = (entry.data.f[i+1] + entry.data.f[i+3])/2; + } + i = 30; + tonemapCurveGreen[2*i] = entry.data.f[i]; + tonemapCurveGreen[2*i+1] = entry.data.f[i+1]; + tonemapCurveGreen[2*i+2] = entry.data.f[i]; + tonemapCurveGreen[2*i+3] = entry.data.f[i+1]; + } else { + ALOGE("ERROR(%s):ANDROID_TONEMAP_CURVE_GREEN( entry count : %d)", __FUNCTION__, entry.count); + } + } else { + for (size_t i = 0; i < entry.count && i < 64; i++) { + tonemapCurveGreen[i] = entry.data.f[i]; + ALOGV("DEBUG(%s):ANDROID_TONEMAP_CURVE_GREEN([%d]:%f)", __FUNCTION__, i, entry.data.f[i]); + } + } + memcpy(&(dst->ctl.tonemap.curveGreen[0]), tonemapCurveGreen, sizeof(float)*64); + } + + if (settings.exists(ANDROID_TONEMAP_CURVE_RED)) { + float tonemapCurveRed[64]; + + entry = settings.find(ANDROID_TONEMAP_CURVE_RED); + if (entry.count < 64) { + if (entry.count == 4) { + float deltaIn, deltaOut; + + deltaIn = entry.data.f[2] - entry.data.f[0]; + deltaOut = entry.data.f[3] - entry.data.f[1]; + for (size_t i = 0; i < 61; i += 2) { + tonemapCurveRed[i] = deltaIn * i / 64.0 + entry.data.f[0]; + tonemapCurveRed[i+1] = deltaOut * i / 64.0 + entry.data.f[1]; + ALOGV("DEBUG(%s):ANDROID_TONEMAP_CURVE_RED([%d]:%f)", __FUNCTION__, i, tonemapCurveRed[i]); + } + tonemapCurveRed[62] = entry.data.f[2]; + tonemapCurveRed[63] = entry.data.f[3]; + } else if (entry.count == 32) { + size_t i; + for (i = 0; i < 30; i += 2) { + tonemapCurveRed[2*i] = entry.data.f[i]; + tonemapCurveRed[2*i+1] = entry.data.f[i+1]; + tonemapCurveRed[2*i+2] = (entry.data.f[i] + entry.data.f[i+2])/2; + tonemapCurveRed[2*i+3] = (entry.data.f[i+1] + 
entry.data.f[i+3])/2; + } + i = 30; + tonemapCurveRed[2*i] = entry.data.f[i]; + tonemapCurveRed[2*i+1] = entry.data.f[i+1]; + tonemapCurveRed[2*i+2] = entry.data.f[i]; + tonemapCurveRed[2*i+3] = entry.data.f[i+1]; + } else { + ALOGE("ERROR(%s):ANDROID_TONEMAP_CURVE_RED( entry count : %d)", __FUNCTION__, entry.count); + } + } else { + for (size_t i = 0; i < entry.count && i < 64; i++) { + tonemapCurveRed[i] = entry.data.f[i]; + ALOGV("DEBUG(%s):ANDROID_TONEMAP_CURVE_RED([%d]:%f)", __FUNCTION__, i, entry.data.f[i]); + } + } + memcpy(&(dst->ctl.tonemap.curveRed[0]), tonemapCurveRed, sizeof(float)*64); + } + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateLedControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_LED_TRANSMIT)) { + entry = settings.find(ANDROID_LED_TRANSMIT); + dst->ctl.led.transmit = (enum led_transmit) entry.data.u8[0]; + ALOGV("DEBUG(%s):ANDROID_LED_TRANSMIT(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateBlackLevelControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext) +{ + struct camera2_shot *dst = NULL; + camera_metadata_entry_t entry; + + if (settings.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + dst = &dst_ext->shot; + dst->magicNumber = SHOT_MAGIC_NUMBER; + + if (settings.exists(ANDROID_BLACK_LEVEL_LOCK)) { + entry = settings.find(ANDROID_BLACK_LEVEL_LOCK); + dst->ctl.blacklevel.lock = (enum blacklevel_lock) entry.data.u8[0]; + /* HACK : F/W does NOT support thie field */ + if (entry.data.u8[0] == ANDROID_BLACK_LEVEL_LOCK_ON) + m_blackLevelLockOn = true; + else + m_blackLevelLockOn = false; + ALOGV("DEBUG(%s):ANDROID_BLACK_LEVEL_LOCK(%d)", __FUNCTION__, entry.data.u8[0]); + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::convertRequestToShot(CameraMetadata &request, struct camera2_shot_ext *dst_ext, int *reqId) +{ + status_t ret = OK; + uint32_t errorFlag = 0; + + if (request.isEmpty()) { + ALOGE("ERR(%s[%d]):Settings is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (dst_ext == NULL) { + ALOGE("ERR(%s[%d]):dst_ext is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + ret = translateColorControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 0); + ret = translateControlControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 1); + ret = translateDemosaicControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 2); + ret = translateEdgeControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 3); + ret = translateFlashControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 4); + ret = translateHotPixelControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 5); + ret = translateJpegControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 6); + ret = translateLensControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 7); + ret = 
translateNoiseControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 8); + ret = translateRequestControlData(request, dst_ext, reqId); + if (ret != OK) + errorFlag |= (1 << 9); + ret = translateScalerControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 10); + ret = translateSensorControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 11); + ret = translateShadingControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 12); + ret = translateStatisticsControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 13); + ret = translateTonemapControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 14); + ret = translateLedControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 15); + ret = translateBlackLevelControlData(request, dst_ext); + if (ret != OK) + errorFlag |= (1 << 16); + + if (errorFlag != 0) { + ALOGE("ERR(%s[%d]):failed to translate Control Data(%d)", __FUNCTION__, __LINE__, errorFlag); + return INVALID_OPERATION; + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateColorMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + const uint8_t colorMode = (uint8_t) CAMERA_METADATA(src->dm.color.mode); + settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1); + ALOGV("DEBUG(%s):dm.color.mode(%d)", __FUNCTION__, src->dm.color.mode); + + camera_metadata_rational_t colorTransform[9]; + for (int i = 0; i < 9; i++) { + colorTransform[i].numerator = (int32_t) src->dm.color.transform[i].num; + colorTransform[i].denominator = (int32_t) src->dm.color.transform[i].den; + } + settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9); + ALOGV("DEBUG(%s):dm.color.transform", __FUNCTION__); + + float colorGains[4]; + for (int i = 0; i < 4; i++) { + colorGains[i] = src->dm.color.gains[i]; + } + settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4); + ALOGV("DEBUG(%s):dm.color.gains(%f,%f,%f,%f)", __FUNCTION__, + colorGains[0], colorGains[1], colorGains[2], colorGains[3]); + + const uint8_t aberrationMode = (uint8_t) CAMERA_METADATA(src->dm.color.aberrationCorrectionMode); + settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1); + ALOGV("DEBUG(%s):dm.color.aberrationCorrectionMode(%d)", __FUNCTION__, + src->dm.color.aberrationCorrectionMode); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateControlMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + CameraMetadata service_settings; + camera_metadata_entry_t entry; + camera_metadata_entry_t cropRegionEntry; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + uint8_t controlState = 0; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* HACK : F/W does NOT support some fields */ + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + service_shot = &service_shot_ext.shot; + + service_settings = requestInfo->getServiceMeta(); + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + 
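The ANDROID_TONEMAP_CURVE_BLUE/GREEN/RED handling a few functions above expands a short list of (in, out) control points (2 points when entry.count == 4, 16 points when entry.count == 32) into the fixed 64-float buffer that the firmware-side curve fields expect. Below is a minimal stand-alone sketch of that idea for the 2-point case; it uses plain arrays rather than the camera2_shot structures and performs a clean linear resample, whereas the HAL code above computes the intermediate indices slightly differently.

#include <array>
#include <cstdio>

// Expand two (in, out) control points into the 32-point / 64-float layout
// ([in0, out0, in1, out1, ...]) used by the curve buffers above.
// Illustrative resample only; not the HAL's exact arithmetic.
static std::array<float, 64> expandTwoPointCurve(float in0, float out0,
                                                 float in1, float out1) {
    std::array<float, 64> curve{};
    for (int p = 0; p < 32; ++p) {
        float t = static_cast<float>(p) / 31.0f;   // 0.0 at the first point, 1.0 at the last
        curve[2 * p]     = in0  + (in1  - in0)  * t;
        curve[2 * p + 1] = out0 + (out1 - out0) * t;
    }
    return curve;
}

int main() {
    // A linear curve from (0,0) to (1,1), as a CONTRAST_CURVE request might send.
    std::array<float, 64> curve = expandTwoPointCurve(0.0f, 0.0f, 1.0f, 1.0f);
    std::printf("first point (%f, %f), last point (%f, %f)\n",
                curve[0], curve[1], curve[62], curve[63]);
    return 0;
}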
src = &shot_ext.shot; + + const uint8_t antibandingMode = (uint8_t) CAMERA_METADATA(src->dm.aa.aeAntibandingMode); + settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1); + ALOGV("DEBUG(%s):dm.aa.aeAntibandingMode(%d)", __FUNCTION__, src->dm.aa.aeAntibandingMode); + + const int32_t aeExposureCompensation = src->dm.aa.aeExpCompensation; + settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExposureCompensation, 1); + ALOGV("DEBUG(%s):dm.aa.aeExpCompensation(%d)", __FUNCTION__, src->dm.aa.aeExpCompensation); + uint8_t aeMode = ANDROID_CONTROL_AE_MODE_OFF; + + if (src->dm.aa.aeMode == AA_AEMODE_OFF) { + aeMode = ANDROID_CONTROL_AE_MODE_OFF; + } else { + if (m_flashMgr != NULL) { + switch (m_flashMgr->getFlashReq()) { + case ExynosCameraActivityFlash::FLASH_REQ_AUTO: + aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; + break; + case ExynosCameraActivityFlash::FLASH_REQ_ON: + aeMode = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; + break; + case ExynosCameraActivityFlash::FLASH_REQ_RED_EYE: + aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; + break; + case ExynosCameraActivityFlash::FLASH_REQ_TORCH: + case ExynosCameraActivityFlash::FLASH_REQ_OFF: + default: + aeMode = ANDROID_CONTROL_AE_MODE_ON; + break; + } + } + } + settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); + ALOGV("DEBUG(%s):dm.aa.aeMode(%d), AE_MODE(%d)", __FUNCTION__, src->dm.aa.aeMode, aeMode); + + const uint8_t aeLock = (uint8_t) CAMERA_METADATA(src->dm.aa.aeLock); + settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); + ALOGV("DEBUG(%s):dm.aa.aeLock(%d)", __FUNCTION__, aeLock); + + if (m_cameraId == CAMERA_ID_BACK) { + /* HACK: Result AE_REGION must be updated based of the value from F/W */ + int32_t aeRegion[5]; + if (service_settings.exists(ANDROID_SCALER_CROP_REGION) && + service_settings.exists(ANDROID_CONTROL_AE_REGIONS)) { + cropRegionEntry = service_settings.find(ANDROID_SCALER_CROP_REGION); + entry = service_settings.find(ANDROID_CONTROL_AE_REGIONS); + /* ae region is bigger than crop region */ + if (cropRegionEntry.data.i32[2] < entry.data.i32[2] - entry.data.i32[0] + || cropRegionEntry.data.i32[3] < entry.data.i32[3] - entry.data.i32[1]) { + aeRegion[0] = cropRegionEntry.data.i32[0]; + aeRegion[1] = cropRegionEntry.data.i32[1]; + aeRegion[2] = cropRegionEntry.data.i32[2] + aeRegion[0]; + aeRegion[3] = cropRegionEntry.data.i32[3] + aeRegion[1]; + aeRegion[4] = entry.data.i32[4]; + } else { + aeRegion[0] = entry.data.i32[0]; + aeRegion[1] = entry.data.i32[1]; + aeRegion[2] = entry.data.i32[2]; + aeRegion[3] = entry.data.i32[3]; + aeRegion[4] = entry.data.i32[4]; + } + } else { + aeRegion[0] = service_shot->ctl.aa.aeRegions[0]; + aeRegion[1] = service_shot->ctl.aa.aeRegions[1]; + aeRegion[2] = service_shot->ctl.aa.aeRegions[2]; + aeRegion[3] = service_shot->ctl.aa.aeRegions[3]; + aeRegion[4] = service_shot->ctl.aa.aeRegions[4]; + } + + settings.update(ANDROID_CONTROL_AE_REGIONS, aeRegion, 5); + ALOGV("DEBUG(%s):dm.aa.aeRegions(%d,%d,%d,%d,%d)", __FUNCTION__, + src->dm.aa.aeRegions[0], + src->dm.aa.aeRegions[1], + src->dm.aa.aeRegions[2], + src->dm.aa.aeRegions[3], + src->dm.aa.aeRegions[4]); + + + /* HACK: Result AWB_REGION must be updated based of the value from F/W */ + int32_t awbRegion[5]; + if (service_settings.exists(ANDROID_SCALER_CROP_REGION) && + service_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { + cropRegionEntry = service_settings.find(ANDROID_SCALER_CROP_REGION); + entry = service_settings.find(ANDROID_CONTROL_AWB_REGIONS); + /* awb region is bigger than crop region */ 
+ if (cropRegionEntry.data.i32[2] < entry.data.i32[2] - entry.data.i32[0] + || cropRegionEntry.data.i32[3] < entry.data.i32[3] - entry.data.i32[1]) { + awbRegion[0] = cropRegionEntry.data.i32[0]; + awbRegion[1] = cropRegionEntry.data.i32[1]; + awbRegion[2] = cropRegionEntry.data.i32[2] + awbRegion[0]; + awbRegion[3] = cropRegionEntry.data.i32[3] + awbRegion[1]; + awbRegion[4] = entry.data.i32[4]; + } else { + awbRegion[0] = entry.data.i32[0]; + awbRegion[1] = entry.data.i32[1]; + awbRegion[2] = entry.data.i32[2]; + awbRegion[3] = entry.data.i32[3]; + awbRegion[4] = entry.data.i32[4]; + } + } else { + awbRegion[0] = service_shot->ctl.aa.awbRegions[0]; + awbRegion[1] = service_shot->ctl.aa.awbRegions[1]; + awbRegion[2] = service_shot->ctl.aa.awbRegions[2]; + awbRegion[3] = service_shot->ctl.aa.awbRegions[3]; + awbRegion[4] = service_shot->ctl.aa.awbRegions[4]; + } + + settings.update(ANDROID_CONTROL_AWB_REGIONS, awbRegion, 5); + ALOGV("DEBUG(%s):dm.aa.awbRegions(%d,%d,%d,%d,%d)", __FUNCTION__, + src->dm.aa.awbRegions[0], + src->dm.aa.awbRegions[1], + src->dm.aa.awbRegions[2], + src->dm.aa.awbRegions[3], + src->dm.aa.awbRegions[4]); + + /* HACK: Result AF_REGION must be updated based of the value from F/W */ + int32_t afRegion[5]; + if (service_settings.exists(ANDROID_SCALER_CROP_REGION) && + service_settings.exists(ANDROID_CONTROL_AF_REGIONS)) { + cropRegionEntry = service_settings.find(ANDROID_SCALER_CROP_REGION); + entry = service_settings.find(ANDROID_CONTROL_AF_REGIONS); + /* af region is bigger than crop region */ + if (cropRegionEntry.data.i32[2] < entry.data.i32[2] - entry.data.i32[0] + || cropRegionEntry.data.i32[3] < entry.data.i32[3] - entry.data.i32[1]) { + afRegion[0] = cropRegionEntry.data.i32[0]; + afRegion[1] = cropRegionEntry.data.i32[1]; + afRegion[2] = cropRegionEntry.data.i32[2] + afRegion[0]; + afRegion[3] = cropRegionEntry.data.i32[3] + afRegion[1]; + afRegion[4] = entry.data.i32[4]; + } else { + afRegion[0] = entry.data.i32[0]; + afRegion[1] = entry.data.i32[1]; + afRegion[2] = entry.data.i32[2]; + afRegion[3] = entry.data.i32[3]; + afRegion[4] = entry.data.i32[4]; + } + } else { + afRegion[0] = service_shot->ctl.aa.afRegions[0]; + afRegion[1] = service_shot->ctl.aa.afRegions[1]; + afRegion[2] = service_shot->ctl.aa.afRegions[2]; + afRegion[3] = service_shot->ctl.aa.afRegions[3]; + afRegion[4] = service_shot->ctl.aa.afRegions[4]; + } + settings.update(ANDROID_CONTROL_AF_REGIONS, afRegion, 5); + ALOGV("DEBUG(%s):dm.aa.afRegions(%d,%d,%d,%d,%d)", __FUNCTION__, + src->dm.aa.afRegions[0], + src->dm.aa.afRegions[1], + src->dm.aa.afRegions[2], + src->dm.aa.afRegions[3], + src->dm.aa.afRegions[4]); + } + + const int32_t aeTargetFps[2] = + { (int32_t) src->dm.aa.aeTargetFpsRange[0], (int32_t) src->dm.aa.aeTargetFpsRange[1] }; + settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFps, 2); + ALOGV("DEBUG(%s):dm.aa.aeTargetFpsRange(%d,%d)", __FUNCTION__, + src->dm.aa.aeTargetFpsRange[0], src->dm.aa.aeTargetFpsRange[1]); + + const uint8_t aePrecaptureTrigger = (uint8_t) src->dm.aa.aePrecaptureTrigger; + settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger ,1); + ALOGV("DEBUG(%s):dm.aa.aePrecaptureTrigger(%d)", __FUNCTION__, + src->dm.aa.aePrecaptureTrigger); + + uint8_t afMode = (uint8_t) CAMERA_METADATA(src->dm.aa.afMode); + settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1); + ALOGV("DEBUG(%s):dm.aa.afMode(%d)", __FUNCTION__, src->dm.aa.afMode); + + const uint8_t afTrigger = (uint8_t )src->dm.aa.afTrigger; + 
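The back-camera result path above rebuilds ANDROID_CONTROL_AE/AWB/AF_REGIONS from the service request: when the requested metering rectangle is larger than the current ANDROID_SCALER_CROP_REGION, the crop rectangle itself (with the requested weight) is reported instead. A simplified stand-alone sketch of that fallback, using local types rather than the HAL's camera_metadata entries:

#include <array>
#include <cstdint>
#include <cstdio>

// Simplified stand-ins for the values handled above; not the HAL's types.
struct CropRect { int32_t x, y, w, h; };            // ANDROID_SCALER_CROP_REGION layout
using MeteringRegion = std::array<int32_t, 5>;      // {x1, y1, x2, y2, weight}

// If the requested metering region is larger than the crop region, fall back
// to the crop rectangle (keeping the requested weight), as the AE/AWB/AF
// result blocks above do; otherwise pass the region through untouched.
static MeteringRegion clampToCrop(const CropRect &crop, const MeteringRegion &req) {
    if (crop.w < req[2] - req[0] || crop.h < req[3] - req[1])
        return {crop.x, crop.y, crop.x + crop.w, crop.y + crop.h, req[4]};
    return req;
}

int main() {
    CropRect crop = {0, 0, 1920, 1080};
    MeteringRegion tooBig = {0, 0, 4000, 3000, 1};
    MeteringRegion r = clampToCrop(crop, tooBig);
    std::printf("%d,%d,%d,%d weight=%d\n", r[0], r[1], r[2], r[3], r[4]);
    return 0;
}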
settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); + ALOGV("DEBUG(%s):dm.aa.afTrigger(%d)", __FUNCTION__, src->dm.aa.afTrigger); + + const uint8_t awbLock = (uint8_t) CAMERA_METADATA(src->dm.aa.awbLock); + settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); + ALOGV("DEBUG(%s):dm.aa.awbLock(%d)", __FUNCTION__, src->dm.aa.awbLock); + + const uint8_t awbMode = (uint8_t) CAMERA_METADATA(src->dm.aa.awbMode); + settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); + ALOGV("DEBUG(%s):dm.aa.awbMode(%d)", __FUNCTION__, src->dm.aa.awbMode); + + //const uint8_t captureIntent = (uint8_t) src->dm.aa.captureIntent; + const uint8_t captureIntent = (uint8_t)service_shot->ctl.aa.captureIntent; + settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &captureIntent, 1); + ALOGV("DEBUG(%s):dm.aa.captureIntent(%d)", __FUNCTION__, src->dm.aa.captureIntent); + + const uint8_t effectMode = (uint8_t) CAMERA_METADATA(src->dm.aa.effectMode); + settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); + ALOGV("DEBUG(%s):dm.aa.effectMode(%d)", __FUNCTION__, src->dm.aa.effectMode); + + const uint8_t mode = (uint8_t) CAMERA_METADATA(src->dm.aa.mode); + settings.update(ANDROID_CONTROL_MODE, &mode, 1); + ALOGV("DEBUG(%s):dm.aa.mode(%d)", __FUNCTION__, src->dm.aa.mode); + + uint8_t sceneMode = (uint8_t) CAMERA_METADATA(src->dm.aa.sceneMode); + /* HACK : Adjust the Scene mode for unsupported scene mode by F/W */ + if (src->dm.aa.sceneMode == AA_SCENE_MODE_HDR) + sceneMode = ANDROID_CONTROL_SCENE_MODE_HDR; + else if (service_shot->ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) + sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; + settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); + ALOGV("DEBUG(%s):dm.aa.sceneMode(%d)", __FUNCTION__, src->dm.aa.sceneMode); + + const uint8_t videoStabilizationMode = (enum aa_videostabilization_mode) src->dm.aa.videoStabilizationMode; + settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1); + ALOGV("DEBUG(%s):dm.aa.videoStabilizationMode(%d)", __FUNCTION__, src->dm.aa.videoStabilizationMode); + + uint8_t tmpAeState = (uint8_t) CAMERA_METADATA(src->dm.aa.aeState); + /* Convert Sec specific AE_STATE_LOCKED_* to valid state value + (Based on the guide from ist.song@samsung.com) */ + switch(src->dm.aa.aeState) { + case AE_STATE_LOCKED_CONVERGED: + ALOGV("DEBUG(%s):dm.aa.aeState(%d) -> Changed to (%d)-AE_STATE_CONVERGED" + , __FUNCTION__, src->dm.aa.aeState, AE_STATE_CONVERGED); + tmpAeState = (uint8_t) CAMERA_METADATA(AE_STATE_CONVERGED); + break; + + case AE_STATE_LOCKED_FLASH_REQUIRED: + ALOGV("DEBUG(%s):dm.aa.aeState(%d) -> Changed to (%d)-AE_STATE_FLASH_REQUIRED" + , __FUNCTION__, src->dm.aa.aeState, AE_STATE_FLASH_REQUIRED); + tmpAeState = (uint8_t) CAMERA_METADATA(AE_STATE_FLASH_REQUIRED); + break; + default: + // Keep the original value + break; + } + + /* HACK: forcely set AE state during init skip count (FW not supported) */ + if (src->dm.request.frameCount < INITIAL_SKIP_FRAME) { + tmpAeState = (uint8_t) CAMERA_METADATA(AE_STATE_SEARCHING); + } + +#ifdef USE_AE_CONVERGED_UDM + if (m_cameraId == CAMERA_ID_BACK && + tmpAeState == (uint8_t) CAMERA_METADATA(AE_STATE_CONVERGED)) { + uint32_t aeUdmState = (uint32_t)src->udm.ae.vendorSpecific[397]; + /* 1: converged, 0: searching */ + if (aeUdmState == 0) { + tmpAeState = (uint8_t) CAMERA_METADATA(AE_STATE_SEARCHING); + } + } +#endif + + const uint8_t aeState = tmpAeState; + settings.update(ANDROID_CONTROL_AE_STATE, &aeState, 1); + ALOGV("DEBUG(%s):dm.aa.aeState(%d), AE_STATE(%d)", 
__FUNCTION__, src->dm.aa.aeState, aeState); + + const uint8_t awbState = (uint8_t) CAMERA_METADATA(src->dm.aa.awbState); + settings.update(ANDROID_CONTROL_AWB_STATE, &awbState, 1); + ALOGV("DEBUG(%s):dm.aa.awbState(%d)", __FUNCTION__, src->dm.aa.awbState); + + const uint8_t afState = (uint8_t) CAMERA_METADATA(src->dm.aa.afState); + settings.update(ANDROID_CONTROL_AF_STATE, &afState, 1); + ALOGV("DEBUG(%s):dm.aa.afState(%d)", __FUNCTION__, src->dm.aa.afState); + + switch (src->dm.aa.aeState) { + case AE_STATE_CONVERGED: + case AE_STATE_LOCKED: + if (m_flashMgr != NULL) + m_flashMgr->notifyAeResult(); + break; + case AE_STATE_INACTIVE: + case AE_STATE_SEARCHING: + case AE_STATE_FLASH_REQUIRED: + case AE_STATE_PRECAPTURE: + default: + break; + } + + switch (src->dm.aa.afState) { + case AA_AFSTATE_FOCUSED_LOCKED: + case AA_AFSTATE_NOT_FOCUSED_LOCKED: + if (m_flashMgr != NULL) + m_flashMgr->notifyAfResultHAL3(); + break; + case AA_AFSTATE_INACTIVE: + case AA_AFSTATE_PASSIVE_SCAN: + case AA_AFSTATE_PASSIVE_FOCUSED: + case AA_AFSTATE_ACTIVE_SCAN: + case AA_AFSTATE_PASSIVE_UNFOCUSED: + default: + break; + } + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateEdgeMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + service_shot = &service_shot_ext.shot; + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + const uint8_t edgeMode = (uint8_t) CAMERA_METADATA(service_shot->ctl.edge.mode); + settings.update(ANDROID_EDGE_MODE, &edgeMode, 1); + ALOGV("DEBUG(%s):dm.edge.mode(%d)", __FUNCTION__, src->dm.edge.mode); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateFlashMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + uint8_t controlState = 0; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + const uint8_t firingPower = (uint8_t) src->dm.flash.firingPower; + settings.update(ANDROID_FLASH_FIRING_POWER, &firingPower, 1); + ALOGV("DEBUG(%s):dm.flash.firingPower(%d)", __FUNCTION__, src->dm.flash.firingPower); + + const int64_t firingTime = (int64_t) src->dm.flash.firingTime; + settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1); + ALOGV("DEBUG(%s):dm.flash.firingTime(%lld)", __FUNCTION__, src->dm.flash.firingTime); + + const uint8_t flashMode = (uint8_t)CAMERA_METADATA(src->dm.flash.flashMode); + settings.update(ANDROID_FLASH_MODE, &flashMode, 1); + ALOGV("DEBUG(%s):dm.flash.flashMode(%d), flashMode=%d", __FUNCTION__, src->dm.flash.flashMode, flashMode); + + uint8_t flashState = ANDROID_FLASH_STATE_READY; + if (m_flashMgr == NULL) + flashState = ANDROID_FLASH_STATE_UNAVAILABLE; + else if (m_sensorStaticInfo->flashAvailable == ANDROID_FLASH_INFO_AVAILABLE_FALSE) + flashState = ANDROID_FLASH_STATE_UNAVAILABLE; + else + flashState = src->dm.flash.flashState; + settings.update(ANDROID_FLASH_STATE, 
&flashState , 1); + ALOGV("DEBUG(%s):flashState=%d", __FUNCTION__, flashState); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateHotPixelMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* HACK : F/W does NOT support some fields */ + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + service_shot = &service_shot_ext.shot; + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + //const uint8_t hotPixelMode = (uint8_t) CAMERA_METADATA(src->dm.hotpixel.mode); + const uint8_t hotPixelMode = (uint8_t) CAMERA_METADATA(service_shot->ctl.hotpixel.mode); + settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1); + ALOGV("DEBUG(%s):dm.hotpixel.mode(%d)", __FUNCTION__, src->dm.hotpixel.mode); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateJpegMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getServiceShot(&shot_ext); + src = &shot_ext.shot; + + const double gpsCoordinates[3] = + { src->ctl.jpeg.gpsCoordinates[0], src->ctl.jpeg.gpsCoordinates[1], src->ctl.jpeg.gpsCoordinates[2] }; + settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 3); + ALOGV("DEBUG(%s):ctl.jpeg.gpsCoordinates(%f,%f,%f)", __FUNCTION__, + src->ctl.jpeg.gpsCoordinates[0], + src->ctl.jpeg.gpsCoordinates[1], + src->ctl.jpeg.gpsCoordinates[2]); +#if 0 + if (src->ctl.jpeg.gpsProcessingMethod != NULL) { + size_t gpsProcessingMethodLength = strlen((char *)src->ctl.jpeg.gpsProcessingMethod) + 1; + uint8_t *gpsProcessingMethod = src->ctl.jpeg.gpsProcessingMethod; + settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, gpsProcessingMethodLength); + ALOGV("DEBUG(%s):ctl.jpeg.gpsProcessingMethod(%s)", __FUNCTION__, + gpsProcessingMethod); + + if (gpsProcessingMethod != NULL) { + free(gpsProcessingMethod); + gpsProcessingMethod = NULL; + } + } +#endif + const int64_t gpsTimestamp = (int64_t) src->ctl.jpeg.gpsTimestamp; + settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1); + ALOGV("DEBUG(%s):ctl.jpeg.gpsTimestamp(%lld)", __FUNCTION__, + src->ctl.jpeg.gpsTimestamp); + + const int32_t orientation = src->ctl.jpeg.orientation; + settings.update(ANDROID_JPEG_ORIENTATION, &orientation, 1); + ALOGV("DEBUG(%s):ctl.jpeg.orientation(%d)", __FUNCTION__, src->ctl.jpeg.orientation); + + const uint8_t quality = (uint8_t) src->ctl.jpeg.quality; + settings.update(ANDROID_JPEG_QUALITY, &quality, 1); + ALOGV("DEBUG(%s):ctl.jpeg.quality(%d)", __FUNCTION__, src->ctl.jpeg.quality); + + const uint8_t thumbnailQuality = (uint8_t) src->ctl.jpeg.thumbnailQuality; + settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1); + ALOGV("DEBUG(%s):ctl.jpeg.thumbnailQuality(%d)", __FUNCTION__, + src->ctl.jpeg.thumbnailQuality); + + const int32_t thumbnailSize[2] = + { (int32_t) src->ctl.jpeg.thumbnailSize[0], (int32_t) src->ctl.jpeg.thumbnailSize[1] }; + 
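Every translate function in this file follows the same CameraMetadata access pattern: exists()/find() to read a request entry and update() to write a result entry. The small sketch below shows that round-trip in isolation for a single tag; it assumes the usual AOSP libcamera_client header location, which may differ between branches, and is meant to be built inside an Android module rather than as a free-standing program.

// Minimal round-trip with the update()/exists()/find() calls used throughout
// this converter. Header paths are the usual AOSP ones and may differ by branch.
#define LOG_TAG "MetadataRoundTripSketch"
#include <camera/CameraMetadata.h>   // android::CameraMetadata (libcamera_client)
#include <utils/Log.h>

using android::CameraMetadata;

static void jpegQualityRoundTrip() {
    CameraMetadata settings;

    // Write a single uint8_t entry, exactly like the settings.update(...) calls above.
    const uint8_t quality = 95;
    settings.update(ANDROID_JPEG_QUALITY, &quality, 1);

    // Read it back with the exists()/find() pattern used by the translate functions.
    if (settings.exists(ANDROID_JPEG_QUALITY)) {
        camera_metadata_entry_t entry = settings.find(ANDROID_JPEG_QUALITY);
        ALOGV("jpeg quality(%d) count(%zu)", entry.data.u8[0], entry.count);
    }
}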
settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); + ALOGV("DEBUG(%s):ctl.jpeg.thumbnailSize(%d,%d)", __FUNCTION__, + src->ctl.jpeg.thumbnailSize[0], src->ctl.jpeg.thumbnailSize[1]); + + const int32_t jpegSize = (int32_t) src->dm.jpeg.size; + settings.update(ANDROID_JPEG_SIZE, &jpegSize, 1); + ALOGV("DEBUG(%s):dm.jpeg.size(%d)", __FUNCTION__, src->dm.jpeg.size); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateLensMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + uint8_t controlState = 0; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + settings.update(ANDROID_LENS_APERTURE, &(m_sensorStaticInfo->fNumber), 1); + ALOGV("DEBUG(%s):dm.lens.aperture is fNumber(%f)", __FUNCTION__, m_sensorStaticInfo->fNumber); + + settings.update(ANDROID_LENS_FILTER_DENSITY, &m_sensorStaticInfo->filterDensity, 1); + ALOGV("DEBUG(%s):dm.lens.filterDensity(%f)", __FUNCTION__, m_sensorStaticInfo->filterDensity); + + settings.update(ANDROID_LENS_FOCAL_LENGTH, &(m_sensorStaticInfo->focalLength), 1); + ALOGV("DEBUG(%s):dm.lens.focalLength(%f)", __FUNCTION__, m_sensorStaticInfo->focalLength); + + /* Focus distance 0 means infinite */ + float focusDistance = src->dm.lens.focusDistance; + if(focusDistance < 0) { + focusDistance = 0; + } else if (focusDistance > m_sensorStaticInfo->minimumFocusDistance) { + focusDistance = m_sensorStaticInfo->minimumFocusDistance; + } + settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1); + ALOGV("DEBUG(%s):dm.lens.focusDistance(%f)", __FUNCTION__, src->dm.lens.focusDistance); + + uint8_t opticalStabilizationMode = (uint8_t) src->dm.lens.opticalStabilizationMode; + + switch (opticalStabilizationMode) { + case OPTICAL_STABILIZATION_MODE_STILL: + opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; + break; + case OPTICAL_STABILIZATION_MODE_VIDEO: + opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON; + break; + case OPTICAL_STABILIZATION_MODE_CENTERING: + default: + opticalStabilizationMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; + break; + } + settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opticalStabilizationMode, 1); + ALOGV("DEBUG(%s):dm.lens.opticalStabilizationMode(%d)", __FUNCTION__, + src->dm.lens.opticalStabilizationMode); + + const uint8_t lensState = src->dm.lens.state; + settings.update(ANDROID_LENS_STATE, &lensState, 1); + ALOGV("DEBUG(%s):dm.lens.state(%d)", __FUNCTION__, src->dm.lens.state); + + const float focusRange[2] = + { src->dm.lens.focusRange[0], src->dm.lens.focusRange[1] }; + settings.update(ANDROID_LENS_FOCUS_RANGE, focusRange, 2); + ALOGV("DEBUG(%s):dm.lens.focusRange(%f,%f)", __FUNCTION__, + focusRange[0], focusRange[1]); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateNoiseMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + 
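translateLensControlData() (request path) and translateLensMetaData() just above (result path) map between the Android OIS control values and the driver's OPTICAL_STABILIZATION_MODE_* values: ON becomes STILL on the way in, and STILL or VIDEO become ON on the way out, with everything else treated as CENTERING/OFF. A stand-alone sketch of that two-way mapping with local stand-in enums (the real constants live in the fimc-is metadata and camera_metadata headers):

#include <cstdio>

// Local stand-ins for the two enum families mapped above; values are illustrative,
// only the names mirror the vendor (fimc-is) and Android metadata constants.
enum VendorOisMode { OIS_CENTERING, OIS_STILL, OIS_VIDEO };
enum AndroidOisMode { ANDROID_OIS_OFF, ANDROID_OIS_ON };

// Request path (translateLensControlData): Android ON -> vendor STILL, otherwise CENTERING.
static VendorOisMode toVendor(AndroidOisMode m) {
    return (m == ANDROID_OIS_ON) ? OIS_STILL : OIS_CENTERING;
}

// Result path (translateLensMetaData): vendor STILL/VIDEO -> Android ON, otherwise OFF.
static AndroidOisMode toAndroid(VendorOisMode m) {
    switch (m) {
    case OIS_STILL:
    case OIS_VIDEO:
        return ANDROID_OIS_ON;
    default:
        return ANDROID_OIS_OFF;
    }
}

int main() {
    std::printf("ON -> vendor %d, vendor VIDEO -> %d\n",
                toVendor(ANDROID_OIS_ON), toAndroid(OIS_VIDEO));
    return 0;
}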
service_shot = &service_shot_ext.shot;
+
+    settings = requestInfo->getResultMeta();
+    requestInfo->getResultShot(&shot_ext);
+    src = &shot_ext.shot;
+
+    uint8_t noiseReductionMode = (uint8_t) CAMERA_METADATA(service_shot->ctl.noise.mode);
+    //uint8_t noiseReductionMode = (uint8_t) CAMERA_METADATA(src->dm.noise.mode);
+    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseReductionMode, 1);
+    ALOGV("DEBUG(%s):dm.noise.mode(%d)", __FUNCTION__, src->dm.noise.mode);
+
+    requestInfo->setResultMeta(settings);
+
+    return OK;
+}
+
+status_t ExynosCamera3MetadataConverter::translateQuirksMetaData(ExynosCameraRequest *requestInfo)
+{
+    CameraMetadata settings;
+    struct camera2_shot_ext shot_ext;
+    struct camera2_shot *src = NULL;
+
+    if (requestInfo == NULL) {
+        ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__);
+        return BAD_VALUE;
+    }
+
+    settings = requestInfo->getResultMeta();
+    requestInfo->getResultShot(&shot_ext);
+    src = &shot_ext.shot;
+
+    //settings.update(ANDROID_QUIRKS_PARTIAL_RESULT, ,);
+
+    requestInfo->setResultMeta(settings);
+
+    return OK;
+}
+
+status_t ExynosCamera3MetadataConverter::translateRequestMetaData(ExynosCameraRequest *requestInfo)
+{
+    CameraMetadata settings;
+    struct camera2_shot_ext shot_ext;
+    struct camera2_shot *src = NULL;
+
+    if (requestInfo == NULL) {
+        ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__);
+        return BAD_VALUE;
+    }
+
+    settings = requestInfo->getResultMeta();
+    requestInfo->getResultShot(&shot_ext);
+    src = &shot_ext.shot;
+
+    src->dm.request.id = requestInfo->getRequestId();
+
+    const int32_t requestId = src->dm.request.id;
+    settings.update(ANDROID_REQUEST_ID, &requestId, 1);
+    ALOGV("DEBUG(%s):dm.request.id(%d)", __FUNCTION__, src->dm.request.id);
+
+    const uint8_t metadataMode = src->dm.request.metadataMode;
+    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
+    ALOGV("DEBUG(%s):dm.request.metadataMode(%d)", __FUNCTION__,
+        src->dm.request.metadataMode);
+
+/*
+ * pipelineDepth is a field of 'REQUEST',
+ * but updating pipelineDepth here can conflict with other writers,
+ * so this value is kept in the request's private data instead of in the shot data.
+ *
+ * The code below is kept as a comment so that updating the pipelineDepth data
+ * in the metadata of the 'REQUEST' field is not forgotten.
+ */
+/*
+ * const uint8_t pipelineDepth = src->dm.request.pipelineDepth;
+ * settings.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipelineDepth, 1);
+ * ALOGV("DEBUG(%s):ANDROID_REQUEST_PIPELINE_DEPTH(%d)", __FUNCTION__, pipelineDepth);
+ */
+
+    requestInfo->setResultMeta(settings);
+
+    return OK;
+}
+
+status_t ExynosCamera3MetadataConverter::translateScalerMetaData(ExynosCameraRequest *requestInfo)
+{
+    CameraMetadata settings;
+    struct camera2_shot_ext shot_ext;
+    struct camera2_shot *src = NULL;
+
+    if (requestInfo == NULL) {
+        ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__);
+        return BAD_VALUE;
+    }
+
+    settings = requestInfo->getResultMeta();
+    requestInfo->getResultShot(&shot_ext);
+    src = &shot_ext.shot;
+
+    const int32_t cropRegion[4] =
+    {
+        m_cropRegion.x,
+        m_cropRegion.y,
+        m_cropRegion.w,
+        m_cropRegion.h
+    };
+    settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
+    ALOGV("DEBUG(%s):dm.scaler.cropRegion(%d,%d,%d,%d)", __FUNCTION__,
+        src->dm.scaler.cropRegion[0],
+        src->dm.scaler.cropRegion[1],
+        src->dm.scaler.cropRegion[2],
+        src->dm.scaler.cropRegion[3]);
+
+    requestInfo->setResultMeta(settings);
+
+    return OK;
+}
+
+status_t ExynosCamera3MetadataConverter::translateSensorMetaData(ExynosCameraRequest *requestInfo)
+{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* HACK : F/W does NOT support some fields */ + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + service_shot = &service_shot_ext.shot; + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + int64_t frameDuration = (int64_t) src->dm.sensor.frameDuration; + if (frameDuration == 0) { + frameDuration = service_shot_ext.shot.ctl.sensor.frameDuration; + } + settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1); + ALOGV("DEBUG(%s):dm.sensor.frameDuration(%lld)", __FUNCTION__, src->dm.sensor.frameDuration); + + int64_t exposureTime = (int64_t)src->dm.sensor.exposureTime; + if (exposureTime == 0 || exposureTime > frameDuration) { + exposureTime = frameDuration; + } + src->dm.sensor.exposureTime = exposureTime; // for EXIF Data + settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1); + + int32_t sensitivity = (int32_t) src->dm.sensor.sensitivity; + if (sensitivity < m_sensorStaticInfo->sensitivityRange[MIN]) { + sensitivity = m_sensorStaticInfo->sensitivityRange[MIN]; + } else if (sensitivity > m_sensorStaticInfo->sensitivityRange[MAX]) { + sensitivity = m_sensorStaticInfo->sensitivityRange[MAX]; + } + src->dm.sensor.sensitivity = sensitivity; // for EXIF Data + settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1); + + ALOGV("DEBUG(%s):[frameCount is %d] exposureTime(%lld) sensitivity(%d)", __FUNCTION__, + src->dm.request.frameCount, exposureTime, sensitivity); + + int32_t testPatternMode = (int32_t) CAMERA_METADATA(src->dm.sensor.testPatternMode); + if (src->dm.sensor.testPatternMode == SENSOR_TEST_PATTERN_MODE_CUSTOM1) + testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1; + settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1); + ALOGV("DEBUG(%s):dm.sensor.testPatternMode(%d)", __FUNCTION__, + src->dm.sensor.testPatternMode); + + const int32_t testPatternData[4] = + { + src->dm.sensor.testPatternData[0], src->dm.sensor.testPatternData[1], + src->dm.sensor.testPatternData[2], src->dm.sensor.testPatternData[3] + }; + settings.update(ANDROID_SENSOR_TEST_PATTERN_DATA, testPatternData, 4); + ALOGV("DEBUG(%s):dm.sensor.testPatternData(%d,%d,%d,%d)", __FUNCTION__, + src->dm.sensor.testPatternData[0], src->dm.sensor.testPatternData[1], + src->dm.sensor.testPatternData[2], src->dm.sensor.testPatternData[3]); + + const int64_t timeStamp = (int64_t) src->udm.sensor.timeStampBoot; + settings.update(ANDROID_SENSOR_TIMESTAMP, &timeStamp, 1); + ALOGV("DEBUG(%s):udm.sensor.timeStampBoot(%lld)", __FUNCTION__, src->udm.sensor.timeStampBoot); + + const camera_metadata_rational_t neutralColorPoint[3] = + { + {(int32_t) src->dm.sensor.neutralColorPoint[0].num, + (int32_t) src->dm.sensor.neutralColorPoint[0].den}, + {(int32_t) src->dm.sensor.neutralColorPoint[1].num, + (int32_t) src->dm.sensor.neutralColorPoint[1].den}, + {(int32_t) src->dm.sensor.neutralColorPoint[2].num, + (int32_t) src->dm.sensor.neutralColorPoint[2].den} + }; + + settings.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT, neutralColorPoint, 3); + ALOGV("DEBUG(%s):dm.sensor.neutralColorPoint(%d/%d,%d/%d,%d/%d)", __FUNCTION__, + src->dm.sensor.neutralColorPoint[0].num, + src->dm.sensor.neutralColorPoint[0].den, + 
src->dm.sensor.neutralColorPoint[1].num, + src->dm.sensor.neutralColorPoint[1].den, + src->dm.sensor.neutralColorPoint[2].num, + src->dm.sensor.neutralColorPoint[2].den); + + /* HACK : F/W does NOT support this field */ + const double noiseProfile[8] = + { + src->dm.sensor.noiseProfile[0][0], src->dm.sensor.noiseProfile[0][1], + src->dm.sensor.noiseProfile[1][0], src->dm.sensor.noiseProfile[1][1], + src->dm.sensor.noiseProfile[2][0], src->dm.sensor.noiseProfile[2][1], + src->dm.sensor.noiseProfile[3][0], src->dm.sensor.noiseProfile[3][1] + }; + settings.update(ANDROID_SENSOR_NOISE_PROFILE, noiseProfile , 8); + ALOGV("DEBUG(%s):dm.sensor.noiseProfile({%f,%f},{%f,%f},{%f,%f},{%f,%f})", __FUNCTION__, + src->dm.sensor.noiseProfile[0][0], + src->dm.sensor.noiseProfile[0][1], + src->dm.sensor.noiseProfile[1][0], + src->dm.sensor.noiseProfile[1][1], + src->dm.sensor.noiseProfile[2][0], + src->dm.sensor.noiseProfile[2][1], + src->dm.sensor.noiseProfile[3][0], + src->dm.sensor.noiseProfile[3][1]); + + const float greenSplit = src->dm.sensor.greenSplit; + settings.update(ANDROID_SENSOR_GREEN_SPLIT, &greenSplit, 1); + ALOGV("DEBUG(%s):dm.sensor.greenSplit(%f)", __FUNCTION__, src->dm.sensor.greenSplit); + + const int64_t rollingShutterSkew = (int64_t) src->dm.sensor.rollingShutterSkew; + settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW, &rollingShutterSkew, 1); + ALOGV("DEBUG(%s):dm.sensor.rollingShutterSkew(%lld)", __FUNCTION__, + src->dm.sensor.rollingShutterSkew); + + //settings.update(ANDROID_SENSOR_TEMPERATURE, , ); + //settings.update(ANDROID_SENSOR_PROFILE_HUE_SAT_MAP, , ); + //settings.update(ANDROID_SENSOR_PROFILE_TONE_CURVE, , ); + + requestInfo->setResultMeta(settings); + + /* HACK: SensorMetaData sync with shot_ext. These values should be used for EXIF */ + requestInfo->setResultShot(&shot_ext); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateShadingMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* HACK : F/W does NOT support some fields */ + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + service_shot = &service_shot_ext.shot; + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + //const uint8_t shadingMode = (uint8_t) CAMERA_METADATA(src->dm.shading.mode); + const uint8_t shadingMode = (uint8_t) CAMERA_METADATA(service_shot->ctl.shading.mode); + settings.update(ANDROID_SHADING_MODE, &shadingMode, 1); + ALOGV("DEBUG(%s):dm.shading.mode(%d)", __FUNCTION__, src->dm.shading.mode); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateStatisticsMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getServiceShot(&service_shot_ext); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + + src->dm.stats.faceDetectMode = service_shot_ext.shot.ctl.stats.faceDetectMode; + const uint8_t faceDetectMode = (uint8_t) 
CAMERA_METADATA(src->dm.stats.faceDetectMode); + settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1); + ALOGV("DEBUG(%s):dm.stats.faceDetectMode(%d)", __FUNCTION__, + src->dm.stats.faceDetectMode); + + if (faceDetectMode > ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) + m_updateFaceDetectionMetaData(&settings, &shot_ext); + + const uint8_t histogramMode = (uint8_t) CAMERA_METADATA(src->dm.stats.histogramMode); + settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1); + ALOGV("DEBUG(%s):dm.stats.histogramMode(%d)", __FUNCTION__, + src->dm.stats.histogramMode); + + const uint8_t sharpnessMapMode = (uint8_t) CAMERA_METADATA(src->dm.stats.sharpnessMapMode); + settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1); + ALOGV("DEBUG(%s):dm.stats.sharpnessMapMode(%d)", __FUNCTION__, + src->dm.stats.sharpnessMapMode); + + const uint8_t hotPixelMapMode = (uint8_t) CAMERA_METADATA(src->dm.stats.hotPixelMapMode); + settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1); + ALOGV("DEBUG(%s):dm.stats.hotPixelMapMode(%d)", __FUNCTION__, + src->dm.stats.hotPixelMapMode); + + /* HACK : F/W does NOT support this field */ + //int32_t *hotPixelMap = (int32_t *) src->dm.stats.hotPixelMap; + const int32_t hotPixelMap[] = {}; + settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, hotPixelMap, ARRAY_LENGTH(hotPixelMap)); + ALOGV("DEBUG(%s):dm.stats.hotPixelMap", __FUNCTION__); + + const uint8_t lensShadingMapMode = (uint8_t) src->dm.stats.lensShadingMapMode; + settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); + ALOGV("DEBUG(%s):dm.stats.lensShadingMapMode(%d)", __FUNCTION__, + src->dm.stats.lensShadingMapMode); + + /* HACK : F/W does NOT support this field */ + //float *lensShadingMap = (float *) src->dm.stats.lensShadingMap; + const float lensShadingMap[] = {1.0, 1.0, 1.0, 1.0}; + settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP, lensShadingMap, 4); + ALOGV("DEBUG(%s):dm.stats.lensShadingMap(%f,%f,%f,%f)", __FUNCTION__, + lensShadingMap[0], lensShadingMap[1], + lensShadingMap[2], lensShadingMap[3]); + + uint8_t sceneFlicker = (uint8_t) CAMERA_METADATA(src->dm.stats.sceneFlicker); + settings.update(ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); + ALOGV("DEBUG(%s):dm.stats.sceneFlicker(%d)", __FUNCTION__, src->dm.stats.sceneFlicker); + + //settings.update(ANDROID_STATISTICS_HISTOGRAM, , ); + //settings.update(ANDROID_STATISTICS_SHARPNESS_MAP, , ); + //settings.update(ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP, , ); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateTonemapMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + uint8_t controlState = 0; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* HACK : F/W does NOT support some fields */ + struct camera2_shot_ext service_shot_ext; + struct camera2_shot *service_shot = NULL; + requestInfo->getServiceShot(&service_shot_ext); + service_shot = &service_shot_ext.shot; + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + float *curveBlue = (float *) src->dm.tonemap.curveBlue; + settings.update(ANDROID_TONEMAP_CURVE_BLUE, curveBlue, 64); + ALOGV("DEBUG(%s):dm.tonemap.curveBlue", __FUNCTION__); + + float *curveGreen = (float *) src->dm.tonemap.curveGreen; + 
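+    /*
+     * Note (based on the generic Android definition of the
+     * android.tonemap.curve* entries, not on anything specific to this HAL):
+     * each of the three curve entries updated in this function carries
+     * 64 floats, i.e. 32 (Pin, Pout) control points per channel, which is why
+     * every settings.update() call here passes a count of 64.
+     */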
settings.update(ANDROID_TONEMAP_CURVE_GREEN, curveGreen, 64); + ALOGV("DEBUG(%s):dm.tonemap.curveGreen", __FUNCTION__); + + float *curveRed = (float *) src->dm.tonemap.curveRed; + settings.update(ANDROID_TONEMAP_CURVE_RED, curveRed, 64); + ALOGV("DEBUG(%s):dm.tonemap.curveRed", __FUNCTION__); + + //const uint8_t toneMapMode = (uint8_t) CAMERA_METADATA(src->dm.tonemap.mode); + const uint8_t toneMapMode = (uint8_t) CAMERA_METADATA(service_shot->ctl.tonemap.mode); + settings.update(ANDROID_TONEMAP_MODE, &toneMapMode, 1); + ALOGV("DEBUG(%s):dm.tonemap.mode(%d)", __FUNCTION__, src->dm.tonemap.mode); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateLedMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + //settings.update(ANDROID_LED_TRANSMIT, (uint8_t *) NULL, 0); + ALOGV("DEBUG(%s):dm.led.transmit(%d)", __FUNCTION__, src->dm.led.transmit); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::translateBlackLevelMetaData(ExynosCameraRequest *requestInfo) +{ + CameraMetadata settings; + struct camera2_shot_ext shot_ext; + struct camera2_shot *src = NULL; + + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + settings = requestInfo->getResultMeta(); + requestInfo->getResultShot(&shot_ext); + src = &shot_ext.shot; + + /* HACK: F/W does NOT support this field */ + //const uint8_t blackLevelLock = (uint8_t) src->dm.blacklevel.lock; + const uint8_t blackLevelLock = (uint8_t) m_blackLevelLockOn; + settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1); + ALOGV("DEBUG(%s):dm.blacklevel.lock(%d)", __FUNCTION__, src->dm.blacklevel.lock); + + requestInfo->setResultMeta(settings); + + return OK; +} + +status_t ExynosCamera3MetadataConverter::updateDynamicMeta(ExynosCameraRequest *requestInfo) +{ + status_t ret = OK; + uint32_t errorFlag = 0; + + ALOGV("DEBUG(%s[%d]):%d frame", __FUNCTION__, __LINE__, requestInfo->getFrameCount()); + /* Validation check */ + if (requestInfo == NULL) { + ALOGE("ERR(%s[%d]):RequestInfo is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + ret = translateColorMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 0); + ret = translateControlMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 1); + ret = translateEdgeMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 2); + ret = translateFlashMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 3); + ret = translateHotPixelMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 4); + ret = translateJpegMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 5); + ret = translateLensMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 6); + ret = translateNoiseMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 7); + ret = translateQuirksMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 8); + ret = translateRequestMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 9); + ret = translateScalerMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 10); + ret = translateSensorMetaData(requestInfo); + if (ret != OK) + errorFlag 
|= (1 << 11); + ret = translateShadingMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 12); + ret = translateStatisticsMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 13); + ret = translateTonemapMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 14); + ret = translateLedMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 15); + ret = translateBlackLevelMetaData(requestInfo); + if (ret != OK) + errorFlag |= (1 << 16); + + if (errorFlag != 0) { + ALOGE("ERR(%s[%d]):failed to translate Meta Data(%d)", __FUNCTION__, __LINE__, errorFlag); + return INVALID_OPERATION; + } + + return OK; +} + +status_t ExynosCamera3MetadataConverter::checkAvailableStreamFormat(int format) +{ + int ret = OK; + ALOGD("DEBUG(%s[%d]) format(%d)", __FUNCTION__, __LINE__, format); + + // TODO:check available format + return ret; +} + +status_t ExynosCamera3MetadataConverter::m_createControlAvailableHighSpeedVideoConfigurations( + const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *streamConfigs, + int cameraId) +{ + status_t ret = NO_ERROR; + int (*highSpeedVideoSizeList)[3] = NULL; + int highSpeedVideoSizeListLength = 0; + int (*highSpeedVideoFPSList)[2] = NULL; + int highSpeedVideoFPSListLength = 0; + int streamConfigSize = 0; + + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]):Sensor static info is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (streamConfigs == NULL) { + ALOGE("ERR(%s[%d]):Stream configs is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (cameraId == CAMERA_ID_FRONT) { + ALOGD("DEBUG(%s[%d]) Front camera does not support High Speed Video", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + highSpeedVideoSizeList = sensorStaticInfo->highSpeedVideoList; + highSpeedVideoSizeListLength = sensorStaticInfo->highSpeedVideoListMax; + highSpeedVideoFPSList = sensorStaticInfo->highSpeedVideoFPSList; + highSpeedVideoFPSListLength = sensorStaticInfo->highSpeedVideoFPSListMax; + + streamConfigSize = (highSpeedVideoSizeListLength * highSpeedVideoFPSListLength * 5); + + for (int i = 0; i < highSpeedVideoFPSListLength; i++) { + for (int j = 0; j < highSpeedVideoSizeListLength; j++) { + streamConfigs->add(highSpeedVideoSizeList[j][0]); + streamConfigs->add(highSpeedVideoSizeList[j][1]); + streamConfigs->add(highSpeedVideoFPSList[i][0]/1000); + streamConfigs->add(highSpeedVideoFPSList[i][1]/1000); + streamConfigs->add(1); + } + } + + return ret; +} + +/* + - Returns NO_ERROR if private reprocessing is supported: streamConfigs will have valid entries. + - Returns NAME_NOT_FOUND if private reprocessing is not supported: streamConfigs will be returned as is, + and scaler.AvailableInputOutputFormatsMap should not be updated. 
+*/ +status_t ExynosCamera3MetadataConverter::m_createScalerAvailableInputOutputFormatsMap(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *streamConfigs, + __unused int cameraId) +{ + int streamConfigSize = 0; + bool isSupportPrivReprocessing = false; + + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]):Sensor static info is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (streamConfigs == NULL) { + ALOGE("ERR(%s[%d]):Stream configs is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + isSupportPrivReprocessing = m_hasTagInList( + sensorStaticInfo->capabilities, + sensorStaticInfo->capabilitiesLength, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING); + + if(isSupportPrivReprocessing == true) { + streamConfigs->add(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED); + streamConfigs->add(2); + streamConfigs->add(HAL_PIXEL_FORMAT_YCbCr_420_888); + streamConfigs->add(HAL_PIXEL_FORMAT_BLOB); + streamConfigs->setCapacity(streamConfigSize); + + return NO_ERROR; + } else { + return NAME_NOT_FOUND; + } +} + +status_t ExynosCamera3MetadataConverter::m_createScalerAvailableStreamConfigurationsOutput(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *streamConfigs, + int cameraId) +{ + status_t ret = NO_ERROR; + int (*yuvSizeList)[SIZE_OF_RESOLUTION] = NULL; + int yuvSizeListLength = 0; + int (*jpegSizeList)[SIZE_OF_RESOLUTION] = NULL; + int jpegSizeListLength = 0; + int streamConfigSize = 0; + bool isSupportHighResolution = false; + bool isSupportPrivReprocessing = false; + + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]):Sensor static info is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (streamConfigs == NULL) { + ALOGE("ERR(%s[%d]):Stream configs is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + isSupportHighResolution = m_hasTagInList( + sensorStaticInfo->capabilities, + sensorStaticInfo->capabilitiesLength, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); + + if (cameraId == CAMERA_ID_BACK) { + yuvSizeList = sensorStaticInfo->rearPreviewList; + yuvSizeListLength = sensorStaticInfo->rearPreviewListMax; + jpegSizeList = sensorStaticInfo->rearPictureList; + jpegSizeListLength = sensorStaticInfo->rearPictureListMax; + } else { /* CAMERA_ID_FRONT */ + yuvSizeList = sensorStaticInfo->frontPreviewList; + yuvSizeListLength = sensorStaticInfo->frontPreviewListMax; + jpegSizeList = sensorStaticInfo->frontPictureList; + jpegSizeListLength = sensorStaticInfo->frontPictureListMax; + } + + /* Check wheather the private reprocessing is supported or not */ + isSupportPrivReprocessing = m_hasTagInList( + sensorStaticInfo->capabilities, + sensorStaticInfo->capabilitiesLength, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING); + + /* TODO: Add YUV reprocessing if necessary */ + + /* HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED stream configuration list size */ + streamConfigSize = yuvSizeListLength * 4; + /* YUV output stream configuration list size */ + streamConfigSize += (yuvSizeListLength * 4) * (ARRAY_LENGTH(YUV_FORMATS)); + /* Stall output stream configuration list size */ + streamConfigSize += (jpegSizeListLength * 4) * (ARRAY_LENGTH(STALL_FORMATS)); + /* RAW output stream configuration list size */ + streamConfigSize += (1 * 4) * (ARRAY_LENGTH(RAW_FORMATS)); + /* ZSL input stream configuration list size */ + if(isSupportPrivReprocessing == true) { + streamConfigSize += 4; + } + streamConfigs->setCapacity(streamConfigSize); + + /* HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED stream supported size list */ + 
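+    /*
+     * Note (this reflects the generic layout of
+     * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entries rather than
+     * anything vendor-specific): every entry appended below is a group of
+     * four int32 values { format, width, height, direction }, where direction
+     * is either ..._STREAM_CONFIGURATIONS_OUTPUT or ..._INPUT. This is why
+     * streamConfigSize is counted in multiples of 4 above and decremented by
+     * 4 whenever a size is skipped.
+     */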
for (int i = 0; i < yuvSizeListLength; i++) { + streamConfigs->add(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED); + streamConfigs->add(yuvSizeList[i][0]); + streamConfigs->add(yuvSizeList[i][1]); + streamConfigs->add(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + + /* YUV output stream supported size list */ + for (size_t i = 0; i < ARRAY_LENGTH(YUV_FORMATS); i++) { + for (int j = 0; j < yuvSizeListLength; j++) { + int pixelSize = yuvSizeList[j][0] * yuvSizeList[j][1]; + if (isSupportHighResolution == false + && pixelSize > HIGH_RESOLUTION_MIN_PIXEL_SIZE) { + streamConfigSize -= 4; + continue; + } + + streamConfigs->add(YUV_FORMATS[i]); + streamConfigs->add(yuvSizeList[j][0]); + streamConfigs->add(yuvSizeList[j][1]); + streamConfigs->add(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + } + + /* Stall output stream supported size list */ + for (size_t i = 0; i < ARRAY_LENGTH(STALL_FORMATS); i++) { + for (int j = 0; j < jpegSizeListLength; j++) { + int pixelSize = jpegSizeList[j][0] * jpegSizeList[j][1]; + if (isSupportHighResolution == false + && pixelSize > HIGH_RESOLUTION_MIN_PIXEL_SIZE) { + streamConfigSize -= 4; + continue; + } + + streamConfigs->add(STALL_FORMATS[i]); + streamConfigs->add(jpegSizeList[j][0]); + streamConfigs->add(jpegSizeList[j][1]); + streamConfigs->add(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + } + + /* RAW output stream supported size list */ + for (size_t i = 0; i < ARRAY_LENGTH(RAW_FORMATS); i++) { + /* Add sensor max size */ + streamConfigs->add(RAW_FORMATS[i]); + streamConfigs->add(sensorStaticInfo->maxSensorW); + streamConfigs->add(sensorStaticInfo->maxSensorH); + streamConfigs->add(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + + /* ZSL input stream supported size list */ + { + if(isSupportPrivReprocessing == true) { + streamConfigs->add(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED); + streamConfigs->add(yuvSizeList[0][0]); + streamConfigs->add(yuvSizeList[0][1]); + streamConfigs->add(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT); + } + } + + streamConfigs->setCapacity(streamConfigSize); + +#ifdef DEBUG_STREAM_CONFIGURATIONS + const int32_t* streamConfigArray = NULL; + streamConfigArray = streamConfigs->array(); + for (int i = 0; i < streamConfigSize; i = i + 4) { + ALOGD("DEBUG(%s[%d]):ID %d Size %4dx%4d Format %2x %6s", + __FUNCTION__, __LINE__, + cameraId, + streamConfigArray[i+1], streamConfigArray[i+2], + streamConfigArray[i], + (streamConfigArray[i+3] == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT)? 
+ "OUTPUT" : "INPUT"); + } +#endif + + return ret; +} + +status_t ExynosCamera3MetadataConverter::m_createScalerAvailableMinFrameDurations(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *minDurations, + int cameraId) +{ + status_t ret = NO_ERROR; + int (*yuvSizeList)[SIZE_OF_RESOLUTION] = NULL; + int yuvSizeListLength = 0; + int (*jpegSizeList)[SIZE_OF_RESOLUTION] = NULL; + int jpegSizeListLength = 0; + int minDurationSize = 0; + int64_t currentMinDuration = 0L; + bool isSupportHighResolution = false; + + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]):Sensor static info is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (minDurations == NULL) { + ALOGE("ERR(%s[%d]):Stream configs is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + isSupportHighResolution = m_hasTagInList( + sensorStaticInfo->capabilities, + sensorStaticInfo->capabilitiesLength, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE); + + if (cameraId == CAMERA_ID_BACK) { + yuvSizeList = sensorStaticInfo->rearPreviewList; + yuvSizeListLength = sensorStaticInfo->rearPreviewListMax; + jpegSizeList = sensorStaticInfo->rearPictureList; + jpegSizeListLength = sensorStaticInfo->rearPictureListMax; + } else { /* CAMERA_ID_FRONT */ + yuvSizeList = sensorStaticInfo->frontPreviewList; + yuvSizeListLength = sensorStaticInfo->frontPreviewListMax; + jpegSizeList = sensorStaticInfo->frontPictureList; + jpegSizeListLength = sensorStaticInfo->frontPictureListMax; + } + + /* HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED stream min frame duration list size */ + minDurationSize = yuvSizeListLength * 4; + /* YUV output stream min frame duration list size */ + minDurationSize += (yuvSizeListLength * 4) * (ARRAY_LENGTH(YUV_FORMATS)); + /* Stall output stream configuration list size */ + minDurationSize += (jpegSizeListLength * 4) * (ARRAY_LENGTH(STALL_FORMATS)); + /* RAW output stream min frame duration list size */ + minDurationSize += (1 * 4) * (ARRAY_LENGTH(RAW_FORMATS)); + minDurations->setCapacity(minDurationSize); + + /* HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED stream min frame duration list */ + for (int i = 0; i < yuvSizeListLength; i++) { + minDurations->add(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED); + minDurations->add((int64_t)yuvSizeList[i][0]); + minDurations->add((int64_t)yuvSizeList[i][1]); + minDurations->add((int64_t)YUV_FORMAT_MIN_DURATION); + } + + /* YUV output stream min frame duration list */ + for (size_t i = 0; i < ARRAY_LENGTH(YUV_FORMATS); i++) { + for (int j = 0; j < yuvSizeListLength; j++) { + int pixelSize = yuvSizeList[j][0] * yuvSizeList[j][1]; + if (isSupportHighResolution == false + && pixelSize > HIGH_RESOLUTION_MIN_PIXEL_SIZE) { + minDurationSize -= 4; + continue; + } + + minDurations->add((int64_t)YUV_FORMATS[i]); + minDurations->add((int64_t)yuvSizeList[j][0]); + minDurations->add((int64_t)yuvSizeList[j][1]); + minDurations->add((int64_t)YUV_FORMAT_MIN_DURATION); + } + } + + /* Stall output stream min frame duration list */ + for (size_t i = 0; i < ARRAY_LENGTH(STALL_FORMATS); i++) { + for (int j = 0; j < jpegSizeListLength; j++) { + int pixelSize = jpegSizeList[j][0] * jpegSizeList[j][1]; + if (isSupportHighResolution == false + && pixelSize > HIGH_RESOLUTION_MIN_PIXEL_SIZE) { + minDurationSize -= 4; + continue; + } + + minDurations->add((int64_t)STALL_FORMATS[i]); + minDurations->add((int64_t)jpegSizeList[j][0]); + minDurations->add((int64_t)jpegSizeList[j][1]); + + if (pixelSize > HIGH_RESOLUTION_MIN_PIXEL_SIZE) + currentMinDuration = HIGH_RESOLUTION_MIN_DURATION; + 
else if (pixelSize > FHD_PIXEL_SIZE) + currentMinDuration = STALL_FORMAT_MIN_DURATION; + else + currentMinDuration = YUV_FORMAT_MIN_DURATION; + minDurations->add((int64_t)currentMinDuration); + } + } + + /* RAW output stream min frame duration list */ + for (size_t i = 0; i < ARRAY_LENGTH(RAW_FORMATS); i++) { + /* Add sensor max size */ + minDurations->add((int64_t)RAW_FORMATS[i]); + minDurations->add((int64_t)sensorStaticInfo->maxSensorW); + minDurations->add((int64_t)sensorStaticInfo->maxSensorH); + minDurations->add((int64_t)YUV_FORMAT_MIN_DURATION); + } + + minDurations->setCapacity(minDurationSize); + +#ifdef DEBUG_STREAM_CONFIGURATIONS + const int64_t* minDurationArray = NULL; + minDurationArray = minDurations->array(); + for (int i = 0; i < minDurationSize; i = i + 4) { + ALOGD("DEBUG(%s[%d]):ID %d Size %4lldx%4lld Format %2x MinDuration %9lld", + __FUNCTION__, __LINE__, + cameraId, + minDurationArray[i+1], minDurationArray[i+2], + (int)minDurationArray[i], minDurationArray[i+3]); + } +#endif + + return ret; +} + +status_t ExynosCamera3MetadataConverter::m_createJpegAvailableThumbnailSizes(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *thumbnailSizes) +{ + int ret = OK; + int (*thumbnailSizeList)[3] = NULL; + size_t thumbnailSizeListLength = 0; + + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]):Sensor static info is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (thumbnailSizes == NULL) { + ALOGE("ERR(%s[%d]):Thumbnail sizes is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + thumbnailSizeList = sensorStaticInfo->thumbnailList; + thumbnailSizeListLength = sensorStaticInfo->thumbnailListMax; + thumbnailSizes->setCapacity(thumbnailSizeListLength * 2); + + /* JPEG thumbnail sizes must be delivered with ascending ordering */ + for (int i = (int)thumbnailSizeListLength - 1; i >= 0; i--) { + thumbnailSizes->add(thumbnailSizeList[i][0]); + thumbnailSizes->add(thumbnailSizeList[i][1]); + } + + return ret; +} + +status_t ExynosCamera3MetadataConverter::m_createAeAvailableFpsRanges(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *fpsRanges, + int cameraId) +{ + int ret = OK; + int (*fpsRangesList)[2] = NULL; + size_t fpsRangesLength = 0; + + if (sensorStaticInfo == NULL) { + ALOGE("ERR(%s[%d]):Sensor static info is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (fpsRanges == NULL) { + ALOGE("ERR(%s[%d]):FPS ranges is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (cameraId == CAMERA_ID_BACK) { + fpsRangesList = sensorStaticInfo->rearFPSList; + fpsRangesLength = sensorStaticInfo->rearFPSListMax; + } else { /* CAMERA_ID_FRONT */ + fpsRangesList = sensorStaticInfo->frontFPSList; + fpsRangesLength = sensorStaticInfo->frontFPSListMax; + } + + fpsRanges->setCapacity(fpsRangesLength * 2); + + for (size_t i = 0; i < fpsRangesLength; i++) { + fpsRanges->add(fpsRangesList[i][0]/1000); + fpsRanges->add(fpsRangesList[i][1]/1000); + } + + return ret; +} + +bool ExynosCamera3MetadataConverter::m_hasTagInList(int32_t *list, size_t listSize, int32_t tag) +{ + bool hasTag = false; + + for (size_t i = 0; i < listSize; i++) { + if (list[i] == tag) { + hasTag = true; + break; + } + } + + return hasTag; +} + +bool ExynosCamera3MetadataConverter::m_hasTagInList(uint8_t *list, size_t listSize, int32_t tag) +{ + bool hasTag = false; + + for (size_t i = 0; i < listSize; i++) { + if (list[i] == tag) { + hasTag = true; + break; + } + } + + return hasTag; +} + +status_t 
ExynosCamera3MetadataConverter::m_integrateOrderedSizeList(int (*list1)[SIZE_OF_RESOLUTION], size_t list1Size, + int (*list2)[SIZE_OF_RESOLUTION], size_t list2Size, + int (*orderedList)[SIZE_OF_RESOLUTION]) +{ + int *currentSize = NULL; + size_t sizeList1Index = 0; + size_t sizeList2Index = 0; + + if (list1 == NULL || list2 == NULL || orderedList == NULL) { + ALOGE("ERR(%s[%d]):Arguments are NULL. list1 %p list2 %p orderedlist %p", + __FUNCTION__, __LINE__, + list1, list2, orderedList); + return BAD_VALUE; + } + + /* This loop will integrate two size list in descending order */ + for (size_t i = 0; i < list1Size + list2Size; i++) { + if (sizeList1Index >= list1Size) { + currentSize = list2[sizeList2Index++]; + } else if (sizeList2Index >= list2Size) { + currentSize = list1[sizeList1Index++]; + } else { + if (list1[sizeList1Index][0] < list2[sizeList2Index][0]) { + currentSize = list2[sizeList2Index++]; + } else if (list1[sizeList1Index][0] > list2[sizeList2Index][0]) { + currentSize = list1[sizeList1Index++]; + } else { + if (list1[sizeList1Index][1] < list2[sizeList2Index][1]) + currentSize = list2[sizeList2Index++]; + else + currentSize = list1[sizeList1Index++]; + } + } + orderedList[i][0] = currentSize[0]; + orderedList[i][1] = currentSize[1]; + orderedList[i][2] = currentSize[2]; + } + + return NO_ERROR; +} + +void ExynosCamera3MetadataConverter::m_updateFaceDetectionMetaData(CameraMetadata *settings, struct camera2_shot_ext *shot_ext) +{ + int32_t faceIds[NUM_OF_DETECTED_FACES]; + /* {leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY} */ + int32_t faceLandmarks[NUM_OF_DETECTED_FACES * FACE_LANDMARKS_MAX_INDEX]; + /* {xmin, ymin, xmax, ymax} with the absolute coordinate */ + int32_t faceRectangles[NUM_OF_DETECTED_FACES * RECTANGLE_MAX_INDEX]; + uint8_t faceScores[NUM_OF_DETECTED_FACES]; + uint8_t detectedFaceCount = 0; + int maxSensorW = 0, maxSensorH = 0; + ExynosRect bnsSize, bayerCropSize; + int xOffset = 0, yOffset = 0; + int hwPreviewW = 0, hwPreviewH = 0; + float scaleRatioW = 0.0f, scaleRatioH = 0.0f; + + if (settings == NULL) { + ALOGE("ERR(%s[%d]:CameraMetadata is NULL", __FUNCTION__, __LINE__); + return; + } + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]:camera2_shot_ext is NULL", __FUNCTION__, __LINE__); + return; + } + + /* Original FD region : based on FD input size (e.g. preview size) + * Camera Metadata FD region : based on sensor active array size (e.g. 
max sensor size) + * The FD region from firmware must be scaled into the size based on sensor active array size + */ + m_parameters->getMaxSensorSize(&maxSensorW, &maxSensorH); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + if ((maxSensorW - bayerCropSize.w) / 2 > 0) + xOffset = ALIGN_DOWN(((maxSensorW - bayerCropSize.w) / 2), 2); + if ((maxSensorH - bayerCropSize.h) / 2 > 0) + yOffset = ALIGN_DOWN(((maxSensorH - bayerCropSize.h) / 2), 2); + if (m_parameters->isMcscVraOtf() == true) + m_parameters->getYuvSize(&hwPreviewW, &hwPreviewH, 0); + else + m_parameters->getHwVraInputSize(&hwPreviewW, &hwPreviewH); + scaleRatioW = (float)bayerCropSize.w / (float)hwPreviewW; + scaleRatioH = (float)bayerCropSize.h / (float)hwPreviewH; + + for (int i = 0; i < NUM_OF_DETECTED_FACES; i++) { + if (shot_ext->shot.dm.stats.faceIds[i]) { + switch (shot_ext->shot.dm.stats.faceDetectMode) { + case FACEDETECT_MODE_FULL: + faceLandmarks[(i * FACE_LANDMARKS_MAX_INDEX) + LEFT_EYE_X] = -1; + faceLandmarks[(i * FACE_LANDMARKS_MAX_INDEX) + LEFT_EYE_Y] = -1; + faceLandmarks[(i * FACE_LANDMARKS_MAX_INDEX) + RIGHT_EYE_X] = -1; + faceLandmarks[(i * FACE_LANDMARKS_MAX_INDEX) + RIGHT_EYE_Y] = -1; + faceLandmarks[(i * FACE_LANDMARKS_MAX_INDEX) + MOUTH_X] = -1; + faceLandmarks[(i * FACE_LANDMARKS_MAX_INDEX) + MOUTH_Y] = -1; + case FACEDETECT_MODE_SIMPLE: + faceIds[i] = shot_ext->shot.dm.stats.faceIds[i]; + /* Normalize the score into the range of [0, 100] */ + faceScores[i] = (uint8_t) ((float)shot_ext->shot.dm.stats.faceScores[i] / (255.0f / 100.0f)); + + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X1] = (int32_t) ((shot_ext->shot.dm.stats.faceRectangles[i][X1] * scaleRatioW) + xOffset); + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y1] = (int32_t) ((shot_ext->shot.dm.stats.faceRectangles[i][Y1] * scaleRatioH) + yOffset); + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X2] = (int32_t) ((shot_ext->shot.dm.stats.faceRectangles[i][X2] * scaleRatioW) + xOffset); + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y2] = (int32_t) ((shot_ext->shot.dm.stats.faceRectangles[i][Y2] * scaleRatioH) + yOffset); + ALOGV("DEBUG(%s):faceIds[%d](%d), faceScores[%d](%d), original(%d,%d,%d,%d), converted(%d,%d,%d,%d)", + __FUNCTION__, + i, faceIds[i], i, faceScores[i], + shot_ext->shot.dm.stats.faceRectangles[i][X1], + shot_ext->shot.dm.stats.faceRectangles[i][Y1], + shot_ext->shot.dm.stats.faceRectangles[i][X2], + shot_ext->shot.dm.stats.faceRectangles[i][Y2], + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X1], + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y1], + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X2], + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y2]); + + detectedFaceCount++; + break; + case FACEDETECT_MODE_OFF: + faceScores[i] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X1] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y1] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X2] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX)+ Y2] = 0; + break; + default: + ALOGE("ERR(%s[%d]):Not supported FD mode(%d)", __FUNCTION__, __LINE__, + shot_ext->shot.dm.stats.faceDetectMode); + break; + } + } else { + faceIds[i] = 0; + faceScores[i] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X1] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y1] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + X2] = 0; + faceRectangles[(i * RECTANGLE_MAX_INDEX) + Y2] = 0; + } + } + + if (detectedFaceCount > 0) { + switch (shot_ext->shot.dm.stats.faceDetectMode) { + case FACEDETECT_MODE_FULL: + settings->update(ANDROID_STATISTICS_FACE_LANDMARKS, 
faceLandmarks, + detectedFaceCount * FACE_LANDMARKS_MAX_INDEX); + ALOGV("DEBUG(%s):dm.stats.faceLandmarks(%d)", __FUNCTION__, detectedFaceCount); + case FACEDETECT_MODE_SIMPLE: + settings->update(ANDROID_STATISTICS_FACE_IDS, faceIds, detectedFaceCount); + ALOGV("DEBUG(%s):dm.stats.faceIds(%d)", __FUNCTION__, detectedFaceCount); + + settings->update(ANDROID_STATISTICS_FACE_RECTANGLES, faceRectangles, + detectedFaceCount * RECTANGLE_MAX_INDEX); + ALOGV("DEBUG(%s):dm.stats.faceRectangles(%d)", __FUNCTION__, detectedFaceCount); + + settings->update(ANDROID_STATISTICS_FACE_SCORES, faceScores, detectedFaceCount); + ALOGV("DEBUG(%s):dm.stats.faceScores(%d)", __FUNCTION__, detectedFaceCount); + break; + case FACEDETECT_MODE_OFF: + default: + ALOGE("ERR(%s[%d]):Not supported FD mode(%d)", __FUNCTION__, __LINE__, + shot_ext->shot.dm.stats.faceDetectMode); + break; + } + } + + return; +} + +void ExynosCamera3MetadataConverter::m_convert3AARegion(ExynosRect2 *region) +{ + ExynosRect2 newRect2; + ExynosRect maxSensorSize; + ExynosRect hwBcropSize; + + m_parameters->getMaxSensorSize(&maxSensorSize.w, &maxSensorSize.h); + m_parameters->getHwBayerCropRegion(&hwBcropSize.w, &hwBcropSize.h, + &hwBcropSize.x, &hwBcropSize.y); + + newRect2 = convertingSrcArea2DstArea(region, &maxSensorSize, &hwBcropSize); + + region->x1 = newRect2.x1; + region->y1 = newRect2.y1; + region->x2 = newRect2.x2; + region->y2 = newRect2.y2; +} + +status_t ExynosCamera3MetadataConverter::checkMetaValid(camera_metadata_tag_t tag, const void *data) +{ + status_t ret = NO_ERROR; + camera_metadata_entry_t entry; + + int32_t value = 0; + const int32_t *i32Range = NULL; + + if (m_staticInfo.exists(tag) == false) { + ALOGE("ERR(%s[%d]):Cannot find entry, tag(%d)", __FUNCTION__, __LINE__, tag); + return BAD_VALUE; + } + + entry = m_staticInfo.find(tag); + + /* TODO: handle all tags + * need type check + */ + switch (tag) { + case ANDROID_SENSOR_INFO_SENSITIVITY_RANGE: + value = *(int32_t *)data; + i32Range = entry.data.i32; + if (value < i32Range[0] || value > i32Range[1]) { + ALOGE("ERR(%s[%d]):Invalid Sensitivity value(%d), range(%d, %d)", + __FUNCTION__, __LINE__, value, i32Range[0], i32Range[1]); + ret = BAD_VALUE; + } + break; + default: + ALOGE("ERR(%s[%d]):Tag (%d) is not implemented", __FUNCTION__, __LINE__, tag); + break; + } + + return ret; +} + +status_t ExynosCamera3MetadataConverter::getDefaultSetting(camera_metadata_tag_t tag, void *data) +{ + status_t ret = NO_ERROR; + camera_metadata_entry_t entry; + + const int32_t *i32Range = NULL; + + if (m_defaultRequestSetting.exists(tag) == false) { + ALOGE("ERR(%s[%d]):Cannot find entry, tag(%d)", __FUNCTION__, __LINE__, tag); + return BAD_VALUE; + } + + entry = m_defaultRequestSetting.find(tag); + + /* TODO: handle all tags + * need type check + */ + switch (tag) { + case ANDROID_SENSOR_SENSITIVITY: + i32Range = entry.data.i32; + *(int32_t *)data = i32Range[0]; + break; + default: + ALOGE("ERR(%s[%d]):Tag (%d) is not implemented", __FUNCTION__, __LINE__, tag); + break; + } + + return ret; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraMetadataConverter.h b/libcamera/common_v2/ExynosCameraMetadataConverter.h new file mode 100644 index 0000000..8b5afe7 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraMetadataConverter.h @@ -0,0 +1,210 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef EXYNOS_CAMERA_METADATA_CONVERTER_H__ +#define EXYNOS_CAMERA_METADATA_CONVERTER_H__ + +#include +#include +#include +#include + +#include "ExynosCameraConfig.h" + +#include "ExynosCameraParameters.h" + +#include "ExynosCamera3SensorInfo.h" + +#include "fimc-is-metadata.h" + +#define FIMC_IS_METADATA(x) (x + 1) +#define CAMERA_METADATA(x) ((x < 1)? 0 : x - 1) + +#define PREVIEW_FORMAT_MIN_DURATION (33331760L) +#define PICTURE_FORMAT_MIN_DURATION (100000000L) +#define ACTUAL_PIPELINE_DEPTH (4) + +namespace android { + +class ExynosCameraRequestManager; +class ExynosCameraRequest; + +enum map_index { + CAMERA_META, + FIMC_IS_META, + MAX_INDEX +}; +const int32_t PREVIEW_FORMATS[] = +{ + HAL_PIXEL_FORMAT_RGBA_8888, + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, + HAL_PIXEL_FORMAT_YCbCr_420_888 +}; + +const int32_t PICTURE_FORMATS[] = +{ + /* HAL_PIXEL_FORMAT_RAW_SENSOR, */ + HAL_PIXEL_FORMAT_RAW16, + HAL_PIXEL_FORMAT_BLOB, +}; + +enum rectangle_index { + X1, + Y1, + X2, + Y2, + RECTANGLE_MAX_INDEX, +}; + +enum face_landmarks_index { + LEFT_EYE_X, + LEFT_EYE_Y, + RIGHT_EYE_X, + RIGHT_EYE_Y, + MOUTH_X, + MOUTH_Y, + FACE_LANDMARKS_MAX_INDEX, +}; + +class ExynosCameraMetadataConverter : public virtual RefBase { +public: + ExynosCameraMetadataConverter(){}; + ~ExynosCameraMetadataConverter(){}; + + //virtual status_t constructStaticInfo(int cameraId, camera_metadata_t **info) = 0; + virtual status_t constructDefaultRequestSettings(int type, camera_metadata_t **request) = 0; + virtual status_t initShotData(struct camera2_shot_ext *shot_ext) = 0; + virtual status_t updateDynamicMeta(ExynosCameraRequest *requestInfo) = 0; + virtual status_t convertRequestToShot(CameraMetadata &request, struct camera2_shot_ext *dst_ext, int *reqId = NULL) = 0; + virtual status_t checkAvailableStreamFormat(int format) = 0; + virtual void setStaticInfo(int camId, camera_metadata_t *info) = 0; +}; + +class ExynosCamera3MetadataConverter : public virtual ExynosCameraMetadataConverter { +public: + ExynosCamera3MetadataConverter(int cameraId, ExynosCameraParameters *parameters); + ~ExynosCamera3MetadataConverter(); + static status_t constructStaticInfo(int cameraId, camera_metadata_t **info); + virtual status_t constructDefaultRequestSettings(int type, camera_metadata_t **request); + + /* helper functions for android control and meta data */ + virtual status_t convertRequestToShot(CameraMetadata &request, struct camera2_shot_ext *dst_ext, int *reqId = NULL); + virtual status_t updateDynamicMeta(ExynosCameraRequest *requestInfo); + + + /* meta -> shot */ + virtual status_t translateColorControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateControlControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateDemosaicControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateEdgeControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateFlashControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t 
translateHotPixelControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateJpegControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateLensControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateNoiseControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateRequestControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext, int *reqId); + virtual status_t translateScalerControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateSensorControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateShadingControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateStatisticsControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateTonemapControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateLedControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + virtual status_t translateBlackLevelControlData(CameraMetadata &settings, struct camera2_shot_ext *dst_ext); + + /* shot -> meta */ + virtual status_t translateColorMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateControlMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateEdgeMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateFlashMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateHotPixelMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateJpegMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateLensMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateNoiseMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateQuirksMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateRequestMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateScalerMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateSensorMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateShadingMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateStatisticsMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateTonemapMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateLedMetaData(ExynosCameraRequest *requestInfo); + virtual status_t translateBlackLevelMetaData(ExynosCameraRequest *requestInfo); + + /* Other helper functions */ + virtual status_t initShotData(struct camera2_shot_ext *shot_ext); + virtual status_t checkAvailableStreamFormat(int format); + + virtual void setStaticInfo(int camId, camera_metadata_t *info); + virtual status_t checkMetaValid(camera_metadata_tag_t tag, const void *data); + virtual status_t getDefaultSetting(camera_metadata_tag_t tag, void *data); +private: + static status_t m_createControlAvailableHighSpeedVideoConfigurations(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *streamConfigs, + int cameraId); + static status_t m_createScalerAvailableInputOutputFormatsMap(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *streamConfigs, + int cameraId); + static status_t m_createScalerAvailableStreamConfigurationsOutput(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *streamConfigs, + int cameraId); + static status_t 
m_createScalerAvailableMinFrameDurations(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *minDurations, + int cameraId); + static status_t m_createJpegAvailableThumbnailSizes(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *thumbnailSizes); + static status_t m_createAeAvailableFpsRanges(const struct ExynosSensorInfoBase *sensorStaticInfo, + Vector *fpsRanges, + int cameraId); + static bool m_hasTagInList(int32_t *list, size_t listSize, int32_t tag); + static bool m_hasTagInList(uint8_t *list, size_t listSize, int32_t tag); + static status_t m_integrateOrderedSizeList(int (*list1)[SIZE_OF_RESOLUTION], size_t list1Size, + int (*list2)[SIZE_OF_RESOLUTION], size_t list2Size, + int (*orderedList)[SIZE_OF_RESOLUTION]); + + void m_updateFaceDetectionMetaData(CameraMetadata *settings, + struct camera2_shot_ext *shot_ext); + void m_convert3AARegion(ExynosRect2 *region); + +private: + mutable Mutex m_requestLock; + int m_cameraId; + ExynosCameraParameters *m_parameters; + ExynosCameraActivityFlash *m_flashMgr; + + CameraMetadata m_staticInfo; + CameraMetadata m_defaultRequestSetting; + struct ExynosSensorInfoBase *m_sensorStaticInfo; + + bool m_preCaptureTriggerOn; + bool m_isManualAeControl; + + /* HACK : Temporary save the Mode info for adjusting value for CTS Test */ + ExynosRect m_cropRegion; + bool m_blackLevelLockOn; + bool m_faceDetectModeOn; + uint32_t m_lockVendorIsoValue; + uint64_t m_lockExposureTime; + uint64_t m_preExposureTime; + uint32_t m_afMode; + uint32_t m_preAfMode; + float m_focusDistance; + uint32_t m_maxFps; + bool m_overrideFlashControl; + uint8_t m_gpsProcessingMethod[32]; +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCameraNode.cpp b/libcamera/common_v2/ExynosCameraNode.cpp new file mode 100644 index 0000000..f90d847 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraNode.cpp @@ -0,0 +1,1775 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraNode" + +#include"ExynosCameraNode.h" +#include"ExynosCameraUtils.h" + +namespace android { + +/* ExynosCameraNodeItem */ +ExynosCameraNodeRequest::ExynosCameraNodeRequest() +{ + m_requestState = NODE_REQUEST_STATE_BASE; + m_requestCount = 0; +} + +ExynosCameraNodeRequest::~ExynosCameraNodeRequest() +{ +} + +void ExynosCameraNodeRequest::setState(enum node_request_state state) +{ + m_requestState = state; +} + +enum node_request_state ExynosCameraNodeRequest::getState(void) +{ + return m_requestState; +} + +void ExynosCameraNodeRequest::setRequest(unsigned int requestCount) +{ + m_requestCount = requestCount; +} + +unsigned int ExynosCameraNodeRequest::getRequest(void) +{ + return m_requestCount; +} + + +ExynosCameraNode::ExynosCameraNode() +{ + memset(m_name, 0x00, sizeof(m_name)); + memset(m_alias, 0x00, sizeof(m_alias)); + memset(&m_v4l2Format, 0x00, sizeof(struct v4l2_format)); + memset(&m_v4l2ReqBufs, 0x00, sizeof(struct v4l2_requestbuffers)); + + m_fd = NODE_INIT_NEGATIVE_VALUE; + + m_v4l2Format.fmt.pix_mp.width = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.height = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.pixelformat = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.num_planes = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.colorspace = (enum v4l2_colorspace)7; /* V4L2_COLORSPACE_JPEG */ + /* + * 7 : Full YuvRange, 4 : Limited YuvRange + * you can refer m_YUV_RANGE_2_V4L2_COLOR_RANGE() and m_V4L2_COLOR_RANGE_2_YUV_RANGE() + */ + + m_v4l2ReqBufs.count = NODE_INIT_NEGATIVE_VALUE; + m_v4l2ReqBufs.memory = (v4l2_memory)NODE_INIT_ZERO_VALUE; + m_v4l2ReqBufs.type = (v4l2_buf_type)NODE_INIT_ZERO_VALUE; + + m_crop.type = (v4l2_buf_type)NODE_INIT_ZERO_VALUE; + m_crop.c.top = NODE_INIT_ZERO_VALUE; + m_crop.c.left = NODE_INIT_ZERO_VALUE; + m_crop.c.width = NODE_INIT_ZERO_VALUE; + m_crop.c.height =NODE_INIT_ZERO_VALUE; + + m_flagStart = false; + m_flagCreate = false; + + memset(m_flagQ, 0x00, sizeof(m_flagQ)); + m_flagStreamOn = false; + m_flagDup = false; + m_paramState = 0; + m_nodeState = 0; + m_cameraId = 0; + m_sensorId = -1; + m_videoNodeNum = -1; + + for (uint32_t i = 0; i < MAX_BUFFERS; i++) { + m_queueBufferList[i].index = NODE_INIT_NEGATIVE_VALUE; + } + + m_nodeType = NODE_TYPE_BASE; +} + +ExynosCameraNode::~ExynosCameraNode() +{ + EXYNOS_CAMERA_NODE_IN(); + + destroy(); +} + +status_t ExynosCameraNode::create() +{ + EXYNOS_CAMERA_NODE_IN(); + + m_nodeType = NODE_TYPE_BASE; + + m_flagCreate = true; + + return NO_ERROR; +} + +status_t ExynosCameraNode::create(const char *nodeName) +{ + return create(nodeName, 0); +} + +status_t ExynosCameraNode::create(const char *nodeName, int cameraId) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + if (nodeName == NULL) + return BAD_VALUE; + + if (cameraId >= 0) + m_cameraId = cameraId; + + ret = create(nodeName, nodeName); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):create fail [%d]", __FUNCTION__, __LINE__, (int)ret); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCameraNode::create(const char *nodeName, const char *nodeAlias) +{ + EXYNOS_CAMERA_NODE_IN(); + + if ((nodeName == NULL) || (nodeAlias == NULL)) + return BAD_VALUE; + + m_nodeType = NODE_TYPE_BASE; + + strncpy(m_name, nodeName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + strncpy(m_alias, nodeAlias, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + CLOGV("DEBUG(%s[%d]):Create", __FUNCTION__, __LINE__); + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_CREATED; + m_flagCreate = 
true; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::create(const char *nodeName, int cameraId, int fd) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + if (nodeName == NULL) + return BAD_VALUE; + + if (cameraId > 0) + m_cameraId = cameraId; + + ret = create(nodeName, nodeName); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):create fail [%d]", __FUNCTION__, __LINE__, (int)ret); + return ret; + } + + m_fd = fd; + + m_nodeType = NODE_TYPE_BASE; + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_OPENED; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::create( + __unused const char *nodeName, + __unused int cameraId, + __unused enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION location, + __unused ExynosJpegEncoderForCamera *jpegEncoder) +{ + EXYNOS_CAMERA_NODE_IN(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::destroy(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_DESTROYED; + m_flagCreate = false; + m_nodeStateLock.unlock(); + + m_removeItemBufferQ(); + + m_nodeType = NODE_TYPE_BASE; + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::open(int videoNodeNum) +{ + EXYNOS_CAMERA_NODE_IN(); + + CLOGV("DEBUG(%s[%d]):open", __FUNCTION__, __LINE__); + + char node_name[30]; + + if (m_nodeState != NODE_STATE_CREATED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + memset(&node_name, 0x00, sizeof(node_name)); + snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, videoNodeNum); + + if (videoNodeNum == (int)NODE_TYPE_DUMMY) { + m_nodeType = NODE_TYPE_DUMMY; + m_dummyIndexQ.clear(); + + CLOGW("WARN(%s[%d]): dummy node opened", __FUNCTION__, __LINE__); + } else { + m_fd = exynos_v4l2_open(node_name, O_RDWR, 0); + if (m_fd < 0) { + CLOGE("ERR(%s[%d]):exynos_v4l2_open(%s) fail, ret(%d)", + __FUNCTION__, __LINE__, node_name, m_fd); + return INVALID_OPERATION; + } + CLOGD("DEBUG(%s[%d]): Node(%d)(%s) opened. 
m_fd(%d)", __FUNCTION__, __LINE__, videoNodeNum, node_name, m_fd); + } + + m_videoNodeNum = videoNodeNum; + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_OPENED; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::open(int videoNodeNum, __unused bool useThumbnailHWFC) +{ + return open(videoNodeNum); +} + +status_t ExynosCameraNode::close(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + CLOGD("DEBUG(%s[%d]): close(fd:%d)", __FUNCTION__, __LINE__, m_fd); + + if (m_nodeState == NODE_STATE_CREATED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + if (m_nodeType == NODE_TYPE_DUMMY) { + m_dummyIndexQ.clear(); + CLOGW("WARN(%s[%d]): dummy node closed", __FUNCTION__, __LINE__); + } else { + if (exynos_v4l2_close(m_fd) != 0) { + CLOGE("ERR(%s[%d]):close fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + m_nodeType = NODE_TYPE_BASE; + m_videoNodeNum = -1; + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_CREATED; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::getFd(int *fd) +{ + *fd = m_fd; + + return NO_ERROR; +} + +status_t ExynosCameraNode::getJpegEncoder(__unused ExynosJpegEncoderForCamera **jpegEncoder) +{ + return NO_ERROR; +} + +char *ExynosCameraNode::getName(void) +{ + return m_name; +} + +status_t ExynosCameraNode::setColorFormat(int v4l2Colorformat, int planesCount, enum YUV_RANGE yuvRange) +{ + EXYNOS_CAMERA_NODE_IN(); + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + m_v4l2Format.fmt.pix_mp.pixelformat = v4l2Colorformat; + m_v4l2Format.fmt.pix_mp.num_planes = planesCount; + + int v4l2ColorRange = m_YUV_RANGE_2_V4L2_COLOR_RANGE(yuvRange); + if (v4l2ColorRange < 0) { + CLOGE("ERR(%s[%d]):invalid yuvRange : %d. 
so, fail", + __FUNCTION__, __LINE__, (int)yuvRange); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + m_v4l2Format.fmt.pix_mp.colorspace = (enum v4l2_colorspace)v4l2ColorRange; + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::getColorFormat(int *v4l2Colorformat, int *planesCount, enum YUV_RANGE *yuvRange) +{ + EXYNOS_CAMERA_NODE_IN(); + + *v4l2Colorformat = m_v4l2Format.fmt.pix_mp.pixelformat; + *planesCount = m_v4l2Format.fmt.pix_mp.num_planes; + + if (yuvRange) + *yuvRange = m_V4L2_COLOR_RANGE_2_YUV_RANGE(m_v4l2Format.fmt.pix_mp.colorspace); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setQuality(__unused int quality) +{ + EXYNOS_CAMERA_NODE_IN(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setQuality(__unused const unsigned char qtable[]) +{ + EXYNOS_CAMERA_NODE_IN(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setSize(int w, int h) +{ + EXYNOS_CAMERA_NODE_IN(); + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + CLOGD("DEBUG(%s[%d]):w (%d) h (%d)", __FUNCTION__, __LINE__, w, h); + + m_v4l2Format.fmt.pix_mp.width = w; + m_v4l2Format.fmt.pix_mp.height = h; + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::getSize(int *w, int *h) +{ + EXYNOS_CAMERA_NODE_IN(); + + *w = m_v4l2Format.fmt.pix_mp.width; + *h = m_v4l2Format.fmt.pix_mp.height; + + return NO_ERROR; +} + +status_t ExynosCameraNode::setId(__unused int id) +{ + EXYNOS_CAMERA_NODE_IN(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setBufferType( + int bufferCount, + enum v4l2_buf_type type, + enum v4l2_memory bufferMemoryType) +{ + EXYNOS_CAMERA_NODE_IN(); + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + m_v4l2ReqBufs.count = bufferCount; + m_v4l2ReqBufs.type = type; + m_v4l2ReqBufs.memory = bufferMemoryType; + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::getBufferType( + int *bufferCount, + enum v4l2_buf_type *type, + enum v4l2_memory *bufferMemoryType) +{ + EXYNOS_CAMERA_NODE_IN(); + + *bufferCount = m_v4l2ReqBufs.count; + *type = (enum v4l2_buf_type)m_v4l2ReqBufs.type; + *bufferMemoryType = (enum v4l2_memory)m_v4l2ReqBufs.memory; + + return NO_ERROR; +} + +/* Should not implement in DMA_BUF and USER_PTR types */ +status_t ExynosCameraNode::queryBuf(void) +{ + return INVALID_OPERATION; +} + +status_t ExynosCameraNode::reqBuffers(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + int result = 0; + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + if (m_reqBuffers(&result) < 0) { + CLOGE("ERR(%s):m_setFmt fail result[%d]", __FUNCTION__, result); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + m_nodeState = 
NODE_STATE_IN_PREPARE; + + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::clrBuffers(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + int result = 0; + + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + if (m_clrBuffers(&result) < 0) { + CLOGE("ERR(%s):m_clrBuffers fail result[%d]", __FUNCTION__, result); + return INVALID_OPERATION; + } + +#if 0 + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); +#endif + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +unsigned int ExynosCameraNode::reqBuffersCount(void) +{ + return m_nodeRequest.getRequest(); +} + +status_t ExynosCameraNode::setControl(unsigned int id, int value) +{ + EXYNOS_CAMERA_NODE_IN(); + +/* + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("[%s] [%d] m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +*/ + + if (m_setControl(id, value) < 0) { + CLOGE("ERR(%s):m_setControl fail", __FUNCTION__); + return INVALID_OPERATION; + } + +/* + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); +*/ + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::getControl(unsigned int id, int *value) +{ + EXYNOS_CAMERA_NODE_IN(); + +/* + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("[%s] [%d] m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +*/ + + if (m_getControl(id, value) < 0) { + CLOGE("ERR(%s):m_getControl fail", __FUNCTION__); + return INVALID_OPERATION; + } + +/* + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); +*/ + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setExtControl(struct v4l2_ext_controls *ctrl) +{ + EXYNOS_CAMERA_NODE_IN(); + + if (m_setExtControl(ctrl) < 0) { + CLOGE("ERR(%s):m_setExtControl fail", __FUNCTION__); + return INVALID_OPERATION; + } + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::polling(void) +{ + EXYNOS_CAMERA_NODE_IN(); +/* + if (m_nodeState != NODE_STATE_RUNNING) { + CLOGE("[%s] [%d] m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +*/ + if (m_polling() < 0) { + CLOGE("ERR(%s):m_polling fail", __FUNCTION__); + return INVALID_OPERATION; + } +/* + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); +*/ + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setInput(int sensorId) +{ + EXYNOS_CAMERA_NODE_IN(); + + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + if (m_setInput(sensorId) < 0) { + CLOGE("ERR(%s):m_setInput fail [%d]", __FUNCTION__, sensorId); + return INVALID_OPERATION; + } + + m_sensorId = sensorId; + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +int ExynosCameraNode::getInput(void) +{ + return m_sensorId; +} + +int ExynosCameraNode::resetInput(void) +{ + m_sensorId = 
0; + + return NO_ERROR; +} + +int ExynosCameraNode::getNodeNum(void) +{ + return m_videoNodeNum; +} + +status_t ExynosCameraNode::setFormat(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + if (m_setFmt() < 0) { + CLOGE("ERR(%s):m_setFmt fail", __FUNCTION__); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setFormat(unsigned int bytesPerPlane[]) +{ + status_t ret = NO_ERROR; + + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + for (int i = 0; i < m_v4l2Format.fmt.pix_mp.num_planes; i++) + m_v4l2Format.fmt.pix_mp.plane_fmt[i].bytesperline = bytesPerPlane[i]; + + if (0 < m_v4l2Format.fmt.pix_mp.num_planes && + m_v4l2Format.fmt.pix_mp.plane_fmt[0].bytesperline < m_v4l2Format.fmt.pix_mp.width) { + CLOGW("WARN(%s[%d]):name(%s) fd %d, bytesperline(%d) < width(%d), height(%d)", + __FUNCTION__, __LINE__, + m_name, m_fd, + m_v4l2Format.fmt.pix_mp.plane_fmt[0].bytesperline, + m_v4l2Format.fmt.pix_mp.width, + m_v4l2Format.fmt.pix_mp.height); + } + + ret = setFormat(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setFormat() is failed [%d]", __FUNCTION__, __LINE__, (int)ret); + return ret; + } + + return NO_ERROR; +} +status_t ExynosCameraNode::setCrop(enum v4l2_buf_type type, int x, int y, int w, int h) +{ + EXYNOS_CAMERA_NODE_IN(); + + int ret = 0; + struct v4l2_crop crop; + memset(&crop, 0x00, sizeof(struct v4l2_crop)); + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + crop.type = type; + crop.c.top = x; + crop.c.left = y; + crop.c.width = w; + crop.c.height = h; + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):type %d, %d x %d, top %d, left %d", + __FUNCTION__, __LINE__, + crop.type, + crop.c.width, + crop.c.height, + crop.c.top, + crop.c.left); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_s_crop(m_fd, &crop); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_s_crop fail (%d)", __func__, ret); + m_nodeStateLock.unlock(); + return ret; + } + } + + /* back-up to debug */ + m_crop = crop; + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setExifInfo(__unused exif_attribute_t *exifInfo) +{ + EXYNOS_CAMERA_NODE_IN(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::setDebugInfo(__unused debug_attribute_t *debugInfo) +{ + EXYNOS_CAMERA_NODE_IN(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::start(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + if (m_nodeState != NODE_STATE_IN_PREPARE) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + if (m_streamOn() < 0) { + CLOGE("ERR(%s):m_streamOn fail", __FUNCTION__); + return INVALID_OPERATION; + } + + 
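+    /* m_streamOn() has succeeded (STREAMON is skipped for dummy nodes); publish the
+     * IN_PREPARE -> RUNNING transition under m_nodeStateLock so other threads that
+     * read m_nodeState, such as getBuffer(), observe it consistently. */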
m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_RUNNING; + m_flagStart = true; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +status_t ExynosCameraNode::stop(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + if (m_nodeState < NODE_STATE_OPENED || m_nodeState > NODE_STATE_RUNNING) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + if (m_streamOff() < 0) { + CLOGE("ERR(%s):m_streamOff fail", __FUNCTION__); + return INVALID_OPERATION; + } + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_IN_PREPARE; + m_flagStart = false; + m_nodeStateLock.unlock(); + + m_removeItemBufferQ(); + + EXYNOS_CAMERA_NODE_OUT(); + + return NO_ERROR; +} + +bool ExynosCameraNode::isCreated(void) +{ + return m_flagCreate; +} + +bool ExynosCameraNode::isStarted(void) +{ + return m_flagStart; +} + +status_t ExynosCameraNode::prepareBuffer(ExynosCameraBuffer *buf) +{ + status_t ret = NO_ERROR; + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + + memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer)); + memset(&planes, 0, sizeof(struct v4l2_plane) * VIDEO_MAX_PLANES); + + v4l2_buf.m.planes = planes; + v4l2_buf.type = m_v4l2ReqBufs.type; + v4l2_buf.memory = m_v4l2ReqBufs.memory; + v4l2_buf.index = buf->index; + v4l2_buf.length = m_v4l2Format.fmt.pix_mp.num_planes; + + for (int i = 0; i < (int)v4l2_buf.length; i++) { + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) { + v4l2_buf.m.planes[i].m.fd = (int)(buf->fd[i]); + } else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(buf->addr[i]); + } else { + CLOGE("ERR(%s[%d]):invalid srcNode->memory(%d)", __FUNCTION__, __LINE__, v4l2_buf.memory); + return INVALID_OPERATION; + } + + v4l2_buf.m.planes[i].length = (unsigned long)(buf->size[i]); + } + + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) { + v4l2_buf.m.planes[v4l2_buf.length - 1].m.fd = (int)(buf->fd[v4l2_buf.length - 1]); + } else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[v4l2_buf.length - 1].m.userptr = (unsigned long)(buf->addr[v4l2_buf.length - 1]); + } else { + CLOGE("ERR(%s[%d]):invalid meta(%d)", __FUNCTION__, __LINE__, v4l2_buf.memory); + return INVALID_OPERATION; + } + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):name(%s) fd %d, index(%d), length(%d), width(%d), height(%d), bytesperline(%d)", + __FUNCTION__, __LINE__, + m_name, + m_fd, + v4l2_buf.index, + v4l2_buf.length, + m_v4l2Format.fmt.pix_mp.width, + m_v4l2Format.fmt.pix_mp.height, + m_v4l2Format.fmt.pix_mp.plane_fmt[v4l2_buf.index].bytesperline); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + m_dummyIndexQ.push_back(v4l2_buf.index); + } else { + ret = exynos_v4l2_prepare(m_fd, &v4l2_buf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):exynos_v4l2_prepare(m_fd:%d, buf->index:%d) fail (%d)", + __FUNCTION__, __LINE__, m_fd, v4l2_buf.index, ret); + return ret; + } + } + + CLOGI("INFO(%s[%d]):prepareBuffers(%d)", __FUNCTION__, __LINE__, v4l2_buf.index); + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):name(%s) fd %d, index %d done", + __FUNCTION__, __LINE__, m_name, m_fd, v4l2_buf.index); +#endif + + return ret; +} + +status_t ExynosCameraNode::putBuffer(ExynosCameraBuffer *buf) +{ + if (buf == NULL) { + CLOGE("ERR(%s[%d]):buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (m_nodeRequest.getRequest() == 0 ) { + CLOGE("ERR(%s[%d]):requestBuf is 0, disable queue/dequeue", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + 
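+    /* Note: the RUNNING-state check below is compiled out; queuing is gated only by
+     * the request count checked above before the buffer is handed to m_qBuf(). */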
+#if 0 + if (m_nodeState != NODE_STATE_RUNNING) { + CLOGE("[%s] [%d] m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +#endif + + if (m_qBuf(buf) < 0) { + CLOGE("ERR(%s[%d]):qBuf fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + return NO_ERROR; +} + +status_t ExynosCameraNode::getBuffer(ExynosCameraBuffer *buf, int *dqIndex) +{ + int ret = NO_ERROR; + + if (m_nodeState != NODE_STATE_RUNNING) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + if (m_nodeRequest.getRequest() == 0 ) { + CLOGE("ERR(%s[%d]):requestBuf is 0, disable queue/dequeue", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + ret = m_dqBuf(buf, dqIndex); + + return ret; +} + +void ExynosCameraNode::dump(void) +{ + dumpState(); + dumpQueue(); + + return; +} + +void ExynosCameraNode::dumpState(void) +{ + CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + if (strnlen(m_name, sizeof(char) * EXYNOS_CAMERA_NAME_STR_SIZE) > 0 ) + CLOGD("m_name[%s]", m_name); + if (strnlen(m_alias, sizeof(char) * EXYNOS_CAMERA_NAME_STR_SIZE) > 0 ) + CLOGD("m_alias[%s]", m_alias); + + CLOGD("m_fd[%d], width[%d], height[%d]", + m_fd, + m_v4l2Format.fmt.pix_mp.width, + m_v4l2Format.fmt.pix_mp.height); + CLOGD("m_format[%d], m_planes[%d], m_buffers[%d], m_memory[%d]", + m_v4l2Format.fmt.pix_mp.pixelformat, + m_v4l2Format.fmt.pix_mp.num_planes, + m_v4l2ReqBufs.count, + m_v4l2ReqBufs.memory); + CLOGD("m_type[%d], m_flagStart[%d], m_flagCreate[%d]", + m_v4l2ReqBufs.type, + m_flagStart, + m_flagCreate); + CLOGD("m_crop type[%d], X : Y[%d : %d], W : H[%d : %d]", + m_crop.type, + m_crop.c.top, + m_crop.c.left, + m_crop.c.width, + m_crop.c.height); + + CLOGI("Node state(%d)", m_nodeRequest.getState()); + + return; +} + +void ExynosCameraNode::dumpQueue(void) +{ + m_printBufferQ(); + + return; +} + +int ExynosCameraNode::m_pixelDepth(void) +{ + return NO_ERROR; +} + +bool ExynosCameraNode::m_getFlagQ(__unused int index) +{ + return NO_ERROR; +} + +bool ExynosCameraNode::m_setFlagQ(__unused int index, __unused bool toggle) +{ + return NO_ERROR; +} + +int ExynosCameraNode::m_polling(void) +{ + struct pollfd events; + + if (m_nodeType == NODE_TYPE_DUMMY) + return 0; + + /* 50 msec * 40 = 2sec */ + int cnt = 40; + long sec = 50; /* 50 msec */ + + int ret = 0; + int pollRet = 0; + + events.fd = m_fd; + events.events = POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM | POLLERR; + events.revents = 0; + + while (cnt--) { + pollRet = poll(&events, 1, sec); + if (pollRet < 0) { + ret = -1; + } else if (0 < pollRet) { + if (events.revents & POLLIN) { + break; + } else if (events.revents & POLLERR) { + ret = -1; + } + } + } + + if (ret < 0 || cnt <= 0) { + CLOGE("ERR(%s):poll[%d], pollRet(%d) event(0x%x), cnt(%d)", + __FUNCTION__, m_fd, pollRet, events.revents, cnt); + + if (cnt <= 0) + ret = -1; + } + + return ret; +} + +int ExynosCameraNode::m_streamOn(void) +{ + int ret = 0; +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):fd %d, type %d", + __FUNCTION__, __LINE__, m_fd, (int)m_v4l2ReqBufs.type); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_streamon(m_fd, (enum v4l2_buf_type)m_v4l2ReqBufs.type); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_streamon(fd:%d, type:%d) fail (%d)", + __FUNCTION__, m_fd, (int)m_v4l2ReqBufs.type, ret); + return ret; + } + } + + m_nodeRequest.setState(NODE_REQUEST_STATE_READY); + + return ret; +} + +int 
ExynosCameraNode::m_streamOff(void) +{ + int ret = 0; + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):fd %d, type %d", + __FUNCTION__, __LINE__, m_fd, (int)m_v4l2ReqBufs.type); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_streamoff(m_fd, (enum v4l2_buf_type)m_v4l2ReqBufs.type); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_streamoff(fd:%d, type:%d) fail (%d)", + __FUNCTION__, m_fd, (int)m_v4l2ReqBufs.type, ret); + return ret; + } + } + + m_nodeRequest.setState(NODE_REQUEST_STATE_STOPPED); + +/* + for (int i = 0; i < VIDEO_MAX_FRAME; i++) + cam_int_m_setQ(node, i, false); +*/ + return ret; +} + +int ExynosCameraNode::m_setInput(int id) +{ + int ret = 0; + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s):fd %d, index %d", __FUNCTION__, m_fd, id); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_s_input(m_fd, id); + if (ret < 0) + CLOGE("ERR(%s):exynos_v4l2_s_input(fd:%d, index:%d) fail (%d)", + __FUNCTION__, m_fd, id, ret); + } + + return ret; +} + +int ExynosCameraNode::m_setFmt(void) +{ + int ret = 0; + + if (m_v4l2Format.fmt.pix_mp.num_planes <= 0) { + CLOGE("ERR(%s):S_FMT, Out of bound : Number of element plane(%d)", + __FUNCTION__, m_v4l2Format.fmt.pix_mp.num_planes); + return -1; + } + +/* #ifdef EXYNOS_CAMERA_NODE_TRACE */ + CLOGD("DEBUG(%s[%d]):type %d, %d x %d, format %d, yuvRange %d", + __FUNCTION__, __LINE__, + m_v4l2ReqBufs.type, + m_v4l2Format.fmt.pix_mp.width, + m_v4l2Format.fmt.pix_mp.height, + m_v4l2Format.fmt.pix_mp.pixelformat, + m_v4l2Format.fmt.pix_mp.colorspace); +/* #endif */ + + m_v4l2Format.type = m_v4l2ReqBufs.type; + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_s_fmt(m_fd, &m_v4l2Format); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_s_fmt(fd:%d) fail (%d)", + __FUNCTION__, m_fd, ret); + return ret; + } + } + + return ret; +} + +int ExynosCameraNode::m_reqBuffers(int *reqCount) +{ + int ret = 0; + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):fd %d, count %d, type %d, memory %d", + __FUNCTION__, __LINE__, + m_fd, + m_v4l2ReqBufs.count, + m_v4l2ReqBufs.type, + m_v4l2ReqBufs.memory); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_reqbufs(m_fd, &m_v4l2ReqBufs); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_reqbufs(fd:%d, count:%d) fail (%d)", + __FUNCTION__, m_fd, m_v4l2ReqBufs.count, ret); + return ret; + } + } + +#if 0 + for (int i = 0; i < VIDEO_MAX_FRAME; i++) + cam_int_m_setQ(node, i, false); +#endif + + m_nodeRequest.setRequest(m_v4l2ReqBufs.count); + + *reqCount = m_v4l2ReqBufs.count; + + return ret; +} + +void ExynosCameraNode::removeItemBufferQ() +{ + m_removeItemBufferQ(); +} + +int ExynosCameraNode::m_clrBuffers(int *reqCount) +{ + int ret = 0; + + if (m_nodeRequest.getRequest() == 0) { + CLOGE("ERR(%s):not yet requested. 
so, just return 0(fd:%d, count:%d)", + __FUNCTION__, m_fd, m_v4l2ReqBufs.count); + + m_v4l2ReqBufs.count = 0; + + goto done; + } + + m_v4l2ReqBufs.count = 0; + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):fd %d, count %d, type %d, memory %d", + __FUNCTION__, __LINE__, + m_fd, + m_v4l2ReqBufs.count, + m_v4l2ReqBufs.type, + m_v4l2ReqBufs.memory); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_reqbufs(m_fd, &m_v4l2ReqBufs); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_reqbufs(fd:%d, count:%d) fail (%d)", + __FUNCTION__, m_fd, m_v4l2ReqBufs.count, ret); + return ret; + } + } + +done: + +#if 0 + for (int i = 0; i < VIDEO_MAX_FRAME; i++) + cam_int_m_setQ(node, i, false); +#endif + + m_nodeRequest.setRequest(m_v4l2ReqBufs.count); + + *reqCount = m_v4l2ReqBufs.count; + + return ret; +} + +int ExynosCameraNode::m_setCrop(__unused int v4l2BufType, __unused ExynosRect *rect) +{ + return NO_ERROR; +} + +int ExynosCameraNode::setParam(struct v4l2_streamparm *stream_parm) +{ + int ret = 0; + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_s_parm(m_fd, stream_parm); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_s_parm(fd:%d) fail (%d)", + __FUNCTION__, m_fd, ret); + } + } + + return ret; +} + +int ExynosCameraNode::m_setControl(unsigned int id, int value) +{ + int ret = 0; + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_s_ctrl(m_fd, id, value); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_s_ctrl(fd:%d) fail (%d) [id %d, value %d]", + __FUNCTION__, m_fd, ret, id, value); + return ret; + } + } + + return ret; +} + +int ExynosCameraNode::m_getControl(unsigned int id, int *value) +{ + int ret = 0; + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* nop */ + } else { + ret = exynos_v4l2_g_ctrl(m_fd, id, value); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_g_ctrl(fd:%d) fail (%d) [id %d, value %d]", + __FUNCTION__, m_fd, ret, id, *value); + return ret; + } + } + + return ret; +} + +int ExynosCameraNode::m_setExtControl(struct v4l2_ext_controls *ctrl) +{ + int ret = 0; + + if (m_nodeType == NODE_TYPE_DUMMY) { + /* no operation */ + } else { + ret = exynos_v4l2_s_ext_ctrl(m_fd, ctrl); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_s_ext_ctrl(fd:%d) fail (%d)", + __FUNCTION__, m_fd, ret); + return ret; + } + } + + return ret; +} + +int ExynosCameraNode::m_qBuf(ExynosCameraBuffer *buf) +{ + int ret = 0; + int error = 0; + int i = 0; + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + if( m_isExistBufferQ(buf) ) { + CLOGE("ERR(%s):queue index already exist!! 
index(%d)", __FUNCTION__, buf->index); + m_printBufferQ(); + return -1; + } + + memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer)); + memset(&planes, 0, sizeof(struct v4l2_plane) * VIDEO_MAX_PLANES); + + v4l2_buf.m.planes = planes; + v4l2_buf.type = m_v4l2ReqBufs.type; + v4l2_buf.memory = m_v4l2ReqBufs.memory; + v4l2_buf.index = buf->index; + v4l2_buf.length = m_v4l2Format.fmt.pix_mp.num_planes; + + for (i = 0; i < (int)v4l2_buf.length; i++) { + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) { + v4l2_buf.m.planes[i].m.fd = (int)(buf->fd[i]); + } else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(buf->addr[i]); + } else { + CLOGE("ERR(%s):invalid srcNode->memory(%d)", __FUNCTION__, v4l2_buf.memory); + return -1; + } + + v4l2_buf.m.planes[i].length = (unsigned long)(buf->size[i]); + } + + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) { + v4l2_buf.m.planes[v4l2_buf.length - 1].m.fd = (int)(buf->fd[v4l2_buf.length - 1]); + } else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[v4l2_buf.length - 1].m.userptr = (unsigned long)(buf->addr[v4l2_buf.length - 1]); + } else { + CLOGE("ERR(%s):invalid meta(%d)", __FUNCTION__, v4l2_buf.memory); + return -1; + } + + /* set fence */ + v4l2_buf.flags = V4L2_BUF_FLAG_USE_SYNC; + v4l2_buf.reserved = buf->acquireFence; + + int orgFence = v4l2_buf.reserved; + + m_nodeRequest.setState(NODE_REQUEST_STATE_QBUF_BLOCK); + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):name(%s) fd %d, index(%d), length(%d), width(%d), height(%d), bytesperline(%d)", + __FUNCTION__, __LINE__, + m_name, + m_fd, + v4l2_buf.index, + v4l2_buf.length, + m_v4l2Format.fmt.pix_mp.width, + m_v4l2Format.fmt.pix_mp.height, + m_v4l2Format.fmt.pix_mp.plane_fmt[v4l2_buf.index].bytesperline); +#endif + +#ifdef EXYNOS_CAMERA_NODE_TRACE_Q_DURATION + m_qTimer.stop(); + CLOGD("DEBUG(%s[%d]):EXYNOS_CAMERA_NODE_TRACE_Q_DURATION name(%s) : %d msec", + __FUNCTION__, __LINE__, m_name, (int)m_qTimer.durationUsecs() / 1000); + m_qTimer.start(); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + m_dummyIndexQ.push_back(v4l2_buf.index); + } else { + ret = exynos_v4l2_qbuf(m_fd, &v4l2_buf); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_qbuf(m_fd:%d, buf->index:%d) fail (%d)", + __FUNCTION__, m_fd, buf->index, ret); + return ret; + } + } + + /* + * Disable below close. + * This seems not need. + */ +#if 0 + /* + * After give acquire fence to driver, + * HAL don't need any more. + */ + if (0 <= buf->acquireFence) { + exynos_v4l2_close(buf->acquireFence); + } +#endif + + /* Important : Give the changed fence (by driver) to caller. */ + buf->releaseFence = static_cast(v4l2_buf.reserved); + + CLOGV("DEBUG(%s[%d]):fence:v4l2_buf.reserved %d -> %d", + __FUNCTION__, __LINE__, orgFence, static_cast(v4l2_buf.reserved)); + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):name(%s) fd %d, index %d done", + __FUNCTION__, __LINE__, + m_name, m_fd, v4l2_buf.index); +#endif + + m_nodeRequest.setState(NODE_REQUEST_STATE_QBUF_DONE); + +#if 0 + cam_int_m_setQ(node, index, true); +#endif + + error = m_putBufferQ(buf, &buf->index); + if( error != NO_ERROR ) { + ret = error; + } + /* m_printBufferQ(); */ + + return ret; +} + +int ExynosCameraNode::m_mBuf(ExynosCameraBuffer *buf) +{ + int ret = 0; + int i = 0; + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + if(m_isExistBufferQ(buf)) { + CLOGE("ERR(%s):queue index already exist!! 
index(%d)", __FUNCTION__, buf->index); + m_printBufferQ(); + return -1; + } + + memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer)); + memset(&planes, 0, sizeof(struct v4l2_plane) * VIDEO_MAX_PLANES); + + v4l2_buf.m.planes = planes; + v4l2_buf.type = m_v4l2ReqBufs.type; + v4l2_buf.memory = m_v4l2ReqBufs.memory; + v4l2_buf.index = buf->index; + v4l2_buf.length = m_v4l2Format.fmt.pix_mp.num_planes; + + for (i = 0; i < (int)v4l2_buf.length; i++) { + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) { + v4l2_buf.m.planes[i].m.fd = (int)(buf->fd[i]); + } else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(buf->addr[i]); + } else { + CLOGE("ERR(%s):invalid srcNode->memory(%d)", __FUNCTION__, v4l2_buf.memory); + return -1; + } + + v4l2_buf.m.planes[i].length = (unsigned long)(buf->size[i]); + } + + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) { + v4l2_buf.m.planes[v4l2_buf.length - 1].m.fd = (int)(buf->fd[v4l2_buf.length - 1]); + } else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[v4l2_buf.length - 1].m.userptr = (unsigned long)(buf->addr[v4l2_buf.length - 1]); + } else { + CLOGE("ERR(%s):invalid meta(%d)", __FUNCTION__, v4l2_buf.memory); + return -1; + } + + +#ifndef SUPPORT_64BITS + ret = exynos_v4l2_s_ctrl(m_fd, V4L2_CID_IS_MAP_BUFFER, (int)&v4l2_buf); + if (ret < 0) { + CLOGE("ERR(%s):exynos_v4l2_s_ctrl(m_fd:%d, buf->index:%d) fail (%d)", + __FUNCTION__, m_fd, buf->index, ret); + return ret; + } +#endif +#if 0 + cam_int_m_setQ(node, index, true); +#endif + + /* m_printBufferQ(); */ + + return ret; +} + +int ExynosCameraNode::m_dqBuf(ExynosCameraBuffer *buf, int *dqIndex) +{ + int ret = 0; + int error = 0; + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + + memset(&v4l2_buf, 0, sizeof(struct v4l2_buffer)); + memset(&planes, 0, sizeof(struct v4l2_plane) * VIDEO_MAX_PLANES); + + v4l2_buf.type = m_v4l2ReqBufs.type; + v4l2_buf.memory = m_v4l2ReqBufs.memory; + v4l2_buf.m.planes = planes; + v4l2_buf.length = m_v4l2Format.fmt.pix_mp.num_planes; + + m_nodeRequest.setState(NODE_REQUEST_STATE_DQBUF_BLOCK); + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):name(%s) fd %d", + __FUNCTION__, __LINE__, + m_name, m_fd); +#endif + +#ifdef EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION + m_dqTimer.stop(); + CLOGD("DEBUG(%s[%d]):EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION name(%s) : %d msec", + __FUNCTION__, __LINE__, m_name, (int)m_dqTimer.durationUsecs() / 1000); + m_dqTimer.start(); +#endif + +#ifdef EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION + ExynosCameraDurationTimer m_dqPerformanceTimer; + m_dqPerformanceTimer.start(); +#endif + + if (m_nodeType == NODE_TYPE_DUMMY) { + if (m_dummyIndexQ.size() == 0) { + v4l2_buf.index = 0; + } else { + List::iterator r; + + r = m_dummyIndexQ.begin()++; + v4l2_buf.index = *r; + m_dummyIndexQ.erase(r); + } + } else { + ret = exynos_v4l2_dqbuf(m_fd, &v4l2_buf); + if (ret < 0) { + if (ret != -EAGAIN) + CLOGE("ERR(%s):exynos_v4l2_dqbuf(fd:%d) fail (%d)", __FUNCTION__, m_fd, ret); + + return ret; + } + } + +#ifdef EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION + m_dqPerformanceTimer.stop(); + CLOGD("DEBUG(%s[%d]):EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION : DQ_PERFORMANCE name(%s) : %d msec", + __FUNCTION__, __LINE__, m_name, (int)m_dqPerformanceTimer.durationUsecs() / 1000); +#endif + +#ifdef EXYNOS_CAMERA_NODE_TRACE + CLOGD("DEBUG(%s[%d]):name(%s) fd %d, index %d done", + __FUNCTION__, __LINE__, + m_name, m_fd, v4l2_buf.index); +#endif + + m_nodeRequest.setState(NODE_REQUEST_STATE_DQBUF_DONE); + +#if 1 +//#ifdef 
USE_FOR_DTP + if (v4l2_buf.flags & V4L2_BUF_FLAG_ERROR) { + CLOGE("ERR(%s):exynos_v4l2_dqbuf(fd:%d) returned with error (%d)", + __FUNCTION__, m_fd, V4L2_BUF_FLAG_ERROR); + ret = -1; + } +#endif + +#if 0 + cam_int_m_setQ(node, v4l2_buf.index, false); +#endif + + *dqIndex = v4l2_buf.index; + error = m_getBufferQ(buf, dqIndex); + if( error != NO_ERROR ) { + ret = error; + } + + return ret; +} + +status_t ExynosCameraNode::m_putBufferQ(ExynosCameraBuffer *buf, int *qindex) +{ + Mutex::Autolock lock(m_queueBufferListLock); + if( m_queueBufferList[*qindex].index == NODE_INIT_NEGATIVE_VALUE ) { + m_queueBufferList[*qindex] = *buf; + } else { + CLOGE("ERR(%s[%d]): buf.index(%d) already exist!",__FUNCTION__, __LINE__, *qindex); + m_printBufferQ(); + return BAD_VALUE; + } + + return NO_ERROR; +} + +status_t ExynosCameraNode::mapBuffer(ExynosCameraBuffer *buf) +{ + if (buf == NULL) { + CLOGE("ERR(%s[%d]):buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + +#if 0 + if (m_nodeState != NODE_STATE_RUNNING) { + CLOGE("[%s] [%d] m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +#endif + + m_mBuf(buf); + + return NO_ERROR; +} + +status_t ExynosCameraNode::m_getBufferQ(ExynosCameraBuffer *buf, int *dqindex) +{ + Mutex::Autolock lock(m_queueBufferListLock); + if( m_queueBufferList[*dqindex].index == NODE_INIT_NEGATIVE_VALUE ) { + CLOGE("ERR(%s[%d]): buf.index(%d) not exist!",__FUNCTION__, __LINE__, *dqindex); + m_printBufferQ(); + return BAD_VALUE; + } else { + *buf = m_queueBufferList[*dqindex]; + m_queueBufferList[*dqindex].index = NODE_INIT_NEGATIVE_VALUE; + } + return NO_ERROR; +} + +bool ExynosCameraNode::m_isExistBufferQ(ExynosCameraBuffer *buf) +{ + Mutex::Autolock lock(m_queueBufferListLock); + + if( m_queueBufferList[buf->index].index != NODE_INIT_NEGATIVE_VALUE) { + return true; + } else { + return false; + } + return true; +} + +void ExynosCameraNode::m_printBufferQ() +{ + bool empty = true; + + for (uint32_t i = 0; i < MAX_BUFFERS; i++) { + if( m_queueBufferList[i].index != NODE_INIT_NEGATIVE_VALUE ) { + CLOGD("DEBUG(%s[%d]): index(%d) buf.index(%d)",__FUNCTION__, __LINE__, i, m_queueBufferList[i].index); + displayExynosBuffer(&m_queueBufferList[i]); + empty = false; + } + } + + if( empty ) { + CLOGD("DEBUG(%s):no items", __FUNCTION__); + return; + } + return; +} + +void ExynosCameraNode::m_removeItemBufferQ() +{ + List::iterator r; + Mutex::Autolock lock(m_queueBufferListLock); + +#if 0 + for (r = m_queueBufferList.begin(); r != m_queueBufferList.end(); r++) { + m_queueBufferList.erase(r); + } +#else + for (uint32_t i = 0; i < MAX_BUFFERS; i++) { + m_queueBufferList[i].index = NODE_INIT_NEGATIVE_VALUE; + } +#endif + return; +} + + +int ExynosCameraNode::m_YUV_RANGE_2_V4L2_COLOR_RANGE(enum YUV_RANGE yuvRange) +{ + int v42ColorRange = -1; + + switch (yuvRange) { + case YUV_FULL_RANGE: + v42ColorRange = 7; /* V4L2_COLORSPACE_JPEG */ + break; + case YUV_LIMITED_RANGE: + v42ColorRange = 4; /* V4L2_COLORSPACE_BT878 */ + break; + default: + CLOGE("ERR(%s[%d]):invalid yuvRange : %d. so, fail", + __FUNCTION__, __LINE__, (int)yuvRange); + break; + } + + return v42ColorRange; +} + +enum YUV_RANGE ExynosCameraNode::m_V4L2_COLOR_RANGE_2_YUV_RANGE(int v4l2ColorRange) +{ + enum YUV_RANGE yuvRange = YUV_FULL_RANGE; + + switch (v4l2ColorRange) { + case 7: + yuvRange = YUV_FULL_RANGE; + break; + case 4: + yuvRange = YUV_LIMITED_RANGE; + break; + default: + CLOGE("ERR(%s[%d]):invalid v4l2ColorRange : %d. 
so, fail", + __FUNCTION__, __LINE__, (int)v4l2ColorRange); + break; + } + + return yuvRange; +} +}; diff --git a/libcamera/common_v2/ExynosCameraNode.h b/libcamera/common_v2/ExynosCameraNode.h new file mode 100644 index 0000000..1a674a3 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraNode.h @@ -0,0 +1,420 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file ExynosCameraNode.h + * \brief header file for ExynosCameraNode + * \author Pilsun Jang(pilsun.jang@samsung.com) + * \date 2013/6/27 + * + */ + +#ifndef EXYNOS_CAMERA_NODE_H__ +#define EXYNOS_CAMERA_NODE_H__ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include "cutils/properties.h" +#include + +#include "exynos_format.h" +#include "ExynosCameraBuffer.h" +#include "ExynosRect.h" +/* #include "ExynosFrame.h" */ +#include "ExynosCameraAutoTimer.h" + +#include "ExynosJpegEncoderForCamera.h" +#include "exynos_v4l2.h" + +#include "fimc-is-metadata.h" + +/* #include "ExynosCameraState.h" */ +#include "ExynosCameraConfig.h" + +using namespace android; + +namespace android { +/* #define EXYNOS_CAMERA_NODE_TRACE */ +/* #define EXYNOS_CAMERA_NODE_TRACE_Q_DURATION */ +/* #define EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION */ + +#ifdef EXYNOS_CAMERA_NODE_TRACE +#define EXYNOS_CAMERA_NODE_IN()   CLOGD("DEBUG(%s[%d]):IN...m_nodeState[%d]", __FUNCTION__, __LINE__, m_nodeState) +#define EXYNOS_CAMERA_NODE_OUT()  CLOGD("DEBUG(%s[%d]):OUT..m_nodeState[%d]", __FUNCTION__, __LINE__, m_nodeState) +#else +#define EXYNOS_CAMERA_NODE_IN()   ((void *)0) +#define EXYNOS_CAMERA_NODE_OUT()  ((void *)0) +#endif + +/* ExynosCameraNode + * + * ingroup Exynos + */ + +/* + * Mapping table + * + * |------------------------------------------------------------------ + * | ExynosCamera        | ExynosCameraNode             | Driver IOCTL + * |------------------------------------------------------------------ + * | setSize()           | setSize() - NONE             | S_FMT + * | setColorFormat()    | setColorFormat() - NONE      | S_FMT + * | setBufferType()     | setBufferTyoe() - NONE       | S_FMT + * | prepare()           | prepare() - m_setInput()     | S_INPUT + * |                     | prepare() - m_setFmt()       | S_FMT + * | reqBuffer()         | reqBuffer() - m_reqBuf()     | REQ_BUF + * | queryBuffer()       | queryBuffer() - m_queryBuf() | QUERY_BUF + * | setBuffer()         | setBuffer() - m_qBuf()       | Q_BUF + * | getBuffer()         | getBuffer() - m_qBuf()       | Q_BUF + * | putBuffer()         | putBuffer() - m_dqBuf()      | DQ_BUF + * | start()             | start() - m_streamOn()       | STREAM_ON + * | polling()           | polling() - m_poll()         | POLL + * |------------------------------------------------------------------ + * | setBufferRef()      |                              | + * | getSize()           |                              | + * | getColorFormat()    |                              | + * | getBufferType()     |                              | + * |------------------------------------------------------------------ + * + */ + +enum node_request_state { + NODE_REQUEST_STATE_BASE = 0, + NODE_REQUEST_STATE_READY,
NODE_REQUEST_STATE_QBUF_BLOCK, + NODE_REQUEST_STATE_QBUF_DONE, + NODE_REQUEST_STATE_DQBUF_BLOCK, + NODE_REQUEST_STATE_DQBUF_DONE, + NODE_REQUEST_STATE_STOPPED, + NODE_REQUEST_STATE_INVALID +}; + +enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION{ + NODE_LOCATION_SRC= 0, + NODE_LOCATION_DST +}; + +class ExynosCameraNodeRequest { +public: + /* Constructor */ + ExynosCameraNodeRequest(); + /* Destructor */ + virtual ~ExynosCameraNodeRequest(); + + void setState(enum node_request_state state); + enum node_request_state getState(void); + + void setRequest(unsigned int requestCount); + unsigned int getRequest(void); + +private: + unsigned int m_requestCount; + enum node_request_state m_requestState; +}; + +#define NODE_PREPARE_SIZE (1) +#define NODE_PREPARE_FORMAT (2) +#define NODE_PREPARE_BUFFER_TYPE (3) +#define NODE_PREPARE_BUFFER_REF (4) +#define NODE_PREPARE_COMPLETE (NODE_PREPARE_SIZE | \ + NODE_PREPARE_FORMAT | \ + NODE_PREPARE_BUFFER_TYPE | \ + NODE_PREPARE_BUFFER_REF) + +#define NODE_INIT_NEGATIVE_VALUE -1 +#define NODE_INIT_ZERO_VALUE 0 + +class ExynosCameraNode { +public: + enum EXYNOS_CAMERA_NODE_TYPE { + NODE_TYPE_BASE = 0, + NODE_TYPE_DUMMY = 999, + NODE_TYPE_MAX, + }; + + enum EXYNOS_CAMERA_NODE_STATE { + NODE_STATE_BASE = 0, + NODE_STATE_NONE, + NODE_STATE_CREATED, + NODE_STATE_OPENED, + NODE_STATE_IN_PREPARE, + NODE_STATE_RUNNING, + NODE_STATE_DESTROYED, + NODE_STATE_MAX + }; + +public: + /* Constructor */ + ExynosCameraNode(); + /* Destructor */ + virtual ~ExynosCameraNode(); + + /* Create the instance */ + virtual status_t create(); + /* Create the instance */ + virtual status_t create(const char *nodeName); + /* Create the instance */ + virtual status_t create(const char *nodeName, int cameraId); + /* Create the instance */ + virtual status_t create(const char *nodeName, const char *nodeAlias); + /* Create the instance */ + virtual status_t create(const char *nodeName, int cameraId, int fd); + /* Create the instance */ + virtual status_t create(const char *nodeName, + int cameraId, + enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION location, + ExynosJpegEncoderForCamera *jpegEncoder); + + /* Destroy the instance */ + virtual status_t destroy(void); + + /* open Node */ + virtual status_t open(int videoNodeNum); + /* open Node */ + virtual status_t open(int videoNodeNum, bool useThumbnailHWFC); + /* close Node */ + virtual status_t close(void); + /* get file descriptor */ + virtual status_t getFd(int *fd); + /* get Jpeg Encoder */ + virtual status_t getJpegEncoder(ExynosJpegEncoderForCamera **jpegEncoder); + /* get name */ + virtual char *getName(void); + /* get video Num */ + virtual int getNodeNum(void); + + /* set v4l2 color format */ + virtual status_t setColorFormat(int v4l2Colorformat, int planesCount, enum YUV_RANGE yuvRange = YUV_FULL_RANGE); + /* get v4l2 color format */ + virtual status_t getColorFormat(int *v4l2Colorformat, int *planesCount, enum YUV_RANGE *yuvRange = NULL); + + /* set size */ + virtual status_t setQuality(int quality); + virtual status_t setQuality(const unsigned char qtable[]); + + /* set size */ + virtual status_t setSize(int w, int h); + /* get size */ + virtual status_t getSize(int *w, int *h); + + /* set id */ + virtual status_t setId(int id); + + /* set memory info */ + virtual status_t setBufferType( + int bufferCount, + enum v4l2_buf_type type, + enum v4l2_memory bufferMemoryType); + /* get memory info */ + virtual status_t getBufferType( + int *bufferCount, + enum v4l2_buf_type *type, + enum v4l2_memory *bufferMemoryType); + + /* query buffer */ + virtual 
status_t queryBuf(void); + /* request buffers */ + virtual status_t reqBuffers(void); + /* clear buffers */ + virtual status_t clrBuffers(void); + /* check buffers */ + virtual unsigned int reqBuffersCount(void); + + /* set id */ + virtual status_t setControl(unsigned int id, int value); + virtual status_t getControl(unsigned int id, int *value); + + virtual status_t setExtControl(struct v4l2_ext_controls *ctrl); + + /* polling */ + virtual status_t polling(void); + + /* setInput */ + virtual status_t setInput(int sensorId); + /* getInput */ + virtual int getInput(void); + /* resetInput */ + virtual int resetInput(void); + + /* setCrop */ + virtual status_t setCrop(enum v4l2_buf_type type, int x, int y, int w, int h); + + /* setFormat */ + virtual status_t setFormat(void); + virtual status_t setFormat(unsigned int bytesPerPlane[]); + + /* set capture information */ + virtual status_t setExifInfo(exif_attribute_t *exifInfo); + virtual status_t setDebugInfo(debug_attribute_t *debugInfo); + + /* startNode */ + virtual status_t start(void); + /* stopNode */ + virtual status_t stop(void); + + /* Check if the instance was created */ + virtual bool isCreated(void); + /* Check if it start */ + virtual bool isStarted(void); + + /* prepare Buffers */ + virtual status_t prepareBuffer(ExynosCameraBuffer *buf); + + /* putBuffer */ + virtual status_t putBuffer(ExynosCameraBuffer *buf); + virtual status_t mapBuffer(ExynosCameraBuffer *buf); + + /* getBuffer */ + virtual status_t getBuffer(ExynosCameraBuffer *buf, int *dqIndex); + + /* dump the object info */ + virtual void dump(void); + /* dump state info */ + virtual void dumpState(void); + /* dump queue info */ + virtual void dumpQueue(void); + + /* set param */ + virtual int setParam(struct v4l2_streamparm *stream_parm); + virtual void removeItemBufferQ(); + +protected: + /* get pixel format */ + int m_pixelDepth(void); + /* check whether queued on index */ + bool m_getFlagQ(int index); + /* set queue flag on index */ + bool m_setFlagQ(int index, bool toggle); + /* polling */ + int m_polling(void); + + /* stream on */ + int m_streamOn(void); + /* stream off */ + int m_streamOff(void); + + /* set input */ + int m_setInput(int id); + /* set format */ + int m_setFmt(void); + /* req buf */ + int m_reqBuffers(int *reqCount); + /* clear buf */ + int m_clrBuffers(int *reqCount); + /* set crop */ + int m_setCrop(int v4l2BufType, ExynosRect *rect); + /* set contorl */ + int m_setControl(unsigned int id, int value); + /* get contorl */ + int m_getControl(unsigned int id, int *value); + /* set ext control */ + int m_setExtControl(struct v4l2_ext_controls *ctrl); + + /* qbuf from src, with metaBuf */ + int m_qBuf(ExynosCameraBuffer *buf); + int m_mBuf(ExynosCameraBuffer *buf); + + /* dqbuf */ + int m_dqBuf(ExynosCameraBuffer *buf, int *dqIndex); + + /* Buffer trace */ + status_t m_putBufferQ(ExynosCameraBuffer *buf, int *qindex); + status_t m_getBufferQ(ExynosCameraBuffer *buf, int *dqindex); + bool m_isExistBufferQ(ExynosCameraBuffer *buf); + void m_printBufferQ(); + void m_removeItemBufferQ(); + int m_YUV_RANGE_2_V4L2_COLOR_RANGE(enum YUV_RANGE yuvRange); + enum YUV_RANGE m_V4L2_COLOR_RANGE_2_YUV_RANGE(int v4l2ColorRange); + + /* + * thoes member value should be declare in private + * but we declare in publuc to support backward compatibility + */ +public: + +protected: + ExynosCameraNodeRequest m_nodeRequest; + + bool m_flagStart; + bool m_flagCreate; + + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + char m_alias[EXYNOS_CAMERA_NAME_STR_SIZE]; + + int m_cameraId; 
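+    /* m_sensorId holds the value last passed to setInput() (S_INPUT); m_fd and
+     * m_videoNodeNum identify the video node instance set up by open(). */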
+ int m_sensorId; + int m_fd; + int m_videoNodeNum; + struct v4l2_format m_v4l2Format; + struct v4l2_requestbuffers m_v4l2ReqBufs; + struct v4l2_crop m_crop; + + bool m_flagQ[VIDEO_MAX_FRAME]; + Mutex m_qLock; + + bool m_flagStreamOn; + /* for reprocessing */ + bool m_flagDup; + + int m_paramState; + + mutable Mutex m_nodeStateLock; + mutable Mutex m_nodeActionLock; + + Mutex m_queueBufferListLock; + ExynosCameraBuffer m_queueBufferList[MAX_BUFFERS]; + +/* ExynosCameraState m_nodeStateMgr; */ + int m_nodeState; + + enum EXYNOS_CAMERA_NODE_TYPE m_nodeType; + List m_dummyIndexQ; + +#ifdef EXYNOS_CAMERA_NODE_TRACE_Q_DURATION + ExynosCameraDurationTimer m_qTimer; +#endif + +#ifdef EXYNOS_CAMERA_NODE_TRACE_DQ_DURATION + ExynosCameraDurationTimer m_dqTimer; +#endif +}; + +}; +#endif /* EXYNOS_CAMERA_NODE_H__ */ + + diff --git a/libcamera/common_v2/ExynosCameraNodeJpegHAL.cpp b/libcamera/common_v2/ExynosCameraNodeJpegHAL.cpp new file mode 100644 index 0000000..0c77ef1 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraNodeJpegHAL.cpp @@ -0,0 +1,740 @@ +/* + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraNodeJpegHAL" + +#include"ExynosCameraNodeJpegHAL.h" +#include"ExynosCameraUtils.h" + +namespace android { + +/* HACK */ +exif_attribute_t exifInfo__; + +ExynosCameraNodeJpegHAL::ExynosCameraNodeJpegHAL() +{ + memset(m_name, 0x00, sizeof(m_name)); + memset(m_alias, 0x00, sizeof(m_alias)); + memset(&m_v4l2Format, 0x00, sizeof(struct v4l2_format)); + memset(&m_v4l2ReqBufs, 0x00, sizeof(struct v4l2_requestbuffers)); + memset(&m_exifInfo, 0x00, sizeof(exif_attribute_t)); + memset(&m_debugInfo, 0x00, sizeof(debug_attribute_t)); + + m_fd = NODE_INIT_NEGATIVE_VALUE; + + m_v4l2Format.fmt.pix_mp.width = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.height = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.pixelformat = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.num_planes = NODE_INIT_NEGATIVE_VALUE; + m_v4l2Format.fmt.pix_mp.colorspace = (enum v4l2_colorspace)7; /* V4L2_COLORSPACE_JPEG */ + /* + * 7 : Full YuvRange, 4 : Limited YuvRange + * you can refer m_YUV_RANGE_2_V4L2_COLOR_RANGE() and m_V4L2_COLOR_RANGE_2_YUV_RANGE() + */ + + m_v4l2ReqBufs.count = NODE_INIT_NEGATIVE_VALUE; + m_v4l2ReqBufs.memory = (v4l2_memory)NODE_INIT_ZERO_VALUE; + m_v4l2ReqBufs.type = (v4l2_buf_type)NODE_INIT_ZERO_VALUE; + + m_crop.type = (v4l2_buf_type)NODE_INIT_ZERO_VALUE; + m_crop.c.top = NODE_INIT_ZERO_VALUE; + m_crop.c.left = NODE_INIT_ZERO_VALUE; + m_crop.c.width = NODE_INIT_ZERO_VALUE; + m_crop.c.height =NODE_INIT_ZERO_VALUE; + + m_flagStart = false; + m_flagCreate = false; + + memset(m_flagQ, 0x00, sizeof(m_flagQ)); + m_flagStreamOn = false; + m_flagDup = false; + m_paramState = 0; + m_nodeState = 0; + m_cameraId = 0; + m_sensorId = -1; + m_videoNodeNum = -1; + + for (uint32_t i = 0; i < MAX_BUFFERS; i++) { + m_queueBufferList[i].index = NODE_INIT_NEGATIVE_VALUE; + } + + m_nodeType = 
NODE_TYPE_BASE; + + m_jpegEncoder = NULL; + m_jpegNodeLocation = NODE_LOCATION_DST; +} + +ExynosCameraNodeJpegHAL::~ExynosCameraNodeJpegHAL() +{ + EXYNOS_CAMERA_NODE_IN(); + + destroy(); +} + +status_t ExynosCameraNodeJpegHAL::create( + const char *nodeName, + int cameraId, + enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION location, + ExynosJpegEncoderForCamera *jpegEncoder) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + if (nodeName == NULL) { + CLOGE("ERR(%s[%d]):nodeName is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (cameraId >= 0) + m_cameraId = cameraId; + + if (jpegEncoder == NULL) { + CLOGE("ERR(%s[%d]):jpegEncoder is NULL!!", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* + * Parent's function was hided, because child's function was overrode. + * So, it should call parent's function explicitly. + */ + ret = ExynosCameraNode::create(nodeName, nodeName); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):create fail [%d]", __FUNCTION__, __LINE__, (int)ret); + return ret; + } + + m_jpegEncoder = jpegEncoder; + + m_nodeType = NODE_TYPE_BASE; + m_jpegNodeLocation = location; + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::open(int videoNodeNum) +{ + return open(videoNodeNum, false); +} + +status_t ExynosCameraNodeJpegHAL::open(int videoNodeNum, bool useThumbnailHWFC) +{ + EXYNOS_CAMERA_NODE_IN(); + + CLOGD("DEBUG(%s[%d]):open", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + char node_name[30]; + + if (m_nodeState != NODE_STATE_CREATED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + memset(&node_name, 0x00, sizeof(node_name)); + snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, videoNodeNum); + + if (m_jpegEncoder == NULL) { + m_jpegEncoder = new ExynosJpegEncoderForCamera(useThumbnailHWFC); + if (m_jpegEncoder == NULL) { + CLOGE("ERR(%s[%d]):Cannot create ExynosJpegEncoderForCamera class", + __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = m_jpegEncoder->create(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera create fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + return ret; + } + + m_jpegEncoder->EnableHWFC(); + /* + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera Enable HWFC fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + m_jpegEncoder->destroy(); + return ret; + } + */ + + CLOGD("DEBUG(%s[%d]):Node(%d)(%s) opened.", + __FUNCTION__, __LINE__, videoNodeNum, node_name); + } + + m_videoNodeNum = videoNodeNum; + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_OPENED; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::close(void) +{ + EXYNOS_CAMERA_NODE_IN(); + + CLOGD("DEBUG(%s[%d]): close", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + int maxIndex = sizeof(m_debugInfo.debugData)/sizeof(char *); + + if (m_nodeState == NODE_STATE_CREATED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } + + for(int i = 0; i < maxIndex; i++) { + if(m_debugInfo.debugData[i] != NULL) { + ALOGD("%s: delete DebugInfo[%d].", __FUNCTION__, i); + delete m_debugInfo.debugData[i]; + } + m_debugInfo.debugData[i] = NULL; + m_debugInfo.debugSize[i] = 0; + } + + if (m_jpegEncoder != NULL) { + if (m_jpegNodeLocation == NODE_LOCATION_SRC + && m_videoNodeNum == FIMC_IS_VIDEO_HWFC_JPEG_NUM) { + ret = 
m_jpegEncoder->destroy(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):close fail", __FUNCTION__, __LINE__); + return ret; + } + + delete m_jpegEncoder; + } + m_jpegEncoder = NULL; + } + + m_nodeType = NODE_TYPE_BASE; + m_videoNodeNum = -1; + + m_nodeStateLock.lock(); + m_nodeState = NODE_STATE_CREATED; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::getFd(int *fd) +{ + *fd = m_fd; + + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::getJpegEncoder(ExynosJpegEncoderForCamera **jpegEncoder) +{ + *jpegEncoder = m_jpegEncoder; + + return NO_ERROR; +} + +char *ExynosCameraNodeJpegHAL::getName(void) +{ + return m_name; +} + +status_t ExynosCameraNodeJpegHAL::setColorFormat(int v4l2Colorformat, __unused int planesCount, __unused enum YUV_RANGE yuvRange) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + if(m_jpegNodeLocation == NODE_LOCATION_SRC) { + ret = m_jpegEncoder->setColorFormat(v4l2Colorformat); + } else { + int jpegFormat = V4L2_PIX_FMT_JPEG_422; + switch (v4l2Colorformat) { + case V4L2_PIX_FMT_YUYV: + jpegFormat = V4L2_PIX_FMT_JPEG_422; + break; + case V4L2_PIX_FMT_NV21: + jpegFormat = V4L2_PIX_FMT_JPEG_420; + break; + default: + CLOGE("ERR(%s[%d]):Invalid jpegColorFormat(%d)!", __FUNCTION__, __LINE__, v4l2Colorformat); + return INVALID_OPERATION; + } + ret = m_jpegEncoder->setJpegFormat(jpegFormat); + } + + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setColorFormat fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::setQuality(int quality) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + CLOGD("DEBUG(%s[%d]):Quality (%d)", __FUNCTION__, __LINE__, quality); + + switch (m_videoNodeNum) { + case FIMC_IS_VIDEO_HWFC_JPEG_NUM: + ret = m_jpegEncoder->setQuality(quality); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setQuality fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + m_nodeStateLock.unlock(); + return ret; + } + break; + case FIMC_IS_VIDEO_HWFC_THUMB_NUM: + ret = m_jpegEncoder->setThumbnailQuality(quality); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setThumbnailQuality fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + m_nodeStateLock.unlock(); + return ret; + } + break; + default: + CLOGE("ERR(%s[%d]):Invalid node num(%d)", __FUNCTION__, __LINE__, ret); + break; + } + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::setQuality(const unsigned char qtable[]) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + 
m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + CLOGD("DEBUG(%s[%d]):setQuality(qtable[])", __FUNCTION__, __LINE__); + + switch (m_videoNodeNum) { + case FIMC_IS_VIDEO_HWFC_JPEG_NUM: + ret = m_jpegEncoder->setQuality(qtable); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setQuality fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + m_nodeStateLock.unlock(); + return ret; + } + break; + default: + CLOGE("ERR(%s[%d]):Invalid node num(%d)", __FUNCTION__, __LINE__, ret); + break; + } + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::setSize(int w, int h) +{ + EXYNOS_CAMERA_NODE_IN(); + + status_t ret = NO_ERROR; + + m_nodeStateLock.lock(); + if (m_nodeState != NODE_STATE_IN_PREPARE && m_nodeState != NODE_STATE_OPENED) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + m_nodeStateLock.unlock(); + return INVALID_OPERATION; + } + + CLOGD("DEBUG(%s[%d]):w (%d) h (%d)", __FUNCTION__, __LINE__, w, h); + + switch (m_videoNodeNum) { + case FIMC_IS_VIDEO_HWFC_JPEG_NUM: + ret = m_jpegEncoder->setSize(w, h); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setSize fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + m_nodeStateLock.unlock(); + return ret; + } + break; + case FIMC_IS_VIDEO_HWFC_THUMB_NUM: + ret = m_jpegEncoder->setThumbnailSize(w, h); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setThumbnailSize fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + m_nodeStateLock.unlock(); + return ret; + } + break; + default: + CLOGE("ERR(%s[%d]):Invalid node num(%d)", __FUNCTION__, __LINE__, ret); + break; + } + + m_nodeState = NODE_STATE_IN_PREPARE; + m_nodeStateLock.unlock(); + + EXYNOS_CAMERA_NODE_OUT(); + + return ret; +} + +status_t ExynosCameraNodeJpegHAL::reqBuffers(void) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::clrBuffers(void) +{ + m_jpegEncoder->destroy(); + return NO_ERROR; +} + +unsigned int ExynosCameraNodeJpegHAL::reqBuffersCount(void) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::setControl(__unused unsigned int id, __unused int value) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::getControl(__unused unsigned int id, __unused int *value) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::polling(void) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::setInput(int sensorId) +{ + m_sensorId = sensorId; + + return NO_ERROR; +} + +int ExynosCameraNodeJpegHAL::getInput(void) +{ + return m_sensorId; +} + +int ExynosCameraNodeJpegHAL::resetInput(void) +{ + m_sensorId = 0; + + return NO_ERROR; +} + +int ExynosCameraNodeJpegHAL::getNodeNum(void) +{ + return m_videoNodeNum; +} + +status_t ExynosCameraNodeJpegHAL::setFormat(void) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::setFormat(__unused unsigned int bytesPerPlane[]) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::setCrop(__unused enum v4l2_buf_type type, + __unused int x, __unused int y, __unused int w, __unused int h) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::setExifInfo(exif_attribute_t *exifInfo) +{ + memcpy(&m_exifInfo, exifInfo, sizeof(exif_attribute_t)); + memcpy(&exifInfo__, exifInfo, sizeof(exif_attribute_t)); + + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::setDebugInfo(debug_attribute_t *debugInfo) +{ + /* Num of AppMarker */ + m_debugInfo.num_of_appmarker = 
debugInfo->num_of_appmarker; + + for(int i = 0; i < debugInfo->num_of_appmarker; i++) { + int app_marker_index = debugInfo->idx[i][0]; + + /* App Marker Index */ + m_debugInfo.idx[i][0] = app_marker_index; + + if(debugInfo->debugSize[app_marker_index] != 0) { + if (!m_debugInfo.debugData[app_marker_index]) { + CLOGD("(%s[%d]) : Alloc DebugInfo Buffer(%d)", + __FUNCTION__, + __LINE__, + debugInfo->debugSize[app_marker_index]); + m_debugInfo.debugData[app_marker_index] = new char[debugInfo->debugSize[app_marker_index]+1]; + } + + /* Data */ + memset((void *)m_debugInfo.debugData[app_marker_index], 0, debugInfo->debugSize[app_marker_index]); + memcpy((void *)(m_debugInfo.debugData[app_marker_index]), + (void *)(debugInfo->debugData[app_marker_index]), + debugInfo->debugSize[app_marker_index]); + /* Size */ + m_debugInfo.debugSize[app_marker_index] = debugInfo->debugSize[app_marker_index]; + } + } + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::start(void) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::stop(void) +{ + return NO_ERROR; +} + +bool ExynosCameraNodeJpegHAL::isCreated(void) +{ + return m_flagCreate; +} + +bool ExynosCameraNodeJpegHAL::isStarted(void) +{ + return m_flagStart; +} + +status_t ExynosCameraNodeJpegHAL::prepareBuffer(__unused ExynosCameraBuffer *buf) +{ + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::putBuffer(ExynosCameraBuffer *buf) +{ + status_t ret = NO_ERROR; + + if (buf == NULL) { + CLOGE("ERR(%s[%d]):buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + +#if 0 + if (m_nodeState != NODE_STATE_RUNNING) { + CLOGE("[%s] [%d] m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +#endif + + if(m_jpegNodeLocation == NODE_LOCATION_SRC) { + switch (m_videoNodeNum) { + case FIMC_IS_VIDEO_HWFC_JPEG_NUM: + ret = m_jpegEncoder->setInBuf((int *)&(buf->fd), (int *)buf->size); + break; + case FIMC_IS_VIDEO_HWFC_THUMB_NUM: + ret = m_jpegEncoder->setInBuf2((int *)&(buf->fd), (int *)buf->size); + break; + default: + CLOGE("ERR(%s[%d]):Invalid node num(%d)", __FUNCTION__, __LINE__, ret); + break; + } + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setInBuffer fail, ret(%d)", + __FUNCTION__, __LINE__, ret); + return ret; + } + m_srcBuffer = *buf; + } else { + switch (m_videoNodeNum) { + case FIMC_IS_VIDEO_HWFC_JPEG_NUM: + ret = m_jpegEncoder->setOutBuf(buf->fd[0], buf->size[0] + buf->size[1] + buf->size[2]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera setOutBuffer fail. ret(%d)", + __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_jpegEncoder->updateConfig(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera updateConfig fail. ret(%d)", + __FUNCTION__, __LINE__, ret); + return ret; + } + + CLOGI("INFO(%s[%d]):Start encode. bufferIndex %d", + __FUNCTION__, __LINE__, buf->index); + /* HACK */ + /* ret = m_jpegEncoder->encode((int *)&buf->size, &m_exifInfo, (char **)buf->addr, &m_debugInfo); */ + ret = m_jpegEncoder->encode((int *)&buf->size, &exifInfo__, (char **)buf->addr, &m_debugInfo); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):ExynosJpegEncoderForCamera encode fail. 
ret(%d)", + __FUNCTION__, __LINE__, ret); + return ret; + } + break; + case FIMC_IS_VIDEO_HWFC_THUMB_NUM: + break; + default: + CLOGE("ERR(%s[%d]):Invalid node num(%d)", __FUNCTION__, __LINE__, ret); + break; + } + + m_dstBuffer = *buf; + } + + return NO_ERROR; +} + +status_t ExynosCameraNodeJpegHAL::getBuffer(ExynosCameraBuffer *buf, int *dqIndex) +{ + status_t ret = NO_ERROR; + +#if 0 + if (m_nodeState != NODE_STATE_RUNNING) { + CLOGE("ERR(%s[%d]):m_nodeState = [%d] is not valid", + __FUNCTION__, __LINE__, (int)m_nodeState); + return INVALID_OPERATION; + } +#endif + + if(m_jpegNodeLocation == NODE_LOCATION_SRC) { + *buf = m_srcBuffer; + *dqIndex = m_srcBuffer.index; + } else { + if (m_videoNodeNum == FIMC_IS_VIDEO_HWFC_JPEG_NUM) { + ssize_t jpegSize = -1; + /* Blocking function. + * This function call is returned when JPEG encoding operation is finished. + */ + CLOGI("INFO(%s[%d]):WaitForCompression. bufferIndex %d", + __FUNCTION__, __LINE__, m_dstBuffer.index); + jpegSize = m_jpegEncoder->WaitForCompression(); + if (jpegSize < 0) { + CLOGE("ERR(%s[%d]):Failed to JPEG Encoding. bufferIndex %d jpegSize %ld", + __FUNCTION__, __LINE__, m_dstBuffer.index, jpegSize); + ret = INVALID_OPERATION; + } + m_dstBuffer.size[0] = jpegSize; + } + *buf = m_dstBuffer; + *dqIndex = m_dstBuffer.index; + } + + return ret; +} + +void ExynosCameraNodeJpegHAL::dump(void) +{ + dumpState(); + dumpQueue(); + + return; +} + +void ExynosCameraNodeJpegHAL::dumpState(void) +{ + CLOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + if (strnlen(m_name, sizeof(char) * EXYNOS_CAMERA_NAME_STR_SIZE) > 0 ) + CLOGD("m_name[%s]", m_name); + if (strnlen(m_alias, sizeof(char) * EXYNOS_CAMERA_NAME_STR_SIZE) > 0 ) + CLOGD("m_alias[%s]", m_alias); + + CLOGD("m_fd[%d], width[%d], height[%d]", + m_fd, + m_v4l2Format.fmt.pix_mp.width, + m_v4l2Format.fmt.pix_mp.height); + CLOGD("m_format[%d], m_planes[%d], m_buffers[%d], m_memory[%d]", + m_v4l2Format.fmt.pix_mp.pixelformat, + m_v4l2Format.fmt.pix_mp.num_planes, + m_v4l2ReqBufs.count, + m_v4l2ReqBufs.memory); + CLOGD("m_type[%d], m_flagStart[%d], m_flagCreate[%d]", + m_v4l2ReqBufs.type, + m_flagStart, + m_flagCreate); + CLOGD("m_crop type[%d], X : Y[%d : %d], W : H[%d : %d]", + m_crop.type, + m_crop.c.top, + m_crop.c.left, + m_crop.c.width, + m_crop.c.height); + + CLOGI("Node state(%d)", m_nodeRequest.getState()); + + return; +} + +void ExynosCameraNodeJpegHAL::dumpQueue(void) +{ + return; +} + +}; diff --git a/libcamera/common_v2/ExynosCameraNodeJpegHAL.h b/libcamera/common_v2/ExynosCameraNodeJpegHAL.h new file mode 100644 index 0000000..24d3cd0 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraNodeJpegHAL.h @@ -0,0 +1,173 @@ +/* + * Copyright 2015, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed toggle an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef EXYNOS_CAMERA_NODE_JPEG_HAL_H__ +#define EXYNOS_CAMERA_NODE_JPEG_HAL_H__ + +#include "ExynosCameraNode.h" + +using namespace android; + +namespace android { +/* ExynosCameraNode + * + * ingroup Exynos + */ + +/* + * Mapping table + * + * |------------------------------------------------------------------------- + * | ExynosCamera | ExynosCameraNode | Jpeg HAL Interface + * |------------------------------------------------------------------------- + * | setSize() | setSize() - NONE | S_FMT + * | setColorFormat() | setColorFormat() - NONE | S_FMT + * | setBufferType() | setBufferTyoe() - NONE | S_FMT + * | prepare() | prepare() - m_setInput() | S_INPUT + * | | prepare() - m_setFmt() | S_FMT + * | reqBuffer() | reqBuffer() - m_reqBuf() | REQ_BUF + * | queryBuffer() | queryBuffer() - m_queryBuf() | QUERY_BUF + * | setBuffer() | setBuffer() - m_qBuf() | Q_BUF + * | getBuffer() | getBuffer() - m_qBuf() | Q_BUF + * | putBuffer() | putBuffer() - m_dqBuf() | DQ_BUF + * | start() | start() - m_streamOn() | STREAM_ON + * | polling() | polling() - m_poll() | POLL + * |------------------------------------------------------------------------- + * | setBufferRef() | | + * | getSize() | | + * | getColorFormat() | | + * | getBufferType() | | + * |------------------------------------------------------------------------- + * + */ + +class ExynosCameraNodeJpegHAL : public virtual ExynosCameraNode { +private: + +public: + /* Constructor */ + ExynosCameraNodeJpegHAL(); + /* Destructor */ + virtual ~ExynosCameraNodeJpegHAL(); + + /* Create the instance */ + status_t create(const char *nodeName, + int cameraId, + enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION location, + ExynosJpegEncoderForCamera *jpegEncoder); + + /* open Node */ + status_t open(int videoNodeNum); + /* open Node */ + status_t open(int videoNodeNum, bool useThumbnailHWFC); + /* close Node */ + status_t close(void); + /* get file descriptor */ + status_t getFd(int *fd); + /* get Jpeg Encoder */ + status_t getJpegEncoder(ExynosJpegEncoderForCamera **jpegEncoder); + /* get name */ + char *getName(void); + /* get video Num */ + int getNodeNum(void); + + /* set v4l2 color format */ + status_t setColorFormat(int v4l2Colorformat, int planeCount, enum YUV_RANGE yuvRange = YUV_FULL_RANGE); + + /* set size */ + status_t setQuality(int quality); + status_t setQuality(const unsigned char qtable[]); + + /* set size */ + status_t setSize(int w, int h); + + /* request buffers */ + status_t reqBuffers(void); + /* clear buffers */ + status_t clrBuffers(void); + /* check buffers */ + unsigned int reqBuffersCount(void); + + /* set id */ + status_t setControl(unsigned int id, int value); + status_t getControl(unsigned int id, int *value); + + /* polling */ + status_t polling(void); + + /* setInput */ + status_t setInput(int sensorId); + /* getInput */ + int getInput(void); + /* resetInput */ + int resetInput(void); + + /* setCrop */ + status_t setCrop(enum v4l2_buf_type type, int x, int y, int w, int h); + + /* setFormat */ + status_t setFormat(void); + status_t setFormat(unsigned int bytesPerPlane[]); + + /* set capture information */ + status_t setExifInfo(exif_attribute_t *exifInfo); + status_t setDebugInfo(debug_attribute_t *debugInfo); + + /* startNode */ + status_t start(void); + /* stopNode */ + status_t stop(void); + + /* Check if the instance was created */ + bool isCreated(void); + /* Check if it start */ + bool isStarted(void); + + /* prepare Buffers */ + virtual status_t prepareBuffer(ExynosCameraBuffer *buf); + + /* putBuffer */ + status_t 
putBuffer(ExynosCameraBuffer *buf); + + /* getBuffer */ + status_t getBuffer(ExynosCameraBuffer *buf, int *dqIndex); + + /* dump the object info */ + void dump(void); + /* dump state info */ + void dumpState(void); + /* dump queue info */ + void dumpQueue(void); + +private: + + /* + * thoes member value should be declare in private + * but we declare in publuc to support backward compatibility + */ +public: + +private: + ExynosJpegEncoderForCamera *m_jpegEncoder; + enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION m_jpegNodeLocation; + ExynosCameraBuffer m_srcBuffer; + ExynosCameraBuffer m_dstBuffer; + exif_attribute_t m_exifInfo; + debug_attribute_t m_debugInfo; +}; + +}; +#endif /* EXYNOS_CAMERA_NODE_JPEG_HAL_H__ */ diff --git a/libcamera/common_v2/ExynosCameraRequestManager.cpp b/libcamera/common_v2/ExynosCameraRequestManager.cpp new file mode 100644 index 0000000..8ea88fe --- /dev/null +++ b/libcamera/common_v2/ExynosCameraRequestManager.cpp @@ -0,0 +1,2814 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraRequestManager" + +#include "ExynosCameraRequestManager.h" + +namespace android { + +ExynosCamera3RequestResult::ExynosCamera3RequestResult(uint32_t key, uint32_t frameCount, EXYNOS_REQUEST_RESULT::TYPE type, camera3_capture_result *captureResult, camera3_notify_msg_t *notityMsg) +{ + m_init(); + + m_key = key; + m_type = type; + m_frameCount = frameCount; + + switch (type) { + case EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY: + if (notityMsg == NULL) { + ALOGE("ERR(%s[%d]):ExynosCamera3RequestResult CALLBACK_NOTIFY_ONLY frameCount(%u) notityMsg is NULL notityMsg(%p) ", __FUNCTION__, __LINE__, frameCount, notityMsg); + } else { + memcpy(&m_notityMsg, notityMsg, sizeof(camera3_notify_msg_t)); + } + break; + + case EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY: + if (captureResult != NULL) { + ALOGE("ERR(%s[%d]):ExynosCamera3RequestResult CALLBACK_BUFFER_ONLY frameCount(%u) captureResult is NULL captureResult(%p) ", __FUNCTION__, __LINE__, frameCount, captureResult); + } + break; + + case EXYNOS_REQUEST_RESULT::CALLBACK_PARTIAL_3AA: + if (captureResult != NULL) { + ALOGE("ERR(%s[%d]):ExynosCamera3RequestResult CALLBACK_PARTIAL_3AA frameCount(%u) captureResult is NULL captureResult(%p) ", __FUNCTION__, __LINE__, frameCount, captureResult); + } + break; + + case EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT: + if (captureResult == NULL) { + ALOGE("ERR(%s[%d]):ExynosCamera3RequestResult CALLBACK_ALL_RESULT frameCount(%u) captureResult is NULL captureResult(%p) ", __FUNCTION__, __LINE__, frameCount, captureResult); + } else { + memcpy(&m_captureResult, captureResult, sizeof(camera3_capture_result_t)); + } + break; + + case EXYNOS_REQUEST_RESULT::CALLBACK_INVALID: + default: + ALOGE("ERR(%s[%d]):ExynosCamera3RequestResult type have INVALID value type(%d) frameCount(%u) ", __FUNCTION__, __LINE__, type, frameCount); + + break; + } + +} + 
+ExynosCamera3RequestResult::~ExynosCamera3RequestResult() +{ + m_deinit(); +} + +status_t ExynosCamera3RequestResult::m_init() +{ + status_t ret = NO_ERROR; + m_key = 0; + m_type = EXYNOS_REQUEST_RESULT::CALLBACK_INVALID; + m_frameCount = 0; + memset(&m_captureResult, 0x00, sizeof(camera3_capture_result_t)); + memset(&m_notityMsg, 0x00, sizeof(camera3_notify_msg_t)); + + m_streamBufferList.clear(); + + return ret; +} + +status_t ExynosCamera3RequestResult::m_deinit() +{ + status_t ret = NO_ERROR; + m_key = 0; + m_type = EXYNOS_REQUEST_RESULT::CALLBACK_INVALID; + m_frameCount = 0; + memset(&m_captureResult, 0x00, sizeof(camera3_capture_result_t)); + memset(&m_notityMsg, 0x00, sizeof(camera3_notify_msg_t)); + + return ret; +} + +uint32_t ExynosCamera3RequestResult::getKey() +{ + return m_key; +} + +uint32_t ExynosCamera3RequestResult::getFrameCount() +{ + return m_frameCount; +} + +EXYNOS_REQUEST_RESULT::TYPE ExynosCamera3RequestResult::getType() +{ + EXYNOS_REQUEST_RESULT::TYPE ret = EXYNOS_REQUEST_RESULT::CALLBACK_INVALID; + switch (m_type) { + case EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT: + case EXYNOS_REQUEST_RESULT::CALLBACK_PARTIAL_3AA: + ret = m_type; + break; + case EXYNOS_REQUEST_RESULT::CALLBACK_INVALID: + default: + ALOGE("ERR(%s[%d]):getResultType type have INVALID value type(%d) key(%u) frameCount(%u) ", __FUNCTION__, __LINE__, m_type, m_key, m_frameCount); + break; + } + return ret; +} + +status_t ExynosCamera3RequestResult::setCaptureRequest(camera3_capture_result_t *captureResult) +{ + memcpy(&m_captureResult, captureResult, sizeof(camera3_capture_result_t)); + return OK; +} + +status_t ExynosCamera3RequestResult::getCaptureRequest(camera3_capture_result_t *captureResult) +{ + memcpy(captureResult, &m_captureResult, sizeof(camera3_capture_result_t)); + return OK; +} + +status_t ExynosCamera3RequestResult::setNofityRequest(camera3_notify_msg_t *notifyResult) +{ + memcpy(&m_notityMsg, notifyResult, sizeof(camera3_notify_msg_t)); + return OK; +} + +status_t ExynosCamera3RequestResult::getNofityRequest(camera3_notify_msg_t *notifyResult) +{ + memcpy(notifyResult, &m_notityMsg, sizeof(camera3_notify_msg_t)); + return OK; +} + +status_t ExynosCamera3RequestResult::pushStreamBuffer(camera3_stream_buffer_t *streamBuffer) +{ + status_t ret = NO_ERROR; + if (streamBuffer == NULL){ + ALOGE("ERR(%s[%d]):pushStreamBuffer is failed streamBuffer == NULL streamBuffer(%p)", __FUNCTION__, __LINE__, streamBuffer); + ret = INVALID_OPERATION; + return ret; + } + + ret = m_pushBuffer(streamBuffer, &m_streamBufferList, &m_streamBufferListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):m_pushBuffer is failed ", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + return ret; + } + + return ret; +} + +status_t ExynosCamera3RequestResult::popStreamBuffer(camera3_stream_buffer_t *streamBuffer) +{ + status_t ret = NO_ERROR; + if (streamBuffer == NULL){ + ALOGE("ERR(%s[%d]):popStreamBuffer is failed streamBuffer == NULL streamBuffer(%p)", __FUNCTION__, __LINE__, streamBuffer); + ret = INVALID_OPERATION; + return ret; + } + + ret = m_popBuffer(streamBuffer, &m_streamBufferList, &m_streamBufferListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):m_pushBuffer is failed ", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + return ret; + } + + return ret; +} + +int ExynosCamera3RequestResult::getNumOfStreamBuffer() +{ + return m_getNumOfBuffer(&m_streamBufferList, &m_streamBufferListLock); +} + +status_t 
ExynosCamera3RequestResult::m_pushBuffer(camera3_stream_buffer_t *src, StreamBufferList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + camera3_stream_buffer_t *dst = new camera3_stream_buffer_t; + memcpy(dst, src, sizeof(camera3_stream_buffer_t)); + + lock->lock(); + list->push_back(dst); + lock->unlock(); + return ret; +} + +status_t ExynosCamera3RequestResult::m_popBuffer(camera3_stream_buffer_t *dst, StreamBufferList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + camera3_stream_buffer_t *src = NULL; + + lock->lock(); + if (list->size() > 0) { + src = list->front(); + list->pop_front(); + lock->unlock(); + } else { + lock->unlock(); + ALOGE("ERR(%s[%d]):m_popBuffer failed, size(%zu)", __FUNCTION__, __LINE__, list->size()); + ret = INVALID_OPERATION; + return ret; + } + + memcpy(dst, src, sizeof(camera3_stream_buffer_t)); + return ret; +} + +int ExynosCamera3RequestResult::m_getNumOfBuffer(StreamBufferList *list, Mutex *lock) +{ + int count = 0; + lock->lock(); + count = list->size(); + lock->unlock(); + return count; +} +#if 0 +ExynosCameraCbRequest::ExynosCameraCbRequest(uint32_t frameCount) +{ + m_init(); + m_frameCount = frameCount; +} + +ExynosCameraCbRequest::~ExynosCameraCbRequest() +{ + m_deinit(); +} + +uint32_t ExynosCameraCbRequest::getFrameCount() +{ + return m_frameCount; +} + +status_t ExynosCameraCbRequest::pushRequest(ResultRequest result) +{ + status_t ret = NO_ERROR; + ret = m_push(result, &m_callbackList, &m_callbackListLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):push request is failed, result frameCount(%u) type(%d)", __FUNCTION__, __LINE__, result->getFrameCount(), result->getType()); + } + + return ret; +} + +status_t ExynosCameraCbRequest::popRequest(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList) +{ + status_t ret = NO_ERROR; + + resultList->clear(); + + ret = m_pop(reqType, &m_callbackList, resultList, &m_callbackListLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_pop request is failed, request type(%d) resultSize(%d)", __FUNCTION__, __LINE__, reqType, resultList->size()); + } + + return ret; +} + +status_t ExynosCameraCbRequest::getRequest(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList) +{ + status_t ret = NO_ERROR; + ret = m_get(reqType, &m_callbackList, resultList, &m_callbackListLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_get request is failed, request type(%d) resultSize(%d)", __FUNCTION__, __LINE__, reqType, resultList->size()); + } + + return ret; +} + +status_t ExynosCameraCbRequest::setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag) +{ + status_t ret = NO_ERROR; + ret = m_setCallbackDone(reqType, flag, &m_statusLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_get request is failed, request type(%d) ", __FUNCTION__, __LINE__, reqType); + } + + return ret; +} + +bool ExynosCameraCbRequest::getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType) +{ + bool ret = false; + ret = m_getCallbackDone(reqType, &m_statusLock); + return ret; +} + +bool ExynosCameraCbRequest::isComplete() +{ + bool ret = false; + bool notify = false; + bool capture = false; + + notify = m_getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY, &m_statusLock); + capture = m_getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT, &m_statusLock); + + if (notify == true && capture == true) { + ret = true; + } + + return ret; +} + +status_t ExynosCameraCbRequest::m_init() +{ + status_t ret = NO_ERROR; + + m_callbackList.clear(); + for (int i = 0 ; i < EXYNOS_REQUEST_RESULT::CALLBACK_MAX ; i++) { + m_status[i] = false; + } 
+ return ret; +} + +status_t ExynosCameraCbRequest::m_deinit() +{ + status_t ret = NO_ERROR; + ResultRequestListIterator iter; + ResultRequest result; + + if (m_callbackList.size() > 0) { + ALOGE("ERR(%s[%d]):delete cb objects, but result size is not ZERO frameCount(%u) result size(%u)", __FUNCTION__, __LINE__, m_frameCount, m_callbackList.size()); + for (iter = m_callbackList.begin(); iter != m_callbackList.end();) { + result = *iter; + ALOGE("ERR(%s[%d]):delete cb objects, frameCount(%u) type(%d)", __FUNCTION__, __LINE__, result->getFrameCount(), result->getType()); + m_callbackList.erase(iter++); + result = NULL; + } + } + + m_callbackList.clear(); + for (int i = 0 ; i < EXYNOS_REQUEST_RESULT::CALLBACK_MAX ; i++) { + m_status[i] = false; + } + + return ret; +} + +status_t ExynosCameraCbRequest::m_push(ResultRequest result, ResultRequestList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + if (result->getFrameCount() != m_frameCount) { + ALOGE("ERR(%s[%d]):m_push, check m_frame(%u) getFrameCount(%u)", __FUNCTION__, __LINE__, m_frameCount, result->getFrameCount()); + } + + lock->lock(); + list->push_back(result); + lock->unlock(); + return ret; +} + +status_t ExynosCameraCbRequest::m_pop(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *list, ResultRequestList *resultList, Mutex *lock) +{ + status_t ret = NO_ERROR; + ResultRequestListIterator iter; + + ResultRequest obj; + lock->lock(); + + if (list->size() > 0) { + for (iter = list->begin() ; iter != list->end() ;) { + obj = *iter; + if (obj->getType() == reqType) { + resultList->push_back(obj); + list->erase(iter++); + } else { + iter++; + } + } + } + + lock->unlock(); + return ret; +} + +status_t ExynosCameraCbRequest::m_get(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *list, ResultRequestList *resultList, Mutex *lock) +{ + status_t ret = NO_ERROR; + ResultRequestListIterator iter; + + ResultRequest obj; + lock->lock(); + + if (list->size() > 0) { + for (iter = list->begin() ; iter != list->end() ;iter++) { + obj = *iter; + if (obj->getType() == reqType) { + resultList->push_back(obj); + } + } + } else { + obj = NULL; + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):m_getCallbackResults failed, size is ZERO, reqType(%d) size(%d)", __FUNCTION__, __LINE__, reqType, list->size()); + } + + lock->unlock(); + return ret; +} + +status_t ExynosCameraCbRequest::m_setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag, Mutex *lock) +{ + status_t ret = NO_ERROR; + if (reqType >= EXYNOS_REQUEST_RESULT::CALLBACK_MAX) { + ALOGE("ERR(%s[%d]):m_setCallback failed, status erray out of bounded reqType(%d)", __FUNCTION__, __LINE__, reqType); + ret = INVALID_OPERATION; + return ret; + } + + lock->lock(); + m_status[reqType] = flag; + lock->unlock(); + return ret; +} + +bool ExynosCameraCbRequest::m_getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, Mutex *lock) +{ + bool ret = false; + if (reqType >= EXYNOS_REQUEST_RESULT::CALLBACK_MAX) { + ALOGE("ERR(%s[%d]):m_getCallback failed, status erray out of bounded reqType(%d)", __FUNCTION__, __LINE__, reqType); + return ret; + } + + lock->lock(); + ret = m_status[reqType]; + lock->unlock(); + return ret; +} +#endif + +ExynosCamera3Request::ExynosCamera3Request(camera3_capture_request_t* request, CameraMetadata previousMeta) +{ + ExynosCameraStream *stream = NULL; + int streamId = -1; + + m_init(); + + m_request = new camera3_capture_request_t; + memcpy(m_request, request, sizeof(camera3_capture_request_t)); + memset(m_streamIdList, 0x00, sizeof(m_streamIdList)); + + /* Deep copy the 
input buffer object, because the Camera sevice can reuse it + in successive request with different contents. + */ + if(request->input_buffer != NULL) { + ALOGD("DEBUG(%s[%d]):Allocating input buffer (%p)", __FUNCTION__, __LINE__, request->input_buffer); + m_request->input_buffer = new camera3_stream_buffer_t(); + memcpy(m_request->input_buffer, request->input_buffer, sizeof(camera3_stream_buffer_t)); + } + + m_key = m_request->frame_number; + m_numOfOutputBuffers = request->num_output_buffers; + m_isNeedInternalFrame = false; + + /* Deep copy the output buffer objects as well */ + camera3_stream_buffer_t* newOutputBuffers = NULL; + if((request != NULL) && (request->output_buffers != NULL) && (m_numOfOutputBuffers > 0)) { + newOutputBuffers = new camera3_stream_buffer_t[m_numOfOutputBuffers]; + memcpy(newOutputBuffers, request->output_buffers, sizeof(camera3_stream_buffer_t) * m_numOfOutputBuffers); + } + /* Nasty casting to assign a value to const pointer */ + *(camera3_stream_buffer_t**)(&m_request->output_buffers) = newOutputBuffers; + + for (int i = 0; i < m_numOfOutputBuffers; i++) { + stream = static_cast(request->output_buffers[i].stream->priv); + stream->getID(&streamId); + m_streamIdList[i] = streamId; + } + + if (request->settings != NULL) { + m_serviceMeta = request->settings; + m_resultMeta = request->settings; + } else { + ALOGV("DEBUG(%s[%d]):serviceMeta is NULL, use previousMeta", __FUNCTION__, __LINE__); + if (previousMeta.isEmpty()) { + ALOGE("ERR(%s[%d]):previous meta is empty, ERROR ", __FUNCTION__, __LINE__); + } else { + m_serviceMeta = previousMeta; + m_resultMeta = previousMeta; + } + } + + if (m_serviceMeta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { + m_captureIntent = m_resultMeta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; + ALOGV("DEBUG(%s[%d]):ANDROID_CONTROL_CAPTURE_INTENT is (%d)", __FUNCTION__, __LINE__, m_captureIntent); + } + + ALOGV("DEBUG(%s[%d]):key(%d), request->frame_count(%d), num_output_buffers(%d)", + __FUNCTION__, __LINE__, + m_key, request->frame_number, request->num_output_buffers); + +/* m_resultMeta = request->settings;*/ +} + +ExynosCamera3Request::~ExynosCamera3Request() +{ + m_deinit(); +} + +uint32_t ExynosCamera3Request::getKey() +{ + return m_key; +} + +void ExynosCamera3Request::setFrameCount(uint32_t frameCount) +{ + m_frameCount = frameCount; +} + +status_t ExynosCamera3Request::m_init() +{ + status_t ret = NO_ERROR; + + m_key = 0; + m_request = NULL; + m_requestId = 0; + m_captureIntent = 0; + m_frameCount = 0; + m_serviceMeta.clear(); + memset(&m_serviceShot, 0x00, sizeof(struct camera2_shot_ext)); + m_resultMeta.clear(); + memset(&m_resultShot, 0x00, sizeof(struct camera2_shot_ext)); + memset(&m_prevShot, 0x00, sizeof(struct camera2_shot_ext)); + m_requestState = EXYNOS_REQUEST::SERVICE; + m_factoryMap.clear(); + m_resultList.clear(); + m_numOfCompleteBuffers = 0; + m_numOfDuplicateBuffers = 0; + m_pipelineDepth = 0; + + for (int i = 0 ; i < EXYNOS_REQUEST_RESULT::CALLBACK_MAX ; i++) { + m_resultStatus[i] = false; + } + + return ret; +} + +status_t ExynosCamera3Request::m_deinit() +{ + status_t ret = NO_ERROR; + + if (m_request->input_buffer != NULL) { + delete m_request->input_buffer; + } + if (m_request->output_buffers != NULL) { + delete[] m_request->output_buffers; + } + + if (m_request != NULL) { + delete m_request; + } + m_request = NULL; + + m_frameCount = 0; + m_serviceMeta = NULL; + memset(&m_serviceShot, 0x00, sizeof(struct camera2_shot_ext)); + m_resultMeta = NULL; + memset(&m_resultShot, 0x00, sizeof(struct 
camera2_shot_ext)); + memset(&m_prevShot, 0x00, sizeof(struct camera2_shot_ext)); + m_requestState = EXYNOS_REQUEST::SERVICE; + m_resultList.clear(); + m_numOfCompleteBuffers = 0; + m_numOfDuplicateBuffers = 0; + + for (int i = 0 ; i < EXYNOS_REQUEST_RESULT::CALLBACK_MAX ; i++) { + m_resultStatus[i] = false; + } + + return ret; +} + + +uint32_t ExynosCamera3Request::getFrameCount() +{ + return m_frameCount; +} + +uint8_t ExynosCamera3Request::getCaptureIntent() +{ + return m_captureIntent; +} + +camera3_capture_request_t* ExynosCamera3Request::getService() +{ + if (m_request == NULL) + { + ALOGE("ERR(%s[%d]):getService is NULL m_request(%p) ", __FUNCTION__, __LINE__, m_request); + } + return m_request; +} + +uint32_t ExynosCamera3Request::setServiceMeta(CameraMetadata request) +{ + status_t ret = NO_ERROR; + m_serviceMeta = request; + return ret; +} + +CameraMetadata ExynosCamera3Request::getServiceMeta() +{ + return m_serviceMeta; +} + +status_t ExynosCamera3Request::setServiceShot(struct camera2_shot_ext *metadata) +{ + status_t ret = NO_ERROR; + memcpy(&m_serviceShot, metadata, sizeof(struct camera2_shot_ext)); + + return ret; +} + +status_t ExynosCamera3Request::getServiceShot(struct camera2_shot_ext *metadata) +{ + status_t ret = NO_ERROR; + memcpy(metadata, &m_serviceShot, sizeof(struct camera2_shot_ext)); + + return ret; +} + + +status_t ExynosCamera3Request::setResultMeta(CameraMetadata request) +{ + status_t ret = NO_ERROR; + m_resultMeta = request; + return ret; +} + +CameraMetadata ExynosCamera3Request::getResultMeta() +{ + return m_resultMeta; +} + + +status_t ExynosCamera3Request::setResultShot(struct camera2_shot_ext *metadata) +{ + status_t ret = NO_ERROR; + memcpy(&m_resultShot, metadata, sizeof(struct camera2_shot_ext)); + + return ret; +} + +status_t ExynosCamera3Request::getResultShot(struct camera2_shot_ext *metadata) +{ + status_t ret = NO_ERROR; + memcpy(metadata, &m_resultShot, sizeof(struct camera2_shot_ext)); + + return ret; +} + +status_t ExynosCamera3Request::setPrevShot(struct camera2_shot_ext *metadata) +{ + status_t ret = NO_ERROR; + if (metadata == NULL) { + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):setPrevShot metadata is NULL ret(%d) ", __FUNCTION__, __LINE__, ret); + } else { + memcpy(&m_prevShot, metadata, sizeof(struct camera2_shot_ext)); + } + + return ret; +} + +status_t ExynosCamera3Request::getPrevShot(struct camera2_shot_ext *metadata) +{ + status_t ret = NO_ERROR; + if (metadata == NULL) { + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):getPrevShot metadata is NULL ret(%d) ", __FUNCTION__, __LINE__, ret); + } else { + memcpy(metadata, &m_prevShot, sizeof(struct camera2_shot_ext)); + } + + return ret; +} + +status_t ExynosCamera3Request::setRequestState(EXYNOS_REQUEST::STATE state) +{ + status_t ret = NO_ERROR; + switch(state) { + case EXYNOS_REQUEST::SERVICE: + case EXYNOS_REQUEST::RUNNING: + m_requestState = state; + break; + default: + ALOGE("ERR(%s[%d]):setState is invalid newstate(%d) ", __FUNCTION__, __LINE__, state); + break; + } + + return ret; +} + +EXYNOS_REQUEST::STATE ExynosCamera3Request::getRequestState() +{ + EXYNOS_REQUEST::STATE ret = EXYNOS_REQUEST::INVALID; + switch(m_requestState) { + case EXYNOS_REQUEST::SERVICE: + case EXYNOS_REQUEST::RUNNING: + ret = m_requestState; + break; + default: + ALOGE("ERR(%s[%d]):getState is invalid curstate(%d) ", __FUNCTION__, __LINE__, m_requestState); + break; + } + + return ret; +} + +uint32_t ExynosCamera3Request::getNumOfInputBuffer() +{ + uint32_t numOfInputBuffer = 0; + if 
(m_request->input_buffer != NULL) { + numOfInputBuffer = 1; + } + return numOfInputBuffer; +} + +camera3_stream_buffer_t* ExynosCamera3Request::getInputBuffer() +{ + if (m_request == NULL){ + ALOGE("ERR(%s[%d]):getInputBuffer m_request is NULL m_request(%p) ", __FUNCTION__, __LINE__, m_request); + return NULL; + } + + if (m_request->input_buffer == NULL){ + ALOGV("INFO(%s[%d]):getInputBuffer input_buffer is NULL m_request(%p) ", __FUNCTION__, __LINE__, m_request->input_buffer); + } + + return m_request->input_buffer; +} + +uint64_t ExynosCamera3Request::getSensorTimestamp() +{ + return m_resultShot.shot.udm.sensor.timeStampBoot; +} + +uint32_t ExynosCamera3Request::getNumOfOutputBuffer() +{ + return m_numOfOutputBuffers; +} + +void ExynosCamera3Request::increaseCompleteBufferCount(void) +{ + m_resultStatusLock.lock(); + m_numOfCompleteBuffers++; + m_resultStatusLock.unlock(); +} + +void ExynosCamera3Request::resetCompleteBufferCount(void) +{ + m_resultStatusLock.lock(); + m_numOfCompleteBuffers = 0; + m_resultStatusLock.unlock(); +} + +int ExynosCamera3Request::getCompleteBufferCount(void) +{ + return m_numOfCompleteBuffers; +} + +void ExynosCamera3Request::increaseDuplicateBufferCount(void) +{ + m_resultStatusLock.lock(); + m_numOfDuplicateBuffers++; + m_resultStatusLock.unlock(); +} + +void ExynosCamera3Request::resetDuplicateBufferCount(void) +{ + m_resultStatusLock.lock(); + m_numOfDuplicateBuffers = 0; + m_resultStatusLock.unlock(); +} +int ExynosCamera3Request::getDuplicateBufferCount(void) +{ + return m_numOfDuplicateBuffers; +} + +const camera3_stream_buffer_t* ExynosCamera3Request::getOutputBuffers() +{ + if (m_request == NULL){ + ALOGE("ERR(%s[%d]):getNumOfOutputBuffer m_request is NULL m_request(%p) ", __FUNCTION__, __LINE__, m_request); + return NULL; + } + + if (m_request->output_buffers == NULL){ + ALOGE("ERR(%s[%d]):getNumOfOutputBuffer output_buffers is NULL m_request(%p) ", __FUNCTION__, __LINE__, m_request->output_buffers); + return NULL; + } + + ALOGV("DEBUG(%s[%d]):m_request->output_buffers(%p)", __FUNCTION__, __LINE__, m_request->output_buffers); + + return m_request->output_buffers; +} + +status_t ExynosCamera3Request::pushResult(ResultRequest result) +{ + status_t ret = NO_ERROR; + + switch(result->getType()) { + case EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT: + case EXYNOS_REQUEST_RESULT::CALLBACK_PARTIAL_3AA: + ret = m_pushResult(result, &m_resultList, &m_resultListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):pushResult is failed request - Key(%u) frameCount(%u) / result - Key(%u) frameCount(%u)", __FUNCTION__, __LINE__, m_key, m_frameCount, result->getKey(), result->getFrameCount()); + ret = INVALID_OPERATION; + } + break; + case EXYNOS_REQUEST_RESULT::CALLBACK_INVALID: + default: + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):getResultType type have INVALID value type(%d) key(%u) frameCount(%u)", __FUNCTION__, __LINE__, result->getType(), m_key, m_frameCount); + break; + } + + + return ret; +} + +void ExynosCamera3Request::setRequestId(int reqId) { + m_requestId = reqId; +} + +int ExynosCamera3Request::getRequestId(void) { + return m_requestId; +} + +ResultRequest ExynosCamera3Request::popResult(uint32_t resultKey) +{ + ResultRequest result = NULL; + + result = m_popResult(resultKey, &m_resultList, &m_resultListLock); + if (result < 0){ + ALOGE("ERR(%s[%d]):popResult is failed request - Key(%u) frameCount(%u) / result - Key(%u) frameCount(%u)", + __FUNCTION__, 
__LINE__, m_key, m_frameCount, result->getKey(), result->getFrameCount()); + result = NULL; + } + + return result; +} + +ResultRequest ExynosCamera3Request::getResult(uint32_t resultKey) +{ + ResultRequest result = NULL; + + result = m_getResult(resultKey, &m_resultList, &m_resultListLock); + if (result < 0){ + ALOGE("ERR(%s[%d]):popResult is failed request - Key(%u) frameCount(%u) / result - Key(%u) frameCount(%u)", + __FUNCTION__, __LINE__, m_key, m_frameCount, result->getKey(), result->getFrameCount()); + result = NULL; + } + + return result; +} + +status_t ExynosCamera3Request::m_pushResult(ResultRequest item, ResultRequestMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + lock->lock(); + listRet = list->insert( pair(item->getKey(), item)); + if (listRet.second == false) { + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):m_push failed, request already exist!! Request frameCnt( %d )", + __FUNCTION__, __LINE__, item->getFrameCount()); + } + lock->unlock(); + + return ret; +} + +ResultRequest ExynosCamera3Request::m_popResult(uint32_t key, ResultRequestMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + ResultRequestMapIterator iter; + ResultRequest request = NULL; + + lock->lock(); + iter = list->find(key); + if (iter != list->end()) { + request = iter->second; + list->erase( iter ); + } else { + ALOGE("ERR(%s[%d]):m_pop failed, request is not EXIST Request frameCnt( %d )", + __FUNCTION__, __LINE__, request->getFrameCount()); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return request; +} + +ResultRequest ExynosCamera3Request::m_getResult(uint32_t key, ResultRequestMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + ResultRequestMapIterator iter; + ResultRequest request = NULL; + + lock->lock(); + iter = list->find(key); + if (iter != list->end()) { + request = iter->second; + } else { + ALOGE("ERR(%s[%d]):m_getResult failed, request is not EXIST Request frameCnt( %d )", + __FUNCTION__, __LINE__, request->getFrameCount()); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return request; +} + +status_t ExynosCamera3Request::m_getAllResultKeys(ResultRequestkeys *keylist, ResultRequestMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + ResultRequestMapIterator iter; + + lock->lock(); + for (iter = list->begin(); iter != list->end() ; iter++) { + keylist->push_back(iter->first); + } + lock->unlock(); + return ret; +} + +status_t ExynosCamera3Request::m_getResultKeys(ResultRequestkeys *keylist, ResultRequestMap *list, EXYNOS_REQUEST_RESULT::TYPE type, Mutex *lock) +{ + status_t ret = NO_ERROR; + ResultRequestMapIterator iter; + ResultRequest result; + camera3_capture_result_t capture_result; + + lock->lock(); + /* validation check */ + if ((type < EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY) || + (type >= EXYNOS_REQUEST_RESULT::CALLBACK_MAX)) { + ALOGE("ERR(%s[%d]):INVALID value type(%d)", __FUNCTION__, __LINE__, type); + lock->unlock(); + return BAD_VALUE; + } + + for (iter = list->begin(); iter != list->end() ; iter++) { + result = iter->second; + + ALOGV("DEBUG(%s[%d]):result->getKey(%d)", __FUNCTION__, __LINE__, result->getKey()); + + if (type == result->getType()) + keylist->push_back(iter->first); + } + lock->unlock(); + + return ret; +} + +status_t ExynosCamera3Request::m_push(int key, ExynosCamera3FrameFactory* item, FrameFactoryMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + lock->lock(); + listRet = list->insert( pair(key, item)); + if (listRet.second == false) { + ret = 
INVALID_OPERATION; + ALOGE("ERR(%s[%d]):m_push failed, request already exist!! Request frameCnt( %d )", + __FUNCTION__, __LINE__, key); + } + lock->unlock(); + + return ret; +} + +status_t ExynosCamera3Request::m_pop(int key, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + FrameFactoryMapIterator iter; + ExynosCamera3FrameFactory *factory = NULL; + + lock->lock(); + iter = list->find(key); + if (iter != list->end()) { + factory = iter->second; + *item = factory; + list->erase( iter ); + } else { + ALOGE("ERR(%s[%d]):m_pop failed, factory is not EXIST Request frameCnt( %d )", + __FUNCTION__, __LINE__, key); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCamera3Request::m_get(int streamID, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + FrameFactoryMapIterator iter; + ExynosCamera3FrameFactory *factory = NULL; + + lock->lock(); + iter = list->find(streamID); + if (iter != list->end()) { + factory = iter->second; + *item = factory; + } else { + ALOGE("ERR(%s[%d]):m_pop failed, request is not EXIST Request streamID( %d )", + __FUNCTION__, __LINE__, streamID); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return ret; +} + +bool ExynosCamera3Request::m_find(int streamID, FrameFactoryMap *list, Mutex *lock) +{ + bool ret = false; + pair listRet; + FrameFactoryMapIterator iter; + + lock->lock(); + iter = list->find(streamID); + if (iter != list->end()) { + ret = true; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCamera3Request::m_getList(FrameFactoryList *factorylist, FrameFactoryMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + FrameFactoryMapIterator iter; + ExynosCamera3FrameFactory *factory = NULL; + + lock->lock(); + for (iter = list->begin(); iter != list->end() ; iter++) { + factory = iter->second; + factorylist->push_back(factory); + } + lock->unlock(); + + return ret; +} + +status_t ExynosCamera3Request::getAllResultKeys(ResultRequestkeys *keys) +{ + status_t ret = NO_ERROR; + + keys->clear(); + + ret = m_getAllResultKeys(keys, &m_resultList, &m_resultListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):m_getAllResultKeys is failed Request-Key(%u) frameCount(%u) / m_resultList.Size(%zu)", + __FUNCTION__, __LINE__, m_key, m_frameCount, m_resultList.size()); + } + + return ret; +} + +status_t ExynosCamera3Request::getResultKeys(ResultRequestkeys *keys, EXYNOS_REQUEST_RESULT::TYPE type) +{ + status_t ret = NO_ERROR; + + keys->clear(); + + ret = m_getResultKeys(keys, &m_resultList, type, &m_resultListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):getResultKeys is failed Request-Key(%u) frameCount(%u) / m_resultList.Size(%zu)", + __FUNCTION__, __LINE__, m_key, m_frameCount, m_resultList.size()); + } + + return ret; +} + +status_t ExynosCamera3Request::pushFrameFactory(int StreamID, ExynosCamera3FrameFactory* factory) +{ + status_t ret = NO_ERROR; + ret = m_push(StreamID, factory, &m_factoryMap, &m_factoryMapLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):pushFrameFactory is failed StreamID(%d) factory(%p)", + __FUNCTION__, __LINE__, StreamID, factory); + } + + return ret; +} + +ExynosCamera3FrameFactory* ExynosCamera3Request::popFrameFactory(int streamID) +{ + status_t ret = NO_ERROR; + ExynosCamera3FrameFactory* factory = NULL; + + ret = m_pop(streamID, &factory, &m_factoryMap, &m_factoryMapLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):popFrameFactory is failed StreamID(%d) factory(%p)", + 
__FUNCTION__, __LINE__, streamID, factory); + } + return factory; +} + +ExynosCamera3FrameFactory* ExynosCamera3Request::getFrameFactory(int streamID) +{ + status_t ret = NO_ERROR; + ExynosCamera3FrameFactory* factory = NULL; + + ret = m_get(streamID, &factory, &m_factoryMap, &m_factoryMapLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):getFrameFactory is failed StreamID(%d) factory(%p)", + __FUNCTION__, __LINE__, streamID, factory); + } + + return factory; +} + +bool ExynosCamera3Request::isFrameFactory(int streamID) +{ + return m_find(streamID, &m_factoryMap, &m_factoryMapLock); +} + +status_t ExynosCamera3Request::getFrameFactoryList(FrameFactoryList *list) +{ + status_t ret = NO_ERROR; + + ret = m_getList(list, &m_factoryMap, &m_factoryMapLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):getFrameFactoryList is failed", __FUNCTION__, __LINE__); + } + + return ret; +} + +status_t ExynosCamera3Request::getAllRequestOutputStreams(List **list) +{ + status_t ret = NO_ERROR; + + ALOGV("DEBUG (%s[%d]):m_requestOutputStreamList.size(%zu)", + __FUNCTION__, __LINE__, m_requestOutputStreamList.size()); + + /* lock->lock(); */ + *list = &m_requestOutputStreamList; + /* lock->unlock(); */ + + return ret; +} + +status_t ExynosCamera3Request::pushRequestOutputStreams(int requestStreamId) +{ + status_t ret = NO_ERROR; + + /* lock->lock(); */ + m_requestOutputStreamList.push_back(requestStreamId); + /* lock->unlock(); */ + + return ret; +} + +status_t ExynosCamera3Request::getAllRequestInputStreams(List **list) +{ + status_t ret = NO_ERROR; + + ALOGV("DEBUG (%s[%d]):m_requestOutputStreamList.size(%zu)", __FUNCTION__, __LINE__, m_requestInputStreamList.size()); + + /* lock->lock(); */ + *list = &m_requestInputStreamList; + /* lock->unlock(); */ + + return ret; +} + +status_t ExynosCamera3Request::pushRequestInputStreams(int requestStreamId) +{ + status_t ret = NO_ERROR; + + /* lock->lock(); */ + m_requestInputStreamList.push_back(requestStreamId); + /* lock->unlock(); */ + + return ret; +} + +status_t ExynosCamera3Request::popAndEraseResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList) +{ + status_t ret = NO_ERROR; + + resultList->clear(); + + ret = m_popAndEraseResultsByType(reqType, &m_resultList, resultList, &m_resultListLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_pop request is failed, request type(%d) resultSize(%zu)", + __FUNCTION__, __LINE__, reqType, resultList->size()); + } + + return ret; +} + +status_t ExynosCamera3Request::popResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList) +{ + status_t ret = NO_ERROR; + + resultList->clear(); + + ret = m_popResultsByType(reqType, &m_resultList, resultList, &m_resultListLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_pop request is failed, request type(%d) resultSize(%zu)", + __FUNCTION__, __LINE__, reqType, resultList->size()); + } + + return ret; +} + +status_t ExynosCamera3Request::setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag) +{ + status_t ret = NO_ERROR; + ret = m_setCallbackDone(reqType, flag, &m_resultStatusLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_get request is failed, request type(%d) ", __FUNCTION__, __LINE__, reqType); + } + + return ret; +} + +bool ExynosCamera3Request::getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType) +{ + bool ret = false; + ret = m_getCallbackDone(reqType, &m_resultStatusLock); + return ret; +} + +bool ExynosCamera3Request::isComplete() +{ + bool ret = false; + bool notify = false; + bool capture = false; + + notify = 
m_getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY, &m_resultStatusLock); + capture = m_getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT, &m_resultStatusLock); + + if (notify == true && capture == true) { + ret = true; + } + + return ret; +} + +int ExynosCamera3Request::getStreamId(int bufferIndex) +{ + if (bufferIndex < 0 || bufferIndex >= m_numOfOutputBuffers) { + ALOGE("ERR(%s[%d]):Invalid buffer index(%d), outputBufferCount(%d)", + __FUNCTION__, __LINE__, bufferIndex, m_numOfOutputBuffers); + return -1; + } + + if (m_streamIdList == NULL) { + ALOGE("ERR(%s[%d]):m_streamIdList is NULL", __FUNCTION__, __LINE__); + return -1; + } + + return m_streamIdList[bufferIndex]; +} + +void ExynosCamera3Request::setNeedInternalFrame(bool isNeedInternalFrame) +{ + m_isNeedInternalFrame = isNeedInternalFrame; +} + +bool ExynosCamera3Request::getNeedInternalFrame(void) +{ + return m_isNeedInternalFrame; +} + +void ExynosCamera3Request::increasePipelineDepth(void) +{ + m_pipelineDepth++; +} + +void ExynosCamera3Request::updatePipelineDepth(void) +{ + const uint8_t pipelineDepth = m_pipelineDepth; + + m_resultShot.shot.dm.request.pipelineDepth = m_pipelineDepth; + m_resultMeta.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipelineDepth, 1); + ALOGV("DEBUG(%s):ANDROID_REQUEST_PIPELINE_DEPTH(%d)", __FUNCTION__, pipelineDepth); +} + +status_t ExynosCamera3Request::m_setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag, Mutex *lock) +{ + status_t ret = NO_ERROR; + if (reqType >= EXYNOS_REQUEST_RESULT::CALLBACK_MAX) { + ALOGE("ERR(%s[%d]):m_setCallback failed, status erray out of bounded reqType(%d)", + __FUNCTION__, __LINE__, reqType); + + ret = INVALID_OPERATION; + return ret; + } + + lock->lock(); + m_resultStatus[reqType] = flag; + lock->unlock(); + return ret; +} + +bool ExynosCamera3Request::m_getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, Mutex *lock) +{ + bool ret = false; + if (reqType >= EXYNOS_REQUEST_RESULT::CALLBACK_MAX) { + ALOGE("ERR(%s[%d]):m_getCallback failed, status erray out of bounded reqType(%d)", + __FUNCTION__, __LINE__, reqType); + + return ret; + } + + lock->lock(); + ret = m_resultStatus[reqType]; + lock->unlock(); + return ret; +} + +void ExynosCamera3Request::printCallbackDoneState() +{ + for (int i = 0 ; i < EXYNOS_REQUEST_RESULT::CALLBACK_MAX ; i++) + ALOGD("DEBUG(%s[%d]):m_key(%d), m_resultStatus[%d](%d)", + __FUNCTION__, __LINE__, m_key, i, m_resultStatus[i]); +} + +status_t ExynosCamera3Request::m_popAndEraseResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, + ResultRequestMap *list, + ResultRequestList *resultList, + Mutex *lock) +{ + status_t ret = NO_ERROR; + ResultRequestMapIterator iter; + ResultRequest obj; + + lock->lock(); + + if (list->size() > 0) { + for (iter = list->begin(); iter != list->end();) { + obj = iter->second; + if (obj->getType() == reqType) { + resultList->push_back(obj); + list->erase(iter++); + } else { + ++iter; + } + } + } + + lock->unlock(); + + return ret; +} + +status_t ExynosCamera3Request::m_popResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, + ResultRequestMap *list, + ResultRequestList *resultList, + Mutex *lock) +{ + status_t ret = NO_ERROR; + ResultRequestMapIterator iter; + ResultRequest obj; + + lock->lock(); + + if (list->size() > 0) { + for (iter = list->begin(); iter != list->end(); iter++) { + obj = iter->second; + if (obj->getType() == reqType) { + resultList->push_back(obj); + } + } + } + + lock->unlock(); + + return ret; +} + +ExynosCameraRequestManager::ExynosCameraRequestManager(int cameraId, 
ExynosCameraParameters *param) +{ + CLOGD("DEBUG(%s[%d])Create-ID(%d)", __FUNCTION__, __LINE__, cameraId); + + m_cameraId = cameraId; + m_parameters = param; + m_converter = NULL; + m_callbackOps = NULL; + m_requestKey = 0; + m_requestResultKey = 0; + memset(&m_dummyShot, 0x00, sizeof(struct camera2_shot_ext)); + memset(&m_currShot, 0x00, sizeof(struct camera2_shot_ext)); + memset(m_name, 0x00, sizeof(m_name)); + + for (int i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) + m_defaultRequestTemplate[i] = NULL; + + m_factoryMap.clear(); + m_zslFactoryMap.clear(); + + m_callbackSequencer = new ExynosCameraCallbackSequencer(); + + m_preShot = NULL; + m_preShot = new struct camera2_shot_ext; + m_callbackTraceCount = 0; +} + +ExynosCameraRequestManager::~ExynosCameraRequestManager() +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + memset(&m_dummyShot, 0x00, sizeof(struct camera2_shot_ext)); + memset(&m_currShot, 0x00, sizeof(struct camera2_shot_ext)); + + for (int i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) + free(m_defaultRequestTemplate[i]); + + for (int i = 0 ; i < EXYNOS_REQUEST_TYPE::MAX ; i++) { + m_serviceRequests[i].clear(); + m_runningRequests[i].clear(); + } + + m_requestFrameCountMap.clear(); + + if (m_converter != NULL) { + delete m_converter; + m_converter = NULL; + } + + m_factoryMap.clear(); + m_zslFactoryMap.clear(); + + if (m_callbackSequencer != NULL) { + delete m_callbackSequencer; + m_callbackSequencer= NULL; + } + + if (m_preShot != NULL) { + delete m_preShot; + m_preShot = NULL; + } + m_callbackTraceCount = 0; +} + +status_t ExynosCameraRequestManager::setMetaDataConverter(ExynosCameraMetadataConverter *converter) +{ + status_t ret = NO_ERROR; + if (m_converter != NULL) + CLOGD("DEBUG(%s[%d]):m_converter is not NULL(%p)", __FUNCTION__, __LINE__, m_converter); + + m_converter = converter; + return ret; +} + +ExynosCameraMetadataConverter* ExynosCameraRequestManager::getMetaDataConverter() +{ + if (m_converter == NULL) + CLOGD("DEBUG(%s[%d]):m_converter is NULL(%p)", __FUNCTION__, __LINE__, m_converter); + + return m_converter; +} + +status_t ExynosCameraRequestManager::setRequestsInfo(int key, ExynosCamera3FrameFactory *factory, ExynosCamera3FrameFactory *zslFactory) +{ + status_t ret = NO_ERROR; + if (factory == NULL) { + CLOGE("ERR(%s[%d]):m_factory is NULL key(%d) factory(%p)", + __FUNCTION__, __LINE__, key, factory); + + ret = INVALID_OPERATION; + return ret; + } + /* zslFactory can be NULL. In this case, use factory insted. 
+ (Same frame factory for both normal capture and ZSL input) + */ + if (zslFactory == NULL) { + zslFactory = factory; + } + + ret = m_pushFactory(key ,factory, &m_factoryMap, &m_factoryMapLock); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_pushFactory is failed key(%d) factory(%p)", + __FUNCTION__, __LINE__, key, factory); + + ret = INVALID_OPERATION; + return ret; + } + ret = m_pushFactory(key ,zslFactory, &m_zslFactoryMap, &m_zslFactoryMapLock); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_pushFactory is failed key(%d) zslFactory(%p)", + __FUNCTION__, __LINE__, key, factory); + + ret = INVALID_OPERATION; + return ret; + } + + return ret; +} + +ExynosCamera3FrameFactory* ExynosCameraRequestManager::getFrameFactory(int key) +{ + status_t ret = NO_ERROR; + ExynosCamera3FrameFactory *factory = NULL; + if (key < 0) { + CLOGE("ERR(%s[%d]):getFrameFactory, type is invalid key(%d)", + __FUNCTION__, __LINE__, key); + + ret = INVALID_OPERATION; + return NULL; + } + + ret = m_popFactory(key ,&factory, &m_factoryMap, &m_factoryMapLock); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_pushFactory is failed key(%d) factory(%p)", + __FUNCTION__, __LINE__, key, factory); + + ret = INVALID_OPERATION; + return NULL; + } + return factory; +} + +status_t ExynosCameraRequestManager::isPrevRequest() +{ + if (m_previousMeta.isEmpty()) + return BAD_VALUE; + else + return OK; +} + +status_t ExynosCameraRequestManager::clearPrevRequest() +{ + m_previousMeta.clear(); + return OK; +} + +status_t ExynosCameraRequestManager::clearPrevShot() +{ + status_t ret = NO_ERROR; + if (m_preShot == NULL) { + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):clearPrevShot previous meta is NULL ret(%d) ", + __FUNCTION__, __LINE__, ret); + } else { + memset(m_preShot, 0x00, sizeof(struct camera2_shot_ext)); + } + return ret; +} + +status_t ExynosCameraRequestManager::constructDefaultRequestSettings(int type, camera_metadata_t **request) +{ + Mutex::Autolock l(m_requestLock); + + CLOGD("DEBUG(%s[%d]):Type = %d", __FUNCTION__, __LINE__, type); + + struct camera2_shot_ext shot_ext; + + if (m_defaultRequestTemplate[type]) { + *request = m_defaultRequestTemplate[type]; + return OK; + } + + m_converter->constructDefaultRequestSettings(type, request); + + m_defaultRequestTemplate[type] = *request; + + /* create default shot */ + m_converter->initShotData(&m_dummyShot); + + CLOGD("DEBUG(%s[%d]):Registered default request template(%d)", + __FUNCTION__, __LINE__, type); + return OK; +} + +status_t ExynosCameraRequestManager::m_pushBack(ExynosCameraRequest* item, RequestInfoList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + lock->lock(); + list->push_back(item); + lock->unlock(); + return ret; +} + +status_t ExynosCameraRequestManager::m_popBack(ExynosCameraRequest** item, RequestInfoList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + lock->lock(); + if (list->size() > 0) { + *item = list->back(); + list->pop_back(); + } else { + CLOGE("ERR(%s[%d]):m_popBack failed, size(%zu)", __FUNCTION__, __LINE__, list->size()); + ret = INVALID_OPERATION; + } + lock->unlock(); + return ret; +} + +status_t ExynosCameraRequestManager::m_pushFront(ExynosCameraRequest* item, RequestInfoList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + lock->lock(); + list->push_back(item); + lock->unlock(); + return ret; +} + +status_t ExynosCameraRequestManager::m_popFront(ExynosCameraRequest** item, RequestInfoList *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + lock->lock(); + if (list->size() > 0) { + *item = list->front(); + list->pop_front(); + } else { + 
CLOGE("ERR(%s[%d]):m_popFront failed, size(%zu)", __FUNCTION__, __LINE__, list->size()); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraRequestManager::m_get(uint32_t frameCount, + ExynosCameraRequest** item, + RequestInfoList *list, + Mutex *lock) +{ + status_t ret = INVALID_OPERATION; + RequestInfoListIterator iter; + ExynosCameraRequest* request = NULL; + + if (*item == NULL) { + CLOGE("ERR(%s[%d]):item is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (list == NULL) { + CLOGE("ERR(%s[%d]):list is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (lock == NULL) { + CLOGE("ERR(%s[%d]):lock is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + lock->lock(); + for (iter = list->begin(); iter != list->end(); ++iter) { + request = *iter; + if (request->getKey() == frameCount) { + ret = NO_ERROR; + break; + } + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraRequestManager::m_push(ExynosCameraRequest* item, RequestInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + lock->lock(); + listRet = list->insert( pair(item->getKey(), item)); + if (listRet.second == false) { + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):m_push failed, request already exist!! Request frameCnt( %d )", + __FUNCTION__, __LINE__, item->getFrameCount()); + } + lock->unlock(); + + m_printAllRequestInfo(list, lock); + + return ret; +} + +status_t ExynosCameraRequestManager::m_pop(uint32_t frameCount, ExynosCameraRequest** item, RequestInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + RequestInfoMapIterator iter; + ExynosCameraRequest *request = NULL; + + lock->lock(); + iter = list->find(frameCount); + if (iter != list->end()) { + request = iter->second; + *item = request; + list->erase( iter ); + } else { + CLOGE("ERR(%s[%d]):m_pop failed, request is not EXIST Request frameCnt(%d)", + __FUNCTION__, __LINE__, frameCount); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraRequestManager::m_get(uint32_t frameCount, ExynosCameraRequest** item, RequestInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + RequestInfoMapIterator iter; + ExynosCameraRequest *request = NULL; + + lock->lock(); + iter = list->find(frameCount); + if (iter != list->end()) { + request = iter->second; + *item = request; + } else { + CLOGE("ERR(%s[%d]):m_pop failed, request is not EXIST Request frameCnt( %d )", + __FUNCTION__, __LINE__, frameCount); + ret = INVALID_OPERATION; + } + lock->unlock(); + + m_printAllRequestInfo(list, lock); + + return ret; +} + +void ExynosCameraRequestManager::m_printAllRequestInfo(RequestInfoMap *map, Mutex *lock) +{ + RequestInfoMapIterator iter; + ExynosCameraRequest *request = NULL; + ExynosCameraRequest *item = NULL; + camera3_capture_request_t *serviceRequest = NULL; + + lock->lock(); + iter = map->begin(); + + while(iter != map->end()) { + request = iter->second; + item = request; + + serviceRequest = item->getService(); +#if 0 + CLOGE("INFO(%s[%d]):key(%d), serviceFrameCount(%d), (%p) frame_number(%d), outputNum(%d)", __FUNCTION__, __LINE__, + request->getKey(), + request->getFrameCount(), + serviceRequest, + serviceRequest->frame_number, + serviceRequest->num_output_buffers); +#endif + iter++; + } + lock->unlock(); +} + +status_t ExynosCameraRequestManager::m_delete(ExynosCameraRequest *item) +{ + status_t ret = NO_ERROR; + + CLOGV("DEBUG(%s[%d]):m_delete -> delete request(%d)", __FUNCTION__, __LINE__, 
item->getFrameCount()); + + delete item; + item = NULL; + + return ret; +} + +status_t ExynosCameraRequestManager::m_pushFactory(int key, + ExynosCamera3FrameFactory* item, + FrameFactoryMap *list, + Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + lock->lock(); + listRet = list->insert( pair(key, item)); + if (listRet.second == false) { + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):m_push failed, request already exist!! Request frameCnt( %d )", + __FUNCTION__, __LINE__, key); + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraRequestManager::m_popFactory(int key, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + FrameFactoryMapIterator iter; + ExynosCamera3FrameFactory *factory = NULL; + + lock->lock(); + iter = list->find(key); + if (iter != list->end()) { + factory = iter->second; + *item = factory; + list->erase( iter ); + } else { + CLOGE("ERR(%s[%d]):m_pop failed, factory is not EXIST Request frameCnt( %d )", + __FUNCTION__, __LINE__, key); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraRequestManager::m_getFactory(int key, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + pair listRet; + FrameFactoryMapIterator iter; + ExynosCamera3FrameFactory *factory = NULL; + lock->lock(); + iter = list->find(key); + if (iter != list->end()) { + factory = iter->second; + *item = factory; + } else { + CLOGE("ERR(%s[%d]):m_pop failed, request is not EXIST Request frameCnt( %d )", + __FUNCTION__, __LINE__, key); + ret = INVALID_OPERATION; + } + lock->unlock(); + + return ret; +} + +ExynosCameraRequest* ExynosCameraRequestManager::registerServiceRequest(camera3_capture_request_t *request) +{ + status_t ret = NO_ERROR; + ExynosCameraRequest *obj = NULL; + struct camera2_shot_ext shot_ext; + CameraMetadata meta; + int32_t captureIntent = 0; + uint32_t bufferCnt = 0; + camera3_stream_buffer_t *inputbuffer = NULL; + const camera3_stream_buffer_t *outputbuffer = NULL; + ExynosCameraStream *stream = NULL; + ExynosCamera3FrameFactory *factory = NULL; + int32_t streamID = 0; + int32_t factoryID = 0; + bool needDummyStream = true; + bool isZslInput = false; + + if (request->settings == NULL) { + meta = m_previousMeta; + } else { + meta = request->settings; + } + + if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { + captureIntent = meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; + CLOGV("DEBUG(%s[%d]):ANDROID_CONTROL_CAPTURE_INTENT is (%d)", + __FUNCTION__, __LINE__, captureIntent); + } + + /* Check whether the input buffer (ZSL input) is specified. + Use zslFramFactory in the following section if ZSL input is used + */ + obj = new ExynosCamera3Request(request, m_previousMeta); + bufferCnt = obj->getNumOfInputBuffer(); + inputbuffer = obj->getInputBuffer(); + for(uint32_t i = 0 ; i < bufferCnt ; i++) { + stream = static_cast(inputbuffer[i].stream->priv); + stream->getID(&streamID); + factoryID = streamID % HAL_STREAM_ID_MAX; + + /* Stream ID validity */ + if(factoryID == HAL_STREAM_ID_ZSL_INPUT) { + isZslInput = true; + } else { + /* Ignore input buffer */ + CLOGE("ERR(%s[%d]):Invalid input streamID. 
captureIntent(%d) streamID(%d)", + __FUNCTION__, __LINE__, captureIntent, streamID); + } + } + + bufferCnt = obj->getNumOfOutputBuffer(); + outputbuffer = obj->getOutputBuffers(); + for(uint32_t i = 0 ; i < bufferCnt ; i++) { + stream = static_cast(outputbuffer[i].stream->priv); + stream->getID(&streamID); + factoryID = streamID % HAL_STREAM_ID_MAX; + switch(streamID % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_PREVIEW: + case HAL_STREAM_ID_VIDEO: + case HAL_STREAM_ID_CALLBACK: + case HAL_STREAM_ID_RAW: + case HAL_STREAM_ID_ZSL_OUTPUT: + needDummyStream = false; + break; + default: + break; + } + + if (m_parameters->isReprocessing() == false) { + needDummyStream = false; + } + + /* Choose appropirate frame factory depends on input buffer is specified or not */ + if(isZslInput == true) { + ret = m_getFactory(factoryID, &factory, &m_zslFactoryMap, &m_zslFactoryMapLock); + CLOGD("DEBUG(%s[%d]):ZSL framefactory for streamID(%d)", + __FUNCTION__, __LINE__, streamID); + } else { + ret = m_getFactory(factoryID, &factory, &m_factoryMap, &m_factoryMapLock); + CLOGV("DEBUG(%s[%d]):Normal framefactory for streamID(%d)", + __FUNCTION__, __LINE__, streamID); + } + if (ret < 0) { + CLOGD("DEBUG(%s[%d]):m_getFactory is failed captureIntent(%d) streamID(%d)", + __FUNCTION__, __LINE__, captureIntent, streamID); + } + obj->pushFrameFactory(streamID, factory); + obj->pushRequestOutputStreams(streamID); + } + +#if !defined(ENABLE_FULL_FRAME) + /* attach dummy stream to this request if this request needs dummy stream */ + obj->setNeedInternalFrame(needDummyStream); +#endif + + obj->getServiceShot(&shot_ext); + meta = obj->getServiceMeta(); + m_converter->initShotData(&shot_ext); + + m_previousMeta = meta; + + CLOGV("DEBUG(%s[%d]):m_currReqeustList size(%zu), fn(%d)", + __FUNCTION__, __LINE__, m_serviceRequests[EXYNOS_REQUEST_TYPE::PREVIEW].size(), obj->getFrameCount()); + + if (meta.isEmpty()) { + CLOGD("DEBUG(%s[%d]):meta is EMPTY", __FUNCTION__, __LINE__); + } else { + CLOGV("DEBUG(%s[%d]):meta is NOT EMPTY", __FUNCTION__, __LINE__); + + } + + int reqId; + ret = m_converter->convertRequestToShot(meta, &shot_ext, &reqId); + obj->setRequestId(reqId); + + obj->setServiceShot(&shot_ext); + obj->setPrevShot(m_preShot); + + memcpy(m_preShot, &shot_ext, sizeof(struct camera2_shot_ext)); + + ret = m_pushBack(obj, &m_serviceRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0){ + CLOGE("ERR(%s[%d]):request m_pushBack is failed request(%d)", __FUNCTION__, __LINE__, obj->getFrameCount()); + + delete obj; + return NULL; + } + + m_callbackSequencer->pushFrameCount(obj->getKey()); + + return obj; +} + +status_t ExynosCameraRequestManager::getPreviousShot(struct camera2_shot_ext *pre_shot_ext) +{ + memcpy(pre_shot_ext, m_preShot, sizeof(struct camera2_shot_ext)); + + return NO_ERROR; +} + +uint32_t ExynosCameraRequestManager::getRequestCount(void) +{ + Mutex::Autolock l(m_requestLock); + return m_serviceRequests[EXYNOS_REQUEST_TYPE::PREVIEW].size() + m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW].size(); +} + +uint32_t ExynosCameraRequestManager::getServiceRequestCount(void) +{ + Mutex::Autolock lock(m_requestLock); + return m_serviceRequests[EXYNOS_REQUEST_TYPE::PREVIEW].size(); +} + +ExynosCameraRequest* ExynosCameraRequestManager::createServiceRequest() +{ + status_t ret = NO_ERROR; + ExynosCameraRequest *obj = NULL; + + ret = m_popFront(&obj, &m_serviceRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0){ + CLOGE("ERR(%s[%d]):request m_popFront is failed request", __FUNCTION__, 
__LINE__); + ret = INVALID_OPERATION; + return NULL; + } + + ret = m_push(obj, &m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0){ + CLOGE("ERR(%s[%d]):request m_push is failed request", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + return NULL; + } + + ret = m_increasePipelineDepth(&m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to increase the pipeline depth", __FUNCTION__, __LINE__); + + return obj; +} + +status_t ExynosCameraRequestManager::deleteServiceRequest(uint32_t frameCount) +{ + status_t ret = NO_ERROR; + uint32_t key = 0; + ExynosCameraRequest *obj = NULL; + + ret = m_popKey(&key, frameCount); + if (ret < NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to m_popKey. frameCount %d", + __FUNCTION__, __LINE__, frameCount); + return INVALID_OPERATION; + } + + ret = m_pop(key, &obj, &m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0){ + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):request m_popFront is failed request", __FUNCTION__, __LINE__); + } else { + m_delete(obj); + } + + return ret; +} + +ExynosCameraRequest* ExynosCameraRequestManager::getServiceRequest(uint32_t frameCount) +{ + status_t ret = NO_ERROR; + uint32_t key = 0; + ExynosCameraRequest* obj = NULL; + + ret = m_getKey(&key, frameCount); + if (ret < NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to m_popKey. frameCount %d", + __FUNCTION__, __LINE__, frameCount); + return NULL; + } + + ret = m_get(key, &obj, &m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0){ + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):request m_popFront is failed request", __FUNCTION__, __LINE__); + } + + return obj; +} + +status_t ExynosCameraRequestManager::flush() +{ + status_t ret = NO_ERROR; + + for (int i = 0 ; i < EXYNOS_REQUEST_TYPE::MAX ; i++) { + m_serviceRequests[i].clear(); + m_runningRequests[i].clear(); + } + + m_requestFrameCountMap.clear(); + + if (m_callbackSequencer != NULL) + m_callbackSequencer->flush(); + return ret; +} + +status_t ExynosCameraRequestManager::m_getKey(uint32_t *key, uint32_t frameCount) +{ + status_t ret = NO_ERROR; + RequestFrameCountMapIterator iter; + + m_requestFrameCountMapLock.lock(); + iter = m_requestFrameCountMap.find(frameCount); + if (iter != m_requestFrameCountMap.end()) { + *key = iter->second; + } else { + CLOGE("ERR(%s[%d]):get request key is failed. request for framecount(%d) is not EXIST", + __FUNCTION__, __LINE__, frameCount); + ret = INVALID_OPERATION; + } + m_requestFrameCountMapLock.unlock(); + + return ret; +} + +status_t ExynosCameraRequestManager::m_popKey(uint32_t *key, uint32_t frameCount) +{ + status_t ret = NO_ERROR; + RequestFrameCountMapIterator iter; + + m_requestFrameCountMapLock.lock(); + iter = m_requestFrameCountMap.find(frameCount); + if (iter != m_requestFrameCountMap.end()) { + *key = iter->second; + m_requestFrameCountMap.erase(iter); + } else { + CLOGE("ERR(%s[%d]):get request key is failed. 
request for framecount(%d) is not EXIST", + __FUNCTION__, __LINE__, frameCount); + ret = INVALID_OPERATION; + } + m_requestFrameCountMapLock.unlock(); + + return ret; +} + +uint32_t ExynosCameraRequestManager::m_generateResultKey() +{ + m_requestResultKeyLock.lock(); + uint32_t key = m_requestResultKey++; + m_requestResultKeyLock.unlock(); + return key; +} + +uint32_t ExynosCameraRequestManager::m_getResultKey() +{ + m_requestResultKeyLock.lock(); + uint32_t key = m_requestResultKey; + m_requestResultKeyLock.unlock(); + return key; +} + +ResultRequest ExynosCameraRequestManager::createResultRequest(uint32_t frameCount, + EXYNOS_REQUEST_RESULT::TYPE type, + camera3_capture_result_t *captureResult, + camera3_notify_msg_t *notifyMsg) +{ + status_t ret = NO_ERROR; + uint32_t key = 0; + ResultRequest request; + + ret = m_getKey(&key, frameCount); + if (ret < NO_ERROR) { + CLOGE("ERR(%s[%d]):m_getKey is failed. framecount(%d)", __FUNCTION__, __LINE__, frameCount); + return NULL; + } + + request = new ExynosCamera3RequestResult(m_generateResultKey(), key, type, captureResult, notifyMsg); + + return request; +} + +status_t ExynosCameraRequestManager::setCallbackOps(const camera3_callback_ops *callbackOps) +{ + status_t ret = NO_ERROR; + m_callbackOps = callbackOps; + return ret; +} + +status_t ExynosCameraRequestManager::callbackRequest(ResultRequest result) +{ + status_t ret = NO_ERROR; + + ExynosCameraRequest* obj = NULL; + ret = m_get(result->getFrameCount(), &obj, &m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < NO_ERROR) { + CLOGE("ERR(%s[%d]):m_get is failed. requestKey(%d)", + __FUNCTION__, __LINE__, result->getFrameCount()); + return ret; + } + CLOGV("INFO(%s[%d]):type(%d) key(%u) frameCount(%u) ", + __FUNCTION__, __LINE__, result->getType(), result->getKey(), result->getFrameCount()); + + switch(result->getType()){ + case EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_PARTIAL_3AA: + case EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT: + obj->pushResult(result); + m_checkCallbackRequestSequence(); + break; + case EXYNOS_REQUEST_RESULT::CALLBACK_INVALID: + default: + CLOGE("ERR(%s[%d]):callbackRequest type have INVALID value type(%d) key(%u) frameCount(%u) ", + __FUNCTION__, __LINE__, result->getType(), result->getKey(), result->getFrameCount()); + break; + } + + return ret; +} + +void ExynosCameraRequestManager::callbackSequencerLock() +{ + m_callbackSequencerLock.lock(); +} + +void ExynosCameraRequestManager::callbackSequencerUnlock() +{ + m_callbackSequencerLock.unlock(); +} + +status_t ExynosCameraRequestManager::setFrameCount(uint32_t frameCount, uint32_t requestKey) +{ + status_t ret = NO_ERROR; + pair listRet; + ExynosCameraRequest *request = NULL; + + m_requestFrameCountMapLock.lock(); + listRet = m_requestFrameCountMap.insert(pair(frameCount, requestKey)); + if (listRet.second == false) { + ret = INVALID_OPERATION; + CLOGE("ERR(%s[%d]):Failed, requestKey(%d) already exist!!", + __FUNCTION__, __LINE__, frameCount); + } + m_requestFrameCountMapLock.unlock(); + + ret = m_get(requestKey, &request, &m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0) + CLOGE("ERR(%s[%d]):m_get is failed. 
requestKey(%d)", __FUNCTION__, __LINE__, requestKey); + + request->setFrameCount(frameCount); + + return ret; +} + +status_t ExynosCameraRequestManager::m_checkCallbackRequestSequence() +{ + status_t ret = NO_ERROR; + uint32_t notifyIndex = 0; + ResultRequest callback; + bool flag = false; + uint32_t key = 0; + ResultRequestList callbackList; + ResultRequestListIterator callbackIter; + EXYNOS_REQUEST_RESULT::TYPE cbType; + + ExynosCameraRequest* callbackReq = NULL; + + CallbackListkeys *callbackReqList= NULL; + CallbackListkeysIter callbackReqIter; + + callbackList.clear(); + + ExynosCameraRequest *camera3Request = NULL; + + /* m_callbackSequencerLock.lock(); */ + + /* m_callbackSequencer->dumpList(); */ + + m_callbackSequencer->getFrameCountList(&callbackReqList); + + callbackReqIter = callbackReqList->begin(); + while (callbackReqIter != callbackReqList->end()) { + CLOGV("DEBUG(%s[%d]):(*callbackReqIter)(%d)", __FUNCTION__, __LINE__, (uint32_t)(*callbackReqIter)); + + key = (uint32_t)(*callbackReqIter); + ret = m_get(key, &callbackReq, &m_runningRequests[EXYNOS_REQUEST_TYPE::PREVIEW], &m_requestLock); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_get is failed. requestKey(%d)", __FUNCTION__, __LINE__, key); + break; + } + camera3Request = callbackReq; + + CLOGV("DEBUG(%s[%d]):frameCount(%u)", __FUNCTION__, __LINE__, callbackReq->getFrameCount()); + + /* Check NOTIFY Case */ + cbType = EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY; + ret = callbackReq->popAndEraseResultsByType(cbType, &callbackList); + if (ret < 0) { + CLOGE("ERR(%s[%d]):popRequest type(%d) callbackList Size(%zu)", + __FUNCTION__, __LINE__, cbType, callbackList.size()); + } + + if (callbackList.size() > 0) { + callbackIter = callbackList.begin(); + callback = *callbackIter; + CLOGV("DEBUG(%s[%d]):frameCount(%u), size of list(%zu)", __FUNCTION__, __LINE__, + callbackReq->getFrameCount(), callbackList.size()); + if (callbackReq->getCallbackDone(cbType) == false) { + CLOGV("DEBUG(%s[%d]):frameCount(%u), size of list(%zu) getCallbackDone(%d)", __FUNCTION__, __LINE__, + callbackReq->getFrameCount(), callbackList.size(), callbackReq->getCallbackDone(cbType)); + m_callbackRequest(callback); + } + + callbackReq->setCallbackDone(cbType, true); + } + + /* Check BUFFER Case */ + cbType = EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY; + if( callbackReq->getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY) == true ) { + /* Output Buffers will be updated finally */ + ret = callbackReq->popResultsByType(cbType, &callbackList); + if (ret < 0) { + CLOGE("ERR(%s[%d]):popRequest frameCount(%u) type(%d) callbackList Size(%zu)", + __FUNCTION__, __LINE__, callbackReq->getFrameCount(), cbType, callbackList.size()); + } + + CLOGV("DEBUG(%s[%d]):callbackList.size(%zu)", __FUNCTION__, __LINE__, callbackList.size()); + if (callbackList.size() > 0) { + callbackReq->setCallbackDone(cbType, true); + } + + for (callbackIter = callbackList.begin(); callbackIter != callbackList.end(); callbackIter++) { + callback = *callbackIter; + /* Output Buffers will be updated finally */ + /* m_callbackRequest(callback); */ + /* callbackReq->increaseCompleteBuffers(); */ + } + + /* if all buffer is done? 
:: packing buffers and making final result */ + if((int)callbackReq->getNumOfOutputBuffer() == callbackReq->getCompleteBufferCount()) { + m_callbackPackingOutputBuffers(callbackReq); + } + + cbType = EXYNOS_REQUEST_RESULT::CALLBACK_PARTIAL_3AA; + ret = callbackReq->popAndEraseResultsByType(cbType, &callbackList); + if (ret < 0) { + CLOGE("ERR(%s[%d]):popRequest frameCount(%u) type(%d) callbackList Size(%zu)", + __FUNCTION__, __LINE__, callbackReq->getFrameCount(), cbType, callbackList.size()); + } + + if (callbackList.size() > 0) { + callbackReq->setCallbackDone(cbType, true); + } + + for (callbackIter = callbackList.begin(); callbackIter != callbackList.end(); callbackIter++) { + callback = *callbackIter; + m_callbackRequest(callback); + } + } + + /* Check ALL_RESULT Case */ + cbType = EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT; + if( callbackReq->getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY) == true ) { + ret = callbackReq->popAndEraseResultsByType(cbType, &callbackList); + if (ret < 0) { + CLOGE("ERR(%s[%d]):popRequest type(%d) callbackList Size(%zu)", + __FUNCTION__, __LINE__, cbType, callbackList.size()); + } + + CLOGV("DEBUG(%s[%d]):callbackList.size(%zu)", __FUNCTION__, __LINE__, callbackList.size()); + if (callbackList.size() > 0) { + callbackReq->setCallbackDone(cbType, true); + } + + for (callbackIter = callbackList.begin(); callbackIter != callbackList.end(); callbackIter++) { + callback = *callbackIter; + camera3_capture_result_t tempResult; + callback->getCaptureRequest(&tempResult); + m_callbackRequest(callback); + } + } + + if ((callbackReq->getCallbackDone(EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY) == false) || + ((int)camera3Request->getNumOfOutputBuffer() > camera3Request->getCompleteBufferCount())) { + + if (m_callbackTraceCount > 10) { + CLOGW("WARN(%s[%d]):(*callbackReqIter)(%d)", __FUNCTION__, __LINE__, (uint32_t)(*callbackReqIter)); + CLOGW("WARN(%s[%d]):frameCount(%u)", __FUNCTION__, __LINE__, callbackReq->getFrameCount()); + m_callbackSequencer->dumpList(); + CLOGD("DEBUG(%s[%d]):callbackReq->getFrameCount(%d)", __FUNCTION__, __LINE__, callbackReq->getFrameCount()); + camera3Request->printCallbackDoneState(); + } + + m_callbackTraceCount++; + break; + } + + CLOGV("DEBUG(%s[%d]):callback is done complete(fc:%d), outcnt(%d), comcnt(%d)", + __FUNCTION__, __LINE__, callbackReq->getFrameCount(), + camera3Request->getNumOfOutputBuffer(), camera3Request->getCompleteBufferCount()); + + callbackReqIter++; + + if (callbackReq->isComplete() && + (int)camera3Request->getNumOfOutputBuffer() == camera3Request->getCompleteBufferCount()) { + CLOGV("DEBUG(%s[%d]):callback is done complete(%d)", __FUNCTION__, __LINE__, callbackReq->getFrameCount()); + + m_callbackSequencer->deleteFrameCount(camera3Request->getKey()); + deleteServiceRequest(camera3Request->getFrameCount()); + m_callbackTraceCount = 0; + + m_debugCallbackFPS(); + } + } + + /* m_callbackSequencerLock.unlock(); */ + + return ret; + +} + +// Count number of invocation and print FPS for every 30 frames. +void ExynosCameraRequestManager::m_debugCallbackFPS() { +#ifdef CALLBACK_FPS_CHECK + m_callbackFrameCnt++; + if(m_callbackFrameCnt == 1) { + // Initial invocation + m_callbackDurationTimer.start(); + } else if(m_callbackFrameCnt >= CALLBACK_FPS_CHECK+1) { + m_callbackDurationTimer.stop(); + long long durationTime = m_callbackDurationTimer.durationMsecs(); + + float meanInterval = durationTime / (float)CALLBACK_FPS_CHECK; + CLOGI("INFO(%s[%d]): CALLBACK_FPS_CHECK, duration %lld / 30 = %.2f ms. 
%.2f fps", + __FUNCTION__, __LINE__, durationTime, meanInterval, 1000 / meanInterval); + m_callbackFrameCnt = 0; + } +#endif +} +status_t ExynosCameraRequestManager::m_callbackRequest(ResultRequest result) +{ + status_t ret = NO_ERROR; + camera3_capture_result_t capture_result; + camera3_notify_msg_t notify_msg; + + CLOGV("INFO(%s[%d]):type(%d) key(%u) frameCount(%u) ", + __FUNCTION__, __LINE__, result->getType(), result->getKey(), result->getFrameCount()); + + switch(result->getType()){ + case EXYNOS_REQUEST_RESULT::CALLBACK_NOTIFY_ONLY: + result->getNofityRequest(¬ify_msg); + m_callbackNotifyRequest(¬ify_msg); + break; + case EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY: + case EXYNOS_REQUEST_RESULT::CALLBACK_PARTIAL_3AA: + result->getCaptureRequest(&capture_result); + m_callbackCaptureRequest(&capture_result); + +#if 1 + free((camera_metadata_t *)capture_result.result); + if (capture_result.output_buffers != NULL) { + delete[] capture_result.output_buffers; + capture_result.output_buffers = NULL; + } +#endif + break; + case EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT: + result->getCaptureRequest(&capture_result); + m_callbackCaptureRequest(&capture_result); + +#if 1 + free((camera_metadata_t *)capture_result.result); + if (capture_result.output_buffers != NULL) { + delete[] capture_result.output_buffers; + capture_result.output_buffers = NULL; + } +#endif + break; + case EXYNOS_REQUEST_RESULT::CALLBACK_INVALID: + default: + ret = BAD_VALUE; + CLOGE("ERR(%s[%d]):callbackRequest type have INVALID value type(%d) key(%u) frameCount(%u) ", + __FUNCTION__, __LINE__, result->getType(), result->getKey(), result->getFrameCount()); + break; + } + + return ret; +} + +status_t ExynosCameraRequestManager::m_callbackCaptureRequest(camera3_capture_result_t *result) +{ + status_t ret = NO_ERROR; + CLOGV("DEBUG(%s[%d]):frame number(%d), #out(%d)", + __FUNCTION__, __LINE__, result->frame_number, result->num_output_buffers); + + m_callbackOps->process_capture_result(m_callbackOps, result); + return ret; +} + +status_t ExynosCameraRequestManager::m_callbackNotifyRequest(camera3_notify_msg_t *msg) +{ + status_t ret = NO_ERROR; + + switch (msg->type) { + case CAMERA3_MSG_ERROR: + CLOGW("DEBUG(%s[%d]):msg frame(%d) type(%d) errorCode(%d)", + __FUNCTION__, __LINE__, msg->message.error.frame_number, msg->type, msg->message.error.error_code); + m_callbackOps->notify(m_callbackOps, msg); + break; + case CAMERA3_MSG_SHUTTER: + CLOGV("DEBUG(%s[%d]):msg frame(%d) type(%d) timestamp(%llu)", + __FUNCTION__, __LINE__, msg->message.shutter.frame_number, msg->type, msg->message.shutter.timestamp); + m_callbackOps->notify(m_callbackOps, msg); + break; + default: + CLOGE("ERR(%s[%d]):Msg type is invalid (%d)", __FUNCTION__, __LINE__, msg->type); + ret = BAD_VALUE; + break; + } + return ret; +} + +status_t ExynosCameraRequestManager::m_callbackPackingOutputBuffers(ExynosCameraRequest* callbackRequest) +{ + status_t ret = NO_ERROR; + camera3_stream_buffer_t *output_buffers; + int bufferIndex = -2; + ResultRequestkeys keys; + ResultRequestkeysIterator iter; + uint32_t key = 0; + ResultRequest result; + camera3_stream_buffer_t streamBuffer; + camera3_capture_result_t requestResult; + CameraMetadata resultMeta; + + CLOGV("DEBUG(%s[%d]):frameCount(%d), EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ALL", + __FUNCTION__, __LINE__, callbackRequest->getFrameCount()); + + /* make output stream buffers */ + output_buffers = new camera3_stream_buffer[callbackRequest->getNumOfOutputBuffer()]; + callbackRequest->getResultKeys(&keys, 
EXYNOS_REQUEST_RESULT::CALLBACK_BUFFER_ONLY); + bufferIndex = 0; + + for (iter = keys.begin(); iter != keys.end(); ++iter) { + key = *iter; + result = callbackRequest->popResult(key); + CLOGV("DEBUG(%s[%d]):result(%d)", __FUNCTION__, __LINE__, result->getKey()); + + while (result->getNumOfStreamBuffer() > 0) { + result->popStreamBuffer(&streamBuffer); + output_buffers[bufferIndex] = streamBuffer; + bufferIndex++; + } + } + + /* update pipeline depth */ + callbackRequest->updatePipelineDepth(); + resultMeta = callbackRequest->getResultMeta(); + + /* construct result for service */ + requestResult.frame_number = callbackRequest->getKey(); + requestResult.result = resultMeta.release(); + requestResult.num_output_buffers = bufferIndex; + requestResult.output_buffers = output_buffers; + requestResult.input_buffer = callbackRequest->getInputBuffer(); + requestResult.partial_result = 1; + + ResultRequest resultRequest = NULL; + CLOGV("INFO(%s[%d]):frame number(%d), #out(%d)", + __FUNCTION__, __LINE__, requestResult.frame_number, requestResult.num_output_buffers); + + resultRequest = this->createResultRequest(callbackRequest->getFrameCount(), + EXYNOS_REQUEST_RESULT::CALLBACK_ALL_RESULT, + &requestResult, + NULL); + callbackRequest->pushResult(resultRequest); + + return ret; +} + +/* Increase the pipeline depth value from each request in running request map */ +status_t ExynosCameraRequestManager::m_increasePipelineDepth(RequestInfoMap *map, Mutex *lock) +{ + status_t ret = NO_ERROR; + RequestInfoMapIterator requestIter; + ExynosCameraRequest *request = NULL; + struct camera2_shot_ext shot_ext; + + lock->lock(); + if (map->size() < 1) { + CLOGV("INFO(%s[%d]):map is empty. Skip to increase the pipeline depth", + __FUNCTION__, __LINE__); + ret = NO_ERROR; + goto func_exit; + } + + requestIter = map->begin(); + while (requestIter != map->end()) { + request = requestIter->second; + + request->increasePipelineDepth(); + requestIter++; + } + +func_exit: + lock->unlock(); + return ret; +} + +ExynosCameraCallbackSequencer::ExynosCameraCallbackSequencer() +{ + m_requestFrameCountList.clear(); +} + +ExynosCameraCallbackSequencer::~ExynosCameraCallbackSequencer() +{ + if (m_requestFrameCountList.size() > 0) { + ALOGE("ERR(%s[%d]):destructor size is not ZERO(%zu)", + __FUNCTION__, __LINE__, m_requestFrameCountList.size()); + } +} + +uint32_t ExynosCameraCallbackSequencer::popFrameCount() +{ + status_t ret = NO_ERROR; + uint32_t obj; + + obj = m_pop(EXYNOS_LIST_OPER::SINGLE_FRONT, &m_requestFrameCountList, &m_requestCbListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):m_get failed", __FUNCTION__, __LINE__); + return 0; + } + + return obj; +} + +status_t ExynosCameraCallbackSequencer::pushFrameCount(uint32_t frameCount) +{ + status_t ret = NO_ERROR; + + ret = m_push(EXYNOS_LIST_OPER::SINGLE_BACK, frameCount, &m_requestFrameCountList, &m_requestCbListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):m_push failed, frameCount(%d)", + __FUNCTION__, __LINE__, frameCount); + } + + return ret; +} + +uint32_t ExynosCameraCallbackSequencer::size() +{ + return m_requestFrameCountList.size(); +} + +status_t ExynosCameraCallbackSequencer::getFrameCountList(CallbackListkeys **list) +{ + status_t ret = NO_ERROR; + + *list = &m_requestFrameCountList; + + return ret; +} + +status_t ExynosCameraCallbackSequencer::deleteFrameCount(uint32_t frameCount) +{ + status_t ret = NO_ERROR; + + ret = m_delete(frameCount, &m_requestFrameCountList, &m_requestCbListLock); + if (ret < 0){ + ALOGE("ERR(%s[%d]):m_push failed, frameCount(%d)", 
__FUNCTION__, __LINE__, frameCount); + } + + return ret; +} + +void ExynosCameraCallbackSequencer::dumpList() +{ + CallbackListkeysIter iter; + CallbackListkeys *list = &m_requestFrameCountList; + + m_requestCbListLock.lock(); + + if (list->size() > 0) { + for (iter = list->begin(); iter != list->end();) { + ALOGE("DEBUG(%s[%d]):frameCount(%d), size(%zu)", __FUNCTION__, __LINE__, *iter, list->size()); + iter++; + } + } else { + ALOGE("ERR(%s[%d]):m_getCallbackResults failed, size is ZERO, size(%zu)", + __FUNCTION__, __LINE__, list->size()); + } + + m_requestCbListLock.unlock(); + +} + +status_t ExynosCameraCallbackSequencer::flush() +{ + status_t ret = NO_ERROR; + + m_requestFrameCountList.clear(); + return ret; +} + +status_t ExynosCameraCallbackSequencer::m_init() +{ + status_t ret = NO_ERROR; + + m_requestFrameCountList.clear(); + return ret; +} + +status_t ExynosCameraCallbackSequencer::m_deinit() +{ + status_t ret = NO_ERROR; + + m_requestFrameCountList.clear(); + return ret; +} + +status_t ExynosCameraCallbackSequencer::m_push(EXYNOS_LIST_OPER::MODE operMode, + uint32_t frameCount, + CallbackListkeys *list, + Mutex *lock) +{ + status_t ret = NO_ERROR; + bool flag = false; + + lock->lock(); + + switch (operMode) { + case EXYNOS_LIST_OPER::SINGLE_BACK: + list->push_back(frameCount); + break; + case EXYNOS_LIST_OPER::SINGLE_FRONT: + list->push_front(frameCount); + break; + case EXYNOS_LIST_OPER::SINGLE_ORDER: + case EXYNOS_LIST_OPER::MULTI_GET: + default: + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):m_push failed, mode(%d) size(%zu)", + __FUNCTION__, __LINE__, operMode, list->size()); + break; + } + + lock->unlock(); + + ALOGV("DEBUG(%s[%d]):m_push(%d), size(%zu)", + __FUNCTION__, __LINE__, frameCount, list->size()); + + return ret; +} + +uint32_t ExynosCameraCallbackSequencer::m_pop(EXYNOS_LIST_OPER::MODE operMode, CallbackListkeys *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + CallbackListkeysIter iter; + uint32_t obj = 0; + + bool flag = false; + + lock->lock(); + + switch (operMode) { + case EXYNOS_LIST_OPER::SINGLE_BACK: + if (list->size() > 0) { + obj = list->back(); + list->pop_back(); + } else { + ALOGE("ERR(%s[%d]):m_pop failed, size(%zu)", __FUNCTION__, __LINE__, list->size()); + ret = INVALID_OPERATION; + } + break; + case EXYNOS_LIST_OPER::SINGLE_FRONT: + if (list->size() > 0) { + obj = list->front(); + list->pop_front(); + } else { + ALOGE("ERR(%s[%d]):m_pop failed, size(%zu)", __FUNCTION__, __LINE__, list->size()); + ret = INVALID_OPERATION; + } + break; + case EXYNOS_LIST_OPER::SINGLE_ORDER: + case EXYNOS_LIST_OPER::MULTI_GET: + default: + ret = INVALID_OPERATION; + obj = 0; + ALOGE("ERR(%s[%d]):m_push failed, mode(%d) size(%zu)", + __FUNCTION__, __LINE__, operMode, list->size()); + break; + } + + lock->unlock(); + + ALOGI("INFO(%s[%d]):m_pop(%d), size(%zu)", __FUNCTION__, __LINE__, obj, list->size()); + + return obj; +} + +status_t ExynosCameraCallbackSequencer::m_delete(uint32_t frameCount, CallbackListkeys *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + CallbackListkeysIter iter; + + lock->lock(); + + if (list->size() > 0) { + for (iter = list->begin(); iter != list->end();) { + if (frameCount == (uint32_t)*iter) { + list->erase(iter++); + ALOGV("DEBUG(%s[%d]):frameCount(%d), size(%zu)", + __FUNCTION__, __LINE__, frameCount, list->size()); + } else { + iter++; + } + } + } else { + ret = INVALID_OPERATION; + ALOGE("ERR(%s[%d]):m_getCallbackResults failed, size is ZERO, size(%zu)", + __FUNCTION__, __LINE__, list->size()); + } + + lock->unlock(); + + 
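/* Verbose trace of how many frame-count entries remain after the erase pass above (the lock has already been released at this point). */ + 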
ALOGV("INFO(%s[%d]):size(%zu)", __FUNCTION__, __LINE__, list->size()); + + return ret; +} +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraRequestManager.h b/libcamera/common_v2/ExynosCameraRequestManager.h new file mode 100644 index 0000000..cb8445d --- /dev/null +++ b/libcamera/common_v2/ExynosCameraRequestManager.h @@ -0,0 +1,536 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef EXYNOS_CAMERA_REQUEST_MANAGER_H__ +#define EXYNOS_CAMERA_REQUEST_MANAGER_H__ + +#include +#include +#include +#include +#include +#include + +#include "ExynosCameraDefine.h" +#include "ExynosCameraStreamManager.h" +#include "ExynosCamera3FrameFactory.h" +#include "ExynosCameraParameters.h" +#include "ExynosCamera3Parameters.h" +#include "ExynosCameraSensorInfo.h" +#include "ExynosCameraMetadataConverter.h" + +namespace android { + +using namespace std; + +namespace EXYNOS_REQUEST_RESULT { + enum TYPE { + CALLBACK_INVALID = -1, + CALLBACK_NOTIFY_ONLY = 0x00, + CALLBACK_BUFFER_ONLY = 0x01, + CALLBACK_PARTIAL_3AA = 0x02, + CALLBACK_ALL_RESULT = 0x03, + CALLBACK_MAX = 0x04, + }; +}; + +namespace EXYNOS_LIST_OPER { + enum MODE { + SINGLE_BACK = 0, + SINGLE_FRONT = 1, + SINGLE_ORDER = 2, + MULTI_GET = 3 + }; +}; + +typedef list< camera3_stream_buffer_t* > StreamBufferList; + +class ExynosCamera3; +class ExynosCameraRequest; +class ExynosCamera3FrameFactory; + +typedef status_t (ExynosCamera3::*factory_handler_t)(ExynosCameraRequest*, ExynosCamera3FrameFactory*); +typedef bool (ExynosCamera3::*factory_donehandler_t)(); + +class ExynosCameraRequestResult : public virtual RefBase { +public: + ExynosCameraRequestResult(){}; + ~ExynosCameraRequestResult(){}; + + virtual uint32_t getFrameCount() = 0; + virtual uint32_t getKey() = 0; + + virtual EXYNOS_REQUEST_RESULT::TYPE getType() = 0; + virtual status_t setCaptureRequest(camera3_capture_result_t *captureResult) = 0; + virtual status_t getCaptureRequest(camera3_capture_result_t *captureResult) = 0; + + virtual status_t setNofityRequest(camera3_notify_msg_t *notifyResult) = 0; + virtual status_t getNofityRequest(camera3_notify_msg_t *notifyResult) = 0; + virtual status_t pushStreamBuffer(camera3_stream_buffer_t *streamBuffer) = 0; + virtual status_t popStreamBuffer(camera3_stream_buffer_t *streamBuffer) = 0; + virtual int getNumOfStreamBuffer() = 0; + +}; + +class ExynosCamera3RequestResult : public virtual ExynosCameraRequestResult { +public: + ExynosCamera3RequestResult(uint32_t key, uint32_t frameCount, EXYNOS_REQUEST_RESULT::TYPE type, camera3_capture_result_t *captureResult = NULL, camera3_notify_msg_t *notityMsg = NULL); + ~ExynosCamera3RequestResult(); + + virtual uint32_t getFrameCount(); + virtual uint32_t getKey(); + + virtual EXYNOS_REQUEST_RESULT::TYPE getType(); + virtual status_t setCaptureRequest(camera3_capture_result_t *captureResult); + virtual status_t getCaptureRequest(camera3_capture_result_t *captureResult); + + virtual status_t 
setNofityRequest(camera3_notify_msg_t *notifyResult); + virtual status_t getNofityRequest(camera3_notify_msg_t *notifyResult); + + virtual status_t pushStreamBuffer(camera3_stream_buffer_t *streamBuffer); + virtual status_t popStreamBuffer(camera3_stream_buffer_t *streamBuffer); + virtual int getNumOfStreamBuffer(); + +private: + status_t m_init(); + status_t m_deinit(); + status_t m_pushBuffer(camera3_stream_buffer_t *src, StreamBufferList *list, Mutex *lock); + status_t m_popBuffer(camera3_stream_buffer_t *dst, StreamBufferList *list, Mutex *lock); + int m_getNumOfBuffer(StreamBufferList *list, Mutex *lock); + +private: + uint32_t m_key; + EXYNOS_REQUEST_RESULT::TYPE m_type; + uint32_t m_frameCount; + camera3_capture_result_t m_captureResult; + camera3_notify_msg_t m_notityMsg; + + StreamBufferList m_streamBufferList; + mutable Mutex m_streamBufferListLock; +}; + +namespace EXYNOS_REQUEST { + enum STATE { + INVALID = -1, + SERVICE = 0, + RUNNING = 1 + }; +}; + +typedef sp ResultRequest; +typedef list< uint32_t > ResultRequestkeys; +typedef list< uint32_t >::iterator ResultRequestkeysIterator; + +typedef list< sp > ResultRequestList; +typedef map< uint32_t, sp > ResultRequestMap; +typedef list< sp >::iterator ResultRequestListIterator; +typedef map< uint32_t, sp >::iterator ResultRequestMapIterator; + +typedef map< int32_t, ExynosCamera3FrameFactory* > FrameFactoryMap; +typedef map< int32_t, ExynosCamera3FrameFactory* >::iterator FrameFactoryMapIterator; +typedef list FrameFactoryList; +typedef list::iterator FrameFactoryListIterator; + +class ExynosCameraRequest : public virtual RefBase { +public: + ExynosCameraRequest(){}; + virtual ~ExynosCameraRequest(){}; + + virtual uint32_t getKey() = 0; + virtual void setFrameCount(uint32_t frameCount) = 0; + virtual uint32_t getFrameCount() = 0; + virtual uint8_t getCaptureIntent() = 0; + + virtual camera3_capture_request_t* getService() = 0; + + virtual uint32_t setServiceMeta(CameraMetadata meta) = 0; + virtual CameraMetadata getServiceMeta() = 0; + + virtual status_t setServiceShot(struct camera2_shot_ext *shot) = 0; + virtual status_t getServiceShot(struct camera2_shot_ext *shot) = 0; + + virtual status_t setResultMeta(CameraMetadata meta) = 0; + virtual CameraMetadata getResultMeta() = 0; + virtual status_t setResultShot(struct camera2_shot_ext *shot) = 0; + virtual status_t getResultShot(struct camera2_shot_ext *shot) = 0; + virtual status_t setRequestState(EXYNOS_REQUEST::STATE state) = 0; + virtual EXYNOS_REQUEST::STATE getRequestState() = 0; + virtual status_t setPrevShot(struct camera2_shot_ext *shot) = 0; + virtual status_t getPrevShot(struct camera2_shot_ext *shot) = 0; + + virtual uint32_t getNumOfInputBuffer() = 0; + virtual camera3_stream_buffer_t* getInputBuffer() = 0; + + virtual uint64_t getSensorTimestamp() = 0; + virtual uint32_t getNumOfOutputBuffer() = 0; + virtual const camera3_stream_buffer_t* getOutputBuffers() = 0; + virtual status_t pushResult(ResultRequest result) = 0; + virtual ResultRequest popResult(uint32_t resultKey) = 0; + virtual ResultRequest getResult(uint32_t resultKey) = 0; + virtual status_t getAllResultKeys(ResultRequestkeys *keys) = 0; + virtual status_t getResultKeys(ResultRequestkeys *keys, EXYNOS_REQUEST_RESULT::TYPE type) = 0; + + virtual status_t pushFrameFactory(int StreamID, ExynosCamera3FrameFactory* factory) = 0; + virtual ExynosCamera3FrameFactory* popFrameFactory(int streamID) = 0; + virtual ExynosCamera3FrameFactory* getFrameFactory(int streamID) = 0; + virtual bool isFrameFactory(int 
streamID) = 0; + virtual status_t getFrameFactoryList(FrameFactoryList *list) = 0; + + virtual void increaseCompleteBufferCount(void) = 0; + virtual void resetCompleteBufferCount(void) = 0; + virtual int getCompleteBufferCount(void) = 0; + + virtual void increaseDuplicateBufferCount() = 0; + virtual void resetDuplicateBufferCount() = 0; + virtual int getDuplicateBufferCount(void) = 0; + + virtual void setRequestId(int reqId) = 0; + virtual int getRequestId() = 0; + virtual status_t getAllRequestOutputStreams(List **list) = 0; + virtual status_t pushRequestOutputStreams(int requestStreamId) = 0; + virtual status_t getAllRequestInputStreams(List **list) = 0; + virtual status_t pushRequestInputStreams(int requestStreamId) = 0; + + virtual status_t popResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList) = 0; + virtual status_t popAndEraseResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList) = 0; + virtual status_t setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag) = 0; + virtual bool getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType) = 0; + virtual void printCallbackDoneState() = 0; + virtual bool isComplete() = 0; + + virtual int getStreamId(int bufferIndex) = 0; + + virtual void setNeedInternalFrame(bool isNeedInternalFrame) = 0; + virtual bool getNeedInternalFrame(void) = 0; + virtual void increasePipelineDepth(void) = 0; + virtual void updatePipelineDepth(void) = 0; +}; + +class ExynosCamera3Request : public virtual ExynosCameraRequest { +public: + ExynosCamera3Request(camera3_capture_request_t* request, CameraMetadata previousMeta); + ~ExynosCamera3Request(); + + virtual uint32_t getKey(); + virtual void setFrameCount(uint32_t frameCount); + virtual uint32_t getFrameCount(); + virtual uint8_t getCaptureIntent(); + + virtual camera3_capture_request_t* getService(); + + virtual uint32_t setServiceMeta(CameraMetadata meta); + virtual CameraMetadata getServiceMeta(); + + virtual status_t setServiceShot(struct camera2_shot_ext *shot); + virtual status_t getServiceShot(struct camera2_shot_ext *shot); + + virtual status_t setResultMeta(CameraMetadata meta); + virtual CameraMetadata getResultMeta(); + virtual status_t setResultShot(struct camera2_shot_ext *shot); + virtual status_t getResultShot(struct camera2_shot_ext *shot); + virtual status_t setRequestState(EXYNOS_REQUEST::STATE state); + virtual EXYNOS_REQUEST::STATE getRequestState(); + virtual status_t setPrevShot(struct camera2_shot_ext *shot); + virtual status_t getPrevShot(struct camera2_shot_ext *shot); + + virtual uint64_t getSensorTimestamp(); + virtual uint32_t getNumOfInputBuffer(); + virtual camera3_stream_buffer_t* getInputBuffer(); + + virtual uint32_t getNumOfOutputBuffer(); + virtual const camera3_stream_buffer_t* getOutputBuffers(); + virtual status_t pushResult(ResultRequest result); + virtual ResultRequest popResult(uint32_t resultKey); + virtual ResultRequest getResult(uint32_t resultKey); + virtual status_t getAllResultKeys(ResultRequestkeys *keys); + virtual status_t getResultKeys(ResultRequestkeys *keys, EXYNOS_REQUEST_RESULT::TYPE type); + + virtual status_t pushFrameFactory(int StreamID, ExynosCamera3FrameFactory* factory); + virtual ExynosCamera3FrameFactory* popFrameFactory(int streamID); + virtual ExynosCamera3FrameFactory* getFrameFactory(int streamID); + virtual bool isFrameFactory(int streamID); + virtual status_t getFrameFactoryList(FrameFactoryList *list); + + virtual void increaseCompleteBufferCount(void); + virtual void 
resetCompleteBufferCount(void); + virtual int getCompleteBufferCount(void); + + virtual void increaseDuplicateBufferCount(void); + virtual void resetDuplicateBufferCount(void); + virtual int getDuplicateBufferCount(void); + + virtual void setRequestId(int reqId); + virtual int getRequestId(); + virtual status_t getAllRequestOutputStreams(List **list); + virtual status_t pushRequestOutputStreams(int requestStreamId); + virtual status_t getAllRequestInputStreams(List **list); + virtual status_t pushRequestInputStreams(int requestStreamId); + + virtual status_t popResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList); + virtual status_t popAndEraseResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestList *resultList); + virtual status_t setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag); + virtual bool getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType); + virtual void printCallbackDoneState(); + virtual bool isComplete(); + + virtual int getStreamId(int bufferIndex); + + virtual void setNeedInternalFrame(bool isNeedInternalFrame); + virtual bool getNeedInternalFrame(void); + virtual void increasePipelineDepth(void); + virtual void updatePipelineDepth(void); + +private: + virtual status_t m_init(); + virtual status_t m_deinit(); + virtual status_t m_pushResult(ResultRequest item, ResultRequestMap *list, Mutex *lock); + virtual ResultRequest m_popResult(uint32_t key, ResultRequestMap *list, Mutex *lock); + virtual ResultRequest m_getResult(uint32_t key, ResultRequestMap *list, Mutex *lock); + virtual status_t m_getAllResultKeys(ResultRequestkeys *keylist, ResultRequestMap *list, Mutex *lock); + virtual status_t m_getResultKeys(ResultRequestkeys *keylist, ResultRequestMap *list, EXYNOS_REQUEST_RESULT::TYPE type, Mutex *lock); + + virtual status_t m_push(int key, ExynosCamera3FrameFactory* item, FrameFactoryMap *list, Mutex *lock); + virtual status_t m_pop(int key, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock); + virtual status_t m_get(int streamID, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock); + virtual bool m_find(int streamID, FrameFactoryMap *list, Mutex *lock); + virtual status_t m_getList(FrameFactoryList *factorylist, FrameFactoryMap *list, Mutex *lock); + virtual status_t m_setCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, bool flag, Mutex *lock); + virtual bool m_getCallbackDone(EXYNOS_REQUEST_RESULT::TYPE reqType, Mutex *lock); + virtual status_t m_popResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestMap *list, ResultRequestList *resultList, Mutex *lock); + virtual status_t m_popAndEraseResultsByType(EXYNOS_REQUEST_RESULT::TYPE reqType, ResultRequestMap *list, ResultRequestList *resultList, Mutex *lock); + +private: + uint32_t m_key; + uint32_t m_frameCount; + uint8_t m_captureIntent; + camera3_capture_request_t *m_request; + CameraMetadata m_serviceMeta; + struct camera2_shot_ext m_serviceShot; + CameraMetadata m_resultMeta; + struct camera2_shot_ext m_resultShot; + struct camera2_shot_ext m_prevShot; + int m_streamIdList[HAL_STREAM_ID_MAX]; + + EXYNOS_REQUEST::STATE m_requestState; + int m_requestId; + + ResultRequestMap m_resultList; + mutable Mutex m_resultListLock; + bool m_resultStatus[EXYNOS_REQUEST_RESULT::CALLBACK_MAX]; + mutable Mutex m_resultStatusLock; + int m_numOfOutputBuffers; + int m_numOfCompleteBuffers; + int m_numOfDuplicateBuffers; + List m_requestOutputStreamList; + List m_requestInputStreamList; + + FrameFactoryMap m_factoryMap; + mutable Mutex 
m_factoryMapLock; + + bool m_isNeedInternalFrame; + unsigned int m_pipelineDepth; +}; + +typedef list< uint32_t > CallbackListkeys; +typedef list< uint32_t >::iterator CallbackListkeysIter; + +class ExynosCameraCallbackSequencer{ +public: + ExynosCameraCallbackSequencer(); + ~ExynosCameraCallbackSequencer(); + + status_t pushFrameCount(uint32_t frameCount); + uint32_t popFrameCount(); + uint32_t size(); + status_t getFrameCountList(CallbackListkeys **list); + status_t deleteFrameCount(uint32_t frameCount); + void dumpList(); + status_t flush(); + +private: + status_t m_init(); + status_t m_deinit(); + status_t m_push(EXYNOS_LIST_OPER::MODE operMode, uint32_t frameCount, CallbackListkeys *list, Mutex *lock); + uint32_t m_pop(EXYNOS_LIST_OPER::MODE operMode, CallbackListkeys *list, Mutex *lock); + status_t m_delete(uint32_t frameCount, CallbackListkeys *list, Mutex *lock); + +private: + CallbackListkeys m_requestFrameCountList; + mutable Mutex m_requestCbListLock; + +}; + +namespace EXYNOS_REQUEST_TYPE { + enum TYPE { + PREVIEW = 0, + REPROCESSING = 1, + MAX = 2 + }; +}; + +class ExynosCameraRequestManager : public virtual RefBase { +public: + /* Constructor */ + ExynosCameraRequestManager(int cameraId, ExynosCameraParameters *param); + + /* Destructor */ + virtual ~ExynosCameraRequestManager(); + +public: + /* related to camera3 device operations */ + status_t constructDefaultRequestSettings(int type, camera_metadata_t **request); + + /* Android meta data translation functions */ + ExynosCameraRequest* registerServiceRequest(camera3_capture_request *request); + ExynosCameraRequest* createServiceRequest(); + status_t deleteServiceRequest(uint32_t frameCount); + ExynosCameraRequest* getServiceRequest(uint32_t frameCount); + + status_t setMetaDataConverter(ExynosCameraMetadataConverter *converter); + ExynosCameraMetadataConverter* getMetaDataConverter(); + + + status_t setRequestsInfo(int key, ExynosCamera3FrameFactory *factory, ExynosCamera3FrameFactory *zslFactory = NULL); + ExynosCamera3FrameFactory* getFrameFactory(int key); + + status_t flush(); + + /* other helper functions */ + status_t isPrevRequest(void); + status_t clearPrevRequest(void); + status_t clearPrevShot(void); + + status_t setCallbackOps(const camera3_callback_ops *callbackOps); + status_t callbackRequest(ResultRequest result); + ResultRequest createResultRequest(uint32_t frameCount, + EXYNOS_REQUEST_RESULT::TYPE type, + camera3_capture_result_t *captureResult, + camera3_notify_msg_t *notifyMsg); + status_t getPreviousShot(struct camera2_shot_ext *pre_shot_ext); + uint32_t getRequestCount(void); + uint32_t getServiceRequestCount(void); + void callbackSequencerLock(); + void callbackSequencerUnlock(); + + status_t setFrameCount(uint32_t frameCount, uint32_t requestKey); + +private: + typedef map RequestInfoMap; + typedef map::iterator RequestInfoMapIterator; + typedef list RequestInfoList; + typedef list::iterator RequestInfoListIterator; + typedef map RequestFrameCountMap; + typedef map::iterator RequestFrameCountMapIterator; + + status_t m_pushBack(ExynosCameraRequest* item, RequestInfoList *list, Mutex *lock); + status_t m_popBack(ExynosCameraRequest** item, RequestInfoList *list, Mutex *lock); + status_t m_pushFront(ExynosCameraRequest* item, RequestInfoList *list, Mutex *lock); + status_t m_popFront(ExynosCameraRequest** item, RequestInfoList *list, Mutex *lock); + status_t m_get(uint32_t frameCount, ExynosCameraRequest** item, RequestInfoList *list, Mutex *lock); + + status_t m_push(ExynosCameraRequest* item, 
RequestInfoMap *list, Mutex *lock); + status_t m_pop(uint32_t frameCount, ExynosCameraRequest** item, RequestInfoMap *list, Mutex *lock); + status_t m_get(uint32_t frameCount, ExynosCameraRequest** item, RequestInfoMap *list, Mutex *lock); + + void m_printAllRequestInfo(RequestInfoMap *map, Mutex *lock); + + status_t m_delete(ExynosCameraRequest *item); + + + status_t m_pushBack(ResultRequest item, RequestInfoList *list, Mutex *lock); + ResultRequest m_popBack(RequestInfoList *list, Mutex *lock); + status_t m_pushFront(ResultRequest item, RequestInfoList *list, Mutex *lock); + ResultRequest m_popFront(RequestInfoList *list, Mutex *lock); + status_t m_get(uint32_t frameCount, RequestInfoList *list, Mutex *lock); + + status_t m_pushFactory(int key, ExynosCamera3FrameFactory* item, FrameFactoryMap *list, Mutex *lock); + status_t m_popFactory(int key, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock); + status_t m_getFactory(int key, ExynosCamera3FrameFactory** item, FrameFactoryMap *list, Mutex *lock); + + status_t m_callbackCaptureRequest(camera3_capture_result_t *result); + status_t m_callbackNotifyRequest(camera3_notify_msg_t *msg); + status_t m_callbackPackingOutputBuffers(ExynosCameraRequest* callbackRequest); + + status_t m_getKey(uint32_t *key, uint32_t frameCount); + status_t m_popKey(uint32_t *key, uint32_t frameCount); + + uint32_t m_generateResultKey(); + uint32_t m_getResultKey(); + + status_t m_checkCallbackRequestSequence(); + status_t m_callbackRequest(ResultRequest result); + + status_t m_increasePipelineDepth(RequestInfoMap *map, Mutex *lock); + + void m_debugCallbackFPS(); +#if 0 + /* Other helper functions */ + status_t initShotData(void); + status_t checkAvailableStreamFormat(int format); + uint32_t getFrameNumber(void); +#endif +private: + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + RequestInfoList m_serviceRequests[EXYNOS_REQUEST_TYPE::MAX]; + mutable Mutex m_serviceRequestLock[EXYNOS_REQUEST_TYPE::MAX]; + + RequestInfoMap m_runningRequests[EXYNOS_REQUEST_TYPE::MAX]; + mutable Mutex m_runningRequestLock[EXYNOS_REQUEST_TYPE::MAX]; + + mutable Mutex m_requestLock; + + int m_cameraId; + + camera_metadata_t *m_defaultRequestTemplate[CAMERA3_TEMPLATE_COUNT]; + CameraMetadata m_previousMeta; + + struct camera2_shot_ext m_dummyShot; + struct camera2_shot_ext m_currShot; + // TODO: remove this + ExynosCameraParameters *m_parameters; + ExynosCameraMetadataConverter *m_converter; + + const camera3_callback_ops_t *m_callbackOps; + + int32_t m_requestKey; + mutable Mutex m_requestKeyLock; + + int32_t m_requestResultKey; + mutable Mutex m_requestResultKeyLock; + + FrameFactoryMap m_factoryMap; + mutable Mutex m_factoryMapLock; + FrameFactoryMap m_zslFactoryMap; + mutable Mutex m_zslFactoryMapLock; + + ExynosCameraCallbackSequencer *m_callbackSequencer; + mutable Mutex m_callbackSequencerLock; + + int m_callbackTraceCount; + + struct camera2_shot_ext *m_preShot; + + RequestFrameCountMap m_requestFrameCountMap; + mutable Mutex m_requestFrameCountMapLock; + +#ifdef CALLBACK_FPS_CHECK + int32_t m_callbackFrameCnt = 0; + ExynosCameraDurationTimer m_callbackDurationTimer; +#endif +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCameraSingleton.h b/libcamera/common_v2/ExynosCameraSingleton.h new file mode 100644 index 0000000..9c29253 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSingleton.h @@ -0,0 +1,102 @@ +/* + * Copyright@ Samsung Electronics Co. 
LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosCameraSingleton.h + * \brief header file for ExynosCameraSingleton + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2015/06/10 + * + * Revision History: + * - 2015/06/10 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef EXYNOS_CAMERA_SINGLETON_H +#define EXYNOS_CAMERA_SINGLETON_H + +using namespace android; + +/* Class declaration */ +//! ExynosCameraSingleton is template class to create single object +/*! + * \ingroup ExynosCamera + */ +template class ExynosCameraSingleton +{ +public: + //! getInstance + /*! + \remarks + to get singleton object, it must call this API. + that is why constructor is protected. (caller cannot new object) + + The usage. + + ex 1 : this is original. but other class cannot inherit aa. + + class aa : public ExynosCameraSingleton + { + protected: + friend class ExynosCameraSingleton; + + aa(); + virtual ~aa(); + } + + aa *obj = aa::getInstance(); + + + ex 2 : this make that other class can inherit aa. + class aa + { + protected: + friend class ExynosCameraSingleton; + + aa(); + virtual ~aa(); + } + + class bb : public aa + { + protected: + friend class ExynosCameraSingleton; + + bb(); + virtual ~bb(); + } + + aa *obj1 = ExynosCameraSingleton::getInstance(); + bb *obj2 = ExynosCameraSingleton::getInstance(); + + */ + static T* getInstance(void) + { + static T object; + return &object; + } + +protected: + ExynosCameraSingleton() {} + virtual ~ExynosCameraSingleton() {} + +private: + ExynosCameraSingleton(const ExynosCameraSingleton&); + ExynosCameraSingleton& operator=(const ExynosCameraSingleton&); +}; + +#endif //EXYNOS_CAMERA_SINGLETON_H diff --git a/libcamera/common_v2/ExynosCameraSizeTable.h b/libcamera/common_v2/ExynosCameraSizeTable.h new file mode 100644 index 0000000..d141352 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable.h @@ -0,0 +1,131 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_SIZE_TABLE_H +#define EXYNOS_CAMERA_SIZE_TABLE_H +#include +#include +#include "ExynosCameraConfig.h" + +namespace android { + +#define SIZE_OF_LUT 11 +#define SIZE_OF_RESOLUTION 3 + +enum EXYNOS_CAMERA_SIZE_RATIO_ID { + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +}; + +enum SIZE_LUT_INDEX { + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, + SIZE_LUT_INDEX_END +}; + +/* LSI Sensor */ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_2560_1440) +#include "ExynosCameraSizeTable2P2_WQHD.h" +#include "ExynosCameraSizeTable2P8_WQHD.h" +#include "ExynosCameraSizeTable2T2_WQHD.h" +#else +#include "ExynosCameraSizeTable2P2_FHD.h" +#include "ExynosCameraSizeTable2T2_WQHD.h" +#include "ExynosCameraSizeTableIMX240_2P2_WQHD.h" +#include "ExynosCameraSizeTable2P8_WQHD.h" +#endif +#include "ExynosCameraSizeTable2P2_12M.h" +#include "ExynosCameraSizeTable2P3.h" +#if defined(USE_REAR_FULL_OTF) +#include "ExynosCameraSizeTable3L2_FULL_OTF.h" +#else +#include "ExynosCameraSizeTable3L2.h" +#endif +#include "ExynosCameraSizeTable3L8.h" +#include "ExynosCameraSizeTable3M2.h" +#include "ExynosCameraSizeTable3M3.h" +#include "ExynosCameraSizeTable3H5.h" +#include "ExynosCameraSizeTable3H7.h" +#if defined(USE_REAR_FULL_OTF) +#include "ExynosCameraSizeTable3P3_FULL_OTF.h" +#else +#include "ExynosCameraSizeTable3P3.h" +#endif +#include "ExynosCameraSizeTable4H5.h" +#include "ExynosCameraSizeTable5E2.h" +#if defined(USE_FRONT_VFLIP_SENSOR) +#include "ExynosCameraSizeTable5E3_VFLIP.h" +#elif defined(USE_FRONT_FULL_OTF) +#include "ExynosCameraSizeTable5E3_FULL_OTF.h" +#else +#include "ExynosCameraSizeTable5E3.h" +#endif +#include "ExynosCameraSizeTable5E8.h" +#include "ExynosCameraSizeTable6A3.h" +#include "ExynosCameraSizeTable6B2.h" +#include "ExynosCameraSizeTable8B1.h" +#include "ExynosCameraSizeTable6D1.h" +#include "ExynosCameraSizeTable4E6.h" + +/* Sony Sensor */ +#include "ExynosCameraSizeTableIMX134.h" +#include "ExynosCameraSizeTableIMX135.h" +#include "ExynosCameraSizeTableIMX175.h" +#if 0 +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_2560_1440) +#include "ExynosCameraSizeTableIMX240_WQHD.h" +#include "ExynosCameraSizeTableIMX228_WQHD.h" +#else +#include "ExynosCameraSizeTableIMX240_FHD.h" +#endif +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_2560_1440) +#include "ExynosCameraSizeTableIMX240_WQHD.h" +#include "ExynosCameraSizeTableIMX240_2P2_WQHD.h" +#else +#include "ExynosCameraSizeTableIMX240_FHD.h" +#include "ExynosCameraSizeTableIMX240_2P2_FHD.h" +#endif +#include "ExynosCameraSizeTableIMX228_WQHD.h" +#include "ExynosCameraSizeTableIMX260_2L1_WQHD.h" +#include "ExynosCameraSizeTableIMX219.h" + +/*Siliconfile Sensor */ +#include "ExynosCameraSizeTableSR261.h" +#include "ExynosCameraSizeTableSR259.h" +#include "ExynosCameraSizeTableSR544.h" + +/*OV Sensor */ +#include "ExynosCameraSizeTableOV5670.h" + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable2P2_12M.h b/libcamera/common_v2/ExynosCameraSizeTable2P2_12M.h new file mode 100644 index 0000000..d366eb8 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable2P2_12M.h @@ -0,0 +1,465 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_2P2_12M_H +#define EXYNOS_CAMERA_LUT_2P2_12M_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + CAC_W, + CAC_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_2P2_12M_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 2312 , 1302 , /* [bns ] */ + 2304 , 1296 , /* [cac ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 1736 , 1302 , /* [bns ] */ + 1728 , 1296 , /* [cac ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16),(2592 + 12), /* [sensor ] */ + 1304 , 1302 , /* [bns ] */ + 1296 , 1296 , /* [cac ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 2312 , 1302 , /* [bns ] */ + 1944 , 1296 , /* [cac ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 1736 , 1302 , /* [bns ] */ + 1620 , 1296 , /* [cac ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 2312 , 1302 , /* [bns ] */ + 2160 , 1296 , /* [cac ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 1736 , 1302 , /* [bns ] */ + 1584 , 1296 , /* [cac ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_2P2_12M[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 4624 , 2604 , /* [bns ] */ + 4608 , 2592 , /* [cac ] */ + 4608 , 2592 , /* [bds ] */ + 4608 , 2592 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 3472 , 2604 , /* [bns ] */ + 3456 , 2592 , /* [cac ] */ + 3456 , 2592 , /* [bds ] */ + 3456 , 2592 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16),(2592 + 12), /* [sensor ] 
*/ + 2608 , 2604 , /* [bns ] */ + 2592 , 2592 , /* [cac ] */ + 2592 , 2592 , /* [bds ] */ + 2592 , 2592 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2_12M_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 3080 , 1736 , /* [bns ] */ + 3072 , 1728 , /* [cac ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 2312 , 1736 , /* [bns ] */ + 2304 , 1728 , /* [cac ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16),(2592 + 12), /* [sensor ] */ + 1736 , 1736 , /* [bns ] */ + 1728 , 1728 , /* [cac ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 3080 , 1736 , /* [bns ] */ + 2592 , 1728 , /* [cac ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 2312 , 1736 , /* [bns ] */ + 2160 , 1728 , /* [cac ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 3080 , 1736 , /* [bns ] */ + 2880 , 1728 , /* [cac ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 2312 , 1736 , /* [bns ] */ + 2112 , 1728 , /* [cac ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2_12M[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 4624 , 2604 , /* [bns ] */ + 4608 , 2592 , /* [cac ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 3472 , 2604 , /* [bns ] */ + 3456 , 2592 , /* [cac ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16),(2592 + 12), /* [sensor ] */ + 2608 , 2604 , /* [bns ] */ + 2592 , 2592 , /* [cac ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 4624 , 2604 , /* [bns ] */ + 3888 , 2592 , /* [cac ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 3472 , 2604 , /* [bns ] */ + 3240 , 2592 , /* [cac ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4608 + 16),(2592 + 12), /* [sensor ] */ + 4624 , 2604 , /* [bns ] */ + 4320 , 2592 , /* [cac ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 
1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3456 + 16),(2592 + 12), /* [sensor ] */ + 3472 , 2604 , /* [bns ] */ + 3168 , 2592 , /* [cac ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_12M_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2296 + 16),(1288 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2312 , 1300 , /* [bns ] */ + 2304 , 1296 , /* [cac ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_12M_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1140 + 16),( 638 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1156 , 650 , /* [bns ] */ + 1152 , 648 , /* [cac ] */ + 1152 , 648 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_2P2_12M_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2296 + 16),(1288 + 12), /* [sensor ] */ + 2312 , 1300 , /* [bns ] */ + 2304 , 1296 , /* [cac ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2296 + 16),(1288 + 12), /* [sensor ] */ + 2312 , 1300 , /* [bns ] */ + 1728 , 1296 , /* [cac ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2296 + 16),(1288 + 12), /* [sensor ] */ + 2312 , 1300 , /* [bns ] */ + 1296 , 1296 , /* [cac ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2296 + 16),(1288 + 12), /* [sensor ] */ + 2312 , 1300 , /* [bns ] */ + 1584 , 1296 , /* [cac ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int S5K2P2_12M_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K2P2_12M_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 1152, 648, SIZE_RATIO_16_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K2P2_12M_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4608, 2592, SIZE_RATIO_16_9}, + { 3456, 2592, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2592, 2592, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, 
SIZE_RATIO_4_3}, +}; + +static int S5K2P2_12M_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int S5K2P2_12M_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K2P2_12M_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K2P2_12M_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K2P2_12M_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K2P2_12M_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable2P2_FHD.h b/libcamera/common_v2/ExynosCameraSizeTable2P2_FHD.h new file mode 100644 index 0000000..a27cb23 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable2P2_FHD.h @@ -0,0 +1,712 @@ +/* +** +**copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_2P2_H +#define EXYNOS_CAMERA_LUT_2P2_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Incread for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_2P2_BNS_DUAL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 for 16:9, 2.0 for 4:3 and 1:1 + BDS : NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1984 , 1448 , /* [bcrop ] */ + 1984 , 1448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 2236 , 1490 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1862 , 1490 , /* [bds ] */ + 
1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 2484 , 1490 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1822 , 1490 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +/* + * This is not BNS, BDS (just name is BNS) + * To keep source code. just let the name be. + */ +static int PREVIEW_SIZE_LUT_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Incread for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_2P2_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4496 , 2988 , /* [bcrop ] */ + 4496 , 2988 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1344 , 1080 , /* 
[target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 3684 , 2988 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + + +static int PICTURE_SIZE_LUT_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1088 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 
4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 16), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixe align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4496 , 2988 , /* [bcrop ] */ + 4496 , 2988 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 3684 , 2988 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 
16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_2P2_BNS_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* FHD_60 16:9 () */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 2648 , 1490 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* HD_120 16:9 (Fast AE) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1312 , 738 , /* [bns ] */ + 1280 , 720 , /* [target ] */ + }, +}; + +static int S5K2P2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K2P2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K2P2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K2P2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4608, 2592, SIZE_RATIO_16_9}, + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3456, 2592, SIZE_RATIO_4_3}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2592, 2592, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int S5K2P2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K2P2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, 
+ { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K2P2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K2P2_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K2P2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable2P2_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTable2P2_WQHD.h new file mode 100644 index 0000000..a377d1a --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable2P2_WQHD.h @@ -0,0 +1,637 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_2P2_H +#define EXYNOS_CAMERA_LUT_2P2_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* 
[target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 2656 , 1494 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop 
] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PICTURE_SIZE_LUT_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1088, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* 
[bns ] */ + 5312 , 2988 , /* [bcrop ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1488 , 1488 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + 
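+/*
+ * Illustrative sketch only, not part of the original BSP import: it shows how
+ * a row of the LUTs above could be fetched by ratio id, assuming the column
+ * order documented at the top of this file (RATIO_ID, SENSOR_W/H, BNS_W/H,
+ * BCROP_W/H, BDS_W/H, TARGET_W/H). The helper name is hypothetical; the HAL's
+ * parameter classes are expected to index these tables directly.
+ */
+static inline int *findSizeLutRow(int lut[][SIZE_OF_LUT], int rowCount, int ratioId)
+{
+    for (int i = 0; i < rowCount; i++) {
+        if (lut[i][0] == ratioId)   /* column 0 holds the SIZE_RATIO_* id */
+            return lut[i];
+    }
+    return NULL;                    /* ratio not covered by this LUT */
+}
+/*
+ * Hypothetical usage: the 16:9 VT-call target size above would be read as
+ * row[9] (TARGET_W) and row[10] (TARGET_H):
+ *
+ *   int *row = findSizeLutRow(VTCALL_SIZE_LUT_2P2_BNS,
+ *                             sizeof(VTCALL_SIZE_LUT_2P2_BNS) / sizeof(VTCALL_SIZE_LUT_2P2_BNS[0]),
+ *                             SIZE_RATIO_16_9);
+ */
+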
+static int S5K2P2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K2P2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K2P2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K2P2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int S5K2P2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K2P2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K2P2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + 
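+/*
+ * Illustrative sketch only, not part of the original BSP import: the fps range
+ * tables below store min/max scaled by 1000, in line with the Android
+ * preview-fps-range convention (e.g. { 15000, 30000 } is 15-30 fps). A
+ * hypothetical exact-match check against such a table could look like this.
+ */
+static inline int isSupportedFpsRange(int rangeList[][2], int rangeCount, int min, int max)
+{
+    for (int i = 0; i < rangeCount; i++) {
+        if (rangeList[i][0] == min && rangeList[i][1] == max)
+            return 1;               /* exact match found in the table */
+    }
+    return 0;                       /* caller falls back to a supported range */
+}
+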
+static int S5K2P2_FPS_RANGE_LIST[][2] = +{ + //{ 5000, 5000}, + //{ 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + //{ 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K2P2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable2P3.h b/libcamera/common_v2/ExynosCameraSizeTable2P3.h new file mode 100644 index 0000000..06df10c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable2P3.h @@ -0,0 +1,425 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_2P3_H +#define EXYNOS_CAMERA_LUT_2P3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_2P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Incread for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 
1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_2P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1088 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { 
SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 16), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixe align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int S5K2P3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K2P3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K2P3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K2P3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4608, 2592, SIZE_RATIO_16_9}, + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3456, 2592, SIZE_RATIO_4_3}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 
1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2592, 2592, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int S5K2P3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K2P3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K2P3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K2P3_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K2P3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable2P8_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTable2P8_WQHD.h new file mode 100644 index 0000000..f2fc0d6 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable2P8_WQHD.h @@ -0,0 +1,1593 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_2P8_H +#define EXYNOS_CAMERA_LUT_2P8_H + +//#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_2P8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 4480 , 2988 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 3648 , 2988 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_2P8_BDS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop 
] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_2P8_BDS_BNS15[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_2P8_BDS_BNS20_WQHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1490 , 1490 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1760 , 1440 , /* 
[bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_2P8_BDS_BNS20_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1490 , 1490 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int PICTURE_SIZE_LUT_2P8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_WQHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor 
] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 4480 , 2988 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 3648 , 2988 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_WQHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1992 , 1494 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1494 , 1494 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 2240 , 1494 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1864 , 1494 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 2488 , 1494 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1824 , 1494 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_DIS_WQHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1992 , 1494 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1494 , 1494 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_BNS15_WQHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 
12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_BNS20_WQHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2656 , 1494 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1992 , 1494 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1490 , 1490 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] *//* UHD 
(3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1088 , 1088 , /* [target ] *//* w=1080, Increased for 16 pixel align */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 4480 , 2988 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 3648 , 2988 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int 
VIDEO_SIZE_LUT_2P8_BDS_DIS_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1992 , 1494 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1494 , 1494 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_BNS15_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1088, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_BNS15_DIS_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1992 , 1494 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1494 , 
1494 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_BNS20_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1490 , 1490 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1088, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2P8_BDS_BNS20_DIS_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2656 , 1494 , /* [bcrop ] */ + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1992 , 1494 , /* [bcrop ] */ + 1992 , 1494 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1494 , 1494 , /* [bcrop ] */ + 1494 , 1494 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P8[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P8[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 
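// Editor's note (not part of the imported BSP source): the "w=1620, Reduced for
// 16 pixel align" / "w=1080, Increased for 16 pixel align" comments in the LUTs
// above mean the BDS/target width is rounded to a multiple of 16. Likewise, the
// high-speed entries use a binned sensor readout ("Sensor binning ratio = 2" for
// FHD_60, "= 4" for HD_120). A minimal sketch of the rounding arithmetic, with
// hypothetical macro names (the HAL presumably has its own alignment helpers):

#define ALIGN_DOWN_16(x) ((x) & ~15)          /* 1620 -> 1616, 1350 -> 1344, 1800 -> 1792 */
#define ALIGN_UP_16(x)   (((x) + 15) & ~15)   /* 1080 -> 1088 */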
1280 , 720 , /* [target ] */ + }, + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 960 , 720 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_2P8[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 960 , 720 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_2P8[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + + /* WVGA_300 5:3 (Single) */ + { SIZE_RATIO_5_3, + ( 808 + 16),( 484 + 12), /* [sensor ] *//* Sensor binning ratio = 6 */ + 824 , 496 , /* [bns ] */ + 810 , 486 , /* [bcrop ] */ + 800 , 480 , /* [bds ] */ + 800 , 480 , /* [target ] */ + } +}; + +static int YUV_SIZE_LUT_2P8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 4480 , 2988 , /* [bds ] */ + 4480 , 2988 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 3728 , 2988 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 4976 , 2988 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 3648 , 2988 , /* [bds ] */ + 3648 , 2988 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_2P8[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* 
[sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int LIVE_BROADCAST_SIZE_LUT_2P8[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int S5K2P8_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_2560_1440) + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, +/* HACK: Exynos8890 EVT0 is not support */ +#if !defined (JUNGFRAU_EVT0) + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +/* HACK: Exynos7870 is not support */ +#if !defined (SMDK7870) + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +#endif +#endif +}; + +static int S5K2P8_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_2560_1440) + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, +#endif + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 960, 960, SIZE_RATIO_1_1}, /* For clip movie */ + { 960, 540, SIZE_RATIO_16_9}, /* for GearVR*/ + { 800, 600, SIZE_RATIO_4_3}, /* for GearVR */ + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 640, 360, SIZE_RATIO_16_9}, /* for SWIS & GearVR*/ + { 528, 432, SIZE_RATIO_11_9}, +/* HACK: Exynos8890 EVT0 is not support */ +#if !defined (JUNGFRAU_EVT0) + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#endif +}; + +static int S5K2P8_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 
5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K2P8_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int S5K2P8_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ + /* { 320, 240, SIZE_RATIO_4_3}, */ + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K2P8_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K2P8_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3840, 2160, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 960, 960, SIZE_RATIO_1_1}, /* For clip movie */ + { 864, 480, SIZE_RATIO_16_9}, /* for PLB mode */ + { 432, 240, SIZE_RATIO_16_9}, /* for PLB mode */ +}; + +static int S5K2P8_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +static int S5K2P8_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, +}; + +static int S5K2P8_FPS_RANGE_LIST[][2] = +{ + /* + { 5000, 5000}, + { 7000, 7000}, + */ + { 15000, 15000}, + { 24000, 24000}, + /* { 4000, 30000}, */ + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K2P8_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +/* For HAL3 */ +static int S5K2P8_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 512, 384, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +// { 176, 144, SIZE_RATIO_11_9}, /* Too 
small to create thumbnail */ +}; + +static camera_metadata_rational UNIT_MATRIX_2P8_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_2P8_3X3[] = { + {800, 1024}, {-172, 1024}, {-110, 1024}, + {-463, 1024}, {1305, 1024}, {146, 1024}, + {-119, 1024}, {286, 1024}, {552, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_2P8_3X3[] = { + {1758, 1024}, {-1014, 1024}, {-161, 1024}, + {-129, 1024}, {1119, 1024}, {134, 1024}, + {-13, 1024}, {225, 1024}, {604, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_2P8_3X3[] = { + {682, 1024}, {182, 1024}, {120, 1024}, + {244, 1024}, {902, 1024}, {-122, 1024}, + {14, 1024}, {-316, 1024}, {1142, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_2P8_3X3[] = { + {450, 1024}, {307, 1024}, {227, 1024}, + {8, 1024}, {1049, 1024}, {-33, 1024}, + {-7, 1024}, {-968, 1024}, {1815, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable2T2_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTable2T2_WQHD.h new file mode 100644 index 0000000..41ad14b --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable2T2_WQHD.h @@ -0,0 +1,811 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_2T2_H +#define EXYNOS_CAMERA_LUT_2T2_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_2T2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2976 , 1674 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2240 , 1684 , /* [bns ] */ + 2232 , 1674 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 1680 , 1684 , /* [bns ] */ + 1674 , 1674 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2496 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2240 , 1684 , /* [bns ] */ + 2080 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2784 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2240 , 1684 , /* [bns ] */ + 2032 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_2T2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , 
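// Editor's note (illustrative sketch, not part of the imported BSP source): each
// LUT row above follows the column order given in the header comment -- RATIO_ID
// first, then SENSOR/BNS/BCROP/BDS/TARGET width/height pairs -- and the SENSOR
// pair includes the 16x12 readout margin. Assuming that layout (the index names
// and helper below are hypothetical), a row can be read like this:

enum {
    LUT_RATIO_ID = 0,             /* rows carry 11 values: ratio id + 5 W/H pairs */
    LUT_SENSOR_W, LUT_SENSOR_H,   /* full readout, includes 16x12 margin */
    LUT_BNS_W,    LUT_BNS_H,
    LUT_BCROP_W,  LUT_BCROP_H,
    LUT_BDS_W,    LUT_BDS_H,
    LUT_TARGET_W, LUT_TARGET_H,
};

/* Active (margin-stripped) sensor size for one LUT row. */
static inline void lutActiveSensorSize(const int *row, int *w, int *h)
{
    *w = row[LUT_SENSOR_W] - 16;  /* Sensor Margin Width  = 16 */
    *h = row[LUT_SENSOR_H] - 12;  /* Sensor Margin Height = 12 */
}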
/* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4464 , 3348 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 3360 , 3368 , /* [bns ] */ + 3344 , 3344 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5024 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4192 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5584 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4096 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PICTURE_SIZE_LUT_2T2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 5952 , 3348 , /* [bds ] */ + 5952 , 3348 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4464 , 3348 , /* [bcrop ] */ + 4464 , 3348 , /* [bds ] */ + 4464 , 3348 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 3360 , 3368 , /* [bns ] */ + 3344 , 3344 , /* [bcrop ] */ + 3344 , 3344 , /* [bds ] */ + 3344 , 3344 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2T2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds 
] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 3076 , 2244 , /* [bns ] */ + 3344 , 2232 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 2244 , /* [bns ] */ + 2784 , 2232 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 3976 , 2244 , /* [bns ] */ + 3712 , 2232 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 2244 , /* [bns ] */ + 2720 , 2232 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_2T2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 2440 , /* [bds ] */ + 2560 , 2440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4464 , 3348 , /* [bcrop ] */ + 640 , 480 , /* [bds ] */ + 640 , 480 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 3360 , 3368 , /* [bns ] */ + 3344 , 3344 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5024 , 3356 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4192 , 3356 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5584 , 3356 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4096 , 3356 , /* [bcrop ] 
*/ + 1312 , 1080 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +#ifdef ENABLE_8MP_FULL_FRAME +static int VIDEO_SIZE_LUT_2T2_8MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 8MP */ + + /* 8MP full frame fix all scenario */ + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, +}; +#endif + +#ifdef ENABLE_13MP_FULL_FRAME +static int VIDEO_SIZE_LUT_2T2_13MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 8MP */ + + /* 8MP full frame fix all scenario */ + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters 
class */ + 4800 , 2700 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, +}; +#endif + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2T2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2968 + 16), (1668 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2984 , 1680 , /* [bns ] */ + 2976 , 1674 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2T2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1464 + 16), ( 820 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1480 , 832 , /* [bns ] */ + 1472 , 828 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_2T2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_240 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1464 + 16), ( 820 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1480 , 832 , /* [bns ] */ + 1472 , 828 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_2T2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1680 , /* [bns ] */ + 2976 , 1674 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1680 , /* [bns ] */ + 2224 , 1674 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1680 , /* [bns ] */ + 1674 , 1674 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2032 , 1674 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int S5K2T2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, 
SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K2T2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K2T2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 5952, 3348, SIZE_RATIO_16_9}, + { 5312, 2988, SIZE_RATIO_16_9}, +#endif +#ifdef ENABLE_13MP_FULL_FRAME + { 4800, 2700, SIZE_RATIO_16_9}, +#endif +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 4464, 3348, SIZE_RATIO_4_3}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3344, 3344, SIZE_RATIO_1_1}, + { 3264, 2448, SIZE_RATIO_4_3}, +#elif !defined(ENABLE_8MP_FULL_FRAME) + { 3264, 2448, SIZE_RATIO_4_3}, +#endif + { 3264, 1836, SIZE_RATIO_16_9}, +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 2976, 2976, SIZE_RATIO_1_1}, +#endif + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K2T2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 4128, 3096, SIZE_RATIO_4_3}, + { 4096, 3072, SIZE_RATIO_4_3}, +#elif !defined(ENABLE_8MP_FULL_FRAME) + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 2304, SIZE_RATIO_16_9}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 3200, 2400, SIZE_RATIO_4_3}, +#endif + { 3072, 1728, SIZE_RATIO_16_9}, +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 2988, 2988, SIZE_RATIO_1_1}, +#endif + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2448, 2448, SIZE_RATIO_1_1}, +#endif + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int S5K2T2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K2T2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int 
S5K2T2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int S5K2T2_FPS_RANGE_LIST[][2] = +{ +// { 5000, 5000}, +// { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, +// { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K2T2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, + { 240000, 240000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3H5.h b/libcamera/common_v2/ExynosCameraSizeTable3H5.h new file mode 100644 index 0000000..f49952c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3H5.h @@ -0,0 +1,152 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_3H5_H +#define EXYNOS_CAMERA_LUT_3H5_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +-----------------------------*/ + +static int S5K3H5_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K3H5_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3H5_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K3H5_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 
1536, SIZE_RATIO_4_3}, +}; + +static int S5K3H5_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3H5_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3H5_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3H5_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3H5_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3H7.h b/libcamera/common_v2/ExynosCameraSizeTable3H7.h new file mode 100644 index 0000000..8d439ad --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3H7.h @@ -0,0 +1,263 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_3H7_H +#define EXYNOS_CAMERA_LUT_3H7_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3H7[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 1826 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 2438 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_3H7[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 1826 , /* [bcrop ] */ + 3248 , 1826 , /* [bds ] */ + 3248 , 1826 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 2438 , /* [bcrop ] */ + 3248 , 2438 , /* [bds ] */ + 3248 , 2438 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3H7[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 1826 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 2438 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3H7[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3248 + 16),(2438 + 10), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3248 , 1826 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3H7[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 800 + 16),( 450 + 10), /* [sensor ] */ + 816 , 460 , /* [bns ] */ + 800 , 450 , /* [bcrop ] */ + 800 , 450 , /* [bds ] */ + 800 , 450 , /* [target ] */ + } + +}; + +static int S5K3H7_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K3H7_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 
1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3H7_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K3H7_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int S5K3H7_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3H7_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3H7_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3H7_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3H7_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3L2.h b/libcamera/common_v2/ExynosCameraSizeTable3L2.h new file mode 100644 index 0000000..b6a468d --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3L2.h @@ -0,0 +1,523 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_3L2_H +#define EXYNOS_CAMERA_LUT_3L2_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(2322 + 10), /* [sensor ] */ + 4144 , 2332 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3776 , 3096 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3L2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2056 , 1542 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(2322 + 10), /* [sensor ] */ + 4144 , 2332 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 4128 , 2322 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 4128 , 3096 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 3088 , 3088 , /* [target ] */ + }, + /* 3:2 
(Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 4128 , 2752 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 3872 , 3096 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 4128 , 2476 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3776 , 3096 , /* [bcrop ] */ + 3776 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 3776 , 3096 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4128 + 16),(2322 + 10), /* [sensor ] */ + 4144 , 2332 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_1_1, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3776 , 3096 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] */ + 3840 , 2160 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2056 , 1542 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3L2[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 2056 , 1156 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int 
VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3L2[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1024 + 16),( 574 + 10), /* [sensor ] */ + 1040 , 584 , /* [bns ] */ + 1024 , 574 , /* [bcrop ] */ + 800 , 480 , /* [bds ] */ + 800 , 480 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2056 + 16), (1156 + 10), /* [sensor ] */ + 2072, 1166, /* [bns ] */ + 2056, 1156, /* [bcrop ] */ + 1920, 1080, /* [bds ] */ + 1920, 1080 /* [target ] */ + }, + { SIZE_RATIO_4_3, + (2056 + 16),(1544 + 10), /* [sensor ] */ + 2072, 1554, /* [bns ] */ + 2056, 1544, /* [bcrop ] */ + 960, 720, /* [bds ] */ + 960, 720 /* [target ] */ + }, + { SIZE_RATIO_1_1, + (2056 + 16), (1544 + 10), /* [sensor ] */ + 2072, 1554, /* [bns ] */ + 1536, 1536, /* [bcrop ] */ + 720, 720, /* [bds ] */ + 720, 720 /* [target ] */ + }, + { SIZE_RATIO_11_9, + (2056 + 16), (1544 + 10), /* [sensor ] */ + 2072, 1554, /* [bns ] */ + 1888, 1544, /* [bcrop ] */ + 352, 288, /* [bds ] */ + 352, 288 /* [target ] */ + } +}; +static int S5K3L2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K3L2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 3088, 3088, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K3L2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int S5K3L2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3L2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 
144, SIZE_RATIO_11_9} +}; + +static int S5K3L2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3L2_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 3088, 3088, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, +}; + +static int S5K3L2_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3L2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_3L2_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_3L2_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_3L2_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_3L2_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_3L2_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3L2_FULL_OTF.h b/libcamera/common_v2/ExynosCameraSizeTable3L2_FULL_OTF.h new file mode 100644 index 0000000..198a8bd --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3L2_FULL_OTF.h @@ -0,0 +1,564 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_3L2_H +#define EXYNOS_CAMERA_LUT_3L2_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(2322 + 10), /* [sensor ] */ + 4144 , 2332 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3L2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 2048 , 1152 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2056 , 1542 , /* [bcrop ] */ + 2056 , 1542 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(2322 + 10), /* [sensor ] */ + 4144 , 2332 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 4128 , 2322 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 4128 , 3096 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 3088 , 3088 , /* [target ] */ + }, + /* 3:2 
(Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 4128 , 2752 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 3872 , 3096 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 4128 , 2476 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 3776 , 3096 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4128 + 16),(2322 + 10), /* [sensor ] */ + 4144 , 2332 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] */ + 3840 , 2160 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2056 , 1542 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3L2[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* FHD 60fps 16:9 */ + { SIZE_RATIO_16_9, + (2056 + 16),( 1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 2048 , 1156 , /* [bcrop ] */ + 2048 , 1156 , /* [bds ] */ + 2048 , 1156 , /* [target ] */ + }, + /* 60fps 4:3 */ + { SIZE_RATIO_4_3, + (2056 + 16),(1542 + 10), /* 
[sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1536 , /* [bcrop ] */ + 2048 , 1536 , /* [bds ] */ + 2048 , 1536 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3L2[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* 120fps 16:9 */ + { SIZE_RATIO_16_9, + ( 992 + 16),( 558 + 10), /* [sensor ] */ + 1008 , 568 , /* [bns ] */ + 992 , 558 , /* [bcrop ] */ + 992 , 558 , /* [bds ] */ + 992 , 558 , /* [target ] */ + }, + /* 120fps 4:3 */ + { SIZE_RATIO_4_3, + ( 992 + 16),( 744 + 10), /* [sensor ] */ + 1008 , 754 , /* [bns ] */ + 992 , 744 , /* [bcrop ] */ + 992 , 744 , /* [bds ] */ + 992 , 744 , /* [target ] */ + }, +}; + +static int VTCALL_SIZE_LUT_3L2[][SIZE_OF_LUT] = +{ + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 2048 , 1152 , /* [bds ] */ + 2048 , 1152 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1536 , /* [bcrop ] */ + 2048 , 1536 , /* [bds ] */ + 2048 , 1536 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 1536 , 1536 , /* [bcrop ] */ + 1536 , 1536 , /* [bds ] */ + 1536 , 1536 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 1872 , 1536 , /* [bcrop ] */ + 1872 , 1536 , /* [bds ] */ + 1872 , 1536 , /* [target ] */ + } +}; + +static int S5K3L2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#else + // for android 5.1 CTS + // Aspect ratio of maximum preview size should be same with maximum picture size's AR + // https://android.googlesource.com/platform/frameworks/base/+/a0496d3%5E!/ + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#else + { 1920, 1080, SIZE_RATIO_16_9}, // for USE_ADAPTIVE_CSC_RECORDING +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 960, 540, SIZE_RATIO_16_9}, // Gear VR + { 800, 600, SIZE_RATIO_4_3}, // Gear VR + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 640, 360, SIZE_RATIO_16_9}, // Gear VR + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 3088, 3088, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_11_9}, +}; + +static int 
S5K3L2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1872, 1536, SIZE_RATIO_11_9}, + { 1536, 1536, SIZE_RATIO_1_1}, + { 992, 744, SIZE_RATIO_4_3}, + { 992, 558, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3L2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3L2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3L2_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 3088, 3088, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K3L2_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +static int S5K3L2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, +}; + +static int S5K3L2_FPS_RANGE_LIST[][2] = +{ +// { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, +// { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3L2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_3L2_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_3L2_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_3L2_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_3L2_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_3L2_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3L8.h b/libcamera/common_v2/ExynosCameraSizeTable3L8.h new file mode 100644 index 0000000..f2b1e28 
--- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3L8.h @@ -0,0 +1,949 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_3L8_H +#define EXYNOS_CAMERA_LUT_3L8_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 0, + Sensor Margin Height = 0 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3L8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 2976 , 2976 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ + 4208 , 2804 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ +#else + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ + 3888 , 3120 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [target ] */ /* w=1350, Reduced for 16 pixel align */ +#else + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ + 4208 , 2512 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ +#else + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ + 3808 , 3120 , /* [bds ] */ +#ifdef 
LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ +#else + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_3L8_BNS_15[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 1548 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 2064 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2080 , 2080 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1868 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1684 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1684 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2544 , 2080 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_3L8_BNS_20[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1184 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* 
[sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1560 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1402 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1952 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1264 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1920 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_3L8_BDS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ 
+#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PICTURE_SIZE_LUT_3L8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 4128 , 2322 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 4128 , 3096 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 2976 , 2976 , /* [bds ] */ + 2976 , 2976 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 2976 , 2976 , /* [bds ] */ + 1088 , 1088 , /* [target ] *//* w=1080, Increased for 16 pixel align */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ + 4208 , 2804 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ + 3888 , 3120 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ + 4208 , 2512 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ + 3808 , 3120 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int VIDEO_SIZE_LUT_3L8_BNS_15[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 1548 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) 
+#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2752 , 1548 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2752 , 1548 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 2064 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2080 , 2080 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1868 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2608 , 2808 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1684 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2544 , 2080 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L8_BNS_20[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1184 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2104 , 1184 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1560 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1560 , 1560 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1402 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1952 , 1560 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + 
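+    /* Several [bds]/[target] widths in these LUTs carry notes such as
+     * "w=1620, Reduced for 16 pixel align" or "w=1080, Increased for 16 pixel
+     * align". A minimal sketch of that rounding is shown here; the two macros
+     * are hypothetical helpers for illustration only, not part of this BSP. */
+#define EX_ALIGN_DOWN_16(x) ((x) & ~15)          /* round down to a multiple of 16 */
+#define EX_ALIGN_UP_16(x)   (((x) + 15) & ~15)   /* round up to a multiple of 16 */
+    /* 3:2  : 1620 -> EX_ALIGN_DOWN_16(1620) == 1616
+     * 5:4  : 1350 -> EX_ALIGN_DOWN_16(1350) == 1344
+     * 5:3  : 1800 -> EX_ALIGN_DOWN_16(1800) == 1792
+     * 11:9 : 1320 -> EX_ALIGN_DOWN_16(1320) == 1312
+     * 1:1  : 1080 -> EX_ALIGN_UP_16(1080)   == 1088 */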
/* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1264 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1920 , 1560 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3L8_BDS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 1488 , 1488 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_3L8_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* WVGA_300 5:3 (Single) */ + { SIZE_RATIO_16_9, + ( 808 + 16),( 484 + 12), /* [sensor ] *//* Sensor binning ratio = 6 */ + 824 , 496 , /* [bns ] */ + 810 , 486 , /* [bcrop ] */ + 800 , 480 , /* [bds ] */ + 800 , 480 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_3L8_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 
(VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int S5K3L8_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3L8_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3L8_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K3L8_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int S5K3L8_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler 
correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3L8_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K3L8_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int S5K3L8_FPS_RANGE_LIST[][2] = +{ + //{ 5000, 5000}, + //{ 7000, 7000}, + { 15000, 15000}, + //{ 24000, 24000}, + //{ 4000, 30000}, + //{ 10000, 30000}, + //{ 15000, 30000}, + //{ 30000, 30000}, +}; + +static int S5K3L8_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3M2.h b/libcamera/common_v2/ExynosCameraSizeTable3M2.h new file mode 100644 index 0000000..d2708e2 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3M2.h @@ -0,0 +1,558 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_3M2_H +#define EXYNOS_CAMERA_LUT_3M2_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3M2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3096 + 16),(3096 + 10), /* [sensor ] */ + 3112 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3488 , 2322 , /* [bcrop ] */ + 3488 , 2322 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3736 , 2322 , /* [bcrop ] */ + 3736 , 2322 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3870 , 2322 , /* [bcrop ] */ + 3870 , 2322 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 2848 , 2322 , /* [bcrop ] */ + 2848 , 2322 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3M2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2064 , 1160 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2064 , 1548 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_3M2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 4128 , 2322 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 4128 , 3096 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3096 + 16),(3096 + 10), /* [sensor ] */ + 3112 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 3088 , 3088 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3M2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 
1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3096 + 16),(3096 + 10), /* [sensor ] */ + 3112 , 3106 , /* [bns ] */ + 3088 , 3088 , /* [bcrop ] */ + 3088 , 3088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3488 , 2322 , /* [bcrop ] */ + 3488 , 2322 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3736 , 2322 , /* [bcrop ] */ + 3736 , 2322 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 3870 , 2322 , /* [bcrop ] */ + 3870 , 2322 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 4144 , 3106 , /* [bns ] */ + 2848 , 2322 , /* [bcrop ] */ + 2848 , 2322 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3M2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2064 , 1160 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4128 + 16),(3096 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2064 , 1548 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3M2_2MP_SENSOR_BINNING[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 2048 , 1152 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 1536 , 1152 , /* [bcrop ] */ + 1536 , 1152 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 1152 , 1152 , /* [bcrop ] */ + 1152 , 1152 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 1728 , 1152 , /* [bcrop ] */ + 1728 , 1152 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 1440 , 1152 , /* [bcrop ] */ + 1440 , 1152 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 1920 , 1152 , /* [bcrop ] */ + 1920 , 1152 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (2056 + 16),(1156 + 
10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 1408 , 1152 , /* [bcrop ] */ + 1408 , 1152 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3M2[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2056 + 16),(1156 + 10), /* [sensor ] */ + 2072 , 1166 , /* [bns ] */ + 2056 , 1156 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3M2[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1020 + 16),( 574 + 10), /* [sensor ] */ + 1036 , 584 , /* [bns ] */ + 1008 , 574 , /* [bcrop ] */ + 1008 , 574 , /* [bds ] */ + 800 , 480 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_3M2[][SIZE_OF_LUT] = +{ + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1152 , /* [bcrop ] */ + 2048 , 1152 , /* [bds ] */ + 2048 , 1152 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 2048 , 1536 , /* [bcrop ] */ + 2048 , 1536 , /* [bds ] */ + 2048 , 1536 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 1536 , 1536 , /* [bcrop ] */ + 1536 , 1536 , /* [bds ] */ + 1536 , 1536 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2056 + 16),(1542 + 10), /* [sensor ] */ + 2072 , 1552 , /* [bns ] */ + 1872 , 1536 , /* [bcrop ] */ + 1872 , 1536 , /* [bds ] */ + 1872 , 1536 , /* [target ] */ + } +}; + +static int S5K3M2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K3M2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3M2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3088, 3088, SIZE_RATIO_1_1}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K3M2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, 
SIZE_RATIO_4_3}, +}; + +static int S5K3M2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 288, SIZE_RATIO_16_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3M2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 448, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3M2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3M2_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 3088, 3088, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K3M2_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +static int S5K3M2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, +}; + +static int S5K3M2_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3M2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_3M2_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_3M2_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_3M2_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_3M2_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_3M2_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3M3.h b/libcamera/common_v2/ExynosCameraSizeTable3M3.h new file mode 100644 index 0000000..060e55b --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3M3.h @@ -0,0 +1,949 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_3M3_H +#define EXYNOS_CAMERA_LUT_3M3_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 0, + Sensor Margin Height = 0 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3M3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 2976 , 2976 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ + 4208 , 2804 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ +#else + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ + 3888 , 3120 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [target ] */ /* w=1350, Reduced for 16 pixel align */ +#else + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ + 4208 , 2512 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ +#else + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ + 3808 , 3120 , /* [bds ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ +#else + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_3M3_BNS_15[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 1548 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 
4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 2064 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2080 , 2080 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1868 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1684 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1684 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2544 , 2080 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_3M3_BNS_20[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1184 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1560 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1402 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns 
] */ + 1952 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1264 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1920 , 1560 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_3M3_BDS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 2656 , 1494 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int 
PICTURE_SIZE_LUT_3M3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ + 4128 , 2322 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 4128 , 3096 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 2976 , 2976 , /* [bds ] */ + 2976 , 2976 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3M3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ + 4128 , 2322 , /* [bds ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 4128 , 3096 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 2976 , 2976 , /* [bds ] */ + 1088 , 1088 , /* [target ] *//* w=1080, Increased for 16 pixel align */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ + 4208 , 2804 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ + 3888 , 3120 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ + 4208 , 2512 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ + 3808 , 3120 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int VIDEO_SIZE_LUT_3M3_BNS_15[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 1548 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2752 , 1548 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2752 , 1548 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, 
+ (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2752 , 2064 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2080 , 2080 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1868 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2608 , 2808 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2800 , 1684 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2800 , 2080 , /* [bns ] */ + 2544 , 2080 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3M3_BNS_20[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1184 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2104 , 1184 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1560 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1560 , 1560 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1402 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1952 , 1560 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 2104 , 1264 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 2104 , 1560 , /* [bns ] */ + 1920 , 1560 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3M3_BDS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 
16:9 (Single) */ + { SIZE_RATIO_16_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 2322 , /* [bcrop ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4128 , 3096 , /* [bcrop ] */ + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 2976 , 2976 , /* [bcrop ] */ + 1488 , 1488 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2804 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3888 , 3120 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 4208 , 2512 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4208 + 0 ),(3120 + 0) , /* [sensor ] */ + 4208 , 3120 , /* [bns ] */ + 3808 , 3120 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* WVGA_300 5:3 (Single) */ + { SIZE_RATIO_16_9, + ( 808 + 16),( 484 + 12), /* [sensor ] *//* Sensor binning ratio = 6 */ + 824 , 496 , /* [bns ] */ + 810 , 486 , /* [bcrop ] */ + 800 , 480 , /* [bds ] */ + 800 , 480 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_3M3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 
(VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int S5K3M3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3M3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3M3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K3M3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int S5K3M3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3M3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int 
S5K3M3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int S5K3M3_FPS_RANGE_LIST[][2] = +{ + //{ 5000, 5000}, + //{ 7000, 7000}, + { 15000, 15000}, + //{ 24000, 24000}, + //{ 4000, 30000}, + //{ 10000, 30000}, + //{ 15000, 30000}, + //{ 30000, 30000}, +}; + +static int S5K3M3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3P3.h b/libcamera/common_v2/ExynosCameraSizeTable3P3.h new file mode 100644 index 0000000..fdde0b7 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3P3.h @@ -0,0 +1,655 @@ +/* +** +**copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_3P3_H +#define EXYNOS_CAMERA_LUT_3P3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 
10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3P3_BNS_DUAL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 for 16:9, 2.0 for 4:3 and 1:1 + BDS : NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +/* + * This is not BNS, BDS (just name is BNS) + * To keep source code. just let the name be. 
+ */ +static int PREVIEW_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3072 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3686 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 2764 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4224 , 3456 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3P3_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10),//(5312 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 2602 ,//5328 , 3000 , /* [bns ] */ + 4616 , 2600 ,//5312 , 2988 , /* [bcrop ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bds ] */ + 1920 , 1080 ,//1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10),//(3984 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//4000 , 3000 , /* [bns ] */ + 4616 , 3464 ,//3984 , 2988 , /* [bcrop ] */ + 4608 , 3456 ,//3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3456 + 16),(3456 + 10),//(3984 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//4000 , 3000 , /* [bns ] */ + 3464 , 3464 ,//2988 , 2988 , /* [bcrop ] */ + 3456 , 3456 ,//2988 , 2988 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, +}; + + +static int PICTURE_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 4608 , 2592 , /* [bds ] */ + 4608 , 2592 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 4608 , 3456 , /* [bds ] */ + 4608 , 3456 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 3456 , 3456 , /* [bds ] */ + 3456 , 3456 , /* [target ] */ + 
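/*
 * Illustrative sketch, not part of the imported header: per the layout comment
 * at the top of this file, each SIZE_OF_LUT row is assumed to hold the ratio id
 * in column 0 followed by sensor, BNS, bcrop, BDS and target width/height pairs,
 * with the sensor columns already including the sensor margin (16 wide, 10 high
 * for the full-resolution rows). The column enum and helper below use
 * hypothetical names, not HAL symbols.
 */
enum {
    LUT_RATIO_ID = 0,
    LUT_SENSOR_W, LUT_SENSOR_H,
    LUT_BNS_W,    LUT_BNS_H,
    LUT_BCROP_W,  LUT_BCROP_H,
    LUT_BDS_W,    LUT_BDS_H,
    LUT_TARGET_W, LUT_TARGET_H   /* last column when SIZE_OF_LUT is 11 */
};

/* Active (margin-stripped) sensor size for one full-resolution LUT row. */
static inline void lutActiveSensorSizeSketch(const int *row, int *w, int *h)
{
    *w = row[LUT_SENSOR_W] - 16;   /* Sensor Margin Width  = 16 */
    *h = row[LUT_SENSOR_H] - 10;   /* Sensor Margin Height = 10 */
}

/* usage sketch: lutActiveSensorSizeSketch(PREVIEW_SIZE_LUT_3P3[0], &w, &h); */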
}, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3056 , 2448 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + 
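/*
 * Illustrative note, not part of the imported header: the "(active + margin)"
 * expressions in the sensor columns encode the full readout size, so for the
 * full-resolution 4:3 rows (4608 + 16) x (3456 + 10) = 4624 x 3466, which is
 * the [bns] size quoted whenever the BNS ratio is 1.0. The constants below are
 * hypothetical, shown only to make that arithmetic explicit.
 */
static const int SENSOR_MARGIN_W_SKETCH = 16;
static const int SENSOR_MARGIN_H_SKETCH = 10;
static const int S5K3P3_FULL_READOUT_W_SKETCH = 4608 + SENSOR_MARGIN_W_SKETCH; /* 4624 */
static const int S5K3P3_FULL_READOUT_H_SKETCH = 3456 + SENSOR_MARGIN_H_SKETCH; /* 3466 */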
(4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4224 , 3456 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3P3_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10),//(5312 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 2602 ,//5328 , 3000 , /* [bns ] */ + 4616 , 2600 ,//5312 , 2988 , /* [bcrop ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4616 , 3464 , /* [bcrop ] */ + 4608 , 3456 , /* [bds ] */ + 640 , 480 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3456 + 16),(3456 + 10), /* [sensor ] */ + 4626 , 3466 , /* [bns ] */ + 3464 , 3464 , /* [bcrop ] */ + 3456 , 3456 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1136 + 16),( 632 + 16), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1152 , 648 , /* [bns ] */ + 1148 , 644 , /* [bcrop ] */ + 1136 , 638 , /* [bds ] */ + 1136 , 638 , /* [target ] */ + } +}; + +static int S5K3P3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +// { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3P3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, 
SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3P3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4608, 3456, SIZE_RATIO_4_3}, + { 4608, 2592, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K3P3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3456, 2592, SIZE_RATIO_4_3}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2592, 2592, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int S5K3P3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3P3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3P3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3P3_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3P3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable3P3_FULL_OTF.h b/libcamera/common_v2/ExynosCameraSizeTable3P3_FULL_OTF.h new file mode 100644 index 0000000..c6864cd --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable3P3_FULL_OTF.h @@ -0,0 +1,921 @@ +/* +** +**copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_3P3_H +#define EXYNOS_CAMERA_LUT_3P3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3P3_BNS_DUAL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 for 16:9, 2.0 for 4:3 and 1:1 + BDS : NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* 
w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +/* + * This is not BNS, BDS (just name is BNS) + * To keep source code. just let the name be. + */ +static int PREVIEW_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3P3_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10),//(5312 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 2602 ,//5328 , 3000 , /* [bns ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bcrop ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bds ] */ + 1920 , 1080 ,//1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10),//(3984 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//4000 , 3000 , /* [bns ] */ + 4608 , 3456 ,//3984 , 2988 , /* [bcrop ] */ + 4608 , 3456 ,//3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10),//(3984 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//4000 , 3000 , /* [bns ] */ + 3456 , 3456 ,//2988 , 2988 , /* [bcrop ] */ + 3456 , 3456 ,//2988 , 2988 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 
3466 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_3P3_FULL_OTF_HAL3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(3456 + 10),//(5312 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//5328 , 3000 , /* [bns ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bcrop ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bds ] */ + 1920 , 1080 ,//1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10),//(3984 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//4000 , 3000 , /* [bns ] */ + 4608 , 3456 ,//3984 , 2988 , /* [bcrop ] */ + 4608 , 3456 ,//3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10),//(3984 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 3466 ,//4000 , 3000 , /* [bns ] */ + 3456 , 3456 ,//2988 , 2988 , /* [bcrop ] */ + 3456 , 3456 ,//2988 , 2988 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 4608 , 2592 , /* [bds ] */ + 4608 , 2592 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 4608 , 3456 , /* [bds ] */ + 4608 , 3456 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 3456 , 3456 , /* [bds ] */ + 3456 , 3456 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 4128 , 2752 , /* [target ] 
*/ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] */ + 3872 , 3096 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 4128 , 2476 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] */ + 3776 , 3096 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] 
*/ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_3P3_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10),//(5312 + 16),(2988 + 12), /* [sensor ] */ + 4624 , 2602 ,//5328 , 3000 , /* [bns ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bcrop ] */ + 4608 , 2592 ,//5312 , 2988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 4608 , 3456 , /* [bds ] */ + 640 , 480 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4626 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 3456 , 3456 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2296 + 16),(1290 + 10), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2312 , 1300 , /* [bns ] */ + 2296 , 1290 , /* [bcrop ] */ + 2296 , 1290 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3P3_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1136 + 16),( 638 + 10), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1152 , 648 , /* [bns ] */ + 1136 , 638 , /* [bcrop ] */ + 1136 , 638 , /* [bds ] */ + 1136 , 638 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_3P3[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1264 + 16),( 708 + 12), /* [sensor ] *//* 
Sensor binning ratio = 4 */ + 1280 , 720 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + + /* WVGA_300 5:3 (Single) */ + { SIZE_RATIO_5_3, + ( 808 + 16),( 484 + 12), /* [sensor ] *//* Sensor binning ratio = 6 */ + 824 , 496 , /* [bns ] */ + 810 , 486 , /* [bcrop ] */ + 800 , 480 , /* [bds ] */ + 800 , 480 , /* [target ] */ + } +}; + +static int YUV_SIZE_LUT_3P3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4608 + 16),(2592 + 10), /* [sensor ] */ + 4624 , 2602 , /* [bns ] */ + 4608 , 2592 , /* [bcrop ] */ + 4608 , 2592 , /* [bds ] */ + 4608 , 2592 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4608 , 3456 , /* [bcrop ] */ + 4608 , 3456 , /* [bds ] */ + 4608 , 3456 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3456 , 3456 , /* [bcrop ] */ + 3456 , 3456 , /* [bds ] */ + 3456 , 3456 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2752 , /* [bcrop ] */ + 4128 , 2752 , /* [bds ] */ + 4128 , 2752 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3872 , 3096 , /* [bcrop ] */ + 3872 , 3096 , /* [bds ] */ + 3872 , 3096 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 4128 , 2476 , /* [bcrop ] */ + 4128 , 2476 , /* [bds ] */ + 4128 , 2476 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4608 + 16),(3456 + 10), /* [sensor ] */ + 4624 , 3466 , /* [bns ] */ + 3784 , 3096 , /* [bcrop ] */ + 3784 , 3096 , /* [bds ] */ + 3776 , 3096 , /* [target ] */ + } +}; + + +static int S5K3P3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +// { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3P3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K3P3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4608, 3456, SIZE_RATIO_4_3}, + { 4608, 2592, SIZE_RATIO_16_9}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, 
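/*
 * Illustrative sketch, not part of the imported header: each SIZE_OF_RESOLUTION
 * row is assumed to be { width, height, ratio id }, and the *_FPS_RANGE_LIST
 * tables further below appear to store { min, max } pairs in fps x 1000 (so
 * { 15000, 30000 } is a 15-30 fps range). A lookup over a resolution list could
 * look like the helper below; findRatioIdSketch() is a hypothetical name, not a
 * HAL API.
 */
static inline int findRatioIdSketch(int (*list)[SIZE_OF_RESOLUTION],
                                    int count, int w, int h)
{
    for (int i = 0; i < count; i++) {
        if (list[i][0] == w && list[i][1] == h)
            return list[i][2];   /* SIZE_RATIO_* id */
    }
    return -1;                   /* size not advertised by this sensor */
}

/*
 * usage sketch:
 *   int ratio = findRatioIdSketch(S5K3P3_PICTURE_LIST,
 *                   sizeof(S5K3P3_PICTURE_LIST) / sizeof(S5K3P3_PICTURE_LIST[0]),
 *                   1920, 1080);
 */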
+ { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int S5K3P3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3456, 2592, SIZE_RATIO_4_3}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2592, 2592, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1136, 638, SIZE_RATIO_16_9}, +}; + +static int S5K3P3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K3P3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K3P3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K3P3_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +static int S5K3P3_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, +}; + +static int S5K3P3_FPS_RANGE_LIST[][2] = +{ +// { 5000, 5000}, +// { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, +// { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K3P3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_3P3_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_3P3_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_3P3_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_3P3_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_3P3_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; + +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable4E6.h b/libcamera/common_v2/ExynosCameraSizeTable4E6.h new file mode 100644 index 0000000..26dd570 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable4E6.h @@ -0,0 +1,752 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_4E6_H +#define EXYNOS_CAMERA_LUT_4E6_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1936 , 1936 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1072 , 1072 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1072 , 1072 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1728 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ +#endif + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2432 , 1950 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ +#endif + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1554 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ +#endif + }, + /* 11:9 (Single) 
*/ + { SIZE_RATIO_11_9, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2368 , 1950 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ +#endif + } +}; + +static int DUAL_PREVIEW_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 2592 , 1458 , /* [bds ] */ + 2592 , 1458 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 2592 , 1944 , /* [bds ] */ + 2592 , 1944 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1936 , 1936 , /* [bcrop ] */ + 1936 , 1936 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1936 , 1936 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1728 , /* [bcrop ] */ + 2592 , 1728 , /* [bds ] */ + 2592 , 1728 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2432 , 1950 , /* [bcrop ] */ + 2432 , 1950 , /* [bds ] */ + 2432 , 1950 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1554 , /* [bcrop ] */ + 2592 , 1554 , /* [bds ] */ + 2592 , 1554 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2368 , 1950 , /* [bcrop ] */ + 2368 , 1950 , /* [bds ] */ + 2368 , 1950 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int PICTURE_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 2592 , 1458 , /* [bds ] */ + 2592 , 1458 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 2592 , 1944 , /* [bds ] */ + 2592 , 1944 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1936 , 1936 , /* [bcrop ] */ + 1936 , 1936 , /* [bds ] */ + 1936 , 1936 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1728 , /* [bcrop ] */ + 2592 , 1728 , /* [bds ] */ + 2592 , 1728 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2432 , 1950 , /* [bcrop ] */ + 2432 , 1950 , /* [bds ] */ + 2432 , 1950 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 2560 , 1536 , /* [target ] */ + }, + /* 11:9 
(Single, Dual) */ + { SIZE_RATIO_11_9, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2384 , 1950 , /* [bcrop ] */ + 2384 , 1950 , /* [bds ] */ + 2384 , 1950 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1936 , 1936 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1728 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2432 , 1950 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1554 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2368 , 1950 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +#if defined(ENABLE_8MP_FULL_FRAME) || defined(ENABLE_13MP_FULL_FRAME) +static int VIDEO_SIZE_LUT_4E6_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 
1440 , /* [target ] */ + } +}; +#endif + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4E6[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1624 + 16),( 914 + 10), /* [sensor ] *//* Sensor binning ratio = 2 */ + 1640 , 924 , /* [bns ] */ + 1632 , 918 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4E6[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 636 + 16),( 478 + 10), /* [sensor ] *//* Sensor binning ratio = 4 */ + 652 , 488 , /* [bns ] */ + 624 , 468 , /* [bcrop ] */ + 624 , 468 , /* [bds ] */ + 624 , 468 , /* [target ] */ + } +}; + +static int YUV_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 2592 , 1458 , /* [bds ] */ + 2592 , 1458 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 2592 , 1944 , /* [bds ] */ + 2592 , 1944 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1936 , 1936 , /* [bcrop ] */ + 1936 , 1936 , /* [bds ] */ + 1936 , 1936 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1728 , /* [bcrop ] */ + 2592 , 1728 , /* [bds ] */ + 2592 , 1728 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2432 , 1950 , /* [bcrop ] */ + 2432 , 1950 , /* [bds ] */ + 2432 , 1950 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 2560 , 1536 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (2592 + 16), (1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2384 , 1950 , /* [bcrop ] */ + 2384 , 1950 , /* [bds ] */ + 2384 , 1950 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (1288 + 16),(968 + 12), /* [sensor ] */ + 1304 , 980 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + /* Bcrop size 1152*864 -> 1280*960, for flicker algorithm */ + { SIZE_RATIO_4_3, + (1288 + 16),(968 + 12), /* [sensor ] */ + 1304 , 980 , /* [bns ] */ + 1280 , 960 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (1288 + 16),(968 + 12), /* [sensor ] */ + 1304 , 980 , /* [bns ] */ + 976 , 976 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + /* Bcrop size 1056*864 -> 1168*960, for flicker algorithm */ + { SIZE_RATIO_11_9, + (1288 + 16),(968 + 12), /* [sensor ] */ + 1304 , 980 , /* [bns ] */ + 1168 , 960 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int DUAL_VIDEO_SIZE_LUT_4E6[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Dual) */ + { SIZE_RATIO_16_9, + (2592 + 16),(1950 + 10), /* 
[sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Dual) */ + { SIZE_RATIO_4_3, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Dual) */ + { SIZE_RATIO_1_1, + (2592 + 16),(1950 + 10), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1936 , 1936 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + } +}; + +static int S5K4E6_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K4E6_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 2240, 1680, SIZE_RATIO_4_3}, /* For Easy 360 */ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 960, 960, SIZE_RATIO_1_1}, /* for Clip Movie */ + { 640, 360, SIZE_RATIO_16_9}, /* for SWIS */ + { 720, 720, SIZE_RATIO_1_1}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K4E6_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2592, 1458, SIZE_RATIO_16_9}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1936, 1936, SIZE_RATIO_1_1}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int S5K4E6_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K4E6_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int 
S5K4E6_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K4E6_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif + { 960, 960, SIZE_RATIO_1_1}, /* for Clip Movie */ + { 864, 480, SIZE_RATIO_16_9}, /* for PLB mode */ + { 432, 240, SIZE_RATIO_16_9}, /* for PLB mode */ +}; + +static int S5K4E6_FPS_RANGE_LIST[][2] = +{ +// { 5000, 5000}, +// { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, +// { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K4E6_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +/* For HAL3 */ +static int S5K4E6_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1458, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1936, 1936, SIZE_RATIO_1_1}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +/* availble Jpeg size (only for HAL_PIXEL_FORMAT_BLOB) */ +static int S5K4E6_JPEG_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1458, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1936, 1936, SIZE_RATIO_1_1}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static camera_metadata_rational UNIT_MATRIX_4E6_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable4H5.h b/libcamera/common_v2/ExynosCameraSizeTable4H5.h new file mode 100644 index 0000000..ee71a7e --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable4H5.h @@ -0,0 +1,840 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_4H5_H +#define EXYNOS_CAMERA_LUT_4H5_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_4H5[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3056 , 2448 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_4H5_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3060 , 2448 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + 
}, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 2992 , 2448 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_4H5[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 3264 , 1836 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 3264 , 2448 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 2448 , 2448 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_4H5[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3056 , 2448 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_4H5_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { 
SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3060 , 2448 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 2992 , 2448 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1624 + 16),( 914 + 10), /* [sensor ] *//* Sensor binning ratio = 2 */ + 1640 , 924 , /* [bns ] */ + 1624 , 914 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1624 + 16),( 914 + 10), /* [sensor ] *//* Sensor binning ratio = 2 */ + 1640 , 924 , /* [bns ] */ + 1632 , 918 , /* [bcrop ] */ + 1632 , 918 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 800 + 16),( 594 + 10), /* [sensor ] */ + 816 , 604 , /* [bns ] */ + 800 , 594 , /* [bcrop ] */ + 800 , 594 , /* [bds ] */ + 800 , 594 , /* [target ] */ + }, + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + ( 800 + 16),( 450 + 10), /* [sensor ] *//* Sensor binning ratio = 4 */ + 816 , 460 , /* [bns ] */ + 800 , 450 , /* [bcrop ] */ + 800 , 450 , /* [bds ] */ + 800 , 450 , /* [target ] */ + } +}; + +static int S5K4H5_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K4H5_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 
528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K4H5_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K4H5_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1440, 1080, SIZE_RATIO_4_3}, +}; + +static int S5K4H5_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K4H5_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K4H5_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int S5K4H5_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K4H5_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_4H5_YC[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 0),(1836 + 0), /* [sensor ] */ + 3264 , 1836 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 0),(2448 + 0) , /* [sensor ] */ + (3264 - 0),(2448 - 0) , /* [bns ] */ + (3264 - 0),(2448 - 0) , /* [bcrop ] */ + (3264 - 0),(2448 - 0) , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] *//* 
w=1620, Reduced for 16 pixel align */ + 1620 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3060 , 2448 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1350 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1800 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 2992 , 2448 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1320 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_4H5_YC[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 0),(1836 + 0), /* [sensor ] */ + 3264 , 1836 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 3264 , 1836 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 3264 , 2448 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 2448 , 2448 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_4H5_YC[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3264 + 0),(1836 + 0), /* [sensor ] */ + 3264 , 1836 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1620 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3060 , 2448 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1350 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1800 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 0),(2448 + 0), /* [sensor ] */ + 3264 , 2448 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 2992 , 2448 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1320 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_4H5_YC[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_60 16:9 (Single) */ 
+ { SIZE_RATIO_16_9, + (1624 + 0),( 914 + 0), /* [sensor ] *//* Sensor binning ratio = 2 */ + 1640 , 924 , /* [bns ] */ + 1632 , 918 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 800 + 0),( 450 + 0), /* [sensor ] *//* Sensor binning ratio = 4 */ + 816 , 460 , /* [bns ] */ + 800 , 450 , /* [bcrop ] */ + 800 , 450 , /* [bds ] */ + 800 , 450 , /* [target ] */ + } +}; + +static int S5K4H5_YC_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K4H5_YC_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K4H5_YC_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K4H5_YC_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int S5K4H5_YC_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K4H5_YC_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K4H5_YC_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int S5K4H5_YC_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K4H5_YC_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 
60000, 120000}, + { 120000, 120000}, +}; + +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable5E2.h b/libcamera/common_v2/ExynosCameraSizeTable5E2.h new file mode 100644 index 0000000..9d5033b --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable5E2.h @@ -0,0 +1,723 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_5E2_H +#define EXYNOS_CAMERA_LUT_5E2_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_5E2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_5E2[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 
16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1920 + 16), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + + +static int VTCALL_SIZE_LUT_5E2[][SIZE_OF_LUT] = +{ + /* 16:9 (VT Call) */ + { SIZE_RATIO_16_9, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (VT Call) */ + { SIZE_RATIO_4_3, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1280 , 960 , /* [bcrop ] */ + 1280 , 960 , /* [bds ] */ + 1280 , 960 , /* [target ] */ + }, + /* 1:1 (VT Call) */ + { SIZE_RATIO_1_1, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 720 , 720 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT Call) */ + { SIZE_RATIO_11_9, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1056 , 864 , /* [bcrop ] */ + 1056 , 864 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int S5K5E2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, +#endif + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, 
SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2560, 1440, SIZE_RATIO_16_9}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K5E2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K5E2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E2_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2576, 1930, SIZE_RATIO_4_3}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K5E2_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K5E2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_5E2_YC[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { 
SIZE_RATIO_16_9, + (2560 + 16),(1440 + 10), /* [sensor ] */ + 2576 , 1450 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_5E2_YC[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 10), /* [sensor ] */ + 2576 , 1450 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E2_YC[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 10), /* [sensor ] */ + 2576 , 1450 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1930 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { 
SIZE_RATIO_11_9, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + + +static int S5K5E2_YC_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E2_YC_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E2_YC_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2560, 1440, SIZE_RATIO_16_9}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E2_YC_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K5E2_YC_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K5E2_YC_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E2_YC_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E2_YC_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K5E2_YC_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_5E2_3X3[] = +{ + {128, 
128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_5E2_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_5E2_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_5E2_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_5E2_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; + +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable5E3.h b/libcamera/common_v2/ExynosCameraSizeTable5E3.h new file mode 100644 index 0000000..3ebceb0 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable5E3.h @@ -0,0 +1,543 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_5E3_H +#define EXYNOS_CAMERA_LUT_5E3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1448 , /* [bcrop ] */ + 2576 , 1448 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 
, /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int DUAL_PREVIEW_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2544 , 1696 , /* [bcrop ] */ + 2544 , 1696 , /* [bds ] */ + 2544 , 1696 , /* [target ] */ + }, + /* 5:4 (Single) - Dummy Unused */ + { SIZE_RATIO_5_4, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2416 , 1932 , /* [bcrop ] */ + 2416 , 1932 , /* [bds ] */ + 2416 , 1932 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 2560 , 1536 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2288 , 1872 , /* [bcrop ] */ + 2288 , 1872 , /* [bds ] */ + 2288 , 1872 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1448 , /* [bcrop ] */ + 2576 , 1448 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1448 , /* [bcrop ] */ + 2576 , 1448 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 
(Single) */ + { SIZE_RATIO_5_4, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0),(1932 + 0), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E3_FULL[][SIZE_OF_LUT] = +{ + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2544 , 1696 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + /* 5:4 (Single) - Dummy Unused */ + { SIZE_RATIO_5_4, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 1840 , 1472 , /* [bds ] */ + 1840 , 1472 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2288 , 1872 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* 16:9 (VT Call) */ + { SIZE_RATIO_16_9, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (VT Call) */ + { SIZE_RATIO_4_3, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1280 , 960 , /* [bcrop ] */ + 1280 , 960 , /* [bds ] */ + 1280 , 960 , /* [target ] */ + }, + /* 1:1 (VT Call) */ + { SIZE_RATIO_1_1, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 720 , 720 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT Call) */ + { SIZE_RATIO_11_9, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1056 , 864 , /* [bcrop ] */ + 1056 , 864 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int S5K5E3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#else + // for android 5.1 CTS + // Aspect ratio of maximum preview size should be same with maximum picture size's AR + // https://android.googlesource.com/platform/frameworks/base/+/a0496d3%5E!/ + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1280, 
720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K5E3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1440, SIZE_RATIO_4_3}, +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1440, SIZE_RATIO_4_3}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K5E3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K5E3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E3_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2576, 1932, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int S5K5E3_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K5E3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 
30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_5E3_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_5E3_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_5E3_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_5E3_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_5E3_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable5E3_FULL_OTF.h b/libcamera/common_v2/ExynosCameraSizeTable5E3_FULL_OTF.h new file mode 100644 index 0000000..920c75e --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable5E3_FULL_OTF.h @@ -0,0 +1,518 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_5E3_H +#define EXYNOS_CAMERA_LUT_5E3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2544 , 1696 , /* [bcrop ] */ + 2544 , 1696 , /* [bds ] */ + 1056 , 704 , /* [target ] */ + }, + /* 5:4 (Single) - Dummy Unused */ + { SIZE_RATIO_5_4, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 800 , 480 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2288 , 1872 , /* [bcrop ] */ + 2288 , 1872 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2544 , 1696 , /* [bcrop ] */ + 2544 , 1696 , /* [bds ] */ + 720 , 480 , /* [target ] */ + }, + /* 5:4 (Single) - Dummy Unused */ + { SIZE_RATIO_5_4, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 800 , 480 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0) ,(1932 + 0) , /* 
[sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2288 , 1872 , /* [bcrop ] */ + 2288 , 1872 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 640 , 480 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2544 , 1696 , /* [bcrop ] */ + 2544 , 1696 , /* [bds ] */ + 1056 , 704 , /* [target ] */ + }, + /* 5:4 (Single) - Dummy Unused */ + { SIZE_RATIO_5_4, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 800 , 480 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2288 , 1872 , /* [bcrop ] */ + 2288 , 1872 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; +static int VTCALL_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* 16:9 (VT Call) */ + { SIZE_RATIO_16_9, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (VT Call) */ + { SIZE_RATIO_4_3, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1280 , 960 , /* [bcrop ] */ + 1280 , 960 , /* [bds ] */ + 1280 , 960 , /* [target ] */ + }, + /* 1:1 (VT Call) */ + { SIZE_RATIO_1_1, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 720 , 720 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT Call) */ + { SIZE_RATIO_11_9, + (1280 + 0) ,( 960 + 0) , /* [sensor ] */ + 1280 , 960 , /* [bns ] */ + 1056 , 864 , /* [bcrop ] */ + 1056 , 864 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int YUV_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2544 , 1696 , /* [bcrop ] */ + 2544 , 1696 , /* [bds ] */ + 720 , 480 , /* [target ] */ + }, + /* 5:4 
(Single) - Dummy Unused */ + { SIZE_RATIO_5_4, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2576 , 1932 , /* [bcrop ] */ + 2576 , 1932 , /* [bds ] */ + 2576 , 1932 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 800 , 480 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2576 + 0) ,(1932 + 0) , /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2288 , 1872 , /* [bcrop ] */ + 2288 , 1872 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int S5K5E3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#else + // for android 5.1 CTS + // Aspect ratio of maximum preview size should be same with maximum picture size's AR + // https://android.googlesource.com/platform/frameworks/base/+/a0496d3%5E!/ + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2576, 1932, SIZE_RATIO_4_3}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int S5K5E3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K5E3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K5E3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, 
SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E3_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2576, 1932, SIZE_RATIO_4_3}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K5E3_FPS_RANGE_LIST[][2] = +{ +// { 5000, 5000}, +// { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, +// { 4000, 30000}, +// { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K5E3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_5E3_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_5E3_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_5E3_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_5E3_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_5E3_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; + +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable5E3_VFLIP.h b/libcamera/common_v2/ExynosCameraSizeTable5E3_VFLIP.h new file mode 100644 index 0000000..119a427 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable5E3_VFLIP.h @@ -0,0 +1,361 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_5E3_H +#define EXYNOS_CAMERA_LUT_5E3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + 
}, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1708 , /* [bcrop ] */ + 2560 , 1708 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2400 , 1920 , /* [bcrop ] */ + 2400 , 1920 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1920 + 12), /* [sensor ] */ + 2576 , 1932 , /* [bns ] */ + 2352 , 1920 , /* [bcrop ] */ + 2352 , 1920 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; +static int VTCALL_SIZE_LUT_5E3[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (1264 + 16),(950 + 10), /* [sensor ] */ + 1280, 960, /* [bns ] */ + 1264, 712, /* [bcrop ] */ + 1248, 704, /* [bds ] */ + 1248, 704 /* [target ] */ + }, + { SIZE_RATIO_4_3, + (1264 + 16),(950 + 10), /* [sensor ] */ + 1280, 960, /* [bns ] */ + 1264, 948, /* [bcrop ] */ + 960, 720, /* [bds ] */ + 960, 720 /* [target ] */ + }, + { SIZE_RATIO_1_1, + (1264 + 16),(950 + 10), /* [sensor ] */ + 1280, 960, /* [bns ] */ + 948, 948, /* [bcrop ] */ + 720, 720, /* [bds ] */ + 720, 720 /* [target ] */ + }, + { SIZE_RATIO_11_9, + (1264 + 16),(950 + 10), /* [sensor ] */ + 1280, 960, /* [bns ] */ + 1152, 948, /* [bcrop ] */ + 352, 288, /* [bds ] */ + 352, 288 /* [target ] */ + } +}; + +static int S5K5E3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#else + // for android 5.1 CTS + // Aspect ratio of maximum preview size should be same with maximum picture size's AR + // https://android.googlesource.com/platform/frameworks/base/+/a0496d3%5E!/ + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2560, 1440, SIZE_RATIO_16_9}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K5E3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 
1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K5E3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K5E3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E3_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K5E3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable5E8.h b/libcamera/common_v2/ExynosCameraSizeTable5E8.h new file mode 100644 index 0000000..c8d4bc2 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable5E8.h @@ -0,0 +1,318 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_5E8_H +#define EXYNOS_CAMERA_LUT_5E8_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_5E8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 16),(1934 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , +}, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 16),(1934 + 10), /* [sensor ] */ + 2576 , 1934 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + }, +}; + +static int PICTURE_SIZE_LUT_5E8[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2576 + 16),(1934 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2576 + 16),(1934 + 10), /* [sensor ] */ + 2576 , 1934 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_5E8[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2576 + 16),(1934 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1920 + 10), /* [sensor ] */ + 2576 , 1936 , /* [bns ] */ + 2560 , 1920 , /* [bcrop ] */ + 2560 , 1920 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2576 + 16),(1934 + 10), /* [sensor ] */ + 2576 , 1934 , /* [bns ] */ + 1920 , 1920 , /* [bcrop ] */ + 1920 , 1920 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + + + +static int S5K5E8_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, +#endif + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 400, 400, SIZE_RATIO_1_1}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E8_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + 
{ 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E8_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2560, 1440, SIZE_RATIO_16_9}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K5E8_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K5E8_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K5E8_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K5E8_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K5E8_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2576, 1930, SIZE_RATIO_4_3}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int S5K5E8_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K5E8_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +#if 0 +static camera_metadata_rational UNIT_MATRIX_5E8_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_5E8_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_5E8_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + 
{-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_5E8_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_5E8_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; +#endif +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable6A3.h b/libcamera/common_v2/ExynosCameraSizeTable6A3.h new file mode 100644 index 0000000..caaf386 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable6A3.h @@ -0,0 +1,145 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_6A3_H +#define EXYNOS_CAMERA_LUT_6A3_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +-----------------------------*/ + +static int S5K6A3_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1280_720) + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, +#endif + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 480, SIZE_RATIO_1_1}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K6A3_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1280_720)) + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K6A3_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 960, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K6A3_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1392, 1392, SIZE_RATIO_1_1}, /* For CTS */ + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K6A3_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static 
int S5K6A3_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K6A3_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +}; + +static int S5K6A3_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K6A3_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable6B2.h b/libcamera/common_v2/ExynosCameraSizeTable6B2.h new file mode 100644 index 0000000..7290445 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable6B2.h @@ -0,0 +1,396 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_6B2_H +#define EXYNOS_CAMERA_LUT_6B2_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_6B2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1920 , 1080 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1440 , 1080 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1072 , 1072 , /* [bcrop ] */ + 1072 , 1072 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1616 , 1080 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1344 , 1080 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1792 , 1080 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] */ + 
1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1312 , 1080 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int PICTURE_SIZE_LUT_6B2[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1920 , 1080 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1440 , 1080 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1072 , 1072 , /* [bcrop ] */ + 1072 , 1072 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1616 , 1080 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1344 , 1080 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 2560 , 1536 , /* [bcrop ] */ + 2560 , 1536 , /* [bds ] */ + 2560 , 1536 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 2384 , 1950 , /* [bcrop ] */ + 2384 , 1950 , /* [bds ] */ + 2384 , 1950 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_6B2[][SIZE_OF_LUT] = +{ + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1920 , 1080 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1440 , 1080 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1072 , 1072 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1616 , 1080 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1344 , 1080 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1792 , 1080 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1312 , 1080 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced
for 16 pixel align */ + } +}; +static int VTCALL_SIZE_LUT_6B2[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1280 , 960 , /* [bcrop ] */ + 1280 , 960 , /* [bds ] */ + 1280 , 960 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 720 , 720 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (1920 + 16),(1080 + 10), /* [sensor ] */ + 1936 , 1090 , /* [bns ] */ + 1056 , 864 , /* [bcrop ] */ + 1056 , 864 , /* [bds ] */ + 352 , 288 , /* [target ] */ + }, +}; + +static int S5K6B2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K6B2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, + { 240, 320, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K6B2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int S5K6B2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K6B2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K6B2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int
S5K6B2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, + { 240, 320, SIZE_RATIO_3_4}, +#endif +}; + +static int S5K6B2_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K6B2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +/* For HAL3 */ +static camera_metadata_rational UNIT_MATRIX_6B2_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable6D1.h b/libcamera/common_v2/ExynosCameraSizeTable6D1.h new file mode 100644 index 0000000..09e143e --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable6D1.h @@ -0,0 +1,564 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_6D1_H +#define EXYNOS_CAMERA_LUT_6D1_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 16 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_6D1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1920 , 1440 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1440 , 1440 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1072 , 1072 , /* [target ] *//* w=1080, Reduced for 16 pixel align */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2160 , 1440 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1792 , 1440 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2400 , 1440 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 
1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1760 , 1440 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ + } +}; + +static int PICTURE_SIZE_LUT_6D1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1920 , 1440 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1440 , 1440 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_6D1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1920 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#endif + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1440 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1072 , 1072 , /* [bds ] */ + 1072 , 1072 , /* [target ] *//* w=1080, Reduced for 16 pixel align */ +#else + 1440 , 1440 , /* [bds ] */ + 1072 , 1072 , /* [target ] *//* w=1080, Reduced for 16 pixel align */ +#endif + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2160 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ +#else + 2160 , 1440 , /* [bds ] */ + 1616 , 1080 , /* [target ] *//* w=1620, Reduced for 16 pixel align */ +#endif + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1792 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ +#else + 1792 , 1440 , /* [bds ] */ + 1344 , 1080 , /* [target ] *//* w=1350, Reduced for 16 pixel align */ +#endif + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2400 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ +#else + 2400 , 1440 , /* [bds ] */ + 1792 , 1080 , /* [target ] *//* w=1800, Reduced for 16 pixel align */ +#endif + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 
16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1760 , 1440 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1312 , 1080 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ +#else + 1760 , 1440 , /* [bds ] */ + 1312 , 1080 , /* [target ] *//* w=1320, Reduced for 16 pixel align */ +#endif + } +}; + +#ifdef ENABLE_8MP_FULL_FRAME +static int VIDEO_SIZE_LUT_6D1_8MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + } +}; +#endif + +static int DUAL_PREVIEW_SIZE_LUT_6D1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1920 , 1440 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1440 , 1440 , /* [bcrop ] */ + 1072 , 1072 , /* [bds ] */ + 1072 , 1072 , /* [target ] *//* w=1080, Reduced for 16 pixel align */ + }, + /* 3:2 (Dual) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2160 , 1440 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Dual) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1800 , 1440 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Dual) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2400 , 1440 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Dual) */ + { 
SIZE_RATIO_11_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1760 , 1440 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int DUAL_VIDEO_SIZE_LUT_6D1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Dual) */ + { SIZE_RATIO_16_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2560 , 1440 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Dual) */ + { SIZE_RATIO_4_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1920 , 1440 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Dual) */ + { SIZE_RATIO_1_1, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1440 , 1440 , /* [bcrop ] */ + 1072 , 1072 , /* [bds ] */ + 1072 , 1072 , /* [target ] *//* w=1080, Reduced for 16 pixel align */ + }, + /* 3:2 (Dual) */ + { SIZE_RATIO_3_2, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2160 , 1440 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Dual) */ + { SIZE_RATIO_5_4, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1800 , 1440 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Dual) */ + { SIZE_RATIO_5_3, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 2400 , 1440 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Dual) */ + { SIZE_RATIO_11_9, + (2560 + 16),(1440 + 16), /* [sensor ] */ + 2576 , 1456 , /* [bns ] */ + 1760 , 1440 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int S5K6D1_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +#ifdef ENABLE_8MP_FULL_FRAME + { 256, 144, SIZE_RATIO_16_9}, +#endif + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K6D1_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 2560, 1440, SIZE_RATIO_16_9}, + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K6D1_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, 
SIZE_RATIO_4_3}, +}; + +static int S5K6D1_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K6D1_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K6D1_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +#ifdef ENABLE_8MP_FULL_FRAME + { 256, 144, SIZE_RATIO_16_9} +#else + { 176, 144, SIZE_RATIO_11_9} +#endif +}; + +static int S5K6D1_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int S5K6D1_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K6D1_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 25000, 30000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTable8B1.h b/libcamera/common_v2/ExynosCameraSizeTable8B1.h new file mode 100644 index 0000000..f9fe885 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTable8B1.h @@ -0,0 +1,155 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_8B1_H +#define EXYNOS_CAMERA_LUT_8B1_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +-----------------------------*/ + +static int S5K8B1_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K8B1_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int S5K8B1_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int S5K8B1_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int S5K8B1_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int S5K8B1_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int S5K8B1_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +}; + +static int S5K8B1_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int S5K8B1_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX134.h b/libcamera/common_v2/ExynosCameraSizeTableIMX134.h new file mode 100644 index 0000000..ed9fa4d --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX134.h @@ -0,0 +1,388 
@@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX134_H +#define EXYNOS_CAMERA_LUT_IMX134_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + + +static int PREVIEW_SIZE_LUT_IMX134[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3056 , 2448 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 2992 , 2448 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + }, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + /* 3:4 ( Tablet Horizontal UI ) */ + { SIZE_RATIO_3_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 1836 , 2448 , /* [bcrop ] */ + 480 , 640 , /* [bds ] */ + 480 , 640 , /* [target ] */ + } +#endif +}; + +static int PICTURE_SIZE_LUT_IMX134[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 3264 , 1836 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ 
+ 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 3264 , 2448 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 2448 , 2448 , /* [target ] */ + }, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + /* 3:4 ( Tablet Horizontal UI ) */ + { SIZE_RATIO_3_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 1836 , 2448 , /* [bcrop ] */ + 480 , 640 , /* [bds ] */ + 480 , 640 , /* [target ] */ + } +#endif +}; + +static int VIDEO_SIZE_LUT_IMX134[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 3264 , 2176 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 3056 , 2448 , /* [bds ] *//* w=3060, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 3264 , 1958 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 2992 , 2448 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + }, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + /* 3:4 ( Tablet Horizontal UI ) */ + { SIZE_RATIO_3_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 1836 , 2448 , /* [bcrop ] */ + 480 , 640 , /* [bds ] */ + 480 , 640 , /* [target ] */ + } +#endif +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX134[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1624 + 16),( 914 + 10), /* [sensor ] */ + 1640 , 924 , /* [bns ] */ + 1624 , 914 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX134[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 800 + 16),( 450 + 10), /* [sensor ] */ + 816 , 460 , /* [bns ] */ + 800 , 450 , /* [bcrop ] */ + 800 , 450 , /* [bds ] */ + 800 , 450 , /* [target ] */ + } +}; + +static int IMX134_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 
480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int IMX134_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, + { 240, 320, SIZE_RATIO_3_4}, +#endif +}; + +static int IMX134_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX134_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int IMX134_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX134_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX134_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, + { 240, 320, SIZE_RATIO_3_4}, +#endif +}; + +static int IMX134_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX134_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX135.h b/libcamera/common_v2/ExynosCameraSizeTableIMX135.h new file mode 100644 index 0000000..9447f09 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX135.h @@ -0,0 +1,152 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX135_H +#define EXYNOS_CAMERA_LUT_IMX135_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +-----------------------------*/ + +static int IMX135_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int IMX135_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX135_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX135_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int IMX135_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX135_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX135_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int IMX135_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX135_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX175.h b/libcamera/common_v2/ExynosCameraSizeTableIMX175.h new file mode 100644 index 0000000..f13d24a --- /dev/null +++ 
b/libcamera/common_v2/ExynosCameraSizeTableIMX175.h @@ -0,0 +1,347 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX175_H +#define EXYNOS_CAMERA_LUT_IMX175_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX175[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_IMX175[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 3264 , 1836 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , 
/* [bds ] */ + 3264 , 2448 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 2448 , 2448 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX175[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Reduced for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX175[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1624 + 16),( 914 + 10), /* [sensor ] */ + 1640 , 924 , /* [bns ] */ + 1632 , 918 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX175[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 800 + 16),( 450 + 10), /* [sensor ] */ + 816 , 460 , /* [bns ] */ + 800 , 450 , /* [bcrop ] */ + 800 , 450 , /* [bds ] */ + 800 , 450 , /* [target ] */ + } +}; + +static int IMX175_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int IMX175_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 
528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX175_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX175_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int IMX175_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX175_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX175_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int IMX175_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX175_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX219.h b/libcamera/common_v2/ExynosCameraSizeTableIMX219.h new file mode 100644 index 0000000..3fd2f7f --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX219.h @@ -0,0 +1,343 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX219_H +#define EXYNOS_CAMERA_LUT_IMX219_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX219[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_IMX219[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 3264 , 1836 , /* [bds ] */ + 3264 , 1836 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 3264 , 2448 , /* [bds ] */ + 3264 , 2448 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 2448 , 2448 , /* [bds ] */ + 2448 , 2448 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX219[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (3264 + 16),(1836 + 10), /* [sensor ] */ + 3280 , 1846 , /* [bns ] */ + 3264 , 1836 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , 
/* [bns ] */ + 3264 , 2448 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2448 , 2448 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Reduced for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 2176 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3060 , 2448 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 3264 , 1958 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3264 + 16),(2448 + 10), /* [sensor ] */ + 3280 , 2458 , /* [bns ] */ + 2992 , 2448 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX219[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1624 + 16),( 914 + 10), /* [sensor ] */ + 1640 , 924 , /* [bns ] */ + 1632 , 918 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX219[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + ( 800 + 16),( 450 + 10), /* [sensor ] */ + 816 , 460 , /* [bns ] */ + 800 , 450 , /* [bcrop ] */ + 800 , 450 , /* [bds ] */ + 800 , 450 , /* [target ] */ + } +}; + +static int IMX219_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int IMX219_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 3840, 2160, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX219_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX219_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 
2976, 2976, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, +}; + +static int IMX219_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX219_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX219_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int IMX219_FPS_RANGE_LIST[][2] = +{ + { 20000, 20000}, + { 20000, 24000}, + { 24000, 24000} +}; + +static int IMX219_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif + diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX228_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTableIMX228_WQHD.h new file mode 100644 index 0000000..0bc2207 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX228_WQHD.h @@ -0,0 +1,811 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX228_H +#define EXYNOS_CAMERA_LUT_IMX228_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX228_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2976 , 1674 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2240 , 1684 , /* [bns ] */ + 2232 , 1674 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 1680 , 1684 , /* [bns ] */ + 1674 , 1674 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2496 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2240 , 1684 , /* [bns ] */ + 2080 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2784 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2240 , 1684 , /* [bns ] */ + 2032 , 1672 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_IMX228[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 
1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4464 , 3348 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 3360 , 3368 , /* [bns ] */ + 3344 , 3344 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5024 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4192 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5584 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4096 , 3356 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PICTURE_SIZE_LUT_IMX228[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 5952 , 3348 , /* [bds ] */ + 5952 , 3348 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4464 , 3348 , /* [bcrop ] */ + 4464 , 3348 , /* [bds ] */ + 4464 , 3348 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 3360 , 3368 , /* [bns ] */ + 3344 , 3344 , /* [bcrop ] */ + 3344 , 3344 , /* [bds ] */ + 3344 , 3344 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX228_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 
2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 3076 , 2244 , /* [bns ] */ + 3344 , 2232 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 2244 , /* [bns ] */ + 2784 , 2232 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 3976 , 2244 , /* [bns ] */ + 3712 , 2232 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 2984 , 2244 , /* [bns ] */ + 2720 , 2232 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX228[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] *//* h=2440 in original, corrected to 1440 for 16:9 */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4464 , 3348 , /* [bcrop ] */ + 640 , 480 , /* [bds ] */ + 640 , 480 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3344 + 16), (3356 + 12), /* [sensor ] */ + 3360 , 3368 , /* [bns ] */ + 3344 , 3344 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5024 , 3356 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 4192 , 3356 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5584 , 3356 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4464 + 16), (3356 + 12), /* [sensor ] */ + 4480 , 3368 , /* [bns ] */ + 
4096 , 3356 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +#ifdef ENABLE_8MP_FULL_FRAME +static int VIDEO_SIZE_LUT_IMX228_8MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 8MP */ + + /* 8MP full frame fix all scenario */ + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, +}; +#endif + +#ifdef ENABLE_13MP_FULL_FRAME +static int VIDEO_SIZE_LUT_IMX228_13MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 8MP */ + + /* 8MP full frame fix all scenario */ + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special 
handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5952 + 16), (3356 + 12), /* [sensor ] */ + 5968 , 3368 , /* [bns ] */ + 5952 , 3348 , /* [bcrop ] */ + 4800 , 2700 , /* [bds ] *//* 13MP (4800x2700) special handling in ExynosCameraParameters class */ + 4800 , 2700 , /* [target ] */ + }, +}; +#endif + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX228_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2968 + 16), (1668 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2984 , 1680 , /* [bns ] */ + 2976 , 1674 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX228_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1464 + 16), ( 820 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1480 , 832 , /* [bns ] */ + 1472 , 828 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_IMX228_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_240 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1464 + 16), ( 820 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1480 , 832 , /* [bns ] */ + 1472 , 828 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_IMX228_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1680 , /* [bns ] */ + 2976 , 1674 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1680 , /* [bns ] */ + 2224 , 1674 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1680 , /* [bns ] */ + 1674 , 1674 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2968 + 16),(1668 + 12), /* [sensor ] */ + 2984 , 1684 , /* [bns ] */ + 2032 , 1674 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int IMX228_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + /*{ 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1},*/ +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + /*{ 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, */ +#endif + { 1280, 720, SIZE_RATIO_16_9}, + /*{ 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, 
SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3},*/ + { 800, 450, SIZE_RATIO_16_9}, + /*{ 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, */ + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + /*{ 176, 144, SIZE_RATIO_11_9},*/ +}; + +static int IMX228_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + /*{ 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1},*/ +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + {1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX228_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 5952, 3348, SIZE_RATIO_16_9}, + { 5312, 2988, SIZE_RATIO_16_9}, +#endif +#ifdef ENABLE_13MP_FULL_FRAME + { 4800, 2700, SIZE_RATIO_16_9}, +#endif +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 4464, 3348, SIZE_RATIO_4_3}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3344, 3344, SIZE_RATIO_1_1}, + { 3264, 2448, SIZE_RATIO_4_3}, +#elif !defined(ENABLE_8MP_FULL_FRAME) + { 3264, 2448, SIZE_RATIO_4_3}, +#endif + { 3264, 1836, SIZE_RATIO_16_9}, +#if 0 //!defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 2976, 2976, SIZE_RATIO_1_1}, +#endif + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX228_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 4128, 3096, SIZE_RATIO_4_3}, + { 4096, 3072, SIZE_RATIO_4_3}, +#elif !defined(ENABLE_8MP_FULL_FRAME) + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 2304, SIZE_RATIO_16_9}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 3200, 2400, SIZE_RATIO_4_3}, +#endif + { 3072, 1728, SIZE_RATIO_16_9}, +#if !defined(ENABLE_8MP_FULL_FRAME) && !defined(ENABLE_13MP_FULL_FRAME) + { 2988, 2988, SIZE_RATIO_1_1}, +#endif + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2448, 2448, SIZE_RATIO_1_1}, +#endif + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int IMX228_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX228_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + /*{ 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1},*/ + { 1280, 720, SIZE_RATIO_16_9}, + /*{ 960, 720, SIZE_RATIO_4_3},*/ + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* 
for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX228_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int IMX228_FPS_RANGE_LIST[][2] = +{ +// { 5000, 5000}, +// { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, +// { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX228_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, + { 240000, 240000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_FHD.h b/libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_FHD.h new file mode 100644 index 0000000..6b54fc6 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_FHD.h @@ -0,0 +1,853 @@ +/* +** +**copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX240_2P2_H +#define EXYNOS_CAMERA_LUT_IMX240_2P2_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , 
/* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_IMX240_2P2_BNS_DUAL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 for 16:9, 2.0 for 4:3 and 1:1 + BDS : NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1984 , 1448 , /* [bcrop ] */ + 1984 , 1448 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 2236 , 1490 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1862 , 1490 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 2484 , 1490 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1822 , 1490 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +/* + * This is not BNS, BDS (just name is BNS) + * To keep source code. just let the name be. 
+ */ +static int PREVIEW_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_IMX240_2P2_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4496 , 2988 , /* [bcrop ] */ + 4496 , 2988 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 3684 , 2988 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + + +static int PICTURE_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + 
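+/*
+ * Illustrative sketch only (hypothetical helper, not an API of this BSP; kept
+ * inside a comment so the initializer below still compiles): each LUT row packs
+ * the fields listed in the legend at the top of this file -- RATIO_ID, then W/H
+ * pairs for sensor, bns, bcrop, bds and target.  A caller wanting, say, the BDS
+ * size for a given ratio would scan column 0 for the matching SIZE_RATIO_* id:
+ *
+ *     // hypothetical helper; per the legend, BDS_W is index 7 and BDS_H index 8
+ *     static bool getBdsSize(int (*lut)[SIZE_OF_LUT], int rows, int ratioId,
+ *                            int *bdsW, int *bdsH)
+ *     {
+ *         for (int i = 0; i < rows; i++) {
+ *             if (lut[i][0] == ratioId) {   // RATIO_ID column
+ *                 *bdsW = lut[i][7];        // BDS_W
+ *                 *bdsH = lut[i][8];        // BDS_H
+ *                 return true;
+ *             }
+ *         }
+ *         return false;                     // ratio not present in this LUT
+ *     }
+ *
+ * For example, getBdsSize(PICTURE_SIZE_LUT_IMX240_2P2, 3, SIZE_RATIO_16_9, &w, &h)
+ * would yield 5312x2988 from the table below.  The real lookup and the 16-pixel
+ * alignment handling live in the ExynosCameraParameters class referenced by the
+ * comments in these tables.
+ */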
/* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 
, /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixe align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_2P2_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4496 , 2988 , /* [bcrop ] */ + 4496 , 2988 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3684 , 2988 , /* [bcrop ] */ + 3684 , 2988 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +#ifdef ENABLE_8MP_FULL_FRAME +static int VIDEO_SIZE_LUT_IMX240_2P2_8MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 8M */ + + /* 8MP full frame fix all scenario */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD 
(3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + } +}; +#endif + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 960 , 720 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_HIGH_SPEED_IMX240_2P2_BNS_FULL_OTF[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = OFF */ + + /* FHD_60 16:9 () */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 2648 , 1490 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* HD_120 16:9 (Fast AE) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1312 , 738 , /* [bns ] */ + 1280 , 720 , /* [target ] */ + }, +}; + +static int VTCALL_SIZE_LUT_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ 
+ 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int IMX240_2P2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX240_2P2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX240_2P2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifndef ENABLE_8MP_FULL_FRAME + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, +#endif + { 3264, 1836, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2976, 2976, SIZE_RATIO_1_1}, +#endif + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX240_2P2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4608, 2592, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3456, 2592, SIZE_RATIO_4_3}, +#ifndef ENABLE_8MP_FULL_FRAME + { 3200, 2400, SIZE_RATIO_4_3}, +#endif + { 3072, 1728, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2988, 2988, SIZE_RATIO_1_1}, +#endif + { 2656, 1494, SIZE_RATIO_16_9}, + { 2592, 2592, SIZE_RATIO_1_1}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2448, 2448, SIZE_RATIO_1_1}, +#endif + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int IMX240_2P2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX240_2P2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, 
SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int IMX240_2P2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int IMX240_2P2_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX240_2P2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_WQHD.h new file mode 100644 index 0000000..66a58e2 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX240_2P2_WQHD.h @@ -0,0 +1,970 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX240_2P2_H +#define EXYNOS_CAMERA_LUT_IMX240_2P2_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] 
*//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 2656 , 1494 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 
(Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int DUAL_PREVIEW_SIZE_LUT_IMX240_2P2_FHD[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1490 , 1490 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 4480 , 2988 , /* [bds ] */ + 4480 , 2988 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 3728 , 2988 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ 
+ 4976 , 2988 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 3648 , 2988 , /* [bds ] */ + 3648 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* 
[bcrop ] */ + 1488 , 1488 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 960 , 720 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_IMX240_2P2_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int YUV_SIZE_LUT_IMX240_2P2[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3984 , 2988 , 
/* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 4480 , 2988 , /* [bds ] */ + 4480 , 2988 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 3728 , 2988 , /* [bds ] */ + 3728 , 2988 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 4976 , 2988 , /* [bds ] */ + 4976 , 2988 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 3648 , 2988 , /* [bds ] */ + 3648 , 2988 , /* [target ] */ + } +}; + +static int YUV_SIZE_LUT_IMX240_2P2_BDS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ + } +}; + + +static int IMX240_2P2_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, 
SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX240_2P2_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX240_2P2_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +}; + +static int IMX240_2P2_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int IMX240_2P2_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX240_2P2_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int IMX240_2P2_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int IMX240_2P2_FPS_RANGE_LIST[][2] = +{ + //{ 5000, 5000}, + //{ 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + //{ 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX240_2P2_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; + +/* For HAL3 */ +static int IMX240_2P2_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 
1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 512, 384, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, +// { 176, 144, SIZE_RATIO_11_9}, /* Too small to create thumbnail */ +}; + +/* For HAL3 */ +static int IMX240_2P2_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +/* For HAL3 */ +static int IMX240_2P2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, +}; + +static camera_metadata_rational UNIT_MATRIX_IMX240_2P2_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_IMX240_3X3[] = { + {800, 1024}, {-172, 1024}, {-110, 1024}, + {-463, 1024}, {1305, 1024}, {146, 1024}, + {-119, 1024}, {286, 1024}, {552, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_IMX240_3X3[] = { + {1758, 1024}, {-1014, 1024}, {-161, 1024}, + {-129, 1024}, {1119, 1024}, {134, 1024}, + {-13, 1024}, {225, 1024}, {604, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX1_2P2_3X3[] = { + {1094, 1024}, {-306, 1024}, {-146, 1024}, + {-442, 1024}, {1388, 1024}, {52, 1024}, + {-104, 1024}, {250, 1024}, {600, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_2P2_3X3[] = { + {2263, 1024}, {-1364, 1024}, {-145, 1024}, + {-194, 1024}, {1257, 1024}, {-56, 1024}, + {-24, 1024}, {187, 1024}, {618, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_IMX240_3X3[] = { + {682, 1024}, {182, 1024}, {120, 1024}, + {244, 1024}, {902, 1024}, {-122, 1024}, + {14, 1024}, {-316, 1024}, {1142, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_IMX240_3X3[] = { + {450, 1024}, {307, 1024}, {227, 1024}, + {8, 1024}, {1049, 1024}, {-33, 1024}, + {-7, 1024}, {-968, 1024}, {1815, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_2P2_3X3[] = { + {612, 1024}, {233, 1024}, {139, 1024}, + {199, 1024}, {831, 1024}, {-6, 1024}, + {15, 1024}, {-224, 1024}, {1049, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_2P2_3X3[] = { + {441, 1024}, {317, 1024}, {226, 1024}, + {29, 1024}, {908, 1024}, {87, 1024}, + {9, 1024}, {-655, 1024}, {1486, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX240_FHD.h b/libcamera/common_v2/ExynosCameraSizeTableIMX240_FHD.h new file mode 100644 index 0000000..f919a1c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX240_FHD.h @@ -0,0 +1,620 @@ +/* +** +**copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX240_H +#define EXYNOS_CAMERA_LUT_IMX240_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_IMX240[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] 
*/ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_IMX240[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 
1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +#ifdef ENABLE_8MP_FULL_FRAME +static int VIDEO_SIZE_LUT_IMX240_8MP_FULL[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 8M */ + + /* 8MP full frame fix all scenario */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in 
ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 3840 , 2160 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 3840 , 2160 , /* [target ] */ + } +}; +#endif + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int IMX240_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ +}; + +static int IMX240_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX240_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifndef ENABLE_8MP_FULL_FRAME + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, +#endif + { 3264, 1836, SIZE_RATIO_16_9}, +#ifndef 
ENABLE_8MP_FULL_FRAME + { 2976, 2976, SIZE_RATIO_1_1}, +#endif + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX240_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifndef ENABLE_8MP_FULL_FRAME + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 3200, 2400, SIZE_RATIO_4_3}, +#endif + { 3072, 1728, SIZE_RATIO_16_9}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2988, 2988, SIZE_RATIO_1_1}, +#endif + { 2656, 1494, SIZE_RATIO_16_9}, + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, +#ifndef ENABLE_8MP_FULL_FRAME + { 2448, 2448, SIZE_RATIO_1_1}, +#endif + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int IMX240_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX240_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int IMX240_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9} +#endif +}; + +static int IMX240_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX240_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX240_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTableIMX240_WQHD.h new file mode 100644 index 0000000..77c30a3 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX240_WQHD.h @@ -0,0 +1,638 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX240_H +#define EXYNOS_CAMERA_LUT_IMX240_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 2.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 1504 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1088 , 1088 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2236 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1862 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2484 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2000 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PREVIEW_SIZE_LUT_IMX240[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 2656 , 1494 , /* [bds ] */ + 1920 , 
1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ +#else + 1920 , 1440 , /* [bds ] */ + 1920 , 1440 , /* [target ] */ +#endif + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1488 , 1488 , /* [bds ] */ + 1088 , 1088 , /* [target ] */ +#else + 1440 , 1440 , /* [bds ] */ + 1440 , 1440 , /* [target ] */ +#endif + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ +#else + 2160 , 1440 , /* [bds ] */ + 2160 , 1440 , /* [target ] */ +#endif + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ +#else + 1792 , 1440 , /* [bds ] */ + 1792 , 1440 , /* [target ] */ +#endif + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ +#else + 2400 , 1440 , /* [bds ] */ + 2400 , 1440 , /* [target ] */ +#endif + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ +#ifdef LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ +#else + 1760 , 1440 , /* [bds ] */ + 1760 , 1440 , /* [target ] */ +#endif + } +}; + +static int PICTURE_SIZE_LUT_IMX240[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ + 5312 , 2988 , /* [bds ] */ + 5312 , 2988 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 3984 , 2988 , /* [bds ] */ + 3984 , 2988 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 2988 , 2988 , /* [bds ] */ + 2988 , 2988 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3536 , 1988 , /* [bcrop ] */ +#if defined(USE_BDS_RECORDING) +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 1920 , 1080 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* 
[bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#else +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 3536 , 1988 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ +#else + 3536 , 1988 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ +#endif /* LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING */ +#endif /* USE_BDS_RECORDING */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2650 , 1988 , /* [bcrop ] */ + 1440 , 1080 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 2004 , 2000 , /* [bns ] */ + 1988 , 1988 , /* [bcrop ] */ + 1088 , 1088 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 2982 , 1988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2486 , 1988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 3552 , 2000 , /* [bns ] */ + 3314 , 1988 , /* [bcrop ] */ + 1792 , 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 2664 , 2000 , /* [bns ] */ + 2430 , 1988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX240[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 5312 , 2988 , /* [bcrop ] */ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_RECORDING) + 2656 , 1494 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ +#else + 2560 , 1440 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 2560 , 1440 , /* [target ] */ +#endif + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3984 , 2988 , /* [bcrop ] */ + 1984 , 1488 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2992 + 16),(2988 + 12), /* [sensor ] */ + 3008 , 3000 , /* [bns ] */ + 2988 , 2988 , /* [bcrop ] */ + 1488 , 1488 , /* [bds ] *//* w=1080, Increased for 16 pixel align */ + 1088 , 1088 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4480 , 2988 , /* [bcrop ] */ + 1616 , 1080 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3728 , 2988 , /* [bcrop ] */ + 1344 , 1080 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (5312 + 16),(2988 + 12), /* [sensor ] */ + 5328 , 3000 , /* [bns ] */ + 4976 , 2988 , /* [bcrop ] */ + 1792 
, 1080 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (3984 + 16),(2988 + 12), /* [sensor ] */ + 4000 , 3000 , /* [bns ] */ + 3648 , 2988 , /* [bcrop ] */ + 1312 , 1080 , /* [bds ] *//* w=1320, Reduced for 16 pixel align */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* FHD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] *//* Sensor binning ratio = 2 */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1312 + 16),( 736 + 12), /* [sensor ] *//* Sensor binning ratio = 4 */ + 1328 , 748 , /* [bns ] */ + 1312 , 738 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_IMX240_BNS[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 2648 , 1490 , /* [bcrop ] */ + 1920 , 1080 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1986 , 1490 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1488 , 1488 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2648 + 16),(1488 + 12), /* [sensor ] */ + 2664 , 1500 , /* [bns ] */ + 1822 , 1490 , /* [bcrop ] */ + 352 , 288 , /* [bds ] */ + 352 , 288 , /* [target ] */ + } +}; + +static int IMX240_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX240_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 528, 432, SIZE_RATIO_11_9}, + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX240_PICTURE_LIST[][SIZE_OF_RESOLUTION] 
= +{ + { 5312, 2988, SIZE_RATIO_16_9}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX240_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int IMX240_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX240_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int IMX240_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif +}; + +static int IMX240_FPS_RANGE_LIST[][2] = +{ + //{ 5000, 5000}, + //{ 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + //{ 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX240_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableIMX260_2L1_WQHD.h b/libcamera/common_v2/ExynosCameraSizeTableIMX260_2L1_WQHD.h new file mode 100644 index 0000000..a6cab19 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableIMX260_2L1_WQHD.h @@ -0,0 +1,827 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_IMX260_2L1_H +#define EXYNOS_CAMERA_LUT_IMX260_2L1_H + +#include "ExynosCameraConfig.h" + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 0, + Sensor Margin Height = 0 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4032 + 0) ,(2268 + 0), /* [sensor ] */ + 4032 , 2268 , /* [bns ] */ + 4032 , 2268 , /* [bcrop ] */ + 2688 , 1512 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4032 + 0) ,(3024 + 0), /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 3024 , /* [bcrop ] */ + 2688 , 2016 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3024 + 0),(3024 + 0), /* [sensor ] */ + 3024 , 3024 , /* [bns ] */ + 3024 , 3024 , /* [bcrop ] */ + 2016 , 2016 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 2688 , /* [bcrop ] */ + 2688 , 1792 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 3780 , 3024 , /* [bcrop ] */ + 2480 , 1984 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4030 , 2418 , /* [bcrop ] */ + 2640 , 1584 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 3696 , 3024 , /* [bcrop ] */ + 2464 , 2016 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PREVIEW_SIZE_LUT_IMX260_2L1_BNS[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.5 + BDS = 1440p */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4032 + 0) ,(2268 + 0), /* [sensor ] */ + 2688 , 1512 , /* [bns ] */ + 2688 , 1512 , /* [bcrop ] */ + 2688 , 1512 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4032 + 0) ,(3024 + 0), /* [sensor ] */ + 2688 , 2016 , /* [bns ] */ + 2688 , 2016 , /* [bcrop ] */ + 2688 , 2016 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3024 + 0),(3024 + 0), /* [sensor ] */ + 2016 , 2016 , /* [bns ] */ + 2016 , 2016 , /* [bcrop ] */ + 2016 , 2016 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 2688 , 1356 , /* [bns ] */ + 2688 , 1356 , /* [bcrop ] */ + 2688 , 1356 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 2688 , 1356 , /* [bns ] */ + 2530 , 1356 , /* [bcrop ] */ + 2530 , 1356 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 2688 , 1356 , /* [bns ] */ + 2688 , 1612 , /* [bcrop ] */ 
+ 2688 , 1612 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 2688 , 1356 , /* [bns ] */ + 2464 , 1356 , /* [bcrop ] */ + 2464 , 1356 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (4032 + 0),(2268 + 0), /* [sensor ] */ + 4032 , 2268 , /* [bns ] */ + 4032 , 2268 , /* [bcrop ] */ + 4032 , 2268 , /* [bds ] */ + 4032 , 2268 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (4032 + 0),(3024 + 0), /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 3024 , /* [bcrop ] */ + 4032 , 3024 , /* [bds ] */ + 4032 , 3024 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3024 + 0),(3024 + 0), /* [sensor ] */ + 3024 , 3024 , /* [bns ] */ + 3024 , 3024 , /* [bcrop ] */ + 3024 , 3024 , /* [bds ] */ + 3024 , 3024 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4032 + 0) ,(2268 + 0) , /* [sensor ] */ + 4032 , 2268 , /* [bns ] */ + 4032 , 2268 , /* [bcrop ] */ + 2688 , 1512 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 3024 , /* [bcrop ] */ + 2688 , 2016 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3024 + 0) ,(3024 + 0) , /* [sensor ] */ + 3024 , 3024 , /* [bns ] */ + 3024 , 3024 , /* [bcrop ] */ + 2016 , 2016 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 2688 , /* [bcrop ] */ + 2688 , 1792 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 3780 , 3024 , /* [bcrop ] */ + 2480 , 1984 , /* [bds ] */ + 1344 , 1080 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4030 , 2418 , /* [bcrop ] */ + 2640 , 1584 , /* [bds ] */ + 1792 , 1080 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 3696 , 3024 , /* [bcrop ] */ + 2464 , 2016 , /* [bds ] */ + 1312 , 1080 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_120 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 2016 , 1134 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* HD_120 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1504 , 1128 , /* [bcrop ] */ + 960 , 720 , /* [bds ] */ + 960 , 720 , /* [target ] */ + }, + /* HD_120 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1120 , 1120 , /* [bcrop ] */ + 720 , 720 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* HD_120 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2016 + 0) 
,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1680 , 1120 , /* [bcrop ] */ + 1056 , 704 , /* [bds ] */ + 1056 , 704 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 1.0 + BDS = ON */ + + /* HD_240 16:9 (Single) */ + { SIZE_RATIO_16_9, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 2016 , 1134 , /* [bcrop ] */ + 2016 , 1134 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1280 , 720 , /* [target ] */ + }, + /* HD_240 4:3 (Single) */ + { SIZE_RATIO_4_3, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1504 , 1128 , /* [bcrop ] */ + 1504 , 1128 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 960 , 720 , /* [target ] */ + }, + /* HD_240 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1120 , 1120 , /* [bcrop ] */ + 1120 , 1120 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 720 , 720 , /* [target ] */ + }, + /* HD_240 3:2 (Single) */ + { SIZE_RATIO_3_2, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1680 , 1120 , /* [bcrop ] */ + 1680 , 1120 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 1056 , 704 , /* [target ] */ + } +}; + +static int VTCALL_SIZE_LUT_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 (VT_Call) */ + { SIZE_RATIO_16_9, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 2016 , 1134 , /* [bcrop ] */ + 2016 , 1134 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (VT_Call) */ + { SIZE_RATIO_4_3, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1504 , 1128 , /* [bcrop ] */ + 1504 , 1128 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (VT_Call) */ + { SIZE_RATIO_1_1, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1120 , 1120 , /* [bcrop ] */ + 1120 , 1120 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + /* 3:2 (VT_Call) */ + { SIZE_RATIO_3_2, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1680 , 1120 , /* [bcrop ] */ + 1680 , 1120 , /* [bds ] */ + 1616 , 1080 , /* [target ] */ + }, + /* 11:9 (VT_Call) */ + { SIZE_RATIO_11_9, + (2016 + 0) ,(1134 + 0) , /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1232 , 1008 , /* [bcrop ] */ + 1232 , 1008 , /* [bds ] */ + 1232 , 1008 , /* [target ] */ + } +}; + +static int LIVE_BROADCAST_SIZE_LUT_IIMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = 2 + BNS ratio = 1.0 + BDS = ON */ + + /* 16:9 */ + { SIZE_RATIO_16_9, + (2016 + 0) ,(1134 + 0), /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 2016 , 1134 , /* [bcrop ] */ + 2016 , 1134 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 */ + { SIZE_RATIO_4_3, + (2016 + 0) ,(1134 + 0), /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1472 , 1104 , /* [bcrop ] */ + 1472 , 1104 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 */ + { SIZE_RATIO_1_1, + (2016 + 0) ,(1134 + 0), /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1120 , 1120 , /* [bcrop ] */ + 1120 , 1120 , /* [bds ] */ + 1080 , 1080 , /* [target ] */ + }, + /* 11:9 */ + { SIZE_RATIO_11_9, + (2016 + 0) ,(1134 + 0), /* [sensor ] */ + 2016 , 1134 , /* [bns ] */ + 1232 , 1008 , /* [bcrop ] */ + 1232 , 1008 , /* [bds ] */ + 1232 , 1008 , /* [target ] */ + }, +}; + +static int 
FAST_AE_STABLE_SIZE_LUT_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = ON + BNS ratio = 4.0 / FPS = 120 + BDS = ON */ + + /* FAST_AE 4:3 (Single) */ + { SIZE_RATIO_4_3, + (1008 + 0) , (756 + 0) , /* [sensor ] */ + 1008 , 756 , /* [bns ] */ + 1008 , 756 , /* [bcrop ] */ + 1008 , 756 , /* [bds ] */ + 1008 , 756 , /* [target ] */ + }, +}; + +static int YUV_SIZE_LUT_IMX260_2L1[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = 1080p */ + + /* 16:9 (Single) */ + { SIZE_RATIO_16_9, + (4032 + 0) ,(2268 + 0) , /* [sensor ] */ + 4032 , 2268 , /* [bns ] */ + 4032 , 2268 , /* [bcrop ] */ + 4032 , 2268 , /* [bds ] *//* UHD (3840x2160) special handling in ExynosCameraParameters class */ + 4032 , 2268 , /* [target ] */ + }, + /* 4:3 (Single) */ + { SIZE_RATIO_4_3, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 3024 , /* [bcrop ] */ + 4032 , 3024 , /* [bds ] */ + 4032 , 3024 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (3024 + 0) ,(3024 + 0) , /* [sensor ] */ + 3024 , 3024 , /* [bns ] */ + 3024 , 3024 , /* [bcrop ] */ + 3024 , 3024 , /* [bds ] */ + 3024 , 3024 , /* [target ] */ + }, + /* 3:2 (Single) */ + { SIZE_RATIO_3_2, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4032 , 2688 , /* [bcrop ] */ + 4032 , 2688 , /* [bds ] */ + 4032 , 2688 , /* [target ] */ + }, + /* 5:4 (Single) */ + { SIZE_RATIO_5_4, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 3780 , 3024 , /* [bcrop ] */ + 3780 , 3024 , /* [bds ] */ + 3780 , 3024 , /* [target ] */ + }, + /* 5:3 (Single) */ + { SIZE_RATIO_5_3, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 4030 , 2418 , /* [bcrop ] */ + 4030 , 2418 , /* [bds ] */ + 4030 , 2418 , /* [target ] */ + }, + /* 11:9 (Single) */ + { SIZE_RATIO_11_9, + (4032 + 0) ,(3024 + 0) , /* [sensor ] */ + 4032 , 3024 , /* [bns ] */ + 3696 , 3024 , /* [bcrop ] */ + 3696 , 3024 , /* [bds ] */ + 3696 , 3024 , /* [target ] */ + } +}; + +static int DEPTH_MAP_SIZE_LUT_IMX260_2L1[][SIZE_OF_RESOLUTION] = +{ + { SIZE_RATIO_16_9, + 504 , 282 , /* [vci ] */ + }, + { SIZE_RATIO_4_3, + 504 , 378 , /* [vci ] */ + }, + { SIZE_RATIO_1_1, + 376 , 376 , /* [vci ] */ + }, + { SIZE_RATIO_3_2, + 504 , 378 , /* [vci ] */ + }, + { SIZE_RATIO_5_4, + 504 , 378 , /* [vci ] */ + }, + { SIZE_RATIO_5_3, + 504 , 378 , /* [vci ] */ + }, + { SIZE_RATIO_11_9, + 504 , 378 , /* [vci ] */ + }, +}; + +static int IMX260_2L1_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(LIMIT_SCP_SIZE_UNTIL_FHD_ON_CAPTURE) +#else + { 2560, 1440, SIZE_RATIO_16_9}, + { 1920, 1440, SIZE_RATIO_4_3}, + { 1440, 1440, SIZE_RATIO_1_1}, +#endif +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 1024, 768, SIZE_RATIO_4_3}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9} +}; + +static int IMX260_2L1_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, +#endif + { 3840, 2160, 
SIZE_RATIO_16_9}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1056, 864, SIZE_RATIO_11_9}, + { 960, 960, SIZE_RATIO_1_1}, /* for Clip Movie */ + { 960, 540, SIZE_RATIO_16_9}, /* for GearVR*/ + { 640, 360, SIZE_RATIO_16_9}, /* for SWIS & GearVR*/ + { 528, 432, SIZE_RATIO_11_9}, + { 800, 600, SIZE_RATIO_4_3}, /* for GearVR */ + { 800, 480, SIZE_RATIO_5_3}, + { 672, 448, SIZE_RATIO_3_2}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int IMX260_2L1_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4032, 3024, SIZE_RATIO_4_3}, + { 4032, 2268, SIZE_RATIO_16_9}, + { 3024, 3024, SIZE_RATIO_1_1}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2880, 2160, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2160, 2160, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int IMX260_2L1_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4128, 3096, SIZE_RATIO_4_3}, + { 4128, 2322, SIZE_RATIO_16_9}, + { 4096, 3072, SIZE_RATIO_4_3}, + { 4096, 2304, SIZE_RATIO_16_9}, + { 3840, 2160, SIZE_RATIO_16_9}, + { 3200, 2400, SIZE_RATIO_4_3}, + { 3072, 1728, SIZE_RATIO_16_9}, + { 2988, 2988, SIZE_RATIO_1_1}, + { 2656, 1494, SIZE_RATIO_16_9}, /* use S-note */ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1936, SIZE_RATIO_4_3}, /* not exactly matched ratio */ + { 2560, 1920, SIZE_RATIO_4_3}, + { 2448, 2448, SIZE_RATIO_1_1}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 1440, 1080, SIZE_RATIO_4_3}, /* binning capture */ + { 1080, 1080, SIZE_RATIO_1_1}, /* binning capture */ + { 1616, 1080, SIZE_RATIO_3_2}, /* binning capture */ + { 1232, 1008, SIZE_RATIO_11_9}, /* binning capture */ + { 1056, 704, SIZE_RATIO_3_2}, /* binning capture */ + { 720, 720, SIZE_RATIO_1_1}, /* dummy size for binning mode */ + { 352, 288, SIZE_RATIO_11_9}, /* dummy size for binning mode */ +}; + +static int IMX260_2L1_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, +/* TODO : will be supported after enable S/W scaler correctly */ +// { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int IMX260_2L1_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1440, SIZE_RATIO_1_1}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1088, 1088, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* for CAMERA2_API_SUPPORT */ + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int IMX260_2L1_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#ifdef USE_UHD_RECORDING + { 3840, 2160, SIZE_RATIO_16_9}, +#endif +#ifdef USE_WQHD_RECORDING + { 2560, 1440, SIZE_RATIO_16_9}, +#endif + { 960, 960, SIZE_RATIO_1_1}, /* for Clip Movie */ + { 864, 480, SIZE_RATIO_16_9}, /* for PLB mode */ + { 432, 240, SIZE_RATIO_16_9}, /* for PLB mode */ +}; + +/* For HAL3 */ +static int IMX260_2L1_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4032, 3024, SIZE_RATIO_4_3}, + { 4032, 2268, SIZE_RATIO_16_9}, + { 3024, 3024, SIZE_RATIO_1_1}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, 
+ { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2880, 2160, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2160, 2160, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, + { 256, 144, SIZE_RATIO_16_9}, /* DngCreatorTest */ + { 176, 144, SIZE_RATIO_11_9}, /* RecordingTest */ +}; + +/* available Jpeg size (only for HAL_PIXEL_FORMAT_BLOB) */ +static int IMX260_2L1_JPEG_LIST[][SIZE_OF_RESOLUTION] = +{ + { 4032, 3024, SIZE_RATIO_4_3}, + { 4032, 2268, SIZE_RATIO_16_9}, + { 3024, 3024, SIZE_RATIO_1_1}, + { 3984, 2988, SIZE_RATIO_4_3}, + { 3264, 2448, SIZE_RATIO_4_3}, + { 3264, 1836, SIZE_RATIO_16_9}, + { 2976, 2976, SIZE_RATIO_1_1}, + { 2880, 2160, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2160, 2160, SIZE_RATIO_1_1}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +/* For HAL3 */ +static int IMX260_2L1_HIGH_SPEED_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1280, 720, SIZE_RATIO_16_9}, +}; + +static int IMX260_2L1_FPS_RANGE_LIST[][2] = +{ + //{ 5000, 5000}, + //{ 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + //{ 4000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int IMX260_2L1_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 10000, 24000}, + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, + { 240000, 240000}, +}; + +/* For HAL3 */ +static int IMX260_2L1_HIGH_SPEED_VIDEO_FPS_RANGE_LIST[][2] = +{ + { 30000, 120000}, + { 120000, 120000}, + { 30000, 240000}, + { 240000, 240000}, +}; + +static camera_metadata_rational UNIT_MATRIX_IMX260_2L1_3X3[] = +{ + {128, 128}, {0, 128}, {0, 128}, + {0, 128}, {128, 128}, {0, 128}, + {0, 128}, {0, 128}, {128, 128} +}; + +static camera_metadata_rational COLOR_MATRIX1_IMX260_3X3[] = +{ + {735, 1024}, {-149, 1024}, {-125, 1024}, + {-624, 1024}, {1580, 1024}, {34, 1024}, + {-134, 1024}, {296, 1024}, {442, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_IMX260_3X3[] = +{ + {1294, 1024}, {-603, 1024}, {-115, 1024}, + {-550, 1024}, {1571, 1024}, {160, 1024}, + {-67, 1024}, {191, 1024}, {642, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX1_2L1_3X3[] = +{ + {593, 1024}, {78, 1024}, {-118, 1024}, + {-491, 1024}, {1412, 1024},{72, 1024}, + {-137, 1024}, {417, 1024}, {428, 1024} +}; + +static camera_metadata_rational COLOR_MATRIX2_2L1_3X3[] = +{ + {1032, 1024}, {-174, 1024}, {-276, 1024}, + {-442, 1024}, {1495, 1024}, {39, 1024}, + {-76, 1024}, {333, 1024}, {614, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_IMX260_3X3[] = +{ + {665, 1024}, {107, 1024}, {216, 1024}, + {264, 1024}, {702, 1024}, {58, 1024}, + {16, 1024}, {-328, 1024}, {1157, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_IMX260_3X3[] = +{ + {622, 1024}, {191, 1024}, {175, 1024}, + {199, 1024}, {832, 1024}, {-7, 1024}, + {15, 1024}, {-540, 1024}, {1370, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX1_2L1_3X3[] = +{ + {893, 1024}, {-200, 1024}, {294, 1024}, + {317, 1024}, {697, 1024}, {10, 1024}, + {-16, 1024}, {-557, 1024}, {1418, 1024} +}; + +static camera_metadata_rational FORWARD_MATRIX2_2L1_3X3[] = +{ + {750, 1024}, {-150, 1024}, {387, 1024}, + {186, 1024}, {748, 1024}, {90, 1024}, + 
{-55, 1024}, {-987, 1024}, {1887, 1024} +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableOV5670.h b/libcamera/common_v2/ExynosCameraSizeTableOV5670.h new file mode 100644 index 0000000..8f0bb5c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableOV5670.h @@ -0,0 +1,256 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_OV5670_H +#define EXYNOS_CAMERA_LUT_OV5670_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 12 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_OV5670[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2600 , 1466 , /* [bcrop ] */ + 2600 , 1466 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2600 , 1952 , /* [bcrop ] */ + 2600 , 1952 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1944 , 1944 , /* [bcrop ] */ + 1944 , 1944 , /* [bds ] */ + 1072 , 1072 , /* [target ] */ + }, +}; + +static int PICTURE_SIZE_LUT_OV5670[][SIZE_OF_LUT] = +{ + { SIZE_RATIO_16_9, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2600 , 1466 , /* [bcrop ] */ + 2600 , 1466 , /* [bds ] */ + 2560 , 1440 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2600 , 1952 , /* [bcrop ] */ + 2600 , 1952 , /* [bds ] */ + 2560 , 1920 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 1944 , 1944 , /* [bcrop ] */ + 1944 , 1944 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_OV5670[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2600 , 1466 , /* [bcrop ] */ + 2600 , 1466 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2608 + 0),(1960 + 0), /* [sensor ] */ + 2608 , 1960 , /* [bns ] */ + 2600 , 1952 , /* [bcrop ] */ + 2600 , 1952 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, +}; + + +static int OV5670_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= 
LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, +#endif + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, + { 960, 720, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_11_9}, + { 736, 736, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int OV5670_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int OV5670_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2576, 1932, SIZE_RATIO_4_3}, + { 2560, 1440, SIZE_RATIO_16_9}, + { 2560, 1920, SIZE_RATIO_4_3}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int OV5670_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int OV5670_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int OV5670_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int OV5670_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int OV5670_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int OV5670_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraSizeTableSR259.h b/libcamera/common_v2/ExynosCameraSizeTableSR259.h new file mode 100644 index 0000000..048db34 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableSR259.h @@ -0,0 +1,171 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_SR259_H +#define EXYNOS_CAMERA_LUT_SR259_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +-----------------------------*/ + +static int SR259_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 960, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, /* for 32 align of fimc limitation */ + { 960, 720, SIZE_RATIO_4_3}, + { 736, 736, SIZE_RATIO_1_1}, /* for 32 align of fimc limitation */ + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int SR259_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1024, 768, SIZE_RATIO_4_3}, + { 880, 720, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 672, 448, SIZE_RATIO_3_2}, /* for 32 align of fimc limitation */ + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int SR259_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1616, 1212, SIZE_RATIO_4_3}, + { 1600, 900, SIZE_RATIO_16_9}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int SR259_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int SR259_YUV_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1616, 1212, SIZE_RATIO_4_3}, + { 1600, 900, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 1056, 704, SIZE_RATIO_3_2}, /* for 32 align of fimc limitation */ + { 960, 720, SIZE_RATIO_4_3}, + { 736, 736, SIZE_RATIO_1_1}, /* for 32 align of fimc limitation */ + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int SR259_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 
384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1}, +}; + +static int SR259_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9}, +}; + +static int SR259_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +}; + +static int SR259_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int SR259_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif + diff --git a/libcamera/common_v2/ExynosCameraSizeTableSR261.h b/libcamera/common_v2/ExynosCameraSizeTableSR261.h new file mode 100644 index 0000000..ccf83a1 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableSR261.h @@ -0,0 +1,155 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_LUT_SR261_H +#define EXYNOS_CAMERA_LUT_SR261_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +-----------------------------*/ + +static int SR261_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 720, SIZE_RATIO_16_9}, + { 1056, 704, SIZE_RATIO_3_2}, /* for 32 align of fimc limitation */ + { 960, 720, SIZE_RATIO_4_3}, + { 736, 736, SIZE_RATIO_1_1}, /* for 32 align of fimc limitation */ + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int SR261_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if !(defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080)) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, /* for 32 align of ficm limitation */ + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int SR261_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int SR261_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int SR261_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int SR261_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int SR261_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +}; + +static int SR261_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int SR261_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif + diff --git a/libcamera/common_v2/ExynosCameraSizeTableSR544.h b/libcamera/common_v2/ExynosCameraSizeTableSR544.h new file mode 100644 index 0000000..0400ed4 --- 
/dev/null +++ b/libcamera/common_v2/ExynosCameraSizeTableSR544.h @@ -0,0 +1,386 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_LUT_SR544_H +#define EXYNOS_CAMERA_LUT_SR544_H + +/* ------------------------- + SIZE_RATIO_16_9 = 0, + SIZE_RATIO_4_3, + SIZE_RATIO_1_1, + SIZE_RATIO_3_2, + SIZE_RATIO_5_4, + SIZE_RATIO_5_3, + SIZE_RATIO_11_9, + SIZE_RATIO_END +---------------------------- + RATIO_ID, + SENSOR_W = 1, + SENSOR_H, + BNS_W, + BNS_H, + BCROP_W, + BCROP_H, + BDS_W, + BDS_H, + TARGET_W, + TARGET_H, +----------------------------- + Sensor Margin Width = 16, + Sensor Margin Height = 10 +-----------------------------*/ + +static int PREVIEW_SIZE_LUT_SR544[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2592 + 8), (1458 + 8), /* [sensor ] */ + 2600 , 1466 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 2592 , 1458 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 2592 , 1944 , /* [bds ] */ + 1280 , 960 , /* [target ] */ + }, + /* 1:1 (Single, Dual) */ + { SIZE_RATIO_1_1, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1952 , 1944 , /* [bcrop ] */ + 1952 , 1944 , /* [bds ] */ + 720 , 720 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2220 , 1480 , /* [bcrop ] */ + 2220 , 1480 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1620 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1850 , 1480 , /* [bcrop ] */ + 1850 , 1480 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1350 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1850 , 1110 , /* [bcrop ] */ + 1850 , 1110 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1850 , 1110 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2384 , 1944 , /* [bcrop ] */ + 2384 , 1944 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int PICTURE_SIZE_LUT_SR544[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = OFF */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2592 + 8), (1458 + 8), /* [sensor ] */ + 2600 , 1466 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 2592 , 1458 , /* [bds ] */ + 2592 , 1458 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 2592 , 1944 , /* [bds ] */ + 2592 , 1944 , /* [target ] */ + }, + /* 1:1 (Single) */ + { 
SIZE_RATIO_1_1, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1952 , 1944 , /* [bcrop ] */ + 1952 , 1944 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2220 , 1480 , /* [bcrop ] */ + 2220 , 1480 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1620 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1850 , 1480 , /* [bcrop ] */ + 1850 , 1480 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1350 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1850 , 1110 , /* [bcrop ] */ + 1850 , 1110 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1850 , 1110 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2384 , 1944 , /* [bcrop ] */ + 2384 , 1944 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_SR544[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS :NO */ + + /* 16:9 (Single, Dual) */ + { SIZE_RATIO_16_9, + (2592 + 8), (1458 + 8), /* [sensor ] */ + 2600 , 1466 , /* [bns ] */ + 2592 , 1458 , /* [bcrop ] */ + 2592 , 1458 , /* [bds ] */ + 1920 , 1080 , /* [target ] */ + }, + /* 4:3 (Single, Dual) */ + { SIZE_RATIO_4_3, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2592 , 1944 , /* [bcrop ] */ + 2592 , 1944 , /* [bds ] */ + 1440 , 1080 , /* [target ] */ + }, + /* 1:1 (Single) */ + { SIZE_RATIO_1_1, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1952 , 1944 , /* [bcrop ] */ + 1952 , 1944 , /* [bds ] */ + 1920 , 1920 , /* [target ] */ + }, + /* 3:2 (Single, Dual) */ + { SIZE_RATIO_3_2, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2220 , 1480 , /* [bcrop ] */ + 2220 , 1480 , /* [bds ] *//* w=1620, Reduced for 16 pixel align */ + 1620 , 1080 , /* [target ] */ + }, + /* 5:4 (Single, Dual) */ + { SIZE_RATIO_5_4, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1850 , 1480 , /* [bcrop ] */ + 1850 , 1480 , /* [bds ] *//* w=1350, Reduced for 16 pixel align */ + 1350 , 1080 , /* [target ] */ + }, + /* 5:3 (Single, Dual) */ + { SIZE_RATIO_5_3, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 1850 , 1110 , /* [bcrop ] */ + 1850 , 1110 , /* [bds ] *//* w=1800, Reduced for 16 pixel align */ + 1850 , 1110 , /* [target ] */ + }, + /* 11:9 (Single, Dual) */ + { SIZE_RATIO_11_9, + (2592 + 8), (1944 + 8), /* [sensor ] */ + 2600 , 1952 , /* [bns ] */ + 2384 , 1944 , /* [bcrop ] */ + 2384 , 1944 , /* [bds ] */ + 1056 , 864 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_SR544[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = NO */ + + /* HD_60 16:9 (Single) */ + { SIZE_RATIO_16_9, + (1280 + 16),( 720 + 10), /* [sensor ] */ + 1280 , 720 , /* [bns ] */ + 1280 , 720 , /* [bcrop ] */ + 1280 , 720 , /* [bds ] */ + 1280 , 720 , /* [target ] */ + } +}; + +static int VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_SR544[][SIZE_OF_LUT] = +{ + /* Binning = OFF + BNS ratio = 1.0 + BDS = NO */ + + /* VGA_120 4:3 (Fast AE) */ + { SIZE_RATIO_4_3, + ( 640 + 8),( 480 + 8), /* [sensor ] */ + 640 , 480 , /* [bns ] */ + 640 , 480 , /* [bcrop ] */ + 640 , 480 , /* [bds ] */ + 640 , 480 
, /* [target ] */ + }, +}; + +static int SR544_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1280, 960, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 640, 480, SIZE_RATIO_4_3}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, +}; + +static int SR544_HIDDEN_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + { 1920, 1080, SIZE_RATIO_16_9}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1072, 1072, SIZE_RATIO_1_1}, +#endif + { 1056, 864, SIZE_RATIO_11_9}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 720, SIZE_RATIO_1_1}, + { 720, 480, SIZE_RATIO_3_2}, + { 672, 448, SIZE_RATIO_3_2}, + { 528, 432, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 480, 270, SIZE_RATIO_16_9}, +}; + +static int SR544_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 2592, 1944, SIZE_RATIO_4_3}, + { 2592, 1458, SIZE_RATIO_16_9}, + { 2048, 1536, SIZE_RATIO_4_3}, + { 2048, 1152, SIZE_RATIO_16_9}, + { 1920, 1920, SIZE_RATIO_1_1}, + { 1920, 1080, SIZE_RATIO_16_9}, + { 1600, 1200, SIZE_RATIO_4_3}, + { 1440, 1080, SIZE_RATIO_4_3}, + { 1280, 720, SIZE_RATIO_16_9}, + { 1072, 1072, SIZE_RATIO_1_1}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, +}; + +static int SR544_HIDDEN_PICTURE_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1600, 1200, SIZE_RATIO_4_3}, + { 1280, 960, SIZE_RATIO_4_3}, + { 1024, 768, SIZE_RATIO_4_3}, + { 800, 600, SIZE_RATIO_4_3}, + { 800, 480, SIZE_RATIO_5_3}, + { 800, 450, SIZE_RATIO_16_9}, + { 720, 480, SIZE_RATIO_3_2}, + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_11_9}, + { 480, 320, SIZE_RATIO_3_2}, + { 320, 240, SIZE_RATIO_4_3}, + { 320, 180, SIZE_RATIO_16_9}, +}; + +static int SR544_THUMBNAIL_LIST[][SIZE_OF_RESOLUTION] = +{ + { 512, 384, SIZE_RATIO_4_3}, + { 512, 288, SIZE_RATIO_16_9}, + { 384, 384, SIZE_RATIO_1_1}, + { 320, 240, SIZE_RATIO_4_3}, + { 0, 0, SIZE_RATIO_1_1} +}; + +static int SR544_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ + { 1920, 1080, SIZE_RATIO_16_9}, + { 1280, 720, SIZE_RATIO_16_9}, + { 960, 720, SIZE_RATIO_4_3}, + { 640, 480, SIZE_RATIO_4_3}, + { 480, 320, SIZE_RATIO_3_2}, + { 352, 288, SIZE_RATIO_11_9}, + { 320, 240, SIZE_RATIO_4_3}, + { 176, 144, SIZE_RATIO_11_9} +}; + +static int SR544_HIDDEN_VIDEO_LIST[][SIZE_OF_RESOLUTION] = +{ +#if defined (USE_HORIZONTAL_UI_TABLET_4G_VT) + { 480, 640, SIZE_RATIO_3_4}, +#endif +}; + +static int SR544_FPS_RANGE_LIST[][2] = +{ + { 5000, 5000}, + { 7000, 7000}, + { 15000, 15000}, + { 24000, 24000}, + { 4000, 30000}, + { 8000, 30000}, + { 10000, 30000}, + { 15000, 30000}, + { 30000, 30000}, +}; + +static int SR544_HIDDEN_FPS_RANGE_LIST[][2] = +{ + { 30000, 60000}, + { 60000, 60000}, + { 60000, 120000}, + { 120000, 120000}, +}; +#endif diff --git a/libcamera/common_v2/ExynosCameraStreamManager.cpp b/libcamera/common_v2/ExynosCameraStreamManager.cpp new file mode 100644 index 0000000..e0b6eb5 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraStreamManager.cpp @@ -0,0 +1,739 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraStreamManager" + +#include "ExynosCameraStreamManager.h" + +namespace android { + +ExynosCamera3Stream::ExynosCamera3Stream(int id, camera3_stream_t *stream) +{ + m_init(); + m_id = id; + m_stream = stream; +} + +ExynosCamera3Stream::~ExynosCamera3Stream() +{ + m_deinit(); +} + +status_t ExynosCamera3Stream::m_init() +{ + m_stream = NULL; + m_id = -1; + m_actualFormat = -1; + m_planeCount = -1; + m_outputPortId = -1; + m_registerStream = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + m_registerBuffer = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + m_requestbuffer = -1; + m_bufferManager = NULL; + + return NO_ERROR; +} + +status_t ExynosCamera3Stream::m_deinit() +{ + m_stream = NULL; + m_id = -2; + m_actualFormat = -2; + m_planeCount = -2; + m_outputPortId = -2; + m_registerStream = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + m_registerBuffer = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + m_requestbuffer = -2; + + if (m_bufferManager != NULL) { + delete m_bufferManager; + m_bufferManager = NULL; + } + + return NO_ERROR; +} + +status_t ExynosCamera3Stream::setStream(camera3_stream_t *stream) +{ + status_t ret = NO_ERROR; + if (stream == NULL) { + ALOGE("ERR(%s[%d]):setStream is NULL ", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + } else { + m_stream = stream; + } + + return ret; +} + +status_t ExynosCamera3Stream::getStream(camera3_stream_t **stream) +{ + status_t ret = NO_ERROR; + if (m_stream != NULL) { + *stream = m_stream; + } else { + ALOGE("ERR(%s[%d]):getStream m_stream is NULL ", __FUNCTION__, __LINE__); + *stream = NULL; + ret = INVALID_OPERATION; + } + return ret; +} + +status_t ExynosCamera3Stream::setID(int id) +{ + status_t ret = NO_ERROR; + + if (id < 0) { + ALOGE("ERR(%s[%d]):setStreamID invalid value(%d)", __FUNCTION__, __LINE__, id); + ret = INVALID_OPERATION; + } else { + m_id = id; + } + return ret; +} + +status_t ExynosCamera3Stream::getID(int *id) +{ + status_t ret = NO_ERROR; + + if (m_id < 0) { + ALOGE("ERR(%s[%d]):getStreamID invalid value(%d)", __FUNCTION__, __LINE__, m_id); + ret = INVALID_OPERATION; + } else { + *id = m_id; + } + + return ret; +} + +status_t ExynosCamera3Stream::setFormat(int format) +{ + status_t ret = NO_ERROR; + + if (format < 0) { + ALOGE("ERR(%s[%d]):setFormat invalid value(%d)", __FUNCTION__, __LINE__, format); + ret = INVALID_OPERATION; + } else { + m_actualFormat = format; + } + return ret; +} + +status_t ExynosCamera3Stream::getFormat(int *format) +{ + status_t ret = NO_ERROR; + + if (m_actualFormat < 0) { + ALOGE("ERR(%s[%d]):getFormat invalid value(%d)", __FUNCTION__, __LINE__, m_actualFormat); + ret = INVALID_OPERATION; + } else { + *format = m_actualFormat; + } + return ret; +} + +status_t ExynosCamera3Stream::setPlaneCount(int planes) +{ + status_t ret = NO_ERROR; + + if (planes < 0) { + ALOGE("ERR(%s[%d]):setPlaneCount invalid value(%d)", __FUNCTION__, __LINE__, planes); + ret = INVALID_OPERATION; + } else { + m_planeCount = planes; + } + return ret; +} + +status_t ExynosCamera3Stream::getPlaneCount(int *planes) +{ + status_t ret = NO_ERROR; + + if (m_planeCount < 0) { + 
ALOGE("ERR(%s[%d]):getPlaneCount invalid value(%d)", __FUNCTION__, __LINE__, m_planeCount); + ret = INVALID_OPERATION; + } else { + *planes = m_planeCount; + } + return ret; +} + +status_t ExynosCamera3Stream::setOutputPortId(int id) +{ + status_t ret = NO_ERROR; + + if (id < 0) { + ALOGE("ERR(%s[%d]):Invalid outputPortId %d", + __FUNCTION__, __LINE__, id); + ret = BAD_VALUE; + } else { + m_outputPortId = id; + } + + return ret; +} + +status_t ExynosCamera3Stream::getOutputPortId(int *id) +{ + status_t ret = NO_ERROR; + + if (m_outputPortId < 0) { + ALOGE("ERR(%s[%d]):Invalid outputPortId %d", + __FUNCTION__, __LINE__, m_outputPortId); + ret = BAD_VALUE; + } else { + *id = m_outputPortId; + } + + return ret; +} + +status_t ExynosCamera3Stream::setRegisterStream(EXYNOS_STREAM::STATE state) +{ + status_t ret = NO_ERROR; + switch (state) { + case EXYNOS_STREAM::HAL_STREAM_STS_INVALID: + case EXYNOS_STREAM::HAL_STREAM_STS_VALID: + m_registerStream = state; + break; + case EXYNOS_STREAM::HAL_STREAM_STS_INIT: + default: + ALOGE("ERR(%s[%d]):setRegisterStream invalid value(%d)", __FUNCTION__, __LINE__, state); + break; + } + return ret; +} + +status_t ExynosCamera3Stream::getRegisterStream(EXYNOS_STREAM::STATE *state) +{ + status_t ret = NO_ERROR; + switch (m_registerStream) { + case EXYNOS_STREAM::HAL_STREAM_STS_INIT: + case EXYNOS_STREAM::HAL_STREAM_STS_INVALID: + case EXYNOS_STREAM::HAL_STREAM_STS_VALID: + *state = m_registerStream; + break; + default: + ALOGE("ERR(%s[%d]):getRegisterStream invalid value(%d)", __FUNCTION__, __LINE__, *state); + break; + } + return ret; +} + +status_t ExynosCamera3Stream::setRegisterBuffer(EXYNOS_STREAM::STATE state) +{ + status_t ret = NO_ERROR; + switch (state) { + case EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED: + case EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED: + m_registerBuffer = state; + break; + case EXYNOS_STREAM::HAL_STREAM_STS_INIT: + default: + ALOGE("ERR(%s[%d]):setRegisterBuffer invalid value(%d)", __FUNCTION__, __LINE__, state); + break; + } + return ret; +} + +status_t ExynosCamera3Stream::getRegisterBuffer(EXYNOS_STREAM::STATE *state) +{ + status_t ret = NO_ERROR; + switch (m_registerBuffer) { + case EXYNOS_STREAM::HAL_STREAM_STS_INIT: + case EXYNOS_STREAM::HAL_STREAM_STS_REGISTERED: + case EXYNOS_STREAM::HAL_STREAM_STS_UNREGISTERED: + *state = m_registerBuffer; + break; + default: + ALOGE("ERR(%s[%d]):getRegisterBuffer invalid value(%d)", __FUNCTION__, __LINE__, *state); + break; + } + return ret; +} + +status_t ExynosCamera3Stream::setRequestBuffer(int bufferCnt) +{ + status_t ret = NO_ERROR; + + if (bufferCnt < 0) { + ALOGE("ERR(%s[%d]):setRequestBuffer invalid value(%d)", __FUNCTION__, __LINE__, bufferCnt); + ret = INVALID_OPERATION; + } else { + m_requestbuffer = bufferCnt; + m_stream->max_buffers = m_requestbuffer; + } + return ret; +} + +status_t ExynosCamera3Stream::getRequestBuffer(int *bufferCnt) +{ + status_t ret = NO_ERROR; + + if (m_requestbuffer < 0) { + ALOGE("ERR(%s[%d]):getRequestBuffer invalid value(%d)", __FUNCTION__, __LINE__, m_requestbuffer); + ret = INVALID_OPERATION; + } else { + *bufferCnt = m_requestbuffer; + } + return ret; +} + +status_t ExynosCamera3Stream::setBufferManager(ExynosCameraBufferManager *bufferManager) +{ + status_t ret = NO_ERROR; + + if (m_bufferManager != NULL) { + ALOGE("ERR(%s[%d]):m_bufferManager is not NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + bufferManager = NULL; + } else { + m_bufferManager = bufferManager; + ALOGD("DEBUG(%s[%d]):m_bufferManager(%p)", __FUNCTION__, __LINE__,
m_bufferManager); + } + + return ret; +} + +status_t ExynosCamera3Stream::getBufferManager(ExynosCameraBufferManager **bufferManager) +{ + status_t ret = NO_ERROR; + + if (m_bufferManager == NULL) { + ALOGE("ERR(%s[%d]):m_bufferManager is NULL", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + *bufferManager = NULL; + } else { + *bufferManager = m_bufferManager; + ALOGV("DEBUG(%s[%d]):m_bufferManager(%p)", __FUNCTION__, __LINE__, m_bufferManager); + } + + return ret; +} + +ExynosCameraStreamManager::ExynosCameraStreamManager(int cameraId) +{ + ALOGD("DEBUG(%s[%d]):ID(%d)", __FUNCTION__, __LINE__, cameraId); + m_cameraId = cameraId; + m_init(); +} + +ExynosCameraStreamManager::~ExynosCameraStreamManager() +{ + ALOGD("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + m_deinit(); +} + +void ExynosCameraStreamManager::m_init() +{ + ALOGD("DEBUG(%s[%d]):ID(%d)", __FUNCTION__, __LINE__, m_cameraId); + m_exynosconfig = NULL; + m_streamInfoMap.clear(); + m_yuvStreamCount = 0; + m_yuvStreamMaxCount = 0; + + for (int i = 0; i < MAX_YUV_STREAM_COUNT; i++) + m_yuvStreamIdMap[i] = -1; +} + +void ExynosCameraStreamManager::m_deinit() +{ + ALOGD("DEBUG(%s[%d]):ID(%d)", __FUNCTION__, __LINE__, m_cameraId); + StreamInfoMap::iterator iter; + + ExynosCameraStream *streaminfo = NULL; + + m_streamInfoLock.lock(); + for (iter = m_streamInfoMap.begin() ; iter != m_streamInfoMap.end() ;) { + streaminfo = iter->second; + m_streamInfoMap.erase(iter++); + delete streaminfo; + streaminfo = NULL; + } + m_streamInfoMap.clear(); + m_streamInfoLock.unlock(); +} + +status_t ExynosCameraStreamManager::dumpCurrentStreamList(void) +{ + ALOGD("DEBUG(%s[%d]):Stream List dump-----", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + ExynosCameraStream *streaminfo = NULL; + int id = 0; + EXYNOS_STREAM::STATE registerStream = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + EXYNOS_STREAM::STATE registerBuf = EXYNOS_STREAM::HAL_STREAM_STS_INIT; + camera3_stream_t *currentStream = NULL; + + + + if (m_streamInfoMap.empty()) { + ALOGE("ERR(%s[%d]):list is empty", __FUNCTION__, __LINE__); + return NOT_ENOUGH_DATA; + } + + for (StreamInfoIterator s = m_streamInfoMap.begin(); s != m_streamInfoMap.end();) { + streaminfo = s->second; + if (streaminfo == NULL) { + ALOGE("ERR(%s[%d]):streaminfo is NULL id(%d)", __FUNCTION__, __LINE__, s->first); + s++; + continue; + } + + ret = streaminfo->getStream(&currentStream); + if (ret < 0) { + ALOGE("ERR(%s[%d]):m_insert failed -> delete stream, id(%d)", __FUNCTION__, __LINE__, s->first); + s++; + continue; + } + + streaminfo->getID(&id); + streaminfo->getRegisterStream(&registerStream); + streaminfo->getRegisterBuffer(&registerBuf); + ALOGD("DEBUG(%s[%d]):Stream(%p), ID(%d), type(%d), usage(0x%x) format(0x%x) (%d,%d)", __FUNCTION__, + __LINE__, currentStream, id, + currentStream->stream_type, currentStream->usage, currentStream->format, + currentStream->width, currentStream->height); + ALOGD("DEBUG(%s[%d]):status %d / %d", __FUNCTION__, __LINE__, registerStream, registerBuf); + s++; + } + + return OK; +} + +status_t ExynosCameraStreamManager::setConfig(struct ExynosConfigInfo *config) +{ + status_t ret = NO_ERROR; + m_exynosconfig = config; + + ALOGD("DEBUG(%s[%d]):preview(%d), raw(%d), jpeg(%d), recording(%d), yuvCallback(%d)", + __FUNCTION__, __LINE__, + m_exynosconfig->current->bufInfo.num_preview_buffers, + m_exynosconfig->current->bufInfo.num_bayer_buffers, + m_exynosconfig->current->bufInfo.num_picture_buffers, + m_exynosconfig->current->bufInfo.num_recording_buffers, +
m_exynosconfig->current->bufInfo.num_preview_cb_buffers); + + return ret; +} + +ExynosCameraStream* ExynosCameraStreamManager::createStream(int id, camera3_stream_t *stream) +{ + int ret = NO_ERROR; + ExynosCameraStream *obj = NULL; + obj = new ExynosCamera3Stream(id, stream); + if (obj == NULL){ + ALOGE("ERR(%s[%d]):m_insert failed stream id(%d)", __FUNCTION__, __LINE__, id); + return NULL; + } + stream->priv = static_cast<void *>(obj); + + ret = m_insert(id, obj, &m_streamInfoMap, &m_streamInfoLock); + if (ret < 0){ + m_delete(obj); + ALOGE("ERR(%s[%d]):m_insert failed -> delete stream, id(%d)", __FUNCTION__, __LINE__, id); + } + return obj; +} + +status_t ExynosCameraStreamManager::deleteStream(int id) +{ + int ret = NO_ERROR; + + ret = m_delete(id, &m_streamInfoMap, &m_streamInfoLock); + if (ret < 0) { + ALOGE("ERR(%s[%d]):eraseStream failed stream id(%d)", __FUNCTION__, __LINE__, id); + } + + ret = m_decreaseYuvStreamCount(id); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Failed to decreaseYuvStreamCount. streamId %d", + __FUNCTION__, __LINE__, id); + } + + return ret; +} + +status_t ExynosCameraStreamManager::getStream(int id, ExynosCameraStream **stream) +{ + status_t ret = NO_ERROR; + *stream = NULL; + + ret = m_get(id, stream, &m_streamInfoMap, &m_streamInfoLock); + + if (ret < 0) { + *stream = NULL; + return BAD_VALUE; + } + + return ret; +} + +bool ExynosCameraStreamManager::findStream(int id) +{ + status_t ret = NO_ERROR; + bool found = true; + + ret = m_find(id, &m_streamInfoMap, &m_streamInfoLock); + if (ret != NO_ERROR) + found = false; + + return found; +} + +status_t ExynosCameraStreamManager::getStreamKeys(List<int> *keylist) +{ + status_t ret = NO_ERROR; + StreamInfoIterator iter; + + for (iter = m_streamInfoMap.begin(); iter != m_streamInfoMap.end() ; iter++) { + ALOGV("DEBUG(%s[%d]):stream key is(%d)", __FUNCTION__, __LINE__, iter->first); + + keylist->push_back(iter->first); + } + + return ret; +} + +status_t ExynosCameraStreamManager::setYuvStreamMaxCount(int32_t count) +{ + m_yuvStreamMaxCount = count; + + return NO_ERROR; +} + +int32_t ExynosCameraStreamManager::getYuvStreamCount(void) +{ + return m_yuvStreamCount; +} + +int ExynosCameraStreamManager::getYuvStreamId(int outputPortId) +{ + if (outputPortId < 0 || outputPortId >= 3) { + ALOGE("ERR(%s[%d]):Invalid outputPortId %d", + __FUNCTION__, __LINE__, outputPortId); + return -1; + } + + return m_yuvStreamIdMap[outputPortId]; +} + +int ExynosCameraStreamManager::getOutputPortId(int streamId) +{ + int outputPortId = -1; + + if (streamId < 0) { + ALOGE("ERR(%s[%d]):Invalid streamId %d", + __FUNCTION__, __LINE__, streamId); + return -1; + } + + for (int i = 0; i < 3; i++) { + if (m_yuvStreamIdMap[i] == streamId) + outputPortId = i; + } + + return outputPortId; +} + +status_t ExynosCameraStreamManager::m_insert(int id, ExynosCameraStream *item, StreamInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + lock->lock(); + pair<StreamInfoMap::iterator, bool> listRet; + + listRet = list->insert( pair<int, ExynosCameraStream *>(id, item)); + + while(listRet.second == false) { + id += HAL_STREAM_ID_MAX; + listRet = list->insert( pair<int, ExynosCameraStream *>(id, item)); + ALOGW("WARN(%s[%d]):insert id is re-define id + HAL_STREAM_ID_MAX = (%d)", __FUNCTION__, __LINE__, id); + } + + if (listRet.second == true) { + ALOGI("INFO(%s[%d]):inserted stream and id is (%d)", __FUNCTION__, __LINE__, id); + item->setID(id); + } + + ret = m_increaseYuvStreamCount(id); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):Failed to updateYuvStreamCount.
yuvStreamCount %d streamId %d", + __FUNCTION__, __LINE__, m_yuvStreamCount, id); + lock->unlock(); + return ret; + } + + lock->unlock(); + return ret; +} + +status_t ExynosCameraStreamManager::m_erase(int id, ExynosCameraStream **item, StreamInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + StreamInfoMap::iterator iter; + + lock->lock(); + iter = list->find(id); + if (iter != list->end()) { + *item = iter->second; + list->erase(iter); + } else { + ret = BAD_VALUE; + ALOGE("ERR(%s[%d]):StreamInfoMap is not EXIST stream id(%d)", __FUNCTION__, __LINE__, id); + *item = NULL; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraStreamManager::m_find(int id, StreamInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + StreamInfoMap::iterator iter; + + if (list == NULL) { + ALOGE("ERR(%s[%d]):list is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (lock == NULL) { + ALOGE("ERR(%s[%d]):lock is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + lock->lock(); + iter = list->find(id); + if (iter == list->end()) { + ret = BAD_VALUE; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraStreamManager::m_get(int id, ExynosCameraStream **item, StreamInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + StreamInfoMap::iterator iter; + + lock->lock(); + iter = list->find(id); + if (iter != list->end()) { + *item = iter->second; + } else { + ALOGE("ERR(%s[%d]):StreamInfoMap is not EXIST stream id(%d)", __FUNCTION__, __LINE__, id); + ret = BAD_VALUE; + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraStreamManager::m_delete(int id, StreamInfoMap *list, Mutex *lock) +{ + status_t ret = NO_ERROR; + StreamInfoMap::iterator iter; + ExynosCameraStream *item = NULL; + + lock->lock(); + iter = list->find(id); + if (iter != list->end()) { + item = iter->second; + list->erase(iter); + m_delete(item); + } else { + ret = BAD_VALUE; + ALOGE("ERR(%s[%d]):StreamInfoMap is not EXIST stream id(%d)", __FUNCTION__, __LINE__, id); + } + lock->unlock(); + + return ret; +} + +status_t ExynosCameraStreamManager::m_delete(ExynosCameraStream *stream) +{ + status_t ret = NO_ERROR; + camera3_stream_t *obj = NULL; + int streamId = -1; + + if (stream == NULL){ + ret = BAD_VALUE; + ALOGE("ERR(%s[%d]):StreamInfoMap is not EXIST stream", __FUNCTION__, __LINE__); + return ret; + } + + ret = stream->getStream(&obj); + obj->priv = NULL; + + stream->getID(&streamId); + + delete stream; + stream = NULL; + return ret; +} + +status_t ExynosCameraStreamManager::m_increaseYuvStreamCount(int id) +{ + status_t ret = NO_ERROR; + + switch (id % HAL_STREAM_ID_MAX) { + case HAL_STREAM_ID_PREVIEW: + case HAL_STREAM_ID_VIDEO: + case HAL_STREAM_ID_CALLBACK: + m_yuvStreamIdMap[m_yuvStreamCount++] = id; + break; + case HAL_STREAM_ID_RAW: + case HAL_STREAM_ID_JPEG: + case HAL_STREAM_ID_ZSL_OUTPUT: + case HAL_STREAM_ID_ZSL_INPUT: + /* Not YUV streams */ + break; + default: + ALOGE("ERR(%s[%d]):Unsupported stream id %d", + __FUNCTION__, __LINE__, id); + ret = BAD_VALUE; + break; + } + + if (m_yuvStreamCount > m_yuvStreamMaxCount) { + ALOGE("ERR(%s[%d]):Too many YUV stream!.
maxYuvStreamCount %d currentYuvStreamCount %d", + __FUNCTION__, __LINE__, + m_yuvStreamMaxCount, m_yuvStreamCount); + ret = INVALID_OPERATION; + } + + return ret; +} + +status_t ExynosCameraStreamManager::m_decreaseYuvStreamCount(int id) +{ + if (id < 0) { + ALOGE("ERR(%s[%d]):Invalid streamId %d", + __FUNCTION__, __LINE__, id); + return BAD_VALUE; + } + + for (int i = 0; i < 3; i++) { + if (m_yuvStreamIdMap[i] == id) { + m_yuvStreamIdMap[i] = -1; + m_yuvStreamCount--; + } + } + + return NO_ERROR; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraStreamManager.h b/libcamera/common_v2/ExynosCameraStreamManager.h new file mode 100644 index 0000000..585f6f1 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraStreamManager.h @@ -0,0 +1,182 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef EXYNOS_CAMERA_STREAM_MANAGER_H +#define EXYNOS_CAMERA_STREAM_MANAGER_H + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "ExynosCameraDefine.h" +#include "ExynosCameraParameters.h" +#include "ExynosCameraBufferManager.h" + +#include "exynos_format.h" + +namespace android { + +using namespace std; + +namespace EXYNOS_STREAM { + enum STATE { + HAL_STREAM_STS_INIT = 0x00, + HAL_STREAM_STS_INVALID = 0x01, + HAL_STREAM_STS_VALID = 0x02, + HAL_STREAM_STS_UNREGISTERED = 0x11, + HAL_STREAM_STS_REGISTERED = 0x12 + }; +}; + +class ExynosCameraStream : public RefBase { +public: + ExynosCameraStream(){}; + virtual ~ExynosCameraStream(){}; + + virtual status_t setStream(camera3_stream_t *stream) = 0; + virtual status_t getStream(camera3_stream_t **stream) = 0; + virtual status_t setID(int id) = 0; + virtual status_t getID(int *id) = 0; + virtual status_t setFormat(int id) = 0; + virtual status_t getFormat(int *format) = 0; + virtual status_t setPlaneCount(int format) = 0; + virtual status_t getPlaneCount(int *format) = 0; + virtual status_t setOutputPortId(int id) = 0; + virtual status_t getOutputPortId(int *id) = 0; + virtual status_t setRegisterStream(EXYNOS_STREAM::STATE state) = 0; + virtual status_t getRegisterStream(EXYNOS_STREAM::STATE *state) = 0; + virtual status_t setRegisterBuffer(EXYNOS_STREAM::STATE state) = 0; + virtual status_t getRegisterBuffer(EXYNOS_STREAM::STATE *state) = 0; + virtual status_t setRequestBuffer(int bufferCnt) = 0; + virtual status_t getRequestBuffer(int *bufferCnt) = 0; + virtual status_t setBufferManager(ExynosCameraBufferManager *bufferManager) = 0; + virtual status_t getBufferManager(ExynosCameraBufferManager **bufferManager) = 0; +}; + + +class ExynosCamera3Stream : public ExynosCameraStream { +private: + ExynosCamera3Stream(){}; + +public: + + + ExynosCamera3Stream(int id, camera3_stream_t *stream); + virtual ~ExynosCamera3Stream(); + + virtual status_t setStream(camera3_stream_t *stream); + virtual status_t getStream(camera3_stream_t **stream); + virtual status_t setID(int id); + virtual 
status_t getID(int *id); + virtual status_t setFormat(int format); + virtual status_t getFormat(int *format); + virtual status_t setPlaneCount(int planes); + virtual status_t getPlaneCount(int *planes); + virtual status_t setOutputPortId(int id); + virtual status_t getOutputPortId(int *id); + virtual status_t setRegisterStream(EXYNOS_STREAM::STATE state); + virtual status_t getRegisterStream(EXYNOS_STREAM::STATE *state); + virtual status_t setRegisterBuffer(EXYNOS_STREAM::STATE state); + virtual status_t getRegisterBuffer(EXYNOS_STREAM::STATE *state); + virtual status_t setRequestBuffer(int bufferCnt); + virtual status_t getRequestBuffer(int *bufferCnt); + virtual status_t setBufferManager(ExynosCameraBufferManager *bufferManager); + virtual status_t getBufferManager(ExynosCameraBufferManager **bufferManager); + +private: + status_t m_init(); + status_t m_deinit(); + +private: + camera3_stream_t *m_stream; + int m_id; + int m_actualFormat; + int m_planeCount; + int m_outputPortId; + EXYNOS_STREAM::STATE m_registerStream; + EXYNOS_STREAM::STATE m_registerBuffer; + int m_requestbuffer; + ExynosCameraBufferManager *m_bufferManager; +}; + +class ExynosCameraStreamManager : public RefBase { +public: + /* Constructor */ + ExynosCameraStreamManager(int cameraId); + + /* Destructor */ + virtual ~ExynosCameraStreamManager(); + + ExynosCameraStream* createStream(int id, camera3_stream_t *stream); + status_t deleteStream(int id); + + status_t getStream(int id, ExynosCameraStream **stream); + + status_t getStreamKeys(List<int>* keylist); + + bool findStream(int id); + + status_t setConfig(struct ExynosConfigInfo *config); + + status_t setYuvStreamMaxCount(int32_t count); + int32_t getYuvStreamCount(void); + + int getYuvStreamId(int outputPortId); + int getOutputPortId(int streamId); + + status_t dumpCurrentStreamList(void); + +protected: + typedef map<int, ExynosCameraStream *> StreamInfoMap; + typedef map<int, ExynosCameraStream *>::iterator StreamInfoIterator; + +private: + void m_init(); + void m_deinit(); + + status_t m_insert(int id, ExynosCameraStream *item, StreamInfoMap *list, Mutex *lock); + status_t m_erase(int id, ExynosCameraStream **item, StreamInfoMap *list, Mutex *lock); + status_t m_find(int id, StreamInfoMap *list, Mutex *lock); + status_t m_get(int id, ExynosCameraStream **item, StreamInfoMap *list, Mutex *lock); + status_t m_delete(int id, StreamInfoMap *list, Mutex *lock); + status_t m_delete(ExynosCameraStream *stream); + + status_t m_increaseYuvStreamCount(int id); + status_t m_decreaseYuvStreamCount(int id); + +protected: + int m_cameraId; + struct ExynosConfigInfo *m_exynosconfig; + + StreamInfoMap m_streamInfoMap; + mutable Mutex m_streamInfoLock; + int32_t m_yuvStreamCount; + int32_t m_yuvStreamMaxCount; + int m_yuvStreamIdMap[3]; +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/ExynosCameraStreamMutex.h b/libcamera/common_v2/ExynosCameraStreamMutex.h new file mode 100644 index 0000000..0364ba4 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraStreamMutex.h @@ -0,0 +1,69 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosCameraStreamMutex.h + * \brief header file for ExynosCameraStreamMutex + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2016/01/29 + * + * Revision History: + * - 2016/01/29 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef EXYNOS_CAMERA_STREAM_MUTEX_H +#define EXYNOS_CAMERA_STREAM_MUTEX_H + +#include + +#include "ExynosCameraSingleton.h" + +using namespace android; + +/* Class declaration */ +//! ExynosCameraStreamMutex is global mutex for node open ~ V4L2_CID_IS_END_OF_STREAM. +/*! + * \ingroup ExynosCamera + */ +class ExynosCameraStreamMutex: public ExynosCameraSingleton +{ +protected: + friend class ExynosCameraSingleton; + + //! Constructor + ExynosCameraStreamMutex(){}; + + //! Destructor + virtual ~ExynosCameraStreamMutex(){}; + +public: + //! setInfo + /*! + \remarks + return streamMutex + */ + Mutex *getStreamMutex(void) + { + return &m_streamMutex; + } + +private: + Mutex m_streamMutex; +}; + +#endif //EXYNOS_CAMERA_STREAM_MUTEX_H diff --git a/libcamera/common_v2/ExynosCameraThread.h b/libcamera/common_v2/ExynosCameraThread.h new file mode 100644 index 0000000..f8fd817 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraThread.h @@ -0,0 +1,101 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_THREAD_H +#define EXYNOS_CAMERA_THREAD_H + +#include + +using namespace android; + +template <typename T> +class ExynosCameraThread : public Thread { + +typedef bool (T::*thread_loop)(void); +public: + ExynosCameraThread( + T *hw, + thread_loop loop, + const char *name, + int32_t priority = PRIORITY_DEFAULT) + { + m_hardware = hw; + m_threadLoop = loop; + m_name = name; + m_priority = priority; + m_flatStart = false; + } + + virtual status_t readyToRun() { + m_flatStart = true; + + return Thread::readyToRun(); + } + + virtual status_t setup( + T *hw, + thread_loop loop, + const char *name, + int32_t priority = PRIORITY_DEFAULT) + { + m_hardware = hw; + m_threadLoop = loop; + m_name = name; + m_priority = priority; + return NO_ERROR; + } + + virtual status_t run(void) { + + ALOGV("DEBUG(%s):Thread(%s) start running", __FUNCTION__, m_name); + + return Thread::run(m_name, m_priority, 0); + } + + virtual status_t run(int32_t priority, size_t stack = 0) { + + ALOGV("DEBUG(%s):Thread(%s) start running", __FUNCTION__, m_name); + if (m_priority != priority) + m_priority = priority; + + return Thread::run(m_name, m_priority, stack); + } + + virtual void stop(void) { + m_flatStart = false; + requestExit(); + } + +private: + virtual bool threadLoop() { + bool ret = (m_hardware->*m_threadLoop)(); + + if (m_flatStart == false) + ret = m_flatStart; + + return ret; + } + +private: + T *m_hardware; + const char *m_name; + thread_loop m_threadLoop; + int32_t m_priority; + bool m_flatStart; +}; + +#endif diff --git a/libcamera/common_v2/ExynosCameraThreadFactory.h b/libcamera/common_v2/ExynosCameraThreadFactory.h new file mode 100644 index 0000000..0d99118 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraThreadFactory.h @@ -0,0 +1,33 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#include "ExynosCameraThread.h" + +using namespace android; + +class ExynosCameraThreadFactory { +public: + template <typename T> + static Thread *createThread( + T *hw, + bool (T::*loop)(void), + const char *name, + int32_t priority = PRIORITY_DEFAULT) + { + return new ExynosCameraThread<T>(hw, loop, name, priority); + }; +}; diff --git a/libcamera/common_v2/ExynosCameraUtils.cpp b/libcamera/common_v2/ExynosCameraUtils.cpp new file mode 100644 index 0000000..3f08d36 --- /dev/null +++ b/libcamera/common_v2/ExynosCameraUtils.cpp @@ -0,0 +1,2260 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraUtils" +#include + +#include "ExynosCameraUtils.h" + +#define ADD_BAYER_BY_NEON + +namespace android { + +status_t getCropRectAlign( + int src_w, int src_h, + int dst_w, int dst_h, + int *crop_x, int *crop_y, + int *crop_w, int *crop_h, + int align_w, int align_h, + int zoom, float zoomRatio) +{ + *crop_w = src_w; + *crop_h = src_h; + + if (src_w == 0 || src_h == 0 || dst_w == 0 || dst_h == 0) { + ALOGE("ERR(%s):width or height values is 0, src(%dx%d), dst(%dx%d)", + __func__, src_w, src_h, dst_w, dst_h); + return BAD_VALUE; + } + + /* Calculation aspect ratio */ + if ( src_w != dst_w + || src_h != dst_h) { + float src_ratio = 1.0f; + float dst_ratio = 1.0f; + + /* ex : 1024 / 768 */ + src_ratio = ROUND_OFF_HALF(((float)src_w / (float)src_h), 2); + + /* ex : 352 / 288 */ + dst_ratio = ROUND_OFF_HALF(((float)dst_w / (float)dst_h), 2); + + if (dst_ratio <= src_ratio) { + /* shrink w */ + *crop_w = src_h * ((float)dst_w / (float)dst_h); + *crop_h = src_h; + } else { + /* shrink h */ + *crop_w = src_w; + *crop_h = src_w / ((float)dst_w / (float)dst_h); + } + } + + /* Calculation zoom */ + if (zoom != 0) { +#if defined(SCALER_MAX_SCALE_UP_RATIO) + /* + * After dividing float & casting int, + * zoomed size can be smaller too much. + * so, when zoom until max, ceil up about floating point. + */ + if (((int)((float)*crop_w / zoomRatio)) * SCALER_MAX_SCALE_UP_RATIO < *crop_w || + ((int)((float)*crop_h / zoomRatio)) * SCALER_MAX_SCALE_UP_RATIO < *crop_h) { + *crop_w = (int)ceil(((float)*crop_w / zoomRatio)); + *crop_h = (int)ceil(((float)*crop_h / zoomRatio)); + } else +#endif + { + *crop_w = (int)((float)*crop_w / zoomRatio); + *crop_h = (int)((float)*crop_h / zoomRatio); + } + } + + if (dst_w == dst_h) { + int align_value = 0; + align_value = (align_w < align_h)? 
align_h : align_w; + + *crop_w = ALIGN_UP(*crop_w, align_value); + *crop_h = ALIGN_UP(*crop_h, align_value); + + if (*crop_w > src_w) { + *crop_w = ALIGN_DOWN(src_w, align_value); + *crop_h = *crop_w; + } else if (*crop_h > src_h) { + *crop_h = ALIGN_DOWN(src_h, align_value); + *crop_w = *crop_h; + } + } else { + *crop_w = ALIGN_UP(*crop_w, align_w); + *crop_h = ALIGN_UP(*crop_h, align_h); + + if (*crop_w > src_w) + *crop_w = ALIGN_DOWN(src_w, align_w); + if (*crop_h > src_h) + *crop_h = ALIGN_DOWN(src_h, align_h); + } + + *crop_x = ALIGN_DOWN(((src_w - *crop_w) >> 1), 2); + *crop_y = ALIGN_DOWN(((src_h - *crop_h) >> 1), 2); + + if (*crop_x < 0 || *crop_y < 0) { + ALOGE("ERR(%s):crop size too big (%d, %d, %d, %d)", + __func__, *crop_x, *crop_y, *crop_w, *crop_h); + return BAD_VALUE; + } + + return NO_ERROR; +} + +uint32_t bracketsStr2Ints( + char *str, + uint32_t num, + ExynosRect2 *rect2s, + int *weights, + int mode) +{ + char *curStr = str; + char buf[128]; + char *bracketsOpen; + char *bracketsClose; + + int tempArray[5] = {0,}; + uint32_t validFocusedAreas = 0; + bool isValid; + bool nullArea = false; + isValid = true; + + for (uint32_t i = 0; i < num + 1; i++) { + if (curStr == NULL) { + if (i != num) { + nullArea = false; + } + break; + } + + bracketsOpen = strchr(curStr, '('); + if (bracketsOpen == NULL) { + if (i != num) { + nullArea = false; + } + break; + } + + bracketsClose = strchr(bracketsOpen, ')'); + if (bracketsClose == NULL) { + ALOGE("ERR(%s):subBracketsStr2Ints(%s) fail", __func__, buf); + if (i != num) { + nullArea = false; + } + break; + } else if (i == num) { + return 0; + } + + strncpy(buf, bracketsOpen, bracketsClose - bracketsOpen + 1); + buf[bracketsClose - bracketsOpen + 1] = 0; + + if (subBracketsStr2Ints(5, buf, tempArray) == false) { + nullArea = false; + break; + } + + rect2s[i].x1 = tempArray[0]; + rect2s[i].y1 = tempArray[1]; + rect2s[i].x2 = tempArray[2]; + rect2s[i].y2 = tempArray[3]; + weights[i] = tempArray[4]; + + if (mode) { + isValid = true; + + for (int j = 0; j < 4; j++) { + if (tempArray[j] < -1000 || tempArray[j] > 1000) + isValid = false; + } + + if (tempArray[4] < 0 || tempArray[4] > 1000) + isValid = false; + + if (!rect2s[i].x1 && !rect2s[i].y1 && !rect2s[i].x2 && !rect2s[i].y2 && !weights[i]) + nullArea = true; + else if (weights[i] == 0) + isValid = false; + else if (!(tempArray[0] == 0 && tempArray[2] == 0) && tempArray[0] >= tempArray[2]) + isValid = false; + else if (!(tempArray[1] == 0 && tempArray[3] == 0) && tempArray[1] >= tempArray[3]) + isValid = false; + else if (!(tempArray[0] == 0 && tempArray[2] == 0) && (tempArray[1] == 0 && tempArray[3] == 0)) + isValid = false; + else if ((tempArray[0] == 0 && tempArray[2] == 0) && !(tempArray[1] == 0 && tempArray[3] == 0)) + isValid = false; + + if (isValid) + validFocusedAreas++; + else + return 0; + } else { + if (rect2s[i].x1 || rect2s[i].y1 || rect2s[i].x2 || rect2s[i].y2 || weights[i]) + validFocusedAreas++; + } + + curStr = bracketsClose; + } + if (nullArea && mode) + validFocusedAreas = num; + + if (validFocusedAreas == 0) + validFocusedAreas = 1; + + ALOGD("DEBUG(%s[%d]):(%d,%d,%d,%d,%d) - validFocusedAreas(%d)", __FUNCTION__, __LINE__, + tempArray[0], tempArray[1], tempArray[2], tempArray[3], tempArray[4], validFocusedAreas); + + return validFocusedAreas; +} + +bool subBracketsStr2Ints(int num, char *str, int *arr) +{ + if (str == NULL || arr == NULL) { + ALOGE("ERR(%s):str or arr is NULL", __func__); + return false; + } + + /* ex : (-10,-10,0,0,300) */ + char buf[128]; + char 
*bracketsOpen; + char *bracketsClose; + char *tok; + char *savePtr; + + bracketsOpen = strchr(str, '('); + if (bracketsOpen == NULL) { + ALOGE("ERR(%s):no '('", __func__); + return false; + } + + bracketsClose = strchr(bracketsOpen, ')'); + if (bracketsClose == NULL) { + ALOGE("ERR(%s):no ')'", __func__); + return false; + } + + strncpy(buf, bracketsOpen + 1, bracketsClose - bracketsOpen + 1); + buf[bracketsClose - bracketsOpen + 1] = 0; + + tok = strtok_r(buf, ",", &savePtr); + if (tok == NULL) { + ALOGE("ERR(%s):strtok_r(%s) fail", __func__, buf); + return false; + } + + arr[0] = atoi(tok); + + for (int i = 1; i < num; i++) { + tok = strtok_r(NULL, ",", &savePtr); + if (tok == NULL) { + if (i < num - 1) { + ALOGE("ERR(%s):strtok_r() (index : %d, num : %d) fail", __func__, i, num); + return false; + } + break; + } + + arr[i] = atoi(tok); + } + + return true; +} + +void convertingRectToRect2(ExynosRect *rect, ExynosRect2 *rect2) +{ + rect2->x1 = rect->x; + rect2->y1 = rect->y; + rect2->x2 = rect->x + rect->w; + rect2->y2 = rect->y + rect->h; +} + +void convertingRect2ToRect(ExynosRect2 *rect2, ExynosRect *rect) +{ + rect->x = rect2->x1; + rect->y = rect2->y1; + rect->w = rect2->x2 - rect2->x1; + rect->h = rect2->y2 - rect2->y1; +} + +bool isRectNull(ExynosRect *rect) +{ + if ( rect->x == 0 + && rect->y == 0 + && rect-> w == 0 + && rect->h == 0 + && rect->fullW == 0 + && rect->fullH == 0 + && rect->colorFormat == 0) + return true; + + return false; +} + +bool isRectNull(ExynosRect2 *rect2) +{ + if ( rect2->x1 == 0 + && rect2->y1 == 0 + && rect2->x2 == 0 + && rect2->y2 == 0) + return true; + + return false; +} + +bool isRectEqual(ExynosRect *rect1, ExynosRect *rect2) +{ + if ( rect1->x == rect2->x + && rect1->y == rect2->y + && rect1->w == rect2->w + && rect1->h == rect2->h + && rect1->fullW == rect2->fullW + && rect1->fullH == rect2->fullH + && rect1->colorFormat == rect2->colorFormat) + return true; + + return false; +} + +bool isRectEqual(ExynosRect2 *rect1, ExynosRect2 *rect2) +{ + if ( rect1->x1 == rect2->x1 + && rect1->y1 == rect2->y1 + && rect1->x2 == rect2->x2 + && rect1->y2 == rect2->y2) + return true; + + return false; +} + +ExynosRect2 convertingActualArea2HWArea(ExynosRect2 *srcRect, const ExynosRect *regionRect) +{ + int x = regionRect->x; + int y = regionRect->y; + int w = regionRect->w; + int h = regionRect->h; + + ExynosRect2 newRect2; + + newRect2.x1 = srcRect->x1 - x; + newRect2.y1 = srcRect->y1 - y; + newRect2.x2 = srcRect->x2 - x; + newRect2.y2 = srcRect->y2 - y; + + if (newRect2.x1 < 0) + newRect2.x1 = 0; + else if (w <= newRect2.x1) + newRect2.x1 = w - 1; + + if (newRect2.y1 < 0) + newRect2.y1 = 0; + else if (h <= newRect2.y1) + newRect2.y1 = h - 1; + + if (newRect2.x2 < 0) + newRect2.x2 = 0; + else if (w <= newRect2.x2) + newRect2.x2 = w - 1; + + if (newRect2.y2 < 0) + newRect2.y2 = 0; + else if (h <= newRect2.y2) + newRect2.y2 = h - 1; + + if (newRect2.x2 < newRect2.x1) + newRect2.x2 = newRect2.x1; + + if (newRect2.y2 < newRect2.y1) + newRect2.y2 = newRect2.y1; + + ALOGV("INFO(%s[%d]): src(%d %d %d %d) region(%d %d %d %d) newRect(%d %d %d %d)", __FUNCTION__, __LINE__, + srcRect->x1, srcRect->y1, srcRect->x2, srcRect->y2, + regionRect->x , regionRect->y, regionRect->w, regionRect->h, + newRect2.x1 , newRect2.y1, newRect2.x2, newRect2.y2); + + return newRect2; +} + +ExynosRect2 convertingAndroidArea2HWArea(ExynosRect2 *srcRect, const ExynosRect *regionRect) +{ + int x = regionRect->x; + int y = regionRect->y; + int w = regionRect->w; + int h = regionRect->h; + + 
ExynosRect2 newRect2; + + newRect2.x1 = (srcRect->x1 + 1000) * w / 2000; + newRect2.y1 = (srcRect->y1 + 1000) * h / 2000; + newRect2.x2 = (srcRect->x2 + 1000) * w / 2000; + newRect2.y2 = (srcRect->y2 + 1000) * h / 2000; + + if (newRect2.x1 < 0) + newRect2.x1 = 0; + else if (w <= newRect2.x1) + newRect2.x1 = w - 1; + + if (newRect2.y1 < 0) + newRect2.y1 = 0; + else if (h <= newRect2.y1) + newRect2.y1 = h - 1; + + if (newRect2.x2 < 0) + newRect2.x2 = 0; + else if (w <= newRect2.x2) + newRect2.x2 = w - 1; + + if (newRect2.y2 < 0) + newRect2.y2 = 0; + else if (h <= newRect2.y2) + newRect2.y2 = h - 1; + + if (newRect2.x2 < newRect2.x1) + newRect2.x2 = newRect2.x1; + + if (newRect2.y2 < newRect2.y1) + newRect2.y2 = newRect2.y1; + + ALOGV("INFO(%s[%d]): src(%d %d %d %d) region(%d %d %d %d) newRect(%d %d %d %d)", __FUNCTION__, __LINE__, + srcRect->x1, srcRect->y1, srcRect->x2, srcRect->y2, + regionRect->x , regionRect->y, regionRect->w, regionRect->h, + newRect2.x1 , newRect2.y1, newRect2.x2, newRect2.y2); + + return newRect2; +} + +ExynosRect2 convertingAndroidArea2HWAreaBcropOut(ExynosRect2 *srcRect, const ExynosRect *regionRect) +{ + /* do nothing, same as noraml converting */ + return convertingAndroidArea2HWArea(srcRect, regionRect); +} + +ExynosRect2 convertingAndroidArea2HWAreaBcropIn(ExynosRect2 *srcRect, const ExynosRect *regionRect) +{ + ExynosRect2 newRect2; + + int x = regionRect->x; + int y = regionRect->y; + int w = regionRect->w; + int h = regionRect->h; + + newRect2 = convertingAndroidArea2HWArea(srcRect, regionRect); + + /* add x, y size */ + newRect2.x1 += x; + newRect2.y1 += y; + newRect2.x2 += x; + newRect2.y2 += y; + + ALOGI("INFO(%s[%d]): src(%d %d %d %d) region(%d %d %d %d) newRect(%d %d %d %d)", + __FUNCTION__, __LINE__, + srcRect->x1, srcRect->y1, srcRect->x2, srcRect->y2, + regionRect->x , regionRect->y, regionRect->w, regionRect->h, + newRect2.x1 , newRect2.y1, newRect2.x2, newRect2.y2); + + return newRect2; +} + +ExynosRect2 convertingSrcArea2DstArea(ExynosRect2 *srcRect, const ExynosRect *srcRegionRect, const ExynosRect *dstRegionRect) +{ + int x = dstRegionRect->x; + int y = dstRegionRect->y; + int w = dstRegionRect->w; + int h = dstRegionRect->h; + + ExynosRect2 newRect2; + + newRect2.x1 = (srcRect->x1 * w) / srcRegionRect->w; + newRect2.y1 = (srcRect->y1 * h) / srcRegionRect->h; + newRect2.x2 = (srcRect->x2 * w) / srcRegionRect->w; + newRect2.y2 = (srcRect->y2 * h) / srcRegionRect->h; + + if (newRect2.x1 < 0) + newRect2.x1 = 0; + else if (w <= newRect2.x1) + newRect2.x1 = w - 1; + + if (newRect2.y1 < 0) + newRect2.y1 = 0; + else if (h <= newRect2.y1) + newRect2.y1 = h - 1; + + if (newRect2.x2 < 0) + newRect2.x2 = 0; + else if (w <= newRect2.x2) + newRect2.x2 = w - 1; + + if (newRect2.y2 < 0) + newRect2.y2 = 0; + else if (h <= newRect2.y2) + newRect2.y2 = h - 1; + + if (newRect2.x2 < newRect2.x1) + newRect2.x2 = newRect2.x1; + + if (newRect2.y2 < newRect2.y1) + newRect2.y2 = newRect2.y1; + + ALOGV("INFO(%s[%d]): src(%d %d %d %d) srcRegion(%d %d %d %d) dstRegion(%d %d %d %d) newRect(%d %d %d %d)", + __FUNCTION__, __LINE__, + srcRect->x1, srcRect->y1, srcRect->x2, srcRect->y2, + srcRegionRect->x, srcRegionRect->y, srcRegionRect->w, srcRegionRect->h, + dstRegionRect->x, dstRegionRect->y, dstRegionRect->w, dstRegionRect->h, + newRect2.x1, newRect2.y1, newRect2.x2, newRect2.y2); + + return newRect2; +} + +status_t getResolutionList(String8 &string8Buf, struct ExynosSensorInfoBase *sensorInfo, + int *w, int *h, int mode, int camid) +{ + bool found = false; + bool flagFirst 
= true; + char strBuf[32]; + int sizeOfResSize = 0; + int cropX = 0, cropY = 0, cropW = 0, cropH = 0; + int max_w = 0, max_h = 0; + + /* this is up to /packages/apps/Camera/res/values/arrays.xml */ + int (*RESOLUTION_LIST)[SIZE_OF_RESOLUTION] = {NULL,}; + + switch (mode) { + case MODE_PREVIEW: + if (camid == CAMERA_ID_BACK) { + RESOLUTION_LIST = sensorInfo->rearPreviewList; + sizeOfResSize = sensorInfo->rearPreviewListMax; + } else { + RESOLUTION_LIST = sensorInfo->frontPreviewList; + sizeOfResSize = sensorInfo->frontPreviewListMax; + } + break; + case MODE_PICTURE: + if (camid == CAMERA_ID_BACK) { + RESOLUTION_LIST = sensorInfo->rearPictureList; + sizeOfResSize = sensorInfo->rearPictureListMax; + } else { + RESOLUTION_LIST = sensorInfo->frontPictureList; + sizeOfResSize = sensorInfo->frontPictureListMax; + } + break; + case MODE_VIDEO: + if (camid == CAMERA_ID_BACK) { + RESOLUTION_LIST = sensorInfo->rearVideoList; + sizeOfResSize = sensorInfo->rearVideoListMax; + } else { + RESOLUTION_LIST = sensorInfo->frontVideoList; + sizeOfResSize = sensorInfo->frontVideoListMax; + } + break; + case MODE_THUMBNAIL: + RESOLUTION_LIST = sensorInfo->thumbnailList; + sizeOfResSize = sensorInfo->thumbnailListMax; + break; + default: + ALOGE("ERR(%s):invalid mode(%d)", __func__, mode); + return BAD_VALUE; + } + + for (int i = 0; i < sizeOfResSize; i++) { + if ( RESOLUTION_LIST[i][0] <= *w + && RESOLUTION_LIST[i][1] <= *h) { + if (flagFirst == true) { + snprintf(strBuf, sizeof(strBuf), "%dx%d", RESOLUTION_LIST[i][0], RESOLUTION_LIST[i][1]); + string8Buf.append(strBuf); + max_w = RESOLUTION_LIST[i][0]; + max_h = RESOLUTION_LIST[i][1]; + + flagFirst = false; + } else { +#ifndef NO_MCSC_RESTRICTION + if ((mode == MODE_PICTURE || mode == MODE_THUMBNAIL) && + ((max_w) / 16 > RESOLUTION_LIST[i][0] || + (max_h / 16) > RESOLUTION_LIST[i][1])) { + ALOGI("INFO(%s)skipped : size(%d x %d)", + __FUNCTION__, RESOLUTION_LIST[i][0], RESOLUTION_LIST[i][1]); + continue; + } +#endif + snprintf(strBuf, sizeof(strBuf), ",%dx%d", RESOLUTION_LIST[i][0], RESOLUTION_LIST[i][1]); + string8Buf.append(strBuf); + } + + found = true; + } + } + + if (found == false) { + ALOGE("ERR(%s):cannot find resolutions", __func__); + } else { + *w = max_w; + *h = max_h; + } + + return NO_ERROR; +} + +void setZoomRatioList(int *list, int len, float maxZoomRatio) +{ + float zoom_ratio_delta = pow(maxZoomRatio, 1.0f / len); + + for (int i = 0; i <= len; i++) { + list[i] = (int)(pow(zoom_ratio_delta, i) * 1000); + ALOGV("INFO(%s):list[%d]:(%d), (%f)", __func__, i, list[i], (float)((float)list[i] / 1000)); + } +} + +status_t getZoomRatioList(String8 & string8Buf, int maxZoom, int maxZoomRatio, int *list) +{ + bool flagFirst = true; + char strBuf[32]; + + int cur = 0; + int step = maxZoom - 1; + + setZoomRatioList(list, maxZoom - 1, (float)(maxZoomRatio / 1000)); + + for (int i = 0; i < step; i++) { + cur = (int)(list[i] / 10); + snprintf(strBuf, sizeof(strBuf), "%d", cur); + string8Buf.append(strBuf); + string8Buf.append(","); + } + + snprintf(strBuf, sizeof(strBuf), "%d", (maxZoomRatio / 10)); + string8Buf.append(strBuf); + + /* ex : "100,130,160,190,220,250,280,310,340,360,400" */ + + return NO_ERROR; +} + +status_t getSupportedFpsList(String8 & string8Buf, int min, int max, int camid, struct ExynosSensorInfoBase *sensorInfo) +{ + bool found = false; + bool flagFirst = true; + char strBuf[32]; + int numOfList = 0; + int (*sizeList)[2]; + + if (camid == CAMERA_ID_BACK) { + sizeList = sensorInfo->rearFPSList; + for (int i = 0; i < 
sensorInfo->rearFPSListMax; i++) { + if (min <= sizeList[i][0] && + sizeList[i][1] <= max) { + if (flagFirst == true) { + flagFirst = false; + snprintf(strBuf, sizeof(strBuf), "(%d,%d)", sizeList[i][0], sizeList[i][1]); + } else { + snprintf(strBuf, sizeof(strBuf), ",(%d,%d)", sizeList[i][0], sizeList[i][1]); + } + string8Buf.append(strBuf); + + found = true; + } + } + } else { + sizeList = sensorInfo->frontFPSList; + for (int i = 0; i < sensorInfo->frontFPSListMax; i++) { + if (min <= sizeList[i][0] && + sizeList[i][1] <= max) { + if (flagFirst == true) { + flagFirst = false; + snprintf(strBuf, sizeof(strBuf), "(%d,%d)", sizeList[i][0], sizeList[i][1]); + } else { + snprintf(strBuf, sizeof(strBuf), ",(%d,%d)", sizeList[i][0], sizeList[i][1]); + } + string8Buf.append(strBuf); + + found = true; + } + } + } + + if (found == false) + ALOGE("ERR(%s):cannot find fps list", __func__); + + return NO_ERROR; +} + + +int32_t getMetaDmRequestFrameCount(struct camera2_shot_ext *shot_ext) +{ + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return -1; + } + return shot_ext->shot.dm.request.frameCount; +} + +int32_t getMetaDmRequestFrameCount(struct camera2_dm *dm) +{ + if (dm == NULL) { + ALOGE("ERR(%s[%d]): buffer is NULL", __FUNCTION__, __LINE__); + return -1; + } + return dm->request.frameCount; +} + +void setMetaCtlAeTargetFpsRange(struct camera2_shot_ext *shot_ext, uint32_t min, uint32_t max) +{ + ALOGI("INFO(%s):aeTargetFpsRange(min=%d, max=%d)", __FUNCTION__, min, max); + shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = min; + shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = max; +} + +void getMetaCtlAeTargetFpsRange(struct camera2_shot_ext *shot_ext, uint32_t *min, uint32_t *max) +{ + *min = shot_ext->shot.ctl.aa.aeTargetFpsRange[0]; + *max = shot_ext->shot.ctl.aa.aeTargetFpsRange[1]; +} + +void setMetaCtlSensorFrameDuration(struct camera2_shot_ext *shot_ext, uint64_t duration) +{ + shot_ext->shot.ctl.sensor.frameDuration = duration; +} + +void getMetaCtlSensorFrameDuration(struct camera2_shot_ext *shot_ext, uint64_t *duration) +{ + *duration = shot_ext->shot.ctl.sensor.frameDuration; +} + +void setMetaCtlAeMode(struct camera2_shot_ext *shot_ext, enum aa_aemode aeMode) +{ + if (shot_ext->shot.ctl.sensor.exposureTime == 0) + shot_ext->shot.ctl.aa.aeMode = aeMode; +} + +void getMetaCtlAeMode(struct camera2_shot_ext *shot_ext, enum aa_aemode *aeMode) +{ + *aeMode = shot_ext->shot.ctl.aa.aeMode; +} + +void setMetaCtlAeLock(struct camera2_shot_ext *shot_ext, bool lock) +{ + if (lock == true) + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + else + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; +} + +void getMetaCtlAeLock(struct camera2_shot_ext *shot_ext, bool *lock) +{ + if (shot_ext->shot.ctl.aa.aeLock == AA_AE_LOCK_OFF) + *lock = false; + else + *lock = true; +} + +#ifdef USE_SUBDIVIDED_EV +void setMetaCtlExposureCompensationStep(struct camera2_shot_ext *shot_ext, float expCompensationStep) +{ + shot_ext->shot.ctl.aa.vendor_aeExpCompensationStep = expCompensationStep; +} +#endif + +void setMetaCtlExposureCompensation(struct camera2_shot_ext *shot_ext, int32_t expCompensation) +{ + shot_ext->shot.ctl.aa.aeExpCompensation = expCompensation; +} + +void getMetaCtlExposureCompensation(struct camera2_shot_ext *shot_ext, int32_t *expCompensation) +{ + *expCompensation = shot_ext->shot.ctl.aa.aeExpCompensation; +} + +void setMetaCtlExposureTime(struct camera2_shot_ext *shot_ext, uint64_t exposureTime) +{ + if (exposureTime != 0) { + shot_ext->shot.ctl.aa.aeMode =
AA_AEMODE_OFF; + setMetaCtlAeRegion(shot_ext, 0, 0, 0, 0, 0); + } + shot_ext->shot.ctl.sensor.exposureTime = exposureTime; +} + +void getMetaCtlExposureTime(struct camera2_shot_ext *shot_ext, uint64_t *exposureTime) +{ + *exposureTime = shot_ext->shot.ctl.sensor.exposureTime; +} + +void setMetaCtlCaptureExposureTime(struct camera2_shot_ext *shot_ext, uint32_t exposureTime) +{ + shot_ext->shot.ctl.aa.vendor_captureExposureTime = exposureTime; +} + +void getMetaCtlCaptureExposureTime(struct camera2_shot_ext *shot_ext, uint32_t *exposureTime) +{ + *exposureTime = shot_ext->shot.ctl.aa.vendor_captureExposureTime; +} + +#ifdef SUPPORT_DEPTH_MAP +void setMetaCtlDisparityMode(struct camera2_shot_ext *shot_ext, enum companion_disparity_mode disparity_mode) +{ + shot_ext->shot.uctl.companionUd.disparity_mode = disparity_mode; +} +#endif + +void setMetaCtlWbLevel(struct camera2_shot_ext *shot_ext, int32_t wbLevel) +{ + shot_ext->shot.ctl.aa.vendor_awbValue = wbLevel; +} + +void getMetaCtlWbLevel(struct camera2_shot_ext *shot_ext, int32_t *wbLevel) +{ + *wbLevel = shot_ext->shot.ctl.aa.vendor_awbValue; +} + +status_t setMetaCtlCropRegion( + struct camera2_shot_ext *shot_ext, + int x, int y, int w, int h) +{ + shot_ext->shot.ctl.scaler.cropRegion[0] = x; + shot_ext->shot.ctl.scaler.cropRegion[1] = y; + shot_ext->shot.ctl.scaler.cropRegion[2] = w; + shot_ext->shot.ctl.scaler.cropRegion[3] = h; + + return NO_ERROR; +} +status_t setMetaCtlCropRegion( + struct camera2_shot_ext *shot_ext, + int zoom, + int srcW, int srcH, + int dstW, int dstH, float zoomRatio) +{ + int newX = 0; + int newY = 0; + int newW = 0; + int newH = 0; + + if (getCropRectAlign(srcW, srcH, + dstW, dstH, + &newX, &newY, + &newW, &newH, + 16, 2, + zoom, zoomRatio) != NO_ERROR) { + ALOGE("ERR(%s):getCropRectAlign(%d, %d, %d, %d) fail", + __func__, srcW, srcH, dstW, dstH); + return BAD_VALUE; + } + + newX = ALIGN(newX, 2); + newY = ALIGN(newY, 2); + + ALOGV("DEBUG(%s):size(%d, %d, %d, %d), level(%d)", + __FUNCTION__, newX, newY, newW, newH, zoom); + + shot_ext->shot.ctl.scaler.cropRegion[0] = newX; + shot_ext->shot.ctl.scaler.cropRegion[1] = newY; + shot_ext->shot.ctl.scaler.cropRegion[2] = newW; + shot_ext->shot.ctl.scaler.cropRegion[3] = newH; + + return NO_ERROR; +} + +void getMetaCtlCropRegion( + struct camera2_shot_ext *shot_ext, + int *x, int *y, + int *w, int *h) +{ + *x = shot_ext->shot.ctl.scaler.cropRegion[0]; + *y = shot_ext->shot.ctl.scaler.cropRegion[1]; + *w = shot_ext->shot.ctl.scaler.cropRegion[2]; + *h = shot_ext->shot.ctl.scaler.cropRegion[3]; +} + +void setMetaCtlAeRegion( + struct camera2_shot_ext *shot_ext, + int x, int y, + int w, int h, + int weight) +{ + shot_ext->shot.ctl.aa.aeRegions[0] = x; + shot_ext->shot.ctl.aa.aeRegions[1] = y; + shot_ext->shot.ctl.aa.aeRegions[2] = w; + shot_ext->shot.ctl.aa.aeRegions[3] = h; + shot_ext->shot.ctl.aa.aeRegions[4] = weight; +} + +void getMetaCtlAeRegion( + struct camera2_shot_ext *shot_ext, + int *x, int *y, + int *w, int *h, + int *weight) +{ + *x = shot_ext->shot.ctl.aa.aeRegions[0]; + *y = shot_ext->shot.ctl.aa.aeRegions[1]; + *w = shot_ext->shot.ctl.aa.aeRegions[2]; + *h = shot_ext->shot.ctl.aa.aeRegions[3]; + *weight = shot_ext->shot.ctl.aa.aeRegions[4]; +} + + +void setMetaCtlAntibandingMode(struct camera2_shot_ext *shot_ext, enum aa_ae_antibanding_mode antibandingMode) +{ + shot_ext->shot.ctl.aa.aeAntibandingMode = antibandingMode; +} + +void getMetaCtlAntibandingMode(struct camera2_shot_ext *shot_ext, enum aa_ae_antibanding_mode *antibandingMode) +{ + 
*antibandingMode = shot_ext->shot.ctl.aa.aeAntibandingMode; +} + +void setMetaCtlSceneMode(struct camera2_shot_ext *shot_ext, enum aa_mode mode, enum aa_scene_mode sceneMode) +{ + enum processing_mode default_edge_mode = PROCESSING_MODE_FAST; + enum processing_mode default_noise_mode = PROCESSING_MODE_FAST; + int default_edge_strength = 5; + int default_noise_strength = 5; + + shot_ext->shot.ctl.aa.mode = mode; + shot_ext->shot.ctl.aa.sceneMode = sceneMode; + + switch (sceneMode) { + case AA_SCENE_MODE_FACE_PRIORITY: + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF + && shot_ext->shot.ctl.sensor.exposureTime == 0) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_DISABLED; + if(shot_ext->shot.ctl.aa.vendor_isoMode != AA_ISOMODE_MANUAL) { + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + } + + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + case AA_SCENE_MODE_ACTION: + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + case AA_SCENE_MODE_PORTRAIT: + case AA_SCENE_MODE_LANDSCAPE: + /* set default setting */ + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + case AA_SCENE_MODE_NIGHT: + /* AE_LOCK is prohibited */ + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF || + shot_ext->shot.ctl.aa.aeLock == AA_AE_LOCK_ON) { + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + } + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. 
*/ + break; + case AA_SCENE_MODE_NIGHT_PORTRAIT: + case AA_SCENE_MODE_THEATRE: + case AA_SCENE_MODE_BEACH: + case AA_SCENE_MODE_SNOW: + /* set default setting */ + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + case AA_SCENE_MODE_SUNSET: + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_DAYLIGHT; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + case AA_SCENE_MODE_STEADYPHOTO: + case AA_SCENE_MODE_FIREWORKS: + case AA_SCENE_MODE_SPORTS: + /* set default setting */ + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + case AA_SCENE_MODE_PARTY: + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_MANUAL; + shot_ext->shot.ctl.aa.vendor_isoValue = 200; + shot_ext->shot.ctl.sensor.sensitivity = 200; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 4; /* "4" is default + 1. */ + break; + case AA_SCENE_MODE_CANDLELIGHT: + /* set default setting */ + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. 
*/ + break; + case AA_SCENE_MODE_AQUA: + /* set default setting */ + if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_OFF) + shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_CENTER; + + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO; + shot_ext->shot.ctl.aa.vendor_isoMode = AA_ISOMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_isoValue = 0; + shot_ext->shot.ctl.sensor.sensitivity = 0; + shot_ext->shot.ctl.noise.mode = default_noise_mode; + shot_ext->shot.ctl.noise.strength = default_noise_strength; + shot_ext->shot.ctl.edge.mode = default_edge_mode; + shot_ext->shot.ctl.edge.strength = default_edge_strength; + shot_ext->shot.ctl.color.vendor_saturation = 3; /* "3" is default. */ + break; + default: + break; + } +} + +void getMetaCtlSceneMode(struct camera2_shot_ext *shot_ext, enum aa_mode *mode, enum aa_scene_mode *sceneMode) +{ + *mode = shot_ext->shot.ctl.aa.mode; + *sceneMode = shot_ext->shot.ctl.aa.sceneMode; +} + +void setMetaCtlAwbMode(struct camera2_shot_ext *shot_ext, enum aa_awbmode awbMode) +{ + shot_ext->shot.ctl.aa.awbMode = awbMode; +} + +void getMetaCtlAwbMode(struct camera2_shot_ext *shot_ext, enum aa_awbmode *awbMode) +{ + *awbMode = shot_ext->shot.ctl.aa.awbMode; +} + +void setMetaCtlAwbLock(struct camera2_shot_ext *shot_ext, bool lock) +{ + if (lock == true) + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + else + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_OFF; +} + +void getMetaCtlAwbLock(struct camera2_shot_ext *shot_ext, bool *lock) +{ + if (shot_ext->shot.ctl.aa.awbLock == AA_AWB_LOCK_OFF) + *lock = false; + else + *lock = true; +} + +void setMetaVtMode(struct camera2_shot_ext *shot_ext, enum camera_vt_mode mode) +{ + shot_ext->shot.uctl.vtMode = mode; +} + +void setMetaVideoMode(struct camera2_shot_ext *shot_ext, enum aa_videomode mode) +{ + shot_ext->shot.ctl.aa.vendor_videoMode = mode; +} + +void setMetaCtlAfRegion(struct camera2_shot_ext *shot_ext, + int x, int y, int w, int h, int weight) +{ + shot_ext->shot.ctl.aa.afRegions[0] = x; + shot_ext->shot.ctl.aa.afRegions[1] = y; + shot_ext->shot.ctl.aa.afRegions[2] = w; + shot_ext->shot.ctl.aa.afRegions[3] = h; + shot_ext->shot.ctl.aa.afRegions[4] = weight; +} + +void getMetaCtlAfRegion(struct camera2_shot_ext *shot_ext, + int *x, int *y, int *w, int *h, int *weight) +{ + *x = shot_ext->shot.ctl.aa.afRegions[0]; + *y = shot_ext->shot.ctl.aa.afRegions[1]; + *w = shot_ext->shot.ctl.aa.afRegions[2]; + *h = shot_ext->shot.ctl.aa.afRegions[3]; + *weight = shot_ext->shot.ctl.aa.afRegions[4]; +} + +void setMetaCtlColorCorrectionMode(struct camera2_shot_ext *shot_ext, enum colorcorrection_mode mode) +{ + shot_ext->shot.ctl.color.mode = mode; +} + +void getMetaCtlColorCorrectionMode(struct camera2_shot_ext *shot_ext, enum colorcorrection_mode *mode) +{ + *mode = shot_ext->shot.ctl.color.mode; +} + +void setMetaCtlAaEffect(struct camera2_shot_ext *shot_ext, aa_effect_mode_t mode) +{ + shot_ext->shot.ctl.aa.effectMode = mode; +} + +void getMetaCtlAaEffect(struct camera2_shot_ext *shot_ext, aa_effect_mode_t *mode) +{ + *mode = shot_ext->shot.ctl.aa.effectMode; +} + +void setMetaCtlBrightness(struct camera2_shot_ext *shot_ext, int32_t brightness) +{ + shot_ext->shot.ctl.color.vendor_brightness = brightness; +} + +void getMetaCtlBrightness(struct camera2_shot_ext *shot_ext, int32_t *brightness) +{ + *brightness = shot_ext->shot.ctl.color.vendor_brightness; +} + +void setMetaCtlSaturation(struct camera2_shot_ext *shot_ext, int32_t saturation) +{ + shot_ext->shot.ctl.color.vendor_saturation = saturation; +} + +void getMetaCtlSaturation(struct 
camera2_shot_ext *shot_ext, int32_t *saturation) +{ + *saturation = shot_ext->shot.ctl.color.vendor_saturation; +} + +void setMetaCtlHue(struct camera2_shot_ext *shot_ext, int32_t hue) +{ + shot_ext->shot.ctl.color.vendor_hue = hue; +} + +void getMetaCtlHue(struct camera2_shot_ext *shot_ext, int32_t *hue) +{ + *hue = shot_ext->shot.ctl.color.vendor_hue; +} + +void setMetaCtlContrast(struct camera2_shot_ext *shot_ext, uint32_t contrast) +{ + shot_ext->shot.ctl.color.vendor_contrast = contrast; +} + +void getMetaCtlContrast(struct camera2_shot_ext *shot_ext, uint32_t *contrast) +{ + *contrast = shot_ext->shot.ctl.color.vendor_contrast; +} + +void setMetaCtlSharpness(struct camera2_shot_ext *shot_ext, enum processing_mode edge_mode, int32_t edge_sharpness, + enum processing_mode noise_mode, int32_t noise_sharpness) +{ + shot_ext->shot.ctl.edge.mode = edge_mode; + shot_ext->shot.ctl.edge.strength = (uint8_t) edge_sharpness; + shot_ext->shot.ctl.noise.mode = noise_mode; + shot_ext->shot.ctl.noise.strength = (uint8_t) noise_sharpness; +} + +void getMetaCtlSharpness(struct camera2_shot_ext *shot_ext, enum processing_mode *edge_mode, int32_t *edge_sharpness, + enum processing_mode *noise_mode, int32_t *noise_sharpness) +{ + *edge_mode = shot_ext->shot.ctl.edge.mode; + *edge_sharpness = (int32_t) shot_ext->shot.ctl.edge.strength; + *noise_mode = shot_ext->shot.ctl.noise.mode; + *noise_sharpness = (int32_t) shot_ext->shot.ctl.noise.strength; +} + +void setMetaCtlIso(struct camera2_shot_ext *shot_ext, enum aa_isomode mode, uint32_t iso) +{ + shot_ext->shot.ctl.aa.vendor_isoMode = mode; + shot_ext->shot.ctl.aa.vendor_isoValue = iso; + shot_ext->shot.ctl.sensor.sensitivity = iso; +} + +void getMetaCtlIso(struct camera2_shot_ext *shot_ext, enum aa_isomode *mode, uint32_t *iso) +{ + *mode = shot_ext->shot.ctl.aa.vendor_isoMode; + *iso = shot_ext->shot.ctl.aa.vendor_isoValue; +} + +void setMetaCtlFdMode(struct camera2_shot_ext *shot_ext, enum facedetect_mode mode) +{ + shot_ext->shot.ctl.stats.faceDetectMode = mode; +} + +void getStreamFrameValid(struct camera2_stream *shot_stream, uint32_t *fvalid) +{ + *fvalid = shot_stream->fvalid; +} + +void getStreamFrameCount(struct camera2_stream *shot_stream, uint32_t *fcount) +{ + *fcount = shot_stream->fcount; +} + +status_t setMetaDmSensorTimeStamp(struct camera2_shot_ext *shot_ext, uint64_t timeStamp) +{ + status_t status = NO_ERROR; + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):buffer is NULL", __FUNCTION__, __LINE__); + status = INVALID_OPERATION; + return status; + } + + shot_ext->shot.dm.sensor.timeStamp = timeStamp; + return status; +} + +nsecs_t getMetaDmSensorTimeStamp(struct camera2_shot_ext *shot_ext) +{ + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):buffer is NULL", __FUNCTION__, __LINE__); + return 0; + } + return shot_ext->shot.dm.sensor.timeStamp; +} + +void setMetaNodeLeaderRequest(struct camera2_shot_ext* shot_ext, int value) +{ + shot_ext->node_group.leader.request = value; + + ALOGV("INFO(%s[%d]):(%d)", __FUNCTION__, __LINE__, + shot_ext->node_group.leader.request); +} + +void setMetaNodeLeaderVideoID(struct camera2_shot_ext* shot_ext, int value) +{ + shot_ext->node_group.leader.vid = value; + + ALOGV("INFO(%s[%d]):(%d)", __FUNCTION__, __LINE__, + shot_ext->node_group.leader.vid); +} + +void setMetaNodeLeaderInputSize(struct camera2_shot_ext* shot_ext, unsigned int x, unsigned int y, unsigned int w, unsigned int h) +{ + shot_ext->node_group.leader.input.cropRegion[0] = x; + shot_ext->node_group.leader.input.cropRegion[1] = y; + 
shot_ext->node_group.leader.input.cropRegion[2] = w; + shot_ext->node_group.leader.input.cropRegion[3] = h; + + ALOGV("INFO(%s[%d]):(%d, %d, %d, %d)", __FUNCTION__, __LINE__, + shot_ext->node_group.leader.input.cropRegion[0], + shot_ext->node_group.leader.input.cropRegion[1], + shot_ext->node_group.leader.input.cropRegion[2], + shot_ext->node_group.leader.input.cropRegion[3]); +} + +void setMetaNodeLeaderOutputSize(struct camera2_shot_ext * shot_ext, unsigned int x, unsigned int y, unsigned int w, unsigned int h) +{ + shot_ext->node_group.leader.output.cropRegion[0] = x; + shot_ext->node_group.leader.output.cropRegion[1] = y; + shot_ext->node_group.leader.output.cropRegion[2] = w; + shot_ext->node_group.leader.output.cropRegion[3] = h; + + ALOGV("INFO(%s[%d]):(%d, %d, %d, %d)", __FUNCTION__, __LINE__, + shot_ext->node_group.leader.output.cropRegion[0], + shot_ext->node_group.leader.output.cropRegion[1], + shot_ext->node_group.leader.output.cropRegion[2], + shot_ext->node_group.leader.output.cropRegion[3]); +} + +void setMetaNodeCaptureRequest(struct camera2_shot_ext* shot_ext, int index, int value) +{ + shot_ext->node_group.capture[index].request = value; + + ALOGV("INFO(%s[%d]):(%d)(%d)", __FUNCTION__, __LINE__, + index, + shot_ext->node_group.capture[index].request); +} + +void setMetaNodeCaptureVideoID(struct camera2_shot_ext* shot_ext, int index, int value) +{ + shot_ext->node_group.capture[index].vid = value; + + ALOGV("INFO(%s[%d]):(%d)(%d)", __FUNCTION__, __LINE__, + index, + shot_ext->node_group.capture[index].vid); +} + +void setMetaNodeCaptureInputSize(struct camera2_shot_ext* shot_ext, int index, unsigned int x, unsigned int y, unsigned int w, unsigned int h) +{ + shot_ext->node_group.capture[index].input.cropRegion[0] = x; + shot_ext->node_group.capture[index].input.cropRegion[1] = y; + shot_ext->node_group.capture[index].input.cropRegion[2] = w; + shot_ext->node_group.capture[index].input.cropRegion[3] = h; + + ALOGV("INFO(%s[%d]):(%d)(%d, %d, %d, %d)", __FUNCTION__, __LINE__, + index, + shot_ext->node_group.capture[index].input.cropRegion[0], + shot_ext->node_group.capture[index].input.cropRegion[1], + shot_ext->node_group.capture[index].input.cropRegion[2], + shot_ext->node_group.capture[index].input.cropRegion[3]); +} + +void setMetaNodeCaptureOutputSize(struct camera2_shot_ext * shot_ext, int index, unsigned int x, unsigned int y, unsigned int w, unsigned int h) +{ + shot_ext->node_group.capture[index].output.cropRegion[0] = x; + shot_ext->node_group.capture[index].output.cropRegion[1] = y; + shot_ext->node_group.capture[index].output.cropRegion[2] = w; + shot_ext->node_group.capture[index].output.cropRegion[3] = h; + + ALOGV("INFO(%s[%d]):(%d)(%d, %d, %d, %d)", __FUNCTION__, __LINE__, + index, + shot_ext->node_group.capture[index].output.cropRegion[0], + shot_ext->node_group.capture[index].output.cropRegion[1], + shot_ext->node_group.capture[index].output.cropRegion[2], + shot_ext->node_group.capture[index].output.cropRegion[3]); +} + +void setMetaBypassDrc(struct camera2_shot_ext *shot_ext, int value) +{ + shot_ext->drc_bypass = value; +} + +void setMetaBypassDis(struct camera2_shot_ext *shot_ext, int value) +{ + shot_ext->dis_bypass = value; +} + +void setMetaBypassDnr(struct camera2_shot_ext *shot_ext, int value) +{ + shot_ext->dnr_bypass = value; +} + +void setMetaBypassFd(struct camera2_shot_ext *shot_ext, int value) +{ + shot_ext->fd_bypass = value; +} + +void setMetaSetfile(struct camera2_shot_ext *shot_ext, int value) +{ + shot_ext->setfile = value; +} + + +int 
mergeSetfileYuvRange(int setfile, int yuvRange) +{ + int ret = setfile; + + ret &= (0x0000ffff); + ret |= (yuvRange << 16); + + return ret; +} + +int getPlaneSizeFlite(int width, int height) +{ + int PlaneSize; + int Alligned_Width; + int Bytes; + + Alligned_Width = (width + 9) / 10 * 10; + Bytes = Alligned_Width * 8 / 5 ; + + PlaneSize = Bytes * height; + + return PlaneSize; +} + +int getBayerLineSize(int width, int bayerFormat) +{ + int bytesPerLine = 0; + + if (width <= 0) { + ALOGE("ERR(%s[%d]):Invalid input width size (%d)", __FUNCTION__, __LINE__, width); + return bytesPerLine; + } + + switch (bayerFormat) { + case V4L2_PIX_FMT_SBGGR16: + bytesPerLine = ROUND_UP(width * 2, CAMERA_16PX_ALIGN); + break; + case V4L2_PIX_FMT_SBGGR12: + bytesPerLine = ROUND_UP((width * 3 / 2), CAMERA_16PX_ALIGN); + break; + case V4L2_PIX_FMT_SBGGR10: + bytesPerLine = ROUND_UP((width * 5 / 4), CAMERA_16PX_ALIGN); + break; + case V4L2_PIX_FMT_SBGGR8: + bytesPerLine = ROUND_UP(width , CAMERA_16PX_ALIGN); + break; + default: + ALOGW("WRN(%s[%d]):Invalid bayer format(%d)", __FUNCTION__, __LINE__, bayerFormat); + bytesPerLine = ROUND_UP(width * 2, CAMERA_16PX_ALIGN); + break; + } + + return bytesPerLine; +} + +int getBayerPlaneSize(int width, int height, int bayerFormat) +{ + int planeSize = 0; + int bytesPerLine = 0; + + if (width <= 0 || height <= 0) { + ALOGE("ERR(%s[%d]):Invalid input size (%d x %d)", __FUNCTION__, __LINE__, width, height); + return planeSize; + } + + bytesPerLine = getBayerLineSize(width, bayerFormat); + planeSize = bytesPerLine * height; + + return planeSize; +} + +bool dumpToFile(char *filename, char *srcBuf, unsigned int size) +{ + FILE *yuvFd = NULL; + char *buffer = NULL; + + yuvFd = fopen(filename, "w+"); + + if (yuvFd == NULL) { + ALOGE("ERR(%s):open(%s) fail", + __func__, filename); + return false; + } + + buffer = (char *)malloc(size); + + if (buffer == NULL) { + ALOGE("ERR(%s):malloc fail", __func__); + fclose(yuvFd); + return false; + } + + memcpy(buffer, srcBuf, size); + + fflush(stdout); + + fwrite(buffer, 1, size, yuvFd); + + fflush(yuvFd); + + if (yuvFd) + fclose(yuvFd); + if (buffer) + free(buffer); + + ALOGD("DEBUG(%s):filedump(%s), size(%d) succeeded!!", + __func__, filename, size); + + return true; +} + + +bool dumpToFile2plane(char *filename, char *srcBuf, char *srcBuf1, unsigned int size, unsigned int size1) +{ + FILE *yuvFd = NULL; + char *buffer = NULL; + + yuvFd = fopen(filename, "w+"); + + if (yuvFd == NULL) { + ALOGE("ERR(%s):open(%s) fail", + __func__, filename); + return false; + } + + buffer = (char *)malloc(size + size1); + + if (buffer == NULL) { + ALOGE("ERR(%s):malloc fail", __func__); + fclose(yuvFd); + return false; + } + + memcpy(buffer, srcBuf, size); + memcpy(buffer + size, srcBuf1, size1); + + fflush(stdout); + + fwrite(buffer, 1, size + size1, yuvFd); + + fflush(yuvFd); + + if (yuvFd) + fclose(yuvFd); + if (buffer) + free(buffer); + + ALOGD("DEBUG(%s):filedump(%s), size(%d) succeeded!!", + __func__, filename, size); + + return true; +} + + +status_t getYuvPlaneSize(int format, unsigned int *size, + unsigned int width, unsigned int height) +{ + unsigned int frame_ratio = 1; + unsigned int frame_size = width * height; + unsigned int src_bpp = 0; + unsigned int src_planes = 0; + + if (getYuvFormatInfo(format, &src_bpp, &src_planes) < 0){ + ALOGE("ERR(%s[%d]): invalid format(%x)", __FUNCTION__, __LINE__, format); + return BAD_VALUE; + } + + src_planes = (src_planes == 0) ?
1 : src_planes; + frame_ratio = 8 * (src_planes -1) / (src_bpp - 8); + + switch (src_planes) { + case 1: + switch (format) { + case V4L2_PIX_FMT_BGR32: + case V4L2_PIX_FMT_RGB32: + size[0] = frame_size << 2; + break; + case V4L2_PIX_FMT_RGB565X: + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_VYUY: + case V4L2_PIX_FMT_YVYU: + size[0] = frame_size << 1; + break; + case V4L2_PIX_FMT_YUV420: + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + case V4L2_PIX_FMT_NV21M: + size[0] = (frame_size * 3) >> 1; + break; + case V4L2_PIX_FMT_YVU420: + size[0] = frame_size + (ALIGN((width >> 1), 16) * ((height >> 1) * 2)); + break; + default: + ALOGE("%s::invalid color type", __func__); + return BAD_VALUE; + break; + } + size[1] = 0; + size[2] = 0; + break; + case 2: + size[0] = frame_size; + size[1] = frame_size / frame_ratio; + size[2] = 0; + break; + case 3: + size[0] = frame_size; + size[1] = frame_size / frame_ratio; + size[2] = frame_size / frame_ratio; + break; + default: + ALOGE("%s::invalid color format", __func__); + return BAD_VALUE; + break; + } + + return NO_ERROR; +} + +status_t getYuvFormatInfo(unsigned int v4l2_pixel_format, + unsigned int *bpp, unsigned int *planes) +{ + switch (v4l2_pixel_format) { + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + *bpp = 12; + *planes = 1; + break; + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV21M: + case V4L2_PIX_FMT_NV12MT: + case V4L2_PIX_FMT_NV12MT_16X16: + *bpp = 12; + *planes = 2; + break; + case V4L2_PIX_FMT_YUV420: + case V4L2_PIX_FMT_YVU420: + *bpp = 12; + *planes = 1; + break; + case V4L2_PIX_FMT_YUV420M: + case V4L2_PIX_FMT_YVU420M: + *bpp = 12; + *planes = 3; + break; + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_YVYU: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_VYUY: + *bpp = 16; + *planes = 1; + break; + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + *bpp = 16; + *planes = 2; + break; + case V4L2_PIX_FMT_YUV422P: + *bpp = 16; + *planes = 3; + break; + default: + return BAD_VALUE; + break; + } + + return NO_ERROR; +} + +int getYuvPlaneCount(unsigned int v4l2_pixel_format) +{ + int ret = 0; + unsigned int bpp = 0; + unsigned int planeCnt = 0; + + ret = getYuvFormatInfo(v4l2_pixel_format, &bpp, &planeCnt); + if (ret < 0) { + ALOGE("ERR(%s[%d]): BAD_VALUE", __FUNCTION__, __LINE__); + return -1; + } + + return planeCnt; +} + +int displayExynosBuffer( ExynosCameraBuffer *buffer) { + ALOGD("-----------------------------------------------"); + ALOGD(" buffer.index = %d ", buffer->index); + ALOGD(" buffer.planeCount = %d ", buffer->planeCount); + for(int i = 0 ; i < buffer->planeCount ; i++ ) { + ALOGD(" buffer.fd[%d] = %d ", i, buffer->fd[i]); + ALOGD(" buffer.size[%d] = %d ", i, buffer->size[i]); + ALOGD(" buffer.addr[%d] = %p ", i, buffer->addr[i]); + } + ALOGD("-----------------------------------------------"); + return 0; +} + +#ifdef SENSOR_NAME_GET_FROM_FILE +int getSensorIdFromFile(int camId) +{ + FILE *fp = NULL; + int numread = -1; + char sensor_name[50]; + int sensorName = -1; + bool ret = true; + + if (camId == CAMERA_ID_BACK) { + fp = fopen(SENSOR_NAME_PATH_BACK, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + } else { + fp = fopen(SENSOR_NAME_PATH_FRONT, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + } + + if (fgets(sensor_name, sizeof(sensor_name), fp) == NULL) { + ALOGE("ERR(%s[%d]):failed to read sysfs entry",
__FUNCTION__, __LINE__); + goto err; + } + + numread = strlen(sensor_name); + ALOGD("DEBUG(%s[%d]):Sensor name is %s(%d)", __FUNCTION__, __LINE__, sensor_name, numread); + + /* TODO: strncmp for check sensor name, str is vendor specific sensor name + * ex) + * if (strncmp((const char*)sensor_name, "str", numread - 1) == 0) { + * sensorName = SENSOR_NAME_IMX135; + * } + */ + sensorName = atoi(sensor_name); + +err: + if (fp != NULL) + fclose(fp); + + return sensorName; +} +#endif + +#ifdef SENSOR_FW_GET_FROM_FILE +const char *getSensorFWFromFile(struct ExynosSensorInfoBase *info, int camId) +{ + FILE *fp = NULL; + int numread = -1; + + if (camId == CAMERA_ID_BACK) { + fp = fopen(SENSOR_FW_PATH_BACK, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + } else { + fp = fopen(SENSOR_FW_PATH_FRONT, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + } + if (fgets(info->sensor_fw, sizeof(info->sensor_fw), fp) == NULL) { + ALOGE("ERR(%s[%d]):failed to read sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + + numread = strlen(info->sensor_fw); + ALOGD("DEBUG(%s[%d]):Sensor fw is %s(%d)", __FUNCTION__, __LINE__, info->sensor_fw, numread); + +err: + if (fp != NULL) + fclose(fp); + + return (const char *)info->sensor_fw; +} +#endif + + +int checkBit(unsigned int *target, int index) +{ + int ret = 0; + if (*target & (1 << index)) { + ret = 1; + } else { + ret = 0; + } + return ret; +} + +void clearBit(unsigned int *target, int index, bool isStatePrint) +{ + *target = *target &~ (1 << index); + + if (isStatePrint) + ALOGD("INFO(%s[%d]):(0x%x)", __FUNCTION__, __LINE__, *target); +} + +void setBit(unsigned int *target, int index, bool isStatePrint) +{ + *target = *target | (1 << index); + + if (isStatePrint) + ALOGD("INFO(%s[%d]):(0x%x)", __FUNCTION__, __LINE__, *target); +} + +void resetBit(unsigned int *target, int value, bool isStatePrint) +{ + *target = value; + + if (isStatePrint) + ALOGD("INFO(%s[%d]):(0x%x)", __FUNCTION__, __LINE__, *target); +} + +status_t addBayerBuffer(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + __unused ExynosRect *dstRect, +#ifndef ADD_BAYER_BY_NEON + __unused +#endif + bool isPacked) +{ + status_t ret = NO_ERROR; + + if (srcBuf == NULL) { + ALOGE("ERR(%s[%d]):srcBuf == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* assume bayer buffer is 0 */ + if (srcBuf->size[0] != dstBuf->size[0]) + ALOGW("WARN(%s[%d]):srcBuf->size[0] (%d)!= dstBuf->size[0](%d). weird", + __FUNCTION__, __LINE__, srcBuf->size[0], dstBuf->size[0]); + + unsigned int copySize = (srcBuf->size[0] < dstBuf->size[0]) ? 
srcBuf->size[0] : dstBuf->size[0]; + +#ifdef ADD_BAYER_BY_NEON + if (isPacked == true) + ret = addBayerBufferByNeonPacked(srcBuf, dstBuf, dstRect, copySize); + else + ret = addBayerBufferByNeon(srcBuf, dstBuf, copySize); +#else + ret = addBayerBufferByCpu(srcBuf, dstBuf, copySize); +#endif + + if (ret != NO_ERROR) + ALOGE("ERR(%s[%d]):addBayerBuffer() fail", __FUNCTION__, __LINE__); + + return ret; +} + +status_t addBayerBufferByNeon(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + unsigned int copySize) +{ + if (srcBuf->addr[0] == NULL) { + ALOGE("ERR(%s[%d]):srcBuf->addr[0] == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (dstBuf->addr[0] == NULL) { + ALOGE("ERR(%s[%d]):dstBuf->addr[0] == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* bayer is max 16bit, so add by short */ + unsigned short*firstSrcAddr = (unsigned short *)srcBuf->addr[0]; + unsigned short*firstDstAddr = (unsigned short *)dstBuf->addr[0]; + unsigned short*srcAddr = firstSrcAddr; + unsigned short*dstAddr = firstDstAddr; + + /* + * loop as copySize / 32 byte + * 32 byte is perfect align size of cache. + * 64 byte is not faster than 32byte. + */ + unsigned int alignByte = 64; + unsigned int alignShort = 32; + unsigned int realCopySize = copySize / alignByte; + unsigned int remainedCopySize = copySize % alignByte; + + ALOGD("DEBUG(%s[%d]):srcAddr(%p), dstAddr(%p), copySize(%d), sizeof(short)(%d),\ + alignByte(%d), realCopySize(%d), remainedCopySize(%d)", + __FUNCTION__, __LINE__, srcAddr, dstAddr, copySize, sizeof(short), alignByte, + realCopySize, remainedCopySize); + + unsigned short* src0_ptr, *src1_ptr; + uint16x8_t src0_u16x8_0; + uint16x8_t src0_u16x8_1; + uint16x8_t src0_u16x8_2; + uint16x8_t src0_u16x8_3; + + src0_ptr = dstAddr; + src1_ptr = srcAddr; + + for (unsigned int i = 0; i < realCopySize; i++) { + src0_u16x8_0 = vqaddq_u16(vshlq_n_u16(vld1q_u16((uint16_t*)(src0_ptr)), 6), + vshlq_n_u16(vld1q_u16((uint16_t*)(src1_ptr)), 6)); + src0_u16x8_1 = vqaddq_u16(vshlq_n_u16(vld1q_u16((uint16_t*)(src0_ptr + 8)), 6), + vshlq_n_u16(vld1q_u16((uint16_t*)(src1_ptr + 8)), 6)); + src0_u16x8_2 = vqaddq_u16(vshlq_n_u16(vld1q_u16((uint16_t*)(src0_ptr + 16)), 6), + vshlq_n_u16(vld1q_u16((uint16_t*)(src1_ptr + 16)), 6)); + src0_u16x8_3 = vqaddq_u16(vshlq_n_u16(vld1q_u16((uint16_t*)(src0_ptr + 24)), 6), + vshlq_n_u16(vld1q_u16((uint16_t*)(src1_ptr + 24)), 6)); + + vst1q_u16((src0_ptr), vshrq_n_u16(src0_u16x8_0, 6)); + vst1q_u16((src0_ptr + 8), vshrq_n_u16(src0_u16x8_1, 6)); + vst1q_u16((src0_ptr + 16),vshrq_n_u16(src0_u16x8_2, 6)); + vst1q_u16((src0_ptr + 24),vshrq_n_u16(src0_u16x8_3, 6)); + + src0_ptr = firstDstAddr + (alignShort * (i + 1)); + src1_ptr = firstSrcAddr + (alignShort * (i + 1)); + } + + for (unsigned int i = 0; i < remainedCopySize; i++) { + dstAddr[i] = SATURATING_ADD(dstAddr[i], srcAddr[i]); + } + + return NO_ERROR; +} + +status_t addBayerBufferByNeonPacked(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + ExynosRect *dstRect, + unsigned int copySize) +{ + if (srcBuf->addr[0] == NULL) { + ALOGE("ERR(%s[%d]):srcBuf->addr[0] == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (dstBuf->addr[0] == NULL) { + ALOGE("ERR(%s[%d]):dstBuf->addr[0] == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* bayer is max 16bit, so add by short */ + unsigned char *firstSrcAddr = (unsigned char *)srcBuf->addr[0]; + unsigned char *firstDstAddr = (unsigned char *)dstBuf->addr[0]; + unsigned char *srcAddr = firstSrcAddr; + unsigned 
char *dstAddr = firstDstAddr; + + /* + * * loop as copySize / 32 byte + * * 32 byte is perfect align size of cache. + * * 64 byte is not faster than 32byte. + * */ + + unsigned int alignByte = 12; + unsigned int alignShort = 6; + unsigned int realCopySize = copySize / alignByte; + unsigned int remainedCopySize = copySize % alignByte; + + uint16x8_t src_u16x8_0; + uint16x8_t dst_u16x8_0; + + unsigned int width_byte = dstRect->w * 12 / 8; + width_byte = ALIGN_UP(width_byte, 16); + + ALOGD("DEBUG(%s[%d]):srcAddr(%p), dstAddr(%p), copySize(%d), sizeof(short)(%d),\ + alignByte(%d), realCopySize(%d), remainedCopySize(%d), pixel width(%d), pixel height(%d),\ + 16 aligned byte width(%d)", + __FUNCTION__, __LINE__, srcAddr, dstAddr, copySize, sizeof(short), + alignByte, realCopySize, remainedCopySize, dstRect->w, dstRect->h, width_byte); + + + unsigned char dst_temp[16]; + unsigned short dstPix_0, dstPix_1, dstPix_2, dstPix_3, dstPix_4, dstPix_5, dstPix_6, dstPix_7; + unsigned short srcPix_0, srcPix_1, srcPix_2, srcPix_3, srcPix_4, srcPix_5, srcPix_6, srcPix_7; + unsigned int col; + + for (unsigned int row = 0; row < (unsigned int)dstRect->h; row++) { + for (col = 0; col + alignByte <= width_byte; col += alignByte) { + dstAddr = firstDstAddr + width_byte * row + col; + srcAddr = firstSrcAddr + width_byte * row + col; + + unsigned short temp_0 = dstAddr[0]; + unsigned short temp_1 = dstAddr[1]; + unsigned short temp_cmbd = COMBINE_P0(temp_0, temp_1); + unsigned short temp_2 = dstAddr[2]; + unsigned short temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + + dstPix_0 = temp_cmbd; + dstPix_1 = temp_cmbd2; + + temp_0 = dstAddr[3]; + temp_1 = dstAddr[4]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = dstAddr[5]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + + dstPix_2 = temp_cmbd; + dstPix_3 = temp_cmbd2; + + temp_0 = dstAddr[6]; + temp_1 = dstAddr[7]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = dstAddr[8]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + + dstPix_4 = temp_cmbd; + dstPix_5 = temp_cmbd2; + + temp_0 = dstAddr[9]; + temp_1 = dstAddr[10]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = dstAddr[11]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + + dstPix_6 = temp_cmbd; + dstPix_7 = temp_cmbd2; + + temp_0 = srcAddr[0]; + temp_1 = srcAddr[1]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = srcAddr[2]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + + srcPix_0 = temp_cmbd; + srcPix_1 = temp_cmbd2; + + temp_0 = srcAddr[3]; + temp_1 = srcAddr[4]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = srcAddr[5]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + + srcPix_2 = temp_cmbd; + srcPix_3 = temp_cmbd2; + + temp_0 = srcAddr[6]; + temp_1 = srcAddr[7]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = srcAddr[8]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + srcPix_4 = temp_cmbd; + srcPix_5 = temp_cmbd2; + + temp_0 = srcAddr[9]; + temp_1 = srcAddr[10]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = srcAddr[11]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + srcPix_6 = temp_cmbd; + srcPix_7 = temp_cmbd2; + + src_u16x8_0 = vsetq_lane_u16(srcPix_0, src_u16x8_0, 0); + src_u16x8_0 = vsetq_lane_u16(srcPix_1, src_u16x8_0, 1); + src_u16x8_0 = vsetq_lane_u16(srcPix_2, src_u16x8_0, 2); + src_u16x8_0 = vsetq_lane_u16(srcPix_3, src_u16x8_0, 3); + src_u16x8_0 = vsetq_lane_u16(srcPix_4, src_u16x8_0, 4); + src_u16x8_0 = vsetq_lane_u16(srcPix_5, src_u16x8_0, 5); + src_u16x8_0 = vsetq_lane_u16(srcPix_6, src_u16x8_0, 6); + src_u16x8_0 = vsetq_lane_u16(srcPix_7, src_u16x8_0, 7); + + dst_u16x8_0 = 
vsetq_lane_u16(dstPix_0, dst_u16x8_0, 0); + dst_u16x8_0 = vsetq_lane_u16(dstPix_1, dst_u16x8_0, 1); + dst_u16x8_0 = vsetq_lane_u16(dstPix_2, dst_u16x8_0, 2); + dst_u16x8_0 = vsetq_lane_u16(dstPix_3, dst_u16x8_0, 3); + dst_u16x8_0 = vsetq_lane_u16(dstPix_4, dst_u16x8_0, 4); + dst_u16x8_0 = vsetq_lane_u16(dstPix_5, dst_u16x8_0, 5); + dst_u16x8_0 = vsetq_lane_u16(dstPix_6, dst_u16x8_0, 6); + dst_u16x8_0 = vsetq_lane_u16(dstPix_7, dst_u16x8_0, 7); + + dst_u16x8_0 = vqaddq_u16(vshlq_n_u16(dst_u16x8_0, 4), vshlq_n_u16(src_u16x8_0, 4)); + dst_u16x8_0 = vshlq_n_u16(vshrq_n_u16(dst_u16x8_0, 6), 2); + + dstPix_0 = vgetq_lane_u16(dst_u16x8_0, 0); + dstPix_1 = vgetq_lane_u16(dst_u16x8_0, 1); + dstPix_2 = vgetq_lane_u16(dst_u16x8_0, 2); + dstPix_3 = vgetq_lane_u16(dst_u16x8_0, 3); + dstPix_4 = vgetq_lane_u16(dst_u16x8_0, 4); + dstPix_5 = vgetq_lane_u16(dst_u16x8_0, 5); + dstPix_6 = vgetq_lane_u16(dst_u16x8_0, 6); + dstPix_7 = vgetq_lane_u16(dst_u16x8_0, 7); + + dstAddr[0] = (unsigned char)(dstPix_0); + dstAddr[1] = (unsigned char)((COMBINE_P3(dstPix_0, dstPix_1))); + dstAddr[2] = (unsigned char)(dstPix_1 >> 4); + dstAddr[3] = (unsigned char)(dstPix_2); + dstAddr[4] = (unsigned char)((COMBINE_P3(dstPix_2, dstPix_3))); + dstAddr[5] = (unsigned char)(dstPix_3 >> 4); + dstAddr[6] = (unsigned char)(dstPix_4); + dstAddr[7] = (unsigned char)((COMBINE_P3(dstPix_4, dstPix_5))); + dstAddr[8] = (unsigned char)(dstPix_5 >> 4); + dstAddr[9] = (unsigned char)(dstPix_6); + dstAddr[10] = (unsigned char)((COMBINE_P3(dstPix_6, dstPix_7))); + dstAddr[11] = (unsigned char)(dstPix_7 >> 4); + } + } + +#if 0 + /* for the case of pixel width which is not a multiple of 8. The section of codes need to be verified */ + for (unsigned int i = 0; i < remainedCopySize; i += 3) { + unsigned char temp_0 = dstAddr[0]; + unsigned char temp_1 = dstAddr[1]; + unsigned char temp_cmbd = COMBINE_P0(temp_0, temp_1); + unsigned char temp_2 = dstAddr[2]; + unsigned char temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + unsigned char dstPix_0 = temp_cmbd; + unsigned char dstPix_1 = temp_cmbd2; + + temp_0 = srcAddr[0]; + temp_1 = srcAddr[1]; + temp_cmbd = COMBINE_P0(temp_0, temp_1); + temp_2 = srcAddr[2]; + temp_cmbd2 = COMBINE_P1(temp_1, temp_2); + srcPix_0 = temp_cmbd; + srcPix_1 = temp_cmbd2; + + dstPix_0 = SATURATING_ADD(dstPix_0, srcPix_0); + dstPix_1 = SATURATING_ADD(dstPix_1, srcPix_1); + + dstAddr[0] = (unsigned char)(dstPix_0); + dstAddr[1] = (unsigned char)((COMBINE_P3(dstPix_0, dstPix_1))); + dstAddr[2] = (unsigned char)(dstPix_1 >> 4); + + dstAddr += 3; + srcAddr += 3; + } +#endif + + return NO_ERROR; +} + +status_t addBayerBufferByCpu(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + unsigned int copySize) +{ + if (srcBuf->addr[0] == NULL) { + ALOGE("ERR(%s[%d]):srcBuf->addr[0] == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (dstBuf->addr[0] == NULL) { + ALOGE("ERR(%s[%d]):dstBuf->addr[0] == NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* bayer is max 16bit, so add by short */ + unsigned short *firstSrcAddr = (unsigned short *)srcBuf->addr[0]; + unsigned short *firstDstAddr = (unsigned short *)dstBuf->addr[0]; + unsigned short *srcAddr = firstSrcAddr; + unsigned short *dstAddr = firstDstAddr; + + /* + * loop as copySize / 32 byte + * 32 byte is perfect align size of cache. + * 64 byte is not faster than 32byte. 
+ */ + unsigned int alignByte = 32; + unsigned int alignShort = 16; + unsigned int realCopySize = copySize / alignByte; + unsigned int remainedCopySize = copySize % alignByte; + + ALOGD("DEBUG(%s[%d]):srcAddr(%p), dstAddr(%p), copySize(%d), sizeof(short)(%d),\ + alignByte(%d), realCopySize(%d), remainedCopySize(%d)", + __FUNCTION__, __LINE__, srcAddr, dstAddr, copySize, sizeof(short), alignByte, + realCopySize, remainedCopySize); + + for (unsigned int i = 0; i < realCopySize; i++) { + dstAddr[0] = SATURATING_ADD(dstAddr[0], srcAddr[0]); + dstAddr[1] = SATURATING_ADD(dstAddr[1], srcAddr[1]); + dstAddr[2] = SATURATING_ADD(dstAddr[2], srcAddr[2]); + dstAddr[3] = SATURATING_ADD(dstAddr[3], srcAddr[3]); + dstAddr[4] = SATURATING_ADD(dstAddr[4], srcAddr[4]); + dstAddr[5] = SATURATING_ADD(dstAddr[5], srcAddr[5]); + dstAddr[6] = SATURATING_ADD(dstAddr[6], srcAddr[6]); + dstAddr[7] = SATURATING_ADD(dstAddr[7], srcAddr[7]); + dstAddr[8] = SATURATING_ADD(dstAddr[8], srcAddr[8]); + dstAddr[9] = SATURATING_ADD(dstAddr[9], srcAddr[9]); + dstAddr[10] = SATURATING_ADD(dstAddr[10], srcAddr[10]); + dstAddr[11] = SATURATING_ADD(dstAddr[11], srcAddr[11]); + dstAddr[12] = SATURATING_ADD(dstAddr[12], srcAddr[12]); + dstAddr[13] = SATURATING_ADD(dstAddr[13], srcAddr[13]); + dstAddr[14] = SATURATING_ADD(dstAddr[14], srcAddr[14]); + dstAddr[15] = SATURATING_ADD(dstAddr[15], srcAddr[15]); + + // jump next 32bytes. + //srcAddr += alignShort; + //dstAddr += alignShort; + /* This is faster on compiler lever */ + srcAddr = firstSrcAddr + (alignShort * (i + 1)); + dstAddr = firstDstAddr + (alignShort * (i + 1)); + } + + for (unsigned int i = 0; i < remainedCopySize; i++) { + dstAddr[i] = SATURATING_ADD(dstAddr[i], srcAddr[i]); + } + + return NO_ERROR; +} + +char clip(int i) +{ + if(i < 0) + return 0; + else if(i > 255) + return 255; + else + return i; +} + +/* + ** The only convertingYUYVtoRGB888() code is covered by BSD. 
+ ** URL from which the open source has been downloaded is + ** http://www.mathworks.com/matlabcentral/fileexchange/26249-yuy2-to-rgb-converter/content/YUY2toRGB.zip + */ +void convertingYUYVtoRGB888(char *dstBuf, char *srcBuf, int width, int height) +{ + int Y0, Y1, U, V, C0, C1, D, E; + + for(int y = 0; y < height; y++) { + for(int x = 0; x < (width / 2); x++) + { + Y0 = srcBuf[(2 * y * width) + (4 * x)]; + Y1 = srcBuf[(2 * y * width) + (4 * x) + 2]; + U = srcBuf[(2 * y * width) + (4 * x) + 1]; + V = srcBuf[(2 * y * width) + (4 * x) + 3]; + C0 = Y0 - 16; + C1 = Y1 - 16; + D = U - 128; + E = V - 128; + dstBuf[6 * (x + (y * width / 2))] = + clip(((298 * C0) + (409 * E) + 128) >> 8); // R0 + dstBuf[6 * (x + (y * width / 2)) + 1] = + clip(((298 * C0) - (100 * D) - (208 * E) + 128) >> 8); // G0 + dstBuf[6 * (x + (y * width / 2)) + 2] = + clip(((298 * C0) + (516 * D) + 128) >> 8); // B0 + dstBuf[6 * (x + (y * width / 2)) + 3] = + clip(((298 * C1) + (409 * E) + 128) >> 8); // R1 + dstBuf[6 * (x + (y * width / 2)) + 4] = + clip(((298 * C1) - (100 * D) - (208 * E) + 128) >> 8); // G1 + dstBuf[6 * (x + (y * width / 2)) + 5] = + clip(((298 * C1) + (516 * D) + 128) >> 8); // B1 + } + } +} + +void checkAndroidVersion(void) { + char value[PROPERTY_VALUE_MAX] = {0}; + char targetAndroidVersion[PROPERTY_VALUE_MAX] = {0}; + + snprintf(targetAndroidVersion, PROPERTY_VALUE_MAX, "%d.%d", TARGET_ANDROID_VER_MAJ, TARGET_ANDROID_VER_MIN); + + property_get("ro.build.version.release", value, "0"); + + if (strncmp(targetAndroidVersion, value, PROPERTY_VALUE_MAX)) + ALOGD("DEBUG(%s[%d]): Target Android version (%s), build version (%s)", + __FUNCTION__, __LINE__, targetAndroidVersion, value); + else + ALOGI("Android build version release %s", value); +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/ExynosCameraUtils.h b/libcamera/common_v2/ExynosCameraUtils.h new file mode 100644 index 0000000..5df4e0c --- /dev/null +++ b/libcamera/common_v2/ExynosCameraUtils.h @@ -0,0 +1,268 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_UTILS_H +#define EXYNOS_CAMERA_UTILS_H + +#include +#include +#include +#include + +#include "exynos_format.h" +#include "ExynosRect.h" + +#include "ExynosCameraConfig.h" +#include "ExynosCameraSensorInfo.h" +#include "videodev2_exynos_media.h" +#include "ExynosCameraBuffer.h" + +#define ROUND_OFF(x, dig) (floor((x) * pow(10.0f, dig)) / pow(10.0f, dig)) +#define GET_MAX_NUM(a, b, c) \ + ((a) < (b) ? \ + ((b) < (c) ? (c) : (b)) \ + :((a) < (c) ?
(c) : (a)) ) + +#define SAFE_DELETE(obj) \ + do { \ + if (obj) { \ + delete obj; \ + obj = NULL; \ + } \ + } while(0) + +namespace android { + +bool getCropRect( + int src_w, int src_h, + int dst_w, int dst_h, + int *crop_x, int *crop_y, + int *crop_w, int *crop_h, + int zoom); + +bool getCropRect2( + int src_w, int src_h, + int dst_w, int dst_h, + int *new_src_x, int *new_src_y, + int *new_src_w, int *new_src_h, + int zoom); + +status_t getCropRectAlign( + int src_w, int src_h, + int dst_w, int dst_h, + int *crop_x, int *crop_y, + int *crop_w, int *crop_h, + int align_w, int align_h, + int zoom, float zoomRatio); + +uint32_t bracketsStr2Ints( + char *str, + uint32_t num, + ExynosRect2 *rect2s, + int *weights, + int mode); +bool subBracketsStr2Ints(int num, char *str, int *arr); + +void convertingRectToRect2(ExynosRect *rect, ExynosRect2 *rect2); +void convertingRect2ToRect(ExynosRect2 *rect2, ExynosRect *rect); + +bool isRectNull(ExynosRect *rect); +bool isRectNull(ExynosRect2 *rect2); +bool isRectEqual(ExynosRect *rect1, ExynosRect *rect2); +bool isRectEqual(ExynosRect2 *rect1, ExynosRect2 *rect2); + +ExynosRect2 convertingActualArea2HWArea(ExynosRect2 *srcRect, const ExynosRect *regionRect); +ExynosRect2 convertingAndroidArea2HWArea(ExynosRect2 *srcRect, const ExynosRect *regionRect); +ExynosRect2 convertingAndroidArea2HWAreaBcropOut(ExynosRect2 *srcRect, const ExynosRect *regionRect); +ExynosRect2 convertingAndroidArea2HWAreaBcropIn(ExynosRect2 *srcRect, const ExynosRect *regionRect); +ExynosRect2 convertingSrcArea2DstArea(ExynosRect2 *srcRect, const ExynosRect *srcRegionRect, const ExynosRect *dstRegionRect); + +status_t getResolutionList(String8 &string8Buf, struct ExynosSensorInfoBase *sensorInfo, + int *w, int *h, int mode, int camid); +void setZoomRatioList(int *list, int len, float maxZoomRatio); +status_t getZoomRatioList(String8 & string8Buf, int maxZoom, int maxZoomRatio, int *list); +status_t getSupportedFpsList(String8 & string8Buf, int min, int max, + int camid, struct ExynosSensorInfoBase *sensorInfo); + + +/* + * Control struct camera2_shot_ext + */ +int32_t getMetaDmRequestFrameCount(struct camera2_shot_ext *shot_ext); +int32_t getMetaDmRequestFrameCount(struct camera2_dm *dm); + +void setMetaCtlAeTargetFpsRange(struct camera2_shot_ext *shot_ext, uint32_t min, uint32_t max); +void getMetaCtlAeTargetFpsRange(struct camera2_shot_ext *shot_ext, uint32_t *min, uint32_t *max); + +void setMetaCtlSensorFrameDuration(struct camera2_shot_ext *shot_ext, uint64_t duration); +void getMetaCtlSensorFrameDuration(struct camera2_shot_ext *shot_ext, uint64_t *duration); + +void setMetaCtlAeMode(struct camera2_shot_ext *shot_ext, enum aa_aemode aeMode); +void getMetaCtlAeMode(struct camera2_shot_ext *shot_ext, enum aa_aemode *aeMode); + +void setMetaCtlAeLock(struct camera2_shot_ext *shot_ext, bool lock); +void getMetaCtlAeLock(struct camera2_shot_ext *shot_ext, bool *lock); +void setMetaVtMode(struct camera2_shot_ext *shot_ext, enum camera_vt_mode mode); +void setMetaVideoMode(struct camera2_shot_ext *shot_ext, enum aa_videomode mode); + +void setMetaCtlExposureCompensation(struct camera2_shot_ext *shot_ext, int32_t expCompensation); +void getMetaCtlExposureCompensation(struct camera2_shot_ext *shot_ext, int32_t *expCompensatione); +#ifdef USE_SUBDIVIDED_EV +void setMetaCtlExposureCompensationStep(struct camera2_shot_ext *shot_ext, float expCompensationStep); +#endif +void setMetaCtlExposureTime(struct camera2_shot_ext *shot_ext, uint64_t exposureTime); +void 
getMetaCtlExposureTime(struct camera2_shot_ext *shot_ext, uint64_t *exposureTime); +void setMetaCtlCaptureExposureTime(struct camera2_shot_ext *shot_ext, uint32_t exposureTime); +void getMetaCtlCaptureExposureTime(struct camera2_shot_ext *shot_ext, uint32_t *exposureTime); + +void setMetaCtlWbLevel(struct camera2_shot_ext *shot_ext, int32_t wbLevel); +void getMetaCtlWbLevel(struct camera2_shot_ext *shot_ext, int32_t *wbLevel); + +status_t setMetaCtlCropRegion( + struct camera2_shot_ext *shot_ext, + int x, int y, int w, int h); +status_t setMetaCtlCropRegion( + struct camera2_shot_ext *shot_ext, + int zoom, + int srcW, int srcH, + int dstW, int dstH, float zoomRatio); +void getMetaCtlCropRegion( + struct camera2_shot_ext *shot_ext, + int *x, int *y, + int *w, int *h); + +void setMetaCtlAeRegion( + struct camera2_shot_ext *shot_ext, + int x, int y, + int w, int h, + int weight); +void getMetaCtlAeRegion( + struct camera2_shot_ext *shot_ext, + int *x, int *y, + int *w, int *h, + int *weight); + +void setMetaCtlAntibandingMode(struct camera2_shot_ext *shot_ext, enum aa_ae_antibanding_mode antibandingMode); +void getMetaCtlAntibandingMode(struct camera2_shot_ext *shot_ext, enum aa_ae_antibanding_mode *antibandingMode); + +void setMetaCtlSceneMode(struct camera2_shot_ext *shot_ext, enum aa_mode mode, enum aa_scene_mode sceneMode); +void getMetaCtlSceneMode(struct camera2_shot_ext *shot_ext, enum aa_mode *mode, enum aa_scene_mode *sceneMode); + +void setMetaCtlAwbMode(struct camera2_shot_ext *shot_ext, enum aa_awbmode awbMode); +void getMetaCtlAwbMode(struct camera2_shot_ext *shot_ext, enum aa_awbmode *awbMode); +void setMetaCtlAwbLock(struct camera2_shot_ext *shot_ext, bool lock); +void getMetaCtlAwbLock(struct camera2_shot_ext *shot_ext, bool *lock); +void setMetaCtlAfRegion(struct camera2_shot_ext *shot_ext, + int x, int y, int w, int h, int weight); +void getMetaCtlAfRegion(struct camera2_shot_ext *shot_ext, + int *x, int *y, int *w, int *h, int *weight); + +void setMetaCtlColorCorrectionMode(struct camera2_shot_ext *shot_ext, enum colorcorrection_mode mode); +void getMetaCtlColorCorrectionMode(struct camera2_shot_ext *shot_ext, enum colorcorrection_mode *mode); +void setMetaCtlAaEffect(struct camera2_shot_ext *shot_ext, aa_effect_mode_t effect); +void getMetaCtlAaEffect(struct camera2_shot_ext *shot_ext, aa_effect_mode_t *effect); +void setMetaCtlBrightness(struct camera2_shot_ext *shot_ext, int32_t brightness); +void getMetaCtlBrightness(struct camera2_shot_ext *shot_ext, int32_t *brightness); + +void setMetaCtlSaturation(struct camera2_shot_ext *shot_ext, int32_t saturation); +void getMetaCtlSaturation(struct camera2_shot_ext *shot_ext, int32_t *saturation); + +void setMetaCtlHue(struct camera2_shot_ext *shot_ext, int32_t hue); +void getMetaCtlHue(struct camera2_shot_ext *shot_ext, int32_t *hue); + +void setMetaCtlContrast(struct camera2_shot_ext *shot_ext, uint32_t contrast); +void getMetaCtlContrast(struct camera2_shot_ext *shot_ext, uint32_t *contrast); + +void setMetaCtlSharpness(struct camera2_shot_ext *shot_ext, enum processing_mode edge_mode, int32_t edge_sharpness, + enum processing_mode noise_mode, int32_t noise_sharpness); +void getMetaCtlSharpness(struct camera2_shot_ext *shot_ext, enum processing_mode *mode, int32_t *sharpness, + enum processing_mode *noise_mode, int32_t *noise_sharpness); + + +void setMetaCtlIso(struct camera2_shot_ext *shot_ext, enum aa_isomode mode, uint32_t iso); +void getMetaCtlIso(struct camera2_shot_ext *shot_ext, enum aa_isomode *mode, uint32_t *iso); 
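+/*
+ * Illustrative usage sketch for the setMetaCtl / getMetaCtl helpers declared
+ * above; they simply read and write fields of a struct camera2_shot_ext that
+ * the caller owns (the values below are only an example, not a required
+ * call order):
+ *
+ *   struct camera2_shot_ext shot_ext;
+ *   memset(&shot_ext, 0x00, sizeof(struct camera2_shot_ext));
+ *
+ *   setMetaCtlAeTargetFpsRange(&shot_ext, 15, 30);
+ *   setMetaCtlAeLock(&shot_ext, true);
+ *
+ *   uint32_t minFps = 0, maxFps = 0;
+ *   getMetaCtlAeTargetFpsRange(&shot_ext, &minFps, &maxFps);
+ */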
+void setMetaCtlFdMode(struct camera2_shot_ext *shot_ext, enum facedetect_mode mode); + +void getStreamFrameValid(struct camera2_stream *shot_stream, uint32_t *fvalid); +void getStreamFrameCount(struct camera2_stream *shot_stream, uint32_t *fcount); + +status_t setMetaDmSensorTimeStamp(struct camera2_shot_ext *shot_ext, uint64_t timeStamp); +nsecs_t getMetaDmSensorTimeStamp(struct camera2_shot_ext *shot_ext); + +void setMetaNodeLeaderRequest(struct camera2_shot_ext* shot_ext, int value); +void setMetaNodeLeaderVideoID(struct camera2_shot_ext* shot_ext, int value); +void setMetaNodeLeaderInputSize(struct camera2_shot_ext * shot_ext, unsigned int x, unsigned int y, unsigned int w, unsigned int h); +void setMetaNodeLeaderOutputSize(struct camera2_shot_ext * shot_ext, unsigned int x, unsigned int y, unsigned int w, unsigned int h); +void setMetaNodeCaptureRequest(struct camera2_shot_ext* shot_ext, int index, int value); +void setMetaNodeCaptureVideoID(struct camera2_shot_ext* shot_ext, int index, int value); +void setMetaNodeCaptureInputSize(struct camera2_shot_ext * shot_ext, int index, unsigned int x, unsigned int y, unsigned int w, unsigned int h); +void setMetaNodeCaptureOutputSize(struct camera2_shot_ext * shot_ext, int index, unsigned int x, unsigned int y, unsigned int w, unsigned int h); + +void setMetaBypassDrc(struct camera2_shot_ext *shot_ext, int value); +void setMetaBypassDis(struct camera2_shot_ext *shot_ext, int value); +void setMetaBypassDnr(struct camera2_shot_ext *shot_ext, int value); +void setMetaBypassFd(struct camera2_shot_ext *shot_ext, int value); + +void setMetaSetfile(struct camera2_shot_ext *shot_ext, int value); + + +int mergeSetfileYuvRange(int setfile, int yuvRange); + +int getPlaneSizeFlite(int width, int height); +int getBayerLineSize(int width, int bayerFormat); +int getBayerPlaneSize(int width, int height, int bayerFormat); + +bool dumpToFile(char *filename, char *srcBuf, unsigned int size); +bool dumpToFile2plane(char *filename, char *srcBuf, char *srcBuf1, unsigned int size, unsigned int size1); + +/* TODO: This functions need to be commonized */ +status_t getYuvPlaneSize(int format, unsigned int *size, + unsigned int width, unsigned int height); +status_t getYuvFormatInfo(unsigned int v4l2_pixel_format, + unsigned int *bpp, unsigned int *planes); +int getYuvPlaneCount(unsigned int v4l2_pixel_format); +int displayExynosBuffer( ExynosCameraBuffer *buffer); + +int checkBit(unsigned int *target, int index); +void clearBit(unsigned int *target, int index, bool isStatePrint = false); +void setBit(unsigned int *target, int index, bool isStatePrint = false); +void resetBit(unsigned int *target, int value, bool isStatePrint = false); + +status_t addBayerBuffer(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + ExynosRect *dstRect, + bool isPacked = false); +status_t addBayerBufferByNeon(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + unsigned int copySize); +status_t addBayerBufferByNeonPacked(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + ExynosRect *dstRect, + unsigned int copySize); +status_t addBayerBufferByCpu(struct ExynosCameraBuffer *srcBuf, + struct ExynosCameraBuffer *dstBuf, + unsigned int copySize); + +char clip(int i); +void convertingYUYVtoRGB888(char *dstBuf, char *srcBuf, int width, int height); + +void checkAndroidVersion(void); + +}; /* namespace android */ + +#endif + diff --git a/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut.h 
b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut.h new file mode 100644 index 0000000..024e8e7 --- /dev/null +++ b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut.h @@ -0,0 +1,69 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_DOF_LUT_H +#define EXYNOS_CAMERA_DOF_LUT_H + +#include "ExynosCameraFusionInclude.h" + +using namespace android; + +#define DEFAULT_DISTANCE_FAR (100000.0f) /* 100m == 10000cm == 100000mm */ +#define DEFAULT_DISTANCE_NEAR (0.0f) + +struct DOF_LUT { + int distance; // mm + float lensShift; // mm + float farField; // mm + float nearField; // mm + + DOF_LUT(int distance, + float lensShift, + float farField, + float nearField) : + distance (distance), + lensShift(lensShift), + farField (farField), + nearField(nearField) + {} + + DOF_LUT() { + distance = 0; + lensShift = 0.0f; + farField = 0.0f; + nearField = 0.0f; + }; +}; + +struct DOF { + const DOF_LUT *lut; + int lutCnt; + + float lensShiftOn12M; // mm + float lensShiftOn01M; // mm + + DOF() + { + lut = NULL; + lutCnt = 0; + + lensShiftOn12M = 0.0f; + lensShiftOn01M = 0.0f; + }; +}; + +#endif // EXYNOS_CAMERA_DOF_LUT_H diff --git a/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3L8.h b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3L8.h new file mode 100644 index 0000000..c5909bf --- /dev/null +++ b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3L8.h @@ -0,0 +1,75 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_DOF_LUT_3L8_H +#define EXYNOS_CAMERA_DOF_LUT_3L8_H + +#include "ExynosCameraFusionInclude.h" + +const DOF_LUT DOF_LUT_3L8[] = +{ + DOF_LUT(10000, 0.001, DEFAULT_DISTANCE_FAR, 2302), + DOF_LUT( 5000, 0.003, DEFAULT_DISTANCE_FAR, 1872), + DOF_LUT( 4000, 0.003, DEFAULT_DISTANCE_FAR, 1712), + DOF_LUT( 3000, 0.004, DEFAULT_DISTANCE_FAR, 1499), + DOF_LUT( 2000, 0.007, 6031, 1200), + DOF_LUT( 1900, 0.007, 5203, 1163), + DOF_LUT( 1800, 0.007, 4515, 1125), + DOF_LUT( 1700, 0.008, 3933, 1085), + DOF_LUT( 1600, 0.008, 3435, 1044), + DOF_LUT( 1500, 0.009, 3004, 1000), + DOF_LUT( 1400, 0.010, 2627, 955), + DOF_LUT( 1300, 0.010, 2295, 907), + DOF_LUT( 1200, 0.011, 2000, 858), + DOF_LUT( 1100, 0.012, 1736, 805), + DOF_LUT( 1000, 0.013, 1499, 751), + DOF_LUT( 900, 0.015, 1285, 693), + DOF_LUT( 800, 0.017, 1090, 632), + DOF_LUT( 700, 0.019, 912, 568), + DOF_LUT( 600, 0.022, 749, 501), + DOF_LUT( 500, 0.027, 599, 429), + DOF_LUT( 450, 0.030, 528, 392), + DOF_LUT( 400, 0.034, 461, 354), + DOF_LUT( 350, 0.039, 395, 314), + DOF_LUT( 300, 0.045, 333, 273), + DOF_LUT( 250, 0.054, 272, 231), + DOF_LUT( 200, 0.068, 214, 188), + DOF_LUT( 150, 0.091, 158, 143), + DOF_LUT( 140, 0.098, 147, 134), + DOF_LUT( 130, 0.106, 136, 125), + DOF_LUT( 120, 0.115, 125, 116), + DOF_LUT( 110, 0.126, 114, 106), + DOF_LUT( 100, 0.139, 103, 97), + DOF_LUT( 90, 0.155, 93, 88), + DOF_LUT( 80, 0.175, 82, 78), + DOF_LUT( 70, 0.202, 72, 69), + DOF_LUT( 60, 0.237, 61, 59), + DOF_LUT( 50, 0.289, 51, 49), +}; + +struct DOF_3L8 : public DOF +{ + DOF_3L8() { + lut = DOF_LUT_3L8; + lutCnt = sizeof(DOF_LUT_3L8) / sizeof(DOF_LUT); + + lensShiftOn12M = 0.001f; + lensShiftOn01M = 0.034f; // this is 0.4M's value. + } +}; + +#endif // EXYNOS_CAMERA_DOF_LUT_3L8_H diff --git a/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3M3.h b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3M3.h new file mode 100644 index 0000000..a2ae671 --- /dev/null +++ b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut3M3.h @@ -0,0 +1,75 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_DOF_LUT_3M3_H +#define EXYNOS_CAMERA_DOF_LUT_3M3_H + +#include "ExynosCameraFusionInclude.h" + +const DOF_LUT DOF_LUT_3M3[] = +{ + DOF_LUT(10000, 0.001, DEFAULT_DISTANCE_FAR, 2302), + DOF_LUT( 5000, 0.003, DEFAULT_DISTANCE_FAR, 1872), + DOF_LUT( 4000, 0.003, DEFAULT_DISTANCE_FAR, 1712), + DOF_LUT( 3000, 0.004, DEFAULT_DISTANCE_FAR, 1499), + DOF_LUT( 2000, 0.007, 6031, 1200), + DOF_LUT( 1900, 0.007, 5203, 1163), + DOF_LUT( 1800, 0.007, 4515, 1125), + DOF_LUT( 1700, 0.008, 3933, 1085), + DOF_LUT( 1600, 0.008, 3435, 1044), + DOF_LUT( 1500, 0.009, 3004, 1000), + DOF_LUT( 1400, 0.010, 2627, 955), + DOF_LUT( 1300, 0.010, 2295, 907), + DOF_LUT( 1200, 0.011, 2000, 858), + DOF_LUT( 1100, 0.012, 1736, 805), + DOF_LUT( 1000, 0.013, 1499, 751), + DOF_LUT( 900, 0.015, 1285, 693), + DOF_LUT( 800, 0.017, 1090, 632), + DOF_LUT( 700, 0.019, 912, 568), + DOF_LUT( 600, 0.022, 749, 501), + DOF_LUT( 500, 0.027, 599, 429), + DOF_LUT( 450, 0.030, 528, 392), + DOF_LUT( 400, 0.034, 461, 354), + DOF_LUT( 350, 0.039, 395, 314), + DOF_LUT( 300, 0.045, 333, 273), + DOF_LUT( 250, 0.054, 272, 231), + DOF_LUT( 200, 0.068, 214, 188), + DOF_LUT( 150, 0.091, 158, 143), + DOF_LUT( 140, 0.098, 147, 134), + DOF_LUT( 130, 0.106, 136, 125), + DOF_LUT( 120, 0.115, 125, 116), + DOF_LUT( 110, 0.126, 114, 106), + DOF_LUT( 100, 0.139, 103, 97), + DOF_LUT( 90, 0.155, 93, 88), + DOF_LUT( 80, 0.175, 82, 78), + DOF_LUT( 70, 0.202, 72, 69), + DOF_LUT( 60, 0.237, 61, 59), + DOF_LUT( 50, 0.289, 51, 49), +}; + +struct DOF_3M3 : public DOF +{ + DOF_3M3() { + lut = DOF_LUT_3M3; + lutCnt = sizeof(DOF_LUT_3M3) / sizeof(DOF_LUT); + + lensShiftOn12M = 0.001f; + lensShiftOn01M = 0.034f; // this is 0.4M's value. + } +}; + +#endif // EXYNOS_CAMERA_DOF_LUT_3M3_H diff --git a/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut4H8.h b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut4H8.h new file mode 100644 index 0000000..1dabf63 --- /dev/null +++ b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLut4H8.h @@ -0,0 +1,75 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_DOF_LUT_4H8_H +#define EXYNOS_CAMERA_DOF_LUT_4H8_H + +#include "ExynosCameraFusionInclude.h" + +const DOF_LUT DOF_LUT_4H8[] = +{ + DOF_LUT(10000, 0.003, DEFAULT_DISTANCE_FAR, 3586), + DOF_LUT( 5000, 0.007, 47686, 2641), + DOF_LUT( 4000, 0.008, 14070, 2333), + DOF_LUT( 3000, 0.011, 6469, 1954), + DOF_LUT( 2000, 0.016, 3110, 1475), + DOF_LUT( 1900, 0.017, 2874, 1420), + DOF_LUT( 1800, 0.018, 2651, 1363), + DOF_LUT( 1700, 0.019, 2439, 1305), + DOF_LUT( 1600, 0.020, 2238, 1246), + DOF_LUT( 1500, 0.022, 2047, 1184), + DOF_LUT( 1400, 0.023, 1865, 1121), + DOF_LUT( 1300, 0.025, 1691, 1056), + DOF_LUT( 1200, 0.027, 1525, 989), + DOF_LUT( 1100, 0.030, 1367, 921), + DOF_LUT( 1000, 0.033, 1216, 850), + DOF_LUT( 900, 0.036, 1071, 776), + DOF_LUT( 800, 0.041, 932, 701), + DOF_LUT( 700, 0.047, 799, 623), + DOF_LUT( 600, 0.055, 671, 543), + DOF_LUT( 500, 0.066, 548, 460), + DOF_LUT( 450, 0.073, 488, 417), + DOF_LUT( 400, 0.082, 430, 374), + DOF_LUT( 350, 0.094, 373, 330), + DOF_LUT( 300, 0.110, 316, 285), + DOF_LUT( 250, 0.133, 261, 240), + DOF_LUT( 200, 0.167, 207, 193), + DOF_LUT( 150, 0.225, 154, 146), + DOF_LUT( 140, 0.242, 143, 137), + DOF_LUT( 130, 0.261, 133, 127), + DOF_LUT( 120, 0.284, 122, 118), + DOF_LUT( 110, 0.312, 112, 108), + DOF_LUT( 100, 0.345, 102, 98), + DOF_LUT( 90, 0.385, 91, 89), + DOF_LUT( 80, 0.437, 81, 79), + DOF_LUT( 70, 0.505, 71, 69), + DOF_LUT( 60, 0.598, 61, 59), + DOF_LUT( 50, 0.733, 50, 50), +}; + +struct DOF_4H8 : public DOF +{ + DOF_4H8() { + lut = DOF_LUT_4H8; + lutCnt = sizeof(DOF_LUT_4H8) / sizeof(DOF_LUT); + + lensShiftOn12M = 0.003f; + lensShiftOn01M = 0.082f; // this is 0.4M's value. + } +}; + +#endif // EXYNOS_CAMERA_DOF_LUT_4H8_H diff --git a/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLutBase.h b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLutBase.h new file mode 100644 index 0000000..490aa8e --- /dev/null +++ b/libcamera/common_v2/Fusion/DofLut/ExynosCameraDofLutBase.h @@ -0,0 +1,75 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_DOF_LUT_BASE_H +#define EXYNOS_CAMERA_DOF_LUT_BASE_H + +#include "ExynosCameraFusionInclude.h" + +const DOF_LUT DOF_LUT_BASE[] = +{ + DOF_LUT(10000, 0.001, DEFAULT_DISTANCE_FAR, 2302), + DOF_LUT( 5000, 0.003, DEFAULT_DISTANCE_FAR, 1872), + DOF_LUT( 4000, 0.003, DEFAULT_DISTANCE_FAR, 1712), + DOF_LUT( 3000, 0.004, DEFAULT_DISTANCE_FAR, 1499), + DOF_LUT( 2000, 0.007, 6031, 1200), + DOF_LUT( 1900, 0.007, 5203, 1163), + DOF_LUT( 1800, 0.007, 4515, 1125), + DOF_LUT( 1700, 0.008, 3933, 1085), + DOF_LUT( 1600, 0.008, 3435, 1044), + DOF_LUT( 1500, 0.009, 3004, 1000), + DOF_LUT( 1400, 0.010, 2627, 955), + DOF_LUT( 1300, 0.010, 2295, 907), + DOF_LUT( 1200, 0.011, 2000, 858), + DOF_LUT( 1100, 0.012, 1736, 805), + DOF_LUT( 1000, 0.013, 1499, 751), + DOF_LUT( 900, 0.015, 1285, 693), + DOF_LUT( 800, 0.017, 1090, 632), + DOF_LUT( 700, 0.019, 912, 568), + DOF_LUT( 600, 0.022, 749, 501), + DOF_LUT( 500, 0.027, 599, 429), + DOF_LUT( 450, 0.030, 528, 392), + DOF_LUT( 400, 0.034, 461, 354), + DOF_LUT( 350, 0.039, 395, 314), + DOF_LUT( 300, 0.045, 333, 273), + DOF_LUT( 250, 0.054, 272, 231), + DOF_LUT( 200, 0.068, 214, 188), + DOF_LUT( 150, 0.091, 158, 143), + DOF_LUT( 140, 0.098, 147, 134), + DOF_LUT( 130, 0.106, 136, 125), + DOF_LUT( 120, 0.115, 125, 116), + DOF_LUT( 110, 0.126, 114, 106), + DOF_LUT( 100, 0.139, 103, 97), + DOF_LUT( 90, 0.155, 93, 88), + DOF_LUT( 80, 0.175, 82, 78), + DOF_LUT( 70, 0.202, 72, 69), + DOF_LUT( 60, 0.237, 61, 59), + DOF_LUT( 50, 0.289, 51, 49), +}; + +struct DOF_BASE : public DOF +{ + DOF_BASE() { + lut = DOF_LUT_BASE; + lutCnt = sizeof(DOF_LUT_BASE) / sizeof(DOF_LUT); + + lensShiftOn12M = 0.001f; + lensShiftOn01M = 0.034f; // this is 0.4M's value. + } +}; + +#endif // EXYNOS_CAMERA_DOF_LUT_BASE_H diff --git a/libcamera/common_v2/Fusion/ExynosCameraFusionInclude.h b/libcamera/common_v2/Fusion/ExynosCameraFusionInclude.h new file mode 100644 index 0000000..8d0441c --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraFusionInclude.h @@ -0,0 +1,41 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_FUSION_INCLUDE_H +#define EXYNOS_CAMERA_FUSION_INCLUDE_H + +using namespace android; + +#include + +#include "ExynosCameraSingleton.h" +#include "ExynosCameraSizeTable.h" +#include "ExynosCameraBuffer.h" +#include "ExynosCameraBufferManager.h" +#include "ExynosCameraUtils.h" +#include "ExynosCameraActivityAutofocus.h" + +#include "ExynosCameraDofLut.h" +#include "ExynosCameraDofLutBase.h" +#include "ExynosCameraDofLut3L8.h" +#include "ExynosCameraDofLut4H8.h" +#include "ExynosCameraDofLut3M3.h" + +#include "ExynosCameraFusionMetaDataConverter.h" +#include "ExynosCameraFusionWrapper.h" + +#endif diff --git a/libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.cpp b/libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.cpp new file mode 100644 index 0000000..284bd9c --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.cpp @@ -0,0 +1,456 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +#define LOG_TAG "ExynosCameraFusionMetaDataConverter" + +#include "ExynosCameraFusionMetaDataConverter.h" + +//#define EXYNOS_CAMERA_FUSION_META_DATA_CONVERTER_DEBUG + +#ifdef EXYNOS_CAMERA_FUSION_META_DATA_CONVERTER_DEBUG +#define META_CONVERTER_LOG CLOGD +#else +#define META_CONVERTER_LOG CLOGV +#endif + +void ExynosCameraFusionMetaDataConverter::translateFocusPos(int cameraId, + camera2_shot_ext *shot_ext, + DOF *dof, + float *nearFieldCm, + float *lensShiftUm, + float *farFieldCm) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + // FW's meta + float currentPos = (float)shot_ext->shot.udm.af.lensPositionCurrent; + float calibCurrentPos = currentPos; + + // FW's meta + float macroPos = (float)shot_ext->shot.udm.af.lensPositionMacro; + + // FW's meta + float infinityPos = (float)shot_ext->shot.udm.af.lensPositionInfinity; + + if (macroPos < calibCurrentPos) + calibCurrentPos = macroPos; + + if (calibCurrentPos < infinityPos) + calibCurrentPos = infinityPos; + + // convert pos to lens shift + *lensShiftUm = 0; + float tempPos = (calibCurrentPos - infinityPos) * (dof->lensShiftOn01M - dof->lensShiftOn12M); + float fullPos = (macroPos - infinityPos); + + if (tempPos != 0.0f && fullPos != 0.0f) { + *lensShiftUm = tempPos / fullPos; + } + + *lensShiftUm += dof->lensShiftOn12M; + + if (*lensShiftUm < 0.0f) { + META_CONVERTER_LOG("DEBUG(%s[%d]):*lensShiftUm(%f) <= 0.0f. 
so, set 0.0f",__FUNCTION__, __LINE__, *lensShiftUm); + *lensShiftUm = 0.0f; + } + + // DOF table + lens_shift_um + *nearFieldCm = m_findLensField(m_cameraId, dof, *lensShiftUm, false); + + // DOF table + lens_shift_um + *farFieldCm = m_findLensField(m_cameraId, dof, *lensShiftUm, true); + + // mm -> cm + if (*nearFieldCm != 0.0f) + *nearFieldCm /= 10.0f; + + // mm -> cm + if (*farFieldCm != 0.0f) + *farFieldCm /= 10.0f; + + // mm -> um + *lensShiftUm *= 1000.0f; + + META_CONVERTER_LOG("DEBUG(%s[%d]):macroPos(%f), currentPos(%f), calibCurrentPos(%f), infinityPos(%f), lensShiftOn01M(%f), lensShiftOn12M(%f) -> nearFieldCm(%f), lensShiftUm(%f), farFieldCm(%f)", + __FUNCTION__, __LINE__, + macroPos, + currentPos, + calibCurrentPos, + infinityPos, + dof->lensShiftOn01M, + dof->lensShiftOn12M, + *nearFieldCm, + *lensShiftUm, + *farFieldCm); +} + +ExynosRect ExynosCameraFusionMetaDataConverter::translateFocusRoi(int cameraId, + struct camera2_shot_ext *shot_ext) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + ExynosRect rect; + + // Focus ROI + rect.x = shot_ext->shot.dm.aa.afRegions[0]; + rect.y = shot_ext->shot.dm.aa.afRegions[1]; + rect.w = shot_ext->shot.dm.aa.afRegions[2] - shot_ext->shot.dm.aa.afRegions[0]; + rect.h = shot_ext->shot.dm.aa.afRegions[3] - shot_ext->shot.dm.aa.afRegions[1]; + rect.fullW = rect.w; + rect.fullH = rect.h; + + META_CONVERTER_LOG("DEBUG(%s[%d]):afRegions:(%d, %d, %d, %d)", + __FUNCTION__, __LINE__, + rect.x, + rect.y, + rect.w, + rect.h); + + return rect; +} + +bool ExynosCameraFusionMetaDataConverter::translateAfStatus(int cameraId, + struct camera2_shot_ext *shot_ext) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + bool ret = false; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + ExynosCameraActivityAutofocus::AUTOFOCUS_STATE afState = ExynosCameraActivityAutofocus::afState2AUTOFOCUS_STATE(shot_ext->shot.dm.aa.afState); + + switch (afState) { + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SUCCEESS: + ret = true; + break; + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_FAIL: + case ExynosCameraActivityAutofocus::AUTOFOCUS_STATE_SCANNING: + default: + ret = false; + break; + } + + META_CONVERTER_LOG("DEBUG(%s[%d]):afState:(%d) ExynosCameraActivityAutofocus::AUTOFOCUS_STATE(%d), ret(%d)", + __FUNCTION__, __LINE__, + shot_ext->shot.dm.aa.afState, afState, ret); + + return ret; +} + +float ExynosCameraFusionMetaDataConverter::translateAnalogGain(int cameraId, + struct camera2_shot_ext *shot_ext) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + // AE gain + META_CONVERTER_LOG("DEBUG(%s[%d]):analogGain:(%d)", + __FUNCTION__, __LINE__, + shot_ext->shot.udm.sensor.analogGain); + + return (float)shot_ext->shot.udm.sensor.analogGain; +} + +void ExynosCameraFusionMetaDataConverter::translateScalerSetting(int cameraId, + struct camera2_shot_ext *shot_ext, + int perFramePos, + ExynosRect *yRect, + ExynosRect *cbcrRect) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (shot_ext 
== NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + yRect->w = shot_ext->node_group.capture[perFramePos].output.cropRegion[2] - + shot_ext->node_group.capture[perFramePos].output.cropRegion[0]; + + yRect->h = shot_ext->node_group.capture[perFramePos].output.cropRegion[3] - + shot_ext->node_group.capture[perFramePos].output.cropRegion[1]; + + yRect->fullW = yRect->w; + yRect->fullH = yRect->h; + + cbcrRect->w = yRect->w / 2; + cbcrRect->h = yRect->h; + + cbcrRect->fullW = cbcrRect->w; + cbcrRect->fullH = cbcrRect->h; + + META_CONVERTER_LOG("DEBUG(%s[%d]):scalerSetting:perFramePos(%d) y(%d x %d) cbcr(%d x %d)", + __FUNCTION__, __LINE__, + perFramePos, + yRect->w, + yRect->h, + cbcrRect->w, + cbcrRect->h); +} + +void ExynosCameraFusionMetaDataConverter::translateCropSetting(int cameraId, + struct camera2_shot_ext *shot_ext, + int perFramePos, + ExynosRect2 *yRect, + ExynosRect2 *cbcrRect) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + ExynosRect scalerYRect; + ExynosRect scalerCbRect; + + translateScalerSetting(cameraId, shot_ext, perFramePos, &scalerYRect, &scalerCbRect); + + yRect->x1 = 0; + yRect->x2 = scalerYRect.w; + + yRect->y1 = 0; + yRect->y2 = scalerYRect.h; + + cbcrRect->x1 = 0; + cbcrRect->x2 = scalerCbRect.w; + + cbcrRect->y1 = 0; + cbcrRect->y2 = scalerCbRect.h; + + META_CONVERTER_LOG("DEBUG(%s[%d]):cropSetting:perFramePos(%d) y(%d<->%d x %d<->%d) cbcr(%d<->%d x %d<->%d)", + __FUNCTION__, __LINE__, + perFramePos, + yRect->x1, yRect->x2, yRect->y1, yRect->y2, + cbcrRect->x1, cbcrRect->x2, cbcrRect->y1, cbcrRect->y2); +} + +float ExynosCameraFusionMetaDataConverter::translateZoomRatio(int cameraId, + struct camera2_shot_ext *shot_ext) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + // zoomRatio + float zoomRatio = shot_ext->shot.udm.zoomRatio; + + //getMetaCtlZoom(shot_ext, &zoomRatio); + + META_CONVERTER_LOG("DEBUG(%s[%d]):zoomRatio:(%f)", + __FUNCTION__, __LINE__, + zoomRatio); + + return zoomRatio; +} + +void ExynosCameraFusionMetaDataConverter::translate2Parameters(int cameraId, + CameraParameters *params, + struct camera2_shot_ext *shot_ext, + DOF *dof, + ExynosRect pictureRect) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + status_t ret = NO_ERROR; + + if (shot_ext == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):shot_ext == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + /////////////////////////////////////////////// + // Focus distances + float nearFieldCm = 0.0f; + float lensShiftUm = 0.0f; + float farFieldCm = 0.0f; + + translateFocusPos(cameraId, shot_ext, dof, &nearFieldCm, &lensShiftUm, &farFieldCm); + + CLOGD("DEBUG(%s[%d]):focus-distances: nearFiledCm(%f), lensShiftUm(%f), farFiledCm(%f)", + __FUNCTION__, __LINE__, + nearFieldCm, + lensShiftUm, + farFieldCm); + + char tempStr[EXYNOS_CAMERA_NAME_STR_SIZE]; + + sprintf(tempStr, "%.1f,%.1f,%.1f", nearFieldCm, lensShiftUm, farFieldCm); + params->set(CameraParameters::KEY_FOCUS_DISTANCES, tempStr); + + 
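+ /*
+  * Note: the comma-separated string written above packs nearFieldCm,
+  * lensShiftUm and farFieldCm (in that order), as computed by
+  * translateFocusPos() from the sensor's DOF lookup table.
+  */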
/////////////////////////////////////////////// + // Focus ROI + ExynosRect bayerRoiRect; + bayerRoiRect = translateFocusRoi(cameraId, shot_ext); + + // calibrate to picture size. + ExynosRect pictureRoiRect; + float wRatio = (float)pictureRect.w / (float)((bayerRoiRect.x * 2) + bayerRoiRect.w); + float hRatio = (float)pictureRect.h / (float)((bayerRoiRect.y * 2) + bayerRoiRect.h); + + pictureRoiRect.x = (int)((float)bayerRoiRect.x * wRatio); + pictureRoiRect.y = (int)((float)bayerRoiRect.y * hRatio); + pictureRoiRect.w = (int)((float)bayerRoiRect.w * wRatio); + pictureRoiRect.h = (int)((float)bayerRoiRect.h * hRatio); + + params->set("roi_startx", pictureRoiRect.x); + params->set("roi_starty", pictureRoiRect.y); + params->set("roi_width", pictureRoiRect.w); + params->set("roi_height", pictureRoiRect.h); + + CLOGD("DEBUG(%s[%d]):Roi(afRegion):bayerRoiRect(%d, %d, %d, %d) -> pictureRoiRect(%d, %d, %d, %d) in pictureSize(%d x %d)", + __FUNCTION__, __LINE__, + bayerRoiRect.x, bayerRoiRect.y, bayerRoiRect.w, bayerRoiRect.h, + pictureRoiRect.x, pictureRoiRect.y, pictureRoiRect.w, pictureRoiRect.h, + pictureRect.w, pictureRect.h); + + /////////////////////////////////////////////// + // AE gain + float analogGain = 0.0f; + + analogGain = translateAnalogGain(cameraId, shot_ext); + + // min : 100 + float analogGainRatio = (float)(shot_ext->shot.udm.sensor.analogGain) / 100.0f; + + CLOGD("DEBUG(%s[%d]):ae_info_gain(analogGain):(%f), analogGainRatio(%f)", __FUNCTION__, __LINE__, analogGain, analogGainRatio); + + params->setFloat("ae_info_gain", analogGainRatio); +} + +float ExynosCameraFusionMetaDataConverter::m_findLensField(int cameraId, + DOF *dof, + float currentLensShift, + bool flagFar) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + if (dof == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):dof == NULL on cameraId(%d), assert!!!!", __FUNCTION__, __LINE__, m_cameraId); + } + + bool found = false; + int foundIndex = 0; + + float targetShift = 0.0; + float targetField = 0.0f; + + // variables for interpolation + float weight = 0.0f; + float leftLensShift = 0.0f; + float leftField = 0.0f; + float rightLensShift = 0.0f; + float rightField = 0.0f; + + if (flagFar == true) + targetField = DEFAULT_DISTANCE_FAR; + else + targetField = DEFAULT_DISTANCE_NEAR; + + for (foundIndex = 0; foundIndex < dof->lutCnt; foundIndex++) { + if (currentLensShift == dof->lut[foundIndex].lensShift) { + if (flagFar == true) + targetField = dof->lut[foundIndex].farField; + else + targetField = dof->lut[foundIndex].nearField; + + found = true; + break; + } else if (currentLensShift < dof->lut[foundIndex].lensShift) { + break; + } + } + + if (found == true) { + targetShift = dof->lut[foundIndex].lensShift; + } else { + if (dof->lutCnt == 0) { + CLOGW("WARN(%s[%d]):use targetField(%f), by currentLensShift(%f), dof->lutCnt(%d)", + __FUNCTION__, __LINE__, targetField, currentLensShift, dof->lutCnt); + } else { + // clipping in the end of table + if (foundIndex == 0 || dof->lutCnt - 1 < foundIndex) { + if (dof->lutCnt - 1 < foundIndex) + foundIndex = dof->lutCnt - 1; + + if (flagFar == true) + targetField = dof->lut[foundIndex].farField; + else + targetField = dof->lut[foundIndex].nearField; + + META_CONVERTER_LOG("DEBUG(%s[%d]):clip on foundIndex(%d)", __FUNCTION__, __LINE__, foundIndex); + + } else { // calibrate between two value. 
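+ // Linear interpolation between the two neighbouring LUT entries:
+ //   weight      = (currentLensShift - leftLensShift) / (rightLensShift - leftLensShift)
+ //   targetField = leftField + (rightField - leftField) * weight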
+ leftLensShift = dof->lut[foundIndex-1].lensShift; + rightLensShift = dof->lut[foundIndex].lensShift; + + if (flagFar == true) { + leftField = dof->lut[foundIndex-1].farField; + rightField = dof->lut[foundIndex].farField; + } else { + leftField = dof->lut[foundIndex-1].nearField; + rightField = dof->lut[foundIndex].nearField; + } + + weight = (currentLensShift - leftLensShift) / (rightLensShift - leftLensShift); + + targetField = ((rightField - leftField) * weight) + leftField; + + CLOGD("DEBUG(%s[%d]):calibrated on foundIndex(%d), weight(%f) = currentLensShift(%f) - leftLensShift(%f)) / (rightLensShift(%f) - leftLensShift(%f))", + __FUNCTION__, __LINE__, foundIndex, weight, currentLensShift, leftLensShift, rightLensShift, leftLensShift); + } + + targetShift = dof->lut[foundIndex].lensShift; + } + } + +done: + META_CONVERTER_LOG("DEBUG(%s[%d]):use foundIndex(%d) targetShift(%f)'s targetField(%f), by currentLensShift(%f), dof->lutCnt(%d)", + __FUNCTION__, __LINE__, foundIndex, targetShift, targetField, currentLensShift, dof->lutCnt); + + return targetField; +} diff --git a/libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.h b/libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.h new file mode 100644 index 0000000..f74f1ad --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraFusionMetaDataConverter.h @@ -0,0 +1,93 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosCameraFusionMetaDataConverter.h + * \brief header file for ExynosCameraFusionMetaDataConverter + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2015/06/24 + * + * Revision History: + * - 2014/10/08 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef EXYNOS_CAMERA_FUSION_META_CONVERTER_H +#define EXYNOS_CAMERA_FUSION_META_CONVERTER_H + +#include "string.h" +#include +#include + +#include "ExynosCameraFusionInclude.h" + +//! ExynosCameraFusionMetaDataConverter +/*! 
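+ * \brief Static helpers that translate camera2_shot_ext metadata (focus
+ *        position, AF ROI and AF state, analog gain, scaler/crop settings,
+ *        zoom ratio) into DOF-based focus fields and CameraParameters
+ *        entries consumed by the fusion wrapper.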
+ * \ingroup ExynosCamera + */ +class ExynosCameraFusionMetaDataConverter +{ +private: + ExynosCameraFusionMetaDataConverter(){}; + virtual ~ExynosCameraFusionMetaDataConverter(){}; + +public: + static void translateFocusPos(int cameraId, + camera2_shot_ext *shot_ext, + DOF *dof, + float *nearFieldCm, + float *lensShiftUm, + float *farFieldCm); + + static ExynosRect translateFocusRoi(int cameraId, + camera2_shot_ext *shot_ext); + + static bool translateAfStatus(int cameraId, + camera2_shot_ext *shot_ext); + + static float translateAnalogGain(int cameraId, + camera2_shot_ext *shot_ext); + + static void translateScalerSetting(int cameraId, + camera2_shot_ext *shot_ext, + int perFramePos, + ExynosRect *yRect, + ExynosRect *cbcrRect); + + static void translateCropSetting(int cameraId, + camera2_shot_ext *shot_ext, + int perFramePos, + ExynosRect2 *yRect, + ExynosRect2 *cbcrRect); + + static float translateZoomRatio(int cameraId, + camera2_shot_ext *shot_ext); + + static void translate2Parameters(int cameraId, + CameraParameters *params, + struct camera2_shot_ext *shot_ext, + DOF *dof, + ExynosRect pictureRect); + +private: + static float m_findLensField(int cameraId, + DOF *dof, + float currentLensShift, + bool flagFar); +}; + +#endif //EXYNOS_CAMERA_FUSION_META_CONVERTER_H diff --git a/libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.cpp b/libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.cpp new file mode 100644 index 0000000..bc4182a --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.cpp @@ -0,0 +1,354 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +#define LOG_TAG "ExynosCameraFusionWrapper" + +#include "ExynosCameraFusionWrapper.h" + +ExynosCameraFusionWrapper::ExynosCameraFusionWrapper() +{ + ALOGD("DEBUG(%s[%d]):new ExynosCameraFusionWrapper object allocated", __FUNCTION__, __LINE__); + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + m_flagCreated[i] = false; + + m_width[i] = 0; + m_height[i] = 0; + m_stride[i] = 0; + } +} + +ExynosCameraFusionWrapper::~ExynosCameraFusionWrapper() +{ + status_t ret = NO_ERROR; + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (flagCreate(i) == true) { + ret = destroy(i); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):destroy(%d) fail", __FUNCTION__, __LINE__, i); + } + } + } +} + +status_t ExynosCameraFusionWrapper::create(int cameraId, + int srcWidth, int srcHeight, + int dstWidth, int dstHeight, + char *calData, int calDataSize) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + Mutex::Autolock lock(m_createLock); + + status_t ret = NO_ERROR; + + if (CAMERA_ID_MAX <= cameraId) { + CLOGE("ERR(%s[%d]):invalid cameraId(%d). so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (m_flagCreated[cameraId] == true) { + CLOGE("ERR(%s[%d]):cameraId(%d) is alread created. 
so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (srcWidth == 0 || srcWidth == 0 || dstWidth == 0 || dstHeight == 0) { + CLOGE("ERR(%s[%d]):srcWidth == %d || srcWidth == %d || dstWidth == %d || dstHeight == %d. so, fail", + __FUNCTION__, __LINE__, srcWidth, srcWidth, dstWidth, dstHeight); + return INVALID_OPERATION; + } + + m_init(cameraId); + + CLOGD("DEBUG(%s[%d]):create(calData(%p), calDataSize(%d), srcWidth(%d), srcHeight(%d), dstWidth(%d), dstHeight(%d)", + __FUNCTION__, __LINE__, + calData, calDataSize, srcWidth, srcHeight, dstWidth, dstHeight); + + // set info int width, int height, int stride + m_width [cameraId] = srcWidth; + m_height [cameraId] = srcHeight; + m_stride [cameraId] = srcWidth; + + // declare it created + m_flagCreated[cameraId] = true; + + return NO_ERROR; +} + +status_t ExynosCameraFusionWrapper::destroy(int cameraId) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + Mutex::Autolock lock(m_createLock); + + status_t ret = NO_ERROR; + + if (CAMERA_ID_MAX <= cameraId) { + CLOGE("ERR(%s[%d]):invalid cameraId(%d). so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + if (m_flagCreated[cameraId] == false) { + CLOGE("ERR(%s[%d]):cameraId(%d) is alread destroyed. so, fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + CLOGD("DEBUG(%s[%d]):destroy()", __FUNCTION__, __LINE__); + + m_flagCreated[cameraId] = false; + + return NO_ERROR; +} + +bool ExynosCameraFusionWrapper::flagCreate(int cameraId) +{ + Mutex::Autolock lock(m_createLock); + + if (CAMERA_ID_MAX <= cameraId) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):invalid cameraId(%d), assert!!!!", + __FUNCTION__, __LINE__, cameraId); + } + + return m_flagCreated[cameraId]; +} + +bool ExynosCameraFusionWrapper::flagReady(int cameraId) +{ + return m_flagCreated[cameraId]; +} + +status_t ExynosCameraFusionWrapper::execute(int cameraId, + __unused struct camera2_shot_ext *shot_ext[], __unused DOF *dof[], + ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], __unused ExynosCameraBufferManager *srcBufferManager[], + ExynosCameraBuffer dstBuffer, ExynosRect dstRect, __unused ExynosCameraBufferManager *dstBufferManager) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + status_t ret = NO_ERROR; + + if (this->flagCreate(cameraId) == false) { + CLOGE("ERR(%s[%d]):flagCreate(%d) == false. 
so fail", __FUNCTION__, __LINE__, cameraId); + return INVALID_OPERATION; + } + + m_emulationProcessTimer.start(); + + ret = m_execute(cameraId, + srcBuffer, srcRect, + dstBuffer, dstRect); + m_emulationProcessTimer.stop(); + m_emulationProcessTime = (int)m_emulationProcessTimer.durationUsecs(); + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_execute() fail", __FUNCTION__, __LINE__); + } + + return ret; +} + +status_t ExynosCameraFusionWrapper::m_execute(int cameraId, + ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], + ExynosCameraBuffer dstBuffer, ExynosRect dstRect) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + status_t ret = NO_ERROR; + + char *srcYAddr = NULL; + char *srcCbcrAddr = NULL; + + char *dstYAddr = NULL; + char *dstCbcrAddr = NULL; + + unsigned int bpp = 0; + unsigned int planeCount = 1; + + int srcPlaneSize = 0; + int srcHalfPlaneSize = 0; + + int dstPlaneSize = 0; + int dstHalfPlaneSize = 0; + + int copySize = 0; + + /* + * if previous emulationProcessTime is slow than 33msec, + * we need change the next copy time + * + * ex : + * frame 0 : + * 1.0(copyRatio) = 33333 / 33333(previousFusionProcessTime : init value) + * 1.0 (copyRatio) = 1.0(copyRatio) * 1.0(m_emulationCopyRatio); + * m_emulationCopyRatio = 1.0 + * m_emulationProcessTime = 66666 + + * frame 1 : because of frame0's low performance, shrink down copyRatio. + * 0.5(copyRatio) = 33333 / 66666(previousFusionProcessTime) + * 0.5(copyRatio) = 0.5(copyRatio) * 1.0(m_emulationCopyRatio); + * m_emulationCopyRatio = 0.5 + * m_emulationProcessTime = 33333 + + * frame 2 : acquire the proper copy time + * 1.0(copyRatio) = 33333 / 33333(previousFusionProcessTime) + * 0.5(copyRatio) = 1.0(copyRatio) * 0.5(m_emulationCopyRatio); + * m_emulationCopyRatio = 0.5 + * m_emulationProcessTime = 16666 + + * frame 3 : because of frame2's fast performance, increase copyRatio. + * 2.0(copyRatio) = 33333 / 16666(previousFusionProcessTime) + * 1.0(copyRatio) = 2.0(copyRatio) * 0.5(m_emulationCopyRatio); + * m_emulationCopyRatio = 1.0 + * m_emulationProcessTime = 33333 + */ + int previousFusionProcessTime = m_emulationProcessTime; + if (previousFusionProcessTime <= 0) + previousFusionProcessTime = 1; + + float copyRatio = (float)FUSION_PROCESSTIME_STANDARD / (float)previousFusionProcessTime; + copyRatio = copyRatio * m_emulationCopyRatio; + + if (1.0f <= copyRatio) { + copyRatio = 1.0f; + } else if (0.1f < copyRatio) { + copyRatio -= 0.05f; // threshold value : 5% + } else { + CLOGW("WARN(%s[%d]):copyRatio(%d) is too smaller than 0.1f. previousFusionProcessTime(%d), m_emulationCopyRatio(%f)", + __FUNCTION__, __LINE__, copyRatio, previousFusionProcessTime, m_emulationCopyRatio); + } + + m_emulationCopyRatio = copyRatio; + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + srcPlaneSize = srcRect[i].fullW * srcRect[i].fullH; + srcHalfPlaneSize = srcPlaneSize / 2; + + dstPlaneSize = dstRect.fullW * dstRect.fullH; + dstHalfPlaneSize = dstPlaneSize / 2; + + copySize = (srcHalfPlaneSize < dstHalfPlaneSize) ? 
srcHalfPlaneSize : dstHalfPlaneSize; + + ret = getYuvFormatInfo(srcRect[i].colorFormat, &bpp, &planeCount); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getYuvFormatInfo(srcRect[%d].colorFormat(%x)) fail", __FUNCTION__, __LINE__, i, srcRect[i].colorFormat); + } + + srcYAddr = srcBuffer[i].addr[0]; + dstYAddr = dstBuffer.addr[0]; + + switch (planeCount) { + case 1: + srcCbcrAddr = srcBuffer[i].addr[0] + srcRect[i].fullW * srcRect[i].fullH; + dstCbcrAddr = dstBuffer.addr[0] + dstRect.fullW * dstRect.fullH; + break; + case 2: + srcCbcrAddr = srcBuffer[i].addr[1]; + dstCbcrAddr = dstBuffer.addr[1]; + break; + default: + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Invalid planeCount(%d), assert!!!!", + __FUNCTION__, __LINE__, planeCount); + break; + } + + EXYNOS_CAMERA_FUSION_WRAPPER_DEBUG_LOG("DEBUG(%s[%d]):fusion emulationn running ~~~ memcpy(%d, %d, %d) by src(%d, %d), dst(%d, %d), previousFusionProcessTime(%d) copyRatio(%f)", + __FUNCTION__, __LINE__, + dstBuffer.addr[0], srcBuffer[i].addr[0], copySize, + srcRect[i].fullW, srcRect[i].fullH, + dstRect.fullW, dstRect.fullH, + previousFusionProcessTime, copyRatio); + + if (i == m_subCameraId) { + dstYAddr += dstHalfPlaneSize; + dstCbcrAddr += dstHalfPlaneSize / 2; + } + + if (srcRect[i].fullW == dstRect.fullW && + srcRect[i].fullH == dstRect.fullH) { + int oldCopySize = copySize; + copySize = (int)((float)copySize * copyRatio); + + if (oldCopySize < copySize) { + CLOGW("WARN(%s[%d]):oldCopySize(%d) < copySize(%d). just adjust oldCopySize", + __FUNCTION__, __LINE__, oldCopySize, copySize); + + copySize = oldCopySize; + } + + memcpy(dstYAddr, srcYAddr, copySize); + memcpy(dstCbcrAddr, srcCbcrAddr, copySize / 2); + } else { + int width = (srcRect[i].fullW < dstRect.fullW) ? srcRect[i].fullW : dstRect.fullW; + int height = (srcRect[i].fullH < dstRect.fullH) ? srcRect[i].fullH : dstRect.fullH; + + int oldHeight = height; + height = (int)((float)height * copyRatio); + + if (oldHeight < height) { + CLOGW("WARN(%s[%d]):oldHeight(%d) < height(%d). just adjust oldHeight", + __FUNCTION__, __LINE__, oldHeight, height); + + height = oldHeight; + } + + for (int h = 0; h < height / 2; h++) { + memcpy(dstYAddr, srcYAddr, width); + srcYAddr += srcRect[i].fullW; + dstYAddr += dstRect.fullW; + } + + for (int h = 0; h < height / 4; h++) { + memcpy(dstCbcrAddr, srcCbcrAddr, width); + srcCbcrAddr += srcRect[i].fullW; + dstCbcrAddr += dstRect.fullW; + } + } + } + + return ret; +} +void ExynosCameraFusionWrapper::m_init(int cameraId) +{ + // hack for CLOG + int m_cameraId = cameraId; + const char *m_name = ""; + + getDualCameraId(&m_mainCameraId, &m_subCameraId); + + CLOGD("DEBUG(%s[%d]):m_mainCameraId(CAMERA_ID_%d), m_subCameraId(CAMERA_ID_%d)", + __FUNCTION__, __LINE__, m_mainCameraId, m_subCameraId); + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + m_flagValidCameraId[i] = false; + } + + m_flagValidCameraId[m_mainCameraId] = true; + m_flagValidCameraId[m_subCameraId] = true; + + m_emulationProcessTime = FUSION_PROCESSTIME_STANDARD; + m_emulationCopyRatio = 1.0f; +} diff --git a/libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.h b/libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.h new file mode 100644 index 0000000..bfedae8 --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraFusionWrapper.h @@ -0,0 +1,111 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosCameraFusionWrapper.h + * \brief header file for ExynosCameraFusionWrapper + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2014/10/08 + * + * Revision History: + * - 2014/10/08 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef EXYNOS_CAMERA_FUSION_WRAPPER_H +#define EXYNOS_CAMERA_FUSION_WRAPPER_H + +#include "string.h" +#include +#include +#include + +#include "ExynosCameraFusionInclude.h" + +using namespace android; + +//#define EXYNOS_CAMERA_FUSION_WRAPPER_DEBUG + +#ifdef EXYNOS_CAMERA_FUSION_WRAPPER_DEBUG +#define EXYNOS_CAMERA_FUSION_WRAPPER_DEBUG_LOG CLOGD +#else +#define EXYNOS_CAMERA_FUSION_WRAPPER_DEBUG_LOG CLOGV +#endif + +#define FUSION_PROCESSTIME_STANDARD (34000) + +//! ExynosCameraFusionWrapper +/*! + * \ingroup ExynosCamera + */ +class ExynosCameraFusionWrapper +{ +protected: + friend class ExynosCameraSingleton; + + //! Constructor + ExynosCameraFusionWrapper(); + //! Destructor + virtual ~ExynosCameraFusionWrapper(); + +public: + //! create + virtual status_t create(int cameraId, + int srcWidth, int srcHeight, + int dstWidth, int dstHeight, + char *calData = NULL, int calDataSize = 0); + + //! destroy + virtual status_t destroy(int cameraId); + + //! flagCreate + virtual bool flagCreate(int cameraId); + + //! flagReady to run execute + virtual bool flagReady(int cameraId); + + //! execute + virtual status_t execute(int cameraId, + struct camera2_shot_ext *shot_ext[], DOF *dof[], + ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], ExynosCameraBufferManager *srcBufferManager[], + ExynosCameraBuffer dstBuffer, ExynosRect dstRect, ExynosCameraBufferManager *dstBufferManager); + +protected: + void m_init(int cameraId); + + status_t m_execute(int cameraId, + ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], + ExynosCameraBuffer dstBuffer, ExynosRect dstRect); + +protected: + bool m_flagCreated[CAMERA_ID_MAX]; + Mutex m_createLock; + + int m_mainCameraId; + int m_subCameraId; + bool m_flagValidCameraId[CAMERA_ID_MAX]; + + int m_width [CAMERA_ID_MAX]; + int m_height [CAMERA_ID_MAX]; + int m_stride [CAMERA_ID_MAX]; + + ExynosCameraDurationTimer m_emulationProcessTimer; + int m_emulationProcessTime; + float m_emulationCopyRatio; +}; + +#endif //EXYNOS_CAMERA_FUSION_WRAPPER_H diff --git a/libcamera/common_v2/Fusion/ExynosCameraPipeFusion.cpp b/libcamera/common_v2/Fusion/ExynosCameraPipeFusion.cpp new file mode 100644 index 0000000..3771807 --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraPipeFusion.cpp @@ -0,0 +1,661 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeFusion" +#include + +#include "ExynosCameraPipeFusion.h" + +namespace android { + +//#define EXYNOS_CAMERA_FUSION_PIPE_DEBUG + +#ifdef EXYNOS_CAMERA_FUSION_PIPE_DEBUG +#define EXYNOS_CAMERA_FUSION_PIPE_DEBUG_LOG CLOGD +#else +#define EXYNOS_CAMERA_FUSION_PIPE_DEBUG_LOG CLOGV +#endif + +ExynosCameraPipeFusion::~ExynosCameraPipeFusion() +{ + this->destroy(); +} + +status_t ExynosCameraPipeFusion::create(__unused int32_t *sensorIds) +{ + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeFusion::m_mainThreadFunc, "FusionThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + CLOGI("INFO(%s[%d]):create() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFusion::destroy(void) +{ + status_t ret = NO_ERROR; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + ret = m_destroyFusion(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_destroyFusion() fail", __FUNCTION__, __LINE__); + return ret; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFusion::start(void) +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + // setInfo when start + ExynosCameraDualPreviewFrameSelector *dualPreviewFrameSelector = ExynosCameraSingleton::getInstance(); + ret = dualPreviewFrameSelector->setInfo(m_cameraId, 2, m_parameters->getDOF()); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):dualPreviewFrameSelector->setInfo(id(%d)", + __FUNCTION__, __LINE__, m_cameraId); + return ret; + } + + /* + * postpone create fusion library on m_run() + * because, fusion library must call by same thread id. + * it is back camera. 
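+ * (in this code path m_createFusion() ends up being called from
+ *  m_manageFusion(), which m_run() drives on the pipe's "FusionThread")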
+ */ + /* + // create fusion library + ret = m_createFusion(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_createFusion() fail", __FUNCTION__, __LINE__); + return ret; + } + */ + + m_flagTryStop = false; + + return NO_ERROR; +} +#else +{ + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d])invalid function, assert!!!!", __FUNCTION__, __LINE__); +} +#endif // BOARD_CAMERA_USES_DUAL_CAMERA + +status_t ExynosCameraPipeFusion::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_flagTryStop = true; + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + // clear when stop + ExynosCameraDualPreviewFrameSelector *dualPreviewFrameSelector = ExynosCameraSingleton::getInstance(); + ret = dualPreviewFrameSelector->clear(m_cameraId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):dualPreviewFrameSelector->clear(%d)", __FUNCTION__, __LINE__, m_cameraId); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeFusion::startThread(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFusion::m_run(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrameEntity *entity = NULL; + + int ret = 0; + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + return NO_ERROR; + } + + entity = newFrame->searchEntityByPipeId(getPipeId()); + if (entity == NULL) { + CLOGE("ERR(%s[%d]):frame(%d) entity == NULL, skip", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + goto func_exit; + } + + if (entity->getEntityState() == ENTITY_STATE_FRAME_SKIP) { + CLOGE("ERR(%s[%d]):frame(%d) entityState(ENTITY_STATE_FRAME_SKIP), skip", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + goto func_exit; + } + + if (entity->getSrcBufState() == ENTITY_BUFFER_STATE_ERROR) { + CLOGE("ERR(%s[%d]):frame(%d) entityState(ENTITY_BUFFER_STATE_ERROR), skip", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + goto func_exit; + } + + /* + * if m_mangeFusion is fail, + * the input newFrame need to drop + */ + ret = m_manageFusion(newFrame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_manageFusion(newFrame(%d)) fail", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + goto func_exit; + } + + // the frame though fusion, will pushProcessQ in m_manageFusion() + + return NO_ERROR; + +func_exit: + ret = entity->setDstBufState(ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBufState(ENTITY_BUFFER_STATE_ERROR) fail", __FUNCTION__, __LINE__); + } + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setEntityState(%d, ENTITY_STATE_FRAME_DONE) fail", __FUNCTION__, __LINE__, getPipeId()); + } + + m_outputFrameQ->pushProcessQ(&newFrame); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFusion::m_manageFusion(ExynosCameraFrame *newFrame) 
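+/*
+ * Hands newFrame to the dual preview frame selector: the sub camera side only
+ * registers its frame and returns, while the main (back) camera side checks
+ * for a time-synced pair from both sensors and, once one is available, feeds
+ * it to the fusion step.
+ */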
+#ifdef BOARD_CAMERA_USES_DUAL_CAMERA
+{
+    status_t ret = NO_ERROR;
+
+    bool flagSynced = false;
+
+    ExynosCameraDualPreviewFrameSelector *dualPreviewFrameSelector = NULL;
+
+    ExynosCameraFrame *frame[CAMERA_ID_MAX] = {NULL, };
+    frame_queue_t *outputFrameQ[CAMERA_ID_MAX] = {NULL, };
+    ExynosCameraBufferManager *srcBufferManager[CAMERA_ID_MAX] = {NULL, };
+    ExynosCameraBufferManager *dstBufferManager = NULL;
+
+    DOF *dof[CAMERA_ID_MAX] = {NULL, };
+
+    ExynosCameraBuffer srcBuffer[CAMERA_ID_MAX];
+    ExynosRect srcRect[CAMERA_ID_MAX];
+
+    ExynosCameraBuffer dstBuffer;
+    ExynosRect dstRect;
+
+    struct camera2_shot_ext src_shot_ext[CAMERA_ID_MAX];
+    struct camera2_shot_ext *ptr_src_shot_ext[CAMERA_ID_MAX] = {NULL, };
+
+    struct camera2_udm tempUdm;
+
+    for (int i = 0; i < CAMERA_ID_MAX; i++) {
+        frame[i] = NULL;
+        outputFrameQ[i] = NULL;
+        srcBufferManager[i] = NULL;
+
+        dof[i] = NULL;
+
+        ptr_src_shot_ext[i] = NULL;
+    }
+
+    frame[m_cameraId] = newFrame;
+    outputFrameQ[m_cameraId] = m_outputFrameQ;
+    srcBufferManager[m_cameraId] = m_bufferManager[OUTPUT_NODE];
+    dstBufferManager = m_bufferManager[CAPTURE_NODE];
+
+    /* create the fusion library (skipped if it is already created) */
+    ret = m_createFusion();
+    if (ret != NO_ERROR) {
+        CLOGE("ERR(%s[%d]):m_createFusion() fail", __FUNCTION__, __LINE__);
+        goto func_exit;
+    }
+
+    if (m_flagReadyFusion() == false) {
+        goto func_exit;
+    }
+
+    /* update zoomRatio intentionally for a fast zoom effect */
+    newFrame->getUserDynamicMeta(&tempUdm);
+    tempUdm.zoomRatio = m_parameters->getZoomRatio();
+    newFrame->storeUserDynamicMeta(&tempUdm);
+
+    dualPreviewFrameSelector = ExynosCameraSingleton<ExynosCameraDualPreviewFrameSelector>::getInstance();
+    ret = dualPreviewFrameSelector->managePreviewFrameHoldList(m_cameraId,
+                                                               m_outputFrameQ,
+                                                               newFrame,
+                                                               getPipeId(),
+                                                               true,
+                                                               0,
+                                                               srcBufferManager[m_cameraId]);
+
+    if (ret != NO_ERROR) {
+        CLOGE("ERR(%s[%d]):dualPreviewFrameSelector->managePreviewFrameHoldList(%d) fail", __FUNCTION__, __LINE__, m_cameraId);
+        return ret;
+    }
+
+    /*
+     * the front (sub) camera just registers its frame and returns here.
+     * the back (main) camera goes on to check sync and run fusion.
+     */
+    if (m_cameraId == m_subCameraId) {
+        return ret;
+    }
+
+    flagSynced = dualPreviewFrameSelector->selectFrames(m_cameraId,
+                     &frame[m_mainCameraId], &outputFrameQ[m_mainCameraId], &srcBufferManager[m_mainCameraId],
+                     &frame[m_subCameraId], &outputFrameQ[m_subCameraId], &srcBufferManager[m_subCameraId]);
+
+    /* we didn't get a synced frame pair */
+    if (flagSynced == false) {
+        CLOGD("DEBUG(%s[%d]):not synced.", __FUNCTION__, __LINE__);
+        return ret;
+    }
+
+    /*
+     * up to here, if a failure happens, we can simply return.
+ * after this code, it will handle synced frame0, frame1 + */ + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + if (frame[i] == NULL || outputFrameQ[i] == NULL || srcBufferManager[i] == NULL) { + CLOGE("ERR(%s[%d]):frame[%d] == %p || outputFrameQ[%d] == %p || srcBufferManager[%d] == %p)", + __FUNCTION__, __LINE__, i, frame[i], i, outputFrameQ[i], i, srcBufferManager[i]); + goto func_exit; + } + } + + EXYNOS_CAMERA_FUSION_PIPE_DEBUG_LOG("DEBUG(%s[%d]):dualPreviewFrameSelector->selectFrames([%d]:%d, [%d]:%d)", + __FUNCTION__, __LINE__, + m_mainCameraId, (int)(ns2ms(frame[m_mainCameraId]->getTimeStamp())), + m_subCameraId, (int)(ns2ms(frame[m_subCameraId]->getTimeStamp()))); + + /////////////////////////////////////// + // get source information of all cameras + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + if (frame[i] == NULL) + continue; + + // buffer + ret = frame[i]->getSrcBuffer(getPipeId(), &srcBuffer[i]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):frame[%d]->getSrcBuffer(%d) fail, ret(%d)", __FUNCTION__, __LINE__, i, getPipeId(), ret); + goto func_exit; + } + + // rect + ret = frame[i]->getSrcRect(getPipeId(), &srcRect[i]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):frame[%d]->getSrcRect(%d) fail, ret(%d)", __FUNCTION__, __LINE__, i, getPipeId(), ret); + goto func_exit; + } + + // is this need? + switch (srcRect[i].colorFormat) { + case V4L2_PIX_FMT_NV21: + srcRect[i].fullH = ALIGN_UP(srcRect[i].fullH, 2); + break; + default: + srcRect[i].fullH = ALIGN_UP(srcRect[i].fullH, GSCALER_IMG_ALIGN); + break; + } + + camera2_node_group node_group_info; + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + int zoom = 0; + frame[i]->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_3AA, &zoom); + + if (node_group_info.leader.input.cropRegion[2] == 0 || + node_group_info.leader.input.cropRegion[3] == 0) { + CLOGW("WARN(%s[%d]):frame[%d] node_group_info.leader.input.cropRegion(%d, %d, %d, %d) is not valid", + __FUNCTION__, __LINE__, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + } else { + if (zoom == 0) { + srcRect[i].x = 0; + srcRect[i].y = 0; + } else { + srcRect[i].x = node_group_info.leader.input.cropRegion[0]; + srcRect[i].y = node_group_info.leader.input.cropRegion[1]; + } + srcRect[i].w = node_group_info.leader.input.cropRegion[2]; + srcRect[i].h = node_group_info.leader.input.cropRegion[3]; + + EXYNOS_CAMERA_FUSION_PIPE_DEBUG_LOG("DEBUG(%s[%d]):frame[%d] zoom(%d), input(%d, %d, %d, %d), output(%d, %d, %d, %d), srcRect(%d, %d, %d, %d in %d x %d)", + __FUNCTION__, __LINE__, + i, + zoom, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3], + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3], + srcRect[i].x, + srcRect[i].y, + srcRect[i].w, + srcRect[i].h, + srcRect[i].fullW, + srcRect[i].fullH); + } + + // dof + dof[i] = dualPreviewFrameSelector->getDOF(i); + if (dof[i] == NULL) { + CLOGE("ERR(%s[%d]):dualPreviewFrameSelector->getDOF(%d) fail", __FUNCTION__, __LINE__, i); + goto func_exit; + } + + frame[i]->getMetaData(&src_shot_ext[i]); + + ptr_src_shot_ext[i] = 
&src_shot_ext[i]; + } + + /////////////////////////////////////// + // get destination information of all cameras + ret = frame[m_cameraId]->getDstBuffer(getPipeId(), &dstBuffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):frame[%d]->getSrcBuffer(%d) fail, ret(%d)", __FUNCTION__, __LINE__, m_cameraId, getPipeId(), ret); + goto func_exit; + } + + ret = frame[m_cameraId]->getDstRect(getPipeId(), &dstRect); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):frame[%d]->getDstRect(%d) fail, ret(%d)", __FUNCTION__, __LINE__, m_cameraId, getPipeId(), ret); + goto func_exit; + } + + { + m_fusionProcessTimer.start(); + + ret = m_executeFusion(ptr_src_shot_ext, dof, + srcBuffer, srcRect, srcBufferManager, + dstBuffer, dstRect, dstBufferManager); + + m_fusionProcessTimer.stop(); + int fisionProcessTime = (int)m_fusionProcessTimer.durationUsecs(); + + if (FUSION_PROCESSTIME_STANDARD < fisionProcessTime) { + CLOGW("WARN(%s[%d]):fisionProcessTime(%d) is too more slow than than %d usec", + __FUNCTION__, __LINE__, fisionProcessTime, FUSION_PROCESSTIME_STANDARD); + } + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_executeFusion() fail", __FUNCTION__, __LINE__); + goto func_exit; + } + } + + /////////////////////////////////////// + // push frame to all cameras handlePreviewFrame + + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + if (frame[i] == NULL) + continue; + + ret = frame[i]->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):frame[%d]->setEntityState(%d, ENTITY_STATE_FRAME_DONE) fail", __FUNCTION__, __LINE__, i, getPipeId()); + goto func_exit; + } + + if (outputFrameQ[i]) { + outputFrameQ[i]->pushProcessQ(&frame[i]); + } + } + + return NO_ERROR; + +func_exit: + /* + * if fusion operation is fail, + * all frames's frame need to drop + */ + for (int i = 0; i < CAMERA_ID_MAX; i++) { + if (m_flagValidCameraId[i] == false) + continue; + + if (frame[i] == NULL) + continue; + + ExynosCameraFrameEntity *entity = NULL; + + entity = frame[i]->searchEntityByPipeId(getPipeId()); + if (entity == NULL) { + CLOGE("ERR(%s[%d]):frame[%d]->searchEntityByPipeId(%d) == NULL, skip", __FUNCTION__, __LINE__, i, frame[i]->getFrameCount()); + return NO_ERROR; + } + + ret = entity->setDstBufState(ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBufState(ENTITY_BUFFER_STATE_ERROR) fail", __FUNCTION__, __LINE__); + } + + ret = frame[i]->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):frame[%d]->setEntityState(%d, ENTITY_STATE_FRAME_DONE) fail", __FUNCTION__, __LINE__, i, getPipeId()); + } + + if (outputFrameQ[i]) + outputFrameQ[i]->pushProcessQ(&frame[i]); + } + + return NO_ERROR; +} +#else +{ + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d])invalid function, assert!!!!", __FUNCTION__, __LINE__); +} +#endif // BOARD_CAMERA_USES_DUAL_CAMERA + +bool ExynosCameraPipeFusion::m_flagReadyFusion(void) +{ + bool ret = false; + + ExynosCameraFusionWrapper *fusionWrapper = ExynosCameraSingleton::getInstance(); + + return fusionWrapper->flagReady(m_cameraId); +} + +bool ExynosCameraPipeFusion::m_mainThreadFunc(void) +{ + int ret = 0; + + ret = m_run(); + if (ret < 0) { + if (ret != TIMED_OUT) + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + } + + return m_checkThreadLoop(); +} + +void ExynosCameraPipeFusion::m_init(__unused int32_t *nodeNums) +{ + /* TODO : we need to change here, when cameraId is changed */ + getDualCameraId(&m_mainCameraId, 
&m_subCameraId);
+
+    CLOGD("DEBUG(%s[%d]):m_mainCameraId(CAMERA_ID_%d), m_subCameraId(CAMERA_ID_%d)",
+        __FUNCTION__, __LINE__, m_mainCameraId, m_subCameraId);
+
+    for (int i = 0; i < CAMERA_ID_MAX; i++) {
+        m_flagValidCameraId[i] = false;
+    }
+
+    m_flagValidCameraId[m_mainCameraId] = true;
+    m_flagValidCameraId[m_subCameraId] = true;
+}
+
+status_t ExynosCameraPipeFusion::m_createFusion(void)
+#ifdef BOARD_CAMERA_USES_DUAL_CAMERA
+{
+    status_t ret = NO_ERROR;
+
+    ExynosCameraFusionWrapper *fusionWrapper = ExynosCameraSingleton<ExynosCameraFusionWrapper>::getInstance();
+
+    /*
+     * create the front (sub) camera first, then the back (main) camera,
+     * so that dczoom_init and dczoom_execute happen on the same back camera thread
+     */
+    if (m_cameraId == m_mainCameraId) {
+        if (fusionWrapper->flagCreate(m_subCameraId) == false) {
+            CLOGE("ERR(%s[%d]):CAMERA_ID_%d is not created yet, so postpone create(CAMERA_ID_%d)",
+                __FUNCTION__, __LINE__, m_subCameraId, m_mainCameraId);
+            return ret;
+        }
+    }
+
+    if (fusionWrapper->flagCreate(m_cameraId) == false) {
+        int previewW = 0, previewH = 0;
+        m_parameters->getPreviewSize(&previewW, &previewH);
+
+        ExynosRect fusionSrcRect;
+        ExynosRect fusionDstRect;
+
+        ret = m_parameters->getFusionSize(previewW, previewH, &fusionSrcRect, &fusionDstRect);
+        if (ret != NO_ERROR) {
+            CLOGE("ERR(%s[%d]):getFusionSize() fail", __FUNCTION__, __LINE__);
+            return ret;
+        }
+
+        char *calData = NULL;
+        int calDataSize = 0;
+
+        ret = fusionWrapper->create(m_cameraId,
+                                    fusionSrcRect.fullW, fusionSrcRect.fullH,
+                                    fusionDstRect.fullW, fusionDstRect.fullH,
+                                    calData, calDataSize);
+        if (ret != NO_ERROR) {
+            CLOGE("ERR(%s[%d]):fusionWrapper->create() fail", __FUNCTION__, __LINE__);
+            return ret;
+        }
+    }
+
+    return ret;
+}
+#else
+{
+    android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d])invalid function, assert!!!!", __FUNCTION__, __LINE__);
+}
+#endif // BOARD_CAMERA_USES_DUAL_CAMERA
+
+status_t ExynosCameraPipeFusion::m_executeFusion(struct camera2_shot_ext *shot_ext[], DOF *dof[],
+        ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], ExynosCameraBufferManager *srcBufferManager[],
+        ExynosCameraBuffer dstBuffer, ExynosRect dstRect, ExynosCameraBufferManager *dstBufferManager)
+{
+#ifdef EXYNOS_CAMERA_FUSION_PIPE_DEBUG
+    ExynosCameraAutoTimer autoTimer(__func__);
+#endif
+
+    status_t ret = NO_ERROR;
+
+    ExynosCameraFusionWrapper *fusionWrapper = ExynosCameraSingleton<ExynosCameraFusionWrapper>::getInstance();
+
+    if (fusionWrapper->flagCreate(m_cameraId) == false) {
+        CLOGE("ERR(%s[%d]):fusionWrapper->flagCreate(%d) == false.
so fail", __FUNCTION__, __LINE__, m_cameraId); + return INVALID_OPERATION; + } + + ////////////////////////////////////// + // trigger dczoom_execute + ret = fusionWrapper->execute(m_cameraId, + shot_ext, dof, + srcBuffer, srcRect, srcBufferManager, + dstBuffer, dstRect, dstBufferManager); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):fusionWrapper->excute() fail", __FUNCTION__, __LINE__); + return ret; + } + + return ret; +} + +status_t ExynosCameraPipeFusion::m_destroyFusion(void) +{ + status_t ret = NO_ERROR; + + ExynosCameraFusionWrapper *fusionWrapper = ExynosCameraSingleton::getInstance(); + + if (fusionWrapper->flagCreate(m_cameraId) == true) { + fusionWrapper->destroy(m_cameraId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):fusionWrapper->destroy() fail", __FUNCTION__, __LINE__); + return ret; + } + } + + return ret; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Fusion/ExynosCameraPipeFusion.h b/libcamera/common_v2/Fusion/ExynosCameraPipeFusion.h new file mode 100644 index 0000000..9ce420f --- /dev/null +++ b/libcamera/common_v2/Fusion/ExynosCameraPipeFusion.h @@ -0,0 +1,84 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_FUSION_H +#define EXYNOS_CAMERA_PIPE_FUSION_H + +#include "ExynosCameraPipe.h" + +#include "ExynosCameraFusionInclude.h" +#include "ExynosCameraDualFrameSelector.h" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeFusion : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeFusion() + { + m_init(NULL); + } + + ExynosCameraPipeFusion( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(nodeNums); + } + + virtual ~ExynosCameraPipeFusion(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual status_t startThread(void); + +protected: + virtual status_t m_run(void); + virtual bool m_mainThreadFunc(void); + +private: + void m_init(int32_t *nodeNums); + + status_t m_manageFusion(ExynosCameraFrame *newFrame); + + status_t m_createFusion(void); + bool m_flagReadyFusion(void); + status_t m_executeFusion(struct camera2_shot_ext *shot_ext[], DOF *dof[], + ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], ExynosCameraBufferManager *srcBufferManager[], + ExynosCameraBuffer dstBuffer, ExynosRect dstRect, ExynosCameraBufferManager *dstBufferManager); + status_t m_executeEmulationFusion(ExynosCameraBuffer srcBuffer[], ExynosRect srcRect[], + ExynosCameraBuffer dstBuffer, ExynosRect dstRect); + + status_t m_destroyFusion(void); + +private: + int m_mainCameraId; + int m_subCameraId; + bool m_flagValidCameraId[CAMERA_ID_MAX]; + + ExynosCameraDurationTimer m_fusionProcessTimer; +}; + +}; /* namespace android */ + +#endif diff --git 
a/libcamera/common_v2/Ged/ExynosCameraActivityAutofocusVendor.cpp b/libcamera/common_v2/Ged/ExynosCameraActivityAutofocusVendor.cpp new file mode 100644 index 0000000..4653d53 --- /dev/null +++ b/libcamera/common_v2/Ged/ExynosCameraActivityAutofocusVendor.cpp @@ -0,0 +1,584 @@ +/* + * Copyright 2012, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityAutofocusGed" +#include + +#include "ExynosCameraActivityAutofocus.h" + +namespace android { + +int ExynosCameraActivityAutofocus::t_func3ABefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + int currentState = this->getCurrentState(); + + shot_ext->shot.ctl.aa.vendor_afState = (enum aa_afstate)m_aaAfState; + + switch (m_autofocusStep) { + case AUTOFOCUS_STEP_STOP: + /* m_interenalAutoFocusMode = m_autoFocusMode; */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_OFF; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + break; + case AUTOFOCUS_STEP_TRIGGER_START: + /* Autofocus lock for capture. + * The START afTrigger make the AF state be locked on current state. + */ + m_AUTOFOCUS_MODE2AA_AFMODE(m_interenalAutoFocusMode, shot_ext); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + break; + case AUTOFOCUS_STEP_REQUEST: + /* Autofocus unlock for capture. + * If the AF request is triggered by "unlockAutofocus()", + * Send CANCEL afTrigger to F/W and start new AF scanning. + */ + if (m_flagAutofocusLock == true) { + m_AUTOFOCUS_MODE2AA_AFMODE(m_interenalAutoFocusMode, shot_ext); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL; + m_flagAutofocusLock = false; + m_autofocusStep = AUTOFOCUS_STEP_START; + } else { + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_OFF; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + + /* + * assure triggering is valid + * case 0 : adjusted m_aaAFMode is AA_AFMODE_OFF + * case 1 : AUTOFOCUS_STEP_REQUESTs more than 3 times. 
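+                 * in either case m_stepRequestCount is reset and the step falls back to
+                 * AUTOFOCUS_STEP_START so that a fresh scan is issued; otherwise the
+                 * requests just keep being counted below.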
+ */ + if (m_aaAFMode == ::AA_AFMODE_OFF || + AUTOFOCUS_WAIT_COUNT_STEP_REQUEST < m_stepRequestCount) { + + if (AUTOFOCUS_WAIT_COUNT_STEP_REQUEST < m_stepRequestCount) + ALOGD("DEBUG(%s[%d]):m_stepRequestCount(%d), force AUTOFOCUS_STEP_START", + __FUNCTION__, __LINE__, m_stepRequestCount); + + m_stepRequestCount = 0; + + m_autofocusStep = AUTOFOCUS_STEP_START; + } else { + m_stepRequestCount++; + } + } + + break; + case AUTOFOCUS_STEP_START: + m_interenalAutoFocusMode = m_autoFocusMode; + m_AUTOFOCUS_MODE2AA_AFMODE(m_autoFocusMode, shot_ext); + + if (m_interenalAutoFocusMode == AUTOFOCUS_MODE_TOUCH) { + shot_ext->shot.ctl.aa.afRegions[0] = m_focusArea.x1; + shot_ext->shot.ctl.aa.afRegions[1] = m_focusArea.y1; + shot_ext->shot.ctl.aa.afRegions[2] = m_focusArea.x2; + shot_ext->shot.ctl.aa.afRegions[3] = m_focusArea.y2; + shot_ext->shot.ctl.aa.afRegions[4] = m_focusWeight; + } else { + shot_ext->shot.ctl.aa.afRegions[0] = 0; + shot_ext->shot.ctl.aa.afRegions[1] = 0; + shot_ext->shot.ctl.aa.afRegions[2] = 0; + shot_ext->shot.ctl.aa.afRegions[3] = 0; + shot_ext->shot.ctl.aa.afRegions[4] = 1000; + + /* macro position */ + if (m_interenalAutoFocusMode == AUTOFOCUS_MODE_CONTINUOUS_PICTURE || + m_interenalAutoFocusMode == AUTOFOCUS_MODE_MACRO || + m_interenalAutoFocusMode == AUTOFOCUS_MODE_AUTO) { + if (m_macroPosition == AUTOFOCUS_MACRO_POSITION_CENTER) + shot_ext->shot.ctl.aa.afRegions[4] = AA_AFMODE_EXT_ADVANCED_MACRO_FOCUS; + else if(m_macroPosition == AUTOFOCUS_MACRO_POSITION_CENTER_UP) + shot_ext->shot.ctl.aa.afRegions[4] = AA_AFMODE_EXT_FOCUS_LOCATION; + } + } + + ALOGD("DEBUG(%s[%d]):AF-Mode(HAL/FW)=(%d/%d(%d)) AF-Region(x1,y1,x2,y2,weight)=(%d, %d, %d, %d, %d)", + __FUNCTION__, __LINE__, m_interenalAutoFocusMode, shot_ext->shot.ctl.aa.afMode, shot_ext->shot.ctl.aa.vendor_afmode_option, + shot_ext->shot.ctl.aa.afRegions[0], + shot_ext->shot.ctl.aa.afRegions[1], + shot_ext->shot.ctl.aa.afRegions[2], + shot_ext->shot.ctl.aa.afRegions[3], + shot_ext->shot.ctl.aa.afRegions[4]); + + switch (m_interenalAutoFocusMode) { + /* these affect directly */ + case AUTOFOCUS_MODE_INFINITY: + case AUTOFOCUS_MODE_FIXED: + /* These above mode may be considered like CAF. 
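+             * here they simply fall through to the continuous cases below, so the
+             * step moves straight to AUTOFOCUS_STEP_SCANNING.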
*/ + /* + m_autofocusStep = AUTOFOCUS_STEP_DONE; + break; + */ + /* these start scanning directrly */ + case AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + m_autofocusStep = AUTOFOCUS_STEP_SCANNING; + break; + /* these need to wait starting af */ + default: + m_autofocusStep = AUTOFOCUS_STEP_START_SCANNING; + break; + } + + break; + case AUTOFOCUS_STEP_START_SCANNING: + m_AUTOFOCUS_MODE2AA_AFMODE(m_interenalAutoFocusMode, shot_ext); + + /* set TAF regions */ + if (m_interenalAutoFocusMode == AUTOFOCUS_MODE_TOUCH) { + shot_ext->shot.ctl.aa.afRegions[0] = m_focusArea.x1; + shot_ext->shot.ctl.aa.afRegions[1] = m_focusArea.y1; + shot_ext->shot.ctl.aa.afRegions[2] = m_focusArea.x2; + shot_ext->shot.ctl.aa.afRegions[3] = m_focusArea.y2; + shot_ext->shot.ctl.aa.afRegions[4] = m_focusWeight; + } + + if (currentState == AUTOFOCUS_STATE_SCANNING) { + m_autofocusStep = AUTOFOCUS_STEP_SCANNING; + m_waitCountFailState = 0; + } + + break; + case AUTOFOCUS_STEP_SCANNING: + m_AUTOFOCUS_MODE2AA_AFMODE(m_interenalAutoFocusMode, shot_ext); + + /* set TAF regions */ + if (m_interenalAutoFocusMode == AUTOFOCUS_MODE_TOUCH) { + shot_ext->shot.ctl.aa.afRegions[0] = m_focusArea.x1; + shot_ext->shot.ctl.aa.afRegions[1] = m_focusArea.y1; + shot_ext->shot.ctl.aa.afRegions[2] = m_focusArea.x2; + shot_ext->shot.ctl.aa.afRegions[3] = m_focusArea.y2; + shot_ext->shot.ctl.aa.afRegions[4] = m_focusWeight; + } + + switch (m_interenalAutoFocusMode) { + case AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + break; + default: + if (currentState == AUTOFOCUS_STATE_SUCCEESS || + currentState == AUTOFOCUS_STATE_FAIL) { + + /* some times fail is happen on 3, 4, 5 count while scanning */ + if (currentState == AUTOFOCUS_STATE_FAIL && + m_waitCountFailState < WAIT_COUNT_FAIL_STATE) { + m_waitCountFailState++; + break; + } + + m_waitCountFailState = 0; + m_autofocusStep = AUTOFOCUS_STEP_DONE; + } else { + m_waitCountFailState++; + } + break; + } + break; + case AUTOFOCUS_STEP_DONE: + /* to assure next AUTOFOCUS_MODE_AUTO and AUTOFOCUS_MODE_TOUCH */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_OFF; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + break; + default: + break; + } + + return 1; +} + +int ExynosCameraActivityAutofocus::t_func3AAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + m_aaAfState = shot_ext->shot.dm.aa.afState; + + m_aaAFMode = shot_ext->shot.ctl.aa.afMode; + + m_frameCount = shot_ext->shot.dm.request.frameCount; + + return true; +} + +bool ExynosCameraActivityAutofocus::setAutofocusMode(int autoFocusMode) +{ + ALOGI("INFO(%s[%d]):autoFocusMode(%d)", __FUNCTION__, __LINE__, autoFocusMode); + + bool ret = true; + + switch(autoFocusMode) { + case AUTOFOCUS_MODE_AUTO: + case AUTOFOCUS_MODE_INFINITY: + case AUTOFOCUS_MODE_MACRO: + case AUTOFOCUS_MODE_FIXED: + case AUTOFOCUS_MODE_EDOF: + case AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + case AUTOFOCUS_MODE_TOUCH: + m_autoFocusMode = autoFocusMode; + break; + default: + ALOGE("ERR(%s):invalid focus mode(%d) fail", __FUNCTION__, autoFocusMode); + ret = 
false; + break; + } + + return ret; +} + +bool ExynosCameraActivityAutofocus::getAutofocusResult(bool flagLockFocus, bool flagStartFaceDetection, int numOfFace) +{ + ALOGI("INFO(%s[%d]):getAutofocusResult in m_autoFocusMode(%d)", + __FUNCTION__, __LINE__, m_autoFocusMode); + + bool ret = false; + bool af_over = false; + bool flagCheckStep = false; + int currentState = AUTOFOCUS_STATE_NONE; + bool flagScanningStarted = false; + int flagtrigger = true; + + unsigned int i = 0; + + unsigned int waitTimeoutFpsValue = 0; + + if (m_samsungCamera) { + if (getFpsValue() > 0) { + waitTimeoutFpsValue = 30 / getFpsValue(); + } + if (waitTimeoutFpsValue < 1) + waitTimeoutFpsValue = 1; + } else { + waitTimeoutFpsValue = 1; + } + + ALOGI("INFO(%s[%d]):waitTimeoutFpsValue(%d), getFpsValue(%d), flagStartFaceDetection(%d), numOfFace(%d)", + __FUNCTION__, __LINE__, waitTimeoutFpsValue, getFpsValue(), flagStartFaceDetection, numOfFace); + + for (i = 0; i < AUTOFOCUS_TOTAL_WATING_TIME * waitTimeoutFpsValue; i += AUTOFOCUS_WATING_TIME) { + currentState = this->getCurrentState(); + + /* + * TRIGGER_START means that lock the AF state. + */ + if(flagtrigger && flagLockFocus && (m_interenalAutoFocusMode == m_autoFocusMode)) { + m_autofocusStep = AUTOFOCUS_STEP_TRIGGER_START; + flagtrigger = false; + ALOGI("INFO(%s):m_aaAfState(%d) flagLockFocus(%d) m_interenalAutoFocusMode(%d) m_autoFocusMode(%d)", + __FUNCTION__, m_aaAfState, flagLockFocus, m_interenalAutoFocusMode, m_autoFocusMode); + } + + /* If stopAutofocus() called */ + if (m_autofocusStep == AUTOFOCUS_STEP_STOP && m_aaAfState == AA_AFSTATE_INACTIVE ) { + m_afState = AUTOFOCUS_STATE_FAIL; + af_over = true; + + if (currentState == AUTOFOCUS_STATE_SUCCEESS) + ret = true; + else + ret = false; + + break; /* break for for() loop */ + } + + switch (m_interenalAutoFocusMode) { + case AUTOFOCUS_MODE_INFINITY: + case AUTOFOCUS_MODE_FIXED: + /* These above mode may be considered like CAF. 
*/ + /* + af_over = true; + ret = true; + break; + */ + case AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + if (m_autofocusStep == AUTOFOCUS_STEP_SCANNING + || m_autofocusStep == AUTOFOCUS_STEP_DONE + || m_autofocusStep == AUTOFOCUS_STEP_TRIGGER_START) { + flagCheckStep = true; + } + break; + default: + if (m_autofocusStep == AUTOFOCUS_STEP_DONE) + flagCheckStep = true; + break; + } + + if (flagCheckStep == true) { + switch (currentState) { + case AUTOFOCUS_STATE_NONE: + if (flagScanningStarted == true) + ALOGW("WARN(%s):AF restart is detected(%d)", __FUNCTION__, i / 1000); + + if (m_interenalAutoFocusMode == AUTOFOCUS_MODE_CONTINUOUS_PICTURE) { + ALOGD("DEBUG(%s):AF force-success on AUTOFOCUS_MODE_CONTINUOUS_PICTURE (%d)", __FUNCTION__, i / 1000); + af_over = true; + ret = true; + } + break; + case AUTOFOCUS_STATE_SCANNING: + flagScanningStarted = true; + break; + case AUTOFOCUS_STATE_SUCCEESS: + if (flagStartFaceDetection && numOfFace > 0) { + if (m_aaAfState == AA_AFSTATE_FOCUSED_LOCKED) { + af_over = true; + ret = true; + } + } else { + af_over = true; + ret = true; + } + break; + case AUTOFOCUS_STATE_FAIL: + if (flagStartFaceDetection && numOfFace > 0) { + if (m_aaAfState == AA_AFSTATE_NOT_FOCUSED_LOCKED) { + af_over = true; + ret = false; + } + } else { + af_over = true; + ret = false; + } + break; + default: + ALOGV("ERR(%s):Invalid afState(%d)", __FUNCTION__, currentState); + ret = false; + break; + } + } + + if (af_over == true) + break; + + usleep(AUTOFOCUS_WATING_TIME); + } + + if (AUTOFOCUS_TOTAL_WATING_TIME * waitTimeoutFpsValue <= i) { + ALOGW("WARN(%s):AF result time out(%d) msec", __FUNCTION__, i * waitTimeoutFpsValue / 1000); + stopAutofocus(); /* Reset Previous AF */ + m_afState = AUTOFOCUS_STATE_FAIL; + } + + ALOGI("INFO(%s[%d]):getAutofocusResult out m_autoFocusMode(%d) m_interenalAutoFocusMode(%d) result(%d) af_over(%d)", + __FUNCTION__, __LINE__, m_autoFocusMode, m_interenalAutoFocusMode, ret, af_over); + + return ret; +} + +int ExynosCameraActivityAutofocus::getCAFResult(void) +{ + int ret = 0; + + /* + * 0: fail + * 1: success + * 2: canceled + * 3: focusing + * 4: restart + */ + + static int oldRet = AUTOFOCUS_RESULT_CANCEL; + static bool flagCAFScannigStarted = false; + + switch (m_aaAFMode) { + case AA_AFMODE_CONTINUOUS_VIDEO: + case AA_AFMODE_CONTINUOUS_PICTURE: + /* case AA_AFMODE_CONTINUOUS_VIDEO_FACE: */ + + switch(m_aaAfState) { + case AA_AFSTATE_INACTIVE: + ret = AUTOFOCUS_RESULT_CANCEL; + break; + case AA_AFSTATE_PASSIVE_SCAN: + case AA_AFSTATE_ACTIVE_SCAN: + ret = AUTOFOCUS_RESULT_FOCUSING; + break; + case AA_AFSTATE_PASSIVE_FOCUSED: + case AA_AFSTATE_FOCUSED_LOCKED: + ret = AUTOFOCUS_RESULT_SUCCESS; + break; + case AA_AFSTATE_PASSIVE_UNFOCUSED: + if (flagCAFScannigStarted == true) + ret = AUTOFOCUS_RESULT_FAIL; + else + ret = oldRet; + break; + case AA_AFSTATE_NOT_FOCUSED_LOCKED: + ret = AUTOFOCUS_RESULT_FAIL; + break; + default: + ALOGE("(%s[%d]):invalid m_aaAfState", __FUNCTION__, __LINE__); + ret = AUTOFOCUS_RESULT_CANCEL; + break; + } + + if (m_aaAfState == AA_AFSTATE_ACTIVE_SCAN) + flagCAFScannigStarted = true; + else + flagCAFScannigStarted = false; + + oldRet = ret; + break; + default: + flagCAFScannigStarted = false; + + ret = oldRet; + break; + } + + return ret; +} + +ExynosCameraActivityAutofocus::AUTOFOCUS_STATE ExynosCameraActivityAutofocus::afState2AUTOFOCUS_STATE(enum aa_afstate aaAfState) +{ + AUTOFOCUS_STATE autoFocusState; + + switch (aaAfState) { + case 
AA_AFSTATE_INACTIVE: + autoFocusState = AUTOFOCUS_STATE_NONE; + break; + case AA_AFSTATE_PASSIVE_SCAN: + case AA_AFSTATE_ACTIVE_SCAN: + autoFocusState = AUTOFOCUS_STATE_SCANNING; + break; + case AA_AFSTATE_PASSIVE_FOCUSED: + case AA_AFSTATE_FOCUSED_LOCKED: + autoFocusState = AUTOFOCUS_STATE_SUCCEESS; + break; + case AA_AFSTATE_NOT_FOCUSED_LOCKED: + case AA_AFSTATE_PASSIVE_UNFOCUSED: + autoFocusState = AUTOFOCUS_STATE_FAIL; + break; + default: + autoFocusState = AUTOFOCUS_STATE_NONE; + break; + } + + return autoFocusState; +} + +void ExynosCameraActivityAutofocus::m_AUTOFOCUS_MODE2AA_AFMODE(int autoFocusMode, camera2_shot_ext *shot_ext) +{ + switch (autoFocusMode) { + case AUTOFOCUS_MODE_AUTO: + + if (m_recordingHint == true) { + /* CONTINUOUS_VIDEO */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_CONTINUOUS_VIDEO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00 + | SET_BIT(AA_AFMODE_OPTION_BIT_VIDEO); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + } else if (m_flagFaceDetection == true) { + /* AUTO_FACE */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00 + | SET_BIT(AA_AFMODE_OPTION_BIT_FACE); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + } else { + /* AUTO */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + } + break; + case AUTOFOCUS_MODE_INFINITY: + /* INFINITY */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL; + break; + case AUTOFOCUS_MODE_MACRO: + /* MACRO */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_MACRO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00 + | SET_BIT(AA_AFMODE_OPTION_BIT_MACRO); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + break; + case AUTOFOCUS_MODE_EDOF: + /* EDOF */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_EDOF; + break; + case AUTOFOCUS_MODE_CONTINUOUS_VIDEO: + /* CONTINUOUS_VIDEO */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_CONTINUOUS_VIDEO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00 + | SET_BIT(AA_AFMODE_OPTION_BIT_VIDEO); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + break; + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE: + case AUTOFOCUS_MODE_CONTINUOUS_PICTURE_MACRO: + if (m_flagFaceDetection == true) { + /* CONTINUOUS_PICTURE_FACE */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_CONTINUOUS_PICTURE; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00 + | SET_BIT(AA_AFMODE_OPTION_BIT_FACE); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + } else { + /* CONTINUOUS_PICTURE */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_CONTINUOUS_PICTURE; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + } + + break; + case AUTOFOCUS_MODE_TOUCH: + if (m_recordingHint == true) { + /* CONTINUOUS_VIDEO */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_CONTINUOUS_VIDEO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00 + | SET_BIT(AA_AFMODE_OPTION_BIT_VIDEO); + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_IDLE; + } else { + /* AUTO */ + shot_ext->shot.ctl.aa.afMode = ::AA_AFMODE_AUTO; + shot_ext->shot.ctl.aa.vendor_afmode_option = 0x00; + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_START; + } + + break; + case AUTOFOCUS_MODE_FIXED: + break; + default: + ALOGE("ERR(%s):invalid focus mode (%d)", __FUNCTION__, autoFocusMode); + break; + } +} + +} /* namespace 
android */ diff --git a/libcamera/common_v2/Ged/ExynosCameraActivityFlashVendor.cpp b/libcamera/common_v2/Ged/ExynosCameraActivityFlashVendor.cpp new file mode 100644 index 0000000..5a7ee63 --- /dev/null +++ b/libcamera/common_v2/Ged/ExynosCameraActivityFlashVendor.cpp @@ -0,0 +1,742 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraActivityFlashGed" +#include + + +#include "ExynosCameraActivityFlash.h" + +namespace android { + +int ExynosCameraActivityFlash::t_func3ABefore(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + m_currentIspInputFcount = shot_ext->shot.dm.request.frameCount; + + ALOGV("INFO(%s[%d]):m_flashReq=%d, m_flashStatus=%d, m_flashStep=%d", + __FUNCTION__, __LINE__, (int)m_flashReq, (int)m_flashStatus, (int)m_flashStep); + + if (m_flashPreStatus != m_flashStatus) { + ALOGD("DEBUG(%s[%d]):m_flashReq=%d, m_flashStatus=%d, m_flashStep=%d", + __FUNCTION__, __LINE__, + (int)m_flashReq, (int)m_flashStatus, (int)m_flashStep); + + m_flashPreStatus = m_flashStatus; + } + + if (m_aePreState != m_aeState) { + ALOGV("INFO(%s[%d]):m_aeState=%d", __FUNCTION__, __LINE__, (int)m_aeState); + + m_aePreState = m_aeState; + } + + if (m_flashStep == FLASH_STEP_CANCEL && m_checkFlashStepCancel == true) { + ALOGV("DEBUG(%s[%d]): Flash step is CANCEL", __FUNCTION__, __LINE__); + + if (m_isMainFlashFiring == false) { + m_isNeedFlash = false; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CANCEL; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_waitingCount = -1; + m_flashStepErrorCount = -1; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + m_checkFlashStepCancel = false; + /* m_checkFlashWaitCancel = false; */ + m_isCapture = false; + + goto done; + } else { + ALOGW("WARN(%s[%d]) When Main Flash started, Skip Flash Cancel", __FUNCTION__, __LINE__); + } + } + + if (m_flashReq == FLASH_REQ_OFF) { + ALOGV("DEBUG(%s[%d]): Flash request is OFF", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + if (m_aeflashMode == AA_FLASHMODE_ON_ALWAYS) + m_isNeedFlashOffDelay = true; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_flashStepErrorCount = -1; + m_flashStep = FLASH_STEP_OFF; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + + goto done; + } else if (m_flashReq == FLASH_REQ_TORCH) { + ALOGV("DEBUG(%s[%d]): Flash request 
is TORCH", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON_ALWAYS; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + + goto done; + } else if (m_flashReq == FLASH_REQ_ON) { + ALOGV("DEBUG(%s[%d]): Flash request is ON", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + if (m_flashStatus == FLASH_STATUS_OFF || m_flashStatus == FLASH_STATUS_PRE_CHECK) { + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_PRE_READY; + } else if (m_flashStatus == FLASH_STATUS_PRE_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE READY", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + if (m_flashStep == FLASH_STEP_PRE_START) { + ALOGV("DEBUG(%s[%d]): Flash step is PRE START", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_START; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_ON) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE ON", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } else if (m_flashStatus == FLASH_STATUS_PRE_AE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AE DONE", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AE_DONE; + m_aeWaitMaxCount = 0; + /* AE AWB LOCK */ + } else if (m_flashStatus == FLASH_STATUS_PRE_AF) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AF", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AF; + m_aeWaitMaxCount = 0; + /* + } else if (m_flashStatus == FLASH_STATUS_PRE_AF_DONE) { + shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO; + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + */ + } else if (m_flashStatus == FLASH_STATUS_PRE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash 
status is PRE DONE", __FUNCTION__, __LINE__); + + if (m_aeLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + else + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + if (m_awbLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + else + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_AUTO; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN READY", __FUNCTION__, __LINE__); + + if (m_manualExposureTime != 0) + setMetaCtlExposureTime(shot_ext, m_manualExposureTime); + + if (m_aeLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + else + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + if (m_awbLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + else + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + if (m_flashStep == FLASH_STEP_MAIN_START) { + ALOGD("DEBUG(%s[%d]): Flash step is MAIN START (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + /* during capture, unlock AE and AWB */ + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + + m_waitingCount--; + m_aeWaitMaxCount = 0; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_ON) { + ALOGD("DEBUG(%s[%d]): Flash status is MAIN ON (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_WAIT) { + ALOGD("DEBUG(%s[%d]): Flash status is MAIN WAIT (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_MAIN_WAIT; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_DONE) { + ALOGD("DEBUG(%s[%d]): Flash status is MAIN DONE (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_OFF; + m_waitingCount = -1; + + m_aeWaitMaxCount = 0; + } + } else 
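+    /* FLASH_REQ_AUTO: the current AE state decides below whether the
+       pre-flash / main-flash sequence is needed at all */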
if (m_flashReq == FLASH_REQ_AUTO) { + ALOGV("DEBUG(%s[%d]): Flash request is AUTO", __FUNCTION__, __LINE__); + + if (m_aeState == AE_STATE_INACTIVE) { + ALOGV("DEBUG(%s[%d]): AE state is INACTIVE", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_flashStep = FLASH_STEP_OFF; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + m_waitingCount = -1; + + goto done; + } else if (m_aeState == AE_STATE_CONVERGED || m_aeState == AE_STATE_LOCKED_CONVERGED) { + ALOGV("DEBUG(%s[%d]): AE state is CONVERGED", __FUNCTION__, __LINE__); + m_isNeedFlash = false; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_OFF; + m_flashStep = FLASH_STEP_OFF; + + m_isPreFlash = false; + + m_checkMainCaptureRcount = false; + m_checkMainCaptureFcount = false; + m_waitingCount = -1; + + goto done; + } else if (m_aeState == AE_STATE_FLASH_REQUIRED || m_aeState == AE_STATE_LOCKED_FLASH_REQUIRED) { + ALOGV("DEBUG(%s[%d]): AE state is FLASH REQUIRED", __FUNCTION__, __LINE__); + m_isNeedFlash = true; + + if (m_flashStatus == FLASH_STATUS_OFF || m_flashStatus == FLASH_STATUS_PRE_CHECK) { + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_PRE_READY; + } else if (m_flashStatus == FLASH_STATUS_PRE_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE READY", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + if (m_flashStep == FLASH_STEP_PRE_START) { + ALOGV("DEBUG(%s[%d]): Flash step is PRE START", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_START; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_ON) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE ON", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_OFF; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_ON; + m_aeWaitMaxCount--; + } else if (m_flashStatus == FLASH_STATUS_PRE_AE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AE DONE", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + 
shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AE_DONE; + m_aeWaitMaxCount = 0; + /* AE AWB LOCK */ + } else if (m_flashStatus == FLASH_STATUS_PRE_AF) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE AF", __FUNCTION__, __LINE__); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_ON; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + + m_flashStatus = FLASH_STATUS_PRE_AF; + m_aeWaitMaxCount = 0; + /* + } else if (m_flashStatus == FLASH_STATUS_PRE_AF_DONE) { + shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO; + shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + */ + } else if (m_flashStatus == FLASH_STATUS_PRE_DONE) { + ALOGV("DEBUG(%s[%d]): Flash status is PRE DONE", __FUNCTION__, __LINE__); + + if (m_aeLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + else + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + if (m_awbLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + else + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_AUTO; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_waitingCount = -1; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_READY) { + ALOGV("DEBUG(%s[%d]): Flash status is MAIN READY", __FUNCTION__, __LINE__); + + if (m_manualExposureTime != 0) + setMetaCtlExposureTime(shot_ext, m_manualExposureTime); + + if (m_aeLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.aeLock = AA_AE_LOCK_ON; + else + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + if (m_awbLock && m_flashTrigger != FLASH_TRIGGER_TOUCH_DISPLAY) + shot_ext->shot.ctl.aa.awbLock = AA_AWB_LOCK_ON; + else + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + if (m_flashStep == FLASH_STEP_MAIN_START) { + ALOGD("DEBUG(%s[%d]): Flash step is MAIN START (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + /* during capture, unlock AE and AWB */ + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + + m_waitingCount--; + m_aeWaitMaxCount = 0; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_ON) { + ALOGD("DEBUG(%s[%d]): Flash status is MAIN ON (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_MAIN_ON; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_WAIT) { + ALOGD("DEBUG(%s[%d]): Flash status is MAIN WAIT (fcount %d)", + __FUNCTION__, __LINE__, 
shot_ext->shot.dm.request.frameCount); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_CAPTURE; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_MAIN_WAIT; + m_waitingCount--; + m_aeWaitMaxCount = 0; + } else if (m_flashStatus == FLASH_STATUS_MAIN_DONE) { + ALOGD("DEBUG(%s[%d]): Flash status is MAIN DONE (fcount %d)", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + shot_ext->shot.ctl.aa.vendor_aeflashMode = AA_FLASHMODE_OFF; + shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NONE; + shot_ext->shot.ctl.flash.firingTime = 0; + shot_ext->shot.ctl.flash.firingPower = 0; + + shot_ext->shot.ctl.aa.aeMode = m_aeMode; + shot_ext->shot.ctl.aa.awbMode = m_awbMode; + + m_flashStatus = FLASH_STATUS_OFF; + m_waitingCount = -1; + + m_aeWaitMaxCount = 0; + } + } + } + + if (0 < m_flashStepErrorCount) + m_flashStepErrorCount++; + + ALOGV("INFO(%s[%d]):aeflashMode=%d", + __FUNCTION__, __LINE__, (int)shot_ext->shot.ctl.aa.vendor_aeflashMode); + +done: + return 1; +} + +int ExynosCameraActivityFlash::t_func3AAfter(void *args) +{ + ExynosCameraBuffer *buf = (ExynosCameraBuffer *)args; + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buf->addr[1]); + + if (shot_ext == NULL) { + ALOGE("ERR(%s[%d]):shot_ext is null", __FUNCTION__, __LINE__); + return false; + } + + if (m_isCapture == false) + m_aeState = shot_ext->shot.dm.aa.aeState; + m_curAeState = shot_ext->shot.dm.aa.aeState; + + /* Convert aeState for Locked */ + if (shot_ext->shot.dm.aa.aeState == AE_STATE_LOCKED_CONVERGED || + shot_ext->shot.dm.aa.aeState == AE_STATE_LOCKED_FLASH_REQUIRED) { + shot_ext->shot.dm.aa.aeState = AE_STATE_LOCKED; + } + + if (m_flashStep == FLASH_STEP_CANCEL && + m_checkFlashStepCancel == false) { + m_flashStep = FLASH_STEP_OFF; + m_flashStatus = FLASH_STATUS_OFF; + + goto done; + } + + if (m_flashReq == FLASH_REQ_OFF) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 3) { + ALOGV("DEBUG(%s[%d]): flashReady = 3 frameCount %d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + m_isFlashOff = true; + } + } + + if (m_flashStatus == FLASH_STATUS_PRE_CHECK) { + if (shot_ext->shot.dm.flash.vendor_decision == 2 || + FLASH_TIMEOUT_COUNT < m_timeoutCount) { + m_flashStatus = FLASH_STATUS_PRE_READY; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_ON) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 1 || + FLASH_AE_TIMEOUT_COUNT < m_timeoutCount) { + if (FLASH_AE_TIMEOUT_COUNT < m_timeoutCount) + ALOGD("DEBUG(%s[%d]):auto exposure timeoutCount %d", __FUNCTION__, __LINE__, m_timeoutCount); + m_flashStatus = FLASH_STATUS_PRE_AE_DONE; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_PRE_AE_DONE) { + m_flashStatus = FLASH_STATUS_PRE_AF; + } else if (m_flashStatus == FLASH_STATUS_PRE_AF) { + if (m_flashStep == FLASH_STEP_PRE_DONE || + FLASH_AF_TIMEOUT_COUNT < m_timeoutCount) { + if (FLASH_AF_TIMEOUT_COUNT < m_timeoutCount) + ALOGD("DEBUG(%s[%d]):auto focus timeoutCount %d", __FUNCTION__, __LINE__, m_timeoutCount); + m_flashStatus = FLASH_STATUS_PRE_DONE; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + /* + } else if (m_flashStatus == FLASH_STATUS_PRE_AF_DONE) { + if (shot_ext->shot.dm.flash.flashOffReady == 1 || + FLASH_TIMEOUT_COUNT < 
m_timeoutCount) { + if (shot_ext->shot.dm.flash.flashOffReady == 1) { + ALOGD("[%s] (%d) flashOffReady == 1 frameCount %d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + } + + m_flashStatus = FLASH_STATUS_PRE_DONE; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + */ + } else if (m_flashStatus == FLASH_STATUS_PRE_DONE) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 2 || + FLASH_TIMEOUT_COUNT < m_timeoutCount) { + if (shot_ext->shot.dm.flash.vendor_flashReady == 2) { + ALOGD("DEBUG(%s[%d]):flashReady == 2 frameCount %d", + __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + } else if (FLASH_MAIN_TIMEOUT_COUNT < m_timeoutCount) { + ALOGD("DEBUG(%s[%d]):m_timeoutCount %d", __FUNCTION__, __LINE__, m_timeoutCount); + } + + m_flashStatus = FLASH_STATUS_MAIN_READY; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_READY) { +#if 0 // Disabled By TN + if (m_flashTrigger == FLASH_TRIGGER_TOUCH_DISPLAY) { + if (FLASH_TIMEOUT_COUNT < m_timeoutCount) { + ALOGD("DEBUG(%s[%d]):m_timeoutCount %d", __FUNCTION__, __LINE__, m_timeoutCount); + + m_flashStep = FLASH_STEP_OFF; + m_flashStatus = FLASH_STATUS_OFF; + /* m_flashTrigger = FLASH_TRIGGER_OFF; */ + m_isCapture = false; + m_isPreFlash = false; + + m_waitingCount = -1; + m_checkMainCaptureFcount = false; + m_timeoutCount = 0; + } else { + m_timeoutCount++; + } + } +#endif + } else if (m_flashStatus == FLASH_STATUS_MAIN_ON) { + if ((shot_ext->shot.dm.flash.vendor_flashOffReady == 2) || + (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_FLASH) || + FLASH_MAIN_TIMEOUT_COUNT < m_timeoutCount) { + if (shot_ext->shot.dm.flash.vendor_flashOffReady == 2) { + ALOGD("DEBUG(%s[%d]):flashOffReady %d", __FUNCTION__, __LINE__, + shot_ext->shot.dm.flash.vendor_flashOffReady); + } else if (shot_ext->shot.dm.flash.vendor_firingStable == CAPTURE_STATE_FLASH) { + m_ShotFcount = shot_ext->shot.dm.request.frameCount; + ALOGD("DEBUG(%s[%d]):m_ShotFcount %u", __FUNCTION__, __LINE__, m_ShotFcount); + } else if (FLASH_MAIN_TIMEOUT_COUNT < m_timeoutCount) { + ALOGD("DEBUG(%s[%d]):m_timeoutCount %d", __FUNCTION__, __LINE__, m_timeoutCount); + } + ALOGD("DEBUG(%s[%d]):frameCount %d" ,__FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + + m_flashStatus = FLASH_STATUS_MAIN_DONE; + m_timeoutCount = 0; + m_mainWaitCount = 0; + + m_waitingCount--; + } else { + m_timeoutCount++; + } + } else if (m_flashStatus == FLASH_STATUS_MAIN_WAIT) { + /* 1 frame is used translate status MAIN_ON to MAIN_WAIT */ + if (m_mainWaitCount < FLASH_MAIN_WAIT_COUNT -1) { + ALOGD("DEBUG(%s[%d]):frameCount=%d", __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + m_mainWaitCount ++; + } else { + ALOGD("DEBUG(%s[%d]):frameCount=%d", __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + m_mainWaitCount = 0; + m_waitingCount = -1; + m_flashStatus = FLASH_STATUS_MAIN_DONE; + } + } + + m_aeflashMode = shot_ext->shot.dm.aa.vendor_aeflashMode; + + ALOGV("INFO(%s[%d]):(m_aeState %d)(m_flashStatus %d)", __FUNCTION__, __LINE__, + (int)m_aeState, (int)m_flashStatus); + ALOGV("INFO(%s[%d]):(decision %d flashReady %d flashOffReady %d firingStable %d)", __FUNCTION__, __LINE__, + (int)shot_ext->shot.dm.flash.vendor_decision, + (int)shot_ext->shot.dm.flash.vendor_flashReady, + (int)shot_ext->shot.dm.flash.vendor_flashOffReady, + (int)shot_ext->shot.dm.flash.vendor_firingStable); + ALOGV("INFO(%s[%d]):(aeState %d)(aeflashMode %d)", __FUNCTION__, __LINE__, + 
(int)shot_ext->shot.dm.aa.aeState, (int)shot_ext->shot.dm.aa.vendor_aeflashMode); + +done: + return 1; +} + +bool ExynosCameraActivityFlash::setFlashStep(enum FLASH_STEP flashStepVal) +{ + m_flashStep = flashStepVal; + + /* trigger events */ + switch (flashStepVal) { + case FLASH_STEP_OFF: + m_waitingCount = -1; + m_checkMainCaptureFcount = false; + m_checkFlashStepCancel = false; + /* m_checkFlashWaitCancel = false; */ + m_flashStatus = FLASH_STATUS_OFF; + m_isPreFlash = false; + m_isCapture = false; + m_isMainFlashFiring = false; + m_manualExposureTime = 0; + break; + case FLASH_STEP_PRE_START: + m_aeWaitMaxCount = 25; + m_isPreFlash = true; + m_isFlashOff = false; + m_isMainFlashFiring = false; + if ((m_flashStatus == FLASH_STATUS_PRE_DONE || m_flashStatus == FLASH_STATUS_MAIN_READY) && + (m_flashTrigger == FLASH_TRIGGER_LONG_BUTTON || m_flashTrigger == FLASH_TRIGGER_TOUCH_DISPLAY)) + m_flashStatus = FLASH_STATUS_OFF; + break; + case FLASH_STEP_PRE_DONE: + break; + case FLASH_STEP_MAIN_START: + m_isMainFlashFiring = true; + setShouldCheckedFcount(m_currentIspInputFcount + CAPTURE_SKIP_COUNT); + + m_waitingCount = 15; + m_timeoutCount = 0; + m_checkMainCaptureFcount = false; + break; + case FLASH_STEP_MAIN_DONE: + m_waitingCount = -1; + m_checkMainCaptureFcount = false; + m_isPreFlash = false; + m_isMainFlashFiring = false; + break; + case FLASH_STEP_CANCEL: + m_checkFlashStepCancel = true; + m_checkFlashWaitCancel = true; + m_isNeedCaptureFlash = true; + m_isPreFlash = false; + break; + case FLASH_STEP_END: + break; + default: + break; + } + + ALOGD("DEBUG(%s[%d]):flashStepVal=%d", __FUNCTION__, __LINE__, (int)flashStepVal); + + if (flashStepVal != FLASH_STEP_OFF) + m_flashStepErrorCount = 0; + + return true; +} + +}/* namespace android */ diff --git a/libcamera/common_v2/Ged/ExynosCameraFrameSelectorVendor.cpp b/libcamera/common_v2/Ged/ExynosCameraFrameSelectorVendor.cpp new file mode 100644 index 0000000..eded4fc --- /dev/null +++ b/libcamera/common_v2/Ged/ExynosCameraFrameSelectorVendor.cpp @@ -0,0 +1,359 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraFrameSelectorGed" + +#include "ExynosCameraFrameSelector.h" + +#define FLASHED_LLS_COUNT 4 + +namespace android { + +status_t ExynosCameraFrameSelector::release(void) +{ + int ret = 0; + ret = m_release(&m_frameHoldList); + if (ret != NO_ERROR) { + ALOGE("DEBUG(%s[%d]):m_frameHoldList release failed ", __FUNCTION__, __LINE__); + } + ret = m_release(&m_hdrFrameHoldList); + if (ret != NO_ERROR) { + ALOGE("DEBUG(%s[%d]):m_hdrFrameHoldList release failed ", __FUNCTION__, __LINE__); + } + + isCanceled = false; + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + if (m_parameters->getDualCameraMode() == true) { + ExynosCameraDualFrameSelector *dualFrameSelector = ExynosCameraSingleton::getInstance(); + dualFrameSelector->clear(m_parameters->getCameraId()); + } +#endif + + return NO_ERROR; +} + +status_t ExynosCameraFrameSelector::manageFrameHoldList(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos) +{ + int ret = 0; +#ifdef USE_FRAME_REFERENCE_COUNT + frame->incRef(); +#endif + if (m_parameters->getHdrMode() == true || + m_parameters->getShotMode() == SHOT_MODE_RICH_TONE) { + ret = m_manageHdrFrameHoldList(frame, pipeID, isSrc, dstPos); + } + else { + ret = m_manageNormalFrameHoldList(frame, pipeID, isSrc, dstPos); + } + + return ret; +} + +status_t ExynosCameraFrameSelector::m_manageNormalFrameHoldList(ExynosCameraFrame *newFrame, int pipeID, bool isSrc, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame *oldFrame = NULL; + ExynosCameraBuffer buffer; + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + if (m_parameters->getDualCameraMode() == true) { + /* handle dual sync frame */ + ExynosCameraDualFrameSelector *dualFrameSelector = ExynosCameraSingleton::getInstance(); + + ret = dualFrameSelector->setInfo(m_parameters->getCameraId(), +#ifdef USE_FRAMEMANAGER + m_frameMgr, +#endif + m_frameHoldCount); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):dualFrameSelector->setInfo(id(%d)", + __FUNCTION__, __LINE__, m_parameters->getCameraId()); + return ret; + } + + ret = dualFrameSelector->manageNormalFrameHoldList(m_parameters->getCameraId(), + &m_frameHoldList, + newFrame, pipeID, isSrc, dstPos, m_bufMgr); + + /* if dual frame sync is fail, just use original code. 
*/ + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):dualFrameSelector->manageFrameHoldList(id(%d), pipeID(%d), BufferType(%s), dstPos(%d)", + __FUNCTION__, __LINE__, m_parameters->getCameraId(), pipeID, (isSrc)?"Src":"Dst", dstPos); + } else { + return ret; + } + } +#endif + + /* Skip INITIAL_SKIP_FRAME only FastenAeStable is disabled */ + /* This previous condition check is useless because almost framecount for capture is over than skip frame count */ + /* + * if (m_parameters->getUseFastenAeStable() == true || + * newFrame->getFrameCount() > INITIAL_SKIP_FRAME) { + */ + m_pushQ(&m_frameHoldList, newFrame, true); + + /* + } else { + ret = m_getBufferFromFrame(newFrame, pipeID, isSrc, &buffer, dstPos); + if( ret != NO_ERROR ) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s)", __FUNCTION__, __LINE__, pipeID, (isSrc)?"Src":"Dst"); + } + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else { + ret = m_bufMgr->putBuffer(buffer.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret < 0) { + ALOGE("ERR(%s[%d]):putIndex is %d", __FUNCTION__, __LINE__, buffer.index); + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } + m_frameComplete(newFrame, false); + newFrame = NULL; + } + } + */ + + if (m_frameHoldList.getSizeOfProcessQ() > m_frameHoldCount) { + if( m_popQ(&m_frameHoldList, &oldFrame, true, 1) != NO_ERROR ) { + ALOGE("ERR(%s[%d]):getBufferToManageQ fail", __FUNCTION__, __LINE__); + + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } else { + ret = m_getBufferFromFrame(oldFrame, pipeID, isSrc, &buffer, dstPos); + if( ret != NO_ERROR ) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s)", __FUNCTION__, __LINE__, pipeID, (isSrc)?"Src":"Dst"); + } + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else { + /* + Frames in m_frameHoldList and m_hdrFrameHoldList are locked when they are inserted + on the list. So we need to use m_LockedFrameComplete() to remove those frames. 
+ */ + m_LockedFrameComplete(oldFrame, pipeID, isSrc, dstPos); + oldFrame = NULL; + } + } + } + + return ret; +} + +ExynosCameraFrame* ExynosCameraFrameSelector::selectFrames(int count, int pipeID, bool isSrc, int tryCount, int32_t dstPos) +{ + ExynosCameraFrame* selectedFrame = NULL; + ExynosCameraActivityFlash *m_flashMgr = NULL; + ExynosCameraActivityAutofocus *afMgr = m_activityControl->getAutoFocusMgr(); // shoud not be a NULL + ExynosCameraActivitySpecialCapture *m_sCaptureMgr = m_activityControl->getSpecialCaptureMgr(); + + m_reprocessingCount = count; + + m_flashMgr = m_activityControl->getFlashMgr(); + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + if (m_parameters->getDualCameraMode() == true) { + /* handle dual sync frame */ + ExynosCameraDualFrameSelector *dualFrameSelector = ExynosCameraSingleton::getInstance(); + + selectedFrame = dualFrameSelector->selectFrames(m_parameters->getCameraId()); + } else +#endif + + if (m_flashMgr->getNeedCaptureFlash() == true && m_parameters->getSeriesShotCount() == 0) { + selectedFrame = m_selectFlashFrame(pipeID, isSrc, tryCount, dstPos); + + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):select Flash Frame Fail!", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } else if (m_parameters->getHdrMode() == true || + m_parameters->getShotMode() == SHOT_MODE_RICH_TONE) { + selectedFrame = m_selectHdrFrame(pipeID, isSrc, tryCount, dstPos); + + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]):select HDR Frame Fail!", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } else if (afMgr->getRecordingHint() == true + || m_parameters->getHighResolutionCallbackMode() == true + || (m_parameters->getShotMode() > SHOT_MODE_AUTO + && m_parameters->getShotMode() != SHOT_MODE_NIGHT + && m_parameters->getShotMode() != SHOT_MODE_PRO_MODE)) { + /* + On recording mode, do not try to find focused frame but just use normal frame. + ExynosCameraActivityAutofocus::setRecordingHint() is called + with true argument on startRecording(), and called with false on + stopRecording(). So it is used to determine whether the recording + is currently progressing or not on codes below. 
+ */ + + if(afMgr->getRecordingHint() == true && m_parameters->getRecordingHint() == false) { + ALOGD("DEBUG(%s[%d]):HACK: Applying AFManager recordingHint(true)", __FUNCTION__, __LINE__); + } + + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) + ALOGE("ERR(%s[%d]):select Frame Fail!", __FUNCTION__, __LINE__); + } else if (m_parameters->getSeriesShotCount() > 0) { + selectedFrame = m_selectBurstFrame(pipeID, isSrc, tryCount, dstPos); + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]:select focused frame Faile!", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } else if (m_parameters->getCaptureExposureTime() > CAMERA_PREVIEW_EXPOSURE_TIME_LIMIT) { + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } else { + selectedFrame = m_selectFocusedFrame(pipeID, isSrc, tryCount, dstPos); + + if (selectedFrame == NULL) { + ALOGE("ERR(%s[%d]:select focused frame Faile!", __FUNCTION__, __LINE__); + selectedFrame = m_selectNormalFrame(pipeID, isSrc, tryCount, dstPos); + } + } + + m_isFirstFrame = false; + + return selectedFrame; +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectNormalFrame(__unused int pipeID, __unused bool isSrc, int tryCount, __unused int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame *selectedFrame = NULL; + + ret = m_waitAndpopQ(&m_frameHoldList, &selectedFrame, false, tryCount); + if (ret < 0 || selectedFrame == NULL) { + ALOGD("DEBUG(%s[%d]):getFrame Fail ret(%d)", __FUNCTION__, __LINE__, ret); + return NULL; + } else if (isCanceled == true) { + ALOGD("DEBUG(%s[%d]):isCanceled", __FUNCTION__, __LINE__); + if (selectedFrame != NULL) { + m_LockedFrameComplete(selectedFrame, pipeID, isSrc, dstPos); + } + return NULL; + } + ALOGD("DEBUG(%s[%d]):Frame Count(%d)", __FUNCTION__, __LINE__, selectedFrame->getFrameCount()); + + return selectedFrame; +} + +ExynosCameraFrame* ExynosCameraFrameSelector::m_selectFlashFrameV2(int pipeID, bool isSrc, int tryCount, int32_t dstPos) +{ + /* m_selectFlashFrameV2 is implemented by MCD */ + return m_selectFlashFrame(pipeID, isSrc, tryCount, dstPos); +} + +status_t ExynosCameraFrameSelector::m_waitAndpopQ(ExynosCameraList *list, ExynosCameraFrame** outframe, bool unlockflag, int tryCount) +{ + status_t ret = NO_ERROR; + int iter = 0; + + do { + ret = list->waitAndPopProcessQ(outframe); + + if (isCanceled == true) { + ALOGD("DEBUG(%s[%d]):isCanceled", __FUNCTION__, __LINE__); + + return NO_ERROR; + } + + if( ret < 0 ) { + if( ret == TIMED_OUT ) { + ALOGD("DEBUG(%s[%d]):waitAndPopQ Time out -> retry[max cur](%d %d)", __FUNCTION__, __LINE__, tryCount, iter); + iter++; + continue; + } + } + if(*outframe != NULL) { + ALOGD("DEBUG(%s[%d]):Frame Count(%d)", __FUNCTION__, __LINE__, (*outframe)->getFrameCount()); + } + } while (ret != OK && tryCount > iter); + + if(ret != OK) { + ALOGE("ERR(%s[%d]):wait for popQ fail(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if(*outframe == NULL) { + ALOGE("ERR(%s[%d]):wait for popQ frame = NULL frame(%p)", __FUNCTION__, __LINE__, *outframe); + return ret; + } + + if(unlockflag) { + (*outframe)->frameUnlock(); + } + return ret; +} + +status_t ExynosCameraFrameSelector::clearList(int pipeID, bool isSrc, int32_t dstPos) +{ + int ret = 0; + ExynosCameraFrame *frame = NULL; + ExynosCameraBuffer *buffer = NULL; + if (m_frameHoldList.isWaiting() == false) { + ret = m_clearList(&m_frameHoldList, pipeID, isSrc, dstPos); + if( ret < 0 ) { + ALOGE("DEBUG(%s[%d]):m_frameHoldList 
clear failed, pipeID(%d)", __FUNCTION__, __LINE__, pipeID); + } + } else { + ALOGE("ERR(%s[%d]):Cannot clear frameHoldList cause waiting for pop frame", __FUNCTION__, __LINE__); + } + + if (m_hdrFrameHoldList.isWaiting() == false) { + ret = m_clearList(&m_hdrFrameHoldList, pipeID, isSrc, dstPos); + if( ret < 0 ) { + ALOGE("DEBUG(%s[%d]):m_hdrFrameHoldList clear failed, pipeID(%d)", __FUNCTION__, __LINE__, pipeID); + } + } else { + ALOGE("ERR(%s[%d]):Cannot clear hdrFrameHoldList cause waiting for pop frame", __FUNCTION__, __LINE__); + } + + isCanceled = false; + + return NO_ERROR; +} + +status_t ExynosCameraFrameSelector::m_releaseBuffer(ExynosCameraFrame *frame, int pipeID, bool isSrc, int32_t dstPos) +{ + int ret = 0; + ExynosCameraBuffer bayerBuffer; + + ret = m_getBufferFromFrame(frame, pipeID, isSrc, &bayerBuffer, dstPos); + if( ret != NO_ERROR ) { + ALOGE("ERR(%s[%d]):m_getBufferFromFrame fail pipeID(%d) BufferType(%s) bufferPtr(%p)", + __FUNCTION__, __LINE__, pipeID, (isSrc)?"Src":"Dst", &bayerBuffer); + } + if (m_bufMgr == NULL) { + ALOGE("ERR(%s[%d]):m_bufMgr is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else { + ret = m_bufMgr->putBuffer(bayerBuffer.index, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + if (ret < 0) { + ALOGE("ERR(%s[%d]):putIndex is %d", __FUNCTION__, __LINE__, bayerBuffer.index); + m_bufMgr->printBufferState(); + m_bufMgr->printBufferQState(); + } + } + return NO_ERROR; +} +} diff --git a/libcamera/common_v2/MCPipes/ExynosCameraMCPipe.cpp b/libcamera/common_v2/MCPipes/ExynosCameraMCPipe.cpp new file mode 100644 index 0000000..fbe3973 --- /dev/null +++ b/libcamera/common_v2/MCPipes/ExynosCameraMCPipe.cpp @@ -0,0 +1,3655 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraMCPipe" +#include + +#include "ExynosCameraMCPipe.h" + +namespace android { + +#ifdef USE_MCPIPE_SERIALIZATION_MODE +Mutex ExynosCameraMCPipe::g_serializationLock; +#endif + +ExynosCameraMCPipe::~ExynosCameraMCPipe() +{ + this->destroy(); +} + +status_t ExynosCameraMCPipe::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_preCreate(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_preCreate() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_postCreate(sensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_postCreate() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraMCPipe::precreate(__unused int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_preCreate(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_preCreate() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return ret; +} + +status_t ExynosCameraMCPipe::postcreate(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_postCreate(sensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_postCreate() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + CLOGI("INFO(%s[%d]):postcreate() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraMCPipe::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = (MAX_NODE - 1); i >= OUTPUT_NODE; i--) { + if (m_node[i] != NULL) { + if (OUTPUT_NODE < i + && m_node[OUTPUT_NODE] != NULL + && m_deviceInfo->nodeNum[OUTPUT_NODE] == m_deviceInfo->nodeNum[i]) { + /* In this case(3AA of 54xx), 3AA, 3AP node is same. + * So, should close one node. Skip 3AP node. 
+ */ + } else { + ret = m_node[i]->close(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Main node(%s) close fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + } + + SAFE_DELETE(m_node[i]); + CLOGD("DEBUG(%s[%d]):Main node(%s, sensorIds:%d) closed", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], m_sensorIds[i]); + } + } + + for (int i = (MAX_NODE - 1); i >= OUTPUT_NODE; i--) { + if (m_secondaryNode[i] != NULL) { + ret = m_secondaryNode[i]->close(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):secondary node(%s) close fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->secondaryNodeName[i], ret); + return ret; + } + SAFE_DELETE(m_secondaryNode[i]); + CLOGD("DEBUG(%s[%d]):secondary node(%s, sensorIds:%d) closed", + __FUNCTION__, __LINE__, m_deviceInfo->secondaryNodeName[i], m_secondarySensorIds[i]); + } + } + + CLOGD("DEBUG(%s[%d]):Node destroyed", __FUNCTION__, __LINE__); + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + SAFE_DELETE(m_inputFrameQ); + } + + if (m_requestFrameQ != NULL) { + m_requestFrameQ->release(); + SAFE_DELETE(m_requestFrameQ); + } + + CLOGI("INFO(%s[%d]):destroy() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraMCPipe::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + status_t ret = NO_ERROR; + + ret = this->setupPipe(pipeInfos, sensorIds, NULL); + + return ret; +} + +status_t ExynosCameraMCPipe::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds, int32_t *secondarySensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + uint32_t pipeId = 0; + int result = 0; + ExynosCameraNode *setFileSettingNode = NULL; + + /* TODO: check node state */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds != NULL) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + if (secondarySensorIds != NULL) { + CLOGD("DEBUG(%s[%d]):set new ispSensorIds", __FUNCTION__, __LINE__); + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) + m_secondarySensorIds[i] = secondarySensorIds[i]; + } + + ret = m_setInput(m_node, m_deviceInfo->nodeNum, m_sensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setInput(Main) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_setInput(m_secondaryNode, m_deviceInfo->secondaryNodeNum, m_secondarySensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setInput(secondary) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos != NULL) { + ret = m_setPipeInfo(pipeInfos); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setPipeInfo() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } else { + CLOGE("ERR(%s[%d]):pipeInfos is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + result = getPipeId((enum NODE_TYPE)i); + if (0 <= result) { + if (m_node[i] != NULL) + setFileSettingNode = m_node[i]; + else if (m_secondaryNode[i] != NULL) + setFileSettingNode = m_secondaryNode[i]; + else + continue; + + pipeId = (uint32_t)result; + + ret = m_setSetfile(setFileSettingNode, pipeId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setSetfile() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for (uint32_t j = 0; j < m_numBuffers[i]; j++) { + m_runningFrameList[i][j] = NULL; + } + m_numOfRunningFrame[i] = 0; + } + + m_prepareBufferCount = 
m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):setupPipe() is succeed, Pipe(%d), prepare(%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return ret; +} + +status_t ExynosCameraMCPipe::prepare(void) +{ + /* need modify */ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + bool retVal = true; + + /* + * prepare on only capture node + * output node doesn't need prepare + */ + for (uint32_t i = 0; i < m_prepareBufferCount; i++) { + retVal = m_putBufferThreadFunc(); + if (retVal == false) { + CLOGE("WRN(%s):m_putBufferThreadFunc no Frame(count = %d)", __FUNCTION__, m_inputFrameQ->getSizeOfProcessQ()); + ret = INVALID_OPERATION; + } + } + + CLOGI("INFO(%s[%d]):prepare() is succeed, Pipe(%d), prepare(%d)", + __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return ret; +} + +status_t ExynosCameraMCPipe::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + /* TODO: check state ready for start */ + ret = m_startNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_startNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_threadState = 0; + m_threadRenew = 0; + + m_flagStartPipe = true; + m_flagTryStop = false; + + CLOGI("INFO(%s[%d]):start() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraMCPipe::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_stopNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_stopNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_putBufferThread->requestExitAndWait(); + m_getBufferThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]):Thread exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + m_requestFrameQ->release(); + +#ifdef USE_MCPIPE_SERIALIZATION_MODE + if (m_serializeOperation == true) { + ExynosCameraMCPipe::g_serializationLock.unlock(); + CLOGD("DEBUG(%s[%d]):%s Critical Section END", + __FUNCTION__, __LINE__, m_name); + } +#endif + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + for (uint32_t j = 0; j < m_numBuffers[i]; j++) { + m_runningFrameList[i][j] = NULL; + } + m_numOfRunningFrame[i] = 0; + m_skipBuffer[i].index = -2; + m_skipPutBuffer[i] = false; + + if (m_node[i] != NULL) + m_node[i]->removeItemBufferQ(); + + } + + m_flagStartPipe = false; + m_flagTryStop = false; + + CLOGI("INFO(%s[%d]):stop() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +bool ExynosCameraMCPipe::flagStart(void) +{ + return m_flagStartPipe; +} + +status_t ExynosCameraMCPipe::startThread(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s[%d]):outputFrameQ is NULL, cannot start", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + m_putBufferThread->run(PRIORITY_URGENT_DISPLAY); + m_getBufferThread->run(PRIORITY_URGENT_DISPLAY); + + CLOGI("INFO(%s[%d]):startThread is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraMCPipe::stopThread(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + m_putBufferThread->requestExit(); + m_getBufferThread->requestExit(); + + m_inputFrameQ->sendCmd(WAKE_UP); + m_requestFrameQ->sendCmd(WAKE_UP); + +#ifdef USE_MCPIPE_SERIALIZATION_MODE + if (m_serializeOperation == true) { + ExynosCameraMCPipe::g_serializationLock.unlock(); + 
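/* This unlock releases ExynosCameraMCPipe::g_serializationLock, which m_putBuffer() acquires while m_serializeOperation is enabled, so a pending serialized section cannot outlive the put/get buffer threads. */ + 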
CLOGD("DEBUG(%s[%d]):%s Critical Section END", + __FUNCTION__, __LINE__, m_name); + } +#endif + + m_dumpRunningFrameList(); + + CLOGI("INFO(%s[%d]):stopThread is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraMCPipe::stopThreadAndWait(int sleep, int times) +{ + CLOGD("DEBUG(%s[%d]) IN", __FUNCTION__, __LINE__); + status_t status = NO_ERROR; + int i = 0; + + for (i = 0; i < times ; i++) { + if (m_putBufferThread->isRunning() == false && m_getBufferThread->isRunning() == false) { + break; + } + usleep(sleep * 1000); + } + + if (i >= times) { + status = TIMED_OUT; + CLOGE("ERR(%s[%d]): stopThreadAndWait failed, waitTime(%d)ms", __FUNCTION__, __LINE__, sleep*times); + } + + CLOGD("DEBUG(%s[%d]) OUT", __FUNCTION__, __LINE__); + return status; +} + +bool ExynosCameraMCPipe::flagStartThread(void) +{ + return m_putBufferThread->isRunning(); +} + +status_t ExynosCameraMCPipe::sensorStream(bool on) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + int value = on ? IS_ENABLE_STREAM : IS_DISABLE_STREAM; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->setControl(V4L2_CID_IS_S_STREAM, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):sensorStream failed, %s node, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + + return ret; + } + } + + CLOGE("ERR(%s[%d]):All Nodes is NULL", __FUNCTION__, __LINE__); + + return INVALID_OPERATION; +} + +status_t ExynosCameraMCPipe::forceDone(unsigned int cid, int value) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_node[OUTPUT_NODE] == NULL) { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE] is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = m_forceDone(m_node[OUTPUT_NODE], cid, value); + if (ret != NO_ERROR) { + CLOGE("ERR(%s):m_forceDone() is failed, ret", __FUNCTION__); + return ret; + } + + CLOGI("INFO(%s[%d]):forceDone() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraMCPipe::setControl(int cid, int value) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->setControl(cid, value); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->setControl failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + CLOGI("INFO(%s[%d]):setControl() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId((enum NODE_TYPE)i)); + return ret; + } + } + + CLOGE("ERR(%s[%d]):All nodes is NULL", __FUNCTION__, __LINE__); + + return INVALID_OPERATION; +} + +status_t ExynosCameraMCPipe::setControl(int cid, int value, enum NODE_TYPE nodeType) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_node[nodeType] == NULL) { + CLOGE("ERR(%s[%d]):m_node[%d] == NULL. 
so, fail", __FUNCTION__, __LINE__, nodeType); + return INVALID_OPERATION; + } + + ret = m_node[nodeType]->setControl(cid, value); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->setControl failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[nodeType], ret); + return ret; + } + CLOGI("INFO(%s[%d]):setControl() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + return ret; +} + +status_t ExynosCameraMCPipe::getControl(int cid, int *value) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + uint32_t nodeCount = 0; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->getControl(cid, value); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->getControl failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + CLOGI("INFO(%s[%d]):getControl() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId((enum NODE_TYPE)i)); + return ret; + } + } + + CLOGE("ERR(%s[%d]):All nodes is NULL", __FUNCTION__, __LINE__); + + return INVALID_OPERATION; +} + +status_t ExynosCameraMCPipe::getControl(int cid, int *value, enum NODE_TYPE nodeType) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_node[nodeType] == NULL) { + CLOGE("ERR(%s[%d]):m_node[%d] == NULL. so, fail", __FUNCTION__, __LINE__, nodeType); + return INVALID_OPERATION; + } + + ret = m_node[nodeType]->getControl(cid, value); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->getControl failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[nodeType], ret); + return ret; + } + CLOGI("INFO(%s[%d]):getControl() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + return ret; +} + +status_t ExynosCameraMCPipe::setExtControl(struct v4l2_ext_controls *ctrl) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->setExtControl(ctrl); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->setControl failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + CLOGI("INFO(%s[%d]):setControl() is succeed, Pipe(%d)", + __FUNCTION__, __LINE__, getPipeId((enum NODE_TYPE)i)); + return ret; + } + } + + CLOGE("ERR(%s[%d]):All nodes is NULL", __FUNCTION__, __LINE__); + + return INVALID_OPERATION; +} + +status_t ExynosCameraMCPipe::setExtControl(struct v4l2_ext_controls *ctrl, enum NODE_TYPE nodeType) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_node[nodeType] == NULL) { + CLOGE("ERR(%s[%d]):m_node[%d] == NULL. 
so, fail", + __FUNCTION__, __LINE__, nodeType); + return INVALID_OPERATION; + } + + ret = m_node[nodeType]->setExtControl(ctrl); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->setControl failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[nodeType], ret); + return ret; + } + CLOGI("INFO(%s[%d]):setControl() is succeed, Pipe(%d)", + __FUNCTION__, __LINE__, getPipeId()); + return ret; +} + +status_t ExynosCameraMCPipe::setParam(struct v4l2_streamparm streamParam) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + uint32_t nodeCount = 0; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->setParam(&streamParam); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node(%s)->setParam failed, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + CLOGI("INFO(%s[%d]):setParam() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId((enum NODE_TYPE)i)); + return ret; + } + } + + CLOGE("ERR(%s[%d]):All nodes is NULL", __FUNCTION__, __LINE__); + + return INVALID_OPERATION; +} + +status_t ExynosCameraMCPipe::pushFrame(ExynosCameraFrame **newFrame) +{ + Mutex::Autolock lock(m_pipeframeLock); + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return BAD_VALUE; + } + + m_inputFrameQ->pushProcessQ(newFrame); + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::instantOn(int32_t numFrames) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + uint32_t nodeCount = 0; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + + if (m_inputFrameQ->getSizeOfProcessQ() != numFrames) { + CLOGE("ERR(%s[%d]):instantOn need %d Frames, but %d Frames are queued", + __FUNCTION__, __LINE__, numFrames, m_inputFrameQ->getSizeOfProcessQ()); + return BAD_VALUE; + } + + for (int i = (OTF_NODE_BASE - 1); i >= OUTPUT_NODE; i--) { + if (m_node[i] != NULL) { + ret = m_node[i]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) instantOn fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + nodeCount++; + } + } + + if (nodeCount == 0) { + CLOGE("ERR(%s[%d]):All nodes is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + for (int i = 0; i < numFrames; i++) { + ret = m_inputFrameQ->popProcessQ(&newFrame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = newFrame->getSrcBuffer(getPipeId(), &newBuffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (m_node[OUTPUT_NODE] != NULL) { + CLOGD("DEBUG(%s[%d]):Put instantOn Buffer (index %d)", __FUNCTION__, __LINE__, newBuffer.index); + + ret = m_node[OUTPUT_NODE]->putBuffer(&newBuffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):putBuffer() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + /* TODO: doing exception handling */ + } + } else { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE] is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + CLOGI("INFO(%s[%d]):instantOn() is succeed, Pipe(%d), Frames(%d)", + __FUNCTION__, __LINE__, getPipeId(), numFrames); + + return ret; +} + +/* Don't use this function, this is regacy code */ +status_t ExynosCameraMCPipe::instantOnQbuf(ExynosCameraFrame **frame, 
BUFFER_POS::POS pos) +{ + if (m_node[OUTPUT_NODE] == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + int ret = 0; + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_mainNode->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + if(pos == BUFFER_POS::DST) + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + else if(pos == BUFFER_POS::SRC) + ret = newFrame->getSrcBuffer(getPipeId(), &newBuffer); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_runningFrameList[OUTPUT_NODE][newBuffer.index] != NULL) { + CLOGE("ERR(%s):new buffer is invalid, we already get buffer index(%d), newFrame->frameCount(%d)", + __FUNCTION__, newBuffer.index, newFrame->getFrameCount()); + return BAD_VALUE; + } + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[newBuffer.planeCount - 1]); + + if (shot_ext != NULL) { + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&newBuffer); + + /* set metadata for instant on */ + shot_ext->shot.ctl.scaler.cropRegion[0] = 0; + shot_ext->shot.ctl.scaler.cropRegion[1] = 0; +#if defined(FASTEN_AE_WIDTH) && defined(FASTEN_AE_HEIGHT) + shot_ext->shot.ctl.scaler.cropRegion[2] = FASTEN_AE_WIDTH; + shot_ext->shot.ctl.scaler.cropRegion[3] = FASTEN_AE_HEIGHT; +#else + int bcropW = 0; + int bcropH = 0; + + shot_ext->shot.ctl.scaler.cropRegion[2] = bcropW; + shot_ext->shot.ctl.scaler.cropRegion[3] = bcropH; +#endif + uint32_t frameRate = 0; + if (m_parameters->getCameraId() == CAMERA_ID_FRONT) { + frameRate = FASTEN_AE_FPS_FRONT; + } else { + frameRate = FASTEN_AE_FPS; + } + setMetaCtlAeTargetFpsRange(shot_ext, frameRate, frameRate); + setMetaCtlSensorFrameDuration(shot_ext, (uint64_t)((1000 * 1000 * 1000) / (uint64_t)frameRate)); + + /* set afMode into INFINITY */ + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL; + shot_ext->shot.ctl.aa.vendor_afmode_option &= (0 << AA_AFMODE_OPTION_BIT_MACRO); + + if (m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + camera2_node_group node_group_info; + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perframeInfoIndex); + + /* Per - Leader */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(m_mainNode->getName(), &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%s) fail", __FUNCTION__, __LINE__, m_mainNode->getName()); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + 
node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - Captures */ + if (CAPTURE_NODE_MAX < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):PipeId(%d) has Invalid perframeSupportNodeNum:CAPTURE_NODE_MAX(%d) < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum(%d), assert!!!!", + __FUNCTION__, __LINE__, getPipeId(), CAPTURE_NODE_MAX, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum); + } + + for (int i = 0; i < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameCaptureInfo[i].perFrameVideoID); + } + } + } + } + ret = m_mainNode->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + return ret; + /* TODO: doing exception handling */ + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + m_runningFrameList[OUTPUT_NODE][newBuffer.index] = newFrame; + + m_numOfRunningFrame[OUTPUT_NODE]++; + + *frame = newFrame; + + return NO_ERROR; +} + +/* Don't use this function, this is regacy code */ +status_t ExynosCameraMCPipe::instantOnDQbuf(ExynosCameraFrame **frame, __unused BUFFER_POS::POS pos) +{ + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. 
so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + + if (m_numOfRunningFrame[OUTPUT_NODE] <= 0 ) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_numOfRunningFrame[OUTPUT_NODE]); + return NO_ERROR; + } + + ret = m_mainNode->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return ret; + } + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d) fail", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + ret = m_updateMetadataToFrame(curBuffer.addr[curBuffer.planeCount - 1], curBuffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + if (curBuffer.index < 0) { + CLOGE("ERR(%s):index(%d) is invalid", __FUNCTION__, curBuffer.index); + return BAD_VALUE; + } + + curFrame = m_runningFrameList[OUTPUT_NODE][curBuffer.index]; + + if (curFrame == NULL) { + CLOGE("ERR(%s):Unknown buffer, frame is NULL", __FUNCTION__); + dump(); + return BAD_VALUE; + } + + *frame = curFrame; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::instantOff(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = OUTPUT_NODE; i < OTF_NODE_BASE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) stop fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + ret = m_node[i]->clrBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) clrBuffers fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + } + } + + CLOGI("INFO(%s[%d]):instantOff() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +/* Don't use this function, this is regacy code */ +status_t ExynosCameraMCPipe::instantOnPushFrameQ(BUFFERQ_TYPE::TYPE type, ExynosCameraFrame **frame) +{ + if( type == BUFFERQ_TYPE::OUTPUT ) + m_outputFrameQ->pushProcessQ(frame); + else + m_inputFrameQ->pushProcessQ(frame); + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + int planeCount = 0; + + if (pipePosition == DST_PIPE) { + if (m_node[OUTPUT_NODE] == NULL) { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE] is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = m_node[OUTPUT_NODE]->getSize(fullW, fullH); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) getSize fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[OUTPUT_NODE], ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->getColorFormat(colorFormat, &planeCount); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) getColorFormat fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[OUTPUT_NODE], ret); + return ret; + } + } else if (pipePosition == SRC_PIPE) { + for (int i = (OTF_NODE_BASE - 1); i > OUTPUT_NODE; i--) { + if (m_node[i] != NULL) { + ret = m_node[i]->getSize(fullW, fullH); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) getSize fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + ret = 
m_node[i]->getColorFormat(colorFormat, &planeCount); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s) getColorFormat fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + CLOGV("INFO(%s[%d]):getPipeInfo() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId((enum NODE_TYPE)i)); + return ret; + } + } + CLOGE("ERR(%s[%d]):all capture m_node is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } else { + CLOGE("ERR(%s[%d]):Pipe position is Invalid, position(%d)", __FUNCTION__, __LINE__, pipePosition); + return BAD_VALUE; + } + + CLOGV("INFO(%s[%d]):getPipeInfo() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + return ret; +} + +int ExynosCameraMCPipe::getCameraId(void) +{ + return this->m_cameraId; +} + +status_t ExynosCameraMCPipe::setPipeId(uint32_t id) +{ + return this->setPipeId(OUTPUT_NODE, id); +} + +uint32_t ExynosCameraMCPipe::getPipeId(void) +{ + return (uint32_t)this->getPipeId(OUTPUT_NODE); +} + +status_t ExynosCameraMCPipe::setPipeId(enum NODE_TYPE nodeType, uint32_t id) +{ + if (nodeType < OUTPUT_NODE || MAX_NODE <= nodeType) { + CLOGE("ERR(%s[%d]):Invalid nodeType(%d). so, fail", __FUNCTION__, __LINE__, nodeType); + return BAD_VALUE; + } + + CLOGD("DEBUG(%s[%d]):nodeType(%d), id(%d)", __FUNCTION__, __LINE__, nodeType, id); + + m_pipeIdArr[nodeType] = id; + + if (nodeType == OUTPUT_NODE) + m_pipeId = id; + + return NO_ERROR; +} + +int ExynosCameraMCPipe::getPipeId(enum NODE_TYPE nodeType) +{ + if (nodeType < OUTPUT_NODE || MAX_NODE <= nodeType) { + CLOGE("ERR(%s[%d]):Invalid nodeType(%d). so, fail", __FUNCTION__, __LINE__, nodeType); + return -1; + } + + return m_pipeIdArr[nodeType]; +} + + +status_t ExynosCameraMCPipe::setPipeName(const char *pipeName) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + strncpy(m_name, pipeName, (EXYNOS_CAMERA_NAME_STR_SIZE - 1)); + + return NO_ERROR; +} + +char *ExynosCameraMCPipe::getPipeName(void) +{ + return m_name; +} + +status_t ExynosCameraMCPipe::setBufferManager(ExynosCameraBufferManager **bufferManager) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) + m_bufferManager[i] = bufferManager[i]; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::clearInputFrameQ(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_inputFrameQ != NULL) + m_inputFrameQ->release(); + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getInputFrameQ(frame_queue_t **inputFrameQ) +{ + *inputFrameQ = m_inputFrameQ; + + if (*inputFrameQ == NULL) + CLOGE("ERR(%s[%d])inputFrameQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::setOutputFrameQ(frame_queue_t *outputFrameQ) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_outputFrameQ = outputFrameQ; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getOutputFrameQ(frame_queue_t **outputFrameQ) +{ + *outputFrameQ = m_outputFrameQ; + + if (*outputFrameQ == NULL) + CLOGE("ERR(%s[%d]):outputFrameQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::setFrameDoneQ(frame_queue_t *frameDoneQ) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_frameDoneQ = frameDoneQ; + m_flagFrameDoneQ = true; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getFrameDoneQ(frame_queue_t **frameDoneQ) +{ + *frameDoneQ = m_frameDoneQ; + + if (*frameDoneQ == NULL) + CLOGE("ERR(%s[%d]):frameDoneQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t 
ExynosCameraMCPipe::setNodeInfos(camera_node_objects_t *nodeObjects, bool flagReset) +{ + CLOGD("DEBUG(%s[%d]):setNodeInfos flagReset(%s)", + __FUNCTION__, __LINE__, (flagReset) ? "True" : "False"); + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + m_node[i] = nodeObjects->node[i]; + m_secondaryNode[i] = nodeObjects->secondaryNode[i]; + + if (flagReset == true) { + if (m_node[i] != NULL) + m_node[i]->resetInput(); + + if (m_secondaryNode[i] != NULL) + m_secondaryNode[i]->resetInput(); + } + } + + if (flagReset == true) { + m_frameDoneQ = NULL; + m_flagFrameDoneQ = false; + m_outputFrameQ = NULL; + } + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getNodeInfos(camera_node_objects_t *nodeObjects) +{ + CLOGD("DEBUG(%s[%d]):getNodeInfos", __FUNCTION__, __LINE__); + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + nodeObjects->node[i] = m_node[i]; + nodeObjects->secondaryNode[i] = m_secondaryNode[i]; + } + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::setMapBuffer(__unused ExynosCameraBuffer *srcBuf, __unused ExynosCameraBuffer *dstBuf) +{ + status_t ret = NO_ERROR; + + if (m_node[OUTPUT_NODE] != NULL + && m_bufferManager[OUTPUT_NODE] != NULL) + ret |= m_setMapBuffer(OUTPUT_NODE); + + for (int i = CAPTURE_NODE; i < OTF_NODE_BASE; i++) { + if (m_deviceInfo->connectionMode[i] == HW_CONNECTION_MODE_M2M_BUFFER_HIDING + && m_node[i] != NULL + && m_bufferManager[i] != NULL) + ret |= m_setMapBuffer(i); + } + + return ret; +} + +status_t ExynosCameraMCPipe::setBoosting(bool isBoosting) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_isBoosting = isBoosting; + + return NO_ERROR; +} + +bool ExynosCameraMCPipe::isThreadRunning(void) +{ + if (m_putBufferThread->isRunning() || m_getBufferThread->isRunning()) + return true; + + return false; +} + +status_t ExynosCameraMCPipe::getThreadState(int **threadState) +{ + *threadState = &m_threadState; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getThreadInterval(uint64_t **timeInterval) +{ + *timeInterval = &m_timeInterval; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::getThreadRenew(int **timeRenew) +{ + *timeRenew = &m_threadRenew; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::incThreadRenew(void) +{ + m_threadRenew ++; + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::setStopFlag(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_flagTryStop = true; + + return NO_ERROR; +} + +int ExynosCameraMCPipe::getRunningFrameCount(void) +{ + int runningFrameCount = 0; + + for (uint32_t i = 0; i < m_numBuffers[OUTPUT_NODE]; i++) { + if (m_runningFrameList[OUTPUT_NODE][i] != NULL) { + runningFrameCount++; + } + } + + return runningFrameCount; +} + +#ifdef USE_MCPIPE_SERIALIZATION_MODE +void ExynosCameraMCPipe::needSerialization(bool enable) +{ + CLOGI("INFO(%s[%d]):%s serialized operation %s", + __FUNCTION__, __LINE__, m_name, + (enable == true)? 
"enabled" : "disabled"); + + m_serializeOperation = enable; +} +#endif + +void ExynosCameraMCPipe::dump(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_dumpRunningFrameList(); + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + m_node[i]->dump(); + } + } + + return; +} + +status_t ExynosCameraMCPipe::dumpFimcIsInfo(bool bugOn) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_node[OUTPUT_NODE]->setControl(V4L2_CID_IS_DEBUG_DUMP, bugOn); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->setControl failed", __FUNCTION__, __LINE__); + + return ret; +} + +//#ifdef MONITOR_LOG_SYNC +status_t ExynosCameraMCPipe::syncLog(uint32_t syncId) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_node[OUTPUT_NODE]->setControl(V4L2_CID_IS_DEBUG_SYNC_LOG, syncId); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->setControl failed", __FUNCTION__, __LINE__); + + return ret; +} +//#endif + +status_t ExynosCameraMCPipe::m_preCreate(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ExynosCameraNode *jpegNode = NULL; + + /* Create & open output/capture nodes */ + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_flagValidInt(m_deviceInfo->nodeNum[i]) == true) { + if ((m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_HWFC_JPEG_NUM + || m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_HWFC_THUMB_NUM) + && jpegNode != NULL) { + enum EXYNOS_CAMERA_NODE_JPEG_HAL_LOCATION location = NODE_LOCATION_DST; + + if (m_deviceInfo->pipeId[i] == PIPE_HWFC_JPEG_SRC_REPROCESSING + || m_deviceInfo->pipeId[i] == PIPE_HWFC_THUMB_SRC_REPROCESSING) + location = NODE_LOCATION_SRC; + + /* JpegHAL Destinaion node case */ + m_node[i] = (ExynosCameraNode*)new ExynosCameraNodeJpegHAL(); + + ExynosJpegEncoderForCamera *jpegEncoder = NULL; + ret = jpegNode->getJpegEncoder(&jpegEncoder); + if (ret != NO_ERROR) { + CLOGE("ERR(%s):jpegNode->gejpegEncoder failed", __FUNCTION__); + return ret; + } + + ret = m_node[i]->create(m_deviceInfo->nodeName[i], m_cameraId, location, jpegEncoder); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Create node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + ret = m_node[i]->open(m_deviceInfo->nodeNum[i], m_parameters->isUseThumbnailHWFC()); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Open node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + CLOGD("DEBUG(%s[%d]):JpegHAL Node(%d) opened", + __FUNCTION__, __LINE__, m_deviceInfo->nodeNum[i]); + } else if (i > OUTPUT_NODE + && m_node[OUTPUT_NODE] != NULL + && m_deviceInfo->nodeNum[i] == m_deviceInfo->nodeNum[OUTPUT_NODE]) { + + /* W/A for under Helsinki Prime (same node number) */ + m_node[i] = new ExynosCameraNode(); + + int fd = -1; + ret = m_node[OUTPUT_NODE]->getFd(&fd); + if (ret != NO_ERROR || m_flagValidInt(fd) == false) { + CLOGE("ERR(%s):OUTPUT_NODE->getFd failed", __FUNCTION__); + return ret; + } + + ret = m_node[i]->create(m_deviceInfo->nodeName[i], m_cameraId, fd); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Create node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + CLOGD("DEBUG(%s[%d]):Node(%d) opened, fd(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeNum[i], fd); + } else { + if (m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_HWFC_JPEG_NUM + || m_deviceInfo->nodeNum[i] == 
FIMC_IS_VIDEO_HWFC_THUMB_NUM) { + /* JpegHAL Node case */ + m_node[i] = new ExynosCameraNodeJpegHAL(); + jpegNode = m_node[i]; + } else { + /* Normal case */ + m_node[i] = new ExynosCameraNode(); + } + + ret = m_node[i]->create(m_deviceInfo->nodeName[i], m_cameraId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Create node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + ret = m_node[i]->open(m_deviceInfo->nodeNum[i]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Open node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + CLOGV("DEBUG(%s[%d]):Node(%d) opened", + __FUNCTION__, __LINE__, m_deviceInfo->nodeNum[i]); + } + } + } + + /* Create & open OTF nodes */ + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_flagValidInt(m_deviceInfo->secondaryNodeNum[i]) == true) { + m_secondaryNode[i] = new ExynosCameraNode(); + + ret = m_secondaryNode[i]->create(m_deviceInfo->secondaryNodeName[i], m_cameraId); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Create node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->secondaryNodeName[i], ret); + return ret; + } + + ret = m_secondaryNode[i]->open(m_deviceInfo->secondaryNodeNum[i]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Open node fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->secondaryNodeName[i], ret); + return ret; + } + + CLOGD("DEBUG(%s[%d]):Node(%s) opened", __FUNCTION__, __LINE__, m_deviceInfo->secondaryNodeName[i]); + } + } + + m_putBufferThread = new MCPipeThread(this, + &ExynosCameraMCPipe::m_putBufferThreadFunc, "putBufferThread", PRIORITY_URGENT_DISPLAY); + m_getBufferThread = new MCPipeThread(this, + &ExynosCameraMCPipe::m_getBufferThreadFunc, "getBufferThread", PRIORITY_URGENT_DISPLAY); + + if (m_reprocessing == true) { + m_inputFrameQ = new frame_queue_t(m_putBufferThread); + m_requestFrameQ = new frame_queue_t(m_getBufferThread); + } else { + m_inputFrameQ = new frame_queue_t; + m_requestFrameQ = new frame_queue_t; + } + + /* Set wait time 0.55 sec. 
Because, it support 2fps */ + m_inputFrameQ->setWaitTime(550000000); /* .55 sec */ + m_requestFrameQ->setWaitTime(550000000); /* .55 sec */ + + CLOGI("INFO(%s[%d]):m_preCreate() is succeed, Pipe(%d), prepare(%d)", + __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return ret; +} + +status_t ExynosCameraMCPipe::m_postCreate(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (sensorIds != NULL) { + CLOGD("DEBUG(%s[%d]):Set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + ret = m_setInput(m_node, m_deviceInfo->nodeNum, m_sensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setInput(Main) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_setInput(m_secondaryNode, m_deviceInfo->secondaryNodeNum, m_secondarySensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setInput(secondary) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + CLOGI("INFO(%s[%d]):m_postCreate() is succeed, Pipe(%d)", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +bool ExynosCameraMCPipe::m_putBufferThreadFunc(void) +{ + status_t ret = NO_ERROR; + +#ifdef TEST_WATCHDOG_THREAD + testErrorDetect++; + if (testErrorDetect == 100) + m_threadState = ERROR_POLLING_DETECTED; +#endif + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_putBuffer(); + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):m_putbuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + return m_checkThreadLoop(m_inputFrameQ); +} + +bool ExynosCameraMCPipe::m_getBufferThreadFunc(void) +{ + status_t ret = NO_ERROR; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_getBuffer(); + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):m_getBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + m_timer.stop(); + m_timeInterval = m_timer.durationMsecs(); + m_timer.start(); + + /* update renew count */ + if (ret >= 0) + m_threadRenew = 0; + + return m_checkThreadLoop(m_requestFrameQ); +} + +status_t ExynosCameraMCPipe::m_putBuffer(void) +{ + CLOGV("DEBUG(%s[%d]):-IN-", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer buffer[OTF_NODE_BASE]; + ExynosCameraFrameEntity *entity = NULL; + int pipeId = 0; + int bufferIndex[OTF_NODE_BASE]; + for (int i = OUTPUT_NODE; i < OTF_NODE_BASE; i++) + bufferIndex[i] = -2; + uint32_t captureNodeCount = 0; + + /* 1. 
Pop from input frame queue */ + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret == TIMED_OUT) { + CLOGW("WARN(%s[%d]):inputFrameQ wait timeout", __FUNCTION__, __LINE__); + return ret; + } else if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):inputFrameQ wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):New frame is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (newFrame->getFrameState() == FRAME_STATE_SKIPPED + || newFrame->getFrameState() == FRAME_STATE_INVALID) { + if (newFrame->getFrameType() == FRAME_TYPE_INTERNAL) { + CLOGI("INFO(%s[%d]):Internal Frame(%d), frameCount(%d) (%d)", + __FUNCTION__, __LINE__, newFrame->getFrameType(), newFrame->getFrameCount()); + } else { + CLOGE("ERR(%s[%d]):New frame is INVALID, frameCount(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount()); + } + goto CLEAN_FRAME; + } + +#ifdef USE_MCPIPE_SERIALIZATION_MODE + if (m_serializeOperation == true) { + CLOGD("DEBUG(%s[%d]):%s Critical Section WAIT", + __FUNCTION__, __LINE__, m_name); + ExynosCameraMCPipe::g_serializationLock.lock(); + CLOGD("DEBUG(%s[%d]):%s Critical Section START", + __FUNCTION__, __LINE__, m_name); + } +#endif + + for (int i = (OTF_NODE_BASE - 1); i > OUTPUT_NODE; i--) { + if (m_node[i] == NULL) + continue; + + pipeId = getPipeId((enum NODE_TYPE)i); + if (pipeId < 0) { + CLOGE("ERR(%s[%d]):getPipeId(%d) fail", __FUNCTION__, __LINE__, i); + return BAD_VALUE; + } + + /* 2. Get capture node buffer(DstBuffer) from buffer manager */ + if (m_node[i] != NULL + && newFrame->getRequest(pipeId) == true + && m_skipPutBuffer[i] == false) { + if (m_bufferManager[i] == NULL) { + CLOGE("ERR(%s[%d]):Buffer manager is NULL, i(%d), piepId(%d), frameCount(%d)", + __FUNCTION__, __LINE__, i, pipeId, newFrame->getFrameCount()); + continue; + } + + ret = newFrame->getDstBuffer(getPipeId(), &buffer[i], i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail. pipeId(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, newFrame->getFrameCount(), ret); + continue; + } + + if (buffer[i].index < 0) { + ret = m_bufferManager[i]->getBuffer(&(bufferIndex[i]), EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &(buffer[i])); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager getBuffer fail, manager(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, i, newFrame->getFrameCount(), ret); + + newFrame->dump(); + + newFrame->setRequest(pipeId, false); + /* m_bufferManager[i]->dump(); */ + continue; + } + } else { + CLOGD("DEBUG(%s[%d]):Skip to get buffer from bufferMgr.\ + pipeId(%d), frameCount(%d) bufferIndex %d)", + __FUNCTION__, __LINE__, + pipeId, newFrame->getFrameCount(), buffer[i].index); + bufferIndex[i] = buffer[i].index; + } + + if (bufferIndex[i] < 0 + || m_runningFrameList[i][(bufferIndex[i])] != NULL) { + CLOGE("ERR(%s[%d]):New buffer is invalid, we already get buffer, index(%d), frameCount(%d)", + __FUNCTION__, __LINE__, bufferIndex[i], newFrame->getFrameCount()); + newFrame->setRequest(pipeId, false); + /* dump(); */ + continue; + } + + /* 3. 
Put capture buffer(DstBuffer) to node */ + if (bufferIndex[i] >= 0 + && newFrame->getRequest(pipeId) == true) { + /* Set JPEG node's perframe information only for HWFC*/ + if (m_reprocessing == true + && (m_deviceInfo->pipeId[i] == PIPE_HWFC_JPEG_SRC_REPROCESSING + || m_deviceInfo->pipeId[i] == PIPE_HWFC_JPEG_DST_REPROCESSING + || m_deviceInfo->pipeId[i] == PIPE_HWFC_THUMB_SRC_REPROCESSING)) { + + camera2_shot_ext *shot_ext = NULL; + shot_ext = (struct camera2_shot_ext *)(buffer[i].addr[buffer[i].planeCount-1]); + newFrame->getMetaData(shot_ext); + + ret = m_setJpegInfo(i, &(buffer[i])); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setJpegInfo, pipeId %s buffer.index %d", + __FUNCTION__, __LINE__, + (m_deviceInfo->pipeId[i] == PIPE_HWFC_JPEG_SRC_REPROCESSING)? + "PIPE_HWFC_JPEG_SRC_REPROCESSING":"PIPE_HWFC_THUMB_SRC_REPROCESSING", + buffer[i].index); + continue; + } + } + + ret = m_node[i]->putBuffer(&(buffer[i])); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->putBuffer() fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], newFrame->getFrameCount(), ret); + + /* TODO: doing exception handling */ + ret = m_bufferManager[i]->putBuffer(bufferIndex[i], EXYNOS_CAMERA_BUFFER_POSITION_NONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager putBuffer fail, manager(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, i, newFrame->getFrameCount(), ret); + } + + newFrame->setRequest(pipeId, false); + } else { + m_skipPutBuffer[i] = true; + m_skipBuffer[i] = buffer[i]; + } + } + } else if (m_skipPutBuffer[i] == true) { + CLOGD("DEBUG(%s[%d]):%s:Skip putBuffer. framecount %d bufferIndex %d", + __FUNCTION__, __LINE__, + m_deviceInfo->nodeName[i], newFrame->getFrameCount(), m_skipBuffer[i].index); + } + + if (m_skipPutBuffer[i] == true) + captureNodeCount++; + } + + if (captureNodeCount == 0) { + CLOGW("WRN(%s[%d]):Capture node putbuffer is Zero, frameCount(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount()); + /* Comment out: 3AA and ISP must running, because it save stat and refered next frame. + * So, put SRC buffer to output node when DST buffers all empty(zero). + */ + /* goto CLEAN_FRAME; */ + } + + /* 4. Get output node(SrcBuffer) buffer from frame */ + if (m_node[OUTPUT_NODE] != NULL) { + ret = newFrame->getSrcBuffer(getPipeId(OUTPUT_NODE), &(buffer[OUTPUT_NODE])); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame get src buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + goto CLEAN_FRAME; + } + + if (m_runningFrameList[OUTPUT_NODE][(buffer[OUTPUT_NODE].index)] != NULL) { + if ( (m_parameters->isReprocessing() == true) + && (m_parameters->getUsePureBayerReprocessing() == false) /* if Dirty bayer reprocessing */ + && (newFrame->getFrameType() == FRAME_TYPE_INTERNAL) + && (getPipeId(OUTPUT_NODE) == PIPE_ISP) ) { /* if internal frame at ISP pipe */ + /* In dirty bayer mode, Internal frame would not have valid + output buffer on ISP pipe. So suppress error message */ + CLOGI("INFO(%s[%d]):Internal frame will raises an error here, but it's normal operation, index(%d), frameCount(%d)", + __FUNCTION__, __LINE__, buffer[OUTPUT_NODE].index, newFrame->getFrameCount()); + } else { + CLOGE("ERR(%s[%d]):New buffer is invalid, we already get buffer, index(%d), frameCount(%d)", + __FUNCTION__, __LINE__, buffer[OUTPUT_NODE].index, newFrame->getFrameCount()); + } + /* dump(); */ + goto CLEAN_FRAME; + } + + /* 5. 
Update control metadata for request, Zoom, ... */ + if (useSizeControlApi() == true) + ret = m_updateMetadataFromFrame_v2(newFrame, &(buffer[OUTPUT_NODE])); + else + ret = m_updateMetadataFromFrame(newFrame, &(buffer[OUTPUT_NODE])); + if (ret != NO_ERROR) { + CLOGW("WARN(%s[%d]):Update metadata fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + + /* 6. Put output buffer(SrcBuffer) to node */ + ret = m_node[OUTPUT_NODE]->putBuffer(&(buffer[OUTPUT_NODE])); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->putBuffer() fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[OUTPUT_NODE], newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + goto CLEAN_FRAME; + } + + ret = newFrame->setSrcBufferState(getPipeId(OUTPUT_NODE), ENTITY_BUFFER_STATE_PROCESSING); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBuffer state fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + + m_runningFrameList[OUTPUT_NODE][(buffer[OUTPUT_NODE].index)] = newFrame; + m_numOfRunningFrame[OUTPUT_NODE]++; + + /* 7. Link capture node buffer(DstBuffer) to frame */ + for (int i = (OTF_NODE_BASE - 1); i > OUTPUT_NODE; i--) { + if (m_node[i] == NULL) + continue; + + pipeId = getPipeId((enum NODE_TYPE)i); + if (pipeId < 0) { + CLOGE("ERR(%s[%d]):getPipeId(%d) fail", __FUNCTION__, __LINE__, i); + return BAD_VALUE; + } + + + if (m_node[i] != NULL + && newFrame->getRequest(pipeId) == true) { + /* HACK: Should change ExynosCamera, Frame */ + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED, i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer state fail, pipeID(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, newFrame->getFrameCount(), ret); + } + + if (m_skipPutBuffer[i] == true) { + buffer[i] = m_skipBuffer[i]; + bufferIndex[i] = buffer[i].index; + } + + ret = newFrame->setDstBuffer(getPipeId(), buffer[i], i, INDEX(pipeId)); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame set dst buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + if (m_bufferManager[i] != NULL) + ret = m_bufferManager[i]->putBuffer(buffer[i].index, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager putBuffer fail, manager(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, i, newFrame->getFrameCount(), ret); + } + + newFrame->setRequest(pipeId, false); + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING, i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer state fail, pipeID(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), newFrame->getFrameCount(), ret); + } + + m_runningFrameList[i][(bufferIndex[i])] = newFrame; + m_numOfRunningFrame[i]++; + m_skipPutBuffer[i] = false; + m_skipBuffer[i].index = -2; + } + } + } + + /* 8. 
Push frame to getBufferThread */ + m_requestFrameQ->pushProcessQ(&newFrame); + + CLOGV("DEBUG(%s[%d]):OUT-", __FUNCTION__, __LINE__); + return NO_ERROR; + + /* Error handling for SrcBuffer and Frame */ +CLEAN_FRAME: + CLOGD("DEBUG(%s[%d]):clean frame, frameCount(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount()); + +#ifdef USE_MCPIPE_SERIALIZATION_MODE + if (m_serializeOperation == true) { + ExynosCameraMCPipe::g_serializationLock.unlock(); + CLOGD("DEBUG(%s[%d]):%s Critical Section END", + __FUNCTION__, __LINE__, m_name); + } +#endif + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBuffer state fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer state fail, pipeID(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), newFrame->getFrameCount(), ret); + } + + for (int i = (OTF_NODE_BASE - 1); i > OUTPUT_NODE; i--) { + if (m_node[i] != NULL + && newFrame->getRequest(getPipeId((enum NODE_TYPE)i)) == true) + newFrame->setRequest(getPipeId((enum NODE_TYPE)i), false); + } + + if (newFrame->getFrameState() != FRAME_STATE_SKIPPED + && newFrame->getFrameState() != FRAME_STATE_INVALID) { + newFrame->setFrameState(FRAME_STATE_SKIPPED); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setFrameState fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + } + + ret = m_completeFrame(newFrame, false); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Complete frame fail, frameCount(%d), ret(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + } + + if (m_frameDoneQ != NULL && m_flagFrameDoneQ == true) + m_frameDoneQ->pushProcessQ(&newFrame); + + m_outputFrameQ->pushProcessQ(&newFrame); + + return INVALID_OPERATION; +} + +status_t ExynosCameraMCPipe::m_getBuffer(void) +{ + CLOGV("DEBUG(%s[%d]):-IN-", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + status_t nodeDqRet[OTF_NODE_BASE]; + status_t checkRet = NO_ERROR; + + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer buffer[OTF_NODE_BASE]; + int pipeId = 0; + int v4l2Colorformat = 0; + int planeCount[OTF_NODE_BASE] = {0}; + int bufferIndex[OTF_NODE_BASE]; + for (int i = OUTPUT_NODE; i < OTF_NODE_BASE; i++) { + bufferIndex[i] = -2; + nodeDqRet[i] = NO_ERROR; + } + uint32_t captureNodeCount = 0; + uint32_t checkPollingCount = 0; + + /* 1. Pop from request frame queue */ + ret = m_requestFrameQ->waitAndPopProcessQ(&newFrame); + if (ret == TIMED_OUT) { + CLOGW("WARN(%s[%d]):requestFrameQ wait timeout", __FUNCTION__, __LINE__); + return ret; + } else if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):requestFrameQ wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):New frame is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* 2. 
Get output buffer(SrcBuffer) from node */ + if (m_node[OUTPUT_NODE] != NULL) { + ret = m_node[OUTPUT_NODE]->getBuffer(&(buffer[OUTPUT_NODE]), &(bufferIndex[OUTPUT_NODE])); + nodeDqRet[OUTPUT_NODE] = ret; + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->getBuffer() fail, index(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[OUTPUT_NODE], + bufferIndex[OUTPUT_NODE], newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + /* Comment out : dqblock case was disappeared */ + /* + for (int i = (MAX_NODE - 1); i > OUTPUT_NODE; i--) { + if (newFrame->getRequest(getPipeId() + i) == true) + m_skipPutBuffer[i] = true; + } + + ret = m_completeFrame(newFrame, false); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + goto CLEAN; + */ + + if (bufferIndex[OUTPUT_NODE] >= 0) { + newFrame = m_runningFrameList[OUTPUT_NODE][bufferIndex[OUTPUT_NODE]]; + } else { + ret = newFrame->getSrcBuffer(getPipeId(), &buffer[OUTPUT_NODE]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame get buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } else { + buffer[OUTPUT_NODE].index = -2; + } + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):Invalid DQ buffer index(%d)", __FUNCTION__, __LINE__, bufferIndex[OUTPUT_NODE]); + ret = BAD_VALUE; + goto EXIT; + } + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBuffer state fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + } else { + if (bufferIndex[OUTPUT_NODE] < 0) { + CLOGE("ERR(%s[%d]):Invalid DQ buffer index(%d)", __FUNCTION__, __LINE__, bufferIndex[OUTPUT_NODE]); + ret = BAD_VALUE; + goto EXIT; + } + + /* + prevent the frame null pointer exception, get the frame from m_runningFrameList + 1. in case of NDONE, dq order was reversed. + 2. always use the m_runningFrameList instead of m_requestFrameQ + */ + newFrame = m_runningFrameList[OUTPUT_NODE][bufferIndex[OUTPUT_NODE]]; + ret = newFrame->getSrcBuffer(getPipeId(), &buffer[OUTPUT_NODE]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame get buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } else { + if (bufferIndex[OUTPUT_NODE] != buffer[OUTPUT_NODE].index) { + ret = newFrame->getSrcBuffer(getPipeId(), &buffer[OUTPUT_NODE]); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame get buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + } + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBuffer state fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + + if (m_reprocessing == false) + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&buffer[OUTPUT_NODE]); + } + } + + if (bufferIndex[OUTPUT_NODE] >= 0) { + /* 3. Update frame from dynamic metadata of output node buffer(SrcBuffer) for request, ... 
*/ + ret = m_updateMetadataToFrame(buffer[OUTPUT_NODE].addr[buffer[OUTPUT_NODE].planeCount - 1], buffer[OUTPUT_NODE].index, newFrame, OUTPUT_NODE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Update metadata fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } + + m_runningFrameList[OUTPUT_NODE][bufferIndex[OUTPUT_NODE]] = NULL; + m_numOfRunningFrame[OUTPUT_NODE]--; + } + } + + if (m_parameters->isUseEarlyFrameReturn() == true + && m_reprocessing == false + && m_frameDoneQ != NULL\ + && m_flagFrameDoneQ == true) { + newFrame->incRef(); + m_frameDoneQ->pushProcessQ(&newFrame); + } + + /* 4. Get capture buffer(DstBuffer) from node */ + for (int i = (OTF_NODE_BASE - 1); i > OUTPUT_NODE; i--) { + ret = NO_ERROR; + + if (m_node[i] == NULL) + continue; + + pipeId = getPipeId((enum NODE_TYPE)i); + if (pipeId < 0) { + CLOGE("ERR(%s[%d]):getPipeId(%d) fail", __FUNCTION__, __LINE__, i); + ret = BAD_VALUE; + goto EXIT; + } + + if (m_node[i] != NULL + && newFrame->getRequest(pipeId) == true) { +#ifndef SKIP_SCHECK_POLLING + if (pipeId == PIPE_SCP && checkPollingCount == 0) + ret = m_checkPolling(m_node[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_checkPolling fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + // HACK: for panorama shot + //return false; + } + checkPollingCount++; +#endif + ret = m_node[i]->getBuffer(&(buffer[i]), &(bufferIndex[i])); + nodeDqRet[i] = ret; + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->getBuffer() fail, index(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], + bufferIndex[i], newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + + if (bufferIndex[i] >= 0) { + newFrame = m_runningFrameList[i][bufferIndex[i]]; + } else { + ret = newFrame->getDstBuffer(getPipeId(), &buffer[i], i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame get buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + } else { + bufferIndex[i] = buffer[i].index = -2; + } + newFrame->setRequest(pipeId, false); + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):Invalid DQ buffer index(%d)", __FUNCTION__, __LINE__, bufferIndex[i]); + ret = BAD_VALUE; + goto EXIT; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR, i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer state fail, pipeID(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), newFrame->getFrameCount(), ret); + } + } + + if (bufferIndex[i] >= 0) { + m_runningFrameList[i][bufferIndex[i]] = NULL; + m_numOfRunningFrame[i]--; + } + + /* 5. 
Link capture node buffer(DstBuffer) to frame */ + if (bufferIndex[i] >= 0 && nodeDqRet[i] == NO_ERROR) { + if (bufferIndex[i] != buffer[i].index) + newFrame = m_runningFrameList[i][bufferIndex[i]]; + + /* HACK: Should change ExynosCamera, Frame */ + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED, i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer state fail, pipeID(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, pipeId, newFrame->getFrameCount(), ret); + } + + ret = newFrame->setDstBuffer(getPipeId(), buffer[i], i, INDEX(pipeId)); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Frame set dst buffer fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + if (m_bufferManager[i] != NULL) + ret = m_bufferManager[i]->putBuffer(buffer[i].index, EXYNOS_CAMERA_BUFFER_POSITION_NONE); + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager putBuffer fail, manager(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, i, newFrame->getFrameCount(), ret); + } + + newFrame->setRequest(pipeId, false); + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE, i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer state fail, pipeID(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), newFrame->getFrameCount(), ret); + } + + captureNodeCount++; + } + + /* 6. Update metadata of capture node buffer(DstBuffer) from output node buffer(SrcBuffer) */ + if (m_node[OUTPUT_NODE] != NULL + && m_deviceInfo->nodeNum[i] != FIMC_IS_VIDEO_HWFC_JPEG_NUM + && m_deviceInfo->nodeNum[i] != FIMC_IS_VIDEO_HWFC_THUMB_NUM) { + m_node[OUTPUT_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[OUTPUT_NODE]); + m_node[i]->getColorFormat(&v4l2Colorformat, &planeCount[i]); + + camera2_shot_ext *shot_ext_src = (struct camera2_shot_ext *)(buffer[OUTPUT_NODE].addr[(planeCount[OUTPUT_NODE] - 1)]); + camera2_shot_ext *shot_ext_dst = (struct camera2_shot_ext *)(buffer[i].addr[(planeCount[i] - 1)]); + if (shot_ext_src != NULL && shot_ext_dst != NULL) { + memcpy(&shot_ext_dst->shot.ctl, &shot_ext_src->shot.ctl, sizeof(struct camera2_ctl) - sizeof(struct camera2_entry_ctl)); + memcpy(&shot_ext_dst->shot.udm, &shot_ext_src->shot.udm, sizeof(struct camera2_udm)); + memcpy(&shot_ext_dst->shot.dm, &shot_ext_src->shot.dm, sizeof(struct camera2_dm)); + + shot_ext_dst->setfile = shot_ext_src->setfile; + shot_ext_dst->drc_bypass = shot_ext_src->drc_bypass; + shot_ext_dst->dis_bypass = shot_ext_src->dis_bypass; + shot_ext_dst->dnr_bypass = shot_ext_src->dnr_bypass; + shot_ext_dst->fd_bypass = shot_ext_src->fd_bypass; + shot_ext_dst->shot.dm.request.frameCount = shot_ext_src->shot.dm.request.frameCount; + shot_ext_dst->shot.magicNumber= shot_ext_src->shot.magicNumber; + } else { + CLOGE("ERR(%s[%d]):metadata address fail, frameCount(%d) shot_ext src(%p) dst(%p) ", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), shot_ext_src, shot_ext_dst); + } + /* Comment out : It was not useful for metadate fully update, I want know reasons for the existence. */ + /* memcpy(buffer[i].addr[(planeCount[i] - 1)], buffer[OUTPUT_NODE].addr[(planeCount[OUTPUT_NODE] - 1)], sizeof(struct camera2_shot_ext)); */ + } + } + } + + /* + * skip condition : + * 1 : all capture nodes are not valid. + * 2 : one of capture nodes is not valid. 
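+ * In both cases the frame is marked FRAME_STATE_SKIPPED below and ENTITY_BUFFER_STATE_ERROR is set on the source buffer and on every requested capture (destination) buffer.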
+ */ + for (int i = OUTPUT_NODE; i < OTF_NODE_BASE; i++) { + checkRet |= nodeDqRet[i]; + } + + if (captureNodeCount == 0 || checkRet != NO_ERROR) { + if (newFrame->getFrameType() == FRAME_TYPE_INTERNAL) { + CLOGI("INFO(%s[%d]):InternalFrame(%d) frameCount(%d)\ + : captureNodeCount == %d || checkRet(%d) != NO_ERROR.\ + so, setFrameState(FRAME_STATE_SKIPPED)", + __FUNCTION__, __LINE__, newFrame->getFrameType(), newFrame->getFrameCount(), captureNodeCount, checkRet); + } else { + CLOGE("ERR(%s[%d]):frameCount(%d)\ + : captureNodeCount == %d || checkRet(%d) != NO_ERROR.\ + so, setFrameState(FRAME_STATE_SKIPPED)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), captureNodeCount, checkRet); + } + + /* set err on frame */ + newFrame->setFrameState(FRAME_STATE_SKIPPED); + + /* set err on src */ + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBufferState(%d, ENTITY_BUFFER_STATE_ERROR) fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), newFrame->getFrameCount(), ret); + } + + /* set err on dst */ + for (int i = OUTPUT_NODE + 1; i < OTF_NODE_BASE; i++) { + int dstPipeId = getPipeId((enum NODE_TYPE)i); + + if (dstPipeId < 0) + continue; + + if (newFrame->getRequest(dstPipeId) == true) { + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR, i); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBufferState(Pipe ID(%d),\ + ENTITY_BUFFER_STATE_ERROR, %d) fail, \ + frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), i, newFrame->getFrameCount(), ret); + } + } + } + } + + /* 7. Complete frame */ + ret = m_completeFrame(newFrame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Complete frame fail, frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), ret); + /* TODO: doing exception handling */ + } + + /* 8. 
Push frame to out of Pipe */ +CLEAN: + m_outputFrameQ->pushProcessQ(&newFrame); + if ((m_parameters->isUseEarlyFrameReturn() == false + || m_reprocessing == true) + && m_frameDoneQ != NULL + && m_flagFrameDoneQ == true) + m_frameDoneQ->pushProcessQ(&newFrame); + + for (int i = OUTPUT_NODE; i < OTF_NODE_BASE; i++) + ret |= nodeDqRet[i]; + + CLOGV("DEBUG(%s[%d]):OUT-", __FUNCTION__, __LINE__); + +EXIT: +#ifdef USE_MCPIPE_SERIALIZATION_MODE + if (m_serializeOperation == true) { + ExynosCameraMCPipe::g_serializationLock.unlock(); + CLOGD("DEBUG(%s[%d]):%s Critical Section END", + __FUNCTION__, __LINE__, m_name); + } +#endif + + return ret; +} + +status_t ExynosCameraMCPipe::m_checkShotDone(struct camera2_shot_ext *shot_ext) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (shot_ext->node_group.leader.request != 1) { + CLOGW("WARN(%s[%d]):3a1 NOT DONE, frameCount(%d)", __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount(shot_ext)); + /* TODO: doing exception handling */ + return INVALID_OPERATION; + } + + return OK; +} + +/* m_updateMetadataFromFrame() will be deprecated */ +status_t ExynosCameraMCPipe::m_updateMetadataFromFrame(ExynosCameraFrame *frame, ExynosCameraBuffer *buffer) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buffer->addr[buffer->planeCount - 1]); + + if (shot_ext != NULL) { + int perframePosition = 0; + int zoomParamInfo = m_parameters->getZoomLevel(); + int zoomFrameInfo = 0; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + int videoW = 0, videoH = 0; + ExynosRect sensorSize; + ExynosRect bnsSize; + ExynosRect previewBayerCropSize; + ExynosRect pictureBayerCropSize; + ExynosRect bdsSize; + camera2_node_group node_group_info; + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_tpu_sc; + char captureNodeName[CAPTURE_NODE_MAX][EXYNOS_CAMERA_NAME_STR_SIZE]; + for (int i = 0; i < CAPTURE_NODE_MAX; i++) + memset(captureNodeName[i], 0, EXYNOS_CAMERA_NAME_STR_SIZE); + + if (INDEX(getPipeId()) == (uint32_t)m_parameters->getPerFrameControlPipe() + || getPipeId() == (uint32_t)m_parameters->getPerFrameControlReprocessingPipe()) { + frame->getMetaData(shot_ext); + + if (m_parameters->getHalVersion() != IS_HAL_VER_3_2) { + ret = m_parameters->duplicateCtrlMetadata((void *)shot_ext); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + // Setfile index is updated by capture intent at HAL3 + setMetaSetfile(shot_ext, m_setfile); + } + } + + if (m_reprocessing == false) + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)buffer); + +#ifdef SET_SETFILE_BY_SHOT_REPROCESSING + /* setfile setting */ + if (m_reprocessing == true) { + int yuvRange = 0; + int setfile = 0; + m_parameters->getSetfileYuvRange(m_reprocessing, &setfile, &yuvRange); + ALOGD("INFO(%s[%d]):setfile(%d),m_reprocessing(%d)", __FUNCTION__, __LINE__, setfile, m_reprocessing); + setMetaSetfile(shot_ext, setfile); + } +#endif + + CLOGV("DEBUG(%s[%d]):frameCount(%d), rCount(%d)", + __FUNCTION__, __LINE__, + frame->getFrameCount(), getMetaDmRequestFrameCount(shot_ext)); + + frame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perframeInfoIndex, &zoomFrameInfo); + + /* HACK: To speed up DZOOM */ + if ((getPipeId() == 
(uint32_t)m_parameters->getPerFrameControlPipe() + || getPipeId() == (uint32_t)m_parameters->getPerFrameControlReprocessingPipe()) + && zoomFrameInfo != zoomParamInfo) { + CLOGI("INFO(%s[%d]):zoomFrameInfo(%d), zoomParamInfo(%d)", + __FUNCTION__, __LINE__, zoomFrameInfo, zoomParamInfo); + + frame->getNodeGroupInfo(&node_group_info_isp, m_parameters->getPerFrameInfoIsp(), &zoomFrameInfo); + frame->getNodeGroupInfo(&node_group_info_tpu_sc, m_parameters->getPerFrameInfoDis(), &zoomFrameInfo); + + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPreviewBayerCropSize(&sensorSize, &previewBayerCropSize); + + if (m_reprocessing == false) { + m_parameters->getPreviewBdsSize(&bdsSize); + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getVideoSize(&videoW, &videoH); + + ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + m_cameraId, + &node_group_info, + previewBayerCropSize, + bdsSize, + previewW, previewH, + videoW, videoH); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info_isp, + previewBayerCropSize, + bdsSize, + previewW, previewH, + videoW, videoH, + m_parameters->getHWVdisMode()); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_tpu_sc, + previewBayerCropSize, + bdsSize, + previewW, previewH, + videoW, videoH, + m_parameters->getHWVdisMode()); + + frame->storeNodeGroupInfo(&node_group_info, m_parameters->getPerFrameInfo3AA(), zoomParamInfo); + frame->storeNodeGroupInfo(&node_group_info_isp, m_parameters->getPerFrameInfoIsp(), zoomParamInfo); + frame->storeNodeGroupInfo(&node_group_info_tpu_sc, m_parameters->getPerFrameInfoDis(), zoomParamInfo); + } else { + m_parameters->getPictureBayerCropSize(&bnsSize, &pictureBayerCropSize); + m_parameters->getPictureBdsSize(&bdsSize); + + ExynosCameraNodeGroup::updateNodeGroupInfo( + m_cameraId, + &node_group_info, + &node_group_info_isp, + previewBayerCropSize, + pictureBayerCropSize, + bdsSize, + pictureW, pictureH, + m_parameters->getUsePureBayerReprocessing(), + m_parameters->isReprocessing3aaIspOTF()); + + frame->storeNodeGroupInfo(&node_group_info, m_parameters->getPerFrameInfoReprocessingPure3AA(), zoomParamInfo); + frame->storeNodeGroupInfo(&node_group_info_isp, m_parameters->getPerFrameInfoReprocessingPureIsp(), zoomParamInfo); + } + } + + /* Update node's size & request */ + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + + if (node_group_info.leader.request == 1) { + if (m_checkNodeGroupInfo(m_deviceInfo->nodeName[OUTPUT_NODE], &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + + setMetaNodeLeaderRequest(shot_ext, node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perFrameVideoID); + } + + if (CAPTURE_NODE_MAX < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):PipeId(%d) has Invalid perframeSupportNodeNum: \ + 
CAPTURE_NODE_MAX(%d) < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum(%d), assert!!!!", + __FUNCTION__, __LINE__, getPipeId(), CAPTURE_NODE_MAX, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum); + } + + /* Update capture node request from Frame */ + for (int i = CAPTURE_NODE; i < OTF_NODE_BASE; i++) { + int funcRet = NO_ERROR; + if (m_node[i] != NULL) { + funcRet = m_getPerframePosition(&perframePosition, getPipeId((enum NODE_TYPE)i)); + if (funcRet != NO_ERROR) + continue; + + node_group_info.capture[perframePosition].request = frame->getRequest(getPipeId((enum NODE_TYPE)i)); + strncpy(captureNodeName[perframePosition], m_deviceInfo->nodeName[i], EXYNOS_CAMERA_NAME_STR_SIZE - 1); + } + } + + for (int i = 0; i < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum; i ++) { + /* + * To set 3AP BDS size on full OTF, + * We need to set perframeSize. + * set size when request is 0. so, no side effect. + */ + /* if (node_group_info.capture[i].request == 1) { */ + + if (m_checkNodeGroupInfo(captureNodeName[i], &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, + m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameCaptureInfo[i].perFrameVideoID); + } + + /* + CLOGI("INFO(%s[%d]):frameCount(%d)", __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + frame->dumpNodeGroupInfo(m_deviceInfo->nodeName[OUTPUT_NODE]); + m_dumpPerframeNodeGroupInfo("m_perframeMainNodeGroupInfo", m_perframeMainNodeGroupInfo[OUTPUT_NODE]); + + for (int i = (OUTPUT_NODE + 1); i < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum; i++) + m_dumpPerframeNodeGroupInfo("m_perframeCaptureNodeGroupInfo", m_perframeMainNodeGroupInfo[i]); + */ + + /* dump info on shot_ext, just before qbuf */ + /* m_dumpPerframeShotInfo(m_deviceInfo->nodeName[OUTPUT_NODE], frame->getFrameCount(), shot_ext); */ + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_updateMetadataFromFrame_v2(ExynosCameraFrame *frame, ExynosCameraBuffer *buffer) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(buffer->addr[buffer->planeCount - 1]); + + if (shot_ext != NULL) { + int perframePosition = 0; + int zoomParamInfo = m_parameters->getZoomLevel(); + int zoomFrameInfo = 0; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + int videoW = 0, videoH = 0; + ExynosRect sensorSize; + ExynosRect bnsSize; + ExynosRect previewBayerCropSize; + ExynosRect pictureBayerCropSize; + ExynosRect bdsSize; + camera2_node_group node_group_info; + char captureNodeName[CAPTURE_NODE_MAX][EXYNOS_CAMERA_NAME_STR_SIZE]; + for (int i = 0; i < CAPTURE_NODE_MAX; i++) + memset(captureNodeName[i], 0, EXYNOS_CAMERA_NAME_STR_SIZE); + + frame->getMetaData(shot_ext); + + if (INDEX(getPipeId()) == 
(uint32_t)m_parameters->getPerFrameControlPipe() + || getPipeId() == (uint32_t)m_parameters->getPerFrameControlReprocessingPipe()) { + if (m_parameters->getHalVersion() != IS_HAL_VER_3_2) { + ret = m_parameters->duplicateCtrlMetadata((void *)shot_ext); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + setMetaSetfile(shot_ext, m_setfile); + } + + if (m_reprocessing == false) + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)buffer); + +#ifdef SET_SETFILE_BY_SHOT_REPROCESSING + /* setfile setting */ + if (m_reprocessing == true) { + int yuvRange = 0; + int setfile = 0; + m_parameters->getSetfileYuvRange(m_reprocessing, &setfile, &yuvRange); + ALOGD("INFO(%s[%d]):setfile(%d),m_reprocessing(%d)", __FUNCTION__, __LINE__, setfile, m_reprocessing); + setMetaSetfile(shot_ext, setfile); + } +#endif + + CLOGV("DEBUG(%s[%d]):frameCount(%d), rCount(%d)", + __FUNCTION__, __LINE__, + frame->getFrameCount(), getMetaDmRequestFrameCount(shot_ext)); + + frame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perframeInfoIndex, &zoomFrameInfo); + +#if 0 + /* HACK: To speed up DZOOM */ + if ((getPipeId() == m_parameters->getPerFrameControlPipe() + || getPipeId() == m_parameters->getPerFrameControlReprocessingPipe()) + && zoomFrameInfo != zoomParamInfo) { + CLOGI("INFO(%s[%d]):zoomFrameInfo(%d), zoomParamInfo(%d)", + __FUNCTION__, __LINE__, zoomFrameInfo, zoomParamInfo); + + updateNodeGroupInfo( + getPipeId(), + m_parameters, + &node_group_info); + + frame->storeNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perframeInfoIndex, zoomParamInfo); + } +#endif + + /* Update node's size & request */ + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + + if (node_group_info.leader.request == 1) { + if (m_checkNodeGroupInfo(m_deviceInfo->nodeName[OUTPUT_NODE], &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + + setMetaNodeLeaderRequest(shot_ext, node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo[OUTPUT_NODE].perFrameLeaderInfo.perFrameVideoID); + } + + if (CAPTURE_NODE_MAX < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):PipeId(%d) has Invalid perframeSupportNodeNum:CAPTURE_NODE_MAX(%d) < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum(%d), assert!!!!", + __FUNCTION__, __LINE__, getPipeId(), CAPTURE_NODE_MAX, m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum); + } + + /* Update capture node request from Frame */ + for (int i = CAPTURE_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + uint32_t videoId = m_deviceInfo->nodeNum[i] - FIMC_IS_VIDEO_BAS_NUM; + for (perframePosition = 0; perframePosition < CAPTURE_NODE_MAX; perframePosition++) { + if (node_group_info.capture[perframePosition].vid == videoId) { + 
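/* this capture node's video id matches the per-frame slot: copy the frame's request flag and keep the node name for the m_checkNodeGroupInfo() log below */ + 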
node_group_info.capture[perframePosition].request = frame->getRequest(getPipeId((enum NODE_TYPE)i)); + strncpy(captureNodeName[perframePosition], m_deviceInfo->nodeName[i], EXYNOS_CAMERA_NAME_STR_SIZE - 1); + break; + } + } + } + } + + for (int i = 0; i < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum; i ++) { + /* + * To set 3AP BDS size on full OTF, + * We need to set perframeSize. + * set size when request is 0. so, no side effect. + */ + /* if (node_group_info.capture[i].request == 1) { */ + + if (m_checkNodeGroupInfo(captureNodeName[i], &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, node_group_info.capture[i].vid); + } + + /* + CLOGI("INFO(%s[%d]):frameCount(%d)", __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); + frame->dumpNodeGroupInfo(m_deviceInfo->nodeName[OUTPUT_NODE]); + m_dumpPerframeNodeGroupInfo("m_perframeMainNodeGroupInfo", m_perframeMainNodeGroupInfo[OUTPUT_NODE]); + + for (int i = (OUTPUT_NODE + 1); i < m_perframeMainNodeGroupInfo[OUTPUT_NODE].perframeSupportNodeNum; i++) + m_dumpPerframeNodeGroupInfo("m_perframeCaptureNodeGroupInfo", m_perframeMainNodeGroupInfo[i]); + */ + + /* dump info on shot_ext, just before qbuf */ + /* m_dumpPerframeShotInfo(m_deviceInfo->nodeName[OUTPUT_NODE], frame->getFrameCount(), shot_ext); */ + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_updateMetadataToFrame(void *metadata, int index, ExynosCameraFrame *frame, enum NODE_TYPE nodeLocation) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + ExynosCameraFrame *curFrame = NULL; + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)metadata; + + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):Meta buffer is null", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d)", __FUNCTION__, __LINE__, index); + return BAD_VALUE; + } + if (frame == NULL) { + CLOGE("ERR(%s[%d]):frame is Null", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (nodeLocation < OUTPUT_NODE) { + CLOGE("ERR(%s[%d]):Invalid node location(%d)", __FUNCTION__, __LINE__, nodeLocation); + return BAD_VALUE; + } + + if (m_metadataTypeShot == false) { + CLOGV("DEBUG(%s[%d]):Stream type do not need update metadata", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + /* + ret = m_getFrameByIndex(&curFrame, index, nodeLocation); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_getFrameByIndex() fail, node(%s), index(%d), ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[nodeLocation], index, ret); + return ret; + } + */ + + ret = frame->storeDynamicMeta(shot_ext); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):storeDynamicMeta() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = frame->storeUserDynamicMeta(shot_ext); + if (ret != NO_ERROR) { + 
CLOGE("ERR(%s[%d]):storeUserDynamicMeta() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (shot_ext->shot.dm.request.frameCount != 0) + ret = frame->setMetaDataEnable(true); + + return ret; +} + +status_t ExynosCameraMCPipe::m_getFrameByIndex(ExynosCameraFrame **frame, int index, enum NODE_TYPE nodeLocation) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (nodeLocation < OUTPUT_NODE) { + CLOGE("ERR(%s[%d]):Invalid node location(%d)", __FUNCTION__, __LINE__, nodeLocation); + return BAD_VALUE; + } + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d)", __FUNCTION__, __LINE__, index); + return BAD_VALUE; + } + + *frame = m_runningFrameList[nodeLocation][index]; + if (*frame == NULL) { + CLOGE("ERR(%s[%d]):Unknown buffer, index %d frame is NULL", __FUNCTION__, __LINE__, index); + dump(); + return BAD_VALUE; + } + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::m_completeFrame( + ExynosCameraFrame *frame, + bool isValid) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (frame == NULL) { + CLOGE("ERR(%s[%d]):Frame is NULL", __FUNCTION__, __LINE__); + dump(); + return BAD_VALUE; + } + + if (isValid == false) { + CLOGD("DEBUG(%s[%d]):NOT DONE frameCount(%d)", __FUNCTION__, __LINE__, + frame->getFrameCount()); + } + + ret = frame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + CLOGV("DEBUG(%s[%d]):Entity pipeId(%d), frameCount(%d)", + __FUNCTION__, __LINE__, getPipeId(), frame->getFrameCount()); + + return ret; +} + +status_t ExynosCameraMCPipe::m_setInput(ExynosCameraNode *nodes[], int32_t *nodeNums, int32_t *sensorIds) +{ + status_t ret = NO_ERROR; + int currentSensorId[MAX_NODE] = {0}; + + if (nodes == NULL || nodeNums == NULL || sensorIds == NULL) { + CLOGE("ERR(%s[%d]): nodes == %p || nodeNum == %p || sensorId == %p", + __FUNCTION__, __LINE__, nodes, nodeNums, sensorIds); + return INVALID_OPERATION; + } + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_flagValidInt(nodeNums[i]) == false) + continue; + + if (m_flagValidInt(sensorIds[i]) == false) + continue; + + if (nodes[i] == NULL) + continue; + + currentSensorId[i] = nodes[i]->getInput(); + + if (m_flagValidInt(currentSensorId[i]) == false || + currentSensorId[i] != sensorIds[i]) { + +#ifdef INPUT_STREAM_MASK + CLOGD("DEBUG(%s[%d]): setInput(sensorIds : %d) [src nodeNum : %d][nodeNums : %d][otf : %d][leader : %d][reprocessing : %d][unique sensorId : %d]", + __FUNCTION__, __LINE__, + sensorIds[i], + ((sensorIds[i] & INPUT_VINDEX_MASK) >> INPUT_VINDEX_SHIFT) + FIMC_IS_VIDEO_BAS_NUM, + nodeNums[i], + ((sensorIds[i] & INPUT_MEMORY_MASK) >> INPUT_MEMORY_SHIFT), + ((sensorIds[i] & INPUT_LEADER_MASK) >> INPUT_LEADER_SHIFT), + ((sensorIds[i] & INPUT_STREAM_MASK) >> INPUT_STREAM_SHIFT), + ((sensorIds[i] & INPUT_MODULE_MASK) >> INPUT_MODULE_SHIFT)); +#else + CLOGD("DEBUG(%s[%d]): setInput(sensorIds : %d)", + __FUNCTION__, __LINE__, sensorIds[i]); +#endif + ret = nodes[i]->setInput(sensorIds[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): nodeNums[%d] : %d, setInput(sensorIds : %d fail, ret(%d)", + __FUNCTION__, __LINE__, i, nodeNums[i], sensorIds[i], + ret); + + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + uint32_t planeCount = 2; + enum 
YUV_RANGE yuvRange = YUV_FULL_RANGE; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]):pipeInfos is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + for (int i = OUTPUT_NODE; i < OTF_NODE_BASE; i++) { + if (m_node[i] != NULL && + 0 < pipeInfos[i].rectInfo.fullW && + 0 < pipeInfos[i].rectInfo.fullH) { + /* check about OUTPUT_NODE */ + if (i == OUTPUT_NODE + && pipeInfos[i].bufInfo.type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) { + CLOGE("ERR(%s[%d]):pipeInfos[%d].bufInfo.type is not Valid(type:%d)", + __FUNCTION__, __LINE__, i, pipeInfos[i].bufInfo.type); + return BAD_VALUE; + } + + /* check about CAPTURE_NODE */ + if (i >= CAPTURE_NODE + && m_deviceInfo->connectionMode[i] != HW_CONNECTION_MODE_M2M_BUFFER_HIDING + && pipeInfos[i].bufInfo.type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) { + CLOGE("ERR(%s[%d]):pipeInfos[%d].bufInfo.type is not Valid(type:%d)", + __FUNCTION__, __LINE__, i, pipeInfos[i].bufInfo.type); + return BAD_VALUE; + } + + uint32_t bytePerPlane = 0; + int colorFormat = pipeInfos[i].rectInfo.colorFormat; + + getYuvFormatInfo(colorFormat, &bytePerPlane, &planeCount); + + /* Add metadata plane count */ + planeCount++; + + if (m_reprocessing == false + && (m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_SCP_NUM + || m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_M0P_NUM + || m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_M1P_NUM + || m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_M2P_NUM + || m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_M3P_NUM + || m_deviceInfo->nodeNum[i] == FIMC_IS_VIDEO_M4P_NUM)) { + int setfile = 0; + int previewYuvRange = 0; + + /* MC scaler can set different format with preview */ + /* + int colorFormat = m_parameters->getHwPreviewFormat(); + + if (colorFormat != pipeInfos[i].rectInfo.colorFormat) { + CLOGE("ERR(%s[%d]):SCP colorformat is not Valid(%d)", + __FUNCTION__, __LINE__, pipeInfos[i].rectInfo.colorFormat); + return BAD_VALUE; + } + */ + + m_parameters->getSetfileYuvRange(m_reprocessing, &setfile, &previewYuvRange); + + yuvRange = (enum YUV_RANGE)previewYuvRange; + } else { + planeCount = 2; + yuvRange = YUV_FULL_RANGE; + } + + ret = m_setNodeInfo(m_node[i], &pipeInfos[i], planeCount, yuvRange); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setNodeInfo(W:%d, H:%d, buffer count:%d) fail(Node:%s), ret(%d)", + __FUNCTION__, __LINE__, + pipeInfos[i].rectInfo.fullW, pipeInfos[i].rectInfo.fullH, + pipeInfos[i].bufInfo.count, m_deviceInfo->nodeName[i], ret); + return ret; + } + + m_numBuffers[i] = pipeInfos[i].bufInfo.count; + m_perframeMainNodeGroupInfo[i] = pipeInfos[i].perFrameNodeGroupInfo; + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_setNodeInfo(ExynosCameraNode *node, camera_pipe_info_t *pipeInfos, + uint32_t planeCount, enum YUV_RANGE yuvRange, + __unused bool flagBayer) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + bool flagSetRequest = false; + unsigned int requestBufCount = 0; + + int currentW = 0; + int currentH = 0; + int currentV4l2Colorformat = 0; + int currentPlanesCount = 0; + enum YUV_RANGE currentYuvRange = YUV_FULL_RANGE; + int currentBufferCount = 0; + enum v4l2_buf_type currentBufType; + enum v4l2_memory currentMemType; + + if (node == NULL) { + CLOGE("ERR(%s[%d]):node is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]):pipeInfos is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + requestBufCount = node->reqBuffersCount(); + + /* If it is already set */ + if (0 < requestBufCount) { + node->getSize(&currentW, &currentH); + 
node->getColorFormat(&currentV4l2Colorformat, &currentPlanesCount, &currentYuvRange); + node->getBufferType(&currentBufferCount, &currentBufType, &currentMemType); + + if (/* setSize */ + currentW != pipeInfos->rectInfo.fullW || + currentH != pipeInfos->rectInfo.fullH || + /* setColorFormat */ + currentV4l2Colorformat != pipeInfos->rectInfo.colorFormat || + currentPlanesCount != (int)planeCount || + currentYuvRange != yuvRange || + /* setBufferType */ + currentBufferCount != (int)pipeInfos->bufInfo.count || + currentBufType != (enum v4l2_buf_type)pipeInfos->bufInfo.type || + currentMemType != (enum v4l2_memory)pipeInfos->bufInfo.memory) { + + flagSetRequest = true; + + CLOGW("WARN(%s[%d]):Node is already requested. call clrBuffers()", __FUNCTION__, __LINE__); + + CLOGW("WARN(%s[%d]):W(%d -> %d), H(%d -> %d)", + __FUNCTION__, __LINE__, + currentW, pipeInfos->rectInfo.fullW, + currentH, pipeInfos->rectInfo.fullH); + + CLOGW("WARN(%s[%d]):colorFormat(%d -> %d), planeCount(%d -> %d), yuvRange(%d -> %d)", + __FUNCTION__, __LINE__, + currentV4l2Colorformat, pipeInfos->rectInfo.colorFormat, + currentPlanesCount, planeCount, + currentYuvRange, yuvRange); + + CLOGW("WARN(%s[%d]):bufferCount(%d -> %d), bufType(%d -> %d), memType(%d -> %d)", + __FUNCTION__, __LINE__, + currentBufferCount, pipeInfos->bufInfo.count, + currentBufType, pipeInfos->bufInfo.type, + currentMemType, pipeInfos->bufInfo.memory); + + ret = node->clrBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): node->clrBuffers() fail", __FUNCTION__, __LINE__); + return ret; + } + } + } else { + flagSetRequest = true; + } + + if (flagSetRequest == true) { + CLOGD("DEBUG(%s[%d]):set pipeInfos on %s, setFormat(%d, %d) and reqBuffers(%d)", + __FUNCTION__, __LINE__, + node->getName(), pipeInfos->rectInfo.fullW, + pipeInfos->rectInfo.fullH, pipeInfos->bufInfo.count); + + bool flagValidSetFormatInfo = true; + + if (pipeInfos->rectInfo.fullW == 0 || pipeInfos->rectInfo.fullH == 0) { + CLOGW("WARN(%s[%d]):Invalid size(%d x %d), skip setSize()", + __FUNCTION__, __LINE__, + pipeInfos->rectInfo.fullW, pipeInfos->rectInfo.fullH); + + flagValidSetFormatInfo = false; + } + node->setSize(pipeInfos->rectInfo.fullW, pipeInfos->rectInfo.fullH); + + if (pipeInfos->rectInfo.colorFormat == 0 || planeCount == 0) { + CLOGW("WARN(%s[%d]):invalid colorFormat(%d), planeCount(%d), skip setColorFormat()", + __FUNCTION__, __LINE__, + pipeInfos->rectInfo.colorFormat, planeCount); + + flagValidSetFormatInfo = false; + } + node->setColorFormat(pipeInfos->rectInfo.colorFormat, planeCount, yuvRange); + + if ((int)pipeInfos->bufInfo.type == 0 || pipeInfos->bufInfo.memory == 0) { + CLOGW("WARN(%s[%d]):Invalid bufInfo.type(%d), bufInfo.memory(%d), skip setBufferType()", + __FUNCTION__, __LINE__, + (int)pipeInfos->bufInfo.type, (int)pipeInfos->bufInfo.memory); + + flagValidSetFormatInfo = false; + } + node->setBufferType(pipeInfos->bufInfo.count, + (enum v4l2_buf_type)pipeInfos->bufInfo.type, + (enum v4l2_memory)pipeInfos->bufInfo.memory); + + if (flagValidSetFormatInfo == true) { + ret = node->setFormat(pipeInfos->bytesPerPlane); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node->setFormat() fail", __FUNCTION__, __LINE__); + return ret; + } + } + + node->getBufferType(&currentBufferCount, &currentBufType, &currentMemType); + + } else { + CLOGD("DEBUG(%s[%d]):Skip set pipeInfos setFormat(%d, %d) and reqBuffers(%d).", + __FUNCTION__, __LINE__, + pipeInfos->rectInfo.fullW, pipeInfos->rectInfo.fullH, pipeInfos->bufInfo.count); + } + + if (currentBufferCount <= 0) { + CLOGW("WARN(%s[%d]):Invalid currentBufferCount(%d), skip 
reqBuffers()", + __FUNCTION__, __LINE__, currentBufferCount); + } else { + ret = node->reqBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node->reqBuffers() fail", __FUNCTION__, __LINE__); + return ret; + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_getPerframePosition(int *perframePosition, uint32_t pipeId) +{ + status_t ret = NO_ERROR; + + if (perframePosition == NULL) { + CLOGE("ERR(%s[%d]):perframePosition is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + switch(pipeId) { + case PIPE_3AC: + *perframePosition = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AC_POS : PERFRAME_FRONT_3AC_POS; + break; + case PIPE_3AP: + *perframePosition = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_3AP_POS : PERFRAME_FRONT_3AP_POS; + break; + case PIPE_ISPC: + *perframePosition = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_ISPC_POS : PERFRAME_FRONT_ISPC_POS; + break; + case PIPE_ISPP: + *perframePosition = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_ISPP_POS : PERFRAME_FRONT_ISPP_POS; + break; + case PIPE_SCC: + *perframePosition = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCC_POS : PERFRAME_FRONT_SCC_POS; + break; + case PIPE_SCP: /* Same as case of PIPE_MCSC0 */ + *perframePosition = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + break; + case PIPE_MCSC1: + *perframePosition = PERFRAME_BACK_MCSC1_POS; + break; + case PIPE_MCSC2: + *perframePosition = PERFRAME_BACK_MCSC2_POS; + break; + case PIPE_3AC_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_3AC_POS; + break; + case PIPE_3AP_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_3AP_POS; + break; + case PIPE_ISPC_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_ISPC_POS; + break; + case PIPE_ISPP_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_ISPP_POS; + break; + case PIPE_MCSC0_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_MCSC0_POS; + break; + case PIPE_MCSC2_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_MCSC2_POS; + break; + case PIPE_MCSC3_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_MCSC3_POS; + break; + case PIPE_MCSC4_REPROCESSING: + *perframePosition = PERFRAME_REPROCESSING_MCSC4_POS; + break; + default: + CLOGV("ERR(%s[%d]):Invalid pipeID(%d)", __FUNCTION__, __LINE__, pipeId); + ret = BAD_VALUE; + break; + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_setSetfile(ExynosCameraNode *node, uint32_t pipeId) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + int yuvRange = 0; + + m_parameters->getSetfileYuvRange(m_reprocessing, &m_setfile, &yuvRange); + + if (m_parameters->getSetFileCtlMode() == true) { + if (m_parameters->getSetFileCtl3AA() == true && INDEX(pipeId) == INDEX(PIPE_3AA)) { + ret = node->setControl(V4L2_CID_IS_SET_SETFILE, m_setfile); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d)", __FUNCTION__, __LINE__, m_setfile, ret); + return ret; + } + } else if (m_parameters->getSetFileCtlISP() == true && INDEX(pipeId) == INDEX(PIPE_ISP)) { + ret = node->setControl(V4L2_CID_IS_SET_SETFILE, m_setfile); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d)", __FUNCTION__, __LINE__, m_setfile, ret); + return ret; + } + } else if (m_parameters->getSetFileCtlSCP() == true && INDEX(pipeId) == INDEX(PIPE_SCP)) { + ret = node->setControl(V4L2_CID_IS_COLOR_RANGE, yuvRange); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d)", __FUNCTION__, __LINE__, m_setfile, ret); + return ret; + 
} + } + } else { + m_setfile = mergeSetfileYuvRange(m_setfile, yuvRange); + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_forceDone(ExynosCameraNode *node, unsigned int cid, int value) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (node == NULL) { + CLOGE("ERR(%s[%d]):node is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (cid != V4L2_CID_IS_FORCE_DONE) { + CLOGW("ERR(%s[%d]):cid != V4L2_CID_IS_FORCE_DONE", __FUNCTION__, __LINE__); + } + + /* "value" is not meaningful */ + ret = node->setControl(V4L2_CID_IS_FORCE_DONE, value); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):node V4L2_CID_IS_FORCE_DONE failed", __FUNCTION__, __LINE__); + node->dump(); + return ret; + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_setMapBuffer(int nodeIndex) +{ + status_t ret = NO_ERROR; + + int bufferIndex[VIDEO_MAX_FRAME]; + for (int i = 0; i < VIDEO_MAX_FRAME; i++) + bufferIndex[i] = -2; + ExynosCameraBuffer buffer; + + if (m_bufferManager[nodeIndex]->getAllocatedBufferCount() > 0) { + int index = 0; + while (m_bufferManager[nodeIndex]->getNumOfAvailableBuffer() > 0) { + ret |= m_bufferManager[nodeIndex]->getBuffer(&(bufferIndex[index]), EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &buffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager getBuffer fail, manager(%d), ret(%d)", + __FUNCTION__, __LINE__, nodeIndex, ret); + } + + ret |= m_node[nodeIndex]->prepareBuffer(&buffer); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->putBuffer() fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[nodeIndex], ret); + + } + index++; + } + + while (index > 0) { + index--; + /* TODO: doing exception handling */ + ret |= m_bufferManager[nodeIndex]->putBuffer(bufferIndex[index], EXYNOS_CAMERA_BUFFER_POSITION_NONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager putBuffer fail, manager(%d), ret(%d)", + __FUNCTION__, __LINE__, nodeIndex, ret); + } + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_setMapBuffer(ExynosCameraNode *node, ExynosCameraBuffer *buffer) +{ + status_t ret = NO_ERROR; + + if (buffer == NULL) { + CLOGE("ERR(%s[%d]):Buffer is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (node == NULL) { + CLOGE("ERR(%s[%d]):Node is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + /* Require code sync release-git to Repo */ +#if 0 + ret = node->mapBuffer(buffer); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):mapBuffer() fail, ret(%d)", __FUNCTION__, __LINE__, ret); +#endif + + return ret; +} + +status_t ExynosCameraMCPipe::m_setJpegInfo(int nodeType, ExynosCameraBuffer *buffer) +{ + status_t ret = NO_ERROR; + int pipeId = MAX_PIPE_NUM; + ExynosRect pictureRect; + ExynosRect thumbnailRect; + int jpegQuality = -1; + int thumbnailQuality = -1; + exif_attribute_t exifInfo; + debug_attribute_t *debugInfo; + camera2_shot_ext *shot_ext = NULL; + + /* 1. Check the invalid parameters */ + if (buffer == NULL) { + CLOGE("ERR(%s[%d]):buffer is NULL. pipeId %d", + __FUNCTION__, __LINE__, pipeId); + return BAD_VALUE; + } + + /* 2. Get control metadata from buffer and pipeId */ + shot_ext = (struct camera2_shot_ext *)(buffer->addr[buffer->planeCount - 1]); + pipeId = m_deviceInfo->pipeId[nodeType]; + + /* 3. 
Get control informations from parameter & metadata */ + m_parameters->getPictureSize(&pictureRect.w, &pictureRect.h); + m_parameters->getThumbnailSize(&thumbnailRect.w, &thumbnailRect.h); + jpegQuality = m_parameters->getJpegQuality(); + thumbnailQuality = m_parameters->getThumbnailQuality(); + + debugInfo = m_parameters->getDebugAttribute(); + + /* 3. Set JPEG node perframe control information for each node */ + switch (pipeId) { + case PIPE_HWFC_JPEG_SRC_REPROCESSING: + case PIPE_HWFC_JPEG_DST_REPROCESSING: + /* JPEG HAL setSize */ + ret = m_node[nodeType]->setSize(pictureRect.w, pictureRect.h); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to set size %dx%d into %s, ret %d", + __FUNCTION__, __LINE__, + pictureRect.w, pictureRect.h, + m_deviceInfo->nodeName[nodeType], ret); + + /* JPEG HAL setQuality */ + CLOGD("DEBUG(%s[%d]):m_node[nodeType]->setQuality(int)", __FUNCTION__, __LINE__); + ret = m_node[nodeType]->setQuality(jpegQuality); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to set jpeg quality %d into %s, ret %d", + __FUNCTION__, __LINE__, + jpegQuality, + m_deviceInfo->nodeName[nodeType], ret); + + /* Create EXIF info */ + m_parameters->getFixedExifInfo(&exifInfo); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to get Fixed Exif Info, ret %d", + __FUNCTION__, __LINE__, ret); + if (thumbnailRect.w > 0 && thumbnailRect.h > 0) { + exifInfo.enableThumb = true; + } else { + exifInfo.enableThumb = false; + } + m_parameters->setExifChangedAttribute(&exifInfo, &pictureRect, &thumbnailRect, &shot_ext->shot); + + /* JPEG HAL setExifInfo */ + ret = m_node[nodeType]->setExifInfo(&exifInfo); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to set EXIF info into %s, ret %d", + __FUNCTION__, __LINE__, + m_deviceInfo->nodeName[nodeType], ret); + + /* JPEG HAL setDebugInfo */ + debugInfo = m_parameters->getDebugAttribute(); + ret = m_node[nodeType]->setDebugInfo(debugInfo); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to set DEBUG Info into %s, ret %d", + __FUNCTION__, __LINE__, + m_deviceInfo->nodeName[nodeType], ret); + break; + case PIPE_HWFC_THUMB_SRC_REPROCESSING: + /* JPEG HAL setSize */ + if (thumbnailRect.w > 0 && thumbnailRect.h > 0) { + ret = m_node[nodeType]->setSize(thumbnailRect.w, thumbnailRect.h); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to set thumbnail size %dx%d into %s, ret %d", + __FUNCTION__, __LINE__, + thumbnailRect.w, thumbnailRect.h, + m_deviceInfo->nodeName[nodeType], ret); + } + + /* JPEG HAL setQuality */ + m_node[nodeType]->setQuality(thumbnailQuality); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Failed to setQuality %d into %s, ret %d", + __FUNCTION__, __LINE__, + thumbnailQuality, + m_deviceInfo->nodeName[nodeType], ret); + break; + default: + CLOGE("ERR(%s[%d]):Invalid pipeId %d", __FUNCTION__, __LINE__, pipeId); + ret = BAD_VALUE; + break; + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_startNode(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = (OTF_NODE_BASE - 1); i >= OUTPUT_NODE; i--) { + /* only M2M mode need stream on/off */ + /* TODO : flite has different sensorId bit */ + if (m_node[i] != NULL) { + ret = m_node[i]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->start fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_stopNode(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = OUTPUT_NODE; i < 
OTF_NODE_BASE; i++) { + /* only M2M mode need stream on/off */ + /* TODO : flite has different sensorId bit */ + if (m_node[i] != NULL) { + ret = m_node[i]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->stop fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + ret = m_node[i]->clrBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->clrBuffers fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + + m_node[i]->removeItemBufferQ(); + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_clearNode(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + if (m_node[i] != NULL) { + ret = m_node[i]->clrBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):node(%s)->clrBuffers fail, ret(%d)", + __FUNCTION__, __LINE__, m_deviceInfo->nodeName[i], ret); + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraMCPipe::m_checkNodeGroupInfo(char *name, camera2_node *oldNode, camera2_node *newNode) +{ + if (oldNode == NULL || newNode == NULL) { + CLOGE("ERR(%s[%d]): oldNode(%p) == NULL || newNode(%p) == NULL", __FUNCTION__, __LINE__, oldNode, newNode); + return INVALID_OPERATION; + } + + bool flagCropRegionChanged = false; + + for (int i = 0; i < 4; i++) { + if (oldNode->input.cropRegion[i] != newNode->input.cropRegion[i] || + oldNode->output.cropRegion[i] != newNode->output.cropRegion[i]) { + + CLOGD("DEBUG(%s[%d]):(%s):oldCropSize(%d, %d, %d, %d / %d, %d, %d, %d) -> newCropSize(%d, %d, %d, %d / %d, %d, %d, %d)", + __FUNCTION__, __LINE__, + name, + oldNode->input. cropRegion[0], oldNode->input. cropRegion[1], oldNode->input. cropRegion[2], oldNode->input. cropRegion[3], + oldNode->output.cropRegion[0], oldNode->output.cropRegion[1], oldNode->output.cropRegion[2], oldNode->output.cropRegion[3], + newNode->input. cropRegion[0], newNode->input. cropRegion[1], newNode->input. cropRegion[2], newNode->input. cropRegion[3], + newNode->output.cropRegion[0], newNode->output.cropRegion[1], newNode->output.cropRegion[2], newNode->output.cropRegion[3]); + + break; + } + } + + for (int i = 0; i < 4; i++) { + oldNode->input. cropRegion[i] = newNode->input. cropRegion[i]; + oldNode->output.cropRegion[i] = newNode->output.cropRegion[i]; + } + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::m_checkNodeGroupInfo(char *name, int index, camera2_node *oldNode, camera2_node *newNode) +{ + if (oldNode == NULL || newNode == NULL) { + CLOGE("ERR(%s[%d]): oldNode(%p) == NULL || newNode(%p) == NULL", __FUNCTION__, __LINE__, oldNode, newNode); + return INVALID_OPERATION; + } + + bool flagCropRegionChanged = false; + + for (int i = 0; i < 4; i++) { + if (oldNode->input.cropRegion[i] != newNode->input.cropRegion[i] || + oldNode->output.cropRegion[i] != newNode->output.cropRegion[i]) { + + CLOGD("DEBUG(%s[%d]): name %s : index %d: PerFrame oldCropSize (%d, %d, %d, %d / %d, %d, %d, %d) -> newCropSize (%d, %d, %d, %d / %d, %d, %d, %d)", + __FUNCTION__, __LINE__, + name, + index, + oldNode->input. cropRegion[0], oldNode->input. cropRegion[1], oldNode->input. cropRegion[2], oldNode->input. cropRegion[3], + oldNode->output.cropRegion[0], oldNode->output.cropRegion[1], oldNode->output.cropRegion[2], oldNode->output.cropRegion[3], + newNode->input. cropRegion[0], newNode->input. cropRegion[1], newNode->input. cropRegion[2], newNode->input. 
cropRegion[3], + newNode->output.cropRegion[0], newNode->output.cropRegion[1], newNode->output.cropRegion[2], newNode->output.cropRegion[3]); + + break; + } + } + + for (int i = 0; i < 4; i++) { + oldNode->input. cropRegion[i] = newNode->input. cropRegion[i]; + oldNode->output.cropRegion[i] = newNode->output.cropRegion[i]; + } + + return NO_ERROR; +} + +status_t ExynosCameraMCPipe::m_checkNodeGroupInfo(int index, camera2_node *oldNode, camera2_node *newNode) +{ + return m_checkNodeGroupInfo(m_deviceInfo->nodeName[index], oldNode, newNode); +} + +void ExynosCameraMCPipe::m_dumpRunningFrameList(void) +{ + CLOGI("INFO(%s[%d]):*********runningFrameList dump***********", __FUNCTION__, __LINE__); + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + CLOGI("INFO(%s[%d]):m_numBuffers[%d] : %d", __FUNCTION__, __LINE__, i, m_numBuffers[i]); + for (uint32_t j = 0; j < m_numBuffers[i]; j++) { + if (m_runningFrameList[i][j] == NULL) { + CLOGI("runningFrameList[%d][%d] is NULL", i, j); + } else { + CLOGI("runningFrameList[%d][%d]: fcount = %d", + i, j, m_runningFrameList[i][j]->getFrameCount()); + } + } + } + + return; +} + +void ExynosCameraMCPipe::m_dumpPerframeNodeGroupInfo(const char *name, camera_pipe_perframe_node_group_info_t nodeInfo) +{ + if (name != NULL) + CLOGI("DEBUG(%s[%d]):(%s) ++++++++++++++++++++", __FUNCTION__, __LINE__, name); + + CLOGI("\t\t perframeSupportNodeNum : %d", nodeInfo.perframeSupportNodeNum); + CLOGI("\t\t perFrameLeaderInfo.perframeInfoIndex : %d", nodeInfo.perFrameLeaderInfo.perframeInfoIndex); + CLOGI("\t\t perFrameLeaderInfo.perFrameVideoID : %d", nodeInfo.perFrameLeaderInfo.perFrameVideoID); + + for (int i = 0; i < CAPTURE_NODE_MAX; i++) + CLOGI("\t\t perFrameCaptureInfo[%d].perFrameVideoID : %d", i, nodeInfo.perFrameCaptureInfo[i].perFrameVideoID); + + if (name != NULL) + CLOGI("DEBUG(%s[%d]):(%s) ------------------------------", __FUNCTION__, __LINE__, name); + + return; +} + +void ExynosCameraMCPipe::m_dumpPerframeShotInfo(const char *name, int frameCount, camera2_shot_ext *shot_ext) +{ + if (name != NULL) + CLOGI("DEBUG(%s[%d]):(%s) frameCount(%d) ++++++++++++++++++++", __FUNCTION__, __LINE__, name, frameCount); + + if (shot_ext != NULL) { + for (int i = 0; i < CAPTURE_NODE_MAX; i++) { + CLOGI("DEBUG(%s[%d]):\t\t index(%d), vid(%d) request(%d) input (%d, %d, %d, %d) output (%d, %d, %d, %d)", + __FUNCTION__, __LINE__, + i, + shot_ext->node_group.capture[i].vid, + shot_ext->node_group.capture[i].request, + shot_ext->node_group.capture[i].input.cropRegion[0], + shot_ext->node_group.capture[i].input.cropRegion[1], + shot_ext->node_group.capture[i].input.cropRegion[2], + shot_ext->node_group.capture[i].input.cropRegion[3], + shot_ext->node_group.capture[i].output.cropRegion[0], + shot_ext->node_group.capture[i].output.cropRegion[1], + shot_ext->node_group.capture[i].output.cropRegion[2], + shot_ext->node_group.capture[i].output.cropRegion[3]); + } + } else { + CLOGI("DEBUG(%s[%d]):\t\t shot_ext == NULL", __FUNCTION__, __LINE__); + } + + if (name != NULL) + CLOGI("DEBUG(%s[%d]):(%s) ------------------------------", __FUNCTION__, __LINE__, name); +} + +void ExynosCameraMCPipe::m_configDvfs(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + bool newDvfs = m_parameters->getDvfsLock(); + + if (newDvfs != m_dvfsLocked) { + setControl(V4L2_CID_IS_DVFS_LOCK, 533000); + m_dvfsLocked = newDvfs; + } +} + +bool ExynosCameraMCPipe::m_flagValidInt(int num) +{ + bool ret = false; + + if (num == -1 || num == 0) + ret = false; + else + ret = true; + + return ret; +} + +bool 
ExynosCameraMCPipe::m_checkThreadLoop(frame_queue_t *frameQ) +{ + Mutex::Autolock lock(m_pipeframeLock); + bool loop = false; + + if (m_reprocessing == false) + loop = true; + + if (m_oneShotMode == false) + loop = true; + + if (frameQ->getSizeOfProcessQ() > 0) + loop = true; + + if (m_flagTryStop == true) + loop = false; + + return loop; +} + +status_t ExynosCameraMCPipe::m_checkPolling(ExynosCameraNode *node) +{ + int ret = 0; + + ret = node->polling(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):polling fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + + m_threadState = ERROR_POLLING_DETECTED; + return ERROR_POLLING_DETECTED; + } + + return NO_ERROR; +} + +void ExynosCameraMCPipe::m_init(camera_device_info_t *deviceInfo) +{ + if (deviceInfo != NULL) + m_deviceInfo = deviceInfo; + else + m_deviceInfo = NULL; + +/* + * For replace old pipe, MCPipe have all variables. + * but, if exist old pipe, same variable not declared by MCPipe. + */ + for (int i = OUTPUT_NODE; i < MAX_NODE; i++) { + /* + m_bufferManager[i] = NULL; + m_node[i] = NULL; + m_nodeNum[i] = -1; + m_sensorIds[i] = -1; + */ + m_secondaryNode[i] = NULL; + m_secondaryNodeNum[i] = -1; + m_secondarySensorIds[i] = -1; + m_numOfRunningFrame[i] = 0; + m_skipPutBuffer[i] = false; + m_numBuffers[i] = 0; + for (int j = 0; j < MAX_BUFFERS; j++) + m_runningFrameList[i][j] = NULL; + memset(&m_perframeMainNodeGroupInfo[i], 0x00, sizeof(camera_pipe_perframe_node_group_info_t)); + + if (m_deviceInfo != NULL) + m_pipeIdArr[i] = m_deviceInfo->pipeId[i]; + else + m_pipeIdArr[i] = 0; + } + + /* + m_parameters = NULL; + m_activityControl = NULL; + m_exynosconfig = NULL; + + memset(m_name, 0x00, sizeof(m_name)); + + m_inputFrameQ = NULL; + */ + m_requestFrameQ = NULL; + /* + m_outputFrameQ = NULL; + m_frameDoneQ = NULL; + + m_pipeId = 0; + m_cameraId = -1; + + m_setfile = 0x0; + + m_prepareBufferCount = 0; + + m_reprocessing = false; + m_flagStartPipe = false; + m_flagTryStop = false; + m_dvfsLocked = false; + m_isBoosting = false; + m_flagFrameDoneQ = false; + + m_threadCommand = 0; + m_timeInterval = 0; + m_threadState = 0; + m_threadRenew = 0; + + memset(&m_curNodeGroupInfo, 0x00, sizeof(camera2_node_group)); + */ +#ifdef USE_MCPIPE_SERIALIZATION_MODE + m_serializeOperation = false; +#endif +#ifdef TEST_WATCHDOG_THREAD + int testErrorDetect = 0; +#endif +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/MCPipes/ExynosCameraMCPipe.h b/libcamera/common_v2/MCPipes/ExynosCameraMCPipe.h new file mode 100644 index 0000000..d7a4aea --- /dev/null +++ b/libcamera/common_v2/MCPipes/ExynosCameraMCPipe.h @@ -0,0 +1,330 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_MCPIPE_H +#define EXYNOS_CAMERA_MCPIPE_H + +#include "ExynosCameraPipe.h" + +namespace android { + +enum HW_CONNECTION_MODE { + HW_CONNECTION_MODE_M2M = 0, + HW_CONNECTION_MODE_OTF, + HW_CONNECTION_MODE_M2M_BUFFER_HIDING, +}; + +typedef struct ExynosCameraDeviceInfo { + int32_t nodeNum[MAX_NODE]; + int32_t secondaryNodeNum[MAX_NODE]; + char nodeName[MAX_NODE][EXYNOS_CAMERA_NAME_STR_SIZE]; + char secondaryNodeName[MAX_NODE][EXYNOS_CAMERA_NAME_STR_SIZE]; + + int pipeId[MAX_NODE]; /* enum pipeline */ + unsigned int connectionMode[MAX_NODE]; + + ExynosCameraDeviceInfo() + { + for (int i = 0; i < MAX_NODE; i++) { + nodeNum[i] = -1; + secondaryNodeNum[i] = -1; + + memset(nodeName[i], 0, EXYNOS_CAMERA_NAME_STR_SIZE); + memset(secondaryNodeName[i], 0, EXYNOS_CAMERA_NAME_STR_SIZE); + + pipeId[i] = -1; + connectionMode[i] = 0; + } + } + + ExynosCameraDeviceInfo& operator =(const ExynosCameraDeviceInfo &other) + { + for (int i = 0; i < MAX_NODE; i++) { + nodeNum[i] = other.nodeNum[i]; + secondaryNodeNum[i] = other.secondaryNodeNum[i]; + + strncpy(nodeName[i], other.nodeName[i], EXYNOS_CAMERA_NAME_STR_SIZE - 1); + strncpy(secondaryNodeName[i], other.secondaryNodeName[i], EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + pipeId[i] = other.pipeId[i]; + } + + return *this; + } +} camera_device_info_t; + +class ExynosCameraMCPipe : protected virtual ExynosCameraPipe { +public: + ExynosCameraMCPipe() + { + m_init(NULL); + } + + ExynosCameraMCPipe( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + camera_device_info_t *deviceInfo) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, NULL) + { + m_init(deviceInfo); + } + + virtual ~ExynosCameraMCPipe(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t precreate(int32_t *sensorIds = NULL); + virtual status_t postcreate(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds, int32_t *secondarySensorIds); + virtual status_t prepare(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual bool flagStart(void); + + virtual status_t startThread(void); + virtual status_t stopThread(void); + virtual status_t stopThreadAndWait(int sleep, int times); + + virtual bool flagStartThread(void); + + virtual status_t sensorStream(bool on); + virtual status_t forceDone(unsigned int cid, int value); + virtual status_t setControl(int cid, int value); + virtual status_t getControl(int cid, int *value); + virtual status_t setExtControl(struct v4l2_ext_controls *ctrl); + virtual status_t setParam(struct v4l2_streamparm streamParam); + + virtual status_t pushFrame(ExynosCameraFrame **newFrame); + + virtual status_t instantOn(int32_t numFrames); +/* Don't use this function, this is regacy code */ + virtual status_t instantOnQbuf(ExynosCameraFrame **frame, BUFFER_POS::POS pos); +/* Don't use this function, this is regacy code */ + virtual status_t instantOnDQbuf(ExynosCameraFrame **frame, BUFFER_POS::POS pos); + virtual status_t instantOff(void); +/* Don't use this function, this is regacy code */ + virtual status_t instantOnPushFrameQ(BUFFERQ_TYPE::TYPE type, ExynosCameraFrame **frame); + + virtual status_t getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition); + virtual int getCameraId(void); + virtual status_t setPipeId(uint32_t id); + virtual uint32_t getPipeId(void); + virtual status_t 
setPipeId(enum NODE_TYPE nodeType, uint32_t id); + virtual int getPipeId(enum NODE_TYPE nodeType); + + virtual status_t setPipeName(const char *pipeName); + virtual char *getPipeName(void); + + virtual status_t clearInputFrameQ(void); + virtual status_t getInputFrameQ(frame_queue_t **inputQ); + virtual status_t setOutputFrameQ(frame_queue_t *outputQ); + virtual status_t getOutputFrameQ(frame_queue_t **outputQ); + + virtual status_t setBoosting(bool isBoosting); + + virtual bool isThreadRunning(void); + + virtual status_t getThreadState(int **threadState); + virtual status_t getThreadInterval(uint64_t **timeInterval); + virtual status_t getThreadRenew(int **timeRenew); + virtual status_t incThreadRenew(void); + virtual status_t setStopFlag(void); + + virtual int getRunningFrameCount(void); + + virtual void dump(void); + + /* only for debugging */ + virtual status_t dumpFimcIsInfo(bool bugOn); +//#ifdef MONITOR_LOG_SYNC + virtual status_t syncLog(uint32_t syncId); +//#endif + +/* MC Pipe include buffer manager, so FrameFactory(ExynosCamera) must set buffer manager to pipe. + * Add interface for set buffer manager to pipe. + */ + virtual status_t setBufferManager(ExynosCameraBufferManager **bufferManager); + +/* MC Pipe have several nodes. It need to specify nodes. + * Add interface set/get control with specify nodes. + */ + virtual status_t setControl(int cid, int value, enum NODE_TYPE nodeType); + virtual status_t getControl(int cid, int *value, enum NODE_TYPE nodeType); + virtual status_t setExtControl(struct v4l2_ext_controls *ctrl, enum NODE_TYPE nodeType); + +/* Set map buffer is makes node operation faster at first start. + * Thereby map buffer before start, reduce map buffer time at start. + * It use first Exynos5430/5433 in old pipe. + */ + virtual status_t setMapBuffer(ExynosCameraBuffer *srcBuf = NULL, ExynosCameraBuffer *dstBuf = NULL); + +/* MC Pipe have two output queue. + * If you want push frame to FrameDoneQ in ExynosCamera explicitly, use this interface. 
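+ * A caller-side sketch (the owner-side queue names are assumptions, not part of this header):
+ *
+ *   mcPipe->setOutputFrameQ(m_pipeOutputQ);   // every completed frame is returned here
+ *   mcPipe->setFrameDoneQ(m_pipeDoneQ);       // additionally signaled per finished frame when set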
+ */ + + virtual status_t setFrameDoneQ(frame_queue_t *frameDoneQ); + virtual status_t getFrameDoneQ(frame_queue_t **frameDoneQ); + + virtual status_t setNodeInfos(camera_node_objects_t *nodeObjects, bool flagReset = false); + virtual status_t getNodeInfos(camera_node_objects_t *nodeObjects); + +#ifdef USE_MCPIPE_SERIALIZATION_MODE + virtual void needSerialization(bool enable); +#endif + +protected: + virtual bool m_putBufferThreadFunc(void); + virtual bool m_getBufferThreadFunc(void); + + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + + virtual status_t m_updateMetadataToFrame(void *metadata, int index, ExynosCameraFrame *frame = NULL, enum NODE_TYPE nodeLocation = OUTPUT_NODE); + virtual status_t m_getFrameByIndex(ExynosCameraFrame **frame, int index, enum NODE_TYPE nodeLocation = OUTPUT_NODE); + virtual status_t m_completeFrame(ExynosCameraFrame *frame, bool isValid = true); + + virtual status_t m_setInput(ExynosCameraNode *nodes[], int32_t *nodeNums, int32_t *sensorIds); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + virtual status_t m_setNodeInfo(ExynosCameraNode *node, camera_pipe_info_t *pipeInfos, + uint32_t planeCount, enum YUV_RANGE yuvRange, + bool flagBayer = false); + virtual status_t m_forceDone(ExynosCameraNode *node, unsigned int cid, int value); + + virtual status_t m_startNode(void); + virtual status_t m_stopNode(void); + virtual status_t m_clearNode(void); + + virtual status_t m_checkNodeGroupInfo(char *name, camera2_node *oldNode, camera2_node *newNode); + virtual status_t m_checkNodeGroupInfo(char *name, int index, camera2_node *oldNode, camera2_node *newNode); + virtual status_t m_checkNodeGroupInfo(int index, camera2_node *oldNode, camera2_node *newNode); + + virtual void m_dumpRunningFrameList(void); + + virtual void m_dumpPerframeNodeGroupInfo(const char *name, camera_pipe_perframe_node_group_info_t nodeInfo); + virtual void m_dumpPerframeShotInfo(const char *name, int frameCount, camera2_shot_ext *shot_ext); + + virtual void m_configDvfs(void); + virtual bool m_flagValidInt(int num); + virtual bool m_checkThreadLoop(frame_queue_t *frameQ); + + virtual status_t m_preCreate(void); + virtual status_t m_postCreate(int32_t *sensorIds = NULL); + + virtual status_t m_checkShotDone(struct camera2_shot_ext *shot_ext); + /* m_updateMetadataFromFrame() will be deprecated */ + virtual status_t m_updateMetadataFromFrame(ExynosCameraFrame *frame, ExynosCameraBuffer *buffer); + virtual status_t m_updateMetadataFromFrame_v2(ExynosCameraFrame *frame, ExynosCameraBuffer *buffer); + + virtual status_t m_getPerframePosition(int *perframePosition, uint32_t pipeId); + + virtual status_t m_setSetfile(ExynosCameraNode *node, uint32_t pipeId); + + virtual status_t m_setMapBuffer(int nodeIndex); + virtual status_t m_setMapBuffer(ExynosCameraNode *node, ExynosCameraBuffer *buffer); + + virtual status_t m_setJpegInfo(int nodeType, ExynosCameraBuffer *buffer); + + status_t m_checkPolling(ExynosCameraNode *node); + +private: + void m_init(camera_device_info_t *deviceInfo); + +protected: +/* + * For replace old pipe, MCPipe have all variables. + * but, if exist old pipe, same variable not declared by MCPpipe. 
+ */ + +/* + ExynosCameraParameters *m_parameters; + ExynosCameraBufferManager *m_bufferManager[MAX_NODE]; + ExynosCameraActivityControl *m_activityControl; +*/ + typedef ExynosCameraThread MCPipeThread; + sp m_putBufferThread; + sp m_getBufferThread; +/* + struct ExynosConfigInfo *m_exynosconfig; + + ExynosCameraNode *m_node[MAX_NODE]; + int32_t m_nodeNum[MAX_NODE]; + int32_t m_sensorIds[MAX_NODE]; + + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + + frame_queue_t *m_inputFrameQ; + frame_queue_t *m_outputFrameQ; + frame_queue_t *m_frameDoneQ; +*/ + + ExynosCameraFrame *m_runningFrameList[MAX_NODE][MAX_BUFFERS]; + uint32_t m_numOfRunningFrame[MAX_NODE]; + + uint32_t m_pipeIdArr[MAX_NODE]; + +/* + uint32_t m_pipeId; + int32_t m_cameraId; + + uint32_t m_prepareBufferCount; +*/ + uint32_t m_numBuffers[MAX_NODE]; + +/* + bool m_reprocessing; + bool m_flagStartPipe; + bool m_flagTryStop; + bool m_dvfsLocked; + bool m_isBoosting; + bool m_metadataTypeShot; + + ExynosCameraDurationTimer m_timer; + int m_threadCommand; + uint64_t m_timeInterval; + int m_threadState; + int m_threadRenew; + + Mutex m_pipeframeLock; + + camera2_node_group m_curNodeGroupInfo; +*/ + camera_pipe_perframe_node_group_info_t m_perframeMainNodeGroupInfo[MAX_NODE]; + +/* int m_setfile; */ + + ExynosCameraNode *m_secondaryNode[MAX_NODE]; + int32_t m_secondaryNodeNum[MAX_NODE]; + int32_t m_secondarySensorIds[MAX_NODE]; + camera_device_info_t *m_deviceInfo; + + frame_queue_t *m_requestFrameQ; + + ExynosCameraBuffer m_skipBuffer[MAX_NODE]; + bool m_skipPutBuffer[MAX_NODE]; + +#ifdef USE_MCPIPE_SERIALIZATION_MODE + bool m_serializeOperation; + static Mutex g_serializationLock; +#endif +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipe.cpp new file mode 100644 index 0000000..0aecf9b --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe.cpp @@ -0,0 +1,2091 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipe" +#include + +#include "ExynosCameraPipe.h" + +namespace android { + +ExynosCameraPipe::~ExynosCameraPipe() +{ + /* don't call virtual function */ +} + +status_t ExynosCameraPipe::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d -> %d", __FUNCTION__, __LINE__, i, m_sensorIds[i], sensorIds[i]); + + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create(m_name, m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]):OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(m_nodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[OUTPUT_NODE]); + } + + /* mainNode is OUTPUT_NODE */ + m_mainNodeNum = OUTPUT_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe::m_mainThreadFunc, "mainThread"); + + m_inputFrameQ = new frame_queue_t; + + m_prepareBufferCount = 0; +#ifdef USE_CAMERA2_API_SUPPORT + m_timeLogCount = TIME_LOG_COUNT; +#endif + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::precreate(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create("main", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(FIMC_IS_VIDEO_SS0_NUM); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, FIMC_IS_VIDEO_SS0_NUM); + } + + /* mainNode is OUTPUT_NODE */ + m_mainNodeNum = OUTPUT_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + CLOGI("INFO(%s[%d]):precreate() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::postcreate(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = 
ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe::m_mainThreadFunc, "mainThread"); + + m_inputFrameQ = new frame_queue_t; + + m_prepareBufferCount = 0; + CLOGI("INFO(%s[%d]):postcreate() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + for (int i = MAX_NODE - 1; 0 <= i; i--) { + if (m_node[i] != NULL) { + if (m_node[i]->close() != NO_ERROR) { + CLOGE("ERR(%s): close(%d) fail", __FUNCTION__, i); + return INVALID_OPERATION; + } + delete m_node[i]; + m_node[i] = NULL; + CLOGD("DEBUG(%s):Node(%d, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, i, m_nodeNum[i], m_sensorIds[i]); + } + } + + m_mainNodeNum = -1; + m_mainNode = NULL; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + /* TODO: check node state */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for(int i = 0; i < MAX_NODE; i++) { + for (uint32_t j = 0; j < m_numBuffers; j++) { + m_runningFrameList[j] = NULL; + m_nodeRunningFrameList[i][j] = NULL; + } + } + + m_numOfRunningFrame = 0; + + m_prepareBufferCount = 0; + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + return NO_ERROR; +} + +status_t ExynosCameraPipe::setupPipe(__unused camera_pipe_info_t *pipeInfos, __unused int32_t *sensorIds, __unused int32_t *ispSensorIds) +{ + CLOGE("ERR(%s[%d]): unexpected api call. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; +} + +status_t ExynosCameraPipe::prepare(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + if (m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_sensorIds[OUTPUT_NODE]) == false) { + CLOGW("WARN(%s[%d]): prepare on m2m src is logically weird. so, skip.", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + for (uint32_t i = 0; i < m_prepareBufferCount; i++) { + ret = m_putBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_putBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::prepare(uint32_t prepareCnt) +{ + CLOGD("DEBUG(%s[%d] prepareCnt:%d)", __FUNCTION__, __LINE__, prepareCnt); + status_t ret = NO_ERROR; + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. 
so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + if (m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_sensorIds[OUTPUT_NODE]) == false) { + CLOGW("WARN(%s[%d]): prepare on m2m src is logically weird. so, skip.", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + for (uint32_t i = 0; i < prepareCnt; i++) { + ret = m_putBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_putBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + /* TODO: check state ready for start */ + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s): Starting Node Error!", __FUNCTION__); + return ret; + } + + m_threadState = 0; + m_threadRenew = 0; + m_threadCommand = 0; + m_timeInterval = 0; + + m_flagStartPipe = true; + m_flagTryStop = false; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + m_flagStartPipe = false; + + ret = m_mainNode->stop(); + if (ret < 0) { + CLOGE("ERR(%s): node stop fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + + m_mainThread->requestExitAndWait(); + + ret = m_mainNode->clrBuffers(); + if (ret < 0) { + CLOGE("ERR(%s): node clrBuffers fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + m_mainNode->removeItemBufferQ(); + + for(int i = 0; i < MAX_NODE; i++) { + for (uint32_t j = 0; j < m_numBuffers; j++) { + m_runningFrameList[j] = NULL; + m_nodeRunningFrameList[i][j] = NULL; + } + } + + m_numOfRunningFrame = 0; + + m_threadState = 0; + m_threadRenew = 0; + m_threadCommand = 0; + m_timeInterval = 0; + m_flagTryStop= false; + + return NO_ERROR; +} + +bool ExynosCameraPipe::flagStart(void) +{ + return m_flagStartPipe; +} + +status_t ExynosCameraPipe::startThread(void) +{ + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s): outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_timer.start(); + if (m_mainThread->isRunning() == false) { + m_mainThread->run(); + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + } else { + CLOGW("WRN(%s[%d]):startThread is already running (%d)", __FUNCTION__, __LINE__, getPipeId()); + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::stopThread(void) +{ + m_mainThread->requestExit(); + m_inputFrameQ->sendCmd(WAKE_UP); + + m_dumpRunningFrameList(); + + return NO_ERROR; +} + +bool ExynosCameraPipe::flagStartThread(void) +{ + return m_mainThread->isRunning(); +} + +status_t ExynosCameraPipe::stopThreadAndWait(int sleep, int times) +{ + CLOGD("DEBUG(%s[%d]) IN", __FUNCTION__, __LINE__); + status_t status = NO_ERROR; + int i = 0; + + for (i = 0; i < times ; i++) { + if (m_mainThread->isRunning() == false) { + break; + } + usleep(sleep * 1000); + } + + if (i >= times) { + status = TIMED_OUT; + CLOGE("ERR(%s[%d]): stopThreadAndWait failed, waitTime(%d)ms", __FUNCTION__, __LINE__, sleep*times); + } + + CLOGD("DEBUG(%s[%d]) OUT", __FUNCTION__, __LINE__); + return status; +} + +status_t ExynosCameraPipe::sensorStream(bool on) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + 
+ if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + int value = on ? IS_ENABLE_STREAM: IS_DISABLE_STREAM; + + ret = m_mainNode->setControl(V4L2_CID_IS_S_STREAM, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->sensorStream failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::forceDone(unsigned int cid, int value) +{ + status_t ret = NO_ERROR; + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (m_mainNode == NULL) { + CLOGE("ERR(%s):m_mainNode is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + ret = m_forceDone(m_mainNode, cid, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_forceDone is failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::setControl(int cid, int value) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->setControl(cid, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->setControl failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::setControl(int cid, int value, __unused enum NODE_TYPE nodeType) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->setControl(cid, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->setControl failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::getControl(int cid, int *value) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->getControl(cid, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->getControl failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::setExtControl(struct v4l2_ext_controls *ctrl) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s):m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->setExtControl(ctrl); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->setControl failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::setParam(struct v4l2_streamparm streamParam) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->setParam(&streamParam); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->setControl failed", __FUNCTION__); + + return ret; +} + +status_t ExynosCameraPipe::pushFrame(ExynosCameraFrame **newFrame) +{ + Mutex::Autolock lock(m_pipeframeLock); + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return BAD_VALUE; + } + + m_inputFrameQ->pushProcessQ(newFrame); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::instantOn(__unused int32_t numFrames) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. 
so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + + ret = m_mainNode->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): mainNode instantOn fail", __FUNCTION__, __LINE__); + return ret; + } + + return ret; +} + +status_t ExynosCameraPipe::instantOnQbuf(ExynosCameraFrame **frame, BUFFER_POS::POS pos) +{ + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + int ret = 0; + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_mainNode->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + if(pos == BUFFER_POS::DST) + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + else if(pos == BUFFER_POS::SRC) + ret = newFrame->getSrcBuffer(getPipeId(), &newBuffer); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_runningFrameList[newBuffer.index] != NULL) { + CLOGE("ERR(%s):new buffer is invalid, we already get buffer index(%d), newFrame->frameCount(%d)", + __FUNCTION__, newBuffer.index, newFrame->getFrameCount()); + return BAD_VALUE; + } + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[newBuffer.planeCount - 1]); + + if (shot_ext != NULL) { + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&newBuffer); + + /* set metadata for instant on */ + shot_ext->shot.ctl.scaler.cropRegion[0] = 0; + shot_ext->shot.ctl.scaler.cropRegion[1] = 0; +#if defined(FASTEN_AE_WIDTH) && defined(FASTEN_AE_HEIGHT) + shot_ext->shot.ctl.scaler.cropRegion[2] = FASTEN_AE_WIDTH; + shot_ext->shot.ctl.scaler.cropRegion[3] = FASTEN_AE_HEIGHT; +#else + int bcropW = 0; + int bcropH = 0; + + shot_ext->shot.ctl.scaler.cropRegion[2] = bcropW; + shot_ext->shot.ctl.scaler.cropRegion[3] = bcropH; +#endif + + setMetaCtlAeTargetFpsRange(shot_ext, FASTEN_AE_FPS, FASTEN_AE_FPS); + setMetaCtlSensorFrameDuration(shot_ext, (uint64_t)((1000 * 1000 * 1000) / (uint64_t)FASTEN_AE_FPS)); + + /* set afMode into INFINITY */ + shot_ext->shot.ctl.aa.afTrigger = AA_AF_TRIGGER_CANCEL; + shot_ext->shot.ctl.aa.vendor_afmode_option &= (0 << AA_AFMODE_OPTION_BIT_MACRO); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + camera2_node_group node_group_info; + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex); + + /* Per - Leader */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(m_mainNode->getName(), &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%s) fail", __FUNCTION__, __LINE__, m_mainNode->getName()); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + 
node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - Captures */ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum - 1; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + } + } + ret = m_mainNode->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + return ret; + /* TODO: doing exception handling */ + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + m_runningFrameList[newBuffer.index] = newFrame; + + m_numOfRunningFrame++; + + *frame = newFrame; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::instantOnDQbuf(ExynosCameraFrame **frame, __unused BUFFER_POS::POS pos) +{ + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. 
so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + + if (m_numOfRunningFrame <= 0 ) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_numOfRunningFrame); + return NO_ERROR; + } + + ret = m_mainNode->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return ret; + } + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d) fail", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + ret = m_updateMetadataToFrame(curBuffer.addr[curBuffer.planeCount - 1], curBuffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + + if (curBuffer.index < 0) { + CLOGE("ERR(%s):index(%d) is invalid", __FUNCTION__, curBuffer.index); + return BAD_VALUE; + } + + curFrame = m_runningFrameList[curBuffer.index]; + + if (curFrame == NULL) { + CLOGE("ERR(%s):Unknown buffer, frame is NULL", __FUNCTION__); + dump(); + return BAD_VALUE; + } + + *frame = curFrame; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::instantOff(void) +{ + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int ret = 0; + + ret = m_mainNode->stop(); + + ret = m_mainNode->clrBuffers(); + if (ret < 0) { + CLOGE("ERR(%s):3AA output node clrBuffers fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + + for(int i = 0; i < MAX_NODE; i++) { + for (int j = 0; j < MAX_BUFFERS; j++) { + m_runningFrameList[j] = NULL; + m_nodeRunningFrameList[i][j] = NULL; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::instantOnPushFrameQ(BUFFERQ_TYPE::TYPE type, ExynosCameraFrame **frame) +{ + if( type == BUFFERQ_TYPE::OUTPUT ) + m_outputFrameQ->pushProcessQ(frame); + else + m_inputFrameQ->pushProcessQ(frame); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::getPipeInfo(int *fullW, int *fullH, int *colorFormat, __unused int pipePosition) +{ + if (m_mainNode == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + int planeCount = 0; + int ret = NO_ERROR; + + ret = m_mainNode->getSize(fullW, fullH); + if (ret < 0) { + CLOGE("ERR(%s):getSize fail", __FUNCTION__); + return ret; + } + + ret = m_mainNode->getColorFormat(colorFormat, &planeCount); + if (ret < 0) { + CLOGE("ERR(%s):getColorFormat fail", __FUNCTION__); + return ret; + } + + return ret; +} + +int ExynosCameraPipe::getCameraId(void) +{ + return this->m_cameraId; +} + +status_t ExynosCameraPipe::setPipeId(uint32_t id) +{ + this->m_pipeId = id; + + return NO_ERROR; +} + +uint32_t ExynosCameraPipe::getPipeId(void) +{ + return this->m_pipeId; +} + +int ExynosCameraPipe::getPipeId(__unused enum NODE_TYPE nodeType) +{ + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Not supported API. use MCPipe, assert!!!!", __FUNCTION__, __LINE__); + + return -1; +} + +status_t ExynosCameraPipe::setPipeId(__unused enum NODE_TYPE nodeType, __unused uint32_t id) +{ + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):Not supported API. 
use MCPipe, assert!!!!", __FUNCTION__, __LINE__); + + return INVALID_OPERATION; +} + +status_t ExynosCameraPipe::setPipeName(const char *pipeName) +{ + strncpy(m_name, pipeName, EXYNOS_CAMERA_NAME_STR_SIZE - 1); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::setBufferManager(ExynosCameraBufferManager **bufferManager) +{ + for (int i = 0; i < MAX_NODE; i++) + m_bufferManager[i] = bufferManager[i]; + + return NO_ERROR; +} + +char *ExynosCameraPipe::getPipeName(void) +{ + return m_name; +} + +status_t ExynosCameraPipe::clearInputFrameQ(void) +{ + if (m_inputFrameQ != NULL) + m_inputFrameQ->release(); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::getInputFrameQ(frame_queue_t **inputFrameQ) +{ + *inputFrameQ = m_inputFrameQ; + + if (*inputFrameQ == NULL) + CLOGE("ERR(%s[%d])inputFrameQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::setOutputFrameQ(frame_queue_t *outputFrameQ) +{ + m_outputFrameQ = outputFrameQ; + return NO_ERROR; +} + +status_t ExynosCameraPipe::getOutputFrameQ(frame_queue_t **outputFrameQ) +{ + *outputFrameQ = m_outputFrameQ; + + if (*outputFrameQ == NULL) + CLOGE("ERR(%s[%d])outputFrameQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::getFrameDoneQ(frame_queue_t **frameDoneQ) +{ + *frameDoneQ = m_frameDoneQ; + + if (*frameDoneQ == NULL) + CLOGE("ERR(%s[%d]):frameDoneQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::setFrameDoneQ(frame_queue_t *frameDoneQ) +{ + m_frameDoneQ = frameDoneQ; + m_flagFrameDoneQ = true; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::setNodeInfos(__unused camera_node_objects_t *nodeObjects, __unused bool flagReset) +{ + CLOGD("DEBUG(%s[%d]):do not support SUPPORT_GROUP_MIGRATION", __FUNCTION__, __LINE__); + return NO_ERROR; +} + +status_t ExynosCameraPipe::getNodeInfos(__unused camera_node_objects_t *nodeObjects) +{ + CLOGD("DEBUG(%s[%d]):do not support SUPPORT_GROUP_MIGRATION", __FUNCTION__, __LINE__); + return NO_ERROR; +} + +status_t ExynosCameraPipe::setMapBuffer(__unused ExynosCameraBuffer *srcBuf, __unused ExynosCameraBuffer *dstBuf) +{ + return NO_ERROR; +} + +status_t ExynosCameraPipe::setBoosting(bool isBoosting) +{ + m_isBoosting = isBoosting; + + return NO_ERROR; +} + +bool ExynosCameraPipe::isThreadRunning() +{ + return m_mainThread->isRunning(); +} + +status_t ExynosCameraPipe::getThreadState(int **threadState) +{ + *threadState = &m_threadState; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::getThreadInterval(uint64_t **timeInterval) +{ + *timeInterval = &m_timeInterval; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::getThreadRenew(int **timeRenew) +{ + *timeRenew = &m_threadRenew; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::incThreadRenew(void) +{ + m_threadRenew ++; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::setStopFlag(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_flagTryStop = true; + + return NO_ERROR; +} + +int ExynosCameraPipe::getRunningFrameCount(void) +{ + int runningFrameCount = 0; + + for (uint32_t i = 0; i < m_numBuffers; i++) { + if (m_runningFrameList[i] != NULL) { + runningFrameCount++; + } + } + + return runningFrameCount; +} + +void ExynosCameraPipe::setOneShotMode(bool enable) +{ + CLOGI("INFO(%s[%d]):%s %s OneShot mode", + __FUNCTION__, __LINE__, m_name, + (enable == true)? 
"Enable" : "Disable"); + + m_oneShotMode = enable; +} + +#ifdef USE_MCPIPE_SERIALIZATION_MODE +void ExynosCameraPipe::needSerialization(__unused bool enable) +{ + CLOGD("DEBUG(%s[%d]):do not support %s()", __FUNCTION__, __LINE__, __FUNCTION__); +} +#endif + +void ExynosCameraPipe::dump(void) +{ + CLOGI("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_dumpRunningFrameList(); + + if (m_mainNode != NULL) + m_mainNode->dump(); + + return; +} + +status_t ExynosCameraPipe::dumpFimcIsInfo(bool bugOn) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + ret = m_mainNode->setControl(V4L2_CID_IS_DEBUG_DUMP, bugOn); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->setControl failed", __FUNCTION__); + + return ret; +} + +//#ifdef MONITOR_LOG_SYNC +status_t ExynosCameraPipe::syncLog(uint32_t syncId) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + ret = m_mainNode->setControl(V4L2_CID_IS_DEBUG_SYNC_LOG, syncId); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_mainNode->setControl failed", __FUNCTION__); + + return ret; +} +//#endif + +bool ExynosCameraPipe::m_mainThreadFunc(void) +{ + int ret = 0; + + /* TODO: check exit condition */ + /* running list != empty */ + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_getBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return false; + } + + ret = m_putBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + + CLOGE("ERR(%s[%d]): m_putBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return false; + } + + return true; +} + +status_t ExynosCameraPipe::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + int ret = 0; + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return NO_ERROR; + } + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { +#ifdef USE_CAMERA2_API_SUPPORT + /* + * TIMEOUT log print + * condition 1 : it is not reprocessing + * condition 2 : if it is reprocessing, but m_timeLogCount is equals or lower than 0 + */ + if (!(m_parameters->isReprocessing() == true && m_timeLogCount <= 0)) { + m_timeLogCount--; +#endif + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_mainNode->dumpState(); +#ifdef USE_CAMERA2_API_SUPPORT + } +#endif + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_runningFrameList[newBuffer.index] != NULL) { + CLOGE("ERR(%s):new buffer is invalid, we already get buffer index(%d), newFrame->frameCount(%d)", + __FUNCTION__, newBuffer.index, newFrame->getFrameCount()); + m_dumpRunningFrameList(); + + return BAD_VALUE; + } + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[newBuffer.planeCount - 1]); + if (shot_ext != NULL) { + newFrame->getMetaData(shot_ext); + if (m_parameters->getHalVersion() != IS_HAL_VER_3_2) + m_parameters->duplicateCtrlMetadata((void 
*)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&newBuffer); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + camera2_node_group node_group_info; + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex); + + /* Per - Leader */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(m_mainNode->getName(), &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - Captures */ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum - 1; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + } + } + + ret = m_mainNode->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + return ret; + /* TODO: doing exception handling */ + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + +#ifdef SUPPORT_DEPTH_MAP + if (getPipeId() == PIPE_FLITE && newFrame->getRequest(PIPE_VC1) == true) { + ExynosCameraBuffer depthMapBuffer; + ret = newFrame->getDstBuffer(getPipeId(), &depthMapBuffer, CAPTURE_NODE_2); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to get CAPTURE_NODE_2 buffer. 
ret %d", + __FUNCTION__, __LINE__, ret); + return OK; + } + + ret = m_node[CAPTURE_NODE_2]->putBuffer(&depthMapBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + return ret; + /* TODO: doing exception handling */ + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING, CAPTURE_NODE_2); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + } +#endif + + m_runningFrameList[newBuffer.index] = newFrame; + m_numOfRunningFrame++; + +#ifdef USE_CAMERA2_API_SUPPORT + m_timeLogCount = TIME_LOG_COUNT; +#endif + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", + __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + ret = m_mainNode->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return ret; + } + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d) fail", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + ret = m_updateMetadataToFrame(curBuffer.addr[curBuffer.planeCount - 1], curBuffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + /* complete frame */ + ret = m_completeFrame(&curFrame, curBuffer); + if (ret < 0) { + CLOGE("ERR(%s):m_comleteFrame fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is fail", __FUNCTION__); + } + + m_outputFrameQ->pushProcessQ(&curFrame); + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_updateMetadataToFrame(void *metadata, int index) +{ + int ret = 0; + ExynosCameraFrame *curFrame = NULL; + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)metadata; + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]): metabuffer is null", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + if (index < 0) { + CLOGE("ERR(%s[%d]): Invalid index(%d)", __FUNCTION__, __LINE__, index); + return BAD_VALUE; + } + + if (m_metadataTypeShot == false) { + CLOGV("DEBUG(%s[%d]): stream type do not need update metadata", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + ret = m_getFrameByIndex(&curFrame, index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_getFrameByIndex fail, index(%d), ret(%d)", __FUNCTION__, __LINE__, index, ret); + return ret; + } + + ret = curFrame->storeDynamicMeta(shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = curFrame->storeUserDynamicMeta(shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]): storeUserDynamicMeta fail ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_getFrameByIndex(ExynosCameraFrame **frame, int index) +{ + *frame = m_runningFrameList[index]; + if (*frame == NULL) { + CLOGE("ERR(%s[%d]):Unknown buffer, index %d frame is NULL", __FUNCTION__, __LINE__, index); + dump(); + return BAD_VALUE; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_completeFrame( + ExynosCameraFrame **frame, + ExynosCameraBuffer buffer, + bool isValid) +{ + int ret = 0; + 
+ if (buffer.index < 0) { + CLOGE("ERR(%s):index(%d) is invalid", __FUNCTION__, buffer.index); + return BAD_VALUE; + } + + *frame = m_runningFrameList[buffer.index]; + + if (*frame == NULL) { + CLOGE("ERR(%s):Unknown buffer, frame is NULL", __FUNCTION__); + dump(); + return BAD_VALUE; + } + + if (isValid == false) { + CLOGD("DEBUG(%s[%d]):NOT DONE frameCount %d, buffer index(%d)", __FUNCTION__, __LINE__, + (*frame)->getFrameCount(), buffer.index); + } + + ret = (*frame)->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + CLOGV("DEBUG(%s):entity pipeId(%d), buffer index(%d), frameCount(%d)", + __FUNCTION__, buffer.index, getPipeId(), + m_runningFrameList[buffer.index]->getFrameCount()); + + m_runningFrameList[buffer.index] = NULL; + m_numOfRunningFrame--; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_setInput(ExynosCameraNode *nodes[], int32_t *nodeNums, int32_t *sensorIds) +{ + status_t ret = NO_ERROR; + int currentSensorId[MAX_NODE] = {0, }; + + if (nodeNums == NULL || sensorIds == NULL) { + CLOGE("ERR(%s[%d]): nodes == %p || nodeNum == %p || sensorId == %p", + __FUNCTION__, __LINE__, nodes, nodeNums, sensorIds); + return INVALID_OPERATION; + } + + for (int i = 0; i < MAX_NODE; i++) { + if (m_flagValidInt(nodeNums[i]) == false) + continue; + + if (m_flagValidInt(sensorIds[i]) == false) + continue; + + if (nodes[i] == NULL) + continue; + + currentSensorId[i] = nodes[i]->getInput(); + + if (m_flagValidInt(currentSensorId[i]) == false || + currentSensorId[i] != sensorIds[i]) { + +#ifdef FIMC_IS_VIDEO_BAS_NUM + CLOGD("DEBUG(%s[%d]): setInput(sensorIds : %d) [src nodeNum : %d][nodeNums : %d]\ + [otf : %d][leader : %d][reprocessing : %d][unique sensorId : %d]", + __FUNCTION__, __LINE__, + sensorIds[i], + ((sensorIds[i] & INPUT_VINDEX_MASK) >> INPUT_VINDEX_SHIFT) + FIMC_IS_VIDEO_BAS_NUM, + nodeNums[i], + ((sensorIds[i] & INPUT_MEMORY_MASK) >> INPUT_MEMORY_SHIFT), + ((sensorIds[i] & INPUT_LEADER_MASK) >> INPUT_LEADER_SHIFT), + ((sensorIds[i] & INPUT_STREAM_MASK) >> INPUT_STREAM_SHIFT), + ((sensorIds[i] & INPUT_MODULE_MASK) >> INPUT_MODULE_SHIFT)); +#else + CLOGD("DEBUG(%s[%d]): setInput(sensorIds : %d)", + __FUNCTION__, __LINE__, sensorIds[i]); +#endif + ret = nodes[i]->setInput(sensorIds[i]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): nodeNums[%d] : %d, setInput(sensorIds : %d fail, ret(%d)", + __FUNCTION__, __LINE__, i, nodeNums[i], sensorIds[i], + ret); + + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraPipe::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + uint32_t planeCount = 2; + uint32_t bytePerPlane = 0; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + int colorFormat = pipeInfos[0].rectInfo.colorFormat; + + getYuvFormatInfo(colorFormat, &bytePerPlane, &planeCount); + /* Add medadata plane count */ + planeCount++; + + ret = m_setNodeInfo(m_mainNode, &pipeInfos[0], + planeCount, YUV_FULL_RANGE); + + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_perframeMainNodeGroupInfo = pipeInfos[0].perFrameNodeGroupInfo; + m_numBuffers = pipeInfos[0].bufInfo.count; + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_setNodeInfo(ExynosCameraNode *node, camera_pipe_info_t *pipeInfos, + int planeCount, enum YUV_RANGE yuvRange, + __unused bool flagBayer) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (node == NULL) { + CLOGE("ERR(%s[%d]): node == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + bool flagSetRequest = false; + unsigned int requestBufCount = 0; + + int currentW = 0; + int currentH = 0; + + int currentV4l2Colorformat = 0; + int currentPlanesCount = 0; + enum YUV_RANGE currentYuvRange = YUV_FULL_RANGE; + + int currentBufferCount = 0; + enum v4l2_buf_type currentBufType; + enum v4l2_memory currentMemType; + + requestBufCount = node->reqBuffersCount(); + + /* If it already set */ + if (0 < requestBufCount) { + node->getSize(¤tW, ¤tH); + node->getColorFormat(¤tV4l2Colorformat, ¤tPlanesCount, ¤tYuvRange); + node->getBufferType(¤tBufferCount, ¤tBufType, ¤tMemType); + + if (/* setSize */ + currentW != pipeInfos->rectInfo.fullW || + currentH != pipeInfos->rectInfo.fullH || + /* setColorFormat */ + currentV4l2Colorformat != pipeInfos->rectInfo.colorFormat || + currentPlanesCount != planeCount || + currentYuvRange != yuvRange || + /* setBufferType */ + currentBufferCount != (int)pipeInfos->bufInfo.count || + currentBufType != (enum v4l2_buf_type)pipeInfos->bufInfo.type || + currentMemType != (enum v4l2_memory)pipeInfos->bufInfo.memory) { + + flagSetRequest = true; + + CLOGW("WARN(%s[%d]): node is already requested. 
so, call clrBuffers()", __FUNCTION__, __LINE__); + + CLOGW("WARN(%s[%d]): w (%d -> %d), h (%d -> %d)", + __FUNCTION__, __LINE__, + currentW, pipeInfos->rectInfo.fullW, + currentH, pipeInfos->rectInfo.fullH); + + CLOGW("WARN(%s[%d]): colorFormat (%d -> %d), planeCount (%d -> %d), yuvRange (%d -> %d)", + __FUNCTION__, __LINE__, + currentV4l2Colorformat, pipeInfos->rectInfo.colorFormat, + currentPlanesCount, planeCount, + currentYuvRange, yuvRange); + + CLOGW("WARN(%s[%d]): bufferCount (%d -> %d), bufType (%d -> %d), memType (%d -> %d)", + __FUNCTION__, __LINE__, + currentBufferCount, pipeInfos->bufInfo.count, + currentBufType, pipeInfos->bufInfo.type, + currentMemType, pipeInfos->bufInfo.memory); + + if (node->clrBuffers() != NO_ERROR) { + CLOGE("ERR(%s[%d]): node->clrBuffers() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + } else { + flagSetRequest = true; + } + + if (flagSetRequest == true) { + CLOGD("DEBUG(%s[%d]): set pipeInfos on %s : setFormat(%d, %d) and reqBuffers(%d).", + __FUNCTION__, __LINE__, node->getName(), pipeInfos->rectInfo.fullW, + pipeInfos->rectInfo.fullH, pipeInfos->bufInfo.count); + + bool flagValidSetFormatInfo = true; + + if (pipeInfos->rectInfo.fullW == 0 || pipeInfos->rectInfo.fullH == 0) { + CLOGW("WARN(%s[%d]): invalid size (%d x %d). So, skip setSize()", + __FUNCTION__, __LINE__, pipeInfos->rectInfo.fullW, pipeInfos->rectInfo.fullH); + flagValidSetFormatInfo = false; + } + node->setSize(pipeInfos->rectInfo.fullW, pipeInfos->rectInfo.fullH); + + if (pipeInfos->rectInfo.colorFormat == 0 || planeCount == 0) { + CLOGW("WARN(%s[%d]): invalid colorFormat(%d), planeCount(%d). So, skip setColorFormat()", + __FUNCTION__, __LINE__, pipeInfos->rectInfo.colorFormat, planeCount); + flagValidSetFormatInfo = false; + } + node->setColorFormat(pipeInfos->rectInfo.colorFormat, planeCount, yuvRange); + + if ((int)pipeInfos->bufInfo.type == 0 || pipeInfos->bufInfo.memory == 0) { + CLOGW("WARN(%s[%d]): invalid bufInfo.type(%d), bufInfo.memory(%d). So, skip setBufferType()", + __FUNCTION__, __LINE__, (int)pipeInfos->bufInfo.type, (int)pipeInfos->bufInfo.memory); + flagValidSetFormatInfo = false; + } + node->setBufferType(pipeInfos->bufInfo.count, (enum v4l2_buf_type)pipeInfos->bufInfo.type, + (enum v4l2_memory)pipeInfos->bufInfo.memory); + + if (flagValidSetFormatInfo == true) { +#if defined(DEBUG_RAWDUMP) + if (m_parameters->checkBayerDumpEnable() && flagBayer == true) { + //bytesPerLine[0] = (maxW + 16) * 2; + if (node->setFormat() != NO_ERROR) { + CLOGE("ERR(%s[%d]): node->setFormat() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } else +#endif + { + if (node->setFormat(pipeInfos->bytesPerPlane) != NO_ERROR) { + CLOGE("ERR(%s[%d]): node->setFormat() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + } + + node->getBufferType(¤tBufferCount, ¤tBufType, ¤tMemType); + + } else { + CLOGD("DEBUG(%s[%d]): SKIP set pipeInfos setFormat(%d, %d) and reqBuffers(%d).", + __FUNCTION__, __LINE__, pipeInfos->rectInfo.fullW, pipeInfos->rectInfo.fullH, pipeInfos->bufInfo.count); + } + + if (currentBufferCount <= 0) { + CLOGW("WARN(%s[%d]): invalid currentBufferCount(%d). 
So, skip reqBuffers()", + __FUNCTION__, __LINE__, currentBufferCount); + } else { + if (node->reqBuffers() != NO_ERROR) { + CLOGE("ERR(%s[%d]): node->reqBuffers() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_forceDone(ExynosCameraNode *node, unsigned int cid, int value) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = NO_ERROR; + + if (node == NULL) { + CLOGE("ERR(%s): m_mainNode == NULL. so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + if (cid != V4L2_CID_IS_FORCE_DONE) { + CLOGE("ERR(%s): cid != V4L2_CID_IS_FORCE_DONE so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + /* "value" is not meaningful */ + ret = node ->setControl(cid, value); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):node V4L2_CID_IS_FORCE_DONE failed", __FUNCTION__, __LINE__); + node->dump(); + return ret; + } + + return ret; +} + +bool ExynosCameraPipe::m_checkLeaderNode(int sensorId) +{ + bool ret = false; + +#ifdef INPUT_LEADER_MASK + if ((sensorId & INPUT_LEADER_MASK) >> INPUT_LEADER_SHIFT) + ret = true; +#endif + + return ret; +} + +status_t ExynosCameraPipe::m_startNode(void) +{ + status_t ret = NO_ERROR; + const char* nodeNames[MAX_NODE] = {"MAIN_OUTPUT", "MAIN_CAPTURE", "MAIN_SUB"}; + + for (int i = MAX_NODE - 1; 0 <= i; i--) { + /* only M2M mode need stream on/off */ + /* TODO : flite has different sensorId bit */ + if (m_node[i] != NULL && + m_isOtf(m_sensorIds[i]) == false) { + ret = m_node[i]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_node[%d](%s)->start fail!, ret(%d)", __FUNCTION__, __LINE__, i, nodeNames[i], ret); + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraPipe::m_stopNode(void) +{ + status_t ret = NO_ERROR; + const char* nodeNames[MAX_NODE] = {"MAIN_OUTPUT", "MAIN_CAPTURE", "MAIN_SUB"}; + + for (int i = 0 ; i < MAX_NODE; i++) { + /* only M2M mode need stream on/off */ + /* TODO : flite has different sensorId bit */ + if (m_node[i] != NULL && + m_isOtf(m_sensorIds[i]) == false) { + ret = m_node[i]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_node[%d](%s)->stop fail!, ret(%d)", __FUNCTION__, __LINE__, i, nodeNames[i], ret); + } + } + } + + return ret; +} + +status_t ExynosCameraPipe::m_clearNode(void) +{ + status_t ret = NO_ERROR; + const char* nodeNames[MAX_NODE] = {"MAIN_OUTPUT", "MAIN_CAPTURE", "MAIN_SUB"}; + + for (int i = 0 ; i < MAX_NODE; i++) { + if (m_node[i]) { + ret = m_node[i]->clrBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_node[%d](%s)->clrBuffers fail!, ret(%d)", + __FUNCTION__, __LINE__, i, nodeNames[i], ret); + return ret; + } + } + } + + return ret; +} + +status_t ExynosCameraPipe::m_checkNodeGroupInfo(char *name, camera2_node *oldNode, camera2_node *newNode) +{ + if (oldNode == NULL || newNode == NULL) { + CLOGE("ERR(%s[%d]): oldNode(%p) == NULL || newNode(%p) == NULL", __FUNCTION__, __LINE__, oldNode, newNode); + return INVALID_OPERATION; + } + + bool flagCropRegionChanged = false; + + for (int i = 0; i < 4; i++) { + if (oldNode->input.cropRegion[i] != newNode->input.cropRegion[i] || + oldNode->output.cropRegion[i] != newNode->output.cropRegion[i]) { + + CLOGD("DEBUG(%s[%d]): name %s : PerFrame oldCropSize (%d, %d, %d, %d / %d, %d, %d, %d) \ + -> newCropSize (%d, %d, %d, %d / %d, %d, %d, %d)", + __FUNCTION__, __LINE__, + name, + oldNode->input. cropRegion[0], oldNode->input. cropRegion[1], + oldNode->input. cropRegion[2], oldNode->input. 
cropRegion[3], + oldNode->output.cropRegion[0], oldNode->output.cropRegion[1], + oldNode->output.cropRegion[2], oldNode->output.cropRegion[3], + newNode->input. cropRegion[0], newNode->input. cropRegion[1], + newNode->input. cropRegion[2], newNode->input. cropRegion[3], + newNode->output.cropRegion[0], newNode->output.cropRegion[1], + newNode->output.cropRegion[2], newNode->output.cropRegion[3]); + + break; + } + } + + for (int i = 0; i < 4; i++) { + oldNode->input. cropRegion[i] = newNode->input. cropRegion[i]; + oldNode->output.cropRegion[i] = newNode->output.cropRegion[i]; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe::m_checkNodeGroupInfo(int index, camera2_node *oldNode, camera2_node *newNode) +{ + if (oldNode == NULL || newNode == NULL) { + CLOGE("ERR(%s[%d]): oldNode(%p) == NULL || newNode(%p) == NULL", __FUNCTION__, __LINE__, oldNode, newNode); + return INVALID_OPERATION; + } + + bool flagCropRegionChanged = false; + + for (int i = 0; i < 4; i++) { + if (oldNode->input.cropRegion[i] != newNode->input.cropRegion[i] || + oldNode->output.cropRegion[i] != newNode->output.cropRegion[i]) { + + CLOGD("DEBUG(%s[%d]): index %d : PerFrame oldCropSize (%d, %d, %d, %d / %d, %d, %d, %d) \ + -> newCropSize (%d, %d, %d, %d / %d, %d, %d, %d)", + __FUNCTION__, __LINE__, + index, + oldNode->input. cropRegion[0], oldNode->input. cropRegion[1], + oldNode->input. cropRegion[2], oldNode->input. cropRegion[3], + oldNode->output.cropRegion[0], oldNode->output.cropRegion[1], + oldNode->output.cropRegion[2], oldNode->output.cropRegion[3], + newNode->input. cropRegion[0], newNode->input. cropRegion[1], + newNode->input. cropRegion[2], newNode->input. cropRegion[3], + newNode->output.cropRegion[0], newNode->output.cropRegion[1], + newNode->output.cropRegion[2], newNode->output.cropRegion[3]); + + break; + } + } + + for (int i = 0; i < 4; i++) { + oldNode->input. cropRegion[i] = newNode->input. 
cropRegion[i]; + oldNode->output.cropRegion[i] = newNode->output.cropRegion[i]; + } + + return NO_ERROR; +} + +void ExynosCameraPipe::m_dumpRunningFrameList(void) +{ + CLOGI("DEBUG(%s[%d]):*********runningFrameList dump***********", __FUNCTION__, __LINE__); + CLOGI("DEBUG(%s[%d]):m_numBuffers : %d", __FUNCTION__, __LINE__, m_numBuffers); + for (uint32_t i = 0; i < m_numBuffers; i++) { + if (m_runningFrameList[i] == NULL) { + CLOGI("DEBUG:runningFrameList[%d] is NULL", i); + } else { + CLOGI("DEBUG:runningFrameList[%d]: fcount = %d", + i, m_runningFrameList[i]->getFrameCount()); + } + } +} + +void ExynosCameraPipe::m_dumpPerframeNodeGroupInfo(const char *name, camera_pipe_perframe_node_group_info_t nodeInfo) +{ + if (name != NULL) + CLOGI("DEBUG(%s[%d]):(%s) ++++++++++++++++++++", __FUNCTION__, __LINE__, name); + + CLOGI("\t\t perframeSupportNodeNum : %d", nodeInfo.perframeSupportNodeNum); + CLOGI("\t\t perFrameLeaderInfo.perframeInfoIndex : %d", nodeInfo.perFrameLeaderInfo.perframeInfoIndex); + CLOGI("\t\t perFrameLeaderInfo.perFrameVideoID : %d", nodeInfo.perFrameLeaderInfo.perFrameVideoID); + CLOGI("\t\t perFrameCaptureInfo[0].perFrameVideoID : %d", nodeInfo.perFrameCaptureInfo[0].perFrameVideoID); + CLOGI("\t\t perFrameCaptureInfo[1].perFrameVideoID : %d", nodeInfo.perFrameCaptureInfo[1].perFrameVideoID); + + if (name != NULL) + CLOGI("DEBUG(%s[%d]):(%s) ------------------------------", __FUNCTION__, __LINE__, name); +} + +void ExynosCameraPipe::m_configDvfs(void) { + bool newDvfs = m_parameters->getDvfsLock(); + int ret = 0; + + if (newDvfs != m_dvfsLocked) { + ret = setControl(V4L2_CID_IS_DVFS_LOCK, 533000); + if (ret != NO_ERROR) + CLOGE("ERR(%s):setControl failed", __FUNCTION__); + m_dvfsLocked = newDvfs; + } +} + +bool ExynosCameraPipe::m_flagValidInt(int num) +{ + bool ret = false; + + if (num == -1 || num == 0) + ret = false; + else + ret = true; + + return ret; +} + +bool ExynosCameraPipe::m_isOtf(int sensorId) +{ + if (m_flagValidInt(sensorId) == false) { + CLOGW("WARN(%s[%d]):m_flagValidInt(%d) == false", __FUNCTION__, __LINE__, sensorId); + /* to protect q, dq */ + return true; + } + +#ifdef INPUT_MEMORY_MASK + return ((sensorId & INPUT_MEMORY_MASK) >> INPUT_MEMORY_SHIFT); +#else + return false; +#endif +} + +bool ExynosCameraPipe::m_checkValidFrameCount(struct camera2_shot_ext *shot_ext) +{ + bool ret = true; + + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext == NULL. so fail", __FUNCTION__, __LINE__); + return false; + } + + int frameCount = getMetaDmRequestFrameCount(shot_ext); + + if (frameCount < 0 || + frameCount < m_lastSrcFrameCount) { + CLOGE("ERR(%s[%d]):invalid frameCount(%d) < m_lastSrcFrameCount(%d). so fail", + __FUNCTION__, __LINE__, frameCount, m_lastSrcFrameCount); + + ret = false; + } + + m_lastSrcFrameCount = frameCount; + + return ret; +} + +bool ExynosCameraPipe::m_checkValidFrameCount(struct camera2_stream *stream) +{ + bool ret = true; + + if (stream == NULL) { + CLOGE("ERR(%s[%d]):shot_ext == NULL. so fail", __FUNCTION__, __LINE__); + return false; + } + + /* + if (stream->fcount <= 0 || + m_lastDstFrameCount + 1 != frameCount) { + CLOGE("ERR(%s[%d]):invalid m_lastDstFrameCount(%d) + 1 != stream->fcount(%d). so fail", + __FUNCTION__, __LINE__, m_lastDstFrameCount, stream->fcount); + + ret = false; + } + */ + + if (stream->fcount <= 0) { + CLOGE("ERR(%s[%d]):invalid stream->fcount(%d). so fail", + __FUNCTION__, __LINE__, stream->fcount); + + ret = false; + } + + if (stream->fvalid == 0) { + CLOGE("ERR(%s[%d]):invalid fvalid(%d). 
so fail", + __FUNCTION__, __LINE__, stream->fvalid); + + ret = false; + } + + if (0 < stream->fcount) + m_lastDstFrameCount = stream->fcount; + + return ret; +} + +status_t ExynosCameraPipe::m_handleInvalidFrame(int index, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer) +{ + status_t ret = NO_ERROR; + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame == NULL. so fail", __FUNCTION__, __LINE__); + return false; + } + + if (buffer == NULL) { + CLOGE("ERR(%s[%d]):buffer == NULL. so fail", __FUNCTION__, __LINE__); + return false; + } + + /* to complete frame */ + ExynosCameraFrame *curFrame = NULL; + + m_runningFrameList[index] = newFrame; + + /* m_completeFrame will do m_completeFrame--. so, I put ++ to make 0 */ + m_numOfRunningFrame++; + + /* complete frame */ + ret = m_completeFrame(&curFrame, buffer[OUTPUT_NODE], false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + if (m_outputFrameQ != NULL) + m_outputFrameQ->pushProcessQ(&curFrame); + + return ret; +} + +bool ExynosCameraPipe::m_isReprocessing(void) +{ + return m_reprocessing == 1 ? true : false; +} + +bool ExynosCameraPipe::m_checkThreadLoop(void) +{ + Mutex::Autolock lock(m_pipeframeLock); + bool loop = false; + + if (m_isReprocessing() == false) + loop = true; + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loop = true; + + if (m_flagTryStop == true) + loop = false; + + return loop; +} + +void ExynosCameraPipe::m_init(void) +{ + m_mainNodeNum = -1; + m_mainNode = NULL; + + for (int i = 0; i < MAX_NODE; i++) { + m_nodeNum[i] = -1; + m_node[i] = NULL; + m_sensorIds[i] = -1; + m_bufferManager[i] = NULL; + } + + m_exynosconfig = NULL; + + m_pipeId = 0; + m_cameraId = -1; + m_reprocessing = 0; + m_oneShotMode = false; + + m_parameters = NULL; + m_prepareBufferCount = 0; + m_numBuffers = 0; + m_numCaptureBuf = 0; + + m_activityControl = NULL; + + m_inputFrameQ = NULL; + m_outputFrameQ = NULL; + m_frameDoneQ= NULL; + + for(int i = 0; i < MAX_NODE; i++) { + for (int j = 0; j < MAX_BUFFERS; j++) { + m_runningFrameList[j] = NULL; + m_nodeRunningFrameList[i][j] = NULL; + } + } + + m_numOfRunningFrame = 0; + + m_flagStartPipe = false; + m_flagTryStop = false; + + m_flagFrameDoneQ = false; + + m_threadCommand = 0; + m_timeInterval = 0; + m_threadState = 0; + m_threadRenew = 0; + memset(m_name, 0x00, sizeof(m_name)); + + m_metadataTypeShot = true; + memset(&m_perframeMainNodeGroupInfo, 0x00, sizeof(camera_pipe_perframe_node_group_info_t)); + memset(&m_curNodeGroupInfo, 0x00, sizeof(camera2_node_group)); + + m_dvfsLocked = false; + m_isBoosting = false; + m_lastSrcFrameCount = 0; + m_lastDstFrameCount = 0; + + m_setfile = 0x0; + +#ifdef USE_CAMERA2_API_SUPPORT + m_timeLogCount = TIME_LOG_COUNT; +#endif +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe.h b/libcamera/common_v2/Pipes2/ExynosCameraPipe.h new file mode 100644 index 0000000..0adea93 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe.h @@ -0,0 +1,426 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_H +#define EXYNOS_CAMERA_PIPE_H + +#include "ExynosCameraConfig.h" + +#include "ExynosCameraThread.h" +#include "ExynosCameraThreadFactory.h" + +#include "ExynosCameraNode.h" +#include "ExynosCameraNodeJpegHAL.h" +#include "ExynosCameraFrame.h" +#include "ExynosCameraSensorInfo.h" +#include "ExynosCameraParameters.h" +#include "ExynosCameraList.h" +#include "ExynosCameraBufferManager.h" + +#include "ExynosCameraUtilsModule.h" +#include "ExynosCameraSizeControl.h" + +#include "ExynosJpegApi.h" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +#ifdef USE_CAMERA2_API_SUPPORT +/* for reprocessing pipe, if timeout happend, prohibit loging until this define */ +#define TIME_LOG_COUNT 5 +#endif + +enum PIPE_POSITION { + SRC_PIPE = 0, + DST_PIPE +}; + +enum NODE_TYPE { + INVALID_NODE = -1, + OUTPUT_NODE = 0, /* Node for output device */ + CAPTURE_NODE, /* deprecated enum */ + SUB_NODE, /* deprecated enum */ + + /* MCPipe CAPTURE NODE */ + CAPTURE_NODE_1 = CAPTURE_NODE, /* MCPipe use CAPTURE_NODE_X. so, this start from OUTPUT_NODE + 1 */ + CAPTURE_NODE_2, + CAPTURE_NODE_3, + CAPTURE_NODE_4, + CAPTURE_NODE_5, + CAPTURE_NODE_6, + CAPTURE_NODE_7, + CAPTURE_NODE_8, + CAPTURE_NODE_9, + CAPTURE_NODE_10, + CAPTURE_NODE_11, + CAPTURE_NODE_12, + CAPTURE_NODE_13, + CAPTURE_NODE_14, + CAPTURE_NODE_15, + CAPTURE_NODE_16, + CAPTURE_NODE_17, + + /* OTF NODE */ + OTF_NODE_BASE, + OTF_NODE_1, + OTF_NODE_2, + OTF_NODE_3, + OTF_NODE_4, + OTF_NODE_5, + OTF_NODE_6, + + MAX_NODE +}; + +typedef enum perframe_node_type { + PERFRAME_NODE_TYPE_NONE = 0, + PERFRAME_NODE_TYPE_LEADER = 1, + PERFRAME_NODE_TYPE_CAPTURE = 2, +} perframe_node_type_t; + +typedef struct ExynosCameraNodeObjects { + ExynosCameraNode *node[MAX_NODE]; + ExynosCameraNode *secondaryNode[MAX_NODE]; + bool isInitalize; +} camera_node_objects_t; + +typedef struct ExynosCameraPerframeNodeInfo { + perframe_node_type_t perFrameNodeType; + int perframeInfoIndex; + int perFrameVideoID; + + ExynosCameraPerframeNodeInfo() + { + perFrameNodeType = PERFRAME_NODE_TYPE_NONE; + perframeInfoIndex = 0; + perFrameVideoID = 0; + } + + ExynosCameraPerframeNodeInfo& operator =(const ExynosCameraPerframeNodeInfo &other) + { + perFrameNodeType = other.perFrameNodeType; + perframeInfoIndex = other.perframeInfoIndex; + perFrameVideoID = other.perFrameVideoID; + + return *this; + } +} camera_pipe_perframe_node_info_t; + +typedef struct ExynosCameraPerframeNodeGroupInfo { + int perframeSupportNodeNum; + camera_pipe_perframe_node_info_t perFrameLeaderInfo; + camera_pipe_perframe_node_info_t perFrameCaptureInfo[CAPTURE_NODE_MAX]; + + ExynosCameraPerframeNodeGroupInfo() + { + perframeSupportNodeNum = 0; + } + + ExynosCameraPerframeNodeGroupInfo& operator =(const ExynosCameraPerframeNodeGroupInfo &other) + { + perframeSupportNodeNum = other.perframeSupportNodeNum; + perFrameLeaderInfo = other.perFrameLeaderInfo; + + for (int i = 0; i < CAPTURE_NODE_MAX; i++) + perFrameCaptureInfo[i] = other.perFrameCaptureInfo[i]; + + return *this; + } +} camera_pipe_perframe_node_group_info_t; + +typedef struct ExynosCameraPipeInfo 
{ + struct ExynosRect rectInfo; + struct v4l2_requestbuffers bufInfo; + camera_pipe_perframe_node_group_info_t perFrameNodeGroupInfo; + unsigned int bytesPerPlane[EXYNOS_CAMERA_BUFFER_MAX_PLANES]; + + ExynosCameraPipeInfo() + { + memset(&bufInfo, 0, sizeof(v4l2_requestbuffers)); + + for (int i = 0; i < EXYNOS_CAMERA_BUFFER_MAX_PLANES; i++) + bytesPerPlane[i] = 0; + } + + ExynosCameraPipeInfo& operator =(const ExynosCameraPipeInfo &other) + { + rectInfo = other.rectInfo; + memcpy(&bufInfo, &(other.bufInfo), sizeof(v4l2_requestbuffers)); + perFrameNodeGroupInfo = other.perFrameNodeGroupInfo; + + for (int i = 0; i < EXYNOS_CAMERA_BUFFER_MAX_PLANES; i++) + bytesPerPlane[i] = other.bytesPerPlane[i]; + + return *this; + } +} camera_pipe_info_t; + +namespace BUFFER_POS { + enum POS { + SRC = 0x00, + DST = 0x01 + }; +} + +namespace BUFFERQ_TYPE { + enum TYPE { + INPUT = 0x00, + OUTPUT = 0x01 + }; +}; + +class ExynosCameraPipe { +public: + ExynosCameraPipe() + { + m_init(); + } + + ExynosCameraPipe( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) + { + m_init(); + + m_cameraId = cameraId; + m_reprocessing = isReprocessing ? 1 : 0; + m_oneShotMode = isReprocessing; + + if (nodeNums) { + for (int i = 0; i < MAX_NODE; i++) + m_nodeNum[i] = nodeNums[i]; + } + + if (obj_param) { + m_parameters = obj_param; + m_activityControl = m_parameters->getActivityControl(); + m_exynosconfig = m_parameters->getConfig(); + } + } + + virtual ~ExynosCameraPipe(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t precreate(int32_t *sensorIds = NULL); + virtual status_t postcreate(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds, int32_t *ispSensorIds); + virtual status_t prepare(void); + virtual status_t prepare(uint32_t prepareCnt); + + virtual status_t start(void); + virtual status_t stop(void); + virtual bool flagStart(void); + + virtual status_t startThread(void); + virtual status_t stopThread(void); + virtual bool flagStartThread(void); + virtual status_t stopThreadAndWait(int sleep, int times); + + virtual status_t sensorStream(bool on); + virtual status_t forceDone(unsigned int cid, int value); + virtual status_t setControl(int cid, int value); + virtual status_t setControl(int cid, int value, enum NODE_TYPE nodeType); + virtual status_t getControl(int cid, int *value); + virtual status_t setExtControl(struct v4l2_ext_controls *ctrl); + virtual status_t setParam(struct v4l2_streamparm streamParam); + + virtual status_t pushFrame(ExynosCameraFrame **newFrame); + + virtual status_t instantOn(int32_t numFrames); + virtual status_t instantOnQbuf(ExynosCameraFrame **frame, BUFFER_POS::POS pos); + virtual status_t instantOnDQbuf(ExynosCameraFrame **frame, BUFFER_POS::POS pos); + virtual status_t instantOff(void); + virtual status_t instantOnPushFrameQ(BUFFERQ_TYPE::TYPE type, ExynosCameraFrame **frame); + + virtual status_t getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition); + virtual int getCameraId(void); + virtual status_t setPipeId(uint32_t id); + virtual uint32_t getPipeId(void); + virtual status_t setPipeId(enum NODE_TYPE nodeType, uint32_t id); + virtual int getPipeId(enum NODE_TYPE nodeType); + + virtual status_t setPipeName(const char *pipeName); + virtual char *getPipeName(void); + + virtual status_t clearInputFrameQ(void); + virtual 
status_t getInputFrameQ(frame_queue_t **inputQ); + virtual status_t setOutputFrameQ(frame_queue_t *outputQ); + virtual status_t getOutputFrameQ(frame_queue_t **outputQ); + + virtual status_t setBoosting(bool isBoosting); + + virtual bool isThreadRunning(void); + + virtual status_t getThreadState(int **threadState); + virtual status_t getThreadInterval(uint64_t **timeInterval); + virtual status_t getThreadRenew(int **timeRenew); + virtual status_t incThreadRenew(void); + virtual status_t setStopFlag(void); + + virtual int getRunningFrameCount(void); + + virtual void dump(void); + + /* only for debugging */ + virtual status_t dumpFimcIsInfo(bool bugOn); +//#ifdef MONITOR_LOG_SYNC + virtual status_t syncLog(uint32_t syncId); +//#endif + +/* MC Pipe include buffer manager, so FrameFactory(ExynosCamera) must set buffer manager to pipe. + * Add interface for set buffer manager to pipe. + */ + virtual status_t setBufferManager(ExynosCameraBufferManager **bufferManager); + +/* Set map buffer is makes node operation faster at first start. + * Thereby map buffer before start, reduce map buffer time at start. + * It use first Exynos5430/5433 in old pipe. + */ + virtual status_t setMapBuffer(ExynosCameraBuffer *srcBuf = NULL, ExynosCameraBuffer *dstBuf = NULL); + +/* MC Pipe have two output queue. + * If you want push frame to FrameDoneQ in ExynosCamera explicitly, use this interface. + */ + + virtual status_t setFrameDoneQ(frame_queue_t *frameDoneQ); + virtual status_t getFrameDoneQ(frame_queue_t **frameDoneQ); + + virtual status_t setNodeInfos(camera_node_objects_t *nodeObjects, bool flagReset = false); + virtual status_t getNodeInfos(camera_node_objects_t *nodeObjects); + + virtual void setOneShotMode(bool enable); +#ifdef USE_MCPIPE_SERIALIZATION_MODE + virtual void needSerialization(bool enable); +#endif + +protected: + virtual bool m_mainThreadFunc(void); + + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + + virtual status_t m_updateMetadataToFrame(void *metadata, int index); + virtual status_t m_getFrameByIndex(ExynosCameraFrame **frame, int index); + virtual status_t m_completeFrame( + ExynosCameraFrame **frame, + ExynosCameraBuffer buffer, + bool isValid = true); + + virtual status_t m_setInput(ExynosCameraNode *nodes[], int32_t *nodeNums, int32_t *sensorIds); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + virtual status_t m_setNodeInfo(ExynosCameraNode *node, camera_pipe_info_t *pipeInfos, + int planeCount, enum YUV_RANGE yuvRange, + bool flagBayer = false); + virtual status_t m_forceDone(ExynosCameraNode *node, unsigned int cid, int value); + + virtual status_t m_startNode(void); + virtual status_t m_stopNode(void); + virtual status_t m_clearNode(void); + + virtual status_t m_checkNodeGroupInfo(char *name, camera2_node *oldNode, camera2_node *newNode); + virtual status_t m_checkNodeGroupInfo(int index, camera2_node *oldNode, camera2_node *newNode); + + virtual void m_dumpRunningFrameList(void); + + virtual void m_dumpPerframeNodeGroupInfo(const char *name, camera_pipe_perframe_node_group_info_t nodeInfo); + + virtual void m_configDvfs(void); + virtual bool m_flagValidInt(int num); + bool m_isOtf(int sensorId); + bool m_checkValidFrameCount(struct camera2_shot_ext *shot_ext); + bool m_checkValidFrameCount(struct camera2_stream *stream); + virtual status_t m_handleInvalidFrame(int index, ExynosCameraFrame *newFrame, ExynosCameraBuffer *buffer); + virtual bool m_checkLeaderNode(int sensorId); + virtual bool m_isReprocessing(void); + virtual bool 
m_checkThreadLoop(void); + +private: + void m_init(void); + +protected: + ExynosCameraParameters *m_parameters; + ExynosCameraBufferManager *m_bufferManager[MAX_NODE]; + ExynosCameraActivityControl *m_activityControl; + + sp m_mainThread; + + struct ExynosConfigInfo *m_exynosconfig; + + ExynosCameraNode *m_node[MAX_NODE]; + int32_t m_nodeNum[MAX_NODE]; + int32_t m_sensorIds[MAX_NODE]; + ExynosCameraNode *m_mainNode; + int32_t m_mainNodeNum; + + char m_name[EXYNOS_CAMERA_NAME_STR_SIZE]; + + frame_queue_t *m_inputFrameQ; + frame_queue_t *m_outputFrameQ; + frame_queue_t *m_frameDoneQ; + + ExynosCameraFrame *m_runningFrameList[MAX_BUFFERS]; + ExynosCameraFrame *m_nodeRunningFrameList[MAX_NODE][MAX_BUFFERS]; + uint32_t m_numOfRunningFrame; + +protected: + uint32_t m_pipeId; + int32_t m_cameraId; + + uint32_t m_prepareBufferCount; + uint32_t m_numBuffers; + /* Node for capture Interface : destination port */ + int m_numCaptureBuf; + + uint32_t m_reprocessing; + bool m_oneShotMode; + bool m_flagStartPipe; + bool m_flagTryStop; + bool m_dvfsLocked; + bool m_isBoosting; + bool m_metadataTypeShot; + bool m_flagFrameDoneQ; + + ExynosCameraDurationTimer m_timer; + int m_threadCommand; + uint64_t m_timeInterval; + int m_threadState; + int m_threadRenew; + + Mutex m_pipeframeLock; + + camera2_node_group m_curNodeGroupInfo; + + camera_pipe_perframe_node_group_info_t m_perframeMainNodeGroupInfo; + + int m_lastSrcFrameCount; + int m_lastDstFrameCount; + + int m_setfile; +#ifdef USE_CAMERA2_API_SUPPORT + int m_timeLogCount; +#endif +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.cpp new file mode 100644 index 0000000..f712769 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.cpp @@ -0,0 +1,786 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipe3AA" +#include + +#include "ExynosCameraPipe3AA.h" + +namespace android { + +ExynosCameraPipe3AA::~ExynosCameraPipe3AA() +{ + this->destroy(); +} + +status_t ExynosCameraPipe3AA::create(int32_t *sensorIds) +{ + CLOGI("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + int fd = -1; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + /* 3AA output */ + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create("3AA_OUTPUT", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(m_nodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[OUTPUT_NODE]); + } + + /* 3AA capture */ + if (m_flagValidInt(m_nodeNum[CAPTURE_NODE]) == true) { + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + + /* HACK for helsinki. this must fix on istor */ + /* if (1) { */ + if (m_nodeNum[OUTPUT_NODE] == m_nodeNum[CAPTURE_NODE]) { + ret = m_node[OUTPUT_NODE]->getFd(&fd); + if (ret != NO_ERROR || m_flagValidInt(fd) == false) { + CLOGE("ERR(%s):OUTPUT_NODE->getFd failed", __FUNCTION__); + return ret; + } + + ret = m_node[CAPTURE_NODE]->create("3AA_CAPTURE", m_cameraId, fd); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } else { + ret = m_node[CAPTURE_NODE]->create("3AA_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + } + } + + /* mainNode is OUTPUT_NODE */ + m_mainNodeNum = OUTPUT_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe3AA::m_mainThreadFunc, "3AAThread"); + + if (m_reprocessing == true) + m_inputFrameQ = new frame_queue_t(m_mainThread); + else + m_inputFrameQ = new frame_queue_t; + m_inputFrameQ->setWaitTime(500000000); /* .5 sec */ + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA::destroy(void) +{ + int ret = 0; + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_node[OUTPUT_NODE] != NULL) { + ret = m_node[OUTPUT_NODE]->close(); + if (ret < 0) { + CLOGE("ERR(%s):3AA OUTPUT_NODE close fail(ret = %d)", __FUNCTION__, ret); + return INVALID_OPERATION; + } + delete m_node[OUTPUT_NODE]; + m_node[OUTPUT_NODE] = NULL; + CLOGD("DEBUG(%s):Node(OUTPUT_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, 
m_nodeNum[OUTPUT_NODE], m_sensorIds[OUTPUT_NODE]); + } + + if (m_node[CAPTURE_NODE] != NULL) { + /* CAPTURE_NODE is created by fd */ + + ret = m_node[CAPTURE_NODE]->close(); + if (ret < 0) { + CLOGE("ERR(%s):3AA CAPTURE_NODE close fail(ret = %d)", __FUNCTION__, ret); + return INVALID_OPERATION; + } + + delete m_node[CAPTURE_NODE]; + m_node[CAPTURE_NODE] = NULL; + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + + m_mainNodeNum = -1; + m_mainNode = NULL; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + status_t ret = NO_ERROR; + + /* 3AA output */ + ret = m_setNodeInfo(m_node[OUTPUT_NODE], &pipeInfos[0], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + m_perframeMainNodeGroupInfo = pipeInfos[0].perFrameNodeGroupInfo; + + /* 3AA capture */ + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[1], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[1].rectInfo.fullW, pipeInfos[1].rectInfo.fullH, pipeInfos[1].bufInfo.count); + return INVALID_OPERATION; + } + + m_numCaptureBuf = pipeInfos[1].bufInfo.count; + m_perframeSubNodeGroupInfo = pipeInfos[1].perFrameNodeGroupInfo; + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + +#ifdef DEBUG_RAWDUMP + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; +#endif + + /* TODO: check node state */ + /* stream on? 
*/ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + /* setfile setting */ + int setfile = 0; + int yuvRange = 0; + m_parameters->getSetfileYuvRange(m_reprocessing, &m_setfile, &yuvRange); +#ifdef SET_SETFILE_BY_SHOT + m_setfile = mergeSetfileYuvRange(setfile, yuvRange); +#else +#if SET_SETFILE_BY_SET_CTRL_3AA + if (m_checkLeaderNode(m_sensorIds[OUTPUT_NODE]) == true) { + ret = m_node[OUTPUT_NODE]->setControl(V4L2_CID_IS_SET_SETFILE, m_setfile); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d)", __FUNCTION__, __LINE__, m_setfile, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):set setfile(%d),m_reprocessing(%d)", __FUNCTION__, __LINE__, m_setfile, m_reprocessing); + } else { + CLOGW("WARN(%s[%d]):m_checkLeaderNode(%d) == false. so, skip set setfile.", + __FUNCTION__, __LINE__, m_sensorIds[OUTPUT_NODE]); + } +#endif +#endif + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA::prepare(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA::start(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_startNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_startNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_flagStartPipe = true; + m_flagTryStop = false; + + return ret; +} + +status_t ExynosCameraPipe3AA::stop(void) +{ + CLOGD("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + /* 3AA output stop */ + ret = m_stopNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_stopNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + m_flagStartPipe = false; + m_flagTryStop = false; + + return ret; +} + +status_t ExynosCameraPipe3AA::getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition) +{ + int planeCount = 0; + int ret = NO_ERROR; + + if (pipePosition == SRC_PIPE) { + if (m_node[CAPTURE_NODE] == NULL) { + CLOGE("ERR(%s): m_node[CAPTURE_NODE] == NULL. 
so, fail", __FUNCTION__); + return INVALID_OPERATION; + } + + ret = m_node[CAPTURE_NODE]->getSize(fullW, fullH); + if (ret < 0) { + CLOGE("ERR(%s):getSize fail", __FUNCTION__); + return ret; + } + + ret = m_node[CAPTURE_NODE]->getColorFormat(colorFormat, &planeCount); + if (ret < 0) { + CLOGE("ERR(%s):getColorFormat fail", __FUNCTION__); + return ret; + } + } else { + ret = ExynosCameraPipe::getPipeInfo(fullW, fullH, colorFormat, pipePosition); + } + + return ret; +} + +status_t ExynosCameraPipe3AA::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer fliteBuffer; + ExynosCameraBuffer ispBuffer; + + int ret = 0; + + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_node[OUTPUT_NODE]->dumpState(); + m_node[CAPTURE_NODE]->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + return INVALID_OPERATION; + /* return NO_ERROR; */ + } + + /* + * Even 3aa input is OTf, we need to q, dq. + * this is important difference from other pipe. + * in other pipe, it will check OUTPUT_NODE also. + * but, 3aa pipe will not check OUTPUT_NODE. + */ + /* if(m_node[OUTPUT_NODE] != NULL && + * m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + */ + ret = newFrame->getSrcBuffer(getPipeId(), &fliteBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = newFrame->getDstBuffer(getPipeId(), &ispBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + } + + if (m_runningFrameList[fliteBuffer.index] != NULL) { + CLOGE("ERR(%s):new buffer is invalid, we already get buffer index(%d)", + __FUNCTION__, fliteBuffer.index); + dump(); + return BAD_VALUE; + } + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(fliteBuffer.addr[1]); + + if (shot_ext != NULL) { + int pictureW = 0, pictureH = 0; + int cropW = 0, cropH = 0, cropX = 0, cropY = 0; + + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getHwBayerCropRegion(&cropW, &cropH, &cropX, &cropY); + + newFrame->getMetaData(shot_ext); + ret = m_parameters->duplicateCtrlMetadata((void *)shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + CLOGV("DEBUG(%s[%d]):frameCount(%d), rCount(%d)", + __FUNCTION__, __LINE__, + newFrame->getFrameCount(), getMetaDmRequestFrameCount(shot_ext)); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + int zoomParamInfo = m_parameters->getZoomLevel(); + int zoomFrameInfo = 0; + ExynosRect bnsSize; + ExynosRect bayerCropSize; + ExynosRect bdsSize; + camera2_node_group node_group_info; + + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex, &zoomFrameInfo); + +#ifdef PERFRAME_CONTROL_NODE_3AA + /* HACK: To speed up DZOOM */ + if (zoomFrameInfo != zoomParamInfo) { + CLOGI("INFO(%s[%d]):zoomFrameInfo(%d), zoomParamInfo(%d)", + __FUNCTION__, __LINE__, 
zoomFrameInfo, zoomParamInfo); + + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_dis; + + newFrame->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, &zoomFrameInfo); + newFrame->getNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, &zoomFrameInfo); + + if (m_reprocessing == true) { + int pictureW = 0, pictureH = 0; + ExynosRect bayerCropSizePicture; + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPictureBayerCropSize(&bnsSize, &bayerCropSizePicture); + m_parameters->getPictureBdsSize(&bdsSize); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + + ExynosCameraNodeGroup::updateNodeGroupInfo( + m_cameraId, + &node_group_info, + &node_group_info_isp, + bayerCropSize, + bayerCropSizePicture, + bdsSize, + pictureW, pictureH, + m_parameters->getUsePureBayerReprocessing(), + m_parameters->isReprocessing3aaIspOTF()); + + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_PURE_REPROCESSING_3AA, zoomParamInfo); + newFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_PURE_REPROCESSING_ISP, zoomParamInfo); + /* PERFRAME_INFO_DIS is not required */ + } else if (!m_flagTryStop) { + int previewW = 0, previewH = 0; + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getPreviewBayerCropSize(&bnsSize, &bayerCropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + + ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + m_cameraId, + &node_group_info, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + m_parameters->getHWVdisMode()); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_dis, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_3AA, zoomParamInfo); + newFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, zoomParamInfo); + newFrame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, zoomParamInfo); + } + } +#endif + + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + + /* Per - 3AA */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(-1, &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum; i ++) { + if (node_group_info.capture[i].request == 1 || + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == true) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, 
i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + /* CLOGI("INFO(%s[%d]):fcount(%d)", __FUNCTION__, __LINE__, shot_ext_dst->shot.dm.request.frameCount); */ + /* newFrame->dumpNodeGroupInfo("3AA"); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeIspNodeGroupInfo", m_perframeIspNodeGroupInfo); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeMainNodeGroupInfo", m_perframeMainNodeGroupInfo); */ + } + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + if (m_node[CAPTURE_NODE]->putBuffer(&ispBuffer) != NO_ERROR) { + CLOGE("ERR(%s):capture putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + } + + if (m_node[OUTPUT_NODE]->putBuffer(&fliteBuffer) != NO_ERROR) { + CLOGE("ERR(%s):output putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setSrcBuffer state fail", __FUNCTION__); + return ret; + } + + m_runningFrameList[fliteBuffer.index] = newFrame; + m_numOfRunningFrame++; + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer fliteBuffer; + ExynosCameraBuffer ispBuffer; + int v4l2Colorformat = 0; + int planeCount[MAX_NODE] = {0}; + int index = 0; + status_t ret = 0; + int error = 0; + + CLOGV("DEBUG(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = m_node[CAPTURE_NODE]->getBuffer(&ispBuffer, &index); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[CAPTURE_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + error = ret; + } + + CLOGV("DEBUG(%s[%d]):index : %d", __FUNCTION__, __LINE__, index); + + m_node[CAPTURE_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[CAPTURE_NODE]); + } + + ret = m_node[OUTPUT_NODE]->getBuffer(&fliteBuffer, &index); + if (m_flagTryStop == true) { + CLOGE("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + + m_node[OUTPUT_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[OUTPUT_NODE]); + + if (ret != NO_ERROR || error != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->getBuffer fail ret(%d)", 
__FUNCTION__, __LINE__, ret); + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)(fliteBuffer.addr[1]); + + if (shot_ext == NULL) + return BAD_VALUE; + + fliteBuffer.index = index; + CLOGE("ERR(%s[%d]):Shot done invalid, frame(cnt:%d, index(%d)) skip", __FUNCTION__, __LINE__, getMetaDmRequestFrameCount(shot_ext), index); + + /* complete frame */ + ret = m_completeFrame(&curFrame, fliteBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; + } + + ret = m_updateMetadataToFrame(fliteBuffer.addr[1], fliteBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGV("DEBUG(%s[%d]):index : %d", __FUNCTION__, __LINE__, index); + + nsecs_t timeStamp = (nsecs_t)getMetaDmSensorTimeStamp((struct camera2_shot_ext *)fliteBuffer.addr[1]); + if (timeStamp < 0) { + CLOGW("WRN(%s[%d]): frameCount(%d), Invalid timeStamp(%lld)", + __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)fliteBuffer.addr[1]), + timeStamp); + } + + /* complete frame */ + ret = m_completeFrame(&curFrame, fliteBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* In M2M case, we copy meta data */ + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = curFrame->getSrcBuffer(getPipeId(), &fliteBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + ret = curFrame->getDstBuffer(getPipeId(), &ispBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + memcpy(ispBuffer.addr[planeCount[CAPTURE_NODE] - 1], fliteBuffer.addr[planeCount[OUTPUT_NODE] - 1], sizeof(struct camera2_shot_ext)); + + CLOGV("DEBUG(%s[%d]):isp frameCount %d", __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount((struct camera2_shot_ext *)ispBuffer.addr[1])); + } + + if (m_outputFrameQ != NULL) + m_outputFrameQ->pushProcessQ(&curFrame); + else + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCameraPipe3AA::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_putBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s[%d]):m_putBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_getBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + + 
m_timer.stop(); + m_timeInterval = m_timer.durationMsecs(); + m_timer.start(); + + /* update renew count */ + if (ret >= 0) + m_threadRenew = 0; + + return m_checkThreadLoop(); +} + +void ExynosCameraPipe3AA::m_init(void) +{ + memset(&m_perframeSubNodeGroupInfo, 0x00, sizeof(camera_pipe_perframe_node_group_info_t)); +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.h b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.h new file mode 100644 index 0000000..1df8e83 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA.h @@ -0,0 +1,72 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_3AA_H +#define EXYNOS_CAMERA_PIPE_3AA_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList<ExynosCameraFrame *> frame_queue_t; + +class ExynosCameraPipe3AA : protected virtual ExynosCameraPipe {
+public: + ExynosCameraPipe3AA() + { + m_init(); + } + + ExynosCameraPipe3AA( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipe3AA(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t prepare(void); + + virtual status_t start(void); + virtual status_t stop(void); + + virtual status_t getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition); + +private: + virtual bool m_mainThreadFunc(void); + + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + +private: + void m_init(void); + +protected: + camera_pipe_perframe_node_group_info_t m_perframeSubNodeGroupInfo; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.cpp new file mode 100644 index 0000000..efac4f4 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.cpp @@ -0,0 +1,1781 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License.
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipe3AA_ISP" +#include + +#include "ExynosCameraPipe3AA_ISP.h" + +namespace android { + +ExynosCameraPipe3AA_ISP::~ExynosCameraPipe3AA_ISP() +{ + this->destroy(); +} + +status_t ExynosCameraPipe3AA_ISP::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + int fd = -1; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + + m_ispSensorIds[i] = -1; + } + + if (sensorIds) + m_copyNodeInfo2IspNodeInfo(); + + /* ISP output */ + if (m_flagValidInt(m_ispNodeNum[OUTPUT_NODE]) == true) { + m_ispNode[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_ispNode[OUTPUT_NODE]->create("ISP_OUTPUT", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[OUTPUT_NODE] create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_ispNode[OUTPUT_NODE]->open(m_ispNodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[OUTPUT_NODE] open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_ispNodeNum[OUTPUT_NODE]); + } + +#if 0 + /* + * Isp capture node will be open on PIPE_ISP. + * not remove for history. + */ + /* ISP capture */ + if (m_flagValidInt(m_ispNodeNum[CAPTURE_NODE]) == true) { + m_ispNode[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_ispNode[CAPTURE_NODE]->create("ISP_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[CAPTURE_NODE] create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_ispNode[CAPTURE_NODE]->open(m_ispNodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[CAPTURE_NODE] open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_ispNodeNum[CAPTURE_NODE]); + } +#endif + + m_ispThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe3AA_ISP::m_ispThreadFunc, "ISPThread", PRIORITY_URGENT_DISPLAY); + + m_ispBufferQ = new isp_buffer_queue_t; + + /* 3AA output */ + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create("3AA_OUTPUT", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(m_nodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_nodeNum[OUTPUT_NODE]); + + /* mainNode is OUTPUT_NODE */ + m_mainNodeNum = OUTPUT_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + ret = m_node[OUTPUT_NODE]->getFd(&fd); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):OUTPUT_NODE->getFd failed", __FUNCTION__, __LINE__); + return ret; + } + } + + /* 3AA capture */ + if (m_flagValidInt(m_nodeNum[CAPTURE_NODE]) == true) { + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + + /* HACK for helsinki. 
this must fix on istor */ + /* if (1) { */ + if (m_nodeNum[OUTPUT_NODE] == m_nodeNum[CAPTURE_NODE]) { + ret = m_node[OUTPUT_NODE]->getFd(&fd); + if (ret != NO_ERROR || m_flagValidInt(fd) == false) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE->getFd(%d) failed", __FUNCTION__, __LINE__, fd); + return ret; + } + + ret = m_node[CAPTURE_NODE]->create("3AA_CAPTURE", m_cameraId, fd); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } else { + ret = m_node[CAPTURE_NODE]->create("3AA_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_nodeNum[CAPTURE_NODE]); + } + } + + /* setInput for 54xx */ + ret = m_setInput(m_ispNode, m_ispNodeNum, m_ispSensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput(isp) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe3AA_ISP::m_mainThreadFunc, "3AAThread", PRIORITY_URGENT_DISPLAY); + + m_inputFrameQ = new frame_queue_t; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::precreate(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + int fd = -1; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + + m_ispSensorIds[i] = -1; + } + + if (sensorIds) + m_copyNodeInfo2IspNodeInfo(); + + /* ISP output */ + if (m_flagValidInt(m_ispNodeNum[OUTPUT_NODE]) == true) { + m_ispNode[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_ispNode[OUTPUT_NODE]->create("ISP", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[OUTPUT_NODE] create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_ispNode[OUTPUT_NODE]->open(m_ispNodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[OUTPUT_NODE] open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_ispNodeNum[OUTPUT_NODE]); + } + +#if 0 + /* + * Isp capture node will be open on PIPE_ISP. + * not remove for history. 
+ */ + /* ISP capture */ + if (m_flagValidInt(m_ispNodeNum[CAPTURE_NODE]) == true) { + m_ispNode[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_ispNode[CAPTURE_NODE]->create("ISP_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[CAPTURE_NODE] create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_ispNode[CAPTURE_NODE]->open(m_ispNodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[CAPTURE_NODE] open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_ispNodeNum[CAPTURE_NODE]); + } +#endif + + m_ispThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe3AA_ISP::m_ispThreadFunc, "ISPThread", PRIORITY_URGENT_DISPLAY); + + m_ispBufferQ = new isp_buffer_queue_t; + + /* 3AA output */ + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create("3AA_OUTPUT", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(m_nodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_nodeNum[OUTPUT_NODE]); + + /* mainNode is OUTPUT_NODE */ + m_mainNodeNum = OUTPUT_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + ret = m_node[OUTPUT_NODE]->getFd(&fd); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->getFd(%d) failed", __FUNCTION__, __LINE__, fd); + return ret; + } + } + + /* 3AA capture */ + if (m_flagValidInt(m_nodeNum[CAPTURE_NODE]) == true) { + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + + /* HACK for helsinki. 
this must fix on istor */ + /* if (1) { */ + if (m_nodeNum[OUTPUT_NODE] == m_nodeNum[CAPTURE_NODE]) { + ret = m_node[OUTPUT_NODE]->getFd(&fd); + if (ret != NO_ERROR || m_flagValidInt(fd) == false) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE->getFd(%d) failed", __FUNCTION__, __LINE__, fd); + return ret; + } + + ret = m_node[CAPTURE_NODE]->create("3AA_CAPTURE", m_cameraId, fd); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } else { + ret = m_node[CAPTURE_NODE]->create("3AA_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):Node(%d) opened", __FUNCTION__, __LINE__, m_nodeNum[CAPTURE_NODE]); + } + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe3AA_ISP::m_mainThreadFunc, "3AAThread", PRIORITY_URGENT_DISPLAY); + + CLOGI("INFO(%s[%d]):precreate() is succeed (%d) prepare", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::postcreate(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + int fd = -1; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + + m_ispSensorIds[i] = -1; + } + + if (sensorIds) + m_copyNodeInfo2IspNodeInfo(); + + ret = m_setInput(m_ispNode, m_ispNodeNum, m_ispSensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput(isp) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_inputFrameQ = new frame_queue_t; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):postcreate() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::destroy(void) +{ + int ret = 0; + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + +#if 0 + /* + * Isp capture node will be open on PIPE_ISP. + * not remove for history. 
+ */ + if (m_ispNode[CAPTURE_NODE] != NULL) { + ret = m_ispNode[CAPTURE_NODE]->close(); + if (ret < 0) { + CLOGE("ERR(%s):m_ispNode[CAPTURE_NODE] close fail(ret = %d)", __FUNCTION__, ret); + return ret; + } + delete m_ispNode[CAPTURE_NODE]; + m_ispNode[CAPTURE_NODE] = NULL; + CLOGD("DEBUG(%s):Node(m_ispNode[CAPTURE_NODE], m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_ispNodeNum[CAPTURE_NODE], m_ispSensorIds[CAPTURE_NODE]); + } + CLOGD("DEBUG(%s[%d]):m_ispNode[OUTPUT_NODE] destroyed", __FUNCTION__, __LINE__); +#endif + + if (m_ispNode[OUTPUT_NODE] != NULL) { + ret = m_ispNode[OUTPUT_NODE]->close(); + if (ret < 0) { + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE] close fail(ret = %d)", __FUNCTION__, ret); + return ret; + } + delete m_ispNode[OUTPUT_NODE]; + m_ispNode[OUTPUT_NODE] = NULL; + CLOGD("DEBUG(%s):Node(m_ispNode[OUTPUT_NODE], m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_ispNodeNum[OUTPUT_NODE], m_ispSensorIds[OUTPUT_NODE]); + } + CLOGD("DEBUG(%s[%d]):m_ispNode[OUTPUT_NODE] destroyed", __FUNCTION__, __LINE__); + + if (m_node[CAPTURE_NODE] != NULL) { + ret = m_node[CAPTURE_NODE]->close(); + if (ret < 0) { + CLOGE("ERR(%s):m_node[CAPTURE_NODE] close fail(ret = %d)", __FUNCTION__, ret); + return ret; + } + delete m_node[CAPTURE_NODE]; + m_node[CAPTURE_NODE] = NULL; + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + CLOGD("DEBUG(%s[%d]):m_nodeNum[CAPTURE_NODE] destroyed", __FUNCTION__, __LINE__); + + if (m_node[OUTPUT_NODE] != NULL) { + ret = m_node[OUTPUT_NODE]->close(); + if (ret < 0) { + CLOGE("ERR(%s):3AA OUTPUT_NODE close fail(ret = %d)", __FUNCTION__, ret); + return INVALID_OPERATION; + } + delete m_node[OUTPUT_NODE]; + m_node[OUTPUT_NODE] = NULL; + + CLOGD("DEBUG(%s):Node(OUTPUT_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[OUTPUT_NODE], m_sensorIds[OUTPUT_NODE]); + } + CLOGD("DEBUG(%s[%d]):3AA OUTPUT_NODE destroyed", __FUNCTION__, __LINE__); + + m_mainNodeNum = -1; + m_mainNode = NULL; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + if (m_ispBufferQ != NULL) { + m_ispBufferQ->release(); + delete m_ispBufferQ; + m_ispBufferQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* isp output */ + if (0 < pipeInfos[2].rectInfo.fullW && + 0 < pipeInfos[2].rectInfo.fullH) { + ret = m_setNodeInfo(m_ispNode[OUTPUT_NODE], &pipeInfos[2], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[2].rectInfo.fullW, pipeInfos[2].rectInfo.fullH, pipeInfos[2].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[2].bufInfo.count; + m_perframeIspNodeGroupInfo = pipeInfos[2].perFrameNodeGroupInfo; + } + + /* 3a1 output */ + ret = m_setNodeInfo(m_node[OUTPUT_NODE], &pipeInfos[0], + 2, YUV_FULL_RANGE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + m_perframeMainNodeGroupInfo = pipeInfos[0].perFrameNodeGroupInfo; + + + /* 3a1 capture */ + if (0 < pipeInfos[1].rectInfo.fullW && + 0 < pipeInfos[1].rectInfo.fullH) { + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[1], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[1].rectInfo.fullW, pipeInfos[1].rectInfo.fullH, pipeInfos[1].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[1].bufInfo.count; + m_perframeSubNodeGroupInfo = pipeInfos[1].perFrameNodeGroupInfo; + } + + return NO_ERROR; +} + + +status_t ExynosCameraPipe3AA_ISP::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + ret = ExynosCameraPipe3AA_ISP::setupPipe(pipeInfos, sensorIds, NULL); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): ExynosCameraPipe3AA_ISP::setupPipe fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d))", __FUNCTION__, __LINE__, getPipeId()); + + return ret; +} + +status_t ExynosCameraPipe3AA_ISP::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds, int32_t *ispSensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + +#ifdef DEBUG_RAWDUMP + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; +#endif + /* TODO: check node state */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + for (int i = 0; i < MAX_NODE; i++) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d -> %d", __FUNCTION__, __LINE__, i, m_sensorIds[i], sensorIds[i]); + + m_sensorIds[i] = sensorIds[i]; + } + } + + if (ispSensorIds) { + for (int i = 0; i < MAX_NODE; i++) { + CLOGD("DEBUG(%s[%d]):set new ispSensorIds[%d] : %d -> %d", __FUNCTION__, __LINE__, i, m_ispSensorIds[i], ispSensorIds[i]); + + m_ispSensorIds[i] = ispSensorIds[i]; + } + } + + ret = m_setInput(m_ispNode, m_ispNodeNum, m_ispSensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput(isp) fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + /* setfile setting */ + int setfile = 0; + int yuvRange = 0; + 
m_parameters->getSetfileYuvRange(m_reprocessing, &m_setfile, &yuvRange); + +#ifdef SET_SETFILE_BY_SHOT + m_setfile = mergeSetfileYuvRange(setfile, yuvRange); +#else +#if SET_SETFILE_BY_SET_CTRL_3AA_ISP + if (m_checkLeaderNode(m_sensorIds[OUTPUT_NODE]) == true) { + ret = m_node[OUTPUT_NODE]->setControl(V4L2_CID_IS_SET_SETFILE, m_setfile); + if (ret != NO_ERROR) { + ALOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d)", __FUNCTION__, __LINE__, m_setfile, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):set setfile(%d),m_reprocessing(%d)", __FUNCTION__, __LINE__, m_setfile, m_reprocessing); + } else { + CLOGW("WARN(%s[%d]):m_checkLeaderNode(%d) == false. so, skip set setfile.", + __FUNCTION__, __LINE__, m_sensorIds[OUTPUT_NODE]); + } +#endif +#endif + + if (m_isOtf(m_node[CAPTURE_NODE]->getInput()) == true) { + CLOGD("DEBUG(%s[%d]):m_isOtf(m_node[CAPTURE_NODE]) == true. so, stop m_ispThread", __FUNCTION__, __LINE__); + m_ispThread->requestExit(); + m_ispBufferQ->sendCmd(WAKE_UP); + } + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::prepare(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (uint32_t i = 0; i < m_prepareBufferCount; i++) { + ret = m_putBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_putBuffer fail(ret = %d)", __FUNCTION__, ret); + return ret; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::instantOn(int32_t numFrames) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + + if (m_inputFrameQ->getSizeOfProcessQ() != numFrames) { + CLOGE("ERR(%s[%d]): instantOn need %d Frames, but %d Frames are queued", + __FUNCTION__, __LINE__, numFrames, m_inputFrameQ->getSizeOfProcessQ()); + return INVALID_OPERATION; + } + + if (m_ispNode[OUTPUT_NODE]) { + ret = m_ispNode[OUTPUT_NODE]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_ispNode[OUTPUT_NODE] instantOn fail", __FUNCTION__, __LINE__); + return ret; + } + } + + if (m_node[CAPTURE_NODE]) { + ret = m_node[CAPTURE_NODE]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): Starting CAPTURE_NODE Error!", __FUNCTION__, __LINE__); + return ret; + } + } + + if (m_node[OUTPUT_NODE]) { + ret = m_node[OUTPUT_NODE]->start(); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE instantOn fail", __FUNCTION__, __LINE__); + return ret; + } + } + + for (int i = 0; i < numFrames; i++) { + ret = m_inputFrameQ->popProcessQ(&newFrame); + if (ret < 0) { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):newFrame is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = newFrame->getSrcBuffer(getPipeId(), &newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + + if (m_node[OUTPUT_NODE]) { + CLOGD("DEBUG(%s[%d]): put instantOn Buffer (index %d)", __FUNCTION__, __LINE__, newBuffer.index); + + ret = m_node[OUTPUT_NODE]->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):putBuffer fail", __FUNCTION__, __LINE__); + return ret; + /* TODO: doing exception handling */ + } + } + } + + return ret; +} + 
+status_t ExynosCameraPipe3AA_ISP::instantOff(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + + if (m_node[OUTPUT_NODE]) + ret = m_node[OUTPUT_NODE]->stop(); + + if (m_node[CAPTURE_NODE]) + ret = m_node[CAPTURE_NODE]->stop(); + + if (m_ispNode[OUTPUT_NODE]) + ret = m_ispNode[OUTPUT_NODE]->stop(); + + if (m_node[OUTPUT_NODE]) { + ret = m_node[OUTPUT_NODE]->clrBuffers(); + if (ret < 0) { + CLOGE("ERR(%s):3AA OUTPUT_NODE clrBuffers fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + } + + if (m_node[CAPTURE_NODE]) { + ret = m_node[CAPTURE_NODE]->clrBuffers(); + if (ret < 0) { + CLOGE("ERR(%s):3AA CAPTURE_NODE clrBuffers fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + } + + if (m_ispNode[OUTPUT_NODE]) { + ret = m_ispNode[OUTPUT_NODE]->clrBuffers(); + if (ret < 0) { + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE] clrBuffers fail, ret(%d)", __FUNCTION__, ret); + return ret; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::forceDone(unsigned int cid, int value) +{ + ALOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + if (m_node[OUTPUT_NODE] == NULL) { + CLOGE("ERR(%s):m_node[OUTPUT_NODE] is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + + ret = m_forceDone(m_node[OUTPUT_NODE], cid, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_forceDone is failed", __FUNCTION__); + + + if (m_ispNode[OUTPUT_NODE] == NULL) { + CLOGE("ERR(%s):m_node[OUTPUT_NODE] m_ispNode[OUTPUT_NODE] is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + ret = m_forceDone(m_ispNode[OUTPUT_NODE], cid, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE] m_forceDone is failed", __FUNCTION__); + + return ret; + +} + +status_t ExynosCameraPipe3AA_ISP::setControl(int cid, int value) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + int ret = NO_ERROR; + + if (m_node[OUTPUT_NODE]) { + ret = m_node[OUTPUT_NODE]->setControl(cid, value); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->setControl failed", __FUNCTION__, __LINE__); + return ret; + } + } + + return ret; +} + +status_t ExynosCameraPipe3AA_ISP::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + /* TODO: check state ready for start */ + + int ret = NO_ERROR; + + if (m_ispNode[OUTPUT_NODE]) { + ret = m_ispNode[OUTPUT_NODE]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): Starting m_ispNode[OUTPUT_NODE] Error!", __FUNCTION__, __LINE__); + return ret; + } + } + + if (m_node[CAPTURE_NODE]) { + ret = m_node[CAPTURE_NODE]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): Starting CAPTURE_NODE Error!", __FUNCTION__, __LINE__); + return ret; + } + } + + if (m_node[OUTPUT_NODE]) { + ret = m_node[OUTPUT_NODE]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): Starting OUTPUT_NODE Error!", __FUNCTION__, __LINE__); + return ret; + } + } + + m_flagStartPipe = true; + m_flagTryStop = false; + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + int ret = NO_ERROR; + + /* 3AA output stop */ + if (m_node[OUTPUT_NODE]) { + ret = m_node[OUTPUT_NODE]->stop(); + if (ret < 0) + CLOGE("ERR(%s):3AA output node stop fail, ret(%d)", __FUNCTION__, ret); + + ret = m_node[OUTPUT_NODE]->clrBuffers(); + if (ret < 0) + CLOGE("ERR(%s):3AA output node clrBuffers fail, ret(%d)", __FUNCTION__, ret); + } + + /* 3AA capture stop */ + if (m_node[CAPTURE_NODE]) { + ret = m_node[CAPTURE_NODE]->stop(); + if (ret < 0) + CLOGE("ERR(%s):3AA capture node stop fail, 
ret(%d)", __FUNCTION__, ret); + + ret = m_node[CAPTURE_NODE]->clrBuffers(); + if (ret < 0) + CLOGE("ERR(%s):3AA capture node clrBuffers fail, ret(%d)", __FUNCTION__, ret); + } + + /* isp output stop */ + if (m_ispNode[OUTPUT_NODE]) { + ret = m_ispNode[OUTPUT_NODE]->stop(); + if (ret < 0) + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE] stop fail, ret(%d)", __FUNCTION__, ret); + + ret = m_ispNode[OUTPUT_NODE]->clrBuffers(); + if (ret < 0) + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE] clrBuffers fail, ret(%d)", __FUNCTION__, ret); + } + + m_mainThread->requestExitAndWait(); + m_ispThread->requestExitAndWait(); + + m_inputFrameQ->release(); + m_ispBufferQ->release(); + + for (uint32_t i = 0; i < m_numBuffers; i++) + m_runningFrameList[i] = NULL; + + m_numOfRunningFrame = 0; + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + if (m_node[OUTPUT_NODE]) + m_node[OUTPUT_NODE]->removeItemBufferQ(); + + if (m_node[CAPTURE_NODE]) + m_node[CAPTURE_NODE]->removeItemBufferQ(); + + if (m_ispNode[OUTPUT_NODE]) + m_ispNode[OUTPUT_NODE]->removeItemBufferQ(); + + m_flagStartPipe = false; + m_flagTryStop = false; + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::startThread(void) +{ + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + m_ispThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::stopThread(void) +{ + /* stop thread */ + m_mainThread->requestExit(); + m_ispThread->requestExit(); + + m_inputFrameQ->sendCmd(WAKE_UP); + m_ispBufferQ->sendCmd(WAKE_UP); + + m_dumpRunningFrameList(); + + return NO_ERROR; +} + +void ExynosCameraPipe3AA_ISP::dump() +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + m_dumpRunningFrameList(); + m_node[OUTPUT_NODE]->dump(); + m_node[CAPTURE_NODE]->dump(); + m_ispNode[OUTPUT_NODE]->dump(); + + return; +} + +status_t ExynosCameraPipe3AA_ISP::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer fliteBuffer; + ExynosCameraBuffer ispBuffer; + int bufIndex = -1; + int ret = 0; + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s): wait timeout", __FUNCTION__); + m_node[OUTPUT_NODE]->dumpState(); + m_node[CAPTURE_NODE]->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + //return INVALID_OPERATION; + return NO_ERROR; + } + + ret = newFrame->getSrcBuffer(getPipeId(), &fliteBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_isOtf(m_node[CAPTURE_NODE]->getInput()) == true) { + bufIndex = fliteBuffer.index; + } else { + ret = newFrame->getDstBuffer(getPipeId(), &ispBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + bufIndex = ispBuffer.index; + } + + if (bufIndex < 0) { + CLOGE("ERR(%s[%d]):bufIndex(%d) < 0 fail", __FUNCTION__, __LINE__, bufIndex); + /* TODO: doing exception handling */ + return OK; + } + + if (m_runningFrameList[bufIndex] != NULL) { + CLOGE("ERR(%s):new 
buffer is invalid, we already get buffer index(%d), newFrame->frameCount(%d)", + __FUNCTION__, bufIndex, newFrame->getFrameCount()); + m_dumpRunningFrameList(); + return BAD_VALUE; + } + + if (m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = m_node[CAPTURE_NODE]->putBuffer(&ispBuffer); + if (ret < 0) { + CLOGE("ERR(%s):m_node[CAPTURE_NODE]->putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + } + + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)(fliteBuffer.addr[1]); + + if (shot_ext != NULL) { + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + int cropW = 0, cropH = 0, cropX = 0, cropY = 0; + + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getHwBayerCropRegion(&cropW, &cropH, &cropX, &cropY); + + newFrame->getMetaData(shot_ext); + ret = m_parameters->duplicateCtrlMetadata((void *)shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* setfile setting */ + ALOGV("INFO(%s[%d]):setfile(%d), m_reprocessing(%d)", __FUNCTION__, __LINE__, m_setfile, m_reprocessing); + setMetaSetfile(shot_ext, m_setfile); + + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&fliteBuffer); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + int zoomParamInfo = m_parameters->getZoomLevel(); + int zoomFrameInfo = 0; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + ExynosRect sensorSize; + ExynosRect bayerCropSize; + ExynosRect bdsSize; + camera2_node_group node_group_info; + + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex, &zoomFrameInfo); + +#ifdef PERFRAME_CONTROL_NODE_3AA + /* HACK: To speed up DZOOM */ + if (zoomFrameInfo != zoomParamInfo) { + CLOGI("INFO(%s[%d]):zoomFrameInfo(%d), zoomParamInfo(%d)", + __FUNCTION__, __LINE__, zoomFrameInfo, zoomParamInfo); + + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_dis; + + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPreviewBayerCropSize(&sensorSize, &bayerCropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + + newFrame->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, &zoomFrameInfo); + newFrame->getNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, &zoomFrameInfo); + + ExynosCameraNodeGroup3AA::updateNodeGroupInfo( + m_cameraId, + &node_group_info, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info_isp, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + m_parameters->getHWVdisMode()); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_dis, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_3AA, zoomParamInfo); + newFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP, zoomParamInfo); + newFrame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, zoomParamInfo); + } +#endif + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + + /* Per - 3AA */ + if (node_group_info.leader.request == 1) { + + if 
(m_checkNodeGroupInfo(m_node[OUTPUT_NODE]->getName(), &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%s) fail", __FUNCTION__, __LINE__, m_node[OUTPUT_NODE]->getName()); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - 0:3AC 1:3AP 2:SCP*/ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum; i ++) { + if (node_group_info.capture[i].request == 1 || + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == true) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + + /* CLOGI("INFO(%s[%d]):fcount(%d)", __FUNCTION__, __LINE__, shot_ext->shot.dm.request.frameCount); */ + /* newFrame->dumpNodeGroupInfo("3AA_ISP"); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeIspNodeGroupInfo", m_perframeIspNodeGroupInfo); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeMainNodeGroupInfo", m_perframeMainNodeGroupInfo); */ + } + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + ret = m_node[OUTPUT_NODE]->putBuffer(&fliteBuffer); + if (ret < 0) { + CLOGE("ERR(%s):output putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setSrcBuffer state fail", __FUNCTION__); + return ret; + } + + m_runningFrameList[bufIndex] = newFrame; + m_numOfRunningFrame++; + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraFrame *perframeFrame = NULL; + ExynosCameraBuffer fliteBuffer; + ExynosCameraBuffer ispBuffer; + ExynosCameraBuffer resultBuffer; + + int index = 0; + int ret = 0; + int error = 0; + camera2_node_group node_group_info; + + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + fliteBuffer.addr[1] = NULL; + + ispBuffer.index = -1; + resultBuffer.index = -1; + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, 
flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + if (m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = m_node[CAPTURE_NODE]->getBuffer(&ispBuffer, &index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[CAPTURE_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + error = ret; + } else { + struct camera2_stream *stream = (struct camera2_stream *)(ispBuffer.addr[1]); + if (m_checkValidFrameCount(stream) == false) { + CLOGW("WARN(%s[%d]):m_checkValidFrameCount() fail. so, frame(cnt:%d)) skip", __FUNCTION__, __LINE__, stream->fcount); + error = INVALID_OPERATION; + } + } + + resultBuffer = ispBuffer; + } + + CLOGV("DEBUG(%s[%d]):index : %d", __FUNCTION__, __LINE__, index); + + ret = m_node[OUTPUT_NODE]->getBuffer(&fliteBuffer, &index); + + /* in case of 3aa_isp OTF and 3ap capture fail.*/ + if (resultBuffer.index < 0) { + resultBuffer = fliteBuffer; + } + + if (ret != NO_ERROR || error != NO_ERROR) { + + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + camera2_shot_ext *shot_ext; + shot_ext = (struct camera2_shot_ext *)(fliteBuffer.addr[1]); + CLOGE("ERR(%s[%d]):Shot done invalid, frame(cnt:%d, index(%d)) skip", __FUNCTION__, __LINE__, getMetaDmRequestFrameCount(shot_ext), index); + + if (fliteBuffer.addr[1] != NULL) { + ret = m_updateMetadataToFrame(fliteBuffer.addr[1], resultBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + } + + /* complete frame */ + ret = m_completeFrame(&curFrame, resultBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* Update the frame information with 3AA Drop */ + curFrame->set3aaDrop(true); + curFrame->setIspDone(true); + curFrame->setDisDrop(true); + curFrame->setScpDrop(true); + + if (m_parameters->isReprocessing() == false) { + curFrame->setIspcDrop(true); + curFrame->setSccDrop(true); + } + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + /* add delay to run m_monitoer thread */ + usleep(15000); + + return NO_ERROR; + + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&resultBuffer); + + ret = m_updateMetadataToFrame(fliteBuffer.addr[1], resultBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + ret = m_getFrameByIndex(&perframeFrame, resultBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): getFrameByIndex fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } else { + perframeFrame->getNodeGroupInfo(&node_group_info, m_perframeIspNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex); +#if 0 + ret = m_checkShotDone((struct camera2_shot_ext*)fliteBuffer.addr[1]); + if (ret < 0) { + CLOGE("ERR(%s):Shot done invalid, frame skip", __FUNCTION__); + /* TODO: doing exception handling */ + + /* complete frame */ + ret = m_completeFrame(&curFrame, resultBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s):m_completeFrame is fail", __FUNCTION__); + /* TODO: doing exception handling 
*/ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is fail", __FUNCTION__); + } + + m_outputFrameQ->pushProcessQ(&curFrame); + + return NO_ERROR; +#endif + } + + /* TODO: Is it necessary memcpy shot.ctl from parameter? */ + camera2_shot_ext *shot_ext_src = (struct camera2_shot_ext *)(fliteBuffer.addr[1]); + camera2_shot_ext *shot_ext_dst = (struct camera2_shot_ext *)(ispBuffer.addr[1]); + + if (shot_ext_src == NULL) { + CLOGE("ERR(%s[%d]):shot_ext_src == NULL. so, fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (shot_ext_dst == NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + CLOGE("ERR(%s[%d]):shot_ext_dst == NULL. so, fail", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if ((shot_ext_src != NULL) && (shot_ext_dst != NULL)) { + int previewW, previewH; + m_parameters->getHwPreviewSize(&previewW, &previewH); + memcpy(&shot_ext_dst->shot.ctl, &shot_ext_src->shot.ctl, sizeof(struct camera2_ctl) - sizeof(struct camera2_entry_ctl)); + memcpy(&shot_ext_dst->shot.udm, &shot_ext_src->shot.udm, sizeof(struct camera2_udm)); + memcpy(&shot_ext_dst->shot.dm, &shot_ext_src->shot.dm, sizeof(struct camera2_dm)); + + if (m_perframeIspNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + memset(&shot_ext_dst->node_group, 0x0, sizeof(camera2_node_group)); + + /* Per - ISP */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(m_ispNode[OUTPUT_NODE]->getName(), &m_curIspNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%s) fail", __FUNCTION__, __LINE__, m_ispNode[OUTPUT_NODE]->getName()); + + setMetaNodeLeaderInputSize(shot_ext_dst, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext_dst, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext_dst, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext_dst, + m_perframeIspNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - SCP */ + for (int i = 0; i < m_perframeIspNodeGroupInfo.perframeSupportNodeNum; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curIspNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext_dst, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext_dst, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext_dst, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext_dst, i, m_perframeIspNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + /* CLOGE("INFO(%s[%d]):fcount(%d)", __FUNCTION__, __LINE__, shot_ext_dst->shot.dm.request.frameCount); */ + /* perframeFrame->dumpNodeGroupInfo("ISP__"); */ + /* 
m_dumpPerframeNodeGroupInfo("m_perframeIspNodeGroupInfo", m_perframeIspNodeGroupInfo); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeMainNodeGroupInfo", m_perframeMainNodeGroupInfo); */ + } + + shot_ext_dst->setfile = shot_ext_src->setfile; + shot_ext_dst->drc_bypass = shot_ext_src->drc_bypass; + shot_ext_dst->dis_bypass = shot_ext_src->dis_bypass; + shot_ext_dst->dnr_bypass = shot_ext_src->dnr_bypass; + shot_ext_dst->fd_bypass = shot_ext_src->fd_bypass; + shot_ext_dst->shot.dm.request.frameCount = shot_ext_src->shot.dm.request.frameCount; + shot_ext_dst->shot.magicNumber= shot_ext_src->shot.magicNumber; + } + +//#ifdef SHOT_RECOVERY + if (shot_ext_src != NULL) { + + retryGetBufferCount = shot_ext_src->complete_cnt; + + if (retryGetBufferCount > 0) { + CLOGI("INFO(%s[%d]): ( %d %d %d %d )", __FUNCTION__, __LINE__, + shot_ext_src->free_cnt, + shot_ext_src->request_cnt, + shot_ext_src->process_cnt, + shot_ext_src->complete_cnt); + } + } +//#endif + + /* to set metadata of ISP buffer */ + m_activityControl->activityBeforeExecFunc(PIPE_POST_3AA_ISP, (void *)&resultBuffer); + + if (shot_ext_dst != NULL) { + if (m_checkValidIspFrameCount(shot_ext_dst) == false) { + CLOGE("ERR(%s[%d]):m_checkValidIspFrameCount() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + if (m_isOtf(m_node[CAPTURE_NODE]->getInput()) == true) { + ret = m_updateMetadataToFrame(resultBuffer.addr[1], resultBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + /* complete frame */ + ret = m_completeFrame(&curFrame, resultBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* set ISP done flag to true */ + curFrame->setIspDone(true); + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + } else { + if (ispBuffer.index < 0) { + ALOGE("ERR(%s):ispBuffer index is invalid", __FUNCTION__); + return BAD_VALUE; + } else { + ret = m_ispNode[OUTPUT_NODE]->putBuffer(&ispBuffer); + if (ret < 0) { + ALOGE("ERR(%s):m_ispNode[OUTPUT_NODE]->putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + } + + m_ispBufferQ->pushProcessQ(&ispBuffer); + } + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AA_ISP::m_checkShotDone(struct camera2_shot_ext *shot_ext) +{ + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (shot_ext->node_group.leader.request != 1) { + CLOGW("WARN(%s[%d]): 3a1 NOT DONE, frameCount(%d)", __FUNCTION__, __LINE__, + getMetaDmRequestFrameCount(shot_ext)); + /* TODO: doing exception handling */ + return INVALID_OPERATION; + } + + return OK; +} + +status_t ExynosCameraPipe3AA_ISP::m_getIspBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer newBuffer; + ExynosCameraBuffer curBuffer; + camera2_shot_ext *shot_ext; + int index = 0; + int ret = 0; + + ret = m_ispBufferQ->waitAndPopProcessQ(&curBuffer); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_node[OUTPUT_NODE]->dumpState(); + 
m_node[CAPTURE_NODE]->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + ret = m_ispNode[OUTPUT_NODE]->getBuffer(&newBuffer, &index); + if (ret != NO_ERROR || index < 0) { + CLOGE("ERR(%s[%d]):m_ispNode[OUTPUT_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[1]); + newBuffer.index = index; + if (shot_ext != NULL) { + CLOGW("(%s[%d]):Shot done invalid, frame(cnt:%d, index(%d)) skip", + __FUNCTION__, __LINE__, getMetaDmRequestFrameCount(shot_ext), index); + } + + /* complete frame */ + ret = m_completeFrame(&curFrame, newBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* check scp frame drop */ + curFrame->setDisDrop(true); + curFrame->setScpDrop(true); + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; + + } + + if (curBuffer.index != newBuffer.index) { + CLOGW("ERR(%s[%d]):Frame mismatch, we expect index %d, but we got index %d", + __FUNCTION__, __LINE__, curBuffer.index, newBuffer.index); + /* TODO: doing exception handling */ + } + + ret = m_updateMetadataToFrame(curBuffer.addr[1], curBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + /* complete frame */ + ret = m_completeFrame(&curFrame, newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* set ISP done flag to true */ + curFrame->setIspDone(true); + + shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[1]); + + int perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + /* check scp frame drop */ + if (shot_ext == NULL) { + CLOGW("(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + } else if (shot_ext->node_group.capture[perFramePos].request == 0) { + curFrame->setScpDrop(true); + } + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCameraPipe3AA_ISP::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + /* deliver buffer from 3AA node to ISP node */ + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return true; + } + + /* put buffer to 3AA node */ + ret = m_putBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return false; + } + +//#ifdef SHOT_RECOVERY + if (retryGetBufferCount > 0) { +//#ifdef SHOT_RECOVERY_ONEBYONE + retryGetBufferCount = 1; +//#endif + for (int i = 0; i < retryGetBufferCount; i ++) { + CLOGI("INFO(%s[%d]): retryGetBufferCount( %d)", __FUNCTION__, __LINE__, retryGetBufferCount); + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + ret = m_putBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + } + retryGetBufferCount = 0; + } +//#endif + + return true; +} + +status_t ExynosCameraPipe3AA_ISP::dumpFimcIsInfo(bool bugOn) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + ret = m_ispNode[OUTPUT_NODE]->setControl(V4L2_CID_IS_DEBUG_DUMP, bugOn); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE]->setControl failed", __FUNCTION__); + + return ret; +} + +//#ifdef MONITOR_LOG_SYNC +status_t ExynosCameraPipe3AA_ISP::syncLog(uint32_t syncId) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + ret = m_ispNode[OUTPUT_NODE]->setControl(V4L2_CID_IS_DEBUG_SYNC_LOG, syncId); + if (ret != NO_ERROR) + CLOGE("ERR(%s):m_ispNode[OUTPUT_NODE]->setControl failed", __FUNCTION__); + + return ret; +} +//#endif + +bool ExynosCameraPipe3AA_ISP::m_ispThreadFunc(void) +{ + int ret = 0; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + /* get buffer from ISP node */ + ret = m_getIspBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_getIspBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return false; + } + + m_timer.stop(); + m_timeInterval = m_timer.durationMsecs(); + m_timer.start(); + + /* update renew count */ + if (ret >= 0) + m_threadRenew = 0; + + return true; +} + +void ExynosCameraPipe3AA_ISP::m_init(int32_t *ispNodeNums) +{ + memset(&m_perframeSubNodeGroupInfo, 0x00, sizeof(camera_pipe_perframe_node_group_info_t)); + memset(&m_perframeIspNodeGroupInfo, 0x00, sizeof(camera_pipe_perframe_node_group_info_t)); + + memset(&m_curIspNodeGroupInfo, 0x00, sizeof(camera2_node_group)); + + m_ispBufferQ = NULL; + + retryGetBufferCount = 0; + + if (ispNodeNums) { + /* this will get info though ispNodeNum */ + for (int i = 0; i < MAX_NODE; i++) + m_ispNodeNum[i] = ispNodeNums[i]; + + m_flagIndependantIspNode = true; + } else { 
+ for (int i = 0; i < MAX_NODE; i++) + m_ispNodeNum[i] = -1; + + /* + * m_node[OUTPUT_NODE] will be isp Node + * we will use isp node through m_nodeNum[SUB_NODE] + * for 54xx maintain. + */ + m_ispNodeNum[OUTPUT_NODE] = m_nodeNum[SUB_NODE]; + m_nodeNum[SUB_NODE] = -1; + + m_flagIndependantIspNode = false; + + CLOGD("DEBUG(%s[%d]): m_ispNodeNum set By m_nodeNum", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + CLOGD("DEBUG(%s[%d]): m_ispNodeNum[%d] : %d / m_nodeNum[%d] : %d", __FUNCTION__, __LINE__, i, m_ispNodeNum[i], i, m_nodeNum[i]); + } + + for (int i = 0; i < MAX_NODE; i++) { + m_ispNode[i] = NULL; + m_ispSensorIds[i] = -1; + } + + m_lastIspFrameCount = 0; +} + +void ExynosCameraPipe3AA_ISP:: m_copyNodeInfo2IspNodeInfo(void) +{ + /* + * transfer node info to ispNode info. + * + * if constructor set ispNodeNum[i], isp all node is proper. + * else, constructor doesn't set ispNodeNum[i], only m_ispNodeNum[OUTPUT_NODE] is proper. + * for 54xx maintain. + */ + + if (m_flagIndependantIspNode == false) { + m_ispSensorIds[OUTPUT_NODE] = m_sensorIds[SUB_NODE]; + m_sensorIds[SUB_NODE] = -1; + + CLOGD("DEBUG(%s[%d]): m_ispSensorIds set By m_sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + CLOGD("DEBUG(%s[%d]): m_ispSensorIds[%d] : %d / m_sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, m_ispSensorIds[i], i, m_sensorIds[i]); + } +} + +bool ExynosCameraPipe3AA_ISP::m_checkValidIspFrameCount(struct camera2_shot_ext * shot_ext) +{ + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext == NULL. so fail", __FUNCTION__, __LINE__); + return false; + } + + int frameCount = getMetaDmRequestFrameCount(shot_ext); + + if (frameCount < 0 || + frameCount < m_lastIspFrameCount) { + CLOGE("ERR(%s[%d]):invalid frameCount(%d) < m_lastIspFrameCount(%d). so fail", + __FUNCTION__, __LINE__, frameCount, m_lastIspFrameCount); + return false; + } + + m_lastIspFrameCount = frameCount; + + return true; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.h b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.h new file mode 100644 index 0000000..9f84ad6 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AA_ISP.h @@ -0,0 +1,119 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_3AA_ISP_H +#define EXYNOS_CAMERA_PIPE_3AA_ISP_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList isp_buffer_queue_t; + +class ExynosCameraPipe3AA_ISP : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipe3AA_ISP() + { + m_init(NULL); + } + + ExynosCameraPipe3AA_ISP( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(NULL); + } + + ExynosCameraPipe3AA_ISP( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums, int32_t *ispNodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(ispNodeNums); + } + + virtual ~ExynosCameraPipe3AA_ISP(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds, int32_t *ispSensorIds); + virtual status_t prepare(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual status_t startThread(void); + virtual status_t stopThread(void); + + virtual status_t setControl(int cid, int value); + virtual status_t instantOn(int32_t numFrames); + virtual status_t instantOff(void); + virtual status_t forceDone(unsigned int cid, int value); + + virtual void dump(void); + virtual status_t precreate(int32_t *sensorIds = NULL); + virtual status_t postcreate(int32_t *sensorIds = NULL); + + virtual status_t dumpFimcIsInfo(bool bugOn); +//#ifdef MONITOR_LOG_SYNC + virtual status_t syncLog(uint32_t syncId); +//#endif + +protected: + status_t m_getBuffer(void); + status_t m_putBuffer(void); + status_t m_getIspBuffer(void); + + virtual status_t m_checkShotDone(struct camera2_shot_ext *shot_ext); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + +protected: + virtual bool m_mainThreadFunc(void); + virtual bool m_ispThreadFunc(void); + bool m_checkValidIspFrameCount(struct camera2_shot_ext * shot_ext); + +private: + void m_init(int32_t *ispNodeNums = NULL); + void m_copyNodeInfo2IspNodeInfo(void); + +protected: + camera_pipe_perframe_node_group_info_t m_perframeSubNodeGroupInfo; + camera_pipe_perframe_node_group_info_t m_perframeIspNodeGroupInfo; + camera2_node_group m_curIspNodeGroupInfo; + +private: + int32_t m_ispNodeNum[MAX_NODE]; + ExynosCameraNode *m_ispNode[MAX_NODE]; + + int32_t m_ispSensorIds[MAX_NODE]; + + sp m_ispThread; + isp_buffer_queue_t *m_ispBufferQ; +//#ifdef SHOT_RECOVERY + int retryGetBufferCount; +//#endif + bool m_flagIndependantIspNode; + int m_lastIspFrameCount; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.cpp new file mode 100644 index 0000000..e1df2f0 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.cpp @@ -0,0 +1,366 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipe3AC" +#include + +#include "ExynosCameraPipe3AC.h" + +namespace android { + +ExynosCameraPipe3AC::~ExynosCameraPipe3AC() +{ + this->destroy(); +} + +status_t ExynosCameraPipe3AC::create(int32_t *sensorIds) +{ + CLOGD("[%s(%d)]", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_node[CAPTURE_NODE]->create("3AC", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + + /* mainNode is CAPTURE_NODE */ + m_mainNodeNum = CAPTURE_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipe3AC::m_mainThreadFunc, "3ACThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + m_prepareBufferCount = m_getPrepareBufferCount(); + + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AC::destroy(void) +{ + CLOGD("[%s(%d)]", __FUNCTION__, __LINE__); + if (m_node[CAPTURE_NODE] != NULL) { + if (m_node[CAPTURE_NODE]->close() != NO_ERROR) { + CLOGE("ERR(%s): close fail", __FUNCTION__); + return INVALID_OPERATION; + } + delete m_node[CAPTURE_NODE]; + m_node[CAPTURE_NODE] = NULL; + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + + m_mainNode = NULL; + m_mainNodeNum = -1; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AC::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + struct camera2_shot_ext *shot_ext = NULL; + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + ret = m_node[CAPTURE_NODE]->getBuffer(&curBuffer, &index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getBuffer(index : %d) fail", __FUNCTION__, __LINE__, index); + } + + if (ret != NO_ERROR) { + if (index 
< 0) { + CLOGE("ERR(%s[%d]):index(%d) < 0. so just return without error handling...", + __FUNCTION__, __LINE__, index); + + return INVALID_OPERATION; + } + + if (curBuffer.addr[1] == NULL) { + CLOGE("ERR(%s[%d]):curBuffer.addr[1] == NULL. so just return without error handling...", + __FUNCTION__, __LINE__); + + return INVALID_OPERATION; + } + + CLOGE("ERR(%s[%d]):error handling start...", __FUNCTION__, __LINE__); + + shot_ext = (struct camera2_shot_ext *)(curBuffer.addr[1]); + + CLOGE("ERR(%s[%d]):Shot done invalid, frame(cnt:%d, index(%d)) skip", + __FUNCTION__, __LINE__, getMetaDmRequestFrameCount(shot_ext), index); + + ret = m_updateMetadataToFrame(curBuffer.addr[1], curBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame(%d) fail, ret(%d)", __FUNCTION__, __LINE__, curBuffer.index, ret); + } + + /* complete frame */ + ret = m_completeFrame(&curFrame, curBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* Update the frame information with 3AA Drop */ + curFrame->set3aaDrop(true); + curFrame->setIspcDrop(true); + curFrame->setSccDrop(true); + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + return NO_ERROR; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + ret = m_updateMetadataToFrame(curBuffer.addr[1], curBuffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + shot_ext = (struct camera2_shot_ext *)curBuffer.addr[1]; + + /* complete frame */ + ret = m_completeFrame(&curFrame, curBuffer); + if (ret < 0) { + CLOGE("ERR(%s):m_comleteFrame fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is fail", __FUNCTION__); + } + + m_outputFrameQ->pushProcessQ(&curFrame); + + return NO_ERROR; +} + +status_t ExynosCameraPipe3AC::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + /* TODO: check node state */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_prepareBufferCount = m_getPrepareBufferCount(); + + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + return NO_ERROR; +} + +int ExynosCameraPipe3AC::m_getPrepareBufferCount(void) +{ + int prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + bool dynamicBayer = false; + + if (m_parameters->getRecordingHint() == true) + dynamicBayer = m_parameters->getUseDynamicBayerVideoSnapShot(); + /* 
currently normal dynamic bayer is not qualified */ + /* + else + dynamicBayer = m_parameters->getUseDynamicBayer(); + */ + + if (dynamicBayer == true) { + CLOGD("DEBUG(%s[%d]): Dynamic bayer. so, set prepareBufferCount(%d) as 0", + __FUNCTION__, __LINE__, prepareBufferCount); + + prepareBufferCount = 0; + } + + return prepareBufferCount; +} + +status_t ExynosCameraPipe3AC::sensorStream(bool on) +{ + CLOGD("[%s(%d)]", __FUNCTION__, __LINE__); + + int ret = 0; + int value = on ? IS_ENABLE_STREAM: IS_DISABLE_STREAM; + + ret = m_node[CAPTURE_NODE]->setControl(V4L2_CID_IS_S_STREAM, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s): sensor S_STREAM(%d) fail", __FUNCTION__, value); + + return ret; +} + +bool ExynosCameraPipe3AC::m_checkThreadLoop(void) +{ + Mutex::Autolock lock(m_pipeframeLock); + bool loop = false; + + if (m_isReprocessing() == false) + loop = true; + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loop = true; + + /* + if (m_inputFrameQ->getSizeOfProcessQ() == 0 && + m_numOfRunningFrame == 0) + loop = false; + */ + if (0 < m_numOfRunningFrame) + loop = true; + + if (m_flagTryStop == true) + loop = false; + + return loop; +} + +bool ExynosCameraPipe3AC::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagStartPipe == false) { + /* waiting for pipe started */ + usleep(5000); + return m_checkThreadLoop(); + } + + if (m_numOfRunningFrame == 0 && m_inputFrameQ->getSizeOfProcessQ() != 0) { + ret = prepare(); + if (ret < 0) + CLOGE("ERR(%s[%d]):3AC prepare fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + ret = m_getBuffer(); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret < 0) { + CLOGE("ERR(%s): m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + if (m_numOfRunningFrame < m_prepareBufferCount - 1) { + int cnt = m_inputFrameQ->getSizeOfProcessQ(); + do { + ret = m_putBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) + return m_checkThreadLoop(); + + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + cnt--; + } while (cnt > 0); + } else { + ret = m_putBuffer(); + if (ret < 0) { + CLOGE("ERR(%s): m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + } + + return m_checkThreadLoop(); +} + +void ExynosCameraPipe3AC::m_init(void) +{ + m_metadataTypeShot = false; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.h b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.h new file mode 100644 index 0000000..46a6c67 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipe3AC.h @@ -0,0 +1,65 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_3AC_H +#define EXYNOS_CAMERA_PIPE_3AC_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipe3AC : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipe3AC() + { + m_init(); + } + + ExynosCameraPipe3AC( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipe3AC(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + + virtual status_t sensorStream(bool on); + + virtual bool m_checkThreadLoop(void); + +private: + virtual bool m_mainThreadFunc(void); + virtual status_t m_getBuffer(void); + +private: + void m_init(void); + int m_getPrepareBufferCount(void); +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.cpp new file mode 100644 index 0000000..4175419 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.cpp @@ -0,0 +1,761 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeDIS" +#include + +#include "ExynosCameraPipeDIS.h" + +namespace android { + +ExynosCameraPipeDIS::~ExynosCameraPipeDIS() +{ + this->destroy(); +} + +status_t ExynosCameraPipeDIS::create(int32_t *sensorIds) +{ + CLOGI("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + int fd = -1; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + /* DIS output */ + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create("DIS_OUTPUT", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(m_nodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[OUTPUT_NODE]); + } + + /* DIS capture */ + if (m_flagValidInt(m_nodeNum[CAPTURE_NODE]) == true) { + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + + ret = m_node[CAPTURE_NODE]->create("DIS_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + } + + /* mainNode is OUTPUT_NODE */ + m_mainNodeNum = OUTPUT_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeDIS::m_mainThreadFunc, "DISThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + m_inputFrameQ->setWaitTime(500000000); /* .5 sec */ + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeDIS::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + + for (int i = MAX_NODE - 1; 0 <= i; i--) { + if (m_node[i] != NULL) { + if (m_node[i]->close() != NO_ERROR) { + CLOGE("ERR(%s): close(%d) fail", __FUNCTION__, i); + return INVALID_OPERATION; + } + delete m_node[i]; + m_node[i] = NULL; + CLOGD("DEBUG(%s):Node(%d, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, i, m_nodeNum[i], m_sensorIds[i]); + } + } + + m_mainNode = NULL; + m_mainNodeNum = -1; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeDIS::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + status_t ret = NO_ERROR; + unsigned int bpp = 0; + unsigned int planes = 0; + + if (m_node[OUTPUT_NODE] != NULL) { + int disFormat = m_parameters->getHWVdisFormat(); + unsigned int disPlanes = 0; + getYuvFormatInfo(disFormat, &bpp, &disPlanes); + + getYuvFormatInfo(pipeInfos[0].rectInfo.colorFormat, &bpp, &planes); + + if (planes != disPlanes) { + CLOGE("ERR(%s[%d]):planes(%d) of colorFormat(%d) != disPlanes(%d) of disFormat(%d). so. fail(please check colorFormat scenario)", + __FUNCTION__, __LINE__, planes, pipeInfos[0].rectInfo.colorFormat, disPlanes, disFormat); + return INVALID_OPERATION; + } + + /* add meta */ + planes += 1; + + ret = m_setNodeInfo(m_node[OUTPUT_NODE], &pipeInfos[0], + planes, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + m_perframeMainNodeGroupInfo = pipeInfos[0].perFrameNodeGroupInfo; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_sensorIds[CAPTURE_NODE]) == false) { + + int previewFormat = m_parameters->getHwPreviewFormat(); + unsigned int previewPlanes = 0; + getYuvFormatInfo(previewFormat, &bpp, &previewPlanes); + + getYuvFormatInfo(pipeInfos[1].rectInfo.colorFormat, &bpp, &planes); + + if (planes != previewPlanes) { + CLOGE("ERR(%s[%d]):planes(%d) of colorFormat(%d) != previewPlanes(%d) of previewFormat(%d). so. fail(please check colorFormat scenario)", + __FUNCTION__, __LINE__, planes, pipeInfos[1].rectInfo.colorFormat, previewPlanes, previewFormat); + return INVALID_OPERATION; + } + + /* add meta */ + planes += 1; + + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[1], + planes, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[1].rectInfo.fullW, pipeInfos[1].rectInfo.fullH, pipeInfos[1].bufInfo.count); + return INVALID_OPERATION; + } + + m_numCaptureBuf = pipeInfos[1].bufInfo.count; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeDIS::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGI("INFO(%s[%d]): -IN-", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + +#ifdef DEBUG_RAWDUMP + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; +#endif + + /* TODO: check node state */ + /* stream on? 
*/ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for(int i = 0; i < MAX_NODE; i++) { + for (int j = 0; j < m_numBuffers; j++) { + m_runningFrameList[j] = NULL; + m_nodeRunningFrameList[i][j] = NULL; + } + } + + m_numOfRunningFrame = 0; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + CLOGI("INFO(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeDIS::prepare(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeDIS::start(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_startNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_startNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_flagStartPipe = true; + m_flagTryStop = false; + + return ret; +} + +status_t ExynosCameraPipeDIS::stop(void) +{ + CLOGD("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_stopNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_stopNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + m_flagStartPipe = false; + m_flagTryStop = false; + + return ret; +} + +status_t ExynosCameraPipeDIS::getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition) +{ + int planeCount = 0; + status_t ret = NO_ERROR; + + enum NODE_TYPE nodeType = OUTPUT_NODE; + + if (pipePosition == SRC_PIPE) { + nodeType = CAPTURE_NODE; + } else { + nodeType = OUTPUT_NODE; + } + + if (m_node[nodeType] == NULL) { + CLOGE("ERR(%s): m_node[%d] == NULL. 
so, fail", __FUNCTION__, nodeType); + return INVALID_OPERATION; + } + + ret = m_node[nodeType]->getSize(fullW, fullH); + if (ret != NO_ERROR) { + CLOGE("ERR(%s):m_node[%d]->getSize fail", __FUNCTION__, nodeType); + return ret; + } + + ret = m_node[nodeType]->getColorFormat(colorFormat, &planeCount); + if (ret != NO_ERROR) { + CLOGE("ERR(%s):m_node[%d]->getColorFormat fail", __FUNCTION__, nodeType); + return ret; + } + + return ret; +} + +status_t ExynosCameraPipeDIS::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer buffer[MAX_NODE]; + camera2_shot_ext *shot_ext = NULL; + int ret = 0; + int runningFrameIndex = -1; + + int v4l2Colorformat = 0; + int planeCount[MAX_NODE] = {0}; + int metaBufferPlaneCount = 0; + bool flagOutputNodeRunning = false; + + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s):m_flagTryStop(%d)", __FUNCTION__, m_flagTryStop); + return false; + } + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + + for (int i = 0; i < MAX_NODE; i++) { + if (m_node[i] != NULL) + m_node[i]->dumpState(); + } + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):new frame is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if(m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + ret = newFrame->getSrcBuffer(getPipeId(), &buffer[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_nodeRunningFrameList[OUTPUT_NODE][buffer[OUTPUT_NODE].index] != NULL) { + CLOGE("ERR(%s[%d]):OUTPUT_NODE new buffer is invalid, we already get buffer index(%d)", + __FUNCTION__, __LINE__, buffer[OUTPUT_NODE].index); + return BAD_VALUE; + } + + if (runningFrameIndex < 0) + runningFrameIndex = buffer[OUTPUT_NODE].index; + + m_node[OUTPUT_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[OUTPUT_NODE]); + + metaBufferPlaneCount = planeCount[OUTPUT_NODE] - 1; + + shot_ext = (struct camera2_shot_ext *)(buffer[OUTPUT_NODE].addr[metaBufferPlaneCount]); + + flagOutputNodeRunning = true; + + /* DROP CASE */ + if (newFrame->getDisDrop() == true) { + int frameCount = -1; + + if (shot_ext != NULL) + frameCount = getMetaDmRequestFrameCount(shot_ext); + + CLOGW("WARN(%s[%d]):newFrame->getDisDrop() == true, (qbuf is too late) so, dis drop frameCount(%d)", + __FUNCTION__, __LINE__, frameCount); + + return m_handleInvalidFrame(buffer[OUTPUT_NODE].index, newFrame, &buffer[OUTPUT_NODE]); + } + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = newFrame->getDstBuffer(getPipeId(), &buffer[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_nodeRunningFrameList[CAPTURE_NODE][buffer[CAPTURE_NODE].index] != NULL) { + CLOGE("ERR(%s[%d]):CAPTURE_NODE new buffer is invalid, we already get buffer index(%d)", + __FUNCTION__, __LINE__, buffer[CAPTURE_NODE].index); + return BAD_VALUE; + } + + if (runningFrameIndex < 0) + runningFrameIndex = buffer[CAPTURE_NODE].index; + } + + /* hack for debugging */ + 
CLOGV("DEBUG(%s[%d]): dis is running (%d)", __FUNCTION__, __LINE__, buffer[OUTPUT_NODE].index); + + if (shot_ext == NULL) { + if (flagOutputNodeRunning == true) + CLOGW("WARN(%s[%d]):shot_ext == NULL. but, skip", __FUNCTION__, __LINE__); + } else { + if (newFrame->getMetaDataEnable() == false) { + CLOGE("ERR(%s[%d]):newFrame->getMetaDataEnable() == false. so, fail", __FUNCTION__, __LINE__); + + return m_handleInvalidFrame(buffer[OUTPUT_NODE].index, newFrame, &buffer[OUTPUT_NODE]); + } + + newFrame->getMetaData(shot_ext); + + ret = m_parameters->duplicateCtrlMetadata((void *)shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + int zoomParamInfo = m_parameters->getZoomLevel(); + int zoomFrameInfo = 0; + camera2_node_group node_group_info; + + newFrame->getNodeGroupInfo(&node_group_info, PERFRAME_INFO_DIS, &zoomFrameInfo); + + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(-1, &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + /* DIS src size is same with ISP dst size. DIS dst, too. */ + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + /* CLOGI("INFO(%s[%d]):fcount(%d)", __FUNCTION__, __LINE__, shot_ext_dst->shot.dm.request.frameCount); */ + /* newFrame->dumpNodeGroupInfo("DIS"); */ + /* m_dumpPerframeNodeGroupInfo(m_name, m_perframeMainNodeGroupInfo); */ + } + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + if (m_node[CAPTURE_NODE]->putBuffer(&buffer[CAPTURE_NODE]) != NO_ERROR) { + CLOGE("ERR(%s):capture putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), 
ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + m_nodeRunningFrameList[CAPTURE_NODE][buffer[CAPTURE_NODE].index] = newFrame; + } + + if (m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + + /* check reversed frameCount (on error situation */ + if (shot_ext != NULL) { + if (m_checkValidFrameCount(shot_ext) == false) { + CLOGE("ERR(%s[%d]):m_checkValidFrameCount() fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + if (m_node[OUTPUT_NODE]->putBuffer(&buffer[OUTPUT_NODE]) != NO_ERROR) { + CLOGE("ERR(%s):output putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setSrcBuffer state fail", __FUNCTION__); + return ret; + } + + m_nodeRunningFrameList[OUTPUT_NODE][buffer[OUTPUT_NODE].index] = newFrame; + } + + /* + * setting m_runningFrameList set on head of function. + * OUTPUT_NODE has more priority than CAPTURE_NODE. + */ + if (0 <= runningFrameIndex) { + m_runningFrameList[runningFrameIndex] = newFrame; + m_numOfRunningFrame++; + } else { + CLOGE("ERR(%s[%d]):runningFrameIndex(%d) is weird. so, fail", __FUNCTION__, __LINE__, runningFrameIndex); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeDIS::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer buffer[MAX_NODE]; + ExynosCameraBuffer metaBuffer; + int v4l2Colorformat = 0; + int planeCount[MAX_NODE] = {0}; + int metaBufferPlaneCount = 0; + bool flagOutputNodeRunning = false; + camera2_shot_ext *shot_ext = NULL; + + int index = 0; + status_t ret = NO_ERROR; + status_t error = NO_ERROR; + + CLOGV("DEBUG(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + + ret = m_node[CAPTURE_NODE]->getBuffer(&buffer[CAPTURE_NODE], &index); + + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[CAPTURE_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + error = ret; + } + + CLOGV("DEBUG(%s[%d]):index : %d", __FUNCTION__, __LINE__, index); + + if (0 <= buffer[CAPTURE_NODE].index) + m_nodeRunningFrameList[CAPTURE_NODE][buffer[CAPTURE_NODE].index] = NULL; + + metaBuffer = buffer[CAPTURE_NODE]; + } + + if (m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + + ret = m_node[OUTPUT_NODE]->getBuffer(&buffer[OUTPUT_NODE], &index); + + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):getBuffer out, ret(%d)", __FUNCTION__, __LINE__, ret); + return false; + } + + if (0 <= buffer[OUTPUT_NODE].index) + m_nodeRunningFrameList[OUTPUT_NODE][buffer[OUTPUT_NODE].index] = NULL; + + metaBuffer = buffer[OUTPUT_NODE]; + + m_node[OUTPUT_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[OUTPUT_NODE]); + metaBufferPlaneCount = planeCount[OUTPUT_NODE] - 1; + + shot_ext = (struct camera2_shot_ext *)(metaBuffer.addr[metaBufferPlaneCount]); + + flagOutputNodeRunning = true; + } + + if 
((ret != NO_ERROR || error != NO_ERROR || index < 0) && + (shot_ext != NULL)) { + CLOGE("ERR(%s[%d]):m_node[OUTPUT_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + + metaBuffer.index = index; + + CLOGE("ERR(%s[%d]):Shot done invalid, frame(cnt:%d, index(%d)) skip", __FUNCTION__, __LINE__, getMetaDmRequestFrameCount(shot_ext), index); + + /* complete frame */ + ret = m_completeFrame(&curFrame, metaBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; + } + + if (flagOutputNodeRunning == true) { + ret = m_updateMetadataToFrame(metaBuffer.addr[metaBufferPlaneCount], metaBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame(%d) fail, ret(%d)", __FUNCTION__, __LINE__, metaBuffer.index, ret); + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&metaBuffer); + } + + CLOGV("DEBUG(%s[%d]):index : %d", __FUNCTION__, __LINE__, index); + + /* complete frame */ + ret = m_completeFrame(&curFrame, metaBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return ret; + } + + if (m_outputFrameQ != NULL) + m_outputFrameQ->pushProcessQ(&curFrame); + else + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCameraPipeDIS::m_mainThreadFunc(void) +{ + bool bRet = true; + status_t ret = 0; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_putBuffer(); + if (ret < 0) { + if (ret != TIMED_OUT) + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + + /* will do m_getBuffer */ + ret = NO_ERROR; + } + + if (0 < m_numOfRunningFrame) { + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + bRet = true; + goto done; + } + } + + m_timer.stop(); + m_timeInterval = m_timer.durationMsecs(); + m_timer.start(); + + /* update renew count */ + if (ret >= 0) + m_threadRenew = 0; + + bRet = m_checkThreadLoop(); + +done: + return bRet; +} + +void ExynosCameraPipeDIS::m_init(void) +{ +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.h new file mode 100644 index 0000000..7cf260e --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeDIS.h @@ -0,0 +1,69 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_DIS_H +#define EXYNOS_CAMERA_PIPE_DIS_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeDIS : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeDIS() + { + m_init(); + } + + ExynosCameraPipeDIS( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeDIS(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t prepare(void); + + virtual status_t start(void); + virtual status_t stop(void); + + virtual status_t getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition); + +private: + virtual bool m_mainThreadFunc(void); + + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + +private: + void m_init(void); +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.cpp new file mode 100644 index 0000000..acc9ab5 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.cpp @@ -0,0 +1,530 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeFlite" +#include + +#include "ExynosCameraPipeFlite.h" + +namespace android { + +/* global variable for multi-singleton */ +Mutex ExynosCameraPipeFlite::g_nodeInstanceMutex; +ExynosCameraNode *ExynosCameraPipeFlite::g_node[FLITE_CNTS] = {0}; +int ExynosCameraPipeFlite::g_nodeRefCount[FLITE_CNTS] = {0}; +#ifdef SUPPORT_DEPTH_MAP +Mutex ExynosCameraPipeFlite::g_vcNodeInstanceMutex; +ExynosCameraNode *ExynosCameraPipeFlite::g_vcNode[VC_CNTS] = {0}; +int ExynosCameraPipeFlite::g_vcNodeRefCount[VC_CNTS] = {0}; +#endif + +ExynosCameraPipeFlite::~ExynosCameraPipeFlite() +{ + this->destroy(); +} + +status_t ExynosCameraPipeFlite::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGV("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + /* + * Flite must open once. so we will take cover as global variable. + * we will use only m_createNode and m_destroyNode. 
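+     * Each FLITE video node lives in the reference-counted static g_node[] table,
+     * so repeated create()/destroy() calls across pipe instances reuse the same
+     * opened node; it is only closed when its reference count drops to zero.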
+ */ + /* + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_node[CAPTURE_NODE]->create("FLITE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + */ + m_node[CAPTURE_NODE] = m_createNode(m_cameraId, m_nodeNum[CAPTURE_NODE]); +#ifdef SUPPORT_DEPTH_MAP + if (m_parameters->getUseDepthMap()) { + m_node[CAPTURE_NODE_2] = m_createVcNode(m_cameraId, m_nodeNum[CAPTURE_NODE_2]); + } +#endif + + /* mainNode is CAPTURE_NODE */ + m_mainNodeNum = CAPTURE_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeFlite::m_mainThreadFunc, "fliteThread", PRIORITY_URGENT_DISPLAY); + + m_inputFrameQ = new frame_queue_t; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFlite::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (m_node[CAPTURE_NODE] != NULL) { + /* + if (m_node[CAPTURE_NODE]->close() != NO_ERROR) { + CLOGE("ERR(%s):close fail", __FUNCTION__); + return INVALID_OPERATION; + } + delete m_node[CAPTURE_NODE]; + */ + m_destroyNode(m_cameraId, m_node[CAPTURE_NODE]); + + m_node[CAPTURE_NODE] = NULL; + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + +#ifdef SUPPORT_DEPTH_MAP + if (m_node[CAPTURE_NODE_2] != NULL) { + m_destroyVcNode(m_cameraId, m_node[CAPTURE_NODE_2]); + + m_node[CAPTURE_NODE_2] = NULL; + CLOGD("DEBUG(%s):Node(CAPTURE_NODE_2, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE_2], m_sensorIds[CAPTURE_NODE_2]); + } +#endif + + m_mainNode = NULL; + m_mainNodeNum = -1; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +#ifdef SUPPORT_DEPTH_MAP +status_t ExynosCameraPipeFlite::start(void) +{ + status_t ret = 0; + + ret = ExynosCameraPipe::start(); + + if (m_parameters->getUseDepthMap()) { + ret = m_node[CAPTURE_NODE_2]->start(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to start VCI node", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeFlite::stop(void) +{ + status_t ret = 0; + + ret = ExynosCameraPipe::stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to stopPipe", __FUNCTION__, __LINE__); + return ret; + } + + if (m_parameters->getUseDepthMap()) { + ret = m_node[CAPTURE_NODE_2]->stop(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to stop VCI node", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = m_node[CAPTURE_NODE_2]->clrBuffers(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to clrBuffers VCI node", __FUNCTION__, __LINE__); 
+ return INVALID_OPERATION; + } + + m_node[CAPTURE_NODE_2]->removeItemBufferQ(); + } + + return NO_ERROR; +} +#endif + +status_t ExynosCameraPipeFlite::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + struct camera2_shot_ext *shot_ext = NULL; + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { +#if defined(USE_CAMERA2_API_SUPPORT) + if (m_timeLogCount > 0) +#endif + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", + __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + ret = m_node[CAPTURE_NODE]->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return ret; + } + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d) fail", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + ret = m_updateMetadataToFrame(curBuffer.addr[1], curBuffer.index); + if (ret < 0) + CLOGE("ERR(%s[%d]): updateMetadataToFrame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + shot_ext = (struct camera2_shot_ext *)curBuffer.addr[1]; + +//#ifdef SHOT_RECOVERY + if (shot_ext != NULL) { + retryGetBufferCount = shot_ext->complete_cnt; + + if (retryGetBufferCount > 0) { + CLOGI("INFO(%s[%d]): ( %d %d %d %d )", __FUNCTION__, __LINE__, + shot_ext->free_cnt, + shot_ext->request_cnt, + shot_ext->process_cnt, + shot_ext->complete_cnt); + } + } +//#endif + +#ifdef SUPPORT_DEPTH_MAP + curFrame = m_runningFrameList[curBuffer.index]; + + if (curFrame->getRequest(PIPE_VC1) == true) { + ExynosCameraBuffer depthMapBuffer; + + ret = m_node[CAPTURE_NODE_2]->getBuffer(&depthMapBuffer, &index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to get DepthMap buffer", __FUNCTION__, __LINE__); + return ret; + } + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index %d", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + ret = curFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED, CAPTURE_NODE_2); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setDstBufferState into REQUESTED. pipeId %d frameCount %d ret %d", + __FUNCTION__, __LINE__, getPipeId(), curFrame->getFrameCount(), ret); + } + + ret = curFrame->setDstBuffer(getPipeId(), depthMapBuffer, CAPTURE_NODE_2, INDEX(getPipeId())); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to set DepthMapBuffer to frame. bufferIndex %d frameCount %d", + __FUNCTION__, __LINE__, index, curFrame->getFrameCount()); + return ret; + } + + ret = curFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE, CAPTURE_NODE_2); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to setDstBufferState with COMPLETE", + __FUNCTION__, __LINE__); + return ret; + } + } +#endif + + /* complete frame */ + ret = m_completeFrame(&curFrame, curBuffer); + if (ret < 0) { + CLOGE("ERR(%s):m_comleteFrame fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is fail", __FUNCTION__); + } + + m_outputFrameQ->pushProcessQ(&curFrame); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFlite::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* initialize node */ + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[0], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + +#ifdef SUPPORT_DEPTH_MAP + if (m_parameters->getUseDepthMap()) { + ret = m_setNodeInfo(m_node[CAPTURE_NODE_2], &pipeInfos[CAPTURE_NODE_2], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, + pipeInfos[CAPTURE_NODE_2].rectInfo.fullW, pipeInfos[CAPTURE_NODE_2].rectInfo.fullH, + pipeInfos[CAPTURE_NODE_2].bufInfo.count); + return INVALID_OPERATION; + } + } +#endif + + m_numBuffers = pipeInfos[0].bufInfo.count; + + return NO_ERROR; +} + +status_t ExynosCameraPipeFlite::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + +#ifdef DEBUG_RAWDUMP + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; +#endif + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) setupPipe (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeFlite::sensorStream(bool on) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + int value = on ? 
IS_ENABLE_STREAM: IS_DISABLE_STREAM; + + ret = m_node[CAPTURE_NODE]->setControl(V4L2_CID_IS_S_STREAM, value); + if (ret != NO_ERROR) + CLOGE("ERR(%s):sensor S_STREAM(%d) fail", __FUNCTION__, value); + + return ret; +} + +bool ExynosCameraPipeFlite::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_getBuffer(); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + +//#ifdef SHOT_RECOVERY + for (int i = 0; i < retryGetBufferCount; i ++) { + CLOGE("INFO(%s[%d]): retryGetBufferCount( %d)", __FUNCTION__, __LINE__, retryGetBufferCount); + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + ret = m_putBuffer(); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + } + retryGetBufferCount = 0; +//#endif + + if (m_numOfRunningFrame < 2) { + int cnt = m_inputFrameQ->getSizeOfProcessQ(); + do { + ret = m_putBuffer(); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + cnt--; + } while (cnt > 0); + } else { + ret = m_putBuffer(); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + } + + return true; +} + +void ExynosCameraPipeFlite::m_init(void) +{ +//#ifdef SHOT_RECOVERY + retryGetBufferCount = 0; +//#endif +} + +status_t ExynosCameraPipeFlite::m_updateMetadataToFrame(void *metadata, int index) +{ + status_t ret = NO_ERROR; + ExynosCameraFrame *frame = NULL; + camera2_shot_ext *shot_ext = NULL; + camera2_dm frame_dm; + + shot_ext = (struct camera2_shot_ext *) metadata; + if (shot_ext == NULL) { + CLOGE("ERR(%s[%d]):shot_ext is NULL", __FUNCTION__, __LINE__); + return BAD_VALUE; + } + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index %d", __FUNCTION__, __LINE__, index); + return BAD_VALUE; + } + + if (m_metadataTypeShot == false) { + CLOGV("DEBUG(%s[%d]):Stream type do not need update metadata", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + ret = m_getFrameByIndex(&frame, index); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to getFrameByIndex. index %d ret %d", + __FUNCTION__, __LINE__, index, ret); + return ret; + } + + ret = frame->getDynamicMeta(&frame_dm); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to getDynamicMeta. frameCount %d ret %d", + __FUNCTION__, __LINE__, frame->getFrameCount(), ret); + return ret; + } + + if (frame_dm.request.frameCount == 0 || frame_dm.sensor.timeStamp == 0) { + frame_dm.request.frameCount = shot_ext->shot.dm.request.frameCount; + frame_dm.sensor.timeStamp = shot_ext->shot.dm.sensor.timeStamp; + + ret = frame->storeDynamicMeta(&frame_dm); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Failed to storeDynamicMeta. 
frameCount %d ret %d", + __FUNCTION__, __LINE__, frame->getFrameCount(), ret); + return ret; + } + } + + return NO_ERROR; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.h new file mode 100644 index 0000000..ec720b6 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeFlite.h @@ -0,0 +1,186 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_FLITE_H +#define EXYNOS_CAMERA_PIPE_FLITE_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +#define FLITE_CNTS (FIMC_IS_VIDEO_SS3_NUM - FIMC_IS_VIDEO_SS0_NUM + 1) + +class ExynosCameraPipeFlite : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeFlite() + { + m_init(); + } + + ExynosCameraPipeFlite( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeFlite(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + + virtual status_t sensorStream(bool on); + +protected: + virtual bool m_mainThreadFunc(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + +private: + void m_init(void); + +private: + /* + * Multi-singleton : + * User must call createInstance to get PipeLite obj. + * because 4 flites must be open once. + * + */ + static ExynosCameraNode *m_createNode(int cameraId, int nodeNum) + { + Mutex::Autolock lock(g_nodeInstanceMutex); + + status_t ret = NO_ERROR; + + int index = nodeNum - FIMC_IS_VIDEO_SS0_NUM + 1; + + if (index < 0 || FLITE_CNTS <= index) + { + ALOGE("[CAM_ID(%d)]-ERR(%s[%d]):invalid Index(%d) fail", cameraId, __FUNCTION__, __LINE__, index); + return NULL; + } + + if (g_node[index] == NULL) { + if (g_nodeRefCount[index] != 0) { + ALOGW("[CAM_ID(%d)]-WARN(%s[%d]):invalid g_nodeRefCount[%d](%d). 
so, set 0", + cameraId, __FUNCTION__, __LINE__, index, g_nodeRefCount[index]); + + g_nodeRefCount[index] = 0; + } + + g_node[index] = new ExynosCameraNode(); + + ALOGD("[CAM_ID(%d)]-DEBUG(%s[%d]): new g_node[%d]", + cameraId, __FUNCTION__, __LINE__, index); + + ret = g_node[index]->create("FLITE", cameraId); + if (ret < 0) { + ALOGE("[CAM_ID(%d)]-ERR(%s[%d]): create(FLITE) fail, ret(%d)", + cameraId, __FUNCTION__, __LINE__, ret); + + SAFE_DELETE(g_node[index]); + return NULL; + } + + ret = g_node[index]->open(nodeNum); + if (ret < 0) { + ALOGE("[CAM_ID(%d)]-ERR(%s[%d]): open(%d) fail, ret(%d)", + cameraId, __FUNCTION__, __LINE__, nodeNum, ret); + + SAFE_DELETE(g_node[index]); + return NULL; + } + + ALOGD("[CAM_ID(%d)]-DEBUG(%s[%d]):Node(%d) opened", + cameraId, __FUNCTION__, __LINE__, nodeNum); + } else { + ALOGD("[CAM_ID(%d)]-DEBUG(%s[%d]): skip new g_nodeRefCount[%d] : (%d)", + cameraId, __FUNCTION__, __LINE__, index, g_nodeRefCount[index]); + } + + /* when calll this, increase ref */ + g_nodeRefCount[index]++; + + return g_node[index]; + } + + /* + * Multi-singleton : + * User must call DestoryInstance to delete PipeLite obj. + * This will destroy obj, when only g_nodeRefCount == 0. + */ + static void m_destroyNode(int cameraId, ExynosCameraNode * obj) + { + if (obj == NULL) { + ALOGE("[CAM_ID(%d)]-ERR(%s[%d]):obj == NULL. so fail", + cameraId, __FUNCTION__, __LINE__); + return; + } + + Mutex::Autolock lock(g_nodeInstanceMutex); + + for (int index = 0; index < FLITE_CNTS; index++) + { + if (obj == g_node[index]) { + if (1 < g_nodeRefCount[index]) { + ALOGD("[CAM_ID(%d)]-DEBUG(%s[%d]): skip delete g_nodeRefCount[%d] : (%d)", + cameraId, __FUNCTION__, __LINE__, index, g_nodeRefCount[index]); + + g_nodeRefCount[index]--; + } else if (g_nodeRefCount[index] == 1) { + ALOGD("[CAM_ID(%d)]-DEBUG(%s[%d]): delete g_nodeRefCount[%d] : (%d)", + cameraId, __FUNCTION__, __LINE__, index, g_nodeRefCount[index]); + + if (g_node[index]->close() != NO_ERROR) { + ALOGE("[CAM_ID(%d)]-DEBUG(%s[%d]): close fail", + cameraId, __FUNCTION__, __LINE__); + } + + SAFE_DELETE(g_node[index]); + g_nodeRefCount[index]--; + } else { /* g_nodeRefCount[index] < 1) */ + ALOGW("[CAM_ID(%d)]-WARN(%s[%d]):invalid g_nodeRefCount[%d](%d). so, set 0", + cameraId, __FUNCTION__, __LINE__, index, g_nodeRefCount[index]); + + g_nodeRefCount[index] = 0; + } + } + } + } + + /* Override */ + status_t m_updateMetadataToFrame(void *metadata, int index); + +private: +//#ifdef SHOT_RECOVERY + int retryGetBufferCount; +//#endif + + /* global variable for multi-singleton */ + static Mutex g_nodeInstanceMutex; + static ExynosCameraNode *g_node[FLITE_CNTS]; + static int g_nodeRefCount[FLITE_CNTS]; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.cpp new file mode 100644 index 0000000..fdf5937 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.cpp @@ -0,0 +1,261 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeGSC" +#include + +#include "ExynosCameraPipeGSC.h" + +namespace android { + +ExynosCameraPipeGSC::~ExynosCameraPipeGSC() +{ + this->destroy(); +} + +status_t ExynosCameraPipeGSC::create(__unused int32_t *sensorIds) +{ + CSC_METHOD cscMethod = CSC_METHOD_HW; + + m_csc = csc_init(cscMethod); + if (m_csc == NULL) { + CLOGE("ERR(%s):csc_init() fail", __FUNCTION__); + return INVALID_OPERATION; + } + + csc_set_hw_property(m_csc, m_property, m_gscNum); + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeGSC::m_mainThreadFunc, "GSCThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + CLOGI("INFO(%s[%d]):create() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeGSC::destroy(void) +{ + if (m_csc != NULL) + csc_deinit(m_csc); + m_csc = NULL; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeGSC::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + m_flagTryStop = false; + + return NO_ERROR; +} + +status_t ExynosCameraPipeGSC::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_flagTryStop = true; + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + m_flagTryStop = false; + + return NO_ERROR; +} + +status_t ExynosCameraPipeGSC::startThread(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeGSC::m_run(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer srcBuffer; + ExynosCameraBuffer dstBuffer; + ExynosRect srcRect; + ExynosRect dstRect; + ExynosCameraFrameEntity *entity = NULL; + + int ret = 0; + int rotation = 0; + int flipHorizontal = 0; + int flipVertical = 0; + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + return NO_ERROR; + } + + entity = newFrame->searchEntityByPipeId(getPipeId()); + if (entity == NULL || entity->getSrcBufState() == ENTITY_BUFFER_STATE_ERROR) { + CLOGE("ERR(%s[%d]):frame(%d) entityState(ENTITY_BUFFER_STATE_ERROR), skip msc", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + goto func_exit; + } + + rotation = newFrame->getRotation(getPipeId()); + CLOGV("INFO(%s[%d]): getPipeId(%d), rotation(%d)", __FUNCTION__, __LINE__, getPipeId(), rotation); + +#ifdef PERFRAME_CONTROL_FOR_FLIP + flipHorizontal = newFrame->getFlipHorizontal(getPipeId()); + flipVertical = newFrame->getFlipVertical(getPipeId()); +#else + flipHorizontal = m_parameters->getFlipHorizontal(); + flipVertical = 
m_parameters->getFlipVertical(); +#endif + + ret = newFrame->getSrcRect(getPipeId(), &srcRect); + ret = newFrame->getDstRect(getPipeId(), &dstRect); + + switch (srcRect.colorFormat) { + case V4L2_PIX_FMT_NV21: + srcRect.fullH = ALIGN_UP(srcRect.fullH, 2); + break; + default: + srcRect.fullH = ALIGN_UP(srcRect.fullH, GSCALER_IMG_ALIGN); + break; + } + + csc_set_src_format(m_csc, + ALIGN_UP(srcRect.fullW, GSCALER_IMG_ALIGN), + srcRect.fullH, + srcRect.x, srcRect.y, srcRect.w, srcRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(srcRect.colorFormat), + 0); + + csc_set_dst_format(m_csc, + dstRect.fullW, dstRect.fullH, + dstRect.x, dstRect.y, dstRect.fullW, dstRect.fullH, + V4L2_PIX_2_HAL_PIXEL_FORMAT(dstRect.colorFormat), + 0); + + ret = newFrame->getSrcBuffer(getPipeId(), &srcBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + ret = newFrame->getDstBuffer(getPipeId(), &dstBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + csc_set_src_buffer(m_csc, + (void **)srcBuffer.fd, CSC_MEMORY_TYPE); + + csc_set_dst_buffer(m_csc, + (void **)dstBuffer.fd, CSC_MEMORY_TYPE); + + if (csc_convert_with_rotation(m_csc, rotation, flipHorizontal, flipVertical) != 0) + CLOGE("ERR(%s):csc_convert() fail", __FUNCTION__); + + CLOGV("DEBUG(%s[%d]):Rotation(%d), flip horizontal(%d), vertical(%d)", + __FUNCTION__, __LINE__, rotation, flipHorizontal, flipVertical); + + CLOGV("DEBUG(%s[%d]):CSC(%d) converting done", __FUNCTION__, __LINE__, m_gscNum); + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + + return NO_ERROR; + +func_exit: + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + return NO_ERROR; +} + +bool ExynosCameraPipeGSC::m_mainThreadFunc(void) +{ + int ret = 0; + + ret = m_run(); + if (ret < 0) { + if (ret != TIMED_OUT) + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + } + + return m_checkThreadLoop(); +} + +void ExynosCameraPipeGSC::m_init(int32_t *nodeNums) +{ + if (nodeNums == NULL) + m_gscNum = -1; + else + m_gscNum = nodeNums[0]; + + m_csc = NULL; + m_property = (nodeNums == NULL) ? CSC_HW_PROPERTY_DEFAULT : CSC_HW_PROPERTY_FIXED_NODE; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.h new file mode 100644 index 0000000..a8565af --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeGSC.h @@ -0,0 +1,71 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_GSC_H +#define EXYNOS_CAMERA_PIPE_GSC_H + +#include "ExynosCameraPipe.h" +#include "csc.h" + +#define CSC_HW_PROPERTY_DEFAULT ((CSC_HW_PROPERTY_TYPE)2) /* Not fixed mode */ +#define CSC_MEMORY_TYPE CSC_MEMORY_DMABUF /* (CSC_MEMORY_USERPTR) */ + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeGSC : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeGSC() + { + m_init(NULL); + } + + ExynosCameraPipeGSC( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(nodeNums); + } + + virtual ~ExynosCameraPipeGSC(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual status_t startThread(void); + +protected: + virtual status_t m_run(void); + virtual bool m_mainThreadFunc(void); + +private: + void m_init(int32_t *nodeNums); + +private: + int m_gscNum; + void *m_csc; + CSC_HW_PROPERTY_TYPE m_property; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeISP.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeISP.cpp new file mode 100644 index 0000000..f0dbfae --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeISP.cpp @@ -0,0 +1,883 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeISP" +#include + +#include "ExynosCameraPipeISP.h" + +namespace android { + +ExynosCameraPipeISP::~ExynosCameraPipeISP() +{ + this->destroy(); +} + +status_t ExynosCameraPipeISP::create(int32_t *sensorIds) +{ + CLOGI("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + /* ISP output */ + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_node[OUTPUT_NODE] = new ExynosCameraNode(); + ret = m_node[OUTPUT_NODE]->create("ISP_OUTPUT", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[OUTPUT_NODE]->open(m_nodeNum[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): OUTPUT_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[OUTPUT_NODE]); + } + + /* ISP capture */ + if (m_flagValidInt(m_nodeNum[CAPTURE_NODE]) == true) { + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_node[CAPTURE_NODE]->create("ISP_CAPTURE", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + } + + if (m_flagValidInt(m_nodeNum[OUTPUT_NODE]) == true) { + m_mainNodeNum = OUTPUT_NODE; + } else if (m_flagValidInt(m_nodeNum[CAPTURE_NODE]) == true) { + m_mainNodeNum = CAPTURE_NODE; + } else { + CLOGE("ERR(%s[%d]): OUTPUT_NODE and CAPTURE_NODE are all invalid num. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + /* HACK: ISP setInput have to be called after 3AA open + * + * ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + * if (ret < 0) { + * CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + * return ret; + * } + */ + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeISP::m_mainThreadFunc, "ISPThread", PRIORITY_URGENT_DISPLAY); + + /* + if (m_isReprocessing() == true) + m_inputFrameQ = new frame_queue_t(m_mainThread); + else + m_inputFrameQ = new frame_queue_t; + */ + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + m_inputFrameQ->setWaitTime(500000000); /* .5 sec */ + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISP::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + int ret = 0; + + for (int i = MAX_NODE - 1; 0 <= i; i--) { + if (m_node[i] != NULL) { + if (m_node[i]->close() != NO_ERROR) { + CLOGE("ERR(%s): close(%d) fail", __FUNCTION__, i); + return INVALID_OPERATION; + } + delete m_node[i]; + m_node[i] = NULL; + CLOGD("DEBUG(%s):Node(%d, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, i, m_nodeNum[i], m_sensorIds[i]); + } + } + + m_mainNode = NULL; + m_mainNodeNum = -1; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISP::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + unsigned int bpp = 0; + unsigned int planes = 0; + + if (m_node[OUTPUT_NODE] != NULL) { + + if (pipeInfos[0].bufInfo.type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) { + CLOGE("ERR(%s[%d]): pipeInfos[0].bufInfo.type != V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE. so fail", + __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + ret = m_setNodeInfo(m_node[OUTPUT_NODE], &pipeInfos[0], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + m_perframeMainNodeGroupInfo = pipeInfos[0].perFrameNodeGroupInfo; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_sensorIds[CAPTURE_NODE]) == false) { + + if (pipeInfos[1].bufInfo.type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) { + CLOGE("ERR(%s[%d]): pipeInfos[1].bufInfo.type != V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE. 
so fail", + __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* get parameters's setting info */ + int tempColorfomat; + unsigned int tempPlanes = 0; + + if (m_parameters->getHWVdisMode() == true) + tempColorfomat = m_parameters->getHWVdisFormat(); + else + tempColorfomat = m_parameters->getHwPreviewFormat(); + + getYuvFormatInfo(tempColorfomat, &bpp, &tempPlanes); + + /* get real setting info */ + getYuvFormatInfo(pipeInfos[1].rectInfo.colorFormat, &bpp, &planes); + + if (planes != tempPlanes) { + CLOGE("ERR(%s[%d]):planes(%d) of colorFormat(%d) != tempPlanes(%d) of tempColorfomat(%d). so. fail(please check colorFormat scenario)", + __FUNCTION__, __LINE__, planes, pipeInfos[1].rectInfo.colorFormat, tempPlanes, tempColorfomat); + return INVALID_OPERATION; + } + + /* add meta */ + planes += 1; + + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[1], + planes, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[1].rectInfo.fullW, pipeInfos[1].rectInfo.fullH, pipeInfos[1].bufInfo.count); + return INVALID_OPERATION; + } + + m_numCaptureBuf = pipeInfos[1].bufInfo.count; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeISP::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGI("DEBUG(%s[%d]): -IN-", __FUNCTION__, __LINE__); +#ifdef DEBUG_RAWDUMP + unsigned int bytesPerLine[EXYNOS_CAMERA_BUFFER_MAX_PLANES] = {0}; +#endif + /* TODO: check node state */ + /* stream on? */ + + int ret = 0; + + /* HACK: ISP setInput have to be called after 3AA open */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + /* setfile setting */ +#ifdef SET_SETFILE_BY_SHOT + /* nop */ +#else +#if SET_SETFILE_BY_SET_CTRL_ISP + if (m_checkLeaderNode(m_sensorIds[OUTPUT_NODE]) == true) { + int setfile = 0; + int yuvRange = 0; + m_parameters->getSetfileYuvRange(m_reprocessing, &setfile, &yuvRange); + + ret = m_node[OUTPUT_NODE]->setControl(V4L2_CID_IS_SET_SETFILE, setfile); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d", __FUNCTION__, __LINE__, setfile, ret); + return ret; + } + CLOGD("DEBUG(%s[%d]):set setfile(%d),m_reprocessing(%d)", __FUNCTION__, __LINE__, setfile, m_reprocessing); + } else { + CLOGW("WARN(%s[%d]):m_checkLeaderNode(%d) == false. 
so, skip set setfile.", + __FUNCTION__, __LINE__, m_sensorIds[OUTPUT_NODE]); + } +#endif +#endif + + for(int i = 0; i < MAX_NODE; i++) { + for (uint32_t j = 0; j < m_numBuffers; j++) { + m_runningFrameList[j] = NULL; + m_nodeRunningFrameList[i][j] = NULL; + } + } + + m_numOfRunningFrame = 0; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + CLOGI("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISP::start(void) +{ + CLOGI("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_startNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_startNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_flagStartPipe = true; + m_flagTryStop = false; + + return ret; +} + +status_t ExynosCameraPipeISP::stop(void) +{ + CLOGD("INFO(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + + ret = m_stopNode(); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_stopNode() fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + m_flagStartPipe = false; + m_flagTryStop = false; + + return ret; +} + +status_t ExynosCameraPipeISP::getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition) +{ + int planeCount = 0; + status_t ret = NO_ERROR; + + enum NODE_TYPE nodeType = OUTPUT_NODE; + + if (pipePosition == SRC_PIPE) { + nodeType = CAPTURE_NODE; + + /* hack : backward compatibility : old version support only capture OTF */ + if (m_isOtf(m_sensorIds[CAPTURE_NODE]) == true) + nodeType = OUTPUT_NODE; + } else { + nodeType = OUTPUT_NODE; + } + + if (m_node[nodeType] == NULL) { + CLOGE("ERR(%s): m_node[%d] == NULL. 
so, fail", __FUNCTION__, nodeType); + return INVALID_OPERATION; + } + + ret = m_node[nodeType]->getSize(fullW, fullH); + if (ret != NO_ERROR) { + CLOGE("ERR(%s):m_node[%d]->getSize fail", __FUNCTION__, nodeType); + return ret; + } + + ret = m_node[nodeType]->getColorFormat(colorFormat, &planeCount); + if (ret != NO_ERROR) { + CLOGE("ERR(%s):m_node[%d]->getColorFormat fail", __FUNCTION__, nodeType); + return ret; + } + + return ret; +} + +status_t ExynosCameraPipeISP::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer buffer[MAX_NODE]; + camera2_node_group node_group_info; + camera2_shot_ext *shot_ext = NULL; + int ret = 0; + int runningFrameIndex = -1; + + int v4l2Colorformat = 0; + int planeCount[MAX_NODE] = {0}; + int metaBufferPlaneCount = 0; + bool flagOutputNodeRunning = false; + + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s):m_flagTryStop(%d)", __FUNCTION__, m_flagTryStop); + return false; + } + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + + for (int i = 0; i < MAX_NODE; i++) { + if (m_node[i] != NULL) + m_node[i]->dumpState(); + } + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s[%d]):new frame is NULL", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + if(m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + ret = newFrame->getSrcBuffer(getPipeId(), &buffer[OUTPUT_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_nodeRunningFrameList[OUTPUT_NODE][buffer[OUTPUT_NODE].index] != NULL) { + CLOGE("ERR(%s[%d]):OUTPUT_NODE new buffer is invalid, we already get buffer index(%d)", + __FUNCTION__, __LINE__, buffer[OUTPUT_NODE].index); + return BAD_VALUE; + } + + if (runningFrameIndex < 0) + runningFrameIndex = buffer[OUTPUT_NODE].index; + + /* about meta plane */ + m_node[OUTPUT_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[OUTPUT_NODE]); + + metaBufferPlaneCount = planeCount[OUTPUT_NODE] - 1; + + shot_ext = (struct camera2_shot_ext *)(buffer[OUTPUT_NODE].addr[metaBufferPlaneCount]); + + flagOutputNodeRunning = true; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + ret = newFrame->getDstBuffer(getPipeId(), &buffer[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_nodeRunningFrameList[CAPTURE_NODE][buffer[CAPTURE_NODE].index] != NULL) { + CLOGE("ERR(%s[%d]):CAPTURE_NODE new buffer is invalid, we already get buffer index(%d)", + __FUNCTION__, __LINE__, buffer[CAPTURE_NODE].index); + return BAD_VALUE; + } + + if (runningFrameIndex < 0) + runningFrameIndex = buffer[CAPTURE_NODE].index; + } + + if (shot_ext == NULL) { + if (flagOutputNodeRunning == true) + CLOGD("DEBUG(%s[%d]):shot_ext == NULL. 
but, skip", __FUNCTION__, __LINE__); + } else { + newFrame->getMetaData(shot_ext); + ret = m_parameters->duplicateCtrlMetadata((void *)shot_ext); + if (ret < 0) { + CLOGE("ERR(%s[%d]):duplicate Ctrl metadata fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + m_parameters->getFdMeta(m_isReprocessing(), (void *)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&buffer[OUTPUT_NODE]); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + int zoomParamInfo = m_parameters->getZoomLevel(); + int zoomFrameInfo = 0; + int maxZoomRatio = m_parameters->getMaxZoomRatio() / 1000; + int previewW = 0, previewH = 0; + int pictureW = 0, pictureH = 0; + ExynosRect sensorSize; + ExynosRect bayerCropSize; + ExynosRect bdsSize; + + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex, &zoomFrameInfo); +#ifdef PERFRAME_CONTROL_NODE_ISP + if (zoomFrameInfo != zoomParamInfo) { + CLOGI("INFO(%s[%d]):zoomFrameInfo(%d), zoomParamInfo(%d)", + __FUNCTION__, __LINE__, zoomFrameInfo, zoomParamInfo); + + camera2_node_group node_group_info_dis; + + m_parameters->getHwPreviewSize(&previewW, &previewH); + m_parameters->getPictureSize(&pictureW, &pictureH); + m_parameters->getPreviewBayerCropSize(&sensorSize, &bayerCropSize); + m_parameters->getPreviewBdsSize(&bdsSize); + + newFrame->getNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, &zoomFrameInfo); + + ExynosCameraNodeGroupISP::updateNodeGroupInfo( + m_cameraId, + &node_group_info, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH, + m_parameters->getHWVdisMode()); + + ExynosCameraNodeGroupDIS::updateNodeGroupInfo( + m_cameraId, + &node_group_info_dis, + bayerCropSize, + bdsSize, + previewW, previewH, + pictureW, pictureH); + + newFrame->storeNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex, zoomParamInfo); + newFrame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS, zoomParamInfo); + } +#endif + + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + + /* Per - ISP */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(-1, &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - SCP, SCC */ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum; i ++) { + /* HACK: 5260 driver has bug. So, must go though below code. 
this will be fixed */ + /* if (1) { */ + if (node_group_info.capture[i].request == 1) { + /* W */ + if (node_group_info.capture[i].output.cropRegion[2] > node_group_info.capture[i].input.cropRegion[2] * maxZoomRatio) { + node_group_info.capture[i].output.cropRegion[2] = node_group_info.capture[i].input.cropRegion[2] * maxZoomRatio; + } + /* H */ + if (node_group_info.capture[i].output.cropRegion[3] > node_group_info.capture[i].input.cropRegion[3] * maxZoomRatio) { + node_group_info.capture[i].output.cropRegion[3] = node_group_info.capture[i].input.cropRegion[3] * maxZoomRatio; + } + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + /* CLOGI("INFO(%s[%d]):fcount(%d)", __FUNCTION__, __LINE__, shot_ext_dst->shot.dm.request.frameCount); */ + /* newFrame->dumpNodeGroupInfo("ISP"); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeIspNodeGroupInfo", m_perframeIspNodeGroupInfo); */ + /* m_dumpPerframeNodeGroupInfo("m_perframeMainNodeGroupInfo", m_perframeMainNodeGroupInfo); */ + } + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + if (m_node[CAPTURE_NODE]->putBuffer(&buffer[CAPTURE_NODE]) != NO_ERROR) { + CLOGE("ERR(%s):capture putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + m_nodeRunningFrameList[CAPTURE_NODE][buffer[CAPTURE_NODE].index] = newFrame; + } + + if (m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + if (m_node[OUTPUT_NODE]->putBuffer(&buffer[OUTPUT_NODE]) != NO_ERROR) { + CLOGE("ERR(%s):output putBuffer fail ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + return ret; + } + + /* Old code is setDstBufferState on OUTPUT_NODE. very weird */ + /* + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + */ + + ret = newFrame->setSrcBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setSrcBuffer state fail", __FUNCTION__); + return ret; + } + + m_nodeRunningFrameList[OUTPUT_NODE][buffer[OUTPUT_NODE].index] = newFrame; + } + + /* + * setting m_runningFrameList set on head of function. + * OUTPUT_NODE has more priority than CAPTURE_NODE. + */ + if (0 <= runningFrameIndex) { + m_runningFrameList[runningFrameIndex] = newFrame; + m_numOfRunningFrame++; + } else { + CLOGE("ERR(%s[%d]):runningFrameIndex(%d) is weird. 
so, fail", __FUNCTION__, __LINE__, runningFrameIndex); + return INVALID_OPERATION; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeISP::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer buffer[MAX_NODE]; + ExynosCameraBuffer metaBuffer; + int v4l2Colorformat = 0; + int planeCount[MAX_NODE] = {0}; + int metaBufferPlaneCount = 0; + bool flagOutputNodeRunning = false; + camera2_shot_ext *shot_ext = NULL; + int perFramePos = 0; + + int index = 0; + status_t ret = NO_ERROR; + status_t error = NO_ERROR; + + + CLOGV("DEBUG(%s[%d]): -IN-", __FUNCTION__, __LINE__); + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + if (m_node[CAPTURE_NODE] != NULL && + m_isOtf(m_node[CAPTURE_NODE]->getInput()) == false) { + + ret = m_node[CAPTURE_NODE]->getBuffer(&buffer[CAPTURE_NODE], &index); + + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + return false; + } + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_node[CAPTURE_NODE]->getBuffer fail ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + error = ret; + } + + CLOGV("DEBUG(%s[%d]):index : %d", __FUNCTION__, __LINE__, index); + + if (0 <= buffer[CAPTURE_NODE].index) + m_nodeRunningFrameList[CAPTURE_NODE][buffer[CAPTURE_NODE].index] = NULL; + + metaBuffer = buffer[CAPTURE_NODE]; + + struct camera2_stream *stream = (struct camera2_stream *)(metaBuffer.addr[1]); + if (m_checkValidFrameCount(stream) == false) { + CLOGW("WARN(%s[%d]):m_checkValidFrameCount() fail. so, frame(cnt:%d)) skip", __FUNCTION__, __LINE__, stream->fcount); + error = INVALID_OPERATION; + } + } + + if (m_node[OUTPUT_NODE] != NULL && + m_isOtf(m_node[OUTPUT_NODE]->getInput()) == false) { + + ret = m_node[OUTPUT_NODE]->getBuffer(&buffer[OUTPUT_NODE], &index); + + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):getBuffer out, ret(%d)", __FUNCTION__, __LINE__, ret); + return false; + } + + if (0 <= buffer[OUTPUT_NODE].index) + m_nodeRunningFrameList[OUTPUT_NODE][buffer[OUTPUT_NODE].index] = NULL; + + metaBuffer = buffer[OUTPUT_NODE]; + + /* about meta */ + m_node[OUTPUT_NODE]->getColorFormat(&v4l2Colorformat, &planeCount[OUTPUT_NODE]); + metaBufferPlaneCount = planeCount[OUTPUT_NODE] - 1; + + shot_ext = (struct camera2_shot_ext *)(metaBuffer.addr[metaBufferPlaneCount]); + + flagOutputNodeRunning = true; + } + + if (ret != NO_ERROR || error != NO_ERROR || index < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + metaBuffer.index = index; + + if (shot_ext != NULL) + CLOGE("ERR(%s[%d]):Shot done invalid, frame(cnt:%d, index(%d)) skip", __FUNCTION__, __LINE__, getMetaDmRequestFrameCount(shot_ext), index); + else + CLOGE("ERR(%s[%d]):Shot done invalid, frame(index(%d)) skip", __FUNCTION__, __LINE__, index); + + /* complete frame */ + ret = m_completeFrame(&curFrame, metaBuffer, false); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + curFrame->setIspDone(true); + curFrame->setIspcDrop(true); + curFrame->setDisDrop(true); + curFrame->setScpDrop(true); + 
curFrame->setSccDrop(true); + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; + } + + if (flagOutputNodeRunning == true) { + ret = m_updateMetadataToFrame(metaBuffer.addr[metaBufferPlaneCount], metaBuffer.index); + if (ret < 0) { + CLOGE("ERR(%s[%d]): updateMetadataToFrame(%d) fail, ret(%d)", __FUNCTION__, __LINE__, metaBuffer.index, ret); + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&metaBuffer); + } + + CLOGV("DEBUG(%s[%d]):", __FUNCTION__, __LINE__); + + /* complete frame */ + ret = m_completeFrame(&curFrame, metaBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):complete frame fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return ret; + } + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]):curFrame is fail", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + if (curFrame != NULL && + shot_ext != NULL && + flagOutputNodeRunning == true) { + if (m_parameters->isOwnScc(m_cameraId) == true) { + if (m_reprocessing == true) + perFramePos = PERFRAME_REPROCESSING_SCC_POS; + else + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_SCC_POS : PERFRAME_FRONT_SCC_POS; + + if (curFrame->getRequest(PIPE_SCC) == true && shot_ext->node_group.capture[perFramePos].request == 0) { + CLOGE("ERR(%s[%d]: @@@@@@@@@@@@@@@@@@@@@@@ SCC Frame drop, halFrameCount(%d) metaFrameCount(%d)", + __FUNCTION__, __LINE__, curFrame->getFrameCount(), getMetaDmRequestFrameCount(shot_ext)); + curFrame->setRequest(PIPE_SCC, false); + curFrame->setSccDrop(true); + } + } else { + if (m_reprocessing == true) + perFramePos = PERFRAME_REPROCESSING_SCC_POS; + else + perFramePos = (m_cameraId == CAMERA_ID_BACK) ? PERFRAME_BACK_ISPC_POS : PERFRAME_FRONT_ISPC_POS; + + if (curFrame->getRequest(PIPE_ISPC) == true && shot_ext->node_group.capture[perFramePos].request == 0) { + CLOGE("ERR(%s[%d]: @@@@@@@@@@@@@@@@@@@@@@@ ISPC Frame drop, halFrameCount(%d) metaFrameCount(%d)", + __FUNCTION__, __LINE__, curFrame->getFrameCount(), getMetaDmRequestFrameCount(shot_ext)); + curFrame->setRequest(PIPE_ISPC, false); + curFrame->setIspcDrop(true); + } + } + + /* check for Scp frame drop */ + int perFramePos = (m_cameraId == CAMERA_ID_BACK) ? 
PERFRAME_BACK_SCP_POS : PERFRAME_FRONT_SCP_POS; + + if (shot_ext->node_group.capture[perFramePos].request == 0) { + CLOGW("WARN(%s[%d]: @@@@@@@@@@@@@@@@@@@@@@@ SCP Frame drop, halFrameCount(%d) metaFrameCount(%d)", + __FUNCTION__, __LINE__, curFrame->getFrameCount(), getMetaDmRequestFrameCount(shot_ext)); + curFrame->setScpDrop(true); + } + } + + if (flagOutputNodeRunning == true) + curFrame->setIspDone(true); + + /* Push to outputQ */ + if (m_outputFrameQ != NULL) { + m_outputFrameQ->pushProcessQ(&curFrame); + } else { + CLOGE("ERR(%s[%d]):m_outputFrameQ is NULL", __FUNCTION__, __LINE__); + } + + CLOGV("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCameraPipeISP::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + m_configDvfs(); + + ret = m_putBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]:m_putBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s[%d]:m_getBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + + return m_checkThreadLoop(); +} + +void ExynosCameraPipeISP::m_init(void) +{ +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeISP.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeISP.h new file mode 100644 index 0000000..b593934 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeISP.h @@ -0,0 +1,71 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_ISP_H +#define EXYNOS_CAMERA_PIPE_ISP_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeISP : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeISP() + { + m_init(); + } + + ExynosCameraPipeISP( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeISP(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t start(void); + virtual status_t stop(void); + + virtual status_t getPipeInfo(int *fullW, int *fullH, int *colorFormat, int pipePosition); + +protected: + virtual bool m_mainThreadFunc(void); + + /* HACK: ISP setInput have to be called after 3AA open */ + /* replaced by m_sensorIds[MAX_NODE] */ + /* uint32_t m_sensorId; */ + + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + +private: + void m_init(void); +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.cpp new file mode 100644 index 0000000..615c005 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.cpp @@ -0,0 +1,635 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeISPC" +#include + +#include "ExynosCameraPipeISPC.h" + +namespace android { + +ExynosCameraPipeISPC::~ExynosCameraPipeISPC() +{ + this->destroy(); +} + +status_t ExynosCameraPipeISPC::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_node[CAPTURE_NODE]->create("ISPC", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + + /* mainNode is CAPTURE_NODE */ + m_mainNodeNum = CAPTURE_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeISPC::m_mainThreadFunc, "ISPCThread"); + if (m_parameters->getUseDynamicScc()) { + m_inputFrameQ = new frame_queue_t(m_mainThread); + m_inputFrameQ->setWaitTime(200000000); /* .5 sec */ + } else { + m_inputFrameQ = new frame_queue_t; + m_inputFrameQ->setWaitTime(500000000); /* .5 sec */ + } + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (m_node[CAPTURE_NODE] != NULL) { + if (m_node[CAPTURE_NODE]->close() != NO_ERROR) { + CLOGE("ERR(%s):close fail", __FUNCTION__); + return INVALID_OPERATION; + } + delete m_node[CAPTURE_NODE]; + m_node[CAPTURE_NODE] = NULL; + + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + + m_mainNode = NULL; + m_mainNodeNum = -1; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::clearInputFrameQ(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (m_inputFrameQ != NULL) + m_inputFrameQ->release(); + if (&m_requestFrameQ != NULL) + m_requestFrameQ.release(); + + CLOGI("INFO(%s[%d]):clearInputFrameQ() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::setBoosting(bool isBoosting) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + m_isBoosting = isBoosting; + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + /* initialize node */ + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[0], + 2, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + /* TODO: check node state */ + /* stream on? */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_requestFrameQ.release(); + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::prepare(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCameraPipeISPC::m_checkThreadLoop(void) +{ + Mutex::Autolock lock(m_pipeframeLock); + bool loop = false; + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loop = true; + + if (m_inputFrameQ->getSizeOfProcessQ() == 0 && + m_numOfRunningFrame == 0) + loop = false; + + if (m_isReprocessing() == false) + loop = true; + +#if 0 + if (m_parameters->getUseDynamicScc() == false) + loop = true; +#endif + + return loop; +} + +bool ExynosCameraPipeISPC::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagStartPipe == false) { + /* waiting for pipe started */ + usleep(5000); + return m_checkThreadLoop(); + } + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_putBuffer(); + if (ret < 0) { + if (ret != TIMED_OUT) { + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } else if(m_isBoosting == true) { + CLOGW("WARN(%s):ISPC is boosting. 
m_putBuffer() again", __FUNCTION__); + /* On boosting, ISPC must wait the request frame before buffer DQ */ + return m_checkThreadLoop(); + } + } + + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + + return m_checkThreadLoop(); +} + +status_t ExynosCameraPipeISPC::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + int ret = 0; + entity_buffer_type_t entityBufType = ENTITY_BUFFER_INVALID; + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_dis; + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_node[CAPTURE_NODE]->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + ret = newFrame->getEntityBufferType(getPipeId(), &entityBufType); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + + /* check buffer index */ + if (newBuffer.index < 0) { + if (entityBufType != ENTITY_BUFFER_DELIVERY) { + CLOGE("ERR(%s[%d]): Entity buffer type is ENTITY_BUFFER_FIXED, but index (%d)", __FUNCTION__, __LINE__, newBuffer.index); + return BAD_VALUE; + } + CLOGV("DEBUG(%s[%d]): no buffer to QBUF", __FUNCTION__, __LINE__); + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + + /* When current frame has no buffer to queue and there is no buffer in ISPC, + the frame should not be requested to ISPC by ISP */ + if (newFrame->getRequest(getPipeId()) == true && m_numOfRunningFrame < 2) { + CLOGD("DEBUG(%s[%d]): requestISPC for frame(%d) is canceled. 
ISPC Q num(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), m_numOfRunningFrame); + + newFrame->setRequest(getPipeId(), false); + + /* isp */ + newFrame->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + node_group_info_isp.capture[PERFRAME_BACK_SCC_POS].request = false; + newFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + + /* dis */ + newFrame->getNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS); + node_group_info_dis.capture[PERFRAME_BACK_SCC_POS].request = false; + newFrame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS); + } + } else { + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[1]); + + if (shot_ext != NULL) { + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&newBuffer); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + camera2_node_group node_group_info; + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex); + + /* Per - Leader */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(-1, &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - Captures */ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum - 1; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + } + } + + m_nodeStateQAndDQ = m_nodeStateQAndDQ | (1 << newBuffer.index); + CLOGV("DEBUG(%s[%d]):index(%d) m_nodeStateQAndDQ(%d)", __FUNCTION__, __LINE__, newBuffer.index, m_nodeStateQAndDQ); + + ret = m_node[CAPTURE_NODE]->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + 
CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity buffer state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + return ret; + } + + if (entityBufType == ENTITY_BUFFER_FIXED || m_reprocessing == true) { + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setDstBuffer state fail", __FUNCTION__, __LINE__); + return ret; + } + } else { + newBuffer.index = -1; + ret = newFrame->setDstBuffer(getPipeId(), newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setDstBuffer state fail", __FUNCTION__, __LINE__); + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setDstBuffer state fail", __FUNCTION__, __LINE__); + return ret; + } + } + + m_numOfRunningFrame++; + } + + /* check request */ + if (entityBufType == ENTITY_BUFFER_FIXED || newFrame->getRequest(getPipeId()) == true) { + m_requestFrameQ.pushProcessQ(&newFrame); + CLOGV("DEBUG(%s[%d]): push reqeust Frame (frame cnt:%d, request cnt: %d)", __FUNCTION__, __LINE__, newFrame->getFrameCount(), m_requestFrameQ.getSizeOfProcessQ()); + } else { + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + CLOGV("DEBUG(%s):entity pipeId(%d), frameCount(%d), numOfRunningFrame(%d), requestCount(%d)", + __FUNCTION__, getPipeId(), newFrame->getFrameCount(), m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + + m_outputFrameQ->pushProcessQ(&newFrame); + } + + if (m_numOfRunningFrame < 1 && getPipeId() < MAX_PIPE_NUM && m_parameters->getUseDynamicScc() == true) { + CLOGW("DEBUG(%s):entity pipeId(%d), frameCount(%d),requestISPC(%d), numOfRunningFrame(%d), requestCount(%d)", + __FUNCTION__, getPipeId(), newFrame->getFrameCount(), newFrame->getRequest(getPipeId()), m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeISPC::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + bool foundMatchedFrame = false; + entity_buffer_type_t entityBufType = ENTITY_BUFFER_INVALID; + struct camera2_stream *shot_stream = NULL; + + CLOGV("DEBUG(%s[%d]: request frame size(%d), numOfRunningFrame(%d)", __FUNCTION__, __LINE__, m_requestFrameQ.getSizeOfProcessQ(), m_numOfRunningFrame); + + if (m_requestFrameQ.getSizeOfProcessQ() == 0) { + CLOGV("DEBUG(%s[%d]): no request", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + ret = m_node[CAPTURE_NODE]->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return ret; + } + + m_nodeStateQAndDQ = m_nodeStateQAndDQ & ~(1 << curBuffer.index); + CLOGV("DEBUG(%s[%d]):index(%d) m_nodeStateQAndDQ(%d)", __FUNCTION__, __LINE__, curBuffer.index, m_nodeStateQAndDQ); + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d) 
fail", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + /* set runningFrameList for completeFrame */ + do { + m_requestFrameQ.popProcessQ(&curFrame); + m_retry = false; + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]): curFrame is NULL, size of requestFrameQ(%d)", __FUNCTION__, __LINE__, m_requestFrameQ.getSizeOfProcessQ()); + if (m_requestFrameQ.getSizeOfProcessQ() == 0) { + m_putBuffer(); + CLOGW("WARN(%s[%d]):m_putBuffer() again, size(%d)", __FUNCTION__, __LINE__, m_requestFrameQ.getSizeOfProcessQ()); + m_retry = true; + } + continue; + } + + ret = curFrame->getEntityBufferType(getPipeId(), &entityBufType); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + entityBufType = ENTITY_BUFFER_INVALID; + continue; + } + + if (curFrame->getRequest(getPipeId()) == false) { + ret = curFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_SKIP); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGE("ERR(%s[%d]):frame drop pipeId(%d), frameCount(%d), numOfRunningFrame(%d), requestCount(%d)", + __FUNCTION__, __LINE__, getPipeId(), curFrame->getFrameCount(), m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + + curFrame->setRequest(getPipeId(), true); + if (entityBufType == ENTITY_BUFFER_DELIVERY) { + m_outputFrameQ->pushProcessQ(&curFrame); + if (m_requestFrameQ.getSizeOfProcessQ() == 0) { + m_putBuffer(); + CLOGW("WARN:m_putBuffer() again, size(%d)", m_requestFrameQ.getSizeOfProcessQ()); + m_retry = true; + } + continue; + } + } + + if (m_runningFrameList[curBuffer.index] != NULL) { + CLOGE("ERR(%s):new buffer is invalid, we already get buffer index(%d), curFrame->frameCount(%d)", + __FUNCTION__, index, curFrame->getFrameCount()); + m_dumpRunningFrameList(); + ret = BAD_VALUE; + goto lost_buffer; + } + + m_runningFrameList[curBuffer.index] = curFrame; + + /* If we found match frame, quit loop */ + foundMatchedFrame = true; + break; + } while (m_retry == true || (0 < m_requestFrameQ.getSizeOfProcessQ() && entityBufType == ENTITY_BUFFER_DELIVERY)); + + if (foundMatchedFrame == false || m_runningFrameList[curBuffer.index] == NULL) { + CLOGE("ERR(%s):buffer is invalid, ", + __FUNCTION__); + m_dumpRunningFrameList(); + ret = BAD_VALUE; + goto lost_buffer; + } + + if (entityBufType == ENTITY_BUFFER_DELIVERY) { + /* set frame for dynamic capture */ + ret = curFrame->setDstBuffer(getPipeId(), curBuffer); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer fail", __FUNCTION__); + goto lost_buffer; + } + } + + ret = curFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + goto lost_buffer; + } + + /* complete frame */ + ret = m_completeFrame(&curFrame, curBuffer); + if (ret < 0) { + CLOGE("ERR(%s):m_comleteFrame fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is fail", __FUNCTION__); + ret = BAD_VALUE; + goto lost_buffer; + } + + entity_buffer_state_t tempState; + curFrame->getDstBufferState(getPipeId(), &tempState); + + if (getPipeId() < MAX_PIPE_NUM && m_parameters->getUseDynamicScc() == true) { + CLOGD("DEBUG(%s[%d]): ISPC pipe output done, curFrameCount(%d) - index(%d), bufState(%d)", + __FUNCTION__, __LINE__, curFrame->getFrameCount(), curBuffer.index, tempState); + } + + m_outputFrameQ->pushProcessQ(&curFrame); + + return 
NO_ERROR; + +lost_buffer: + CLOGE("ERR(%s[%d]): FATAL : buffer(%d) will be lost!!!!", __FUNCTION__, __LINE__, curBuffer.index); + return ret; +} + +void ExynosCameraPipeISPC::m_init(void) +{ + m_metadataTypeShot = false; + m_dqFailCount = false; + m_retry = false; + m_nodeStateQAndDQ = 0; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.h new file mode 100644 index 0000000..b8359a8 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeISPC.h @@ -0,0 +1,71 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_ISPC_H +#define EXYNOS_CAMERA_PIPE_ISPC_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList<ExynosCameraFrame *> frame_queue_t; + +class ExynosCameraPipeISPC : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeISPC() + { + m_init(); + } + + ExynosCameraPipeISPC( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeISPC(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t clearInputFrameQ(void); + virtual status_t setBoosting(bool isBoosting); + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t prepare(void); + virtual bool m_checkThreadLoop(void); + +private: + virtual bool m_mainThreadFunc(void); + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + + frame_queue_t m_requestFrameQ; + int m_dqFailCount; + bool m_retry; + int32_t m_nodeStateQAndDQ; + +private: + void m_init(void); +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.cpp new file mode 100644 index 0000000..9d65ab4 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.cpp @@ -0,0 +1,318 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License.
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeJpeg" +#include + +#include "ExynosCameraPipeJpeg.h" + +/* For test */ +#include "ExynosCameraBuffer.h" + +namespace android { + +ExynosCameraPipeJpeg::~ExynosCameraPipeJpeg() +{ + this->destroy(); +} + +status_t ExynosCameraPipeJpeg::create(__unused int32_t *sensorIds) +{ + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeJpeg::m_mainThreadFunc, "JpegThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + CLOGI("INFO(%s[%d]):create() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeJpeg::destroy(void) +{ + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + if (m_shot_ext != NULL) { + delete m_shot_ext; + m_shot_ext = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeJpeg::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + /* TODO: check state ready for start */ + + return NO_ERROR; +} + +status_t ExynosCameraPipeJpeg::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_jpegEnc.destroy(); + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + return NO_ERROR; +} + +status_t ExynosCameraPipeJpeg::startThread(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + start(); + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeJpeg::m_run(void) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + status_t ret = 0; + ExynosCameraFrame *newFrame = NULL; + + ExynosCameraBuffer yuvBuf; + ExynosCameraBuffer jpegBuf; + + ExynosRect pictureRect; + ExynosRect thumbnailRect; + int jpegQuality = m_parameters->getJpegQuality(); + int thumbnailQuality = m_parameters->getThumbnailQuality(); + int jpegformat = (JPEG_INPUT_COLOR_FMT == V4L2_PIX_FMT_YUYV) ? 
V4L2_PIX_FMT_JPEG_422 : V4L2_PIX_FMT_JPEG_420; + + memset(m_shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + + exif_attribute_t exifInfo; + m_parameters->getFixedExifInfo(&exifInfo); + + pictureRect.colorFormat = m_parameters->getHwPictureFormat(); + pictureRect.colorFormat = JPEG_INPUT_COLOR_FMT; + + m_parameters->getPictureSize(&pictureRect.w, &pictureRect.h); + m_parameters->getThumbnailSize(&thumbnailRect.w, &thumbnailRect.h); + + CLOGD("DEBUG(%s[%d]):picture size(%dx%d), thumbnail size(%dx%d)", + __FUNCTION__, __LINE__, pictureRect.w, pictureRect.h, thumbnailRect.w, thumbnailRect.h); + + ALOGD("DEBUG(%s[%d]):wait JPEG pipe inputFrameQ", __FUNCTION__, __LINE__); + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + return NO_ERROR; + } + + ALOGD("DEBUG(%s[%d]):JPEG pipe inputFrameQ output done", __FUNCTION__, __LINE__); + + if (m_parameters->getHalVersion() == IS_HAL_VER_3_2) { + newFrame->getMetaData(m_shot_ext); + + /* JPEG Quality, Thumbnail Quality Setting */ + jpegQuality = (int) m_shot_ext->shot.ctl.jpeg.quality; + thumbnailQuality = (int) m_shot_ext->shot.ctl.jpeg.thumbnailQuality; + + /* JPEG Thumbnail Size Setting */ + thumbnailRect.w = m_shot_ext->shot.ctl.jpeg.thumbnailSize[0]; + thumbnailRect.h = m_shot_ext->shot.ctl.jpeg.thumbnailSize[1]; + } + ret = newFrame->getSrcBuffer(getPipeId(), &yuvBuf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + ret = newFrame->getDstBuffer(getPipeId(), &jpegBuf); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + if (m_jpegEnc.create()) { + CLOGE("ERR(%s):m_jpegEnc.create() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + m_jpegEnc.setExtScalerNum(m_parameters->getScalerNodeNumPicture()); + + if (m_jpegEnc.setQuality(jpegQuality)) { + CLOGE("ERR(%s[%d]):m_jpegEnc.setQuality() fail", __FUNCTION__, __LINE__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (m_jpegEnc.setSize(pictureRect.w, pictureRect.h)) { + CLOGE("ERR(%s):m_jpegEnc.setSize() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (m_jpegEnc.setColorFormat(pictureRect.colorFormat)) { + CLOGE("ERR(%s):m_jpegEnc.setColorFormat() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (m_jpegEnc.setJpegFormat(jpegformat)) { + CLOGE("ERR(%s):m_jpegEnc.setJpegFormat() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (thumbnailRect.w != 0 && thumbnailRect.h != 0) { + exifInfo.enableThumb = true; + if (pictureRect.w < 320 || pictureRect.h < 240) { + thumbnailRect.w = 160; + thumbnailRect.h = 120; + } + if (m_jpegEnc.setThumbnailSize(thumbnailRect.w, thumbnailRect.h)) { + CLOGE("ERR(%s):m_jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbnailRect.w, thumbnailRect.h); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + if (0 < thumbnailQuality && thumbnailQuality <= 100) { + if (m_jpegEnc.setThumbnailQuality(thumbnailQuality)) { + ret 
= INVALID_OPERATION; + CLOGE("ERR(%s):m_jpegEnc.setThumbnailQuality(%d) fail", __FUNCTION__, thumbnailQuality); + } + } + } else { + exifInfo.enableThumb = false; + } + + /* wait for medata update */ + if(newFrame->getMetaDataEnable() == false) { + CLOGD("DEBUG(%s[%d]): Waiting for update jpeg metadata failed (%d) ", __FUNCTION__, __LINE__, ret); + } + + /* get dynamic meters for make exif info */ + newFrame->getDynamicMeta(m_shot_ext); + newFrame->getUserDynamicMeta(m_shot_ext); + + m_parameters->setExifChangedAttribute(&exifInfo, &pictureRect, &thumbnailRect, &m_shot_ext->shot); + + if (m_jpegEnc.setInBuf((int *)&(yuvBuf.fd), (int *)yuvBuf.size)) { + CLOGE("ERR(%s):m_jpegEnc.setInBuf() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (m_jpegEnc.setOutBuf(jpegBuf.fd[0], jpegBuf.size[0] + jpegBuf.size[1] + jpegBuf.size[2])) { + CLOGE("ERR(%s):m_jpegEnc.setOutBuf() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (m_jpegEnc.updateConfig()) { + CLOGE("ERR(%s):m_jpegEnc.updateConfig() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + if (m_jpegEnc.encode((int *)&jpegBuf.size, &exifInfo, (char **)jpegBuf.addr, m_parameters->getDebugAttribute())) { + CLOGE("ERR(%s):m_jpegEnc.encode() fail", __FUNCTION__); + ret = INVALID_OPERATION; + goto jpeg_encode_done; + } + + newFrame->setJpegSize(jpegBuf.size[0]); + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + +jpeg_encode_done: + if (ret != NO_ERROR) { + CLOGD("[jpegBuf.fd[0] %d][jpegBuf.size[0] + jpegBuf.size[1] + jpegBuf.size[2] %d]", + jpegBuf.fd[0], jpegBuf.size[0] + jpegBuf.size[1] + jpegBuf.size[2]); + CLOGD("[pictureW %d][pictureH %d][pictureFormat %d]", + pictureRect.w, pictureRect.h, pictureRect.colorFormat); + } + + if (m_jpegEnc.flagCreate() == true) + m_jpegEnc.destroy(); + + CLOGI("DEBUG(%s[%d]): -OUT-", __FUNCTION__, __LINE__); + + return ret; +} + +bool ExynosCameraPipeJpeg::m_mainThreadFunc(void) +{ + int ret = 0; + + ret = m_run(); + if (ret < 0) { + if (ret == TIMED_OUT) + return true; + CLOGE("ERR(%s):m_run fail", __FUNCTION__); + /* TODO: doing exception handling */ + return false; + } + + /* one time */ + return m_checkThreadLoop(); +} + +void ExynosCameraPipeJpeg::m_init(void) +{ + m_reprocessing = 1; + m_csc = NULL; + m_shot_ext = new struct camera2_shot_ext; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.h new file mode 100644 index 0000000..e593df0 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeJpeg.h @@ -0,0 +1,73 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_JPEG_H +#define EXYNOS_CAMERA_PIPE_JPEG_H + +#include "ExynosCameraPipe.h" +/* #include "csc.h" */ +#include "ExynosJpegEncoderForCamera.h" + +#define CSC_MEMORY_TYPE CSC_MEMORY_DMABUF /* (CSC_MEMORY_USERPTR) */ + +namespace android { + +typedef ExynosCameraList<ExynosCameraFrame *> frame_queue_t; + +class ExynosCameraPipeJpeg : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeJpeg() + { + m_init(); + } + + ExynosCameraPipeJpeg( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeJpeg(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual status_t startThread(void); + +protected: + virtual status_t m_run(void); + +protected: + virtual bool m_mainThreadFunc(void); + +private: + void m_init(void); + +private: + void *m_csc; + ExynosJpegEncoderForCamera m_jpegEnc; + struct camera2_shot_ext *m_shot_ext; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.cpp new file mode 100644 index 0000000..5ffe65e --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.cpp @@ -0,0 +1,640 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License.
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeSCC" +#include + +#include "ExynosCameraPipeSCC.h" + +namespace android { + +ExynosCameraPipeSCC::~ExynosCameraPipeSCC() +{ + this->destroy(); +} + +status_t ExynosCameraPipeSCC::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_node[CAPTURE_NODE]->create("SCC", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + + /* mainNode is CAPTURE_NODE */ + m_mainNodeNum = CAPTURE_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeSCC::m_mainThreadFunc, "SCCThread"); + if (m_parameters->getUseDynamicScc()) { + m_inputFrameQ = new frame_queue_t(m_mainThread); + m_inputFrameQ->setWaitTime(200000000); /* .5 sec */ + } else { + m_inputFrameQ = new frame_queue_t; + m_inputFrameQ->setWaitTime(500000000); /* .5 sec */ + } + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):create() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::destroy(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (m_node[CAPTURE_NODE] != NULL) { + if (m_node[CAPTURE_NODE]->close() != NO_ERROR) { + CLOGE("ERR(%s):close fail", __FUNCTION__); + return INVALID_OPERATION; + } + delete m_node[CAPTURE_NODE]; + m_node[CAPTURE_NODE] = NULL; + + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + + m_mainNode = NULL; + m_mainNodeNum = -1; + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::clearInputFrameQ(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + if (m_inputFrameQ != NULL) + m_inputFrameQ->release(); + if (&m_requestFrameQ != NULL) + m_requestFrameQ.release(); + + CLOGI("INFO(%s[%d]):clearInputFrameQ() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::setBoosting(bool isBoosting) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + m_isBoosting = isBoosting; + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + int planecount = NUM_PLANES(V4L2_PIX_2_HAL_PIXEL_FORMAT(SCC_OUTPUT_COLOR_FMT)) + 1; + + /* initialize node */ + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[0], + planecount, YUV_FULL_RANGE, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + /* TODO: check node state */ + /* stream on? */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_requestFrameQ.release(); + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) prepare (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::prepare(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +bool ExynosCameraPipeSCC::m_checkThreadLoop(void) +{ + Mutex::Autolock lock(m_pipeframeLock); + bool loop = false; + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loop = true; + + if (m_inputFrameQ->getSizeOfProcessQ() == 0 && + m_numOfRunningFrame == 0) + loop = false; + + if (m_isReprocessing() == false) + loop = true; + +#if 0 + if (m_parameters->getUseDynamicScc() == false) + loop = true; +#endif + + return loop; +} + +bool ExynosCameraPipeSCC::m_mainThreadFunc(void) +{ + int ret = 0; + + if (m_flagStartPipe == false) { + /* waiting for pipe started */ + usleep(5000); + return m_checkThreadLoop(); + } + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_putBuffer(); + if (ret < 0) { + if (ret != TIMED_OUT) { + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } else if(m_isBoosting == true) { + CLOGW("WARN(%s):SCC is boosting. 
m_putBuffer() again", __FUNCTION__); + /* On boosting, SCC must wait the request frame before buffer DQ */ + return m_checkThreadLoop(); + } + } + + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return m_checkThreadLoop(); + } + + return m_checkThreadLoop(); +} + +status_t ExynosCameraPipeSCC::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + int ret = 0; + entity_buffer_type_t entityBufType = ENTITY_BUFFER_INVALID; + camera2_node_group node_group_info_isp; + camera2_node_group node_group_info_dis; + + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_node[CAPTURE_NODE]->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + ret = newFrame->getEntityBufferType(getPipeId(), &entityBufType); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return INVALID_OPERATION; + } + + /* check buffer index */ + if (newBuffer.index < 0) { + if (entityBufType != ENTITY_BUFFER_DELIVERY) { + CLOGE("ERR(%s[%d]): Entity buffer type is ENTITY_BUFFER_FIXED, but index (%d)", __FUNCTION__, __LINE__, newBuffer.index); + return BAD_VALUE; + } + CLOGV("DEBUG(%s[%d]): no buffer to QBUF", __FUNCTION__, __LINE__); + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + + /* When current frame has no buffer to queue and there is no buffer in SCC, + the frame should not be requested to SCC by ISP */ + if (newFrame->getRequest(getPipeId()) == true && m_numOfRunningFrame < 2) { + CLOGD("DEBUG(%s[%d]): requestSCC for frame(%d) is canceled. 
SCC Q num(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), m_numOfRunningFrame); + + newFrame->setRequest(getPipeId(), false); + + /* isp */ + newFrame->getNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + node_group_info_isp.capture[PERFRAME_BACK_SCC_POS].request = false; + newFrame->storeNodeGroupInfo(&node_group_info_isp, PERFRAME_INFO_ISP); + + /* dis */ + newFrame->getNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS); + node_group_info_dis.capture[PERFRAME_BACK_SCC_POS].request = false; + newFrame->storeNodeGroupInfo(&node_group_info_dis, PERFRAME_INFO_DIS); + } + } else { + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[1]); + + if (shot_ext != NULL) { + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&newBuffer); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == PERFRAME_NODE_TYPE_LEADER) { + camera2_node_group node_group_info; + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex); + + /* Per - Leader */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(-1, &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - Captures */ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum - 1; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + } + } + + m_nodeStateQAndDQ = m_nodeStateQAndDQ | (1 << newBuffer.index); + CLOGV("DEBUG(%s[%d]):index(%d) m_nodeStateQAndDQ(%d)", __FUNCTION__, __LINE__, newBuffer.index, m_nodeStateQAndDQ); + + ret = m_node[CAPTURE_NODE]->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + 
CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity buffer state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + return ret; + } + + if (entityBufType == ENTITY_BUFFER_FIXED || m_reprocessing == true) { + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setDstBuffer state fail", __FUNCTION__, __LINE__); + return ret; + } + } else { + newBuffer.index = -1; + ret = newFrame->setDstBuffer(getPipeId(), newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setDstBuffer state fail", __FUNCTION__, __LINE__); + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED); + if (ret < 0) { + CLOGE("ERR(%s[%d]): setDstBuffer state fail", __FUNCTION__, __LINE__); + return ret; + } + } + + m_numOfRunningFrame++; + } + + /* check request */ + if (entityBufType == ENTITY_BUFFER_FIXED || newFrame->getRequest(getPipeId()) == true) { + m_requestFrameQ.pushProcessQ(&newFrame); + CLOGV("DEBUG(%s[%d]): push reqeust Frame (frame cnt:%d, request cnt: %d)", __FUNCTION__, __LINE__, newFrame->getFrameCount(), m_requestFrameQ.getSizeOfProcessQ()); + } else { + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + CLOGV("DEBUG(%s):entity pipeId(%d), frameCount(%d), numOfRunningFrame(%d), requestCount(%d)", + __FUNCTION__, getPipeId(), newFrame->getFrameCount(), m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + + m_outputFrameQ->pushProcessQ(&newFrame); + } + + if (m_numOfRunningFrame < 1 && getPipeId() < MAX_PIPE_NUM && m_parameters->getUseDynamicScc() == true) { + CLOGW("DEBUG(%s):entity pipeId(%d), frameCount(%d),requestSCC(%d), numOfRunningFrame(%d), requestCount(%d)", + __FUNCTION__, getPipeId(), newFrame->getFrameCount(), newFrame->getRequest(getPipeId()), m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCC::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + bool foundMatchedFrame = false; + entity_buffer_type_t entityBufType = ENTITY_BUFFER_INVALID; + struct camera2_stream *shot_stream = NULL; + + CLOGV("DEBUG(%s[%d]: request frame size(%d), numOfRunningFrame(%d)", __FUNCTION__, __LINE__, m_requestFrameQ.getSizeOfProcessQ(), m_numOfRunningFrame); + + if (m_requestFrameQ.getSizeOfProcessQ() == 0) { + CLOGV("DEBUG(%s[%d]): no request", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + ret = m_node[CAPTURE_NODE]->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, __LINE__); + /* TODO: doing exception handling */ + return ret; + } + + m_nodeStateQAndDQ = m_nodeStateQAndDQ & ~(1 << curBuffer.index); + CLOGV("DEBUG(%s[%d]):index(%d) m_nodeStateQAndDQ(%d)", __FUNCTION__, __LINE__, curBuffer.index, m_nodeStateQAndDQ); + + if (index < 0) { + CLOGE("ERR(%s[%d]):Invalid index(%d) 
fail", __FUNCTION__, __LINE__, index); + return INVALID_OPERATION; + } + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + /* set runningFrameList for completeFrame */ + do { + m_requestFrameQ.popProcessQ(&curFrame); + m_retry = false; + + if (curFrame == NULL) { + CLOGE("ERR(%s[%d]): curFrame is NULL, size of requestFrameQ(%d)", __FUNCTION__, __LINE__, m_requestFrameQ.getSizeOfProcessQ()); + if (m_requestFrameQ.getSizeOfProcessQ() == 0) { + m_putBuffer(); + CLOGW("WARN(%s[%d]):m_putBuffer() again, size(%d)", __FUNCTION__, __LINE__, m_requestFrameQ.getSizeOfProcessQ()); + m_retry = true; + } + continue; + } + + ret = curFrame->getEntityBufferType(getPipeId(), &entityBufType); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + entityBufType = ENTITY_BUFFER_INVALID; + continue; + } + + if (curFrame->getRequest(getPipeId()) == false) { + ret = curFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_SKIP); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + } + + CLOGE("ERR(%s[%d]):frame drop pipeId(%d), frameCount(%d), numOfRunningFrame(%d), requestCount(%d)", + __FUNCTION__, __LINE__, getPipeId(), curFrame->getFrameCount(), m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + + curFrame->setRequest(getPipeId(), true); + if (entityBufType == ENTITY_BUFFER_DELIVERY) { + m_outputFrameQ->pushProcessQ(&curFrame); + if (m_requestFrameQ.getSizeOfProcessQ() == 0) { + m_putBuffer(); + CLOGW("WARN:m_putBuffer() again, size(%d)", m_requestFrameQ.getSizeOfProcessQ()); + m_retry = true; + } + continue; + } + } + + if (m_runningFrameList[curBuffer.index] != NULL) { + CLOGE("ERR(%s):new buffer is invalid, we already get buffer index(%d), curFrame->frameCount(%d)", + __FUNCTION__, index, curFrame->getFrameCount()); + m_dumpRunningFrameList(); + ret = BAD_VALUE; + goto lost_buffer; + } + + m_runningFrameList[curBuffer.index] = curFrame; + + /* If we found match frame, quit loop */ + foundMatchedFrame = true; + break; + } while (m_retry == true || (0 < m_requestFrameQ.getSizeOfProcessQ() && entityBufType == ENTITY_BUFFER_DELIVERY)); + + if (foundMatchedFrame == false || m_runningFrameList[curBuffer.index] == NULL) { + if (curFrame == NULL) { + CLOGE("ERR(%s):buffer is invalid, curFrame is NULL",__FUNCTION__); + } else { + CLOGE("ERR(%s):buffer is invalid, curFrame->frameCount(%d)", + __FUNCTION__, curFrame->getFrameCount()); + } + m_dumpRunningFrameList(); + ret = BAD_VALUE; + goto lost_buffer; + } + + if (entityBufType == ENTITY_BUFFER_DELIVERY) { + /* set frame for dynamic capture */ + ret = curFrame->setDstBuffer(getPipeId(), curBuffer); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer fail", __FUNCTION__); + goto lost_buffer; + } + } + + ret = curFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + goto lost_buffer; + } + + /* complete frame */ + ret = m_completeFrame(&curFrame, curBuffer); + if (ret < 0) { + CLOGE("ERR(%s):m_comleteFrame fail", __FUNCTION__); + /* TODO: doing exception handling */ + } + + if (curFrame == NULL) { + CLOGE("ERR(%s):curFrame is fail", __FUNCTION__); + ret = BAD_VALUE; + goto lost_buffer; + } + + entity_buffer_state_t tempState; + curFrame->getDstBufferState(getPipeId(), &tempState); + + if (getPipeId() < MAX_PIPE_NUM && m_parameters->getUseDynamicScc() == true) { + CLOGD("DEBUG(%s[%d]): SCC pipe output done, curFrameCount(%d) - index(%d), 
bufState(%d)", + __FUNCTION__, __LINE__, curFrame->getFrameCount(), curBuffer.index, tempState); + } + + m_outputFrameQ->pushProcessQ(&curFrame); + + return NO_ERROR; + +lost_buffer: + CLOGE("ERR(%s[%d]): FATAL : buffer(%d) will be lost!!!!", __FUNCTION__, __LINE__, curBuffer.index); + return ret; +} + +void ExynosCameraPipeSCC::m_init(void) +{ + m_metadataTypeShot = false; + m_dqFailCount = false; + m_retry = false; + m_nodeStateQAndDQ = 0; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.h new file mode 100644 index 0000000..5f653fd --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCC.h @@ -0,0 +1,71 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_SCC_H +#define EXYNOS_CAMERA_PIPE_SCC_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList<ExynosCameraFrame *> frame_queue_t; + +class ExynosCameraPipeSCC : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeSCC() + { + m_init(); + } + + ExynosCameraPipeSCC( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeSCC(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t clearInputFrameQ(void); + virtual status_t setBoosting(bool isBoosting); + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + virtual status_t prepare(void); + virtual bool m_checkThreadLoop(void); + +private: + virtual bool m_mainThreadFunc(void); + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + + frame_queue_t m_requestFrameQ; + int m_dqFailCount; + bool m_retry; + int32_t m_nodeStateQAndDQ; + +private: + void m_init(void); +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.cpp new file mode 100644 index 0000000..c6f6eb7 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.cpp @@ -0,0 +1,551 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License.
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeSCP" +#include + +#include "ExynosCameraPipeSCP.h" + +namespace android { + +#ifdef TEST_WATCHDOG_THREAD +int testErrorDetect = 0; +#endif + +ExynosCameraPipeSCP::~ExynosCameraPipeSCP() +{ + this->destroy(); +#ifdef TEST_WATCHDOG_THREAD + testErrorDetect = 0; +#endif +} + +status_t ExynosCameraPipeSCP::create(int32_t *sensorIds) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + for (int i = 0; i < MAX_NODE; i++) { + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds[%d] : %d", __FUNCTION__, __LINE__, i, sensorIds[i]); + m_sensorIds[i] = sensorIds[i]; + } else { + m_sensorIds[i] = -1; + } + } + + m_node[CAPTURE_NODE] = new ExynosCameraNode(); + ret = m_node[CAPTURE_NODE]->create("SCP", m_cameraId); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE create fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = m_node[CAPTURE_NODE]->open(m_nodeNum[CAPTURE_NODE]); + if (ret < 0) { + CLOGE("ERR(%s[%d]): CAPTURE_NODE open fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + CLOGD("DEBUG(%s):Node(%d) opened", __FUNCTION__, m_nodeNum[CAPTURE_NODE]); + + /* mainNode is CAPTURE_NODE */ + m_mainNodeNum = CAPTURE_NODE; + m_mainNode = m_node[m_mainNodeNum]; + + /* setInput for 54xx */ + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeSCP::m_mainThreadFunc, "SCPThread", PRIORITY_URGENT_DISPLAY); + + m_inputFrameQ = new frame_queue_t; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):create() is succeed (%d) setupPipe (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCP::destroy(void) +{ + ExynosCameraBuffer *dqBuffer = NULL; + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_node[CAPTURE_NODE] != NULL) { + if (m_node[CAPTURE_NODE]->close() != NO_ERROR) { + CLOGE("ERR(%s):close fail", __FUNCTION__); + return INVALID_OPERATION; + } + delete m_node[CAPTURE_NODE]; + m_node[CAPTURE_NODE] = NULL; + CLOGD("DEBUG(%s):Node(CAPTURE_NODE, m_nodeNum : %d, m_sensorIds : %d) closed", + __FUNCTION__, m_nodeNum[CAPTURE_NODE], m_sensorIds[CAPTURE_NODE]); + } + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + + m_mainNode = NULL; + m_mainNodeNum = -1; + + m_requestFrameQ.release(); + m_skipFrameQ.release(); + + prevDqBufferValid = false; + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCP::m_setPipeInfo(camera_pipe_info_t *pipeInfos) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + status_t ret = NO_ERROR; + + if (pipeInfos == NULL) { + CLOGE("ERR(%s[%d]): pipeInfos == NULL. 
so, fail", __FUNCTION__, __LINE__); + return INVALID_OPERATION; + } + + int setfile = 0; + int yuvRange = 0; + m_parameters->getSetfileYuvRange(m_reprocessing, &setfile, &yuvRange); + + /* initialize node */ + ret = m_setNodeInfo(m_node[CAPTURE_NODE], &pipeInfos[0], + 4, (enum YUV_RANGE)yuvRange, + true); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]): m_setNodeInfo(%d, %d, %d) fail", + __FUNCTION__, __LINE__, pipeInfos[0].rectInfo.fullW, pipeInfos[0].rectInfo.fullH, pipeInfos[0].bufInfo.count); + return INVALID_OPERATION; + } + + m_numBuffers = pipeInfos[0].bufInfo.count; + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCP::setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds) +{ + ExynosCameraBuffer *dqBuffer = NULL; + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + status_t ret = NO_ERROR; + /* TODO: check node state stream on? */ + + /* set new sensorId to m_sensorIds */ + if (sensorIds) { + CLOGD("DEBUG(%s[%d]):set new sensorIds", __FUNCTION__, __LINE__); + + for (int i = 0; i < MAX_NODE; i++) + m_sensorIds[i] = sensorIds[i]; + } + + ret = m_setInput(m_node, m_nodeNum, m_sensorIds); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setInput fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + if (pipeInfos) { + ret = m_setPipeInfo(pipeInfos); + if (ret < 0) { + CLOGE("ERR(%s[%d]): m_setPipeInfo fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } + + /* setfile setting */ +#ifdef SET_SETFILE_BY_SHOT + /* nop */ +#else +#if SET_SETFILE_BY_SET_CTRL_SCP + int setfile = 0; + int yuvRange = 0; + m_parameters->getSetfileYuvRange(m_reprocessing, &setfile, &yuvRange); + + /* colorRange set by setFormat */ + /* + ret = m_node[CAPTURE_NODE]->setControl(V4L2_CID_IS_COLOR_RANGE, yuvRange); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setControl(%d) fail(ret = %d)", __FUNCTION__, __LINE__, setfile, ret); + return ret; + } + */ +#endif +#endif + + for (uint32_t i = 0; i < m_numBuffers; i++) { + m_runningFrameList[i] = NULL; + } + m_numOfRunningFrame = 0; + + m_requestFrameQ.release(); + m_skipFrameQ.release(); + + prevDqBufferValid = false; + + m_prepareBufferCount = m_exynosconfig->current->pipeInfo.prepare[getPipeId()]; + CLOGI("INFO(%s[%d]):setupPipe() is succeed (%d) setupPipe (%d)", __FUNCTION__, __LINE__, getPipeId(), m_prepareBufferCount); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCP::m_checkPolling(void) +{ + int ret = 0; + + ret = m_node[CAPTURE_NODE]->polling(); + if (ret < 0) { + CLOGE("ERR(%s[%d]):polling fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + + m_threadState = ERROR_POLLING_DETECTED; + return ERROR_POLLING_DETECTED; + } + + return NO_ERROR; +} + +bool ExynosCameraPipeSCP::m_mainThreadFunc(void) +{ + int ret = 0; + +#ifdef TEST_WATCHDOG_THREAD + testErrorDetect++; + if (testErrorDetect == 100) + m_threadState = ERROR_POLLING_DETECTED; +#endif + + if (m_flagTryStop == true) + return true; + + if (m_numOfRunningFrame > 0) { + +#ifndef SKIP_SCHECK_POLLING + if (!prevDqBufferValid) + ret = m_checkPolling(); +#endif + if (ret < 0) { + CLOGE("ERR(%s[%d]):m_checkPolling fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + // HACK: for panorama shot + //return false; + } + + ret = m_getBuffer(); + if (ret < 0) { + CLOGE("ERR(%s):m_getBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return true; + } + } + + m_timer.stop(); + m_timeInterval = m_timer.durationMsecs(); + m_timer.start(); + + ret = m_putBuffer(); + if (ret < 0) { + if (ret == TIMED_OUT) 
+ return true; + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + /* TODO: doing exception handling */ + return true; + } + + /* update renew count */ + if (ret >= 0) + m_threadRenew = 0; + + return true; +} + +status_t ExynosCameraPipeSCP::m_putBuffer(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer newBuffer; + ExynosCameraFrame *skipFrame = NULL; + int ret = 0; + +retry: + newFrame = NULL; + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout, m_numOfRunningFrame: %d, m_requestFrameQSize: %d", + __FUNCTION__, m_numOfRunningFrame, m_requestFrameQ.getSizeOfProcessQ()); + m_node[CAPTURE_NODE]->dumpState(); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + return INVALID_OPERATION; + } + + if (m_skipFrameQ.getSizeOfProcessQ()) { + CLOGW("WARN(%s[%d]): DROP_SCP return skip buffer", __FUNCTION__, __LINE__); + skipFrame = NULL; + m_skipFrameQ.popProcessQ(&skipFrame); + if (skipFrame == NULL) { + CLOGE("ERR(%s):skipFrame is NULL", __FUNCTION__); + goto retry; + } + + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + if (ret != NO_ERROR) { + CLOGW("WRN(%s[%d]):Get destination buffer fail", __FUNCTION__, __LINE__); + } + + ret = skipFrame->setDstBuffer(getPipeId(), newBuffer); + if (ret != NO_ERROR) { + CLOGW("WRN(%s[%d]):Set destination buffer fail", __FUNCTION__, __LINE__); + } + + m_requestFrameQ.pushProcessQ(&newFrame); + + ret = skipFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGW("WRN(%s[%d]): setEntity state fail", __FUNCTION__, __LINE__); + } + + ret = skipFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_ERROR); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + goto retry; + } + + m_outputFrameQ->pushProcessQ(&skipFrame); + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + goto retry; + else + return NO_ERROR; + } + + ret = newFrame->getDstBuffer(getPipeId(), &newBuffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + /* check buffer index */ + if (newBuffer.index < 0) { + CLOGD("DEBUG(%s[%d]): no buffer to QBUF (%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_REQUESTED); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + CLOGV("DEBUG(%s):entity pipeId(%d), frameCount(%d), numOfRunningFrame(%d)", + __FUNCTION__, getPipeId(), newFrame->getFrameCount(), m_numOfRunningFrame); + + usleep(33000); + m_outputFrameQ->pushProcessQ(&newFrame); + + goto retry; + } else { + + camera2_shot_ext *shot_ext = (struct camera2_shot_ext *)(newBuffer.addr[2]); + + if (shot_ext != NULL) { + newFrame->getMetaData(shot_ext); + m_parameters->duplicateCtrlMetadata((void *)shot_ext); + m_activityControl->activityBeforeExecFunc(getPipeId(), (void *)&newBuffer); + + if (m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameNodeType == 
PERFRAME_NODE_TYPE_LEADER) { + camera2_node_group node_group_info; + memset(&shot_ext->node_group, 0x0, sizeof(camera2_node_group)); + newFrame->getNodeGroupInfo(&node_group_info, m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perframeInfoIndex); + + /* Per - Leader */ + if (node_group_info.leader.request == 1) { + + if (m_checkNodeGroupInfo(-1, &m_curNodeGroupInfo.leader, &node_group_info.leader) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(leader) fail", __FUNCTION__, __LINE__); + + setMetaNodeLeaderInputSize(shot_ext, + node_group_info.leader.input.cropRegion[0], + node_group_info.leader.input.cropRegion[1], + node_group_info.leader.input.cropRegion[2], + node_group_info.leader.input.cropRegion[3]); + setMetaNodeLeaderOutputSize(shot_ext, + node_group_info.leader.output.cropRegion[0], + node_group_info.leader.output.cropRegion[1], + node_group_info.leader.output.cropRegion[2], + node_group_info.leader.output.cropRegion[3]); + setMetaNodeLeaderRequest(shot_ext, + node_group_info.leader.request); + setMetaNodeLeaderVideoID(shot_ext, + m_perframeMainNodeGroupInfo.perFrameLeaderInfo.perFrameVideoID); + } + + /* Per - Captures */ + for (int i = 0; i < m_perframeMainNodeGroupInfo.perframeSupportNodeNum - 1; i ++) { + if (node_group_info.capture[i].request == 1) { + + if (m_checkNodeGroupInfo(i, &m_curNodeGroupInfo.capture[i], &node_group_info.capture[i]) != NO_ERROR) + CLOGW("WARN(%s[%d]): m_checkNodeGroupInfo(%d) fail", __FUNCTION__, __LINE__, i); + + setMetaNodeCaptureInputSize(shot_ext, i, + node_group_info.capture[i].input.cropRegion[0], + node_group_info.capture[i].input.cropRegion[1], + node_group_info.capture[i].input.cropRegion[2], + node_group_info.capture[i].input.cropRegion[3]); + setMetaNodeCaptureOutputSize(shot_ext, i, + node_group_info.capture[i].output.cropRegion[0], + node_group_info.capture[i].output.cropRegion[1], + node_group_info.capture[i].output.cropRegion[2], + node_group_info.capture[i].output.cropRegion[3]); + setMetaNodeCaptureRequest(shot_ext, i, node_group_info.capture[i].request); + setMetaNodeCaptureVideoID(shot_ext, i, m_perframeMainNodeGroupInfo.perFrameCaptureInfo[i].perFrameVideoID); + } + } + } + } + + ret = m_node[CAPTURE_NODE]->putBuffer(&newBuffer); + if (ret < 0) { + CLOGE("ERR(%s):putBuffer fail", __FUNCTION__); + return ret; + /* TODO: doing exception handling */ + } + m_numOfRunningFrame++; + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_PROCESSING); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + if (newFrame->getScpDrop()) { + CLOGW("WARN(%s): DROP_SCP Queue to skipFrameQ", __FUNCTION__); + m_skipFrameQ.pushProcessQ(&newFrame); + } else { + m_requestFrameQ.pushProcessQ(&newFrame); + } + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeSCP::m_getBuffer(void) +{ + ExynosCameraFrame *curFrame = NULL; + ExynosCameraBuffer curBuffer; + int index = -1; + int ret = 0; + int requestFrameQSize; + bool isFrameDropped = false; + + requestFrameQSize = m_requestFrameQ.getSizeOfProcessQ(); + if (m_numOfRunningFrame <= 0 || m_flagStartPipe == false || requestFrameQSize == 0) { + CLOGD("DEBUG(%s[%d]): skip getBuffer, flagStartPipe(%d), numOfRunningFrame = %d", __FUNCTION__, __LINE__, m_flagStartPipe, m_numOfRunningFrame); + return NO_ERROR; + } + + if (prevDqBufferValid) { + curBuffer = prevDqBuffer; + prevDqBufferValid = false; + } else { + ret = m_node[CAPTURE_NODE]->getBuffer(&curBuffer, &index); + if (ret < 0) { + CLOGE("ERR(%s[%d]):getBuffer fail", __FUNCTION__, 
__LINE__); + /* TODO: doing exception handling */ + return ret; + } + } + m_numOfRunningFrame--; + + m_activityControl->activityAfterExecFunc(getPipeId(), (void *)&curBuffer); + + do { + curFrame = NULL; + isFrameDropped = false; + ret = m_requestFrameQ.popProcessQ(&curFrame); + if (ret == TIMED_OUT || curFrame == NULL) { + CLOGW("WARN(%s[%d]): requestFrame is NULL", __FUNCTION__, __LINE__); + + /* preserve the dequeued buffer for next iteration */ + prevDqBuffer = curBuffer; + prevDqBufferValid = true; + m_numOfRunningFrame++; + return NO_ERROR; + } + + /* check whether this frame is dropped */ + isFrameDropped = curFrame->getScpDrop(); + if (isFrameDropped == true) + m_skipFrameQ.pushProcessQ(&curFrame); + + } while (isFrameDropped); + + ret = curFrame->setDstBuffer(getPipeId(), curBuffer); + if (ret < 0) + CLOGE("ERR(%s[%d]):set Dst buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + + ret = curFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return OK; + } + + CLOGV("DEBUG(%s):entity pipeId(%d), frameCount(%d)", + __FUNCTION__, getPipeId(), curFrame->getFrameCount()); + + ret = curFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE); + if (ret < 0) { + CLOGE("ERR(%s): setDstBuffer state fail", __FUNCTION__); + return ret; + } + + m_outputFrameQ->pushProcessQ(&curFrame); + return NO_ERROR; +} + +void ExynosCameraPipeSCP::m_init(void) +{ + m_metadataTypeShot = false; + prevDqBufferValid = false; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.h new file mode 100644 index 0000000..c9b92f1 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSCP.h @@ -0,0 +1,68 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
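ExynosCameraPipeSCP::m_getBuffer() above dequeues a buffer from the capture node and only then looks for a request frame to attach it to; when the request queue is momentarily empty it parks the buffer in prevDqBuffer, restores m_numOfRunningFrame, and retries on the next call. Below is a minimal sketch of that stash-and-retry idea; Buffer, ScpDequeuer and dequeueFromDriver() are hypothetical stand-ins, not HAL types.

#include <deque>
#include <optional>

struct Buffer { int index = -1; };   // stand-in for ExynosCameraBuffer

class ScpDequeuer {
public:
    // Mirrors m_getBuffer(): a buffer dequeued from the driver must not be lost
    // just because no request frame is waiting for it yet.
    std::optional<Buffer> matchNextRequest(std::deque<int> &requestFrames) {
        Buffer buf;
        if (m_stashValid) {                  // reuse the buffer preserved last time
            buf = m_stash;
            m_stashValid = false;
        } else {
            buf = dequeueFromDriver();       // the HAL does a V4L2 DQBUF on the capture node here
        }

        if (requestFrames.empty()) {         // nobody to hand the buffer to yet
            m_stash = buf;                   // park it, like prevDqBuffer
            m_stashValid = true;             // and prevDqBufferValid = true
            return std::nullopt;             // caller retries on the next iteration
        }

        requestFrames.pop_front();
        return buf;                          // caller attaches buf as the frame's dst buffer
    }

private:
    Buffer dequeueFromDriver() { return Buffer{0}; }   // placeholder

    Buffer m_stash;
    bool   m_stashValid = false;
};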
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_SCP_H +#define EXYNOS_CAMERA_PIPE_SCP_H + +#include "ExynosCameraPipe.h" + +/* #define TEST_WATCHDOG_THREAD */ + +namespace android { + +typedef ExynosCameraList<ExynosCameraFrame *> frame_queue_t; + +class ExynosCameraPipeSCP : protected virtual ExynosCameraPipe {
+public: + ExynosCameraPipeSCP() + { + m_init(); + } + + ExynosCameraPipeSCP( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeSCP(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t setupPipe(camera_pipe_info_t *pipeInfos, int32_t *sensorIds = NULL); + +protected: + virtual bool m_mainThreadFunc(void); + virtual status_t m_putBuffer(void); + virtual status_t m_getBuffer(void); + virtual status_t m_setPipeInfo(camera_pipe_info_t *pipeInfos); + +private: + void m_init(void); + status_t m_checkPolling(void); + + frame_queue_t m_requestFrameQ; + frame_queue_t m_skipFrameQ; + ExynosCameraBuffer prevDqBuffer; + bool prevDqBufferValid; +}; +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.cpp new file mode 100644 index 0000000..1f753b9 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.cpp @@ -0,0 +1,394 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
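ExynosCameraPipeSCP.h above relies on frame_queue_t, the HAL's thread-safe frame list: pushProcessQ() wakes a waiter, waitAndPopProcessQ() blocks with a timeout and reports TIMED_OUT, and release() drains the queue on stop. The sketch below illustrates that contract only; it is not the real ExynosCameraList, and the class name, timeout value and error constant are assumptions.

#include <chrono>
#include <condition_variable>
#include <deque>
#include <mutex>

static const int TIMED_OUT_SKETCH = -110;   // stand-in for the HAL's TIMED_OUT status

template <typename T>
class FrameQueueSketch {
public:
    void pushProcessQ(T *item) {
        { std::lock_guard<std::mutex> l(m_lock); m_q.push_back(*item); }
        m_cond.notify_one();                // wakes one waitAndPopProcessQ() caller
    }

    // Blocks up to the timeout, like waitAndPopProcessQ(); returns 0 on success.
    int waitAndPopProcessQ(T *out,
                           std::chrono::milliseconds timeout = std::chrono::milliseconds(500)) {
        std::unique_lock<std::mutex> l(m_lock);
        if (!m_cond.wait_for(l, timeout, [this] { return !m_q.empty(); }))
            return TIMED_OUT_SKETCH;
        *out = m_q.front();
        m_q.pop_front();
        return 0;
    }

    int getSizeOfProcessQ() {
        std::lock_guard<std::mutex> l(m_lock);
        return static_cast<int>(m_q.size());
    }

    void release() {                        // drop everything, used on stop()
        std::lock_guard<std::mutex> l(m_lock);
        m_q.clear();
    }

private:
    std::mutex m_lock;
    std::condition_variable m_cond;
    std::deque<T> m_q;
};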
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeSTK_PICTURE" +#include + +#include "ExynosCameraPipeSTK_PICTURE.h" + +namespace android { + +ExynosCameraPipeSTK_PICTURE::~ExynosCameraPipeSTK_PICTURE() +{ + this->destroy(); +} + +status_t ExynosCameraPipeSTK_PICTURE::create(__unused int32_t *sensorIds) +{ + if (bSTKInit == false) { + CLOGE("ERR(%s):STK_init() fail", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeSTK_PICTURE::m_mainThreadFunc, "STK_PICTUREThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + CLOGI("INFO(%s[%d]):create() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PICTURE::destroy(void) +{ + + int ret = 0; + + if (bSTKInit == false) { + return NO_ERROR; + } + + if (end_stk !=NULL) { + //ret = (*end_stk)(); + ret = (*end_stk)(m_stk_handle); + + if (ret < 0) { + CLOGE("ERR(%s):STK_PICTURE End fail", __FUNCTION__); + } else { + CLOGD("DEBUG(%s[%d]) STK_PICTURE End Success!", __FUNCTION__, __LINE__); + } + end_stk = NULL; + init_stk = NULL; + run_stk = NULL; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (stkHandle !=NULL) { + CLOGD("DEBUG(%s[%d]) STK_PICTURE Handle : %08x", __FUNCTION__, __LINE__, stkHandle); + dlclose(stkHandle); + stkHandle = NULL; + } + + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PICTURE::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + ExynosRect pictureRect; + + if (bSTKInit == false) { + m_parameters->getPictureSize(&pictureRect.w, &pictureRect.h); + + CLOGD("DEBUG(%s[%d]) PictureSize (%d x %d), scenario(%d)", + __FUNCTION__, __LINE__, pictureRect.w, pictureRect.h, STK_SCENARIO_CAPTURE); + m_stk_handle = (*init_stk)(pictureRect.w, pictureRect.h, STK_SCENARIO_CAPTURE); + + bSTKInit = true; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PICTURE::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + if (bSTKInit == false) { + CLOGD("DEBUG(%s[%d]): STK_PICTURE already deinit", __FUNCTION__, __LINE__); + return NO_ERROR; + } + + if (end_stk != NULL) { + ret = (*end_stk)(m_stk_handle); + + if (ret < 0) + CLOGE("ERR(%s):STK_PICTURE End fail", __FUNCTION__); + else + CLOGD("DEBUG(%s[%d]) STK_PICTURE End Success!", __FUNCTION__, __LINE__); + + bSTKInit = false; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PICTURE::startThread(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PICTURE::m_run(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer stk_in_Buffer; + ExynosRect pictureRect; + ExynosRect srcRect, dstRect; + int hwSensorWidth = 0; + int hwSensorHeight = 0; + long long 
durationTime = 0; + int ret = 0; + + m_parameters->getPictureSize(&pictureRect.w, &pictureRect.h); + + CLOGV("[ExynosCameraPipeSTK_PICTURE thread] waitFrameQ"); + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + return NO_ERROR; + } + + ret = newFrame->getSrcBuffer(getPipeId(), &stk_in_Buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + + newFrame->getUserDynamicMeta(&m_shot_ext); + + m_parameters->getHwSensorSize(&hwSensorWidth, &hwSensorHeight); + m_parameters->getPictureBayerCropSize(&srcRect, &dstRect); + + m_stkdynamicMeta.src_y = stk_in_Buffer.addr[0]; + m_stkdynamicMeta.width = pictureRect.w; + m_stkdynamicMeta.height = pictureRect.h; + + /* binning_x = (cropped_width * 1024) / capture_width + * binning_y = (cropped_height * 1024) / capture_height + */ + m_stkdynamicMeta.binning_x = (dstRect.w * 1024) / pictureRect.w; + m_stkdynamicMeta.binning_y = (dstRect.h * 1024) / pictureRect.h; + + m_stkdynamicMeta.radial_alpha_R = m_shot_ext.shot.udm.as.vendorSpecific[0]; + m_stkdynamicMeta.radial_alpha_G = (m_shot_ext.shot.udm.as.vendorSpecific[1] + m_shot_ext.shot.udm.as.vendorSpecific[2])/2; + m_stkdynamicMeta.radial_alpha_B = m_shot_ext.shot.udm.as.vendorSpecific[3]; + + CLOGV("DEBUG(%s[%d]):============= STK Dynamic Params===================", __FUNCTION__, __LINE__); + CLOGV("DEBUG(%s[%d]):= width : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.width); + CLOGV("DEBUG(%s[%d]):= height : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.height); + CLOGV("DEBUG(%s[%d]):= buffersize : %d", __FUNCTION__, __LINE__, stk_in_Buffer.size[0]); + CLOGV("DEBUG(%s[%d]):= BayerCropSize width : %d", __FUNCTION__, __LINE__, dstRect.w); + CLOGV("DEBUG(%s[%d]):= BayerCropSize height : %d", __FUNCTION__, __LINE__, dstRect.h); + CLOGV("DEBUG(%s[%d]):= binning_x : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.binning_x); + CLOGV("DEBUG(%s[%d]):= binning_y : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.binning_y); + CLOGV("DEBUG(%s[%d]):= radial_alpha_R : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.radial_alpha_R); + CLOGV("DEBUG(%s[%d]):= radial_alpha_G : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.radial_alpha_G); + CLOGV("DEBUG(%s[%d]):= radial_alpha_B : %d", __FUNCTION__, __LINE__, m_stkdynamicMeta.radial_alpha_B); + CLOGV("DEBUG(%s[%d]):===================================================", __FUNCTION__, __LINE__); + + CLOGI("DEBUG(%s[%d]): STK Processing call", __FUNCTION__, __LINE__); + +#if 0 + char buff[128]; + snprintf(buff, sizeof(buff), "/data/media/0/CameraHAL_jpeginput_%d.yuv", + m_shot_ext.shot.dm.request.frameCount); + ret = dumpToFile(buff, + stk_in_Buffer.addr[0], + stk_in_Buffer.size[0]); + if (ret != true) { + //mflag_dumped = false; + ALOGE("couldn't make a raw file"); + } + else { + //mflag_dumped = false; + ALOGI("Raw Bayer dump Success!"); + } +#endif + + int pixelformat = STK_YUYV; + int nv21Align = 0; + + if(m_parameters->getSeriesShotMode() == SERIES_SHOT_MODE_LLS + || m_parameters->getShotMode() == SHOT_MODE_RICH_TONE + || m_parameters->getShotMode() == SHOT_MODE_FRONT_PANORAMA + || m_parameters->getPictureFormat() == V4L2_PIX_FMT_NV21 + || m_parameters->getShotMode() == SHOT_MODE_OUTFOCUS) { + pixelformat = STK_NV21; + nv21Align 
= pictureRect.w * pictureRect.h; + } else { + pixelformat = STK_YUYV; + } + + if (run_stk !=NULL) { + m_timer.start(); + + if (pixelformat == STK_NV21) + m_thread_id = (*run_stk)(m_stk_handle, stk_in_Buffer.addr[0], stk_in_Buffer.addr[0] + nv21Align, pixelformat); + else + m_thread_id = (*run_stk)(m_stk_handle, stk_in_Buffer.addr[0], NULL, pixelformat); + + ret = pthread_join(*m_thread_id, NULL); + + m_timer.stop(); + durationTime = m_timer.durationMsecs(); + m_totalCaptureCount++; + m_totalProcessingTime += durationTime; + CLOGI("STK PICTURE Execution Time : (%5d msec), Average(%5d msec), Count=%d", + (int)durationTime, (int)(m_totalProcessingTime/m_totalCaptureCount), m_totalCaptureCount); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):STK run fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + } + + CLOGI("DEBUG(%s[%d]): STK Processing done", __FUNCTION__, __LINE__); + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + + return NO_ERROR; +} + +bool ExynosCameraPipeSTK_PICTURE::m_mainThreadFunc(void) +{ + int ret = 0; + bool loopFlag = false; + + CLOGI("[ExynosCameraPipeSTK_PICTURE] Enter m_mainThreadFunc"); + ret = m_run(); + if (ret < 0) { + if (ret != TIMED_OUT) + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + } + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loopFlag = true; + + return loopFlag; +} + +status_t ExynosCameraPipeSTK_PICTURE::m_init(int32_t *nodeNums) +{ + if (nodeNums == NULL) + m_stkNum = -1; + else + m_stkNum = nodeNums[0]; + + m_stk = NULL; + + /* + * Load the Stain-Killer libarry + * Initialize the Stain-Killer library + */ + bSTKInit = false; + hSTK_object = NULL; + stkHandle = NULL; + init_stk = NULL; + run_stk = NULL; + end_stk = NULL; + + memset(&m_shot_ext, 0x00, sizeof(struct camera2_shot_ext)); + + m_totalCaptureCount = 0; + m_totalProcessingTime = 0; + + char stk_lib_path[] = STK_LIBRARY_PATH; + + int ret = NO_ERROR; + ExynosRect pictureRect; + + stkHandle = dlopen(stk_lib_path, RTLD_NOW); + + if (stkHandle == NULL) { + ALOGE("ERR(%s[%d]): STK so handle is NULL : %s", __FUNCTION__, __LINE__, stk_lib_path); + return INVALID_OPERATION; + } + + //init_stk = (int(*)(STK_params *))dlsym(stkHandle, "stain_killer_init"); + init_stk = (void*(*)(int, int, enum stain_killer_scenario))dlsym(stkHandle, "stain_killer_init"); + + if ((dlerror()!= NULL) && (init_stk == NULL)) { + ALOGE("ERR(%s[%d]): exn_stk_init dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + run_stk = (pthread_t*(*)(void *, char *, char *, int))dlsym(stkHandle, "stain_killer_run"); + + if ((dlerror()!= NULL) && (run_stk == NULL)) { + ALOGE("ERR(%s[%d]): exn_stk_run dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + end_stk = (int(*)(void *))dlsym(stkHandle, "stain_killer_deinit"); + + if ((dlerror()!= NULL) && (end_stk == NULL)) { + ALOGE("ERR(%s[%d]): exn_stk_end dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + /* + * Call the Stain-Killer library initialization function. 
+ * + */ + + //ret = (*init_stk)(&m_stkdynamicMeta); + + m_parameters->getPictureSize(&pictureRect.w, &pictureRect.h); + + CLOGI("[ExynosCameraPipeSTK_PICTURE] PictureSize (%d x %d)", pictureRect.w, pictureRect.h); + + m_stk_handle = (*init_stk)(pictureRect.w, pictureRect.h, STK_SCENARIO_CAPTURE); + + CLOGI(" init_stk ret : %d", ret); + + return ret; + +CLEAN: + if (stkHandle != NULL) { + dlclose(stkHandle); + } + return INVALID_OPERATION; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.h new file mode 100644 index 0000000..2837857 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PICTURE.h @@ -0,0 +1,149 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_STK_PICTURE_H +#define EXYNOS_CAMERA_PIPE_STK_PICTURE_H + +#include +#include "ExynosCameraPipe.h" +#include "ExynosCameraAutoTimer.h" + +/* YUV Format Info : http://www.fourcc.org/yuv.php */ +enum stain_killer_image_format { + STK_NV21 = 0x3132564E, + STK_YUV2 = 0x32595559, + STK_YUYV = 0x56595559, +}; + +enum stain_killer_scenario { + STK_SCENARIO_CAPTURE = 0, + STK_SCENARIO_PREVIEW, + STK_SCENARIO_MAX_NUM +}; + +typedef struct STKDynamicMeta { + char *src_y; + char *src_u; + char *src_v; + int bittage; + int width; + int height; + int binning_y; + int binning_x; + int radial_alpha_R; + int radial_alpha_G; + int radial_alpha_B; + int radial_biquad_A; + int radial_biquad_B; + int radial_biquad_shift_adder; + int radial_center_x; + int radial_center_y; + int radial_green; + int radial_refine_enable; + int radial_refine_luma_min; + int radial_refine_luma_max; + int pedestal_R; + int pedestal_G; + int pedestal_B; + int desat_low_U; + int desat_high_U; + int desat_low_V; + int desat_high_V; + int desat_shift; + int desat_luma_max; + int desat_singleside; + int desat_luma_offset; + int desat_gain_offset; + int out_offset_R; + int out_offset_G; + int out_offset_B; +} STK_params; + +#define STK_LIBRARY_PATH "/system/lib/libstainkiller.so" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeSTK_PICTURE : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeSTK_PICTURE() + { + m_init(NULL); + } + + ExynosCameraPipeSTK_PICTURE( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + ALOGD("ExynosCameraPipeSTK_PICTURE Initialization Start!"); + if (m_init(nodeNums) < 0){ + + bSTKInit = false; + ALOGE("ExynosCameraPipeSTK_PICTURE Initialization failed!"); + + } + else { + bSTKInit = true; + ALOGD("ExynosCameraPipeSTK_PICTURE Initialization succeed!"); + + } + + } + + virtual ~ExynosCameraPipeSTK_PICTURE(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t start(void); + virtual status_t stop(void); + 
virtual status_t startThread(void); + +protected: + virtual status_t m_run(void); + virtual bool m_mainThreadFunc(void); + +private: + status_t m_init(int32_t *nodeNums); + +private: + int m_stkNum; + void *m_stk; + void *stkHandle; + void *m_stk_handle; + bool bSTKInit = false; + void *hSTK_object; + pthread_t *m_thread_id; + camera2_shot_ext m_shot_ext; + + void* (*init_stk)(int, int, enum stain_killer_scenario); + pthread_t* (*run_stk)(void*, char*, char*, int); + int (*end_stk)(void*); + + STK_params m_stkdynamicMeta; + ExynosCameraDurationTimer m_timer; + + int m_totalCaptureCount; + long long m_totalProcessingTime; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.cpp new file mode 100644 index 0000000..b27168c --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.cpp @@ -0,0 +1,358 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeSTK_PREVIEW" +#include + +#include "ExynosCameraPipeSTK_PREVIEW.h" + +namespace android { + +ExynosCameraPipeSTK_PREVIEW::~ExynosCameraPipeSTK_PREVIEW() +{ + this->destroy(); +} + +status_t ExynosCameraPipeSTK_PREVIEW::create(__unused int32_t *sensorIds) +{ + if (bSTKInit == false) { + CLOGE("ERR(%s):STK_PREVIEW_init() fail", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeSTK_PREVIEW::m_mainThreadFunc, "STK_PREVIEWThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + CLOGI("INFO(%s[%d]):create() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PREVIEW::destroy(void) +{ + + int ret = 0; + + if (bSTKInit == false) { + return NO_ERROR; + } + + if (end_stk !=NULL) { + //ret = (*end_stk)(); + ret = (*end_stk)(m_stk_handle); + + if (ret < 0) { + CLOGE("ERR(%s):STK_PREVIEW End fail", __FUNCTION__); + } else { + CLOGD("DEBUG(%s[%d]) STK_PREVIEW End Success!", __FUNCTION__, __LINE__); + } + end_stk = NULL; + init_stk = NULL; + run_stk = NULL; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (stkHandle !=NULL) { + CLOGD("DEBUG(%s[%d]) STK_PREVIEW Handle : %08x", __FUNCTION__, __LINE__, stkHandle); + dlclose(stkHandle); + stkHandle = NULL; + } + + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PREVIEW::start(void) +{ + ExynosRect previewRect; + + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (bSTKInit == false) { + m_parameters->getHwPreviewSize(&previewRect.w, &previewRect.h); + + CLOGI("DEBUG(%s[%d]) PreviewSize 
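The stain_killer_image_format enum in ExynosCameraPipeSTK_PICTURE.h above stores standard FourCC codes packed little-endian, first character in the lowest byte. The helper below reproduces the three constants; fourcc() is a local illustration, not part of the BSP.

#include <cstdint>

// Pack four ASCII characters into a FourCC value, first character in the low byte.
constexpr uint32_t fourcc(char a, char b, char c, char d)
{
    return  static_cast<uint32_t>(static_cast<unsigned char>(a))
         | (static_cast<uint32_t>(static_cast<unsigned char>(b)) << 8)
         | (static_cast<uint32_t>(static_cast<unsigned char>(c)) << 16)
         | (static_cast<uint32_t>(static_cast<unsigned char>(d)) << 24);
}

static_assert(fourcc('N', 'V', '2', '1') == 0x3132564E, "matches STK_NV21");
static_assert(fourcc('Y', 'U', 'Y', '2') == 0x32595559, "matches STK_YUV2");
static_assert(fourcc('Y', 'U', 'Y', 'V') == 0x56595559, "matches STK_YUYV");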
(%d x %d), scenario(%d)", + __FUNCTION__, __LINE__, previewRect.w, previewRect.h, STK_SCENARIO_PREVIEW); + m_stk_handle = (*init_stk)(previewRect.w, previewRect.h, STK_SCENARIO_PREVIEW); + bSTKInit = true; + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PREVIEW::stop(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + if (bSTKInit == false) { + return NO_ERROR; + } + + if (end_stk != NULL) { + //ret = (*end_stk)(); + ret = (*end_stk)(m_stk_handle); + + if (ret < 0) + CLOGE("ERR(%s):STK_PREVIEW End fail", __FUNCTION__); + else + CLOGD("DEBUG(%s[%d]) STK_PREVIEW End Success!", __FUNCTION__, __LINE__); + + bSTKInit = false; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PREVIEW::startThread(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeSTK_PREVIEW::m_run(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer nv21_STK_in_Buffer; + ExynosRect previewRect; + ExynosRect srcRect, dstRect; + int hwSensorWidth = 0; + int hwSensorHeight = 0; + long long durationTime = 0; + int ret = 0; + + m_parameters->getHwPreviewSize(&previewRect.w, &previewRect.h); + + CLOGV("[ExynosCameraPipeSTK_PREVIEW thread] waitFrameQ"); + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + return NO_ERROR; + } + + ret = newFrame->getSrcBuffer(getPipeId(), &nv21_STK_in_Buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + +#if 0 + camera2_shot_ext shot_ext; + // I could not check still that 'shot_ext.shot.dm.request.frameCount' is updated. 
+ newFrame->getUserDynamicMeta(&shot_ext); + + char buff[128]; + snprintf(buff, sizeof(buff), "/data/stk/CameraHAL_jpeginput_%d.nv1", + shot_ext.shot.dm.request.frameCount); + ret = dumpToFile2plane(buff, + nv21_STK_in_Buffer.addr[0], + nv21_STK_in_Buffer.addr[1], + nv21_STK_in_Buffer.size[0], + nv21_STK_in_Buffer.size[1]); + if (ret != true) { + //mflag_dumped = false; + ALOGE("couldn't make a raw file"); + } + else { + //mflag_dumped = false; + ALOGI("Raw Bayer dump Success!"); + } +#endif + + int pixelformat = STK_NV21; + int stkPreviewQ; + int seriesShotMode; + int availableBufferCountLimit = 4; + + stkPreviewQ = m_inputFrameQ->getSizeOfProcessQ(); + seriesShotMode = m_parameters->getSeriesShotMode(); + + if (run_stk != NULL) { + if (stkPreviewQ <= availableBufferCountLimit) { + CLOGI("INFO(%s[%d]):Start STK_Preview frameCount(%d), stkPreviewQ(%d), SeriesShotMode(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), stkPreviewQ, seriesShotMode); + + m_timer.start(); + + m_thread_id = (*run_stk)(m_stk_handle, nv21_STK_in_Buffer.addr[0], nv21_STK_in_Buffer.addr[1], pixelformat); + + ret = pthread_join(*m_thread_id, NULL); + + m_timer.stop(); + durationTime = m_timer.durationMsecs(); + CLOGI("STK Preview Execution Time : (%5d msec)", (int)durationTime); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):STK run fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + } else { + CLOGW("WARN(%s[%d]):Skip STK_Preview frameCount(%d), stkPreviewQ(%d), SeriesShotMode(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), stkPreviewQ, seriesShotMode); + } + } + + CLOGV("DEBUG(%s[%d]): STK Processing done", __FUNCTION__, __LINE__); + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = newFrame->setDstBufferState(getPipeId(), ENTITY_BUFFER_STATE_COMPLETE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setdst Buffer failed(%d) frame(%d)", __FUNCTION__, __LINE__, ret, newFrame->getFrameCount()); + return ret; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + + return NO_ERROR; +} + +bool ExynosCameraPipeSTK_PREVIEW::m_mainThreadFunc(void) +{ + int ret = 0; + bool loopFlag = false; + + CLOGV("[ExynosCameraPipeSTK] Enter m_mainThreadFunc"); + ret = m_run(); + if (ret < 0) { + if (ret != TIMED_OUT) + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + } + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loopFlag = true; + + return loopFlag; +} + +status_t ExynosCameraPipeSTK_PREVIEW::m_init(int32_t *nodeNums) +{ + if (nodeNums == NULL) + m_stkNum = -1; + else + m_stkNum = nodeNums[0]; + + m_stk = NULL; + + /* + * Load the Stain-Killer libarry + * Initialize the Stain-Killer library + */ + bSTKInit = false; + hSTK_object = NULL; + stkHandle = NULL; + init_stk = NULL; + run_stk = NULL; + end_stk = NULL; + + char stk_lib_path[] = STK_PREVIEW_LIBRARY_PATH; + + int ret = NO_ERROR; + ExynosRect previewRect; + + stkHandle = dlopen(stk_lib_path, RTLD_NOW); + + if (stkHandle == NULL) { + ALOGE("ERR(%s[%d]): STK so handle is NULL : %s", __FUNCTION__, __LINE__, stk_lib_path); + return INVALID_OPERATION; + } + + //init_stk = (int(*)(STK_params *))dlsym(stkHandle, "stain_killer_init"); + init_stk = (void*(*)(int, int, enum stain_killer_scenario))dlsym(stkHandle, "stain_killer_init"); + + if ((dlerror()!= NULL) && (init_stk == NULL)) { + ALOGE("ERR(%s[%d]): exn_stk_init dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + run_stk = (pthread_t*(*)(void *, 
char *, char*, int))dlsym(stkHandle, "stain_killer_run"); + + if ((dlerror()!= NULL) && (run_stk == NULL)) { + ALOGE("ERR(%s[%d]): exn_stk_run dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + end_stk = (int(*)(void *))dlsym(stkHandle, "stain_killer_deinit"); + + if ((dlerror()!= NULL) && (end_stk == NULL)) { + ALOGE("ERR(%s[%d]): exn_stk_end dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + /* + * Call the Stain-Killer library initialization function. + * + */ + + m_parameters->getHwPreviewSize(&previewRect.w, &previewRect.h); + CLOGV("[ExynosCameraPipeSTK_PREVIEW] PreviewSize (%d x %d)", previewRect.w, previewRect.h); + + m_stk_handle = (*init_stk)(previewRect.w, previewRect.h, STK_SCENARIO_PREVIEW); + CLOGV(" init_stk ret : %d", ret); + + return ret; + +CLEAN: + if (stkHandle != NULL) { + dlclose(stkHandle); + } + return INVALID_OPERATION; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.h new file mode 100644 index 0000000..6581008 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeSTK_PREVIEW.h @@ -0,0 +1,96 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_STK_PREVIEW_H +#define EXYNOS_CAMERA_PIPE_STK_PREVIEW_H + +#include +#include "ExynosCameraPipe.h" +#include "ExynosCameraAutoTimer.h" +#include "ExynosCameraPipeSTK_PICTURE.h" + +#define STK_PREVIEW_LIBRARY_PATH STK_LIBRARY_PATH + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeSTK_PREVIEW : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeSTK_PREVIEW() + { + m_init(NULL); + } + + ExynosCameraPipeSTK_PREVIEW( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + ALOGD("ExynosCameraPipeSTK_PREVIEW Initialization Start!"); + if (m_init(nodeNums) < 0){ + + bSTKInit = false; + ALOGE("ExynosCameraPipeSTK_PREVIEW Initialization failed!"); + + } + else { + bSTKInit = true; + ALOGD("ExynosCameraPipeSTK_PREVIEW Initialization succeed!"); + + } + + } + + virtual ~ExynosCameraPipeSTK_PREVIEW(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual status_t startThread(void); + +protected: + virtual status_t m_run(void); + virtual bool m_mainThreadFunc(void); + +private: + status_t m_init(int32_t *nodeNums); + +private: + int m_stkNum; + void *m_stk; + void *stkHandle; + void *m_stk_handle; + bool bSTKInit = false; + void *hSTK_object; + pthread_t *m_thread_id; + + void* (*init_stk)(int, int, enum stain_killer_scenario); + pthread_t* (*run_stk)(void*, char*, char*, int); + int (*end_stk)(void*); + + STK_params m_stkdynamicMeta; + ExynosCameraDurationTimer m_timer; + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.cpp new file mode 100644 index 0000000..2123249 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.cpp @@ -0,0 +1,316 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
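Both STK pipes above load libstainkiller.so at runtime and resolve stain_killer_init/run/deinit through dlsym(). A compact sketch of that loading pattern follows; the typedef'd signatures mirror the function-pointer declarations in the headers, the scenario enum is reduced to an int, and loadStainKiller() itself is illustrative rather than BSP code. Clearing dlerror() before the lookups and checking it once afterwards keeps the error handling simpler than testing each symbol separately.

#include <dlfcn.h>
#include <pthread.h>
#include <cstdio>

typedef void      *(*stk_init_t)(int, int, int);                // stain_killer_init(w, h, scenario)
typedef pthread_t *(*stk_run_t)(void *, char *, char *, int);   // stain_killer_run
typedef int        (*stk_end_t)(void *);                        // stain_killer_deinit

// Returns the dlopen() handle on success (caller dlclose()s it on teardown), NULL on failure.
static void *loadStainKiller(const char *path, stk_init_t *init, stk_run_t *run, stk_end_t *end)
{
    void *handle = dlopen(path, RTLD_NOW);
    if (handle == NULL) {
        fprintf(stderr, "dlopen(%s) failed: %s\n", path, dlerror());
        return NULL;
    }

    dlerror();   // clear any stale error before resolving symbols
    *init = reinterpret_cast<stk_init_t>(dlsym(handle, "stain_killer_init"));
    *run  = reinterpret_cast<stk_run_t>(dlsym(handle, "stain_killer_run"));
    *end  = reinterpret_cast<stk_end_t>(dlsym(handle, "stain_killer_deinit"));

    const char *err = dlerror();
    if (err != NULL || *init == NULL || *run == NULL || *end == NULL) {
        fprintf(stderr, "dlsym failed: %s\n", err != NULL ? err : "symbol resolved to NULL");
        dlclose(handle);
        return NULL;
    }
    return handle;
}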
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeUVS" +#include + +#include "ExynosCameraPipeUVS.h" + +namespace android { + +ExynosCameraPipeUVS::~ExynosCameraPipeUVS() +{ + this->destroy(); +} + +status_t ExynosCameraPipeUVS::create(int32_t *sensorIds) +{ + if (bUVSInit == false) { + CLOGE("ERR(%s):UVS_init() fail", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeUVS::m_mainThreadFunc, "UVSThread"); + + m_inputFrameQ = new frame_queue_t(m_mainThread); + + CLOGI("INFO(%s[%d]):create() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeUVS::destroy(void) +{ + + int ret = 0; + + if ( bUVSInit == false) { + return NO_ERROR; + } + + if (end_uvs !=NULL) { + ret = (*end_uvs)(); + + if (ret < 0) { + CLOGE("ERR(%s):UVS End fail", __FUNCTION__); + } + else { + CLOGD("DEBUG(%s[%d]) UVS End Success!", __FUNCTION__, __LINE__); + } + end_uvs = NULL; + init_uvs = NULL; + run_uvs = NULL; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (uvsHandle !=NULL) { + CLOGD("DEBUG(%s[%d]) uvsHandle : %08x", __FUNCTION__, __LINE__, uvsHandle); + dlclose(uvsHandle); + uvsHandle = NULL; + } + + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_inputFrameQ != NULL) { + m_inputFrameQ->release(); + delete m_inputFrameQ; + m_inputFrameQ = NULL; + } + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + CLOGI("INFO(%s[%d]):destroy() is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeUVS::start(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeUVS::stop(void) +{ + CLOGD("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + int ret = 0; + + m_mainThread->requestExitAndWait(); + + CLOGD("DEBUG(%s[%d]): thead exited", __FUNCTION__, __LINE__); + + m_inputFrameQ->release(); + + return NO_ERROR; +} + +status_t ExynosCameraPipeUVS::startThread(void) +{ + CLOGV("DEBUG(%s[%d])", __FUNCTION__, __LINE__); + + if (m_outputFrameQ == NULL) { + CLOGE("ERR(%s):outputFrameQ is NULL, cannot start", __FUNCTION__); + return INVALID_OPERATION; + } + + m_mainThread->run(); + + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + + return NO_ERROR; +} + +status_t ExynosCameraPipeUVS::m_run(void) +{ + ExynosCameraFrame *newFrame = NULL; + ExynosCameraBuffer yuv_UVS_in_Buffer; + ExynosCameraBuffer yuv_UVS_out_Buffer; + ExynosRect pictureRect; + ExynosRect srcRect, dstRect; + int hwSensorWidth = 0; + int hwSensorHeight = 0; + long long durationTime = 0; + int ret = 0; + + m_parameters->getPictureSize(&pictureRect.w, &pictureRect.h); + + CLOGI("[ExynosCameraPipeUVS thread] waitFrameQ"); + ret = m_inputFrameQ->waitAndPopProcessQ(&newFrame); + if (ret < 0) { + if (ret == TIMED_OUT) { + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + } + return ret; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):new frame is NULL", __FUNCTION__); + CLOGI("[ExynosCameraPipeUVS thread] new frame is NULL"); + return NO_ERROR; + } + + ret = newFrame->getSrcBuffer(getPipeId(), &yuv_UVS_in_Buffer); + if (ret < 0) { + CLOGE("ERR(%s[%d]):frame get src buffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + + camera2_shot_ext shot_ext; + + newFrame->getUserDynamicMeta(&shot_ext); + + m_parameters->getHwSensorSize(&hwSensorWidth, &hwSensorHeight); + 
m_parameters->getPictureBayerCropSize(&srcRect, &dstRect); + + m_uvsdynamicMeta.src_y = yuv_UVS_in_Buffer.addr[0]; + m_uvsdynamicMeta.width = pictureRect.w; + m_uvsdynamicMeta.height = pictureRect.h; + + /* binning_x = (cropped_width * 1024) / capture_width + * binning_y = (cropped_height * 1024) / capture_height + */ + m_uvsdynamicMeta.binning_x = (dstRect.w * 1024) / pictureRect.w; + m_uvsdynamicMeta.binning_y = (dstRect.h * 1024) / pictureRect.h; + + m_uvsdynamicMeta.radial_alpha_R = shot_ext.shot.udm.as.vendorSpecific[0]; + m_uvsdynamicMeta.radial_alpha_G = (shot_ext.shot.udm.as.vendorSpecific[1] + shot_ext.shot.udm.as.vendorSpecific[2])/2; + m_uvsdynamicMeta.radial_alpha_B = shot_ext.shot.udm.as.vendorSpecific[3]; + + CLOGD("DEBUG(%s[%d]):============= UVS Dynamic Params===================", __FUNCTION__, __LINE__); + CLOGD("DEBUG(%s[%d]):= width : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.width); + CLOGD("DEBUG(%s[%d]):= height : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.height); + CLOGD("DEBUG(%s[%d]):= buffersize : %d", __FUNCTION__, __LINE__, yuv_UVS_in_Buffer.size[0]); + CLOGD("DEBUG(%s[%d]):= BayerCropSize width : %d", __FUNCTION__, __LINE__, dstRect.w); + CLOGD("DEBUG(%s[%d]):= BayerCropSize height : %d", __FUNCTION__, __LINE__, dstRect.h); + CLOGD("DEBUG(%s[%d]):= binning_x : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.binning_x); + CLOGD("DEBUG(%s[%d]):= binning_y : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.binning_y); + CLOGD("DEBUG(%s[%d]):= radial_alpha_R : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.radial_alpha_R); + CLOGD("DEBUG(%s[%d]):= radial_alpha_G : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.radial_alpha_G); + CLOGD("DEBUG(%s[%d]):= radial_alpha_B : %d", __FUNCTION__, __LINE__, m_uvsdynamicMeta.radial_alpha_B); + CLOGD("DEBUG(%s[%d]):===================================================", __FUNCTION__, __LINE__); + + CLOGI("DEBUG(%s[%d]): UVS Processing call", __FUNCTION__, __LINE__); + + if (run_uvs !=NULL) { + m_timer.start(); + ret = (*run_uvs)(&m_uvsdynamicMeta); + m_timer.stop(); + durationTime = m_timer.durationMsecs(); + CLOGI("UVS Execution Time : (%5d msec)", (int)durationTime); + + if (ret < 0) { + CLOGE("ERR(%s[%d]):UVS run fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + } + + CLOGI("DEBUG(%s[%d]): UVS Processing done", __FUNCTION__, __LINE__); + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret < 0) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return OK; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + + return NO_ERROR; +} + +bool ExynosCameraPipeUVS::m_mainThreadFunc(void) +{ + int ret = 0; + bool loopFlag = false; + + CLOGI("[ExynosCameraPipeUVS] Enter m_mainThreadFunc"); + ret = m_run(); + if (ret < 0) { + if (ret != TIMED_OUT) + CLOGE("ERR(%s):m_putBuffer fail", __FUNCTION__); + } + + if (m_inputFrameQ->getSizeOfProcessQ() > 0) + loopFlag = true; + + return loopFlag; +} + +status_t ExynosCameraPipeUVS::m_init(int32_t *nodeNums) +{ + if (nodeNums == NULL) + m_uvsNum = -1; + else + m_uvsNum = nodeNums[0]; + + m_uvs = NULL; + + /* + * Load the UVSuppression libarry + * Initialize the UVSuppression library + */ + bUVSInit = false; + hUVS_object = NULL; + uvsHandle = NULL; + init_uvs = NULL; + run_uvs = NULL; + end_uvs = NULL; + + char uvs_lib_path[] = UVS_LIBRARY_PATH; + + int ret = NO_ERROR; + + uvsHandle = dlopen(uvs_lib_path, RTLD_NOW); + + if (uvsHandle == NULL) { + ALOGE("ERR(%s[%d]): UVS so handle is NULL : %s", __FUNCTION__, __LINE__, 
uvs_lib_path); + return INVALID_OPERATION; + } + + init_uvs = (int(*)(UVS_params *))dlsym(uvsHandle, "exn_uvs_init"); + + if ((dlerror()!= NULL) && (init_uvs == NULL)) { + ALOGE("ERR(%s[%d]): exn_uvs_init dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + run_uvs = (int(*)(UVS_params *))dlsym(uvsHandle, "exn_uvs_run"); + + if ((dlerror()!= NULL) && (run_uvs == NULL)) { + ALOGE("ERR(%s[%d]): exn_uvs_run dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + end_uvs = (int(*)())dlsym(uvsHandle, "exn_uvs_end"); + + if ((dlerror()!= NULL) && (end_uvs == NULL)) { + ALOGE("ERR(%s[%d]): exn_uvs_end dlsym error", __FUNCTION__, __LINE__); + goto CLEAN; + } + + /* + * Call the UVSuppression library initialization function. + * + */ + + ret = (*init_uvs)(&m_uvsdynamicMeta); + + CLOGI(" init_uvs ret : %d", ret); + + return ret; + +CLEAN: + if (uvsHandle != NULL) { + dlclose(uvsHandle); + } + return INVALID_OPERATION; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.h b/libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.h new file mode 100644 index 0000000..a6a95d0 --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeUVS.h @@ -0,0 +1,131 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
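ExynosCameraPipeUVS::m_run() above converts the bayer crop into Q10 fixed-point binning factors, binning = (cropped * 1024) / capture, so a value of 1024 means the crop matches the capture size. The sketch below only works the arithmetic with assumed sizes; binningQ10() and the example resolutions are illustrative, not taken from the BSP.

#include <cassert>

// Q10 ratio of cropped size to capture size, using the same integer division as the HAL code.
static int binningQ10(int croppedSize, int captureSize)
{
    return (croppedSize * 1024) / captureSize;
}

int main()
{
    // Assumed numbers for illustration: a 4128x3096 capture.
    assert(binningQ10(4128, 4128) == 1024);   // full-size crop -> unity ratio
    assert(binningQ10(2322, 3096) == 768);    // crop is 3/4 of the height -> 0.75 * 1024
    return 0;
}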
+*/ + +#ifndef EXYNOS_CAMERA_PIPE_UVS_H +#define EXYNOS_CAMERA_PIPE_UVS_H + +#include +#include "ExynosCameraPipe.h" +#include "ExynosCameraAutoTimer.h" + +typedef struct UVSDynamicMeta { + char *src_y; + char *src_u; + char *src_v; + int bittage; + int width; + int height; + int binning_y; + int binning_x; + int radial_alpha_R; + int radial_alpha_G; + int radial_alpha_B; + int radial_biquad_A; + int radial_biquad_B; + int radial_biquad_shift_adder; + int radial_center_x; + int radial_center_y; + int radial_green; + int radial_refine_enable; + int radial_refine_luma_min; + int radial_refine_luma_max; + int pedestal_R; + int pedestal_G; + int pedestal_B; + int desat_low_U; + int desat_high_U; + int desat_low_V; + int desat_high_V; + int desat_shift; + int desat_luma_max; + int desat_singleside; + int desat_luma_offset; + int desat_gain_offset; + int out_offset_R; + int out_offset_G; + int out_offset_B; +} UVS_params; + +#define UVS_LIBRARY_PATH "/vendor/lib/libexynosuvs.so" + +namespace android { + +typedef ExynosCameraList frame_queue_t; + +class ExynosCameraPipeUVS : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeUVS() + { + m_init(NULL); + } + + ExynosCameraPipeUVS( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + ALOGD("ExynosCameraPipeUVS Initialization Start!"); + if (m_init(nodeNums) < 0){ + + bUVSInit = false; + ALOGE("ExynosCameraPipeUVS Initialization failed!"); + + } + else { + bUVSInit = true; + ALOGD("ExynosCameraPipeUVS Initialization succeed!"); + + } + + } + + virtual ~ExynosCameraPipeUVS(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t start(void); + virtual status_t stop(void); + virtual status_t startThread(void); + +protected: + virtual status_t m_run(void); + virtual bool m_mainThreadFunc(void); + +private: + status_t m_init(int32_t *nodeNums); + +private: + int m_uvsNum; + void *m_uvs; + void *uvsHandle; + bool bUVSInit = false; + void *hUVS_object; + + int (*init_uvs)(UVS_params *); + int (*run_uvs)(UVS_params *); + int (*end_uvs)(); + + UVS_params m_uvsdynamicMeta; + ExynosCameraDurationTimer m_timer; + +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.cpp b/libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.cpp new file mode 100644 index 0000000..3c500bd --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.cpp @@ -0,0 +1,468 @@ +/* +** +** Copyright 2016, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraPipeVRA" +#include + +#include "ExynosCameraPipeVRA.h" +#include "ExynosCameraPipeGSC.h" + +namespace android { + +ExynosCameraPipeVRA::~ExynosCameraPipeVRA() +{ + this->destroy(); +} + +status_t ExynosCameraPipeVRA::create(int32_t *sensorIds) +{ + status_t ret = NO_ERROR; + + ret = ExynosCameraPipe::create(sensorIds); + + int32_t nodeNum[1] = {PREVIEW_GSC_NODE_NUM}; + m_gscPipe = (ExynosCameraPipe*)new ExynosCameraPipeGSC(m_cameraId, m_parameters, true, nodeNum); + m_gscPipe->setPipeId(PIPE_GSC_VRA); + m_gscPipe->setPipeName("PIPE_GSC_VRA"); + + ret = m_gscPipe->create(sensorIds); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Internal GSC Pipe creation fail!", __FUNCTION__, __LINE__); + return ret; + } + + m_gscThread = ExynosCameraThreadFactory::createThread(this, &ExynosCameraPipeVRA::m_gscThreadFunc, "gscThread"); + + m_gscFrameQ = new frame_queue_t; + m_gscFrameDoneQ = new frame_queue_t; + + return ret; +} + +status_t ExynosCameraPipeVRA::destroy(void) +{ + status_t ret = NO_ERROR; + + ret = ExynosCameraPipe::destroy(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Destroy fail!", __FUNCTION__, __LINE__); + + ret = m_gscPipe->destroy(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Internal GSC Pipe detstroy fail!", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCameraPipeVRA::stop(void) +{ + status_t ret = NO_ERROR; + + m_gscThread->requestExitAndWait(); + + m_gscFrameQ->release(); + m_gscFrameDoneQ->release(); + + ret = ExynosCameraPipe::stop(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Pipe stop fail!", __FUNCTION__, __LINE__); + + ret = m_gscPipe->stop(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Internal GSC Pipe stop fail!", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCameraPipeVRA::startThread(void) +{ + status_t ret = NO_ERROR; + + if (m_gscThread->isRunning() == false) { + ret = m_gscThread->run(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Internal GSC Pipe startThread fail!", __FUNCTION__, __LINE__); + else + CLOGI("INFO(%s[%d]):Internal GSC Pipe startThread is succeed", __FUNCTION__, __LINE__); + } else { + CLOGW("WRN(%s[%d]):Internal GSC Thread is already running", __FUNCTION__, __LINE__); + } + + return ret; +} + +status_t ExynosCameraPipeVRA::stopThread(void) +{ + status_t ret = NO_ERROR; + + m_gscThread->requestExit(); + m_gscFrameQ->sendCmd(WAKE_UP); + + ret = ExynosCameraPipe::stopThread(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):Internal GSC Pipe stopThread fail!", __FUNCTION__, __LINE__); + + return ret; +} + +status_t ExynosCameraPipeVRA::getInputFrameQ(frame_queue_t **inputFrameQ) +{ + *inputFrameQ = m_gscFrameQ; + + if (*inputFrameQ == NULL) + CLOGE("ERR(%s[%d])inputFrameQ is NULL", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +status_t ExynosCameraPipeVRA::m_runScaler(void) +{ + status_t ret = NO_ERROR; + ExynosCameraFrame *newFrame = NULL; + ExynosCameraFrame *doneFrame = NULL; + ExynosCameraBuffer srcBuf, dstBuf; + ExynosRect srcRect, dstRect; + struct camera2_stream *streamMeta = NULL; + uint32_t *mcscOutputCrop = NULL; + struct camera2_shot_ext *shot_ext; + + int dstBufIndex = -2; + int gscPipeId = PIPE_GSC_VRA; + + int waitCount = 0; + int vraWidth = 0, vraHeight = 0; + int32_t previewFormat = m_parameters->getHwPreviewFormat(); + m_parameters->getHwVraInputSize(&vraWidth, &vraHeight); + + while (m_gscFrameQ->getSizeOfProcessQ() > 1) { + ret = m_gscFrameQ->popProcessQ(&newFrame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):wait and pop fail, 
ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return INVALID_OPERATION; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + } + + ret = m_gscFrameQ->waitAndPopProcessQ(&newFrame); + if (m_flagTryStop == true) { + CLOGD("DEBUG(%s[%d]):m_flagTryStop(%d)", __FUNCTION__, __LINE__, m_flagTryStop); + goto FUNC_EXIT; + } + if (ret != NO_ERROR) { + /* TODO: We need to make timeout duration depends on FPS */ + if (ret == TIMED_OUT) { +#ifdef USE_CAMERA2_API_SUPPORT + /* + * TIMEOUT log print + * condition 1 : it is not reprocessing + * condition 2 : if it is reprocessing, but m_timeLogCount is equals or lower than 0 + */ + if (!(m_parameters->isReprocessing() == true && m_timeLogCount <= 0)) { + m_timeLogCount--; +#endif + CLOGW("WARN(%s):wait timeout", __FUNCTION__); + m_mainNode->dumpState(); +#ifdef USE_CAMERA2_API_SUPPORT + } +#endif + } else { + CLOGE("ERR(%s):wait and pop fail, ret(%d)", __FUNCTION__, ret); + /* TODO: doing exception handling */ + } + goto FUNC_EXIT; + } + + if (newFrame == NULL) { + CLOGE("ERR(%s):newFrame is NULL", __FUNCTION__); + goto FUNC_EXIT; + } + + /* Get scaler source buffer */ + ret = newFrame->getSrcBuffer(getPipeId(), &srcBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getSrcBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + goto FUNC_EXIT; + } + + shot_ext = (camera2_shot_ext*)srcBuf.addr[srcBuf.planeCount-1]; + entity_buffer_state_t srcBufferState; + ret = newFrame->getSrcBufferState(getPipeId(), &srcBufferState); + if (srcBuf.index < 0 + || shot_ext->fd_bypass == true + || srcBufferState == ENTITY_BUFFER_STATE_ERROR) { + if (m_mainThread->isRunning() == true) + m_mainThread->requestExit(); + + goto FUNC_EXIT; + } + + ret = newFrame->getDstBuffer(getPipeId(), &dstBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):getDstBuffer fail. 
pipeId(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, getPipeId(), newFrame->getFrameCount(), ret); + goto FUNC_EXIT; + } + + if (dstBuf.index < 0) { + ret = m_bufferManager[OUTPUT_NODE]->getBuffer(&dstBufIndex, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL, &dstBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):Buffer manager getBuffer fail, manager(%d), frameCount(%d), ret(%d)", + __FUNCTION__, __LINE__, OUTPUT_NODE, newFrame->getFrameCount(), ret); + goto FUNC_EXIT; + } + } + + /* Get size from metadata */ + streamMeta = (struct camera2_stream*)srcBuf.addr[srcBuf.planeCount-1]; + if (streamMeta == NULL) { + CLOGE("ERR(%s[%d]):srcBuf.addr is NULL, srcBuf.addr(0x%x)",__FUNCTION__, __LINE__, srcBuf.addr[srcBuf.planeCount-1]); + goto FUNC_EXIT; + } + + /* Set size to GSC frame */ + mcscOutputCrop = streamMeta->output_crop_region; + + if (mcscOutputCrop[2] <= 0 + || mcscOutputCrop[3] <= 0) { + CLOGE("ERR(%s[%d]):MCSC output crop is zero(w:%d, h:%d)", + __FUNCTION__, __LINE__, mcscOutputCrop[2], mcscOutputCrop[3]); + goto FUNC_EXIT; + } + + srcRect.x = 0; + srcRect.y = 0; + srcRect.w = mcscOutputCrop[2]; + srcRect.h = mcscOutputCrop[3]; + srcRect.fullW = mcscOutputCrop[2]; + srcRect.fullH = mcscOutputCrop[3]; + srcRect.colorFormat = previewFormat; + + dstRect.x = 0; + dstRect.y = 0; + dstRect.w = vraWidth; + dstRect.h = vraHeight; + dstRect.fullW = vraWidth; + dstRect.fullH = vraHeight; + dstRect.colorFormat = m_parameters->getHwVraInputFormat(); + + ret = newFrame->setSrcRect(gscPipeId, srcRect); + ret = newFrame->setDstRect(gscPipeId, dstRect); + + /* set buffers */ + ret = newFrame->setSrcBuffer(gscPipeId, srcBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setSrcBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, gscPipeId, ret); + goto FUNC_EXIT; + } + + ret = newFrame->setDstBuffer(gscPipeId, dstBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, gscPipeId, ret); + goto FUNC_EXIT; + } + + m_gscPipe->setOutputFrameQ(m_gscFrameDoneQ); + m_gscPipe->pushFrame(&newFrame); + + /* Wait and Pop frame from GSC output Q */ + CLOGV("INFO(%s[%d]):wait GSC output", __FUNCTION__, __LINE__); + + waitCount = 0; + do { + ret = m_gscFrameDoneQ->waitAndPopProcessQ(&doneFrame); + waitCount++; + + } while (ret == TIMED_OUT && waitCount < 10); + + if (ret != NO_ERROR) + CLOGW("WARN(%s[%d]):GSC wait and pop error, ret(%d)", __FUNCTION__, __LINE__, ret); + + if (doneFrame == NULL) { + CLOGE("ERR(%s[%d]):gscFrame is NULL", __FUNCTION__, __LINE__); + goto FUNC_EXIT; + } + + if (newFrame->getFrameCount() != doneFrame->getFrameCount()) { + CLOGW("WARN(%s[%d]):FrameCount mismatch, Push(%d) Pop(%d)", + __FUNCTION__, __LINE__, newFrame->getFrameCount(), doneFrame->getFrameCount()); + } + + CLOGV("INFO(%s[%d]):Get frame from GSC Pipe, frameCount(%d)", __FUNCTION__, __LINE__, newFrame->getFrameCount()); + + memcpy(dstBuf.addr[dstBuf.planeCount-1], srcBuf.addr[srcBuf.planeCount-1], dstBuf.size[dstBuf.planeCount-1]); + + camera2_node_group node_group_info; + memset(&node_group_info, 0x0, sizeof(camera2_node_group)); + + node_group_info.leader.request = 1; + node_group_info.leader.input.cropRegion[0] = 0; + node_group_info.leader.input.cropRegion[1] = 0; + node_group_info.leader.input.cropRegion[2] = vraWidth; + node_group_info.leader.input.cropRegion[3] = vraHeight; + node_group_info.leader.output.cropRegion[0] = 0; + node_group_info.leader.output.cropRegion[1] = 0; + node_group_info.leader.output.cropRegion[2] = 
node_group_info.leader.input.cropRegion[2]; + node_group_info.leader.output.cropRegion[3] = node_group_info.leader.input.cropRegion[3]; + + newFrame->storeNodeGroupInfo(&node_group_info, PERFRAME_INFO_VRA); + + ret = newFrame->setDstBuffer(getPipeId(), dstBuf); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):setDstBuffer fail, pipeId(%d), ret(%d)", __FUNCTION__, __LINE__, gscPipeId, ret); + goto FUNC_EXIT; + } + + m_inputFrameQ->pushProcessQ(&newFrame); + + if (m_mainThread->isRunning() == false) { + m_mainThread->run(); + CLOGI("INFO(%s[%d]):startThread is succeed (%d)", __FUNCTION__, __LINE__, getPipeId()); + } + + return NO_ERROR; + +FUNC_EXIT: + if (dstBuf.index >= 0) { + ret = m_bufferManager[OUTPUT_NODE]->putBuffer(dstBuf.index, EXYNOS_CAMERA_BUFFER_POSITION_IN_HAL); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):m_bufferManager[%d]->putbuffer() fail, index(%d), ret(%d)", + __FUNCTION__, __LINE__, OUTPUT_NODE, dstBuf.index, ret); + } + } + + if (newFrame != NULL) { + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return INVALID_OPERATION; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + } + + return NO_ERROR; +} + +status_t ExynosCameraPipeVRA::m_putBuffer(void) +{ + status_t ret = NO_ERROR; + ExynosCameraFrame *newFrame = NULL; + + while (m_inputFrameQ->getSizeOfProcessQ() > 1) { + ret = m_inputFrameQ->popProcessQ(&newFrame); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + return ret; + } + + ret = newFrame->setEntityState(getPipeId(), ENTITY_STATE_FRAME_DONE); + if (ret != NO_ERROR) { + CLOGE("ERR(%s[%d]):set entity state fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return INVALID_OPERATION; + } + + m_outputFrameQ->pushProcessQ(&newFrame); + } + + ret = ExynosCameraPipe::m_putBuffer(); + if (ret != NO_ERROR) + CLOGE("ERR(%s[%d]):m_putBuffer() fail!", __FUNCTION__, __LINE__); + + return ret; +} + +bool ExynosCameraPipeVRA::m_gscThreadFunc(void) +{ + status_t ret = NO_ERROR; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + /* + * Cycle is below + * m_gscFrameQ -> m_runScaler()(GSC) -> + * m_inputFrameQ -> m_putBuffer()(VRA) -> m_getBuffer()(VRA) -> + * m_outputFrameQ + */ + + ret = m_runScaler(); + if (ret != NO_ERROR) { + if (ret == TIMED_OUT) + return true; + + CLOGE("ERR(%s[%d]):m_runScaler fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return false; + } + + return true; +} + +bool ExynosCameraPipeVRA::m_mainThreadFunc(void) +{ + status_t ret = NO_ERROR; + + if (m_flagTryStop == true) { + usleep(5000); + return true; + } + + ret = m_putBuffer(); + if (ret != NO_ERROR) { + if (ret == TIMED_OUT) + return true; + + CLOGE("ERR(%s[%d]):m_putBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return false; + } + + ret = m_getBuffer(); + if (ret != NO_ERROR) { + if (ret == TIMED_OUT) + return true; + + CLOGE("ERR(%s[%d]):m_getBuffer fail, ret(%d)", __FUNCTION__, __LINE__, ret); + /* TODO: doing exception handling */ + return false; + } + + return m_checkThreadLoop(); +} + +void ExynosCameraPipeVRA::m_init(void) +{ + m_gscFrameQ = NULL; + m_gscFrameDoneQ = NULL; + m_gscPipe = NULL; +} + +}; /* namespace android */ diff --git a/libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.h 
b/libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.h new file mode 100644 index 0000000..bfc852a --- /dev/null +++ b/libcamera/common_v2/Pipes2/ExynosCameraPipeVRA.h @@ -0,0 +1,73 @@ +/* +** +** Copyright 2016, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_PIPE_VRA_H +#define EXYNOS_CAMERA_PIPE_VRA_H + +#include "ExynosCameraPipe.h" + +namespace android { + +typedef ExynosCameraList<ExynosCameraFrame *> frame_queue_t; + +class ExynosCameraPipeVRA : protected virtual ExynosCameraPipe { +public: + ExynosCameraPipeVRA() + { + m_init(); + } + + ExynosCameraPipeVRA( + int cameraId, + ExynosCameraParameters *obj_param, + bool isReprocessing, + int32_t *nodeNums) : ExynosCameraPipe(cameraId, obj_param, isReprocessing, nodeNums) + { + m_init(); + } + + virtual ~ExynosCameraPipeVRA(); + + virtual status_t create(int32_t *sensorIds = NULL); + virtual status_t destroy(void); + + virtual status_t stop(void); + + virtual status_t startThread(void); + virtual status_t stopThread(void); + + virtual status_t getInputFrameQ(frame_queue_t **inputFrameQ); + +protected: + virtual bool m_gscThreadFunc(void); + virtual bool m_mainThreadFunc(void); + virtual status_t m_runScaler(void); + virtual status_t m_putBuffer(void); + +private: + void m_init(void); + +protected: + sp<Thread> m_gscThread; + frame_queue_t *m_gscFrameQ; + frame_queue_t *m_gscFrameDoneQ; + ExynosCameraPipe *m_gscPipe; +}; + +}; /* namespace android */ + +#endif diff --git a/libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.cpp b/libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.cpp new file mode 100644 index 0000000..87c0dd5 --- /dev/null +++ b/libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.cpp @@ -0,0 +1,6559 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License.
+*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCamera3SensorInfoBase" +#include + +#include "ExynosCamera3SensorInfoBase.h" + +namespace android { + +#if 0//def SENSOR_NAME_GET_FROM_FILE +int g_rearSensorId = -1; +int g_frontSensorId = -1; +#endif + +ExynosCamera3SensorInfoBase::ExynosCamera3SensorInfoBase() : ExynosSensorInfoBase() +{ + /* implement AP chip variation */ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4128; + maxSensorH = 3096; + sensorMarginW = 16; + sensorMarginH = 10; + sensorMarginBase[LEFT_BASE] = 0; + sensorMarginBase[TOP_BASE] = 0; + sensorMarginBase[WIDTH_BASE] = 0; + sensorMarginBase[HEIGHT_BASE] = 0; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + + /* TODO : Where should we go? */ + minFps = 0; + maxFps = 30; + focusDistanceNum = 0; + focusDistanceDen = 0; + videoSnapshotSupport = true; + + maxBasicZoomLevel = MAX_BASIC_ZOOM_LEVEL; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + visionModeSupport = false; + drcSupport = false; + + /* flite->3aa otf support */ + flite3aaOtfSupport = true; + + rearPreviewListMax = 0; + frontPreviewListMax = 0; + rearPictureListMax = 0; + frontPictureListMax = 0; + hiddenRearPreviewListMax = 0; + hiddenFrontPreviewListMax = 0; + hiddenRearPictureListMax = 0; + hiddenFrontPictureListMax = 0; + thumbnailListMax = 0; + rearVideoListMax = 0; + frontVideoListMax = 0; + hiddenRearVideoListMax = 0; + hiddenFrontVideoListMax = 0; + highSpeedVideoListMax = 0; + rearFPSListMax = 0; + frontFPSListMax = 0; + hiddenRearFPSListMax = 0; + hiddenFrontFPSListMax = 0; + highSpeedVideoFPSListMax = 0; + + rearPreviewList = NULL; + frontPreviewList = NULL; + rearPictureList = NULL; + frontPictureList = NULL; + hiddenRearPreviewList = NULL; + hiddenFrontPreviewList = NULL; + hiddenRearPictureList = NULL; + hiddenFrontPictureList = NULL; + thumbnailList = NULL; + rearVideoList = NULL; + frontVideoList = NULL; + hiddenRearVideoList = NULL; + hiddenFrontVideoList = NULL; + highSpeedVideoList = NULL; + rearFPSList = NULL; + frontFPSList = NULL; + hiddenRearFPSList = NULL; + hiddenFrontFPSList = NULL; + highSpeedVideoFPSList = NULL; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + fastAeStableLutMax = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + dualPreviewSizeLut = NULL; + dualVideoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + + vtcallSizeLut = NULL; + fastAeStableLut = NULL; + + sizeTableSupport = false; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android 
Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = NULL; + colorAberrationModesLength = 0; + + /* Android Control Static Metadata */ + antiBandingModes = NULL; + aeModes = NULL; + exposureCompensationRange[MIN] = -2; + exposureCompensationRange[MAX] = 2; + exposureCompensationStep = 1.0f; + afModes = NULL; + effectModes = NULL; + sceneModes = NULL; + videoStabilizationModes = NULL; + awbModes = NULL; + controlModes = NULL; + controlModesLength = 0; + max3aRegions[AE] = 0; + max3aRegions[AWB] = 0; + max3aRegions[AF] = 0; + sceneModeOverrides = NULL; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE; + antiBandingModesLength = 0; + aeModesLength = 0; + afModesLength = 0; + effectModesLength = 0; + sceneModesLength = 0; + videoStabilizationModesLength = 0; + awbModesLength = 0; + sceneModeOverridesLength = 0; + + /* Android Edge Static Metadata */ + edgeModes = NULL; + edgeModesLength = 0; + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = NULL; + hotPixelModesLength = 0; + + /* Android Lens Static Metadata */ + aperture = 2.2f; + fNumber = 2.2f; + filterDensity = 0.0f; + focalLength = 4.8f; + focalLengthIn35mmLength = 31; + opticalStabilization = NULL; + hyperFocalDistance = 0.0f; + minimumFocusDistance = 0.0f; + shadingMapSize[WIDTH] = 0; + shadingMapSize[HEIGHT] = 0; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED; + lensFacing = ANDROID_LENS_FACING_BACK; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 0.0f; + lensPosition[Z_3D] = 0.0f; + opticalStabilizationLength = 0; + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = NULL; + noiseReductionModesLength = 0; + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + capabilities = NULL; + requestKeys = NULL; + resultKeys = NULL; + characteristicsKeys = NULL; + capabilitiesLength = 0; + requestKeysLength = 0; + resultKeysLength = 0; + characteristicsKeysLength = 0; + + /* Android Scaler Static Metadata */ + zoomSupport = false; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = NULL; + croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + stallDurationsLength = 0; + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = 
BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = NULL; + testPatternModesLength = 0; + + /* Android Statistics Static Metadata */ + faceDetectModes = NULL; + histogramBucketCount = 64; + maxNumDetectedFaces = 1; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = NULL; + faceDetectModesLength = 0; + hotPixelMapModesLength = 0; + lensShadingMapModes = NULL; + lensShadingMapModesLength = 0; + shadingAvailableModes = NULL; + shadingAvailableModesLength = 0; + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = NULL; + toneMapModesLength = 0; + + /* Android LED Static Metadata */ + leds = NULL; + ledsLength = 0; + + /* Android Info Static Metadata */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY; + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +#if 0 +ExynosCamera3SensorS5K2P2Base::ExynosCamera3SensorS5K2P2Base() : ExynosCamera3SensorInfoBase() +{ + /* Sensor Max Size Infos */ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + sensorMarginBase[LEFT_BASE] = 2; + sensorMarginBase[TOP_BASE] = 2; + sensorMarginBase[WIDTH_BASE] = 4; + sensorMarginBase[HEIGHT_BASE] = 4; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + /* Sensor FOV Infos */ + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + + /* TODO : Where should we go? 
*/ + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + /* Hal1 info - prevent setparam fail */ + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + /* Hal1 info - prevent setparam fail */ + antiBandingList = + ANTIBANDING_AUTO + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_FIXED + | FOCUS_MODE_INFINITY + ; + + sceneModeList = + SCENE_MODE_AUTO + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + ; + + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P2_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P2) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_2P2_BNS; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_2P2_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_2P2; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_2P2_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2P2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_2P2_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2P2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P2_PREVIEW_LIST; + 
rearPictureList = S5K2P2_PICTURE_LIST; + hiddenRearPreviewList = S5K2P2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P2_THUMBNAIL_LIST; + rearVideoList = S5K2P2_VIDEO_LIST; + hiddenRearVideoList = S5K2P2_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2P2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P2_HIDDEN_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 0 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = 
ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.85f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 4.3f; + focalLengthIn35mmLength = 28; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.1f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 
4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; +#endif + +ExynosCamera3SensorS5K2P2_12MBase::ExynosCamera3SensorS5K2P2_12MBase() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 4608; + maxPictureH = 2592; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 4624; + maxSensorH = 2604; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 480; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P2_12M) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P2_12M) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + 
videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_2P2_12M_BNS; + videoSizeLut = VIDEO_SIZE_LUT_2P2_12M; + videoSizeBnsLut = VIDEO_SIZE_LUT_2P2_12M_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2P2_12M; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_12M_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_12M_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_2P2_12M_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P2_12M_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P2_12M_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P2_12M_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P2_12M_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P2_12M_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P2_12M_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P2_12M_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2P2_12M_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P2_12M_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P2_12M_PREVIEW_LIST; + rearPictureList = S5K2P2_12M_PICTURE_LIST; + hiddenRearPreviewList = S5K2P2_12M_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P2_12M_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P2_12M_THUMBNAIL_LIST; + rearVideoList = S5K2P2_12M_VIDEO_LIST; + hiddenRearVideoList = S5K2P2_12M_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2P2_12M_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P2_12M_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorS5K2P3Base::ExynosCamera3SensorS5K2P3Base() : ExynosCamera3SensorInfoBase() +{ + +}; + +ExynosCamera3SensorS5K2T2Base::ExynosCamera3SensorS5K2T2Base() : ExynosCamera3SensorInfoBase() +{ + /* Sensor Max Size Infos */ +#if defined(ENABLE_13MP_FULL_FRAME) + maxPreviewW = 4800; + maxPreviewH = 2700; +#else + maxPreviewW = 3840; + maxPreviewH = 2160; +#endif + maxPictureW = 5952; + maxPictureH = 3348; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5968; + maxSensorH = 3368; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + /* Sensor FOV Infos */ + horizontalViewAngle[SIZE_RATIO_16_9] = 68.13f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + + /* TODO : Where should we go? 
*/ + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + /* Hal1 info - prevent setparam fail */ + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + /* Hal1 info - prevent setparam fail */ + antiBandingList = + ANTIBANDING_AUTO + ; + + effectList = + EFFECT_NONE + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_FIXED + | FOCUS_MODE_INFINITY + ; + + sceneModeList = + SCENE_MODE_AUTO + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + ; + + bnsSupport = true; + + if (bnsSupport == true) { +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2T2) / (sizeof(int) * SIZE_OF_LUT); +#endif +#ifdef ENABLE_13MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2T2_13MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#elif defined(ENABLE_8MP_FULL_FRAME) + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2T2_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2T2) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2T2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_2T2_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_2T2; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_2T2_BNS; +#ifdef ENABLE_13MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_2T2_13MP_FULL; +#elif defined(ENABLE_8MP_FULL_FRAME) + videoSizeLut = VIDEO_SIZE_LUT_2T2_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_2T2; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_2T2_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2T2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2T2_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2T2_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_2T2_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2T2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2T2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2T2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2T2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = 
sizeof(S5K2T2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2T2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2T2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2T2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2T2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2T2_PREVIEW_LIST; + rearPictureList = S5K2T2_PICTURE_LIST; + hiddenRearPreviewList = S5K2T2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2T2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2T2_THUMBNAIL_LIST; + rearVideoList = S5K2T2_VIDEO_LIST; + hiddenRearVideoList = S5K2T2_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2T2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2T2_HIDDEN_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + 
default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.27f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 4.3f; + focalLengthIn35mmLength = 28; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.05f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + 
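The mode tables and their *Length fields above are always set in pairs via ARRAY_LENGTH. A minimal sketch of that idiom, assuming ARRAY_LENGTH is the usual element-count macro (the AVAILABLE_AWB_MODES_EXAMPLE table below is hypothetical, not taken from this patch):

// Sketch only: ARRAY_LENGTH assumed to be sizeof(array)/sizeof(element).
#include <cstddef>
#include <cstdio>

#define ARRAY_LENGTH(x) (sizeof(x) / sizeof((x)[0]))

static const int AVAILABLE_AWB_MODES_EXAMPLE[] = {0, 1, 2, 3, 4};  // hypothetical mode ids

int main() {
    size_t awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES_EXAMPLE);  // 5 entries
    printf("awbModesLength = %zu\n", awbModesLength);
    return 0;
}

Because the length is derived from the table itself, adding or removing a mode in the table keeps the reported length consistent without touching the constructor.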
timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K3H5Base::ExynosCamera3SensorS5K3H5Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3248; + maxPictureH = 2438; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3264; + maxSensorH = 2448; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + 
highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3H5_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3H5_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3H5_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3H5_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3H5_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3H5_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3H5_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3H5_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3H5_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3H5_PREVIEW_LIST; + rearPictureList = S5K3H5_PICTURE_LIST; + hiddenRearPreviewList = S5K3H5_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3H5_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3H5_THUMBNAIL_LIST; + rearVideoList = S5K3H5_VIDEO_LIST; + hiddenRearVideoList = S5K3H5_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3H5_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3H5_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorS5K3H7Base::ExynosCamera3SensorS5K3H7Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3248; + maxPictureH = 2438; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3264; + maxSensorH = 2448; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3H7) / (sizeof(int) * 
SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3H7) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3H7) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3H7) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3H7) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_3H7; + videoSizeLut = VIDEO_SIZE_LUT_3H7; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_3H7; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3H7; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3H7; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3H7_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3H7_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3H7_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3H7_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3H7_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3H7_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3H7_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3H7_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3H7_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3H7_PREVIEW_LIST; + rearPictureList = S5K3H7_PICTURE_LIST; + hiddenRearPreviewList = S5K3H7_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3H7_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3H7_THUMBNAIL_LIST; + rearVideoList = S5K3H7_VIDEO_LIST; + hiddenRearVideoList = S5K3H7_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3H7_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3H7_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorS5K3M2Base::ExynosCamera3SensorS5K3M2Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4144; + maxSensorH = 3106; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 370; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 62.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 48.2f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 28; + + +#ifdef USE_SUBDIVIDED_EV + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + 
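The two USE_SUBDIVIDED_EV branches above encode the same user-visible ±2 EV range, only with different index granularity. A worked check, assuming the framework interprets the bound as index times step:

// Sketch only: values mirror the two branches of USE_SUBDIVIDED_EV above.
#include <cstdio>

int main() {
    // Subdivided branch: indices -20..+20 in 0.1 EV steps
    printf("subdivided: %.1f .. %.1f EV\n", -20 * 0.1f, 20 * 0.1f);  // -2.0 .. +2.0
    // Default branch: indices -4..+4 in 0.5 EV steps
    printf("coarse:     %.1f .. %.1f EV\n", -4 * 0.5f, 4 * 0.5f);    // -2.0 .. +2.0
    return 0;
}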
autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /* | EFFECT_SOLARIZE */ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + | EFFECT_AQUA + /* | EFFECT_WHITEBOARD */ + /* | EFFECT_BLACKBOARD */ + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 2056; + highSpeedRecording60H = 1152; + highSpeedRecording120W = 1024; + highSpeedRecording120H = 574; + scalableSensorSupport = true; + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = 0; + previewSizeLut = PREVIEW_SIZE_LUT_3M2_BNS; + videoSizeLut = VIDEO_SIZE_LUT_3M2; + videoSizeBnsLut = VIDEO_SIZE_LUT_3M2_BNS; + vtcallSizeLut = NULL; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_3M2; + videoSizeLut = VIDEO_SIZE_LUT_3M2; + videoSizeBnsLut = NULL; + vtcallSizeLut = VTCALL_SIZE_LUT_3M2; + } + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3M2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3M2) / (sizeof(int) * SIZE_OF_LUT); + + pictureSizeLut = PICTURE_SIZE_LUT_3M2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3M2; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3M2; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3M2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3M2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3M2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + 
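The *ListMax values above all follow one idiom: divide the total size of a resolution table by the size of one row. A minimal sketch with a hypothetical table, assuming SIZE_OF_RESOLUTION is the number of ints per entry, as the divisions imply:

// Sketch only: EXAMPLE_PREVIEW_LIST and SIZE_OF_RESOLUTION are illustrative.
#include <cstdio>

#define SIZE_OF_RESOLUTION 3  // e.g. width, height, ratio id

static const int EXAMPLE_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = {
    {1920, 1080, 0},
    {1280,  720, 0},
    { 640,  480, 1},
};

int main() {
    int listMax = sizeof(EXAMPLE_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
    printf("listMax = %d\n", listMax);  // 3 rows
    return 0;
}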
hiddenRearPictureListMax = sizeof(S5K3M2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3M2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3M2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3M2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + highSpeedVideoListMax = sizeof(S5K3M2_HIGH_SPEED_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3M2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3M2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoFPSListMax = sizeof(S5K3M2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST) / (sizeof(int) *2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3M2_YUV_LIST; + rearPictureList = S5K3M2_YUV_LIST; + hiddenRearPreviewList = S5K3M2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3M2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3M2_THUMBNAIL_LIST; + rearVideoList = S5K3M2_VIDEO_LIST; + hiddenRearVideoList = S5K3M2_HIDDEN_VIDEO_LIST; + highSpeedVideoList = S5K3M2_HIGH_SPEED_VIDEO_LIST; + rearFPSList = S5K3M2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3M2_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoFPSList = S5K3M2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 1 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + 
resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; +#if defined(USE_SUBDIVIDED_EV) + exposureCompensationRange[MIN] = -20; + exposureCompensationRange[MAX] = 20; + exposureCompensationStep = 0.1f; +#else + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; +#endif + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.85f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 4.3f; + focalLengthIn35mmLength = 28; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.1f; + 
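/*
 * Note on the two focus-distance values above: Camera HAL3 lens metadata
 * (ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE / MINIMUM_FOCUS_DISTANCE) is
 * expressed in diopters, i.e. 1 / distance-in-meters, so the reciprocals
 * decode as:
 *
 *   hyperFocalDistance   = 1.0f / 5.0f = 0.2f  -> hyperfocal point at 5 m
 *   minimumFocusDistance = 1.0f / 0.1f = 10.0f -> closest focus at 0.1 m (10 cm)
 *
 * Writing them as reciprocals keeps the physical distance visible in the
 * source while still storing the diopter value the framework expects.
 */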
shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = 7; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 50; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 500000000L; + maxFrameDuration = 500000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 1023; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A; + blackLevelPattern[R] = 0; + blackLevelPattern[GR] = 0; + blackLevelPattern[GB] = 0; + blackLevelPattern[B] = 0; + maxAnalogSensitivity = 640; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + colorTransformMatrix1 = COLOR_MATRIX1_3M2_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_3M2_3X3; + forwardMatrix1 = FORWARD_MATRIX1_3M2_3X3; + forwardMatrix2 = FORWARD_MATRIX2_3M2_3X3; + calibration1 = UNIT_MATRIX_3M2_3X3; + calibration2 = UNIT_MATRIX_3M2_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K3L2Base::ExynosCamera3SensorS5K3L2Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 
4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4144; + maxSensorH = 3106; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 370; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 62.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 48.2f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 28; + + +#ifdef USE_SUBDIVIDED_EV + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /* | EFFECT_SOLARIZE */ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + | EFFECT_AQUA + /* | EFFECT_WHITEBOARD */ + /* | EFFECT_BLACKBOARD */ + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 2056; + highSpeedRecording60H = 1152; + highSpeedRecording120W = 1024; + highSpeedRecording120H = 574; + scalableSensorSupport = true; + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3L2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = 0; + previewSizeLut = 
PREVIEW_SIZE_LUT_3L2_BNS; + videoSizeLut = VIDEO_SIZE_LUT_3L2; + videoSizeBnsLut = VIDEO_SIZE_LUT_3L2_BNS; + vtcallSizeLut = NULL; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_3L2; + videoSizeLut = VIDEO_SIZE_LUT_3L2; + videoSizeBnsLut = NULL; + vtcallSizeLut = VTCALL_SIZE_LUT_3L2; + } + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3L2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3L2) / (sizeof(int) * SIZE_OF_LUT); + + pictureSizeLut = PICTURE_SIZE_LUT_3L2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3L2; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3L2; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3L2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3L2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3L2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3L2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3L2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3L2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3L2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + highSpeedVideoListMax = sizeof(S5K3L2_HIGH_SPEED_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3L2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3L2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoFPSListMax = sizeof(S5K3L2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST) / (sizeof(int) *2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3L2_YUV_LIST; + rearPictureList = S5K3L2_YUV_LIST; + hiddenRearPreviewList = S5K3L2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3L2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3L2_THUMBNAIL_LIST; + rearVideoList = S5K3L2_VIDEO_LIST; + hiddenRearVideoList = S5K3L2_HIDDEN_VIDEO_LIST; + highSpeedVideoList = S5K3L2_HIGH_SPEED_VIDEO_LIST; + rearFPSList = S5K3L2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3L2_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoFPSList = S5K3L2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 1 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; +#if 
defined(USE_SUBDIVIDED_EV) + exposureCompensationRange[MIN] = -20; + exposureCompensationRange[MAX] = 20; + exposureCompensationStep = 0.1f; +#else + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; +#endif + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.85f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 4.3f; + focalLengthIn35mmLength = 28; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.1f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = 7; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 50; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG; + exposureTimeRange[MIN] = 14000L; + 
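/*
 * ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE and MAX_FRAME_DURATION are
 * specified in nanoseconds, so the bounds used for this sensor decode as:
 *
 *   14000L     ns = 14 us  (shortest exposure)
 *   500000000L ns = 0.5 s  (longest exposure / longest frame duration,
 *                           i.e. a 2 fps floor on the frame rate)
 */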
exposureTimeRange[MAX] = 500000000L; + maxFrameDuration = 500000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 1023; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A; + blackLevelPattern[R] = 0; + blackLevelPattern[GR] = 0; + blackLevelPattern[GB] = 0; + blackLevelPattern[B] = 0; + maxAnalogSensitivity = 640; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + colorTransformMatrix1 = COLOR_MATRIX1_3L2_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_3L2_3X3; + forwardMatrix1 = FORWARD_MATRIX1_3L2_3X3; + forwardMatrix2 = FORWARD_MATRIX2_3L2_3X3; + calibration1 = UNIT_MATRIX_3L2_3X3; + calibration2 = UNIT_MATRIX_3L2_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K4E6Base::ExynosCamera3SensorS5K4E6Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 2592; + maxPreviewH = 1944; + maxPictureW = 2592; + maxPictureH = 1944; + maxVideoW = 2560; + maxVideoH = 1440; + maxSensorW = 2608; + maxSensorH = 1960; + sensorMarginW = 16; + sensorMarginH = 10; + sensorMarginBase[LEFT_BASE] = 2; + sensorMarginBase[TOP_BASE] = 2; + sensorMarginBase[WIDTH_BASE] = 4; + sensorMarginBase[HEIGHT_BASE] = 4; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + horizontalViewAngle[SIZE_RATIO_16_9] = 77.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 77.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 60.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 71.8f; + horizontalViewAngle[SIZE_RATIO_5_4] = 65.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 74.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 65.2f; + verticalViewAngle = 61.0f; + + /* TODO : Where should we go? 
*/ + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 220; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 1900; + apertureDen = 1000; + videoSnapshotSupport = true; + + /* Hal1 info - prevent setparam fail */ + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + drcSupport = true; + + bnsSupport = false; + + effectList = + EFFECT_NONE + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_FIXED + | FOCUS_MODE_INFINITY + ; + + sceneModeList = + SCENE_MODE_AUTO + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + ; + + previewSizeLutMax = sizeof(YUV_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(YUV_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); +#if defined(ENABLE_8MP_FULL_FRAME) || defined(ENABLE_13MP_FULL_FRAME) + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4E6_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); +#endif + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = YUV_SIZE_LUT_4E6; + pictureSizeLut = YUV_SIZE_LUT_4E6; +#if defined(ENABLE_8MP_FULL_FRAME) || defined(ENABLE_13MP_FULL_FRAME) + videoSizeLut = VIDEO_SIZE_LUT_4E6_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_4E6; +#endif + dualVideoSizeLut = DUAL_VIDEO_SIZE_LUT_4E6; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = VTCALL_SIZE_LUT_4E6; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K4E6_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K4E6_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K4E6_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K4E6_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K4E6_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K4E6_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K4E6_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K4E6_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K4E6_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K4E6_YUV_LIST; + frontPictureList = S5K4E6_YUV_LIST; + hiddenFrontPreviewList = S5K4E6_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K4E6_HIDDEN_PICTURE_LIST; + thumbnailList = S5K4E6_THUMBNAIL_LIST; + frontVideoList = S5K4E6_VIDEO_LIST; + hiddenFrontVideoList = 
S5K4E6_HIDDEN_VIDEO_LIST; + frontFPSList = S5K4E6_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K4E6_HIDDEN_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_FRONT; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED_BURST; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_FRONT; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_FRONT; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_BURST); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_FRONT); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_FRONT); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 0; + max3aRegions[AWB] = 0; + max3aRegions[AF] = 0; + sceneModeOverrides = 
SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.9f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 2.2f; + focalLengthIn35mmLength = 22; + hyperFocalDistance = 0.0f; + minimumFocusDistance = 0.0f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 20.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = 0.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = NUM_REQUEST_BLOCK_MAX; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 125000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.495f; + sensorPhysicalSize[HEIGHT] = 2.626f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = FRONT_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + colorTransformMatrix1 = UNIT_MATRIX_4E6_3X3; + colorTransformMatrix2 = UNIT_MATRIX_4E6_3X3; + 
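/*
 * The 4E6 front sensor publishes UNIT_MATRIX_4E6_3X3 for every DNG color
 * matrix slot (colorTransform, forward and calibration matrices), i.e. no
 * per-illuminant color calibration data is shipped for this module and the
 * transforms are effectively pass-through. Assuming the table follows the
 * usual camera_metadata_rational layout, an identity entry would look like:
 *
 *   static const camera_metadata_rational UNIT_3X3_EXAMPLE[] = {  // hypothetical name
 *       {1, 1}, {0, 1}, {0, 1},
 *       {0, 1}, {1, 1}, {0, 1},
 *       {0, 1}, {0, 1}, {1, 1},
 *   };
 */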
forwardMatrix1 = UNIT_MATRIX_4E6_3X3; + forwardMatrix2 = UNIT_MATRIX_4E6_3X3; + calibration1 = UNIT_MATRIX_4E6_3X3; + calibration2 = UNIT_MATRIX_4E6_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[0] = 64; + sharpnessMapSize[1] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K4H5Base::ExynosCamera3SensorS5K4H5Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 24; + fNumberDen = 10; + focalLengthNum = 330; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 56.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 43.4f; + horizontalViewAngle[SIZE_RATIO_1_1] = 33.6f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 
sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_4H5; + videoSizeLut = VIDEO_SIZE_LUT_4H5; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_4H5; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K4H5_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K4H5_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K4H5_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K4H5_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K4H5_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K4H5_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K4H5_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K4H5_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K4H5_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K4H5_PREVIEW_LIST; + rearPictureList = S5K4H5_PICTURE_LIST; + hiddenRearPreviewList = S5K4H5_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K4H5_HIDDEN_PICTURE_LIST; + thumbnailList = S5K4H5_THUMBNAIL_LIST; + rearVideoList = S5K4H5_VIDEO_LIST; + hiddenRearVideoList = S5K4H5_HIDDEN_VIDEO_LIST; + rearFPSList = S5K4H5_FPS_RANGE_LIST; + hiddenRearFPSList = S5K4H5_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorS5K5E2Base::ExynosCamera3SensorS5K5E2Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 2576; + maxPictureH = 1930; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2576; + maxSensorH = 1930; + sensorMarginW = 0; + sensorMarginH = 0; + + minFps = 1; + maxFps = 30; + fNumberNum = 190; + fNumberDen = 100; + focalLengthNum = 200; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 70.3f; + horizontalViewAngle[SIZE_RATIO_4_3] = 70.3f; + horizontalViewAngle[SIZE_RATIO_1_1] = 55.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 23; + + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + effectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + ; + + hiddenEffectList = + EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH + ; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_5E2_YC) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_5E2_YC) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_5E2_YC) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_5E2) / (sizeof(int) 
* SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = PREVIEW_SIZE_LUT_5E2_YC; + pictureSizeLut = PICTURE_SIZE_LUT_5E2_YC; + videoSizeLut = VIDEO_SIZE_LUT_5E2_YC; + dualVideoSizeLut = VIDEO_SIZE_LUT_5E2_YC; + vtcallSizeLut = VTCALL_SIZE_LUT_5E2; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K5E2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K5E2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K5E2_YC_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K5E2_YC_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K5E2_YC_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K5E2_YC_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K5E2_YC_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K5E2_YC_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K5E2_YC_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoListMax = 0; + highSpeedVideoFPSListMax = 0; + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K5E2_YUV_LIST; + frontPictureList = S5K5E2_YUV_LIST; + hiddenFrontPreviewList = S5K5E2_YC_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K5E2_YC_HIDDEN_PICTURE_LIST; + thumbnailList = S5K5E2_YC_THUMBNAIL_LIST; + frontVideoList = S5K5E2_YC_VIDEO_LIST; + hiddenFrontVideoList = S5K5E2_YC_HIDDEN_VIDEO_LIST; + frontFPSList = S5K5E2_YC_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K5E2_YC_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoList = NULL; + highSpeedVideoFPSList = NULL; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_FRONT; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED_BURST; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_FRONT; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_FRONT; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_BURST); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_FRONT); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_FRONT); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; +#if defined(USE_SUBDIVIDED_EV) + exposureCompensationRange[MIN] = -20; + exposureCompensationRange[MAX] = 20; + exposureCompensationStep = 0.1f; +#else + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; +#endif + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 0; + max3aRegions[AWB] = 0; + max3aRegions[AF] = 0; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + 
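/*
 * Both branches of the USE_SUBDIVIDED_EV block above cover the same +/-2 EV
 * span; only the granularity differs:
 *
 *   subdivided: 20 steps * 0.1f EV/step = +/-2.0 EV
 *   default   :  4 steps * 0.5f EV/step = +/-2.0 EV
 *
 * i.e. the framework-visible limit is exposureCompensationRange[MAX]
 * multiplied by exposureCompensationStep in either configuration.
 */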
awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.4f; + fNumber = 2.4f; + filterDensity = 0.0f; + focalLength = 1.86f; + focalLengthIn35mmLength = 27; + hyperFocalDistance = 0.0f; + minimumFocusDistance = 0.0f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 20.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = 0.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = 7; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + colorTransformMatrix1 = UNIT_MATRIX_5E2_3X3; + colorTransformMatrix2 = UNIT_MATRIX_5E2_3X3; + forwardMatrix1 = UNIT_MATRIX_5E2_3X3; + forwardMatrix2 = UNIT_MATRIX_5E2_3X3; + calibration1 = UNIT_MATRIX_5E2_3X3; + calibration2 = UNIT_MATRIX_5E2_3X3; + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 125000000L; + maxFrameDuration = 500000000L; //125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = FRONT_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + 
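/*
 * The *Length fields below are derived with ARRAY_LENGTH(), presumably the
 * usual element-count macro in this BSP, something like:
 *
 *   #define ARRAY_LENGTH(x) (sizeof(x) / sizeof((x)[0]))
 *
 * e.g. for a hypothetical table
 *   static const uint8_t EXAMPLE_MODES[] = { MODE_OFF, MODE_FAST, MODE_HIGH_QUALITY };
 * ARRAY_LENGTH(EXAMPLE_MODES) evaluates to 3 at compile time, so a mode
 * table and its advertised length cannot drift apart.
 */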
testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[0] = 64; + sharpnessMapSize[1] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K5E3Base::ExynosCamera3SensorS5K5E3Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 2576; + maxPictureH = 1932; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2576; + maxSensorH = 1932; + sensorMarginW = 0; + sensorMarginH = 0; + + minFps = 1; + maxFps = 30; + fNumberNum = 190; + fNumberDen = 100; + focalLengthNum = 200; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 70.3f; + horizontalViewAngle[SIZE_RATIO_4_3] = 70.3f; + horizontalViewAngle[SIZE_RATIO_1_1] = 55.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 23; + + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + effectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + ; + + hiddenEffectList = + EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH + ; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = PREVIEW_SIZE_LUT_5E3; + pictureSizeLut = PICTURE_SIZE_LUT_5E3; + videoSizeLut = VIDEO_SIZE_LUT_5E3; + dualVideoSizeLut = VIDEO_SIZE_LUT_5E3; + vtcallSizeLut = VTCALL_SIZE_LUT_5E3; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K5E3_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K5E3_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + 
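/*
 * Row-count idiom used throughout these tables: the *_LIST arrays are flat
 * int tables, so dividing the total size by the bytes per row yields the
 * number of entries. Resolution lists use SIZE_OF_RESOLUTION ints per row
 * (presumably width, height and a ratio index), while FPS-range lists use
 * two ints (min, max), hence the "/ (sizeof(int) * 2)" variant. A minimal
 * sketch with a hypothetical table:
 *
 *   static const int EXAMPLE_YUV_LIST[][SIZE_OF_RESOLUTION] = {
 *       { 1920, 1080, SIZE_RATIO_16_9 },
 *       { 1440, 1080, SIZE_RATIO_4_3  },
 *   };
 *   const int rows = sizeof(EXAMPLE_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); // == 2
 */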
hiddenFrontPreviewListMax = sizeof(S5K5E3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K5E3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K5E3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K5E3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K5E3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K5E3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K5E3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoListMax = 0; + highSpeedVideoFPSListMax = 0; + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K5E3_YUV_LIST; + frontPictureList = S5K5E3_YUV_LIST; + hiddenFrontPreviewList = S5K5E3_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K5E3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K5E3_THUMBNAIL_LIST; + frontVideoList = S5K5E3_VIDEO_LIST; + hiddenFrontVideoList = S5K5E3_HIDDEN_VIDEO_LIST; + frontFPSList = S5K5E3_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K5E3_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoList = NULL; + highSpeedVideoFPSList = NULL; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_FRONT; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED_BURST; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_FRONT; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_FRONT; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_BURST); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_FRONT); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_FRONT); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength 
= ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; +#if defined(USE_SUBDIVIDED_EV) + exposureCompensationRange[MIN] = -20; + exposureCompensationRange[MAX] = 20; + exposureCompensationStep = 0.1f; +#else + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; +#endif + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 0; + max3aRegions[AWB] = 0; + max3aRegions[AF] = 0; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.4f; + fNumber = 2.4f; + filterDensity = 0.0f; + focalLength = 1.86f; + focalLengthIn35mmLength = 27; + hyperFocalDistance = 0.0f; + minimumFocusDistance = 0.0f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 20.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = 0.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = 7; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType 
= ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + colorTransformMatrix1 = UNIT_MATRIX_5E3_3X3; + colorTransformMatrix2 = UNIT_MATRIX_5E3_3X3; + forwardMatrix1 = UNIT_MATRIX_5E3_3X3; + forwardMatrix2 = UNIT_MATRIX_5E3_3X3; + calibration1 = UNIT_MATRIX_5E3_3X3; + calibration2 = UNIT_MATRIX_5E3_3X3; + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 125000000L; + maxFrameDuration = 500000000L; //125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = FRONT_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[0] = 64; + sharpnessMapSize[1] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K6A3Base::ExynosCamera3SensorS5K6A3Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1280; + maxPreviewH = 720; + maxPictureW = 1392; + maxPictureH = 1402; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1408; + maxSensorH = 1412; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + 
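The *Num/*Den pairs in these sensor tables appear to be EXIF-style rationals: the effective optical value is simply the numerator divided by the denominator. A minimal check against the S5K6A3 figures above (the local names are illustrative only, not part of the HAL):

    // EXIF-style rationals: effective value = numerator / denominator
    const float fNumber     = 22.0f  / 10.0f;   // fNumberNum / fNumberDen         -> f/2.2
    const float focalLength = 420.0f / 100.0f;  // focalLengthNum / focalLengthDen -> 4.2 mm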
minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K6A3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K6A3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K6A3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K6A3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K6A3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K6A3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K6A3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K6A3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K6A3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K6A3_PREVIEW_LIST; + frontPictureList = S5K6A3_PICTURE_LIST; + hiddenFrontPreviewList = S5K6A3_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K6A3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K6A3_THUMBNAIL_LIST; + frontVideoList = S5K6A3_VIDEO_LIST; + hiddenFrontVideoList = S5K6A3_HIDDEN_VIDEO_LIST; + frontFPSList = S5K6A3_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K6A3_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorS5K2P8Base::ExynosCamera3SensorS5K2P8Base() : ExynosCamera3SensorInfoBase() +{ + /* Sensor Max Size Infos */ + maxPreviewW = 5312; + maxPreviewH = 2988; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + sensorMarginBase[LEFT_BASE] = 2; + sensorMarginBase[TOP_BASE] = 2; + sensorMarginBase[WIDTH_BASE] = 4; + sensorMarginBase[HEIGHT_BASE] = 4; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + horizontalViewAngle[SIZE_RATIO_16_9] = 56.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 43.4f; + horizontalViewAngle[SIZE_RATIO_1_1] = 33.6f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + + minFps = 1; + maxFps = 30; + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 480; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + + maxNumDetectedFaces = 16; + 
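The *ListMax fields above are all computed with the same idiom: each resolution table is a flat array of int with SIZE_OF_RESOLUTION ints per row (the FPS tables use two ints per row, hence the * 2), so dividing the total size by the row size gives the number of entries. This only works because the tables are true arrays visible to sizeof, not pointers. A minimal sketch with a made-up table; EXAMPLE_LIST and its row contents are illustrative, not taken from the BSP:

    // Hypothetical table: three rows of SIZE_OF_RESOLUTION ints each.
    static const int EXAMPLE_LIST[][SIZE_OF_RESOLUTION] = {
        { 1920, 1080, SIZE_RATIO_16_9 },
        { 1440, 1080, SIZE_RATIO_4_3  },
        {  640,  480, SIZE_RATIO_4_3  },
    };
    // sizeof(EXAMPLE_LIST) == 3 * SIZE_OF_RESOLUTION * sizeof(int), so the quotient is 3.
    int exampleListMax = sizeof(EXAMPLE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);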
maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT*/ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + bnsSupport = true; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(YUV_SIZE_LUT_2P8) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P8_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P8_WQHD) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P8) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P8) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P8) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed240Max = sizeof(VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_2P8) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2P8) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = YUV_SIZE_LUT_2P8; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_2P8_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_2P8_BDS_WQHD; +#endif + pictureSizeLut = PICTURE_SIZE_LUT_2P8; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P8; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P8; + videoSizeLutHighSpeed240 = VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_2P8; + vtcallSizeLut = VTCALL_SIZE_LUT_2P8; + + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + videoSizeLutHighSpeed240Max = 0; + vtcallSizeLutMax = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + 
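Two details of the S5K2P8 block above are easy to miss. First, both exposure-compensation branches span the same ±2 EV: ±20 steps at 0.1 EV and ±4 steps at 0.5 EV differ only in granularity, and either way they overwrite the -4..+4 / 0.5f values assigned a few lines earlier. Second, in the bnsSupport path the non-ENABLE_8MP_FULL_FRAME case computes videoSizeLutMax from VIDEO_SIZE_LUT_2P8_WQHD but then points videoSizeLut at VIDEO_SIZE_LUT_2P8_BDS_WQHD; if those two tables ever differ in row count, the stored length and the table it is supposed to describe will disagree.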
videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + videoSizeLutHighSpeed240 = NULL; + vtcallSizeLut = NULL; + + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P8_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P8_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P8_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P8_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P8_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P8_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P8_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + highSpeedVideoListMax = sizeof(S5K2P8_HIGH_SPEED_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2P8_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P8_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoFPSListMax = sizeof(S5K2P8_HIGH_SPEED_VIDEO_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P8_YUV_LIST; + rearPictureList = S5K2P8_YUV_LIST; + hiddenRearPreviewList = S5K2P8_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P8_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P8_THUMBNAIL_LIST; + rearVideoList = S5K2P8_VIDEO_LIST; + hiddenRearVideoList = S5K2P8_HIDDEN_VIDEO_LIST; + highSpeedVideoList = S5K2P8_HIGH_SPEED_VIDEO_LIST; + rearFPSList = S5K2P8_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P8_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoFPSList = S5K2P8_HIGH_SPEED_VIDEO_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = 
ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.2f; + fNumber = 2.2f; + filterDensity = 0.0f; + focalLength = 4.8f; + focalLengthIn35mmLength = 31; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.05f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + colorTransformMatrix1 = COLOR_MATRIX1_2P8_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_2P8_3X3; + forwardMatrix1 = FORWARD_MATRIX1_2P8_3X3; + forwardMatrix2 = FORWARD_MATRIX2_2P8_3X3; + calibration1 = UNIT_MATRIX_2P8_3X3; + calibration2 = 
UNIT_MATRIX_2P8_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K6B2Base::ExynosCamera3SensorS5K6B2Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 1920; + maxPictureH = 1080; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1936; + maxSensorH = 1090; + sensorMarginW = 16; + sensorMarginH = 10; + sensorMarginBase[LEFT_BASE] = 0; + sensorMarginBase[TOP_BASE] = 2; + sensorMarginBase[WIDTH_BASE] = 8; + sensorMarginBase[HEIGHT_BASE] = 4; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + horizontalViewAngle[SIZE_RATIO_16_9] = 69.7f; + horizontalViewAngle[SIZE_RATIO_4_3] = 54.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 60.0f; + horizontalViewAngle[SIZE_RATIO_5_4] = 54.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 64.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 54.2f; + verticalViewAngle = 39.4f; + + minFps = 1; + maxFps = 30; + fNumberNum = 240; + fNumberDen = 100; + focalLengthNum = 186; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 240; + apertureDen = 100; + videoSnapshotSupport = true; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + drcSupport = true; + + bnsSupport = false; + + effectList = + EFFECT_NONE + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_FIXED + | FOCUS_MODE_INFINITY + ; + + sceneModeList = + SCENE_MODE_AUTO + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + ; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_6B2) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_6B2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_6B2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = 
sizeof(VTCALL_SIZE_LUT_6B2) / (sizeof(int) * SIZE_OF_LUT);
+    videoSizeLutHighSpeed60Max = 0;
+    videoSizeLutHighSpeed120Max = 0;
+
+    previewSizeLut = PREVIEW_SIZE_LUT_6B2;
+    pictureSizeLut = PICTURE_SIZE_LUT_6B2;
+    videoSizeLut = VIDEO_SIZE_LUT_6B2;
+    dualVideoSizeLut = VIDEO_SIZE_LUT_6B2;
+    vtcallSizeLut = VTCALL_SIZE_LUT_6B2;
+    videoSizeLutHighSpeed60 = NULL;
+    videoSizeLutHighSpeed120 = NULL;
+    sizeTableSupport = true;
+
+    /* Set the max of preview/picture/video lists */
+    frontPreviewListMax = sizeof(S5K6B2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    frontPictureListMax = sizeof(S5K6B2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    hiddenFrontPreviewListMax = sizeof(S5K6B2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    hiddenFrontPictureListMax = sizeof(S5K6B2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    thumbnailListMax = sizeof(S5K6B2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    frontVideoListMax = sizeof(S5K6B2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    hiddenFrontVideoListMax = sizeof(S5K6B2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION);
+    frontFPSListMax = sizeof(S5K6B2_FPS_RANGE_LIST) / (sizeof(int) * 2);
+    hiddenFrontFPSListMax = sizeof(S5K6B2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2);
+
+    /* Set supported preview/picture/video lists */
+    frontPreviewList = S5K6B2_PREVIEW_LIST;
+    frontPictureList = S5K6B2_PICTURE_LIST;
+    hiddenFrontPreviewList = S5K6B2_HIDDEN_PREVIEW_LIST;
+    hiddenFrontPictureList = S5K6B2_HIDDEN_PICTURE_LIST;
+    thumbnailList = S5K6B2_THUMBNAIL_LIST;
+    frontVideoList = S5K6B2_VIDEO_LIST;
+    hiddenFrontVideoList = S5K6B2_HIDDEN_VIDEO_LIST;
+    frontFPSList = S5K6B2_FPS_RANGE_LIST;
+    hiddenFrontFPSList = S5K6B2_HIDDEN_FPS_RANGE_LIST;
+
+    /*
+    ** Camera HAL 3.2 Static Metadatas
+    **
+    ** The order of declaration follows the order of
+    ** Android Camera HAL3.2 Properties.
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_FRONT; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED_BURST; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_FRONT; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_FRONT; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_BURST); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_FRONT); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_FRONT); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 0; + max3aRegions[AWB] = 0; + max3aRegions[AF] = 0; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = 
ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.4f; + fNumber = 2.4f; + filterDensity = 0.0f; + focalLength = 1.86f; + focalLengthIn35mmLength = 27; + hyperFocalDistance = 0.0f; + minimumFocusDistance = 0.0f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 20.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = 0.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = NUM_REQUEST_BLOCK_MAX; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 250000000L; + maxFrameDuration = 250000000L; + sensorPhysicalSize[WIDTH] = 3.495f; + sensorPhysicalSize[HEIGHT] = 2.626f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = FRONT_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + colorTransformMatrix1 = UNIT_MATRIX_6B2_3X3; + colorTransformMatrix2 = UNIT_MATRIX_6B2_3X3; + forwardMatrix1 = UNIT_MATRIX_6B2_3X3; + forwardMatrix2 = UNIT_MATRIX_6B2_3X3; + calibration1 = UNIT_MATRIX_6B2_3X3; + calibration2 = UNIT_MATRIX_6B2_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = 
AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[0] = 64; + sharpnessMapSize[1] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K6D1Base::ExynosCamera3SensorS5K6D1Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 2560; + maxPreviewH = 1440; + maxPictureW = 2560; + maxPictureH = 1440; + maxVideoW = 2560; + maxVideoH = 1440; + maxSensorW = 2576; + maxSensorH = 1456; + sensorMarginW = 16; + sensorMarginH = 16; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + horizontalViewAngle[SIZE_RATIO_16_9] = 79.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 65.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 50.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 71.8f; + horizontalViewAngle[SIZE_RATIO_5_4] = 65.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 74.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 65.2f; + verticalViewAngle = 39.4f; + + /* TODO : Where should we go? 
*/ + minFps = 1; + maxFps = 30; + focusDistanceNum = 0; + focusDistanceDen = 0; + videoSnapshotSupport = true; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + visionModeSupport = true; + drcSupport = true; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_6D1) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_6D1) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_6D1_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_6D1) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + +#endif + previewSizeLut = PREVIEW_SIZE_LUT_6D1; + pictureSizeLut = PICTURE_SIZE_LUT_6D1; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_6D1_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_6D1; +#endif + dualVideoSizeLut = DUAL_VIDEO_SIZE_LUT_6D1; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K6D1_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K6D1_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K6D1_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K6D1_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K6D1_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K6D1_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K6D1_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K6D1_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K6D1_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K6D1_PREVIEW_LIST; + frontPictureList = S5K6D1_PICTURE_LIST; + hiddenFrontPreviewList = S5K6D1_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K6D1_HIDDEN_PICTURE_LIST; + thumbnailList = S5K6D1_THUMBNAIL_LIST; + frontVideoList = S5K6D1_VIDEO_LIST; + hiddenFrontVideoList = S5K6D1_HIDDEN_VIDEO_LIST; + frontFPSList = S5K6D1_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K6D1_HIDDEN_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_FRONT; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 0; + max3aRegions[AWB] = 0; + max3aRegions[AF] = 0; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); 
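ARRAY_LENGTH is used throughout these tables to turn a statically sized array into an element count; its definition is not part of this hunk. Assuming the conventional countof-style macro (the BSP's own header may define it differently, e.g. as a template), it would look like:

    /* assumed definition, shown only for context */
    #define ARRAY_LENGTH(x) (sizeof(x) / sizeof((x)[0]))

With such a definition every ARRAY_LENGTH(...) expression above evaluates at compile time, which is also why the mode tables must be real arrays rather than pointers.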
+ awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.9f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 1.6f; + focalLengthIn35mmLength = 22; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + hyperFocalDistance = 0.0f; + minimumFocusDistance = 0.0f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 20.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = 0.0f; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; //RAW + maxNumOutputStreams[PROCESSED] = 3; //PROC + maxNumOutputStreams[PROCESSED_STALL] = 1; //PROC_STALL + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 125000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = FRONT_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[0] = 64; + sharpnessMapSize[1] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + 
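For the S5K6D1 lens and sensor fields above, the HAL3 units apply: hyperFocalDistance and minimumFocusDistance are expressed in diopters (1/metres), so the 0.0f values mark this front module as fixed-focus, while exposureTimeRange and maxFrameDuration are in nanoseconds (14000 ns = 14 µs minimum exposure, 125000000 ns = 125 ms maximum frame duration).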
lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K8B1Base::ExynosCamera3SensorS5K8B1Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 1920; + maxPictureH = 1080; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1936; + maxSensorH = 1090; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 24; + fNumberDen = 10; + focalLengthNum = 120; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 2400; + apertureDen = 1000; + + horizontalViewAngle[SIZE_RATIO_16_9] = 79.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 65.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 50.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 71.8f; + horizontalViewAngle[SIZE_RATIO_5_4] = 65.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 74.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 65.2f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 22; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + visionModeSupport = true; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K8B1_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K8B1_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K8B1_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K8B1_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K8B1_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K8B1_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K8B1_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K8B1_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K8B1_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = 
S5K8B1_PREVIEW_LIST; + frontPictureList = S5K8B1_PICTURE_LIST; + hiddenFrontPreviewList = S5K8B1_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K8B1_HIDDEN_PICTURE_LIST; + thumbnailList = S5K8B1_THUMBNAIL_LIST; + frontVideoList = S5K8B1_VIDEO_LIST; + hiddenFrontVideoList = S5K8B1_HIDDEN_VIDEO_LIST; + frontFPSList = S5K8B1_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K8B1_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorSR261Base::ExynosCamera3SensorSR261Base() : ExynosCamera3SensorInfoBase() +{ + +}; + +ExynosCamera3SensorSR544Base::ExynosCamera3SensorSR544Base() : ExynosCamera3SensorInfoBase() +{ + +}; + +ExynosCamera3SensorIMX134Base::ExynosCamera3SensorIMX134Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + maxPreviewW = 3264; + maxPreviewH = 2448; +#else + maxPreviewW = 1920; + maxPreviewH = 1080; +#endif + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 24; + fNumberDen = 10; + focalLengthNum = 340; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 253; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 56.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 44.3f; + horizontalViewAngle[SIZE_RATIO_1_1] = 34.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 48.1f; + horizontalViewAngle[SIZE_RATIO_5_4] = 44.3f; + horizontalViewAngle[SIZE_RATIO_5_3] = 52.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 44.3f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX134) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX134) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX134) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX134) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX134) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_IMX134; 
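Note that this IMX134 table, like the S5K6A3, S5K8B1 and IMX135 tables around it (and the IMX175 body below, which also opens with #if 0), is compiled out entirely, so those sensors fall back on the ExynosCamera3SensorInfoBase defaults; in this hunk the S5K5E3, S5K2P8, S5K6B2 and S5K6D1 blocks are the ones that remain live, and the empty SR261/SR544 constructors likewise inherit the base defaults.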
+ videoSizeLut = VIDEO_SIZE_LUT_IMX134; + pictureSizeLut = PICTURE_SIZE_LUT_IMX134; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX134; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX134; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX134_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX134_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX134_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX134_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX134_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX134_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX134_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX134_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX134_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX134_PREVIEW_LIST; + rearPictureList = IMX134_PICTURE_LIST; + hiddenRearPreviewList = IMX134_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX134_HIDDEN_PICTURE_LIST; + thumbnailList = IMX134_THUMBNAIL_LIST; + rearVideoList = IMX134_VIDEO_LIST; + hiddenRearVideoList = IMX134_HIDDEN_VIDEO_LIST; + rearFPSList = IMX134_FPS_RANGE_LIST; + hiddenRearFPSList = IMX134_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorIMX135Base::ExynosCamera3SensorIMX135Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4144; + maxSensorH = 3106; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + 
highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX135_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX135_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX135_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX135_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX135_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX135_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX135_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX135_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX135_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX135_PREVIEW_LIST; + rearPictureList = IMX135_PICTURE_LIST; + hiddenRearPreviewList = IMX135_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX135_HIDDEN_PICTURE_LIST; + thumbnailList = IMX135_THUMBNAIL_LIST; + rearVideoList = IMX135_VIDEO_LIST; + hiddenRearVideoList = IMX135_HIDDEN_VIDEO_LIST; + rearFPSList = IMX135_FPS_RANGE_LIST; + hiddenRearFPSList = IMX135_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorIMX175Base::ExynosCamera3SensorIMX175Base() : ExynosCamera3SensorInfoBase() +{ +#if 0 + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 26; + fNumberDen = 10; + focalLengthNum = 370; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 276; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX175) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax 
= sizeof(VIDEO_SIZE_LUT_IMX175) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX175) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX175) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX175) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_IMX175; + videoSizeLut = VIDEO_SIZE_LUT_IMX175; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_IMX175; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX175; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX175; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX175_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX175_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX175_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX175_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX175_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX175_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX175_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX175_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX175_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX175_PREVIEW_LIST; + rearPictureList = IMX175_PICTURE_LIST; + hiddenRearPreviewList = IMX175_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX175_HIDDEN_PICTURE_LIST; + thumbnailList = IMX175_THUMBNAIL_LIST; + rearVideoList = IMX175_VIDEO_LIST; + hiddenRearVideoList = IMX175_HIDDEN_VIDEO_LIST; + rearFPSList = IMX175_FPS_RANGE_LIST; + hiddenRearFPSList = IMX175_HIDDEN_FPS_RANGE_LIST; +#endif +}; + +ExynosCamera3SensorIMX219Base::ExynosCamera3SensorIMX219Base() : ExynosCamera3SensorInfoBase() +{ + +}; + +#if 0 +ExynosCamera3SensorIMX240Base::ExynosCamera3SensorIMX240Base() : ExynosCamera3SensorInfoBase() +{ + /* Sensor Max Size Infos */ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + sensorMarginBase[LEFT_BASE] = 2; + sensorMarginBase[TOP_BASE] = 2; + sensorMarginBase[WIDTH_BASE] = 4; + sensorMarginBase[HEIGHT_BASE] = 4; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + /* Sensor FOV Infos */ + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + + /* TODO : Where should we go? 
*/ + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + /* Hal1 info - prevent setparam fail */ + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + /* Hal1 info - prevent setparam fail */ + antiBandingList = + ANTIBANDING_AUTO + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_FIXED + | FOCUS_MODE_INFINITY + ; + + sceneModeList = + SCENE_MODE_AUTO + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + ; + + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX240) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_IMX240_BNS; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX240_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX240; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_IMX240_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_IMX240; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX240_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX240_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX240_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX240_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX240_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX240_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX240_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX240_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX240_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX240_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + 
rearPreviewList = IMX240_PREVIEW_LIST; + rearPictureList = IMX240_PICTURE_LIST; + hiddenRearPreviewList = IMX240_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX240_HIDDEN_PICTURE_LIST; + thumbnailList = IMX240_THUMBNAIL_LIST; + rearVideoList = IMX240_VIDEO_LIST; + hiddenRearVideoList = IMX240_HIDDEN_VIDEO_LIST; + rearFPSList = IMX240_FPS_RANGE_LIST; + hiddenRearFPSList = IMX240_HIDDEN_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 0 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; 
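+ /* The AE/AF mode tables are selected by lens facing: the *_FRONT tables for a
+  * front-facing sensor and the *_BACK tables (as in this case) for a rear
+  * sensor. ARRAY_LENGTH() is assumed to be the usual element-count macro,
+  * roughly #define ARRAY_LENGTH(x) (sizeof(x) / sizeof((x)[0])), so each
+  * *Length field records how many entries the selected table contains. */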
+ aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.85f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 4.3f; + focalLengthIn35mmLength = 28; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.1f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 
2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; +#endif + +ExynosCamera3SensorIMX240_2P2Base::ExynosCamera3SensorIMX240_2P2Base(int sensorId) : ExynosCamera3SensorInfoBase() +{ + /* Sensor Max Size Infos */ + maxPreviewW = 5312; + maxPreviewH = 2988; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + sensorMarginBase[LEFT_BASE] = 2; + sensorMarginBase[TOP_BASE] = 2; + sensorMarginBase[WIDTH_BASE] = 4; + sensorMarginBase[HEIGHT_BASE] = 4; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + /* Sensor FOV Infos */ + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + + /* TODO : Where should we go? 
*/ + minFps = 1; + maxFps = 30; + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + /* Hal1 info - prevent setparam fail */ + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + /* Hal1 info - prevent setparam fail */ + antiBandingList = + ANTIBANDING_AUTO + ; + + effectList = + EFFECT_NONE + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_FIXED + | FOCUS_MODE_INFINITY + ; + + sceneModeList = + SCENE_MODE_AUTO + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + ; + + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(YUV_SIZE_LUT_IMX240_2P2) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(YUV_SIZE_LUT_IMX240_2P2) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240_2P2_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240_2P2) / (sizeof(int) * SIZE_OF_LUT); +#endif + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = YUV_SIZE_LUT_IMX240_2P2; + pictureSizeLut = YUV_SIZE_LUT_IMX240_2P2; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX240_2P2_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX240_2P2; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_IMX240_2P2_BNS; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_2P2_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX240_2P2_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX240_2P2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX240_2P2_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX240_2P2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX240_2P2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX240_2P2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX240_2P2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX240_2P2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + highSpeedVideoListMax = sizeof(IMX240_2P2_HIGH_SPEED_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = 
sizeof(IMX240_2P2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX240_2P2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoFPSListMax = sizeof(IMX240_2P2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX240_2P2_YUV_LIST; + rearPictureList = IMX240_2P2_YUV_LIST; + hiddenRearPreviewList = IMX240_2P2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX240_2P2_HIDDEN_PICTURE_LIST; + thumbnailList = IMX240_2P2_THUMBNAIL_LIST; + rearVideoList = IMX240_2P2_VIDEO_LIST; + hiddenRearVideoList = IMX240_2P2_HIDDEN_VIDEO_LIST; + highSpeedVideoList = IMX240_2P2_HIGH_SPEED_VIDEO_LIST; + rearFPSList = IMX240_2P2_FPS_RANGE_LIST; + hiddenRearFPSList = IMX240_2P2_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoFPSList = IMX240_2P2_HIGH_SPEED_VIDEO_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 0 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid 
supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.85f; + fNumber = 1.9f; + filterDensity = 0.0f; + focalLength = 4.3f; + focalLengthIn35mmLength = 28; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.1f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = NUM_REQUEST_BLOCK_MAX; + 
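+ /* maxPipelineDepth bounds how many capture requests can be in flight inside
+  * the HAL pipeline at once (android.request.pipelineMaxDepth in the HAL3
+  * static metadata), and the partialResultCount of 1 below means each capture
+  * result is returned as one complete metadata packet rather than in partial
+  * pieces. */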
partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 50; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 5.967f; + sensorPhysicalSize[HEIGHT] = 3.360f; + whiteLevel = 1023; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A; + blackLevelPattern[R] = 0; + blackLevelPattern[GR] = 0; + blackLevelPattern[GB] = 0; + blackLevelPattern[B] = 0; + maxAnalogSensitivity = 640; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + if (sensorId == SENSOR_NAME_IMX240) { + colorTransformMatrix1 = COLOR_MATRIX1_IMX240_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_IMX240_3X3; + forwardMatrix1 = FORWARD_MATRIX1_IMX240_3X3; + forwardMatrix2 = FORWARD_MATRIX2_IMX240_3X3; + } else { + colorTransformMatrix1 = COLOR_MATRIX1_2P2_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_2P2_3X3; + forwardMatrix1 = FORWARD_MATRIX1_2P2_3X3; + forwardMatrix2 = FORWARD_MATRIX2_2P2_3X3; + } + calibration1 = UNIT_MATRIX_IMX240_2P2_3X3; + calibration2 = UNIT_MATRIX_IMX240_2P2_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorIMX260_2L1Base::ExynosCamera3SensorIMX260_2L1Base(int sensorId) : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 4032; + maxPreviewH = 3024; + maxPictureW = 4032; + maxPictureH = 3024; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 4032; + maxSensorH = 3024; + sensorMarginW = 0; + sensorMarginH = 0; + sensorMarginBase[LEFT_BASE] = 0; + sensorMarginBase[TOP_BASE] = 0; + sensorMarginBase[WIDTH_BASE] = 0; + sensorMarginBase[HEIGHT_BASE] = 0; + + maxThumbnailW = 
512; + maxThumbnailH = 384; + + minFps = 1; + maxFps = 30; + + exposureCompensationStep = 0.5f; + + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + bnsSupport = true; + +#if defined(USE_SUBDIVIDED_EV) + exposureCompensationStep = 0.1f; +#else + exposureCompensationStep = 0.5f; +#endif + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(YUV_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(YUV_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX260_2L1_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); +#endif + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed240Max = sizeof(VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + fastAeStableLutMax = sizeof(FAST_AE_STABLE_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + depthMapSizeLutMax = sizeof(DEPTH_MAP_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_RESOLUTION); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = YUV_SIZE_LUT_IMX260_2L1; + pictureSizeLut = YUV_SIZE_LUT_IMX260_2L1; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX260_2L1_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX260_2L1; +#endif + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX260_2L1; + videoSizeLutHighSpeed240 = VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_IMX260_2L1; + depthMapSizeLut = DEPTH_MAP_SIZE_LUT_IMX260_2L1; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX260_2L1; + fastAeStableLut = FAST_AE_STABLE_SIZE_LUT_IMX260_2L1; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed120Max = 0; + videoSizeLutHighSpeed240Max = 0; + vtcallSizeLutMax = 0; + fastAeStableLutMax = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed120 = NULL; + videoSizeLutHighSpeed240 = NULL; + vtcallSizeLut = NULL; + fastAeStableLut = NULL; + + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX260_2L1_YUV_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX260_2L1_JPEG_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX260_2L1_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX260_2L1_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX260_2L1_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX260_2L1_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX260_2L1_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + highSpeedVideoListMax = sizeof(IMX260_2L1_HIGH_SPEED_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX260_2L1_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX260_2L1_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + highSpeedVideoFPSListMax = 
sizeof(IMX260_2L1_HIGH_SPEED_VIDEO_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX260_2L1_YUV_LIST; + rearPictureList = IMX260_2L1_JPEG_LIST; + hiddenRearPreviewList = IMX260_2L1_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX260_2L1_HIDDEN_PICTURE_LIST; + thumbnailList = IMX260_2L1_THUMBNAIL_LIST; + rearVideoList = IMX260_2L1_VIDEO_LIST; + hiddenRearVideoList = IMX260_2L1_HIDDEN_VIDEO_LIST; + highSpeedVideoList = IMX260_2L1_HIGH_SPEED_VIDEO_LIST; + rearFPSList = IMX260_2L1_FPS_RANGE_LIST; + hiddenRearFPSList = IMX260_2L1_HIDDEN_FPS_RANGE_LIST; + highSpeedVideoFPSList = IMX260_2L1_HIGH_SPEED_VIDEO_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. + ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 0 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = 
AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 1.70f; + fNumber = 1.7f; + filterDensity = 0.0f; + focalLength = 4.2f; + focalLengthIn35mmLength = 26; + hyperFocalDistance = 1.0f / 3.6f; + minimumFocusDistance = 1.0f / 0.1f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = NUM_REQUEST_BLOCK_MAX; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations 
= AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 64; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG; + exposureTimeRange[MIN] = 22000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 1023; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A; + blackLevelPattern[R] = 0; + blackLevelPattern[GR] = 0; + blackLevelPattern[GB] = 0; + blackLevelPattern[B] = 0; + maxAnalogSensitivity = 640; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + if (sensorId == SENSOR_NAME_IMX260) { + colorTransformMatrix1 = COLOR_MATRIX1_IMX260_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_IMX260_3X3; + forwardMatrix1 = FORWARD_MATRIX1_IMX260_3X3; + forwardMatrix2 = FORWARD_MATRIX2_IMX260_3X3; + } else { + colorTransformMatrix1 = COLOR_MATRIX1_2L1_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_2L1_3X3; + forwardMatrix1 = FORWARD_MATRIX1_2L1_3X3; + forwardMatrix2 = FORWARD_MATRIX2_2L1_3X3; + } + calibration1 = UNIT_MATRIX_IMX260_2L1_3X3; + calibration2 = UNIT_MATRIX_IMX260_2L1_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorS5K3P3Base::ExynosCamera3SensorS5K3P3Base(int sensorId) : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 4608; + maxPreviewH = 3456; + maxPictureW = 4608; + maxPictureH = 3456; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4624; + maxSensorH = 3466; + sensorMarginW = 16; + sensorMarginH = 10; + //check until here + maxThumbnailW = 512; + maxThumbnailH = 384; + + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + +#ifdef USE_SUBDIVIDED_EV + exposureCompensationStep = 0.1f; +#else + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + 
maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3P3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3P3) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3P3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_3P3; + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3P3_BNS; + videoSizeLut = VIDEO_SIZE_LUT_3P3; + videoSizeBnsLut = VIDEO_SIZE_LUT_3P3_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_3P3; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3P3_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3P3_BNS; + + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3P3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3P3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3P3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3P3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3P3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3P3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3P3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3P3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3P3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3P3_PREVIEW_LIST; + rearPictureList = S5K3P3_PICTURE_LIST; + hiddenRearPreviewList = S5K3P3_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3P3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3P3_THUMBNAIL_LIST; + rearVideoList = S5K3P3_VIDEO_LIST; + hiddenRearVideoList = S5K3P3_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3P3_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3P3_HIDDEN_FPS_RANGE_LIST; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: +#if 0 + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); +#else + capabilities = AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED_OPTIONAL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED_OPTIONAL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED_OPTIONAL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED_OPTIONAL); +#endif + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + 
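+ /* The AE compensation range below is expressed in units of
+  * exposureCompensationStep: [-4, 4] with the 0.5f step set just after it
+  * corresponds to an actual range of -2 EV to +2 EV. */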
exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + antiBandingModesLength = ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.27; + fNumber = 2.2f; + filterDensity = 0.0f; + focalLength = 4.8f; + focalLengthIn35mmLength = 31; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.1f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION_BACK; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION_BACK); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = NUM_REQUEST_BLOCK_MAX; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 50; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 5.967f; + sensorPhysicalSize[HEIGHT] = 3.360f; + whiteLevel = 1023; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A; + blackLevelPattern[R] = 0; + blackLevelPattern[GR] = 0; + blackLevelPattern[GB] = 0; + 
blackLevelPattern[B] = 0; + maxAnalogSensitivity = 640; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + if (sensorId == SENSOR_NAME_IMX240) { + colorTransformMatrix1 = COLOR_MATRIX1_IMX240_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_IMX240_3X3; + forwardMatrix1 = FORWARD_MATRIX1_IMX240_3X3; + forwardMatrix2 = FORWARD_MATRIX2_IMX240_3X3; + } else { + colorTransformMatrix1 = COLOR_MATRIX1_2P2_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_2P2_3X3; + forwardMatrix1 = FORWARD_MATRIX1_2P2_3X3; + forwardMatrix2 = FORWARD_MATRIX2_2P2_3X3; + } + calibration1 = UNIT_MATRIX_IMX240_2P2_3X3; + calibration2 = UNIT_MATRIX_IMX240_2P2_3X3; + + /* Android Statistics Static Metadata */ + faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +}; + +ExynosCamera3SensorOV5670Base::ExynosCamera3SensorOV5670Base() : ExynosCamera3SensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 2592; + maxPictureH = 1944; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2608; + maxSensorH = 1960; + sensorMarginW = 16; + sensorMarginH = 16; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 69.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 55.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | 
ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + ; + + hiddenEffectList = + EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT*/; + + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_OV5670) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_OV5670) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_OV5670) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_OV5670; + pictureSizeLut = PICTURE_SIZE_LUT_OV5670; + videoSizeLut = VIDEO_SIZE_LUT_OV5670; + dualVideoSizeLut = VIDEO_SIZE_LUT_OV5670; + videoSizeBnsLut = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(OV5670_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(OV5670_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(OV5670_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(OV5670_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(OV5670_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(OV5670_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(OV5670_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(OV5670_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(OV5670_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = OV5670_PREVIEW_LIST; + frontPictureList = OV5670_PICTURE_LIST; + hiddenFrontPreviewList = OV5670_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = 
OV5670_HIDDEN_PICTURE_LIST; + thumbnailList = OV5670_THUMBNAIL_LIST; + frontVideoList = OV5670_VIDEO_LIST; + hiddenFrontVideoList = OV5670_HIDDEN_VIDEO_LIST; + frontFPSList = OV5670_FPS_RANGE_LIST; + hiddenFrontFPSList = OV5670_HIDDEN_FPS_RANGE_LIST; +}; + +}; /* namespace android */ diff --git a/libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.h b/libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.h new file mode 100644 index 0000000..5fc8357 --- /dev/null +++ b/libcamera/common_v2/SensorInfos/ExynosCamera3SensorInfoBase.h @@ -0,0 +1,240 @@ +/* +** +** Copyright 2015, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_3_SENSOR_INFO_BASE_H +#define EXYNOS_CAMERA_3_SENSOR_INFO_BASE_H + +#include "ExynosCameraConfig.h" +#include "ExynosCameraSensorInfoBase.h" +#include "ExynosCameraAvailabilityTable.h" + +namespace android { + +struct ExynosCamera3SensorInfoBase : public ExynosSensorInfoBase { +private: + +public: + ExynosCamera3SensorInfoBase(); +}; + +struct ExynosCamera3SensorIMX135Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX135Base(); +}; + +struct ExynosCamera3SensorIMX134Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX134Base(); +}; + +struct ExynosCamera3SensorS5K3M2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K3M2Base(); +}; + +struct ExynosCamera3SensorS5K3L2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K3L2Base(); +}; + +#if 0 +struct ExynosCamera3SensorS5K2P2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K2P2Base(); +}; +#endif + +struct ExynosCamera3SensorS5K2P2_12MBase : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K2P2_12MBase(); +}; + +struct ExynosCamera3SensorS5K2P3Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K2P3Base(); +}; + +struct ExynosCamera3SensorS5K2T2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K2T2Base(); +}; + +struct ExynosCamera3SensorS5K2P8Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K2P8Base(); +}; + +struct ExynosCamera3SensorS5K6B2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K6B2Base(); +}; + +struct ExynosCamera3SensorSR261Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorSR261Base(); +}; + +struct ExynosCamera3SensorSR259Base : public ExynosCamera3SensorInfoBase { +public: + ExynosCamera3SensorSR259Base(); +}; + +struct ExynosCamera3SensorS5K3H7Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K3H7Base(); +}; + +struct ExynosCamera3SensorS5K3H5Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K3H5Base(); +}; + +struct ExynosCamera3SensorS5K4H5Base : 
public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K4H5Base(); +}; + +struct ExynosCamera3SensorS5K6A3Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K6A3Base(); +}; + +struct ExynosCamera3SensorIMX175Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX175Base(); +}; + +#if 0 +struct ExynosCamera3SensorIMX240Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX240Base(); +}; +#endif + +struct ExynosCamera3SensorIMX219Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX219Base(); +}; + +struct ExynosCamera3SensorS5K8B1Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K8B1Base(); +}; + +struct ExynosCamera3SensorS5K6D1Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K6D1Base(); +}; + +struct ExynosCamera3SensorS5K4E6Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K4E6Base(); +}; + +struct ExynosCamera3SensorS5K5E2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K5E2Base(); +}; + +struct ExynosCamera3SensorS5K5E3Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K5E3Base(); +}; + +struct ExynosCamera3SensorSR544Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorSR544Base(); +}; + +struct ExynosCamera3SensorIMX240_2P2Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX240_2P2Base(int sensorId); +}; + +struct ExynosCamera3SensorIMX260_2L1Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorIMX260_2L1Base(int sensorId); +}; + +struct ExynosCamera3SensorS5K3P3Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorS5K3P3Base(int sensorId); +}; + +struct ExynosCamera3SensorOV5670Base : public ExynosCamera3SensorInfoBase { +private: + +public: + ExynosCamera3SensorOV5670Base(); +}; + +}; /* namespace android */ +#endif diff --git a/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfo2P8.cpp b/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfo2P8.cpp new file mode 100644 index 0000000..8ca8dbc --- /dev/null +++ b/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfo2P8.cpp @@ -0,0 +1,437 @@ +/* +** +** Copyright 2014, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +/*#define LOG_NDEBUG 0 */ +#include "ExynosCameraSensorInfoBase.h" + +namespace android { + +ExynosSensorS5K2P8Base::ExynosSensorS5K2P8Base() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 480; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + minExposureTime = 32; + maxExposureTime = 10000000; + minWBK = 2300; + maxWBK = 10000; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxBasicZoomLevel = MAX_BASIC_ZOOM_LEVEL; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = true; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + | EFFECT_SOLARIZE + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_WHITEBOARD + | EFFECT_BLACKBOARD + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + | FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + | FOCUS_MODE_FIXED + | FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + | WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + | WHITE_BALANCE_TWILIGHT + | WHITE_BALANCE_SHADE + | WHITE_BALANCE_CUSTOM_K + ; + + /* Set the max of preview/picture/video/vtcall LUT */ + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P8) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P8_WQHD) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P8) 
/ (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P8) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P8) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2P8) / (sizeof(int) * SIZE_OF_LUT); + + /* Set preview/picture/video/vtcall LUT */ + previewSizeLut = PREVIEW_SIZE_LUT_2P8; + +#if defined(USE_BNS_DUAL_PREVIEW) +#if defined(DUAL_BNS_RATIO) && (DUAL_BNS_RATIO == 1500) + dualPreviewSizeLut = PREVIEW_SIZE_LUT_2P8_BDS_BNS15; +#else + dualPreviewSizeLut = PREVIEW_SIZE_LUT_2P8_BDS_BNS20_FHD; +#endif +#else + dualPreviewSizeLut = PREVIEW_SIZE_LUT_2P8_BDS; +#endif // USE_BNS_DUAL_PREVIEW + +#if defined(USE_BNS_DUAL_RECORDING) +#if defined(DUAL_BNS_RATIO) && (DUAL_BNS_RATIO == 1500) + dualVideoSizeLut = PREVIEW_SIZE_LUT_2P8_BDS_BNS15; +#else + dualVideoSizeLut = PREVIEW_SIZE_LUT_2P8_BDS_BNS20_FHD; +#endif +#else + dualVideoSizeLut = PREVIEW_SIZE_LUT_2P8_BDS; +#endif // USE_BNS_DUAL_RECORDING + + videoSizeLut = VIDEO_SIZE_LUT_2P8_WQHD; + videoSizeBnsLut = VIDEO_SIZE_LUT_2P8_BDS_BNS15_WQHD; + pictureSizeLut = PICTURE_SIZE_LUT_2P8; + videoSizeLutHighSpeed = VIDEO_SIZE_LUT_HIGH_SPEED_2P8; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P8; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P8; + vtcallSizeLut = VTCALL_SIZE_LUT_2P8; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P8_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P8_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P8_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P8_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P8_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P8_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P8_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2P8_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P8_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P8_PREVIEW_LIST; + rearPictureList = S5K2P8_PICTURE_LIST; + hiddenRearPreviewList = S5K2P8_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P8_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P8_THUMBNAIL_LIST; + rearVideoList = S5K2P8_VIDEO_LIST; + hiddenRearVideoList = S5K2P8_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2P8_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P8_HIDDEN_FPS_RANGE_LIST; + +#if 0 + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + + /* lensFacing, supportedHwLevel are keys for selecting some availability table below */ + focusDistanceNum = 0; + focusDistanceDen = 0; + + supportedHwLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; + lensFacing = ANDROID_LENS_FACING_BACK; + switch (supportedHwLevel) { + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + capabilities = AVAILABLE_CAPABILITIES_LIMITED; + requestKeys = AVAILABLE_REQUEST_KEYS_LIMITED; + resultKeys = AVAILABLE_RESULT_KEYS_LIMITED; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LIMITED; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LIMITED); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LIMITED); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LIMITED); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LIMITED); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + capabilities = AVAILABLE_CAPABILITIES_FULL; + requestKeys = AVAILABLE_REQUEST_KEYS_FULL; + resultKeys = AVAILABLE_RESULT_KEYS_FULL; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_FULL; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_FULL); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_FULL); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_FULL); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_FULL); + break; + case ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + capabilities = AVAILABLE_CAPABILITIES_LEGACY; + requestKeys = AVAILABLE_REQUEST_KEYS_LEGACY; + resultKeys = AVAILABLE_RESULT_KEYS_LEGACY; + characteristicsKeys = AVAILABLE_CHARACTERISTICS_KEYS_LEGACY; + capabilitiesLength = ARRAY_LENGTH(AVAILABLE_CAPABILITIES_LEGACY); + requestKeysLength = ARRAY_LENGTH(AVAILABLE_REQUEST_KEYS_LEGACY); + resultKeysLength = ARRAY_LENGTH(AVAILABLE_RESULT_KEYS_LEGACY); + characteristicsKeysLength = ARRAY_LENGTH(AVAILABLE_CHARACTERISTICS_KEYS_LEGACY); + break; + default: + ALOGE("ERR(%s[%d]):Invalid supported HW level(%d)", __FUNCTION__, __LINE__, + supportedHwLevel); + break; + } + switch (lensFacing) { + case ANDROID_LENS_FACING_FRONT: + aeModes = AVAILABLE_AE_MODES_FRONT; + afModes = AVAILABLE_AF_MODES_FRONT; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_FRONT); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_FRONT); + break; + case ANDROID_LENS_FACING_BACK: + aeModes = AVAILABLE_AE_MODES_BACK; + afModes = AVAILABLE_AF_MODES_BACK; + aeModesLength = ARRAY_LENGTH(AVAILABLE_AE_MODES_BACK); + afModesLength = ARRAY_LENGTH(AVAILABLE_AF_MODES_BACK); + break; + default: + ALOGE("ERR(%s[%d]):Invalid lens facing info(%d)", __FUNCTION__, __LINE__, + lensFacing); + break; + } + + /* Android ColorCorrection Static Metadata */ + colorAberrationModes = AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES; + colorAberrationModesLength = ARRAY_LENGTH(AVAILABLE_COLOR_CORRECTION_ABERRATION_MODES); + + /* Android Control Static Metadata */ + antiBandingModes = AVAILABLE_ANTIBANDING_MODES; + exposureCompensationRange[MIN] = -4; + exposureCompensationRange[MAX] = 4; + exposureCompensationStep = 0.5f; + effectModes = AVAILABLE_EFFECT_MODES; + sceneModes = AVAILABLE_SCENE_MODES; + videoStabilizationModes = AVAILABLE_VIDEO_STABILIZATION_MODES; + awbModes = AVAILABLE_AWB_MODES; + controlModes = AVAILABLE_CONTROL_MODES; + controlModesLength = ARRAY_LENGTH(AVAILABLE_CONTROL_MODES); + max3aRegions[AE] = 1; + max3aRegions[AWB] = 1; + max3aRegions[AF] = 1; + sceneModeOverrides = SCENE_MODE_OVERRIDES; + antiBandingModesLength = 
ARRAY_LENGTH(AVAILABLE_ANTIBANDING_MODES); + effectModesLength = ARRAY_LENGTH(AVAILABLE_EFFECT_MODES); + sceneModesLength = ARRAY_LENGTH(AVAILABLE_SCENE_MODES); + videoStabilizationModesLength = ARRAY_LENGTH(AVAILABLE_VIDEO_STABILIZATION_MODES); + awbModesLength = ARRAY_LENGTH(AVAILABLE_AWB_MODES); + sceneModeOverridesLength = ARRAY_LENGTH(SCENE_MODE_OVERRIDES); + + /* Android Edge Static Metadata */ + edgeModes = AVAILABLE_EDGE_MODES; + edgeModesLength = ARRAY_LENGTH(AVAILABLE_EDGE_MODES); + + /* Android Flash Static Metadata */ + flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE; + chargeDuration = 0L; + colorTemperature = 0; + maxEnergy = 0; + + /* Android Hot Pixel Static Metadata */ + hotPixelModes = AVAILABLE_HOT_PIXEL_MODES; + hotPixelModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MODES); + + /* Android Lens Static Metadata */ + aperture = 2.2f; + fNumber = 2.2f; + filterDensity = 0.0f; + focalLength = 4.8f; + focalLengthIn35mmLength = 31; + opticalStabilization = AVAILABLE_OPTICAL_STABILIZATION; + hyperFocalDistance = 1.0f / 5.0f; + minimumFocusDistance = 1.0f / 0.05f; + shadingMapSize[WIDTH] = 1; + shadingMapSize[HEIGHT] = 1; + focusDistanceCalibration = ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED; + opticalAxisAngle[0] = 0.0f; + opticalAxisAngle[1] = 0.0f; + lensPosition[X_3D] = 0.0f; + lensPosition[Y_3D] = 20.0f; + lensPosition[Z_3D] = -5.0f; + opticalStabilizationLength = ARRAY_LENGTH(AVAILABLE_OPTICAL_STABILIZATION); + + /* Android Noise Reduction Static Metadata */ + noiseReductionModes = AVAILABLE_NOISE_REDUCTION_MODES; + noiseReductionModesLength = ARRAY_LENGTH(AVAILABLE_NOISE_REDUCTION_MODES); + + /* Android Request Static Metadata */ + maxNumOutputStreams[RAW] = 1; + maxNumOutputStreams[PROCESSED] = 3; + maxNumOutputStreams[PROCESSED_STALL] = 1; + maxNumInputStreams = 0; + maxPipelineDepth = 5; + partialResultCount = 1; + + /* Android Scaler Static Metadata */ + zoomSupport = true; + smoothZoomSupport = false; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + stallDurations = AVAILABLE_STALL_DURATIONS; + croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY; + stallDurationsLength = ARRAY_LENGTH(AVAILABLE_STALL_DURATIONS); + + /* Android Sensor Static Metadata */ + sensitivityRange[MIN] = 100; + sensitivityRange[MAX] = 1600; + colorFilterArrangement = ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB; + exposureTimeRange[MIN] = 14000L; + exposureTimeRange[MAX] = 100000000L; + maxFrameDuration = 125000000L; + sensorPhysicalSize[WIDTH] = 3.20f; + sensorPhysicalSize[HEIGHT] = 2.40f; + whiteLevel = 4000; + timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME; + referenceIlluminant1 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + referenceIlluminant2 = ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT; + blackLevelPattern[R] = 1000; + blackLevelPattern[GR] = 1000; + blackLevelPattern[GB] = 1000; + blackLevelPattern[B] = 1000; + maxAnalogSensitivity = 800; + orientation = BACK_ROTATION; + profileHueSatMapDimensions[HUE] = 1; + profileHueSatMapDimensions[SATURATION] = 2; + profileHueSatMapDimensions[VALUE] = 1; + testPatternModes = AVAILABLE_TEST_PATTERN_MODES; + testPatternModesLength = ARRAY_LENGTH(AVAILABLE_TEST_PATTERN_MODES); + colorTransformMatrix1 = COLOR_MATRIX1_2P8_3X3; + colorTransformMatrix2 = COLOR_MATRIX2_2P8_3X3; + forwardMatrix1 = FORWARD_MATRIX1_2P8_3X3; + forwardMatrix2 = FORWARD_MATRIX2_2P8_3X3; + calibration1 = UNIT_MATRIX_2P8_3X3; + calibration2 = UNIT_MATRIX_2P8_3X3; + + /* Android Statistics Static Metadata */ + 
faceDetectModes = AVAILABLE_FACE_DETECT_MODES; + faceDetectModesLength = ARRAY_LENGTH(AVAILABLE_FACE_DETECT_MODES); + histogramBucketCount = 64; + maxNumDetectedFaces = 16; + maxHistogramCount = 1000; + maxSharpnessMapValue = 1000; + sharpnessMapSize[WIDTH] = 64; + sharpnessMapSize[HEIGHT] = 64; + hotPixelMapModes = AVAILABLE_HOT_PIXEL_MAP_MODES; + hotPixelMapModesLength = ARRAY_LENGTH(AVAILABLE_HOT_PIXEL_MAP_MODES); + lensShadingMapModes = AVAILABLE_LENS_SHADING_MAP_MODES; + lensShadingMapModesLength = ARRAY_LENGTH(AVAILABLE_LENS_SHADING_MAP_MODES); + shadingAvailableModes = SHADING_AVAILABLE_MODES; + shadingAvailableModesLength = ARRAY_LENGTH(SHADING_AVAILABLE_MODES); + + /* Android Tone Map Static Metadata */ + tonemapCurvePoints = 128; + toneMapModes = AVAILABLE_TONE_MAP_MODES; + toneMapModesLength = ARRAY_LENGTH(AVAILABLE_TONE_MAP_MODES); + + /* Android LED Static Metadata */ + leds = AVAILABLE_LEDS; + ledsLength = ARRAY_LENGTH(AVAILABLE_LEDS); + + /* Android Sync Static Metadata */ + maxLatency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL; //0 + /* END of Camera HAL 3.2 Static Metadatas */ +#endif +}; +}; /* namespace android */ diff --git a/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.cpp b/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.cpp new file mode 100644 index 0000000..995a05d --- /dev/null +++ b/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.cpp @@ -0,0 +1,6989 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +/*#define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosCameraSensorInfoBase" +#include + +#include "ExynosCameraSensorInfoBase.h" +#include "ExynosExif.h" + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA +#include "ExynosCameraFusionInclude.h" +#endif + +namespace android { + +#ifdef SENSOR_NAME_GET_FROM_FILE +int g_rearSensorId = -1; +int g_frontSensorId = -1; +#endif + +/* #define CALIBRATE_BCROP5_SIZE */ /* HACK for istor EVT0 3aa h/w bcrop5 */ + +#ifdef CALIBRATE_BCROP5_SIZE +typedef int (*LutType)[SIZE_OF_LUT]; + +int calibrateB3(int srcSize, int dstSize) { + int ratio = srcSize * 256 / dstSize; + if (ratio < 0) + ALOGE("ERR(%s:%d):devide by 0", __FUNCTION__, __LINE__); + + int calibrateDstSize = srcSize * 256 / ratio; + + /* make even number */ + calibrateDstSize -= (calibrateDstSize & 0x01); + + return calibrateDstSize; +} +#endif + +int getSensorId(int camId) +{ + int sensorId = -1; + +#ifdef SENSOR_NAME_GET_FROM_FILE + int &curSensorId = (camId == CAMERA_ID_BACK) ? 
g_rearSensorId : g_frontSensorId; + + if (curSensorId < 0) { + curSensorId = getSensorIdFromFile(camId); + if (curSensorId < 0) { + ALOGE("ERR(%s): invalid sensor ID %d", __FUNCTION__, sensorId); + } + } + + sensorId = curSensorId; +#else + if (camId == CAMERA_ID_BACK) { + sensorId = MAIN_CAMERA_SENSOR_NAME; + } else if (camId == CAMERA_ID_FRONT) { + sensorId = FRONT_CAMERA_SENSOR_NAME; + } else if (camId == CAMERA_ID_BACK_1) { +#ifdef MAIN_1_CAMERA_SENSOR_NAME + sensorId = MAIN_1_CAMERA_SENSOR_NAME; +#endif + } else if (camId == CAMERA_ID_FRONT_1) { +#ifdef FRONT_1_CAMERA_SENSOR_NAME + sensorId = FRONT_1_CAMERA_SENSOR_NAME; +#endif + } else { + ALOGE("ERR(%s):Unknown camera ID(%d)", __FUNCTION__, camId); + } +#endif + + if (sensorId == SENSOR_NAME_NOTHING) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):camId(%d):sensorId == SENSOR_NAME_NOTHING, assert!!!!", + __FUNCTION__, __LINE__, camId); + } + +done: + return sensorId; +} + +void getDualCameraId(int *cameraId_0, int *cameraId_1) +{ + if (cameraId_0 == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):cameraId_0 == NULL, assert!!!!", + __FUNCTION__, __LINE__); + } + + if (cameraId_1 == NULL) { + android_printAssert(NULL, LOG_TAG, "ASSERT(%s[%d]):cameraId_1 == NULL, assert!!!!", + __FUNCTION__, __LINE__); + } + + int sensor1Name = -1; + int tempCameraId_0 = -1; + int tempCameraId_1 = -1; + +#ifdef MAIN_1_CAMERA_SENSOR_NAME + sensor1Name = MAIN_1_CAMERA_SENSOR_NAME; + + if (sensor1Name != SENSOR_NAME_NOTHING) { + tempCameraId_0 = CAMERA_ID_BACK; + tempCameraId_1 = CAMERA_ID_BACK_1; + + goto done; + } +#endif + +#ifdef FRONT_1_CAMERA_SENSOR_NAME + sensor1Name = FRONT_1_CAMERA_SENSOR_NAME; + + if (sensor1Name != SENSOR_NAME_NOTHING) { + tempCameraId_0 = CAMERA_ID_FRONT; + tempCameraId_1 = CAMERA_ID_FRONT_1; + + goto done; + } +#endif + +done: + *cameraId_0 = tempCameraId_0; + *cameraId_1 = tempCameraId_1; +} + +ExynosSensorInfoBase::ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4128; + maxSensorH = 3096; + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + minExposureTime = 0; + maxExposureTime = 0; + minWBK = 0; + maxWBK = 0; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxBasicZoomLevel = MAX_BASIC_ZOOM_LEVEL; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = false; + drcSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + 
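getSensorId() above resolves the sensor name at most once per camera: with SENSOR_NAME_GET_FROM_FILE it probes a file on the first call, stores the result in a per-camera global (g_rearSensorId / g_frontSensorId), and returns the cached value afterwards. A condensed standalone sketch of that lazy-cache shape; the probe function here is a stand-in, not the HAL's:

    #include <cstdio>

    enum { CAMERA_ID_BACK = 0, CAMERA_ID_FRONT = 1 };

    // Stand-in for getSensorIdFromFile(); the real HAL reads a device node.
    static int probeSensorIdFromFile(int camId) { return (camId == CAMERA_ID_BACK) ? 0x35 : 0x5a; }

    static int g_rearSensorId  = -1;
    static int g_frontSensorId = -1;

    static int getCachedSensorId(int camId) {
        int &cur = (camId == CAMERA_ID_BACK) ? g_rearSensorId : g_frontSensorId;
        if (cur < 0)                 // first call: probe and remember
            cur = probeSensorIdFromFile(camId);
        return cur;                  // later calls: cached value, no file I/O
    }

    int main() {
        printf("back=0x%x front=0x%x\n",
               getCachedSensorId(CAMERA_ID_BACK), getCachedSensorId(CAMERA_ID_FRONT));
        return 0;
    }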
highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + minFps = 0; + maxFps = 30; + /* flite->3aa otf support */ + flite3aaOtfSupport = true; + + rearPreviewListMax = 0; + frontPreviewListMax = 0; + rearPictureListMax = 0; + frontPictureListMax = 0; + hiddenRearPreviewListMax = 0; + hiddenFrontPreviewListMax = 0; + hiddenRearPictureListMax = 0; + hiddenFrontPictureListMax = 0; + thumbnailListMax = 0; + rearVideoListMax = 0; + frontVideoListMax = 0; + hiddenRearVideoListMax = 0; + hiddenFrontVideoListMax = 0; + rearFPSListMax = 0; + frontFPSListMax = 0; + hiddenRearFPSListMax = 0; + hiddenFrontFPSListMax = 0; + + rearPreviewList = NULL; + frontPreviewList = NULL; + rearPictureList = NULL; + frontPictureList = NULL; + hiddenRearPreviewList = NULL; + hiddenFrontPreviewList = NULL; + hiddenRearPictureList = NULL; + hiddenFrontPictureList = NULL; + thumbnailList = NULL; + rearVideoList = NULL; + frontVideoList = NULL; + hiddenRearVideoList = NULL; + hiddenFrontVideoList = NULL; + rearFPSList = NULL; + frontFPSList = NULL; + hiddenRearFPSList = NULL; + hiddenFrontFPSList = NULL; + + meteringList = + METERING_MODE_MATRIX + | METERING_MODE_CENTER + | METERING_MODE_SPOT + ; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + | FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + | FOCUS_MODE_FIXED + | FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + | WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + | WHITE_BALANCE_TWILIGHT + | WHITE_BALANCE_SHADE; + + isoValues = + ISO_AUTO + | ISO_100 + | ISO_200 + | ISO_400 + | ISO_800; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + videoSizeLutHighSpeed240Max = 0; + liveBroadcastSizeLutMax = 0; + depthMapSizeLutMax = 0; + fastAeStableLutMax = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + dualPreviewSizeLut = NULL; + dualVideoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + videoSizeLutHighSpeed240 = NULL; + vtcallSizeLut = NULL; + liveBroadcastSizeLut = NULL; + depthMapSizeLut = NULL; + fastAeStableLut = NULL; + sizeTableSupport = false; + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + dof = new DOF_BASE; +#endif +} + +ExynosSensorIMX135Base::ExynosSensorIMX135Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4144; + maxSensorH = 3106; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + 
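The paired *Num / *Den fields above (fNumberNum = 22 over fNumberDen = 10, focalLengthNum = 420 over focalLengthDen = 100, and so on) keep each optical parameter as an integer rational, the same shape EXIF rational tags use, while the HAL3 static-metadata blocks elsewhere in this file set the matching floating-point fields (e.g. fNumber = 2.2f). A tiny illustrative conversion helper, not part of the HAL:

    #include <cstdio>

    // Convert a num/den pair to its floating-point value.
    static double rationalToDouble(int num, int den) {
        return (den != 0) ? (double)num / (double)den : 0.0;
    }

    int main() {
        // Values from the base constructor above.
        printf("f-number     f/%.1f\n", rationalToDouble(22, 10));    // f/2.2
        printf("focal length %.1f mm\n", rationalToDouble(420, 100)); // 4.2 mm
        printf("aperture     %.2f\n",    rationalToDouble(227, 100)); // 2.27
        return 0;
    }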
focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX135_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX135_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX135_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX135_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX135_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + 
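Every *ListMax / *LutMax above is a row count derived from a flat int table: each entry occupies SIZE_OF_RESOLUTION (or SIZE_OF_LUT) ints, so dividing the table's total size by that stride yields the number of entries without a separate count constant to keep in sync. The same arithmetic on a dummy table (the 3-int row layout here is assumed for illustration only):

    #include <cstdio>

    #define SIZE_OF_RESOLUTION 3   // assumed row stride for this sketch only

    static const int DUMMY_PREVIEW_LIST[][SIZE_OF_RESOLUTION] = {
        { 1920, 1080, 0 },
        { 1280,  720, 0 },
        {  640,  480, 1 },
    };

    int main() {
        // Same expression shape as sizeof(LIST) / (sizeof(int) * SIZE_OF_RESOLUTION) above.
        const unsigned rows =
            (unsigned)(sizeof(DUMMY_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION));
        printf("%u preview entries\n", rows);   // prints 3
        return 0;
    }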
rearVideoListMax = sizeof(IMX135_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX135_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX135_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX135_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX135_PREVIEW_LIST; + rearPictureList = IMX135_PICTURE_LIST; + hiddenRearPreviewList = IMX135_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX135_HIDDEN_PICTURE_LIST; + thumbnailList = IMX135_THUMBNAIL_LIST; + rearVideoList = IMX135_VIDEO_LIST; + hiddenRearVideoList = IMX135_HIDDEN_VIDEO_LIST; + rearFPSList = IMX135_FPS_RANGE_LIST; + hiddenRearFPSList = IMX135_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorIMX134Base::ExynosSensorIMX134Base() : ExynosSensorInfoBase() +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + maxPreviewW = 3264; + maxPreviewH = 2448; +#else + maxPreviewW = 1920; + maxPreviewH = 1080; +#endif + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 24; + fNumberDen = 10; + focalLengthNum = 340; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 253; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 56.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 44.3f; + horizontalViewAngle[SIZE_RATIO_1_1] = 34.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 48.1f; + horizontalViewAngle[SIZE_RATIO_5_4] = 44.3f; + horizontalViewAngle[SIZE_RATIO_5_3] = 52.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 44.3f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = false; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_BEAUTY_FACE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + 
/*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX134) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX134) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX134) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX134) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX134) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_IMX134; + videoSizeLut = VIDEO_SIZE_LUT_IMX134; + pictureSizeLut = PICTURE_SIZE_LUT_IMX134; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX134; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX134; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX134_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX134_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX134_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX134_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX134_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX134_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX134_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX134_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX134_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX134_PREVIEW_LIST; + rearPictureList = IMX134_PICTURE_LIST; + hiddenRearPreviewList = IMX134_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX134_HIDDEN_PICTURE_LIST; + thumbnailList = IMX134_THUMBNAIL_LIST; + rearVideoList = IMX134_VIDEO_LIST; + hiddenRearVideoList = IMX134_HIDDEN_VIDEO_LIST; + rearFPSList = IMX134_FPS_RANGE_LIST; + hiddenRearFPSList = IMX134_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K3L2Base::ExynosSensorS5K3L2Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4144; + maxSensorH = 3106; + 
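The sensorMarginW/H fields account for the gap between the raw readout and the largest usable image: in the IMX135 block above, maxSensorW/H = 4144 x 3106 minus sensorMarginW/H = 16 x 10 is exactly maxPictureW/H = 4128 x 3096, and the 3L2 block that starts here uses the same 16 x 10 margin. A quick check of that relationship:

    #include <cstdio>

    int main() {
        // Values copied from the ExynosSensorIMX135Base block above.
        const int maxSensorW = 4144, maxSensorH = 3106;
        const int sensorMarginW = 16, sensorMarginH = 10;
        const int maxPictureW = 4128, maxPictureH = 3096;

        // Usable area = full readout minus the margin pixels on each axis.
        printf("usable %dx%d vs picture %dx%d\n",
               maxSensorW - sensorMarginW, maxSensorH - sensorMarginH,
               maxPictureW, maxPictureH);      // both 4128x3096
        return 0;
    }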
sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 20; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + +#ifdef USE_SUBDIVIDED_EV + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 2056; + highSpeedRecording60H = 1152; + highSpeedRecording120W = 1024; + highSpeedRecording120H = 574; + scalableSensorSupport = true; + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3L2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = 0; + previewSizeLut = PREVIEW_SIZE_LUT_3L2_BNS; + videoSizeLut = VIDEO_SIZE_LUT_3L2; + videoSizeBnsLut = VIDEO_SIZE_LUT_3L2_BNS; + vtcallSizeLut = NULL; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_3L2) / 
(sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_3L2; + videoSizeLut = VIDEO_SIZE_LUT_3L2; + videoSizeBnsLut = NULL; + vtcallSizeLut = VTCALL_SIZE_LUT_3L2; + } + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3L2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3L2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3L2) / (sizeof(int) * SIZE_OF_LUT); + + pictureSizeLut = PICTURE_SIZE_LUT_3L2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3L2; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3L2; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3L2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3L2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3L2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3L2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3L2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3L2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3L2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3L2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3L2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3L2_PREVIEW_LIST; + rearPictureList = S5K3L2_PICTURE_LIST; + hiddenRearPreviewList = S5K3L2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3L2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3L2_THUMBNAIL_LIST; + rearVideoList = S5K3L2_VIDEO_LIST; + hiddenRearVideoList = S5K3L2_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3L2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3L2_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K3L8Base::ExynosSensorS5K3L8Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 4208; + maxSensorH = 3120; + sensorMarginW = 0; + sensorMarginH = 0; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 20; + fNumberDen = 10; + focalLengthNum = 365; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 76.5f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + minExposureTime = 32; + maxExposureTime = 10000000; + minWBK = 2300; + maxWBK = 10000; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + 
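The USE_SUBDIVIDED_EV branch above changes only the granularity of exposure compensation, not its span: an index range of -20..20 with a 0.1 EV step and a range of -4..4 with a 0.5 EV step both cover +/-2 EV. The arithmetic, spelled out:

    #include <cstdio>

    int main() {
        // USE_SUBDIVIDED_EV build: finer steps.
        const int   minIdxFine = -20, maxIdxFine = 20;
        const float stepFine   = 0.1f;

        // Default build: coarser steps.
        const int   minIdxCoarse = -4, maxIdxCoarse = 4;
        const float stepCoarse   = 0.5f;

        printf("subdivided: %+.1f .. %+.1f EV\n", minIdxFine * stepFine,     maxIdxFine * stepFine);
        printf("default:    %+.1f .. %+.1f EV\n", minIdxCoarse * stepCoarse, maxIdxCoarse * stepCoarse);
        return 0;   // both lines print -2.0 .. +2.0
    }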
autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3L8_BNS_15) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3L8_BDS) / (sizeof(int) * SIZE_OF_LUT); +#endif + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3L8_BDS) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3L8) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_3L8_BNS) / (sizeof(int) * SIZE_OF_LUT); + + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_HIGH_SPEED_3L8_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_HIGH_SPEED_3L8_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_3L8_BNS_15; +#else + previewSizeLut = PREVIEW_SIZE_LUT_3L8_BDS; +#endif + videoSizeLut = VIDEO_SIZE_LUT_3L8_BDS; + videoSizeBnsLut = VIDEO_SIZE_LUT_3L8_BNS_15; + pictureSizeLut = PICTURE_SIZE_LUT_3L8; + +#if defined(USE_BNS_DUAL_PREVIEW) +#if defined(DUAL_BNS_RATIO) && (DUAL_BNS_RATIO == 1500) + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3L8_BNS_15; +#else + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3L8_BNS_20; +#endif +#else + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3L8_BDS; +#endif // USE_BNS_DUAL_PREVIEW + +#if defined(USE_BNS_DUAL_RECORDING) +#if defined(DUAL_BNS_RATIO) && (DUAL_BNS_RATIO == 1500) + dualVideoSizeLut = VIDEO_SIZE_LUT_3L8_BNS_15; +#else + dualVideoSizeLut = VIDEO_SIZE_LUT_3L8_BNS_20; +#endif +#else + dualVideoSizeLut = VIDEO_SIZE_LUT_3L8_BDS; +#endif // USE_BNS_DUAL_RECORDING + + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_HIGH_SPEED_3L8_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_HIGH_SPEED_3L8_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_3L8_BNS; + sizeTableSupport = true; + } else { 
+ previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + vtcallSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3L8_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3L8_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3L8_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3L8_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3L8_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3L8_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3L8_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3L8_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3L8_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3L8_PREVIEW_LIST; + rearPictureList = S5K3L8_PICTURE_LIST; + hiddenRearPreviewList = S5K3L8_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3L8_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3L8_THUMBNAIL_LIST; + rearVideoList = S5K3L8_VIDEO_LIST; + hiddenRearVideoList = S5K3L8_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3L8_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3L8_HIDDEN_FPS_RANGE_LIST; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K3L8_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K3L8_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K3L8_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K3L8_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3L8_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K3L8_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K3L8_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K3L8_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K3L8_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K3L8_PREVIEW_LIST; + frontPictureList = S5K3L8_PICTURE_LIST; + hiddenFrontPreviewList = S5K3L8_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K3L8_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3L8_THUMBNAIL_LIST; + frontVideoList = S5K3L8_VIDEO_LIST; + hiddenFrontVideoList = S5K3L8_HIDDEN_VIDEO_LIST; + frontFPSList = S5K3L8_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K3L8_HIDDEN_FPS_RANGE_LIST; + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + dof = new DOF_3L8; +#endif +}; + +ExynosSensorS5K3L8DualBdsBase::ExynosSensorS5K3L8DualBdsBase() : ExynosSensorS5K3L8Base() +{ + /* this sensor is for BDS when Dual */ + if (bnsSupport == true) { + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3L8_BDS; + dualVideoSizeLut = VIDEO_SIZE_LUT_3L8_BDS; + } else { + dualPreviewSizeLut = NULL; + dualVideoSizeLut = NULL; + } +} + +#if 0 +ExynosSensorS5K2P2Base::ExynosSensorS5K2P2Base() : ExynosSensorInfoBase() +{ + 
maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 28; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P2) / (sizeof(int) * SIZE_OF_LUT); +#endif +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P2_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = 
sizeof(VIDEO_SIZE_LUT_2P2) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_2P2_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_2P2; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_2P2_BNS; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_2P2_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_2P2; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_2P2_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2P2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_2P2_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2P2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P2_PREVIEW_LIST; + rearPictureList = S5K2P2_PICTURE_LIST; + hiddenRearPreviewList = S5K2P2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P2_THUMBNAIL_LIST; + rearVideoList = S5K2P2_VIDEO_LIST; + hiddenRearVideoList = S5K2P2_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2P2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P2_HIDDEN_FPS_RANGE_LIST; +}; +#endif + +ExynosSensorS5K3P3Base::ExynosSensorS5K3P3Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; +#ifdef USE_CAMERA2_API_SUPPORT /* HACK : current HAL3.2 running with 4:3 ratio */ + maxPreviewW = 1440; + maxPreviewH = 1080; +#endif + maxPictureW = 4608; + maxPictureH = 3456; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4624; + maxSensorH = 3466; + sensorMarginW = 16; + sensorMarginH = 10; + //check until here + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 480; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + 
horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + +#ifdef USE_SUBDIVIDED_EV + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_BEAUTY_FACE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT*/ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3P3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3P3) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3P3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_3P3; + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3P3_BNS; + videoSizeLut = VIDEO_SIZE_LUT_3P3; + videoSizeBnsLut = VIDEO_SIZE_LUT_3P3_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_3P3; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3P3_BNS; + 
videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3P3_BNS; + + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3P3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3P3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3P3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3P3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3P3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3P3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3P3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3P3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3P3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3P3_PREVIEW_LIST; + rearPictureList = S5K3P3_PICTURE_LIST; + hiddenRearPreviewList = S5K3P3_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3P3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3P3_THUMBNAIL_LIST; + rearVideoList = S5K3P3_VIDEO_LIST; + hiddenRearVideoList = S5K3P3_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3P3_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3P3_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K2P2_12MBase::ExynosSensorS5K2P2_12MBase() : ExynosSensorInfoBase() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 4608; + maxPictureH = 2592; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 4624; + maxSensorH = 2604; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 409; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | 
EFFECT_BEAUTY_FACE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT*/ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P2_12M) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P2_12M) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_12M_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_2P2_12M_BNS; + videoSizeLut = VIDEO_SIZE_LUT_2P2_12M; + videoSizeBnsLut = VIDEO_SIZE_LUT_2P2_12M_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2P2_12M; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P2_12M_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P2_12M_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_2P2_12M_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P2_12M_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P2_12M_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P2_12M_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P2_12M_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P2_12M_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P2_12M_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P2_12M_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = 
sizeof(S5K2P2_12M_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P2_12M_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P2_12M_PREVIEW_LIST; + rearPictureList = S5K2P2_12M_PICTURE_LIST; + hiddenRearPreviewList = S5K2P2_12M_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P2_12M_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P2_12M_THUMBNAIL_LIST; + rearVideoList = S5K2P2_12M_VIDEO_LIST; + hiddenRearVideoList = S5K2P2_12M_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2P2_12M_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P2_12M_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K2P3Base::ExynosSensorS5K2P3Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2990; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 480; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT*/ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + 
highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2P3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P3_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_2P3_BNS; + videoSizeLut = VIDEO_SIZE_LUT_2P3_BNS; + videoSizeBnsLut = VIDEO_SIZE_LUT_2P3_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2P3; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2P3_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2P3_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2P3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2P3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2P3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2P3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2P3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2P3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2P3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2P3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2P3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2P3_PREVIEW_LIST; + rearPictureList = S5K2P3_PICTURE_LIST; + hiddenRearPreviewList = S5K2P3_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2P3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2P3_THUMBNAIL_LIST; + rearVideoList = S5K2P3_VIDEO_LIST; + hiddenRearVideoList = S5K2P3_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2P3_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2P3_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K2T2Base::ExynosSensorS5K2T2Base() : ExynosSensorInfoBase() +{ +#if defined(ENABLE_13MP_FULL_FRAME) + maxPreviewW = 4800; + maxPreviewH = 2700; +#else + maxPreviewW = 3840; + maxPreviewH = 2160; +#endif + maxPictureW = 5952; + maxPictureH = 3348; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5968; + maxSensorH = 3368; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.13f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + 
horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 28; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_2T2) / (sizeof(int) * SIZE_OF_LUT); +#endif +#ifdef ENABLE_13MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2T2_13MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#elif defined(ENABLE_8MP_FULL_FRAME) + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2T2_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_2T2) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_2T2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2T2_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = 
PREVIEW_SIZE_LUT_2T2_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_2T2; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_2T2_BNS; +#ifdef ENABLE_13MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_2T2_13MP_FULL; +#elif defined(ENABLE_8MP_FULL_FRAME) + videoSizeLut = VIDEO_SIZE_LUT_2T2_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_2T2; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_2T2_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_2T2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_2T2_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_2T2_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_2T2_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K2T2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K2T2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K2T2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K2T2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K2T2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K2T2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K2T2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K2T2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K2T2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K2T2_PREVIEW_LIST; + rearPictureList = S5K2T2_PICTURE_LIST; + hiddenRearPreviewList = S5K2T2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K2T2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K2T2_THUMBNAIL_LIST; + rearVideoList = S5K2T2_VIDEO_LIST; + hiddenRearVideoList = S5K2T2_HIDDEN_VIDEO_LIST; + rearFPSList = S5K2T2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K2T2_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K6B2Base::ExynosSensorS5K6B2Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 1920; + maxPictureH = 1080; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1936; + maxSensorH = 1090; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 220; + fNumberDen = 100; + focalLengthNum = 186; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 240; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 55.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 54.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 60.0f; + horizontalViewAngle[SIZE_RATIO_5_4] = 54.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 64.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 54.2f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = 
MAX_ZOOM_RATIO_FRONT; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + | EFFECT_BEAUTY_FACE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; +#if defined(USE_FRONT_PREVIEW_DRC) + drcSupport = true; +#endif + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K6B2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K6B2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K6B2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K6B2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K6B2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K6B2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K6B2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K6B2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K6B2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K6B2_PREVIEW_LIST; + frontPictureList = S5K6B2_PICTURE_LIST; + hiddenFrontPreviewList = S5K6B2_HIDDEN_PREVIEW_LIST; + 
hiddenFrontPictureList = S5K6B2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K6B2_THUMBNAIL_LIST; + frontVideoList = S5K6B2_VIDEO_LIST; + hiddenFrontVideoList = S5K6B2_HIDDEN_VIDEO_LIST; + frontFPSList = S5K6B2_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K6B2_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorSR261Base::ExynosSensorSR261Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 1920; + maxPictureH = 1080; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1936; + maxSensorH = 1090; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 32; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + 
videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(SR261_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(SR261_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(SR261_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(SR261_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(SR261_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(SR261_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(SR261_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(SR261_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(SR261_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = SR261_PREVIEW_LIST; + frontPictureList = SR261_PICTURE_LIST; + hiddenFrontPreviewList = SR261_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = SR261_HIDDEN_PICTURE_LIST; + thumbnailList = SR261_THUMBNAIL_LIST; + frontVideoList = SR261_VIDEO_LIST; + hiddenFrontVideoList = SR261_HIDDEN_VIDEO_LIST; + frontFPSList = SR261_FPS_RANGE_LIST; + hiddenFrontFPSList = SR261_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorSR259Base::ExynosSensorSR259Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1440; + maxPreviewH = 1080; + maxPictureW = 1616; + maxPictureH = 1212; + maxVideoW = 1280; + maxVideoH = 720; + maxSensorW = 1632; + maxSensorH = 1228; + sensorMarginW = 16; + sensorMarginH = 16; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 32; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | 
EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(SR259_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(SR259_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(SR259_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(SR259_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(SR259_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(SR259_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(SR259_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(SR259_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(SR259_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = SR259_PREVIEW_LIST; + frontPictureList = SR259_PICTURE_LIST; + hiddenFrontPreviewList = SR259_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = SR259_HIDDEN_PICTURE_LIST; + thumbnailList = SR259_THUMBNAIL_LIST; + frontVideoList = SR259_VIDEO_LIST; + hiddenFrontVideoList = SR259_HIDDEN_VIDEO_LIST; + frontFPSList = SR259_FPS_RANGE_LIST; + hiddenFrontFPSList = SR259_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K3H7Base::ExynosSensorS5K3H7Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3248; + maxPictureH = 2438; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3264; + maxSensorH = 2448; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 
10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3H7) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3H7) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3H7) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3H7) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3H7) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = 
PREVIEW_SIZE_LUT_3H7; + videoSizeLut = VIDEO_SIZE_LUT_3H7; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_3H7; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3H7; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3H7; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3H7_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3H7_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3H7_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3H7_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3H7_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3H7_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3H7_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3H7_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3H7_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3H7_PREVIEW_LIST; + rearPictureList = S5K3H7_PICTURE_LIST; + hiddenRearPreviewList = S5K3H7_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3H7_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3H7_THUMBNAIL_LIST; + rearVideoList = S5K3H7_VIDEO_LIST; + hiddenRearVideoList = S5K3H7_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3H7_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3H7_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K3H5Base::ExynosSensorS5K3H5Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3248; + maxPictureH = 2438; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3264; + maxSensorH = 2448; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | 
FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3H5_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3H5_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3H5_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3H5_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3H5_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3H5_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3H5_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3H5_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3H5_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3H5_PREVIEW_LIST; + rearPictureList = S5K3H5_PICTURE_LIST; + hiddenRearPreviewList = S5K3H5_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3H5_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3H5_THUMBNAIL_LIST; + rearVideoList = S5K3H5_VIDEO_LIST; + hiddenRearVideoList = S5K3H5_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3H5_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3H5_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K4H5Base::ExynosSensorS5K4H5Base() : ExynosSensorInfoBase() +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + maxPreviewW = 3264; + maxPreviewH = 2448; +#else + maxPreviewW = 2560; + maxPreviewH = 1440; +#endif + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 2560; + maxVideoH = 1440; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 290; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 59.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 59.0f; + 
horizontalViewAngle[SIZE_RATIO_1_1] = 46.1f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 48.8f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH + ; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = 
PREVIEW_SIZE_LUT_4H5; + videoSizeLut = VIDEO_SIZE_LUT_4H5; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_4H5; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K4H5_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K4H5_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K4H5_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K4H5_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K4H5_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K4H5_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K4H5_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K4H5_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K4H5_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K4H5_PREVIEW_LIST; + rearPictureList = S5K4H5_PICTURE_LIST; + hiddenRearPreviewList = S5K4H5_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K4H5_HIDDEN_PICTURE_LIST; + thumbnailList = S5K4H5_THUMBNAIL_LIST; + rearVideoList = S5K4H5_VIDEO_LIST; + hiddenRearVideoList = S5K4H5_HIDDEN_VIDEO_LIST; + rearFPSList = S5K4H5_FPS_RANGE_LIST; + hiddenRearFPSList = S5K4H5_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K4H5YCBase::ExynosSensorS5K4H5YCBase() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 24; + fNumberDen = 10; + focalLengthNum = 330; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 56.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 43.4f; + horizontalViewAngle[SIZE_RATIO_1_1] = 33.6f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_RED_YELLOW + | 
EFFECT_BLUE + | EFFECT_COLD_VINTAGE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_4H5; + videoSizeLut = VIDEO_SIZE_LUT_4H5; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_4H5; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4H5; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4H5; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K4H5_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K4H5_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K4H5_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K4H5_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K4H5_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K4H5_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K4H5_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K4H5_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K4H5_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K4H5_PREVIEW_LIST; + rearPictureList = S5K4H5_PICTURE_LIST; + hiddenRearPreviewList = 
S5K4H5_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K4H5_HIDDEN_PICTURE_LIST; + thumbnailList = S5K4H5_THUMBNAIL_LIST; + rearVideoList = S5K4H5_VIDEO_LIST; + hiddenRearVideoList = S5K4H5_HIDDEN_VIDEO_LIST; + rearFPSList = S5K4H5_FPS_RANGE_LIST; + hiddenRearFPSList = S5K4H5_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K3M2Base::ExynosSensorS5K3M2Base() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 4144; + maxSensorH = 3106; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + /*| EFFECT_RED_YELLOW*/ + /*| EFFECT_BLUE*/ + /*| EFFECT_COLD_VINTAGE*/ + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + 
highSpeedRecording60W = 2056; + highSpeedRecording60H = 1152; + highSpeedRecording120W = 1020; + highSpeedRecording120H = 574; + scalableSensorSupport = true; + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_3M2_BNS; + videoSizeLut = VIDEO_SIZE_LUT_3M2; + videoSizeBnsLut = VIDEO_SIZE_LUT_3M2_BNS; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_3M2; + videoSizeLut = VIDEO_SIZE_LUT_3M2; + videoSizeBnsLut = NULL; + } + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3M2) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLut = PICTURE_SIZE_LUT_3M2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_3M2; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_3M2; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3M2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3M2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3M2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3M2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3M2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3M2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3M2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3M2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3M2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3M2_PREVIEW_LIST; + rearPictureList = S5K3M2_PICTURE_LIST; + hiddenRearPreviewList = S5K3M2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3M2_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3M2_THUMBNAIL_LIST; + rearVideoList = S5K3M2_VIDEO_LIST; + hiddenRearVideoList = S5K3M2_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3M2_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3M2_HIDDEN_FPS_RANGE_LIST; +} + +ExynosSensorS5K3M3Base::ExynosSensorS5K3M3Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 4128; + maxPictureH = 3096; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 4208; + maxSensorH = 3120; + sensorMarginW = 0; + sensorMarginH = 0; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 20; + fNumberDen = 10; + focalLengthNum = 365; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 76.5f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + 
exposureCompensationStep = 0.5f; +#endif + minExposureTime = 32; + maxExposureTime = 10000000; + minWBK = 2300; + maxWBK = 10000; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M3_BNS_15) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M3_BDS) / (sizeof(int) * SIZE_OF_LUT); +#endif + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3M3_BDS) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3M3) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_3M3_BNS) / (sizeof(int) * SIZE_OF_LUT); + + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_3M3_BNS_15; +#else + previewSizeLut = PREVIEW_SIZE_LUT_3M3_BDS; +#endif + videoSizeLut = VIDEO_SIZE_LUT_3M3_BDS; + videoSizeBnsLut = VIDEO_SIZE_LUT_3M3_BNS_15; + pictureSizeLut = PICTURE_SIZE_LUT_3M3; + +#if defined(USE_BNS_DUAL_PREVIEW) +#if defined(DUAL_BNS_RATIO) && (DUAL_BNS_RATIO == 1500) + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3M3_BNS_15; +#else + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3M3_BNS_20; +#endif +#else + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3M3_BDS; +#endif // USE_BNS_DUAL_PREVIEW + +#if defined(USE_BNS_DUAL_RECORDING) +#if defined(DUAL_BNS_RATIO) && (DUAL_BNS_RATIO == 1500) + 
dualVideoSizeLut = VIDEO_SIZE_LUT_3M3_BNS_15; +#else + dualVideoSizeLut = VIDEO_SIZE_LUT_3M3_BNS_20; +#endif +#else + dualVideoSizeLut = VIDEO_SIZE_LUT_3M3_BDS; +#endif // USE_BNS_DUAL_RECORDING + + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_3M3_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + vtcallSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(S5K3M3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(S5K3M3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(S5K3M3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(S5K3M3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3M3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(S5K3M3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(S5K3M3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(S5K3M3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(S5K3M3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = S5K3M3_PREVIEW_LIST; + rearPictureList = S5K3M3_PICTURE_LIST; + hiddenRearPreviewList = S5K3M3_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = S5K3M3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3M3_THUMBNAIL_LIST; + rearVideoList = S5K3M3_VIDEO_LIST; + hiddenRearVideoList = S5K3M3_HIDDEN_VIDEO_LIST; + rearFPSList = S5K3M3_FPS_RANGE_LIST; + hiddenRearFPSList = S5K3M3_HIDDEN_FPS_RANGE_LIST; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K3M3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K3M3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K3M3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K3M3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K3M3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K3M3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K3M3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K3M3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K3M3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K3M3_PREVIEW_LIST; + frontPictureList = S5K3M3_PICTURE_LIST; + hiddenFrontPreviewList = S5K3M3_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K3M3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K3M3_THUMBNAIL_LIST; + frontVideoList = S5K3M3_VIDEO_LIST; + hiddenFrontVideoList = S5K3M3_HIDDEN_VIDEO_LIST; + frontFPSList = S5K3M3_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K3M3_HIDDEN_FPS_RANGE_LIST; + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + dof = new DOF_3M3; +#endif +}; + 
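The row-count idiom used throughout these constructors divides the total byte size of a statically sized int table by the byte size of one row (SIZE_OF_LUT or SIZE_OF_RESOLUTION ints per row), which only works because the LUTs are file-scope arrays whose extent is visible at compile time. A minimal sketch of the same arithmetic, using a hypothetical EXAMPLE_PREVIEW_LIST table and a local ROW_WIDTH constant rather than the real tables from the size-table headers:

#include <cstddef>

/* Hypothetical stand-in for a size table; each row is ROW_WIDTH ints
 * (width, height, ratio id), mirroring the SIZE_OF_RESOLUTION layout. */
static const int ROW_WIDTH = 3;
static const int EXAMPLE_PREVIEW_LIST[][ROW_WIDTH] = {
    { 1920, 1080, 0 },   /* 16:9 */
    { 1440, 1080, 1 },   /* 4:3  */
    { 1088, 1088, 2 },   /* 1:1  */
};

int main()
{
    /* Same arithmetic as the constructors: total bytes / bytes per row. */
    const size_t rows =
        sizeof(EXAMPLE_PREVIEW_LIST) / (sizeof(int) * ROW_WIDTH);
    return static_cast<int>(rows);   /* yields 3 */
}

If the table were passed around as a pointer instead of referenced as the array itself, sizeof would report the pointer size and the count would be wrong, which is why every constructor computes the *ListMax values directly against the named arrays.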
+ExynosSensorS5K3M3DualBdsBase::ExynosSensorS5K3M3DualBdsBase() : ExynosSensorS5K3M3Base() +{ + /* this sensor is for BDS when Dual */ + if (bnsSupport == true) { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_3M3_BDS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_3M3_BDS) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_3M3) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_3M3_BNS) / (sizeof(int) * SIZE_OF_LUT); + + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_3M3_BDS; + videoSizeLut = VIDEO_SIZE_LUT_3M3_BDS; + videoSizeBnsLut = VIDEO_SIZE_LUT_3M3_BNS_15; + pictureSizeLut = PICTURE_SIZE_LUT_3M3; + + dualPreviewSizeLut = PREVIEW_SIZE_LUT_3M3_BDS; + dualVideoSizeLut = VIDEO_SIZE_LUT_3M3_BDS; + + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_HIGH_SPEED_3M3_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_3M3_BNS; + + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + vtcallSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + + sizeTableSupport = false; + } +}; + +ExynosSensorS5K5E2Base::ExynosSensorS5K5E2Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 2560; + maxPictureH = 1920; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2576; + maxSensorH = 1932; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 69.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 55.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_RED_YELLOW + | 
EFFECT_BLUE + | EFFECT_COLD_VINTAGE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT*/; + + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_5E2_YC) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_5E2_YC) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_5E2_YC) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_5E2_YC; + pictureSizeLut = PICTURE_SIZE_LUT_5E2_YC; + videoSizeLut = VIDEO_SIZE_LUT_5E2_YC; + dualVideoSizeLut = VIDEO_SIZE_LUT_5E2_YC; + videoSizeBnsLut = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K5E2_YC_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K5E2_YC_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K5E2_YC_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K5E2_YC_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K5E2_YC_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K5E2_YC_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K5E2_YC_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K5E2_YC_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K5E2_YC_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K5E2_YC_PREVIEW_LIST; + frontPictureList = S5K5E2_YC_PICTURE_LIST; + hiddenFrontPreviewList = S5K5E2_YC_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K5E2_YC_HIDDEN_PICTURE_LIST; + thumbnailList = S5K5E2_YC_THUMBNAIL_LIST; + frontVideoList = S5K5E2_YC_VIDEO_LIST; + hiddenFrontVideoList = S5K5E2_YC_HIDDEN_VIDEO_LIST; + frontFPSList = S5K5E2_YC_FPS_RANGE_LIST; + 
hiddenFrontFPSList = S5K5E2_YC_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K5E8Base::ExynosSensorS5K5E8Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 400; + maxPreviewH = 400; + maxPictureW = 2576; + maxPictureH = 1934; + maxVideoW = 2576; + maxVideoH = 1934; + maxSensorW = 2576; + maxSensorH = 1932; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 69.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 55.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT*/; + + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + 
highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_5E8) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_5E8) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_5E8) / (sizeof(int) * SIZE_OF_LUT); + previewSizeLut = PREVIEW_SIZE_LUT_5E8; + pictureSizeLut = PICTURE_SIZE_LUT_5E8; + videoSizeLut = VIDEO_SIZE_LUT_5E8; + dualVideoSizeLut = VIDEO_SIZE_LUT_5E8; + videoSizeBnsLut = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K5E8_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K5E8_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K5E8_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K5E8_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K5E8_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K5E8_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K5E8_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K5E8_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K5E8_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K5E8_PREVIEW_LIST; + frontPictureList = S5K5E8_PICTURE_LIST; + hiddenFrontPreviewList = S5K5E8_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K5E8_HIDDEN_PICTURE_LIST; + thumbnailList = S5K5E8_THUMBNAIL_LIST; + frontVideoList = S5K5E8_VIDEO_LIST; + hiddenFrontVideoList = S5K5E8_HIDDEN_VIDEO_LIST; + frontFPSList = S5K5E8_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K5E8_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K6A3Base::ExynosSensorS5K6A3Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1280; + maxPreviewH = 720; + maxPictureW = 1392; + maxPictureH = 1402; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1408; + maxSensorH = 1412; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + 
hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF; + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH;*/ + + focusModeList = + /*FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY; + /*| FOCUS_MODE_INFINITY*/ + /*| FOCUS_MODE_MACRO*/ + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K6A3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K6A3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K6A3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K6A3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K6A3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K6A3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K6A3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K6A3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K6A3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K6A3_PREVIEW_LIST; + frontPictureList = S5K6A3_PICTURE_LIST; + hiddenFrontPreviewList = S5K6A3_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K6A3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K6A3_THUMBNAIL_LIST; + frontVideoList = S5K6A3_VIDEO_LIST; + hiddenFrontVideoList = S5K6A3_HIDDEN_VIDEO_LIST; + frontFPSList = S5K6A3_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K6A3_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorIMX175Base::ExynosSensorIMX175Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + 
sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 26; + fNumberDen = 10; + focalLengthNum = 370; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 276; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 62.2f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + /*| FLASH_MODE_RED_EYE*/ + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + /* + burstPanoramaW = 3264; + burstPanoramaH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + */ + bnsSupport = false; + + if (bnsSupport == true) { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + } else { + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX175) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX175) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX175) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX175) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = 
sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX175) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_IMX175; + videoSizeLut = VIDEO_SIZE_LUT_IMX175; + videoSizeBnsLut = NULL; + pictureSizeLut = PICTURE_SIZE_LUT_IMX175; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX175; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX175; + sizeTableSupport = true; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX175_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX175_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX175_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX175_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX175_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX175_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX175_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX175_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX175_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX175_PREVIEW_LIST; + rearPictureList = IMX175_PICTURE_LIST; + hiddenRearPreviewList = IMX175_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX175_HIDDEN_PICTURE_LIST; + thumbnailList = IMX175_THUMBNAIL_LIST; + rearVideoList = IMX175_VIDEO_LIST; + hiddenRearVideoList = IMX175_HIDDEN_VIDEO_LIST; + rearFPSList = IMX175_FPS_RANGE_LIST; + hiddenRearFPSList = IMX175_HIDDEN_FPS_RANGE_LIST; +}; + +#if 0 +ExynosSensorIMX240Base::ExynosSensorIMX240Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 28; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| 
EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX240) / (sizeof(int) * SIZE_OF_LUT); +#endif + +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX240) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_IMX240_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_IMX240; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_IMX240_BNS; + +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX240_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX240; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_IMX240_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_IMX240; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX240_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX240_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX240_PICTURE_LIST) / (sizeof(int) * 
SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX240_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX240_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX240_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX240_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX240_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX240_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX240_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX240_PREVIEW_LIST; + rearPictureList = IMX240_PICTURE_LIST; + hiddenRearPreviewList = IMX240_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX240_HIDDEN_PICTURE_LIST; + thumbnailList = IMX240_THUMBNAIL_LIST; + rearVideoList = IMX240_VIDEO_LIST; + hiddenRearVideoList = IMX240_HIDDEN_VIDEO_LIST; + rearFPSList = IMX240_FPS_RANGE_LIST; + hiddenRearFPSList = IMX240_HIDDEN_FPS_RANGE_LIST; +}; +#endif + +ExynosSensorIMX228Base::ExynosSensorIMX228Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5952; + maxPictureH = 3348; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5968; + maxSensorH = 3368; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.13f; + horizontalViewAngle[SIZE_RATIO_4_3] = 48.8f; + horizontalViewAngle[SIZE_RATIO_1_1] = 37.4f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 28; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 2; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + 
| SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX228_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX228) / (sizeof(int) * SIZE_OF_LUT); +#endif +#ifdef ENABLE_13MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX228_13MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#elif defined(ENABLE_8MP_FULL_FRAME) + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX228_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX228) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX228) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX228_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX228_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX228_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_IMX228_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_IMX228; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_IMX228_BNS; +#ifdef ENABLE_13MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX228_13MP_FULL; +#elif defined(ENABLE_8MP_FULL_FRAME) + videoSizeLut = VIDEO_SIZE_LUT_IMX228_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX228; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_IMX228_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_IMX228; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX228_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX228_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX228_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX228_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX228_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX228_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX228_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX228_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX228_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + 
hiddenRearVideoListMax = sizeof(IMX228_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX228_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX228_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX228_PREVIEW_LIST; + rearPictureList = IMX228_PICTURE_LIST; + hiddenRearPreviewList = IMX228_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX228_HIDDEN_PICTURE_LIST; + thumbnailList = IMX228_THUMBNAIL_LIST; + rearVideoList = IMX228_VIDEO_LIST; + hiddenRearVideoList = IMX228_HIDDEN_VIDEO_LIST; + rearFPSList = IMX228_FPS_RANGE_LIST; + hiddenRearFPSList = IMX228_HIDDEN_FPS_RANGE_LIST; +}; + + +ExynosSensorIMX219Base::ExynosSensorIMX219Base() : ExynosSensorInfoBase() +{ +#if defined(CAMERA_LCD_SIZE) && (CAMERA_LCD_SIZE >= LCD_SIZE_1920_1080) + maxPreviewW = 3264; + maxPreviewH = 2448; +#else + maxPreviewW = 1920; + maxPreviewH = 1080; +#endif + maxPictureW = 3264; + maxPictureH = 2448; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 3280; + maxSensorH = 2458; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 160; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 1900; + apertureDen = 1000; + + horizontalViewAngle[SIZE_RATIO_16_9] = 56.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 44.3f; + horizontalViewAngle[SIZE_RATIO_1_1] = 34.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 48.1f; + horizontalViewAngle[SIZE_RATIO_5_4] = 44.3f; + horizontalViewAngle[SIZE_RATIO_5_3] = 52.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 44.3f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 31; + + minFps = 20; + maxFps = 24; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + zoomSupport = false; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + drcSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + FOCUS_MODE_AUTO + /*| FOCUS_MODE_INFINITY*/ + | FOCUS_MODE_MACRO + /*| FOCUS_MODE_FIXED*/ + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /*| 
WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /*| WHITE_BALANCE_TWILIGHT*/ + /*| WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX219) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX219) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX219) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = PREVIEW_SIZE_LUT_IMX219; + pictureSizeLut = PICTURE_SIZE_LUT_IMX219; + videoSizeLut = VIDEO_SIZE_LUT_IMX219; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(IMX219_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(IMX219_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(IMX219_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(IMX219_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX219_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(IMX219_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(IMX219_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(IMX219_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(IMX219_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = IMX219_PREVIEW_LIST; + frontPictureList = IMX219_PICTURE_LIST; + hiddenFrontPreviewList = IMX219_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = IMX219_HIDDEN_PICTURE_LIST; + thumbnailList = IMX219_THUMBNAIL_LIST; + frontVideoList = IMX219_VIDEO_LIST; + hiddenFrontVideoList = IMX219_HIDDEN_VIDEO_LIST; + frontFPSList = IMX219_FPS_RANGE_LIST; + hiddenFrontFPSList = IMX219_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K8B1Base::ExynosSensorS5K8B1Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 1920; + maxPictureH = 1080; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 1936; + maxSensorH = 1090; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 24; + fNumberDen = 10; + focalLengthNum = 120; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 2400; + apertureDen = 1000; + + horizontalViewAngle[SIZE_RATIO_16_9] = 79.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 65.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 50.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 71.8f; + horizontalViewAngle[SIZE_RATIO_5_4] = 65.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 74.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 65.2f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 22; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + 
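As elsewhere in these constructors, the optics parameters set just above for S5K8B1 are stored as numerator/denominator pairs (fNumberNum/fNumberDen, focalLengthNum/focalLengthDen, apertureNum/apertureDen), which keeps them exact for EXIF-style rational fields; the remaining S5K8B1 capability fields continue below. A small sketch of how such a pair would typically be consumed, where ratioToFloat() is a hypothetical helper and not part of this HAL:

    // Sketch: converting a Num/Den pair to a float for reporting.
    // ratioToFloat() is illustrative only and is not defined anywhere in this patch.
    static inline float ratioToFloat(int num, int den)
    {
        return (den != 0) ? static_cast<float>(num) / static_cast<float>(den) : 0.0f;
    }

    // With the S5K8B1 values above: ratioToFloat(24, 10)  == 2.4f  -> f/2.4
    //                               ratioToFloat(120, 100) == 1.2f -> focalLength value of 1.2

Keeping both parts as integers is convenient if the same values are later written into EXIF rational tags, since no float-to-rational conversion is needed.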
maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_FIXED + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_INFINITY + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K8B1_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K8B1_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K8B1_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K8B1_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K8B1_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K8B1_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K8B1_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K8B1_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K8B1_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K8B1_PREVIEW_LIST; + frontPictureList = S5K8B1_PICTURE_LIST; + hiddenFrontPreviewList = S5K8B1_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K8B1_HIDDEN_PICTURE_LIST; + thumbnailList = S5K8B1_THUMBNAIL_LIST; + frontVideoList = S5K8B1_VIDEO_LIST; + hiddenFrontVideoList = 
S5K8B1_HIDDEN_VIDEO_LIST; + frontFPSList = S5K8B1_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K8B1_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K6D1Base::ExynosSensorS5K6D1Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 2560; + maxPreviewH = 1440; + maxPictureW = 2560; + maxPictureH = 1440; + maxVideoW = 2560; + maxVideoH = 1440; + maxSensorW = 2576; + maxSensorH = 1456; + sensorMarginW = 16; + sensorMarginH = 16; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 160; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 1900; + apertureDen = 1000; + + horizontalViewAngle[SIZE_RATIO_16_9] = 79.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 65.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 50.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 71.8f; + horizontalViewAngle[SIZE_RATIO_5_4] = 65.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 74.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 65.2f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 22; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + zoomSupport = false; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + drcSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_BEAUTY_FACE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_FIXED + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_INFINITY + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_6D1) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_6D1) / (sizeof(int) * SIZE_OF_LUT); +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax 
= sizeof(VIDEO_SIZE_LUT_6D1_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_6D1) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + +#endif + previewSizeLut = PREVIEW_SIZE_LUT_6D1; + pictureSizeLut = PICTURE_SIZE_LUT_6D1; +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_6D1_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_6D1; +#endif + dualPreviewSizeLut = DUAL_PREVIEW_SIZE_LUT_6D1; + dualVideoSizeLut = DUAL_VIDEO_SIZE_LUT_6D1; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K6D1_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K6D1_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K6D1_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K6D1_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K6D1_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K6D1_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K6D1_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K6D1_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K6D1_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K6D1_PREVIEW_LIST; + frontPictureList = S5K6D1_PICTURE_LIST; + hiddenFrontPreviewList = S5K6D1_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K6D1_HIDDEN_PICTURE_LIST; + thumbnailList = S5K6D1_THUMBNAIL_LIST; + frontVideoList = S5K6D1_VIDEO_LIST; + hiddenFrontVideoList = S5K6D1_HIDDEN_VIDEO_LIST; + frontFPSList = S5K6D1_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K6D1_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K4E6Base::ExynosSensorS5K4E6Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 2560; + maxPreviewH = 1440; + maxPictureW = 2592; + maxPictureH = 1950; + maxVideoW = 2560; + maxVideoH = 1440; + maxSensorW = 2608; + maxSensorH = 1960; + sensorMarginW = 16; + sensorMarginH = 10; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 17; + fNumberDen = 10; + focalLengthNum = 210; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureDen = 1000; + apertureNum = APEX_FNUM_TO_APERTURE((double)(fNumberNum) / (double)(fNumberDen)) * apertureDen; + + horizontalViewAngle[SIZE_RATIO_16_9] = 77.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 77.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 60.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 71.8f; + horizontalViewAngle[SIZE_RATIO_5_4] = 65.2f; + horizontalViewAngle[SIZE_RATIO_5_3] = 74.8f; + horizontalViewAngle[SIZE_RATIO_11_9] = 73.0f; + verticalViewAngle = 61.0f; + focalLengthIn35mmLength = 22; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; +
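Unlike the sensors that hard-code apertureNum, S5K4E6 derives it from the f-number via APEX_FNUM_TO_APERTURE a few assignments above. That macro is not defined in this hunk; the sketch below assumes the standard APEX relation Av = 2 * log2(N), which matches the macro's name but should be checked against its real definition elsewhere in the tree:

    // Assumed APEX relation only: the actual APEX_FNUM_TO_APERTURE macro is
    // defined outside this file and may differ from this sketch.
    #include <cmath>

    static inline double apexFnumToAperture(double fNumber)
    {
        return 2.0 * std::log2(fNumber);    // Av = 2 * log2(N)
    }

    // S5K4E6 above: fNumberNum/fNumberDen = 17/10 = f/1.7
    // -> Av ~ 2 * log2(1.7) ~ 1.53, so apertureNum ~ 1531 with apertureDen = 1000.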
autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + drcSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_FIXED + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_INFINITY + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + | SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); +#if defined(ENABLE_8MP_FULL_FRAME) || defined(ENABLE_13MP_FULL_FRAME) + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4E6_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); +#endif + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_4E6) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4E6) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4E6) / (sizeof(int) * SIZE_OF_LUT); + fastAeStableLutMax = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4E6) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_4E6; + pictureSizeLut = PICTURE_SIZE_LUT_4E6; +#if defined(ENABLE_8MP_FULL_FRAME) || defined(ENABLE_13MP_FULL_FRAME) + videoSizeLut = VIDEO_SIZE_LUT_4E6_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_4E6; +#endif + dualPreviewSizeLut = DUAL_PREVIEW_SIZE_LUT_4E6; + dualVideoSizeLut = DUAL_VIDEO_SIZE_LUT_4E6; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_4E6; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4E6; + fastAeStableLut = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_4E6; + vtcallSizeLut = VTCALL_SIZE_LUT_4E6; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K4E6_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K4E6_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = 
sizeof(S5K4E6_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K4E6_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K4E6_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K4E6_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K4E6_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K4E6_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K4E6_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K4E6_PREVIEW_LIST; + frontPictureList = S5K4E6_PICTURE_LIST; + hiddenFrontPreviewList = S5K4E6_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K4E6_HIDDEN_PICTURE_LIST; + thumbnailList = S5K4E6_THUMBNAIL_LIST; + frontVideoList = S5K4E6_VIDEO_LIST; + hiddenFrontVideoList = S5K4E6_HIDDEN_VIDEO_LIST; + frontFPSList = S5K4E6_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K4E6_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorS5K5E3Base::ExynosSensorS5K5E3Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 2560; + maxPictureH = 1920; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2576; + maxSensorH = 1932; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 69.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 55.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL_FRONT; + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_AQUA + | EFFECT_MONO + | EFFECT_NEGATIVE + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + 
| SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT*/; + + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_5E3) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = PREVIEW_SIZE_LUT_5E3; + pictureSizeLut = PICTURE_SIZE_LUT_5E3; + videoSizeLut = VIDEO_SIZE_LUT_5E3; + dualVideoSizeLut = VIDEO_SIZE_LUT_5E3; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(S5K5E3_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(S5K5E3_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(S5K5E3_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(S5K5E3_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(S5K5E3_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(S5K5E3_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(S5K5E3_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(S5K5E3_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(S5K5E3_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = S5K5E3_PREVIEW_LIST; + frontPictureList = S5K5E3_PICTURE_LIST; + hiddenFrontPreviewList = S5K5E3_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = S5K5E3_HIDDEN_PICTURE_LIST; + thumbnailList = S5K5E3_THUMBNAIL_LIST; + frontVideoList = S5K5E3_VIDEO_LIST; + hiddenFrontVideoList = S5K5E3_HIDDEN_VIDEO_LIST; + frontFPSList = S5K5E3_FPS_RANGE_LIST; + hiddenFrontFPSList = S5K5E3_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorSR544Base::ExynosSensorSR544Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1280; + maxPreviewH = 960; + maxPictureW = 2592; + maxPictureH = 1944; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2600; + maxSensorH = 1952; + sensorMarginW = 8; + sensorMarginH = 8; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 22; + fNumberDen = 10; + focalLengthNum = 330; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 54.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.8f; 
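The horizontalViewAngle entries (continued just below for SR544) are indexed by SIZE_RATIO_* ids rather than by concrete resolutions, so the HAL can look up a field of view for whichever aspect ratio is active. The width/height-to-ratio-id mapping lives elsewhere in the HAL; the helper below is only a self-contained sketch of that idea:

    // Illustrative only: a local stand-in for the HAL's SIZE_RATIO_* ids so the
    // sketch compiles on its own; the real ids come from the camera size headers,
    // and the real mapping is done in the HAL's size-control code, not here.
    enum ExampleSizeRatio { EX_RATIO_16_9, EX_RATIO_4_3, EX_RATIO_1_1 };

    static ExampleSizeRatio guessSizeRatioId(int w, int h)
    {
        if (w * 9 == h * 16) return EX_RATIO_16_9;
        if (w * 3 == h * 4)  return EX_RATIO_4_3;
        if (w == h)          return EX_RATIO_1_1;
        return EX_RATIO_4_3;                  // fallback; the other ratios are omitted here
    }

    // e.g. horizontalViewAngle[guessSizeRatioId(1920, 1080)] would read the 16:9
    // entry (68.0f for SR544), assuming the stand-in ids line up with SIZE_RATIO_*.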
+ horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_RED_YELLOW + | EFFECT_BLUE + | EFFECT_COLD_VINTAGE + | EFFECT_AQUA +#ifndef USE_CAMERA2_API_SUPPORT + | EFFECT_BEAUTY_FACE +#endif + ; + + flashModeList = + FLASH_MODE_OFF + ; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT*/ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_SR544) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_SR544) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_SR544) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_SR544) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_SR544) / (sizeof(int) * SIZE_OF_LUT); + + previewSizeLut = PREVIEW_SIZE_LUT_SR544; + pictureSizeLut = PICTURE_SIZE_LUT_SR544; + videoSizeLut = VIDEO_SIZE_LUT_SR544; + dualVideoSizeLut = VIDEO_SIZE_LUT_SR544; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_SR544; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_SR544; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(SR544_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(SR544_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + 
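The mode lists filled in above for SR544 (antiBandingList, hiddenEffectList, flashModeList, focusModeList, whiteBalanceList) are OR-combinations of flag values, so querying support reduces to a bit test; the remaining list bookkeeping for SR544 continues right below. A minimal sketch, where isModeSupported() is a hypothetical helper rather than HAL API:

    // Sketch only: the mode constants are assumed to be distinct single-bit flags,
    // as their combination with '|' in the constructors suggests.
    static inline bool isModeSupported(int modeList, int mode)
    {
        return (modeList & mode) != 0;
    }

    // For the SR544 values above:
    //   isModeSupported(focusModeList, FOCUS_MODE_CONTINUOUS_PICTURE) -> true
    //   isModeSupported(flashModeList, FLASH_MODE_TORCH)              -> false (only FLASH_MODE_OFF is set)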
hiddenRearPreviewListMax = sizeof(SR544_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(SR544_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(SR544_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(SR544_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(SR544_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(SR544_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(SR544_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = SR544_PREVIEW_LIST; + rearPictureList = SR544_PICTURE_LIST; + hiddenRearPreviewList = SR544_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = SR544_HIDDEN_PICTURE_LIST; + thumbnailList = SR544_THUMBNAIL_LIST; + rearVideoList = SR544_VIDEO_LIST; + hiddenRearVideoList = SR544_HIDDEN_VIDEO_LIST; + rearFPSList = SR544_FPS_RANGE_LIST; + hiddenRearFPSList = SR544_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorIMX240_2P2Base::ExynosSensorIMX240_2P2Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 5312; + maxPictureH = 2988; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 5328; + maxSensorH = 3000; + sensorMarginW = 16; + sensorMarginH = 12; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 19; + fNumberDen = 10; + focalLengthNum = 430; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 185; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 28; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + minExposureTime = 32; + maxExposureTime = 10000000; + minWBK = 2300; + maxWBK = 10000; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | 
SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + | WHITE_BALANCE_CUSTOM_K + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX240_2P2) / (sizeof(int) * SIZE_OF_LUT); +#endif + +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240_2P2_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX240_2P2) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX240_2P2) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + fastAeStableLutMax = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS) / (sizeof(int) * SIZE_OF_LUT); + +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_IMX240_2P2_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_IMX240_2P2; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_IMX240_2P2_BNS; + +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX240_2P2_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX240_2P2; +#endif + videoSizeBnsLut = VIDEO_SIZE_LUT_IMX240_2P2_BNS; + pictureSizeLut = PICTURE_SIZE_LUT_IMX240_2P2; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_60FPS_HIGH_SPEED_IMX240_2P2_BNS; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS; + fastAeStableLut = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX240_2P2_BNS; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX240_2P2_BNS; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + fastAeStableLut = NULL; + vtcallSizeLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX240_2P2_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX240_2P2_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX240_2P2_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX240_2P2_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = 
sizeof(IMX240_2P2_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX240_2P2_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX240_2P2_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX240_2P2_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX240_2P2_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX240_2P2_PREVIEW_LIST; + rearPictureList = IMX240_2P2_PICTURE_LIST; + hiddenRearPreviewList = IMX240_2P2_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX240_2P2_HIDDEN_PICTURE_LIST; + thumbnailList = IMX240_2P2_THUMBNAIL_LIST; + rearVideoList = IMX240_2P2_VIDEO_LIST; + hiddenRearVideoList = IMX240_2P2_HIDDEN_VIDEO_LIST; + rearFPSList = IMX240_2P2_FPS_RANGE_LIST; + hiddenRearFPSList = IMX240_2P2_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorIMX260_2L1Base::ExynosSensorIMX260_2L1Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 3840; + maxPreviewH = 2160; + maxPictureW = 4032; + maxPictureH = 3024; + maxVideoW = 3840; + maxVideoH = 2160; + maxSensorW = 4032; + maxSensorH = 3024; + sensorMarginW = 0; + sensorMarginH = 0; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 17; + fNumberDen = 10; + focalLengthNum = 420; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureDen = 100; + apertureNum = APEX_FNUM_TO_APERTURE((double)(fNumberNum) / (double)(fNumberDen)) * apertureDen; + + horizontalViewAngle[SIZE_RATIO_16_9] = 68.0f; + horizontalViewAngle[SIZE_RATIO_4_3] = 53.0f; + horizontalViewAngle[SIZE_RATIO_1_1] = 41.0f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 41.0f; + focalLengthIn35mmLength = 26; + + minFps = 1; + maxFps = 30; + +#if defined(USE_SUBDIVIDED_EV) + minExposureCompensation = -20; + maxExposureCompensation = 20; + exposureCompensationStep = 0.1f; +#else + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; +#endif + minExposureTime = 32; + maxExposureTime = 10000000; + minWBK = 2300; + maxWBK = 10000; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_NONE + | EFFECT_MONO + | EFFECT_NEGATIVE + /*| EFFECT_SOLARIZE*/ + | EFFECT_SEPIA + | EFFECT_POSTERIZE + /*| EFFECT_WHITEBOARD*/ + /*| EFFECT_BLACKBOARD*/ + | EFFECT_AQUA + | EFFECT_BEAUTY_FACE + ; + + flashModeList = + FLASH_MODE_OFF + | FLASH_MODE_AUTO + | FLASH_MODE_ON + //| FLASH_MODE_RED_EYE + | FLASH_MODE_TORCH; + + focusModeList = + FOCUS_MODE_AUTO + | FOCUS_MODE_INFINITY + | FOCUS_MODE_MACRO + //| FOCUS_MODE_FIXED + //| FOCUS_MODE_EDOF + | FOCUS_MODE_CONTINUOUS_VIDEO + | FOCUS_MODE_CONTINUOUS_PICTURE + | FOCUS_MODE_TOUCH + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | 
SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_PARTY + | SCENE_MODE_SPORTS + | SCENE_MODE_CANDLELIGHT + */ + ; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + //| WHITE_BALANCE_WARM_FLUORESCENT + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + //| WHITE_BALANCE_TWILIGHT + //| WHITE_BALANCE_SHADE + | WHITE_BALANCE_CUSTOM_K + ; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = true; + + if (bnsSupport == true) { + +#if defined(USE_BNS_PREVIEW) + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX260_2L1_BNS) / (sizeof(int) * SIZE_OF_LUT); +#else + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); +#endif + +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX260_2L1_8MP_FULL) / (sizeof(int) * SIZE_OF_LUT); +#else + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); +#endif + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + vtcallSizeLutMax = sizeof(VTCALL_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + + videoSizeLutHighSpeed60Max = sizeof(VIDEO_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed120Max = sizeof(VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed240Max = sizeof(VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + fastAeStableLutMax = sizeof(FAST_AE_STABLE_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); + liveBroadcastSizeLutMax = sizeof(LIVE_BROADCAST_SIZE_LUT_IIMX260_2L1) / (sizeof(int) * SIZE_OF_LUT); +#if defined(USE_BNS_PREVIEW) + previewSizeLut = PREVIEW_SIZE_LUT_IMX260_2L1_BNS; +#else + previewSizeLut = PREVIEW_SIZE_LUT_IMX260_2L1; +#endif + dualPreviewSizeLut = PREVIEW_SIZE_LUT_IMX260_2L1_BNS; + +#ifdef ENABLE_8MP_FULL_FRAME + videoSizeLut = VIDEO_SIZE_LUT_IMX260_2L1_8MP_FULL; +#else + videoSizeLut = VIDEO_SIZE_LUT_IMX260_2L1; +#endif + pictureSizeLut = PICTURE_SIZE_LUT_IMX260_2L1; + videoSizeLutHighSpeed60 = VIDEO_SIZE_LUT_IMX260_2L1; + videoSizeLutHighSpeed120 = VIDEO_SIZE_LUT_120FPS_HIGH_SPEED_IMX260_2L1; + videoSizeLutHighSpeed240 = VIDEO_SIZE_LUT_240FPS_HIGH_SPEED_IMX260_2L1; + fastAeStableLut = FAST_AE_STABLE_SIZE_LUT_IMX260_2L1; + vtcallSizeLut = VTCALL_SIZE_LUT_IMX260_2L1; + liveBroadcastSizeLut = LIVE_BROADCAST_SIZE_LUT_IIMX260_2L1; + sizeTableSupport = true; + } else { + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + vtcallSizeLutMax = 0; + fastAeStableLutMax = 0; + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed = NULL; + vtcallSizeLut = NULL; + fastAeStableLut = NULL; + sizeTableSupport = false; + } + + /* Set the max of preview/picture/video lists */ + rearPreviewListMax = sizeof(IMX260_2L1_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearPictureListMax = sizeof(IMX260_2L1_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPreviewListMax = sizeof(IMX260_2L1_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearPictureListMax = sizeof(IMX260_2L1_HIDDEN_PICTURE_LIST) / (sizeof(int) 
* SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(IMX260_2L1_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearVideoListMax = sizeof(IMX260_2L1_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + depthMapSizeLutMax = sizeof(DEPTH_MAP_SIZE_LUT_IMX260_2L1) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenRearVideoListMax = sizeof(IMX260_2L1_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + rearFPSListMax = sizeof(IMX260_2L1_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenRearFPSListMax = sizeof(IMX260_2L1_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + rearPreviewList = IMX260_2L1_PREVIEW_LIST; + rearPictureList = IMX260_2L1_PICTURE_LIST; + hiddenRearPreviewList = IMX260_2L1_HIDDEN_PREVIEW_LIST; + hiddenRearPictureList = IMX260_2L1_HIDDEN_PICTURE_LIST; + thumbnailList = IMX260_2L1_THUMBNAIL_LIST; + rearVideoList = IMX260_2L1_VIDEO_LIST; + depthMapSizeLut = DEPTH_MAP_SIZE_LUT_IMX260_2L1; + hiddenRearVideoList = IMX260_2L1_HIDDEN_VIDEO_LIST; + rearFPSList = IMX260_2L1_FPS_RANGE_LIST; + hiddenRearFPSList = IMX260_2L1_HIDDEN_FPS_RANGE_LIST; +}; + +ExynosSensorOV5670Base::ExynosSensorOV5670Base() : ExynosSensorInfoBase() +{ + maxPreviewW = 1920; + maxPreviewH = 1080; + maxPictureW = 2592; + maxPictureH = 1944; + maxVideoW = 1920; + maxVideoH = 1080; + maxSensorW = 2608; + maxSensorH = 1960; + sensorMarginW = 16; + sensorMarginH = 16; + + maxThumbnailW = 512; + maxThumbnailH = 384; + + fNumberNum = 245; + fNumberDen = 100; + focalLengthNum = 185; + focalLengthDen = 100; + focusDistanceNum = 0; + focusDistanceDen = 0; + apertureNum = 227; + apertureDen = 100; + + horizontalViewAngle[SIZE_RATIO_16_9] = 69.8f; + horizontalViewAngle[SIZE_RATIO_4_3] = 55.2f; + horizontalViewAngle[SIZE_RATIO_1_1] = 42.8f; + horizontalViewAngle[SIZE_RATIO_3_2] = 55.2f; + horizontalViewAngle[SIZE_RATIO_5_4] = 48.8f; + horizontalViewAngle[SIZE_RATIO_5_3] = 58.4f; + horizontalViewAngle[SIZE_RATIO_11_9] = 48.8f; + verticalViewAngle = 39.4f; + focalLengthIn35mmLength = 27; + + minFps = 1; + maxFps = 30; + + minExposureCompensation = -4; + maxExposureCompensation = 4; + exposureCompensationStep = 0.5f; + maxNumDetectedFaces = 16; + maxNumFocusAreas = 1; + maxNumMeteringAreas = 0; + maxZoomLevel = MAX_ZOOM_LEVEL; + maxZoomRatio = MAX_ZOOM_RATIO; + + zoomSupport = true; + smoothZoomSupport = false; + videoSnapshotSupport = true; + videoStabilizationSupport = false; + autoWhiteBalanceLockSupport = true; + autoExposureLockSupport = true; + visionModeSupport = true; + + antiBandingList = + ANTIBANDING_AUTO + | ANTIBANDING_50HZ + | ANTIBANDING_60HZ + | ANTIBANDING_OFF + ; + + effectList = + EFFECT_NONE + ; + + hiddenEffectList = + EFFECT_AQUA + | EFFECT_MONO + | EFFECT_NEGATIVE + | EFFECT_SEPIA + | EFFECT_POSTERIZE + | EFFECT_COLD_VINTAGE + | EFFECT_BLUE + | EFFECT_RED_YELLOW + ; + + flashModeList = + FLASH_MODE_OFF + /*| FLASH_MODE_AUTO*/ + /*| FLASH_MODE_ON*/ + /*| FLASH_MODE_RED_EYE*/ + /*| FLASH_MODE_TORCH*/ + ; + + focusModeList = + /* FOCUS_MODE_AUTO*/ + FOCUS_MODE_INFINITY + /*| FOCUS_MODE_MACRO*/ + | FOCUS_MODE_FIXED + /*| FOCUS_MODE_EDOF*/ + /*| FOCUS_MODE_CONTINUOUS_VIDEO*/ + /*| FOCUS_MODE_CONTINUOUS_PICTURE*/ + /*| FOCUS_MODE_TOUCH*/ + ; + + sceneModeList = + SCENE_MODE_AUTO + /*| SCENE_MODE_ACTION + | SCENE_MODE_PORTRAIT + | SCENE_MODE_LANDSCAPE + | SCENE_MODE_NIGHT + | SCENE_MODE_NIGHT_PORTRAIT + | SCENE_MODE_THEATRE + | SCENE_MODE_BEACH + | SCENE_MODE_SNOW + | SCENE_MODE_SUNSET + | SCENE_MODE_STEADYPHOTO + | SCENE_MODE_FIREWORKS + | SCENE_MODE_SPORTS + | 
SCENE_MODE_PARTY + | SCENE_MODE_CANDLELIGHT*/; + + whiteBalanceList = + WHITE_BALANCE_AUTO + | WHITE_BALANCE_INCANDESCENT + | WHITE_BALANCE_FLUORESCENT + /* WHITE_BALANCE_WARM_FLUORESCENT*/ + | WHITE_BALANCE_DAYLIGHT + | WHITE_BALANCE_CLOUDY_DAYLIGHT + /* WHITE_BALANCE_TWILIGHT*/ + /* WHITE_BALANCE_SHADE*/ + ; + + previewSizeLutMax = 0; + pictureSizeLutMax = 0; + videoSizeLutMax = 0; + previewSizeLut = NULL; + pictureSizeLut = NULL; + videoSizeLut = NULL; + sizeTableSupport = false; + + /* vendor specifics */ + highResolutionCallbackW = 3264; + highResolutionCallbackH = 1836; + highSpeedRecording60WFHD = 1920; + highSpeedRecording60HFHD = 1080; + highSpeedRecording60W = 1008; + highSpeedRecording60H = 566; + highSpeedRecording120W = 1008; + highSpeedRecording120H = 566; + scalableSensorSupport = true; + bnsSupport = false; + + previewSizeLutMax = sizeof(PREVIEW_SIZE_LUT_OV5670) / (sizeof(int) * SIZE_OF_LUT); + pictureSizeLutMax = sizeof(PICTURE_SIZE_LUT_OV5670) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutMax = sizeof(VIDEO_SIZE_LUT_OV5670) / (sizeof(int) * SIZE_OF_LUT); + videoSizeLutHighSpeed60Max = 0; + videoSizeLutHighSpeed120Max = 0; + + previewSizeLut = PREVIEW_SIZE_LUT_OV5670; + pictureSizeLut = PICTURE_SIZE_LUT_OV5670; + videoSizeLut = VIDEO_SIZE_LUT_OV5670; + dualVideoSizeLut = VIDEO_SIZE_LUT_OV5670; + videoSizeBnsLut = NULL; + videoSizeLutHighSpeed60 = NULL; + videoSizeLutHighSpeed120 = NULL; + sizeTableSupport = true; + + /* Set the max of preview/picture/video lists */ + frontPreviewListMax = sizeof(OV5670_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontPictureListMax = sizeof(OV5670_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPreviewListMax = sizeof(OV5670_HIDDEN_PREVIEW_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontPictureListMax = sizeof(OV5670_HIDDEN_PICTURE_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + thumbnailListMax = sizeof(OV5670_THUMBNAIL_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontVideoListMax = sizeof(OV5670_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + hiddenFrontVideoListMax = sizeof(OV5670_HIDDEN_VIDEO_LIST) / (sizeof(int) * SIZE_OF_RESOLUTION); + frontFPSListMax = sizeof(OV5670_FPS_RANGE_LIST) / (sizeof(int) * 2); + hiddenFrontFPSListMax = sizeof(OV5670_HIDDEN_FPS_RANGE_LIST) / (sizeof(int) * 2); + + /* Set supported preview/picture/video lists */ + frontPreviewList = OV5670_PREVIEW_LIST; + frontPictureList = OV5670_PICTURE_LIST; + hiddenFrontPreviewList = OV5670_HIDDEN_PREVIEW_LIST; + hiddenFrontPictureList = OV5670_HIDDEN_PICTURE_LIST; + thumbnailList = OV5670_THUMBNAIL_LIST; + frontVideoList = OV5670_VIDEO_LIST; + hiddenFrontVideoList = OV5670_HIDDEN_VIDEO_LIST; + frontFPSList = OV5670_FPS_RANGE_LIST; + hiddenFrontFPSList = OV5670_HIDDEN_FPS_RANGE_LIST; +}; + +}; /* namespace android */ diff --git a/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.h b/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.h new file mode 100644 index 0000000..85f6890 --- /dev/null +++ b/libcamera/common_v2/SensorInfos/ExynosCameraSensorInfoBase.h @@ -0,0 +1,946 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef EXYNOS_CAMERA_SENSOR_INFO_BASE_H +#define EXYNOS_CAMERA_SENSOR_INFO_BASE_H + +#include +#include +#include +#include "ExynosCameraConfig.h" +#include "ExynosCameraSizeTable.h" +#include "fimc-is-metadata.h" + +/*TODO: This values will be changed */ +#define BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR "0.10,1.20,Infinity" +#define FRONT_CAMERA_FOCUS_DISTANCES_STR "0.20,0.25,Infinity" + +#define BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR "0.10,0.20,Infinity" +#define BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR "0.10,1.20,Infinity" + +#define BACK_CAMERA_FOCUS_DISTANCE_INFINITY "Infinity" +#define FRONT_CAMERA_FOCUS_DISTANCE_INFINITY "Infinity" + +#define UNIQUE_ID_BUF_SIZE (32) + +#if defined(SUPPORT_X8_ZOOM) +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_X8_MAX +#define MAX_ZOOM_RATIO (8000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_X8_MAX /* CTS and 3rd-Party */ +#elif defined(SUPPORT_X8_ZOOM_AND_800STEP) +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_X8_800STEP_MAX +#define MAX_ZOOM_RATIO (8000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_X8_MAX /* CTS and 3rd-Party */ +#elif defined(SUPPORT_X4_ZOOM_AND_400STEP) +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_X4_400STEP_MAX +#define MAX_ZOOM_RATIO (4000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_MAX /* CTS and 3rd-Party */ +#else +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO (4000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_MAX /* CTS and 3rd-Party */ +#endif + +#define ARRAY_LENGTH(x) (sizeof(x)/sizeof(x[0])) +#define COMMON_DENOMINATOR (100) +#define EFFECTMODE_META_2_HAL(x) (1 << (x -1)) + +#define SENSOR_ID_EXIF_SIZE 27 +#define SENSOR_ID_EXIF_TAG "ssuniqueid" + +namespace android { + +enum max_3a_region { + AE, + AWB, + AF, + REGIONS_INDEX_MAX, +}; +enum size_direction { + WIDTH, + HEIGHT, + SIZE_DIRECTION_MAX, +}; +enum coordinate_3d { + X_3D, + Y_3D, + Z_3D, + COORDINATE_3D_MAX, +}; +enum output_streams_type { + RAW, + PROCESSED, + PROCESSED_STALL, + OUTPUT_STREAM_TYPE_MAX, +}; +enum range_type { + MIN, + MAX, + RANGE_TYPE_MAX, +}; +enum bayer_cfa_mosaic_channel { + R, + GR, + GB, + B, + BAYER_CFA_MOSAIC_CHANNEL_MAX, +}; +enum hue_sat_value_index { + HUE, + SATURATION, + VALUE, + HUE_SAT_VALUE_INDEX_MAX, +}; +enum sensor_margin_base_index { + LEFT_BASE, + TOP_BASE, + WIDTH_BASE, + HEIGHT_BASE, + BASE_MAX, +}; + +#ifdef SENSOR_NAME_GET_FROM_FILE +int getSensorIdFromFile(int camId); +#endif +#ifdef SENSOR_FW_GET_FROM_FILE +const char *getSensorFWFromFile(struct ExynosSensorInfoBase *info, int camId); +#endif + +struct sensor_id_exif_data { + char sensor_id_exif[SENSOR_ID_EXIF_SIZE]; +}; + +struct exynos_camera_info { +public: + int previewW; + int previewH; + int previewFormat; + int previewStride; + + int pictureW; + int pictureH; + int pictureFormat; + int hwPictureFormat; + + int videoW; + int videoH; + int hwVideoW; + 
int hwVideoH; + int maxVideoW; + int maxVideoH; + + int callbackW; + int callbackH; + int callbackFormat; + + int yuvWidth[3]; + int yuvHeight[3]; + int yuvFormat[3]; + + /* This size for internal */ + int hwSensorW; + int hwSensorH; + int hwPreviewW; + int hwPreviewH; + int previewSizeRatioId; + int hwPictureW; + int hwPictureH; + int pictureSizeRatioId; + int hwDisW; + int hwDisH; + int videoSizeRatioId; + int hwPreviewFormat; + + int hwBayerCropW; + int hwBayerCropH; + int hwBayerCropX; + int hwBayerCropY; + + int bnsW; + int bnsH; + + int jpegQuality; + int thumbnailW; + int thumbnailH; + int thumbnailQuality; + + int intelligentMode; + bool visionMode; + int visionModeFps; + int visionModeAeTarget; + + bool recordingHint; + bool dualMode; + bool dualRecordingHint; +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + bool dualCameraMode; // stereo camera +#endif + + bool effectHint; + bool effectRecordingHint; + bool highSpeedRecording; + bool videoStabilization; + bool swVdisMode; + bool swVdisUIMode; + bool highResolutionCallbackMode; + bool is3dnrMode; + bool isDrcMode; + bool isOdcMode; + + int zoom; + int rotation; + int flipHorizontal; + int flipVertical; + bool autoExposureLock; + + int meteringMode; + bool isTouchMetering; + + int sceneMode; + int focusMode; + int flashMode; + int whiteBalanceMode; + bool autoWhiteBalanceLock; + int numValidFocusArea; + + double gpsLatitude; + double gpsLongitude; + double gpsAltitude; + long gpsTimeStamp; + + long long int cityId; + unsigned char weatherId; + + bool hdrMode; + bool wdrMode; + int shotMode; + bool antiShake; + int vtMode; + int vrMode; + int plbMode; + bool gamma; + bool slowAe; + int seriesShotCount; + + bool scalableSensorMode; + char imageUniqueId[UNIQUE_ID_BUF_SIZE]; + bool samsungCamera; + + int autoFocusMacroPosition; + int deviceOrientation; + uint32_t bnsScaleRatio; + uint32_t binningScaleRatio; + + int seriesShotMode; + +}; + +struct ExynosSensorInfoBase { +public: +#ifdef SENSOR_FW_GET_FROM_FILE + char sensor_fw[25]; +#endif + struct sensor_id_exif_data sensor_id_exif_info; + + int maxPreviewW; + int maxPreviewH; + int maxPictureW; + int maxPictureH; + int maxVideoW; + int maxVideoH; + int maxSensorW; + int maxSensorH; + int sensorMarginW; + int sensorMarginH; + int sensorMarginBase[BASE_MAX]; + + int maxThumbnailW; + int maxThumbnailH; + + int fNumberNum; + int fNumberDen; + int focalLengthNum; + int focalLengthDen; + + float horizontalViewAngle[SIZE_RATIO_END]; + float verticalViewAngle; + + /* TODO : Remove unused variables */ + int focusDistanceNum; + int focusDistanceDen; + int apertureNum; + int apertureDen; + int minFps; + int maxFps; + + int minExposureCompensation; + int maxExposureCompensation; + int minExposureTime; + int maxExposureTime; + int minWBK; + int maxWBK; + uint32_t maxNumFocusAreas; + uint32_t maxNumMeteringAreas; + + bool videoSnapshotSupport; + bool videoStabilizationSupport; + bool autoWhiteBalanceLockSupport; + bool autoExposureLockSupport; + bool visionModeSupport; + bool drcSupport; + + /* + ** Camera HAL 3.2 Static Metadatas + ** + ** The order of declaration follows the order of + ** Android Camera HAL3.2 Properties. 
+ ** Please refer the "/system/media/camera/docs/docs.html" + */ + /* Android ColorCorrection Static Metadata */ + uint8_t *colorAberrationModes; + size_t colorAberrationModesLength; + + /* Android Control Static Metadata */ + uint8_t *antiBandingModes; + uint8_t *aeModes; + int32_t exposureCompensationRange[RANGE_TYPE_MAX]; + float exposureCompensationStep; + uint8_t *afModes; + uint8_t *effectModes; + uint8_t *sceneModes; + uint8_t *videoStabilizationModes; + uint8_t *awbModes; + int32_t max3aRegions[REGIONS_INDEX_MAX]; + uint8_t *controlModes; + size_t controlModesLength; + uint8_t *sceneModeOverrides; + uint8_t aeLockAvailable; + uint8_t awbLockAvailable; + size_t antiBandingModesLength; + size_t aeModesLength; + size_t afModesLength; + size_t effectModesLength; + size_t sceneModesLength; + size_t videoStabilizationModesLength; + size_t awbModesLength; + size_t sceneModeOverridesLength; + + /* Android Edge Static Metadata */ + uint8_t *edgeModes; + size_t edgeModesLength; + + /* Android Flash Static Metadata */ + uint8_t flashAvailable; + int64_t chargeDuration; + uint8_t colorTemperature; + uint8_t maxEnergy; + + /* Android Hot Pixel Static Metadata */ + uint8_t *hotPixelModes; + size_t hotPixelModesLength; + + /* Android Lens Static Metadata */ + float aperture; + float fNumber; + float filterDensity; + float focalLength; + int focalLengthIn35mmLength; + uint8_t *opticalStabilization; + float hyperFocalDistance; + float minimumFocusDistance; + int32_t shadingMapSize[SIZE_DIRECTION_MAX]; + uint8_t focusDistanceCalibration; + uint8_t lensFacing; + float opticalAxisAngle[2]; + float lensPosition[COORDINATE_3D_MAX]; + size_t opticalStabilizationLength; + + /* Android Noise Reduction Static Metadata */ + uint8_t *noiseReductionModes; + size_t noiseReductionModesLength; + + /* Android Request Static Metadata */ + int32_t maxNumOutputStreams[OUTPUT_STREAM_TYPE_MAX]; + int32_t maxNumInputStreams; + uint8_t maxPipelineDepth; + int32_t partialResultCount; + uint8_t *capabilities; + int32_t *requestKeys; + int32_t *resultKeys; + int32_t *characteristicsKeys; + size_t capabilitiesLength; + size_t requestKeysLength; + size_t resultKeysLength; + size_t characteristicsKeysLength; + + /* Android Scaler Static Metadata */ + bool zoomSupport; + bool smoothZoomSupport; + int maxZoomLevel; + int maxZoomRatio; + int zoomRatioList[MAX_ZOOM_LEVEL]; + int maxBasicZoomLevel; + int64_t *stallDurations; + uint8_t croppingType; + size_t stallDurationsLength; + + /* Android Sensor Static Metadata */ + int32_t sensitivityRange[RANGE_TYPE_MAX]; + uint8_t colorFilterArrangement; + int64_t exposureTimeRange[RANGE_TYPE_MAX]; + int64_t maxFrameDuration; + float sensorPhysicalSize[SIZE_DIRECTION_MAX]; + int32_t whiteLevel; + uint8_t timestampSource; + uint8_t referenceIlluminant1; + uint8_t referenceIlluminant2; + int32_t blackLevelPattern[BAYER_CFA_MOSAIC_CHANNEL_MAX]; + int32_t maxAnalogSensitivity; + int32_t orientation; + int32_t profileHueSatMapDimensions[HUE_SAT_VALUE_INDEX_MAX]; + int32_t *testPatternModes; + size_t testPatternModesLength; + camera_metadata_rational *colorTransformMatrix1; + camera_metadata_rational *colorTransformMatrix2; + camera_metadata_rational *forwardMatrix1; + camera_metadata_rational *forwardMatrix2; + camera_metadata_rational *calibration1; + camera_metadata_rational *calibration2; + + /* Android Statistics Static Metadata */ + uint8_t *faceDetectModes; + int32_t histogramBucketCount; + int32_t maxNumDetectedFaces; + int32_t maxHistogramCount; + int32_t maxSharpnessMapValue; + int32_t 
sharpnessMapSize[SIZE_DIRECTION_MAX]; + uint8_t *hotPixelMapModes; + uint8_t *lensShadingMapModes; + size_t lensShadingMapModesLength; + uint8_t *shadingAvailableModes; + size_t shadingAvailableModesLength; + size_t faceDetectModesLength; + size_t hotPixelMapModesLength; + + /* Android Tone Map Static Metadata */ + int32_t tonemapCurvePoints; + uint8_t *toneMapModes; + size_t toneMapModesLength; + + /* Android LED Static Metadata */ + uint8_t *leds; + size_t ledsLength; + + /* Android Info Static Metadata */ + uint8_t supportedHwLevel; + + /* Android Sync Static Metadata */ + int32_t maxLatency; + /* END of Camera HAL 3.2 Static Metadatas */ + + /* vendor specifics */ + int highResolutionCallbackW; + int highResolutionCallbackH; + int highSpeedRecording60WFHD; + int highSpeedRecording60HFHD; + int highSpeedRecording60W; + int highSpeedRecording60H; + int highSpeedRecording120W; + int highSpeedRecording120H; + bool scalableSensorSupport; + bool bnsSupport; + bool flite3aaOtfSupport; + + /* The number of preview(picture) sizes in each list */ + int rearPreviewListMax; + int frontPreviewListMax; + int rearPictureListMax; + int frontPictureListMax; + int hiddenRearPreviewListMax; + int hiddenFrontPreviewListMax; + int hiddenRearPictureListMax; + int hiddenFrontPictureListMax; + int thumbnailListMax; + int rearVideoListMax; + int frontVideoListMax; + int hiddenRearVideoListMax; + int hiddenFrontVideoListMax; + int highSpeedVideoListMax; + int rearFPSListMax; + int frontFPSListMax; + int hiddenRearFPSListMax; + int hiddenFrontFPSListMax; + int highSpeedVideoFPSListMax; + + /* Supported Preview/Picture/Video Lists */ + int (*rearPreviewList)[SIZE_OF_RESOLUTION]; + int (*frontPreviewList)[SIZE_OF_RESOLUTION]; + int (*rearPictureList)[SIZE_OF_RESOLUTION]; + int (*frontPictureList)[SIZE_OF_RESOLUTION]; + int (*hiddenRearPreviewList)[SIZE_OF_RESOLUTION]; + int (*hiddenFrontPreviewList)[SIZE_OF_RESOLUTION]; + int (*hiddenRearPictureList)[SIZE_OF_RESOLUTION]; + int (*hiddenFrontPictureList)[SIZE_OF_RESOLUTION]; + int (*highSpeedVideoList)[SIZE_OF_RESOLUTION]; + int (*thumbnailList)[SIZE_OF_RESOLUTION]; + int (*rearVideoList)[SIZE_OF_RESOLUTION]; + int (*frontVideoList)[SIZE_OF_RESOLUTION]; + int (*hiddenRearVideoList)[SIZE_OF_RESOLUTION]; + int (*hiddenFrontVideoList)[SIZE_OF_RESOLUTION]; + int (*rearFPSList)[2]; + int (*frontFPSList)[2]; + int (*hiddenRearFPSList)[2]; + int (*hiddenFrontFPSList)[2]; + int (*highSpeedVideoFPSList)[2]; + + int antiBandingList; + int effectList; + int hiddenEffectList; + int flashModeList; + int focusModeList; + int sceneModeList; + int whiteBalanceList; + int isoValues; + int meteringList; + + int previewSizeLutMax; + int pictureSizeLutMax; + int videoSizeLutMax; + int vtcallSizeLutMax; + int videoSizeLutHighSpeedMax; + int videoSizeLutHighSpeed60Max; + int videoSizeLutHighSpeed120Max; + int videoSizeLutHighSpeed240Max; + int liveBroadcastSizeLutMax; + int depthMapSizeLutMax; + int fastAeStableLutMax; + + int (*previewSizeLut)[SIZE_OF_LUT]; + int (*pictureSizeLut)[SIZE_OF_LUT]; + int (*videoSizeLut)[SIZE_OF_LUT]; + int (*videoSizeBnsLut)[SIZE_OF_LUT]; + int (*dualPreviewSizeLut)[SIZE_OF_LUT]; + int (*dualVideoSizeLut)[SIZE_OF_LUT]; + int (*videoSizeLutHighSpeed)[SIZE_OF_LUT]; + int (*videoSizeLutHighSpeed60)[SIZE_OF_LUT]; + int (*videoSizeLutHighSpeed120)[SIZE_OF_LUT]; + int (*videoSizeLutHighSpeed240)[SIZE_OF_LUT]; + int (*vtcallSizeLut)[SIZE_OF_LUT]; + int (*liveBroadcastSizeLut)[SIZE_OF_LUT]; + int (*depthMapSizeLut)[SIZE_OF_RESOLUTION]; + int 
(*fastAeStableLut)[SIZE_OF_LUT]; + bool sizeTableSupport; + +#ifdef BOARD_CAMERA_USES_DUAL_CAMERA + void *dof; +#endif + +public: + ExynosSensorInfoBase(); +}; + +struct ExynosSensorIMX135Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX135Base(); +}; + +struct ExynosSensorIMX134Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX134Base(); +}; + +struct ExynosSensorS5K3L2Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3L2Base(); +}; + +struct ExynosSensorS5K3L8Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3L8Base(); +}; + +struct ExynosSensorS5K3L8DualBdsBase : public ExynosSensorS5K3L8Base { +public: + ExynosSensorS5K3L8DualBdsBase(); +}; + +#if 0 +struct ExynosSensorS5K2P2Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K2P2Base(); +}; +#endif + +struct ExynosSensorS5K3P3Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3P3Base(); +}; + +struct ExynosSensorS5K2P2_12MBase : public ExynosSensorInfoBase { +public: + ExynosSensorS5K2P2_12MBase(); +}; + +struct ExynosSensorS5K2P3Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K2P3Base(); +}; + +struct ExynosSensorS5K2P8Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K2P8Base(); +}; + +struct ExynosSensorS5K2T2Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K2T2Base(); +}; + +struct ExynosSensorS5K6B2Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K6B2Base(); +}; + +struct ExynosSensorSR261Base : public ExynosSensorInfoBase { +public: + ExynosSensorSR261Base(); +}; + +struct ExynosSensorSR259Base : public ExynosSensorInfoBase { +public: + ExynosSensorSR259Base(); +}; + +struct ExynosSensorS5K3H7Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3H7Base(); +}; + +struct ExynosSensorS5K3H5Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3H5Base(); +}; + +struct ExynosSensorS5K4H5Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K4H5Base(); +}; + +struct ExynosSensorS5K4H5YCBase : public ExynosSensorInfoBase { +public: + ExynosSensorS5K4H5YCBase(); +}; + +struct ExynosSensorS5K3M2Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3M2Base(); +}; + +struct ExynosSensorS5K3M3Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K3M3Base(); +}; + +struct ExynosSensorS5K3M3DualBdsBase : public ExynosSensorS5K3M3Base { +public: + ExynosSensorS5K3M3DualBdsBase(); +}; + +struct ExynosSensorS5K5E2Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K5E2Base(); +}; + +struct ExynosSensorS5K5E8Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K5E8Base(); +}; +struct ExynosSensorS5K6A3Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K6A3Base(); +}; + +struct ExynosSensorIMX175Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX175Base(); +}; + +#if 0 +struct ExynosSensorIMX240Base: public ExynosSensorInfoBase { +public: + ExynosSensorIMX240Base(); +}; +#endif + +struct ExynosSensorIMX228Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX228Base(); +}; + +struct ExynosSensorIMX219Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX219Base(); +}; + +struct ExynosSensorS5K8B1Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K8B1Base(); +}; + +struct ExynosSensorS5K6D1Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K6D1Base(); +}; + +struct ExynosSensorS5K4E6Base : public ExynosSensorInfoBase { +public: + ExynosSensorS5K4E6Base(); +}; + +struct ExynosSensorS5K5E3Base : public 
ExynosSensorInfoBase { +public: + ExynosSensorS5K5E3Base(); +}; + +struct ExynosSensorSR544Base : public ExynosSensorInfoBase { +public: + ExynosSensorSR544Base(); +}; + +struct ExynosSensorIMX240_2P2Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX240_2P2Base(); +}; + +struct ExynosSensorIMX260_2L1Base : public ExynosSensorInfoBase { +public: + ExynosSensorIMX260_2L1Base(); +}; + +struct ExynosSensorOV5670Base : public ExynosSensorInfoBase { +public: + ExynosSensorOV5670Base(); +}; + +/* Helpper functions */ +int getSensorId(int camId); +void getDualCameraId(int *cameraId_0, int *cameraId_1); + +enum CAMERA_ID { + CAMERA_ID_BACK = 0, + CAMERA_ID_FRONT = 1, + CAMERA_ID_BACK_0 = CAMERA_ID_BACK, + CAMERA_ID_FRONT_0 = CAMERA_ID_FRONT, + CAMERA_ID_BACK_1 = 2, + CAMERA_ID_FRONT_1 = 3, + CAMERA_ID_MAX, +}; + +enum MODE { + MODE_PREVIEW = 0, + MODE_PICTURE, + MODE_VIDEO, + MODE_THUMBNAIL, +}; + +enum { + ANTIBANDING_AUTO = (1 << 0), + ANTIBANDING_50HZ = (1 << 1), + ANTIBANDING_60HZ = (1 << 2), + ANTIBANDING_OFF = (1 << 3), +}; + +enum { + SCENE_MODE_AUTO = (1 << 0), + SCENE_MODE_ACTION = (1 << 1), + SCENE_MODE_PORTRAIT = (1 << 2), + SCENE_MODE_LANDSCAPE = (1 << 3), + SCENE_MODE_NIGHT = (1 << 4), + SCENE_MODE_NIGHT_PORTRAIT = (1 << 5), + SCENE_MODE_THEATRE = (1 << 6), + SCENE_MODE_BEACH = (1 << 7), + SCENE_MODE_SNOW = (1 << 8), + SCENE_MODE_SUNSET = (1 << 9), + SCENE_MODE_STEADYPHOTO = (1 << 10), + SCENE_MODE_FIREWORKS = (1 << 11), + SCENE_MODE_SPORTS = (1 << 12), + SCENE_MODE_PARTY = (1 << 13), + SCENE_MODE_CANDLELIGHT = (1 << 14), + SCENE_MODE_AQUA = (1 << 17), +}; + +enum { + FOCUS_MODE_AUTO = (1 << 0), + FOCUS_MODE_INFINITY = (1 << 1), + FOCUS_MODE_MACRO = (1 << 2), + FOCUS_MODE_FIXED = (1 << 3), + FOCUS_MODE_EDOF = (1 << 4), + FOCUS_MODE_CONTINUOUS_VIDEO = (1 << 5), + FOCUS_MODE_CONTINUOUS_PICTURE = (1 << 6), + FOCUS_MODE_TOUCH = (1 << 7), + FOCUS_MODE_CONTINUOUS_PICTURE_MACRO = (1 << 8), +}; + +enum { + WHITE_BALANCE_AUTO = (1 << 0), + WHITE_BALANCE_INCANDESCENT = (1 << 1), + WHITE_BALANCE_FLUORESCENT = (1 << 2), + WHITE_BALANCE_WARM_FLUORESCENT = (1 << 3), + WHITE_BALANCE_DAYLIGHT = (1 << 4), + WHITE_BALANCE_CLOUDY_DAYLIGHT = (1 << 5), + WHITE_BALANCE_TWILIGHT = (1 << 6), + WHITE_BALANCE_SHADE = (1 << 7), + WHITE_BALANCE_CUSTOM_K = (1 << 8), +}; + +enum { + FLASH_MODE_OFF = (1 << 0), + FLASH_MODE_AUTO = (1 << 1), + FLASH_MODE_ON = (1 << 2), + FLASH_MODE_RED_EYE = (1 << 3), + FLASH_MODE_TORCH = (1 << 4), +}; + +/* Metering */ +enum { + METERING_MODE_AVERAGE = (1 << 0), + METERING_MODE_CENTER = (1 << 1), + METERING_MODE_MATRIX = (1 << 2), + METERING_MODE_SPOT = (1 << 3), + METERING_MODE_CENTER_TOUCH = (1 << 4), + METERING_MODE_MATRIX_TOUCH = (1 << 5), + METERING_MODE_SPOT_TOUCH = (1 << 6), + METERING_MODE_AVERAGE_TOUCH = (1 << 7), +}; + +/* Contrast */ +enum { + CONTRAST_AUTO = (1 << 0), + CONTRAST_MINUS_2 = (1 << 1), + CONTRAST_MINUS_1 = (1 << 2), + CONTRAST_DEFAULT = (1 << 3), + CONTRAST_PLUS_1 = (1 << 4), + CONTRAST_PLUS_2 = (1 << 5), +}; + +/* Shot mode */ +enum SHOT_MODE { + SHOT_MODE_NORMAL = 0x00, + SHOT_MODE_AUTO = 0x01, + SHOT_MODE_BEAUTY_FACE = 0x02, + SHOT_MODE_BEST_PHOTO = 0x03, + SHOT_MODE_DRAMA = 0x04, + SHOT_MODE_BEST_FACE = 0x05, + SHOT_MODE_ERASER = 0x06, + SHOT_MODE_PANORAMA = 0x07, + SHOT_MODE_3D_PANORAMA = 0x08, + SHOT_MODE_RICH_TONE = 0x09, + SHOT_MODE_NIGHT = 0x0A, + SHOT_MODE_STORY = 0x0B, + SHOT_MODE_AUTO_PORTRAIT = 0x0C, + SHOT_MODE_PET = 0x0D, + SHOT_MODE_GOLF = 0x0E, + SHOT_MODE_ANIMATED_SCENE = 0x0F, + SHOT_MODE_NIGHT_SCENE = 0x10, + SHOT_MODE_SPORTS 
= 0x11, + SHOT_MODE_AQUA = 0x12, + SHOT_MODE_MAGIC = 0x13, + SHOT_MODE_OUTFOCUS = 0x14, + SHOT_MODE_3DTOUR = 0x15, + SHOT_MODE_SEQUENCE = 0x16, + SHOT_MODE_LIGHT_TRACE = 0x17, +#ifdef USE_LIMITATION_FOR_THIRD_PARTY + THIRD_PARTY_BLACKBOX_MODE = 0x19, + THIRD_PARTY_VTCALL_MODE = 0x20, + THIRD_PARTY_HANGOUT_MODE = 0x21, +#endif + SHOT_MODE_FRONT_PANORAMA = 0x1B, + SHOT_MODE_SELFIE_ALARM = 0x1C, + SHOT_MODE_INTERACTIVE = 0x1D, + SHOT_MODE_DUAL = 0x1E, + SHOT_MODE_FASTMOTION = 0x1F, + SHOT_MODE_PRO_MODE = 0x22, + SHOT_MODE_VIDEO_COLLAGE = 0x24, + SHOT_MODE_ANTI_FOG = 0x25, + SHOT_MODE_MAX, +}; + +enum SERIES_SHOT_MODE { + SERIES_SHOT_MODE_NONE = 0, + SERIES_SHOT_MODE_LLS = 1, + SERIES_SHOT_MODE_SIS = 2, + SERIES_SHOT_MODE_BURST = 3, + SERIES_SHOT_MODE_ERASER = 4, + SERIES_SHOT_MODE_BEST_FACE = 5, + SERIES_SHOT_MODE_BEST_PHOTO = 6, + SERIES_SHOT_MODE_MAGIC = 7, + SERIES_SHOT_MODE_SELFIE_ALARM = 8, + SERIES_SHOT_MODE_MAX, +}; + +enum ISO_VALUES { + ISO_AUTO = (1 << 0), + ISO_100 = (1 << 1), + ISO_200 = (1 << 2), + ISO_400 = (1 << 3), + ISO_800 = (1 << 4), +}; + +enum { +#ifdef EFFECT_VALUE_VERSION_2_0 + EFFECT_NONE = (1 << 0), + EFFECT_MONO = (1 << 1), + EFFECT_NEGATIVE = (1 << 2), + EFFECT_SOLARIZE = (1 << 3), + EFFECT_SEPIA = (1 << 4), + EFFECT_POSTERIZE = (1 << 5), + EFFECT_WHITEBOARD = (1 << 6), + EFFECT_BLACKBOARD = (1 << 7), + EFFECT_AQUA = (1 << 8), + EFFECT_RED_YELLOW = (1 << 9), + EFFECT_BLUE = (1 << 10), + EFFECT_WARM_VINTAGE = (1 << 11), + EFFECT_COLD_VINTAGE = (1 << 12), + EFFECT_BEAUTY_FACE = (1 << 13), +#else + EFFECT_NONE = (1 << COLORCORRECTION_MODE_FAST), + EFFECT_MONO = (1 << COLORCORRECTION_MODE_EFFECT_MONO), + EFFECT_NEGATIVE = (1 << COLORCORRECTION_MODE_EFFECT_NEGATIVE), + EFFECT_SOLARIZE = (1 << COLORCORRECTION_MODE_EFFECT_SOLARIZE), + EFFECT_SEPIA = (1 << COLORCORRECTION_MODE_EFFECT_SEPIA), + EFFECT_POSTERIZE = (1 << COLORCORRECTION_MODE_EFFECT_POSTERIZE), + EFFECT_WHITEBOARD = (1 << COLORCORRECTION_MODE_EFFECT_WHITEBOARD), + EFFECT_BLACKBOARD = (1 << COLORCORRECTION_MODE_EFFECT_BLACKBOARD), + EFFECT_AQUA = (1 << COLORCORRECTION_MODE_EFFECT_AQUA), + EFFECT_RED_YELLOW = (1 << COLORCORRECTION_MODE_EFFECT_RED_YELLOW_POINT), + EFFECT_BLUE = (1 << COLORCORRECTION_MODE_EFFECT_BLUE_POINT), + EFFECT_WARM_VINTAGE = (1 << COLORCORRECTION_MODE_EFFECT_WARM_VINTAGE), + EFFECT_COLD_VINTAGE = (1 << COLORCORRECTION_MODE_EFFECT_COLD_VINTAGE), + EFFECT_BEAUTY_FACE = (1 << COLORCORRECTION_MODE_EFFECT_BEAUTY_FACE), +#endif +}; + +}; /* namespace android */ +#endif diff --git a/libcamera_external/Exif.cpp b/libcamera_external/Exif.cpp new file mode 100644 index 0000000..20e0a10 --- /dev/null +++ b/libcamera_external/Exif.cpp @@ -0,0 +1,429 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file Exif.cpp + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#define LOG_TAG "Exif" + +#include +#include +#include + +#include + +#include "Exif.h" +#include "model/include/SecCameraHardware-model.h" + +namespace android { +#define CLEAR(x) memset(&(x), 0, sizeof(x)) + +const char Exif::DEFAULT_MAKER[] = "samsung"; +const char Exif::DEFAULT_MODEL[] = "SAMSUNG"; +const char Exif::DEFAULT_SOFTWARE[] = "SAMSUNG"; +const char Exif::DEFAULT_EXIF_VERSION[] = "0220"; +const char Exif::DEFAULT_USERCOMMENTS[] = "User comments"; + +const int Exif::DEFAULT_YCBCR_POSITIONING = 1; + +const int Exif::DEFAULT_EXPOSURE_PROGRAM = 2; + +const int Exif::DEFAULT_FLASH = 0; +const int Exif::DEFAULT_COLOR_SPACE = 1; +const int Exif::DEFAULT_EXPOSURE_MODE = EXIF_EXPOSURE_AUTO; +const int Exif::DEFAULT_APEX_DEN = 100; +const int Exif::DEFAULT_SENSING_METHOD = 2; + +const int Exif::DEFAULT_COMPRESSION = 6; +const int Exif::DEFAULT_RESOLUTION_NUM = 72; +const int Exif::DEFAULT_RESOLUTION_DEN = 1; +const int Exif::DEFAULT_RESOLUTION_UNIT = 2; + +const int Exif::DEFAULT_CONTINUOUS_SHOT_INFO = 0; +Exif::Exif(int cameraId, int CameraType) +{ + mCameraId = cameraId; + mCameraType = CameraType; + + mNum0thIfdTiff = 10; + mNum0thIfdExif = 26; + mNum0thIfdGps = 10; + mNum1thIfdTiff = 9; +} + +Exif::~Exif() +{ +} + +uint32_t Exif::make(void *exifOutBuf, + exif_attribute_t *exifInfo, + unsigned int exifOutBufSize, + unsigned char *thumbBuf, + unsigned int thumbSize) +{ + ALOGV("makeExif E"); + + unsigned char *pCur, *pApp1Start, *pIfdStart, *pGpsIfdPtr, *pNextIfdOffset; + unsigned int tmp, LongerTagOffest = 0; + pApp1Start = pCur = (unsigned char *)exifOutBuf; + + /* Exif Identifier Code & TIFF Header */ + pCur += 4; + /* Skip 4 Byte for APP1 marker and length */ + unsigned char ExifIdentifierCode[6] = {0x45, 0x78, 0x69, 0x66, 0x00, 0x00}; + memcpy(pCur, ExifIdentifierCode, 6); + pCur += 6; + + /* Byte Order - little endian, Offset of IFD - 0x00000008.H */ + unsigned char TiffHeader[8] = {0x49, 0x49, 0x2A, 0x00, 0x08, 0x00, 0x00, 0x00}; + memcpy(pCur, TiffHeader, 8); + pIfdStart = pCur; + pCur += 8; + + const char asciiPrefix[] = {0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0}; + unsigned char tmpBuf[256] = {0, }; + size_t len; + + /* 0th IFD TIFF Tags */ + if (exifInfo->enableGps) + tmp = mNum0thIfdTiff; + else + tmp = mNum0thIfdTiff - 1; + + memcpy(pCur, &tmp, NUM_SIZE); + pCur += NUM_SIZE; + + LongerTagOffest += 8 + NUM_SIZE + tmp * IFD_SIZE + OFFSET_SIZE; + + writeExifIfd(&pCur, EXIF_TAG_IMAGE_WIDTH, EXIF_TYPE_LONG, + 1, exifInfo->width); + writeExifIfd(&pCur, EXIF_TAG_IMAGE_HEIGHT, EXIF_TYPE_LONG, + 1, exifInfo->height); + writeExifIfd(&pCur, EXIF_TAG_MAKE, EXIF_TYPE_ASCII, + strlen((char *)exifInfo->maker) + 1, exifInfo->maker, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_MODEL, EXIF_TYPE_ASCII, + strlen((char *)exifInfo->model) + 1, exifInfo->model, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_ORIENTATION, EXIF_TYPE_SHORT, + 1, exifInfo->orientation); + writeExifIfd(&pCur, EXIF_TAG_SOFTWARE, EXIF_TYPE_ASCII, + strlen((char *)exifInfo->software) + 1, exifInfo->software, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_DATE_TIME, EXIF_TYPE_ASCII, + 20, exifInfo->date_time, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_YCBCR_POSITIONING, EXIF_TYPE_SHORT, + 1, exifInfo->ycbcr_positioning); + writeExifIfd(&pCur, EXIF_TAG_EXIF_IFD_POINTER, EXIF_TYPE_LONG, + 1, LongerTagOffest); + 
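+    /* A sketch of how the entries above are laid out (inferred from the
+     * constants in Exif.h, not from documentation shipped with this BSP):
+     * every writeExifIfd() call emits one 12-byte IFD entry,
+     * tag(2) | type(2) | count(4) | value-or-offset(4), which is why
+     * IFD_SIZE is 12, NUM_SIZE (the entry-count field) is 2 and
+     * OFFSET_SIZE (the next-IFD pointer) is 4. Values wider than 4 bytes
+     * (ASCII strings, RATIONALs) are copied to pIfdStart + LongerTagOffest
+     * and the entry stores that offset instead. For example, with a
+     * hypothetical local rational_t exposure = {1, 125}:
+     *
+     *   writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_TIME, EXIF_TYPE_RATIONAL,
+     *                1, &exposure, &LongerTagOffest, pIfdStart);
+     *
+     * writes a 12-byte entry holding the current LongerTagOffest, copies
+     * the 8-byte {1, 125} to pIfdStart + that offset, and advances
+     * LongerTagOffest by 8 * count.
+     */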
if (exifInfo->enableGps) { + pGpsIfdPtr = pCur; + /* Skip a ifd size for gps IFD pointer */ + pCur += IFD_SIZE; + } + + /* Skip a offset size for next IFD offset */ + pNextIfdOffset = pCur; + pCur += OFFSET_SIZE; + + /* 0th IFD Exif Private Tags */ + pCur = pIfdStart + LongerTagOffest; + + tmp = mNum0thIfdExif; + if (mCameraId == CAMERA_FACING_BACK) { + if (mCameraType != CAMERA_TYPE_ISP) { + tmp -= 3; + } + } else { + tmp -= 8; + } + memcpy(pCur, &tmp , NUM_SIZE); + pCur += NUM_SIZE; + + LongerTagOffest += NUM_SIZE + (tmp * IFD_SIZE) + OFFSET_SIZE; + + writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_TIME, EXIF_TYPE_RATIONAL, + 1, &exifInfo->exposure_time, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_FNUMBER, EXIF_TYPE_RATIONAL, + 1, &exifInfo->fnumber, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_PROGRAM, EXIF_TYPE_SHORT, + 1, exifInfo->exposure_program); + writeExifIfd(&pCur, EXIF_TAG_ISO_SPEED_RATING, EXIF_TYPE_SHORT, + 1, exifInfo->iso_speed_rating); + writeExifIfd(&pCur, EXIF_TAG_EXIF_VERSION, EXIF_TYPE_UNDEFINED, + 4, exifInfo->exif_version); + writeExifIfd(&pCur, EXIF_TAG_DATE_TIME_ORG, EXIF_TYPE_ASCII, + 20, exifInfo->date_time, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_DATE_TIME_DIGITIZE, EXIF_TYPE_ASCII, + 20, exifInfo->date_time, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_IMAGE_UNIQUE_ID, EXIF_TYPE_ASCII, + 12, exifInfo->unique_id, &LongerTagOffest, pIfdStart); + + if (mCameraId == CAMERA_FACING_BACK) { + if (CAMERA_TYPE_ISP == mCameraType) { + writeExifIfd(&pCur, EXIF_TAG_SHUTTER_SPEED, EXIF_TYPE_SRATIONAL, + 1, (rational_t *)&exifInfo->shutter_speed, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_APERTURE, EXIF_TYPE_RATIONAL, + 1, &exifInfo->aperture, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_BRIGHTNESS, EXIF_TYPE_SRATIONAL, + 1, (rational_t *)&exifInfo->brightness, &LongerTagOffest, pIfdStart); + } + writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_BIAS, EXIF_TYPE_SRATIONAL, + 1, (rational_t *)&exifInfo->exposure_bias, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_LIGHT_SOURCE, EXIF_TYPE_SHORT, + 1, exifInfo->light_source); + writeExifIfd(&pCur, EXIF_TAG_FLASH, EXIF_TYPE_SHORT, + 1, exifInfo->flash); + } + + writeExifIfd(&pCur, EXIF_TAG_MAX_APERTURE, EXIF_TYPE_RATIONAL, + 1, &exifInfo->max_aperture, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_METERING_MODE, EXIF_TYPE_SHORT, + 1, exifInfo->metering_mode); + writeExifIfd(&pCur, EXIF_TAG_FOCAL_LENGTH, EXIF_TYPE_RATIONAL, + 1, &exifInfo->focal_length, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_FOCAL_35mm_LENGTH, EXIF_TYPE_SHORT, + 1, exifInfo->focal_35mm_length); + CLEAR(tmpBuf); + len = strlen((char *)exifInfo->user_comment) + 1; + memcpy(tmpBuf, asciiPrefix, sizeof(asciiPrefix)); + memcpy(tmpBuf + sizeof(asciiPrefix), exifInfo->user_comment, len); + writeExifIfd(&pCur, EXIF_TAG_USER_COMMENT, EXIF_TYPE_UNDEFINED, + len + sizeof(asciiPrefix), tmpBuf, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_COLOR_SPACE, EXIF_TYPE_SHORT, + 1, exifInfo->color_space); + writeExifIfd(&pCur, EXIF_TAG_PIXEL_X_DIMENSION, EXIF_TYPE_LONG, + 1, exifInfo->width); + writeExifIfd(&pCur, EXIF_TAG_PIXEL_Y_DIMENSION, EXIF_TYPE_LONG, + 1, exifInfo->height); + writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_MODE, EXIF_TYPE_LONG, + 1, exifInfo->exposure_mode); + writeExifIfd(&pCur, EXIF_TAG_WHITE_BALANCE, EXIF_TYPE_LONG, + 1, exifInfo->white_balance); + + if (mCameraId == CAMERA_FACING_BACK) { + writeExifIfd(&pCur, 
EXIF_TAG_SCENCE_CAPTURE_TYPE, EXIF_TYPE_LONG, + 1, exifInfo->scene_capture_type); + writeExifIfd(&pCur, EXIF_TAG_SENSING_METHOD, EXIF_TYPE_SHORT, + 1, exifInfo->sensing_method); + } + + tmp = 0; + /* next IFD offset */ + memcpy(pCur, &tmp, OFFSET_SIZE); + pCur += OFFSET_SIZE; + + /* 0th IFD GPS Info Tags */ + if (exifInfo->enableGps) { + /* GPS IFD pointer skipped on 0th IFD */ + writeExifIfd(&pGpsIfdPtr, EXIF_TAG_GPS_IFD_POINTER, EXIF_TYPE_LONG, + 1, LongerTagOffest); + + pCur = pIfdStart + LongerTagOffest; + + tmp = mNum0thIfdGps; + memcpy(pCur, &tmp, NUM_SIZE); + pCur += NUM_SIZE; + + LongerTagOffest += NUM_SIZE + mNum0thIfdGps * IFD_SIZE + OFFSET_SIZE; + + writeExifIfd(&pCur, EXIF_TAG_GPS_VERSION_ID, EXIF_TYPE_BYTE, + 4, exifInfo->gps_version_id); + writeExifIfd(&pCur, EXIF_TAG_GPS_LATITUDE_REF, EXIF_TYPE_ASCII, + 2, exifInfo->gps_latitude_ref); + writeExifIfd(&pCur, EXIF_TAG_GPS_LATITUDE, EXIF_TYPE_RATIONAL, + 3, exifInfo->gps_latitude, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_GPS_LONGITUDE_REF, EXIF_TYPE_ASCII, + 2, exifInfo->gps_longitude_ref); + writeExifIfd(&pCur, EXIF_TAG_GPS_LONGITUDE, EXIF_TYPE_RATIONAL, + 3, exifInfo->gps_longitude, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_GPS_ALTITUDE_REF, EXIF_TYPE_BYTE, + 1, exifInfo->gps_altitude_ref); + writeExifIfd(&pCur, EXIF_TAG_GPS_ALTITUDE, EXIF_TYPE_RATIONAL, + 1, &exifInfo->gps_altitude, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_GPS_TIMESTAMP, EXIF_TYPE_RATIONAL, + 3, exifInfo->gps_timestamp, &LongerTagOffest, pIfdStart); + CLEAR(tmpBuf); + len = strlen((char *)exifInfo->gps_processing_method); + memcpy(tmpBuf, asciiPrefix, sizeof(asciiPrefix)); + memcpy(tmpBuf + sizeof(asciiPrefix), exifInfo->gps_processing_method, len); + writeExifIfd(&pCur, EXIF_TAG_GPS_PROCESSING_METHOD, EXIF_TYPE_UNDEFINED, + len + sizeof(asciiPrefix), tmpBuf, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_GPS_DATESTAMP, EXIF_TYPE_ASCII, + 11, exifInfo->gps_datestamp, &LongerTagOffest, pIfdStart); + tmp = 0; + /* next IFD offset */ + memcpy(pCur, &tmp, OFFSET_SIZE); + pCur += OFFSET_SIZE; + } + + /* 1th IFD TIFF Tags */ + if ((thumbBuf != NULL) && (thumbSize > 0)) { + if (CC_UNLIKELY(!exifOutBufSize)) { + ALOGE("makeExif: error, exifOutBufSize is zero"); + return 0; + } + + tmp = LongerTagOffest; + /* NEXT IFD offset skipped on 0th IFD */ + memcpy(pNextIfdOffset, &tmp, OFFSET_SIZE); + + pCur = pIfdStart + LongerTagOffest; + + tmp = mNum1thIfdTiff; + memcpy(pCur, &tmp, NUM_SIZE); + pCur += NUM_SIZE; + + LongerTagOffest += NUM_SIZE + mNum1thIfdTiff * IFD_SIZE + OFFSET_SIZE; + + writeExifIfd(&pCur, EXIF_TAG_IMAGE_WIDTH, EXIF_TYPE_LONG, + 1, exifInfo->widthThumb); + writeExifIfd(&pCur, EXIF_TAG_IMAGE_HEIGHT, EXIF_TYPE_LONG, + 1, exifInfo->heightThumb); + writeExifIfd(&pCur, EXIF_TAG_COMPRESSION_SCHEME, EXIF_TYPE_SHORT, + 1, exifInfo->compression_scheme); + writeExifIfd(&pCur, EXIF_TAG_ORIENTATION, EXIF_TYPE_SHORT, + 1, exifInfo->orientation); + writeExifIfd(&pCur, EXIF_TAG_X_RESOLUTION, EXIF_TYPE_RATIONAL, + 1, &exifInfo->x_resolution, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_Y_RESOLUTION, EXIF_TYPE_RATIONAL, + 1, &exifInfo->y_resolution, &LongerTagOffest, pIfdStart); + writeExifIfd(&pCur, EXIF_TAG_RESOLUTION_UNIT, EXIF_TYPE_SHORT, + 1, exifInfo->resolution_unit); + writeExifIfd(&pCur, EXIF_TAG_JPEG_INTERCHANGE_FORMAT, EXIF_TYPE_LONG, + 1, LongerTagOffest); + writeExifIfd(&pCur, EXIF_TAG_JPEG_INTERCHANGE_FORMAT_LEN, EXIF_TYPE_LONG, + 1, thumbSize); + + tmp = 0; + /* next 
IFD offset */ + memcpy(pCur, &tmp, OFFSET_SIZE); + pCur += OFFSET_SIZE; + +#ifdef SUPPORT_64BITS + if (CC_UNLIKELY((unsigned long)pIfdStart + LongerTagOffest + thumbSize >= ((unsigned long)exifOutBuf + exifOutBufSize))) { + ALOGE("makeExif: error, thumbnail size(%d bytes) is too big ", thumbSize); + return 0; + } +#else + if (CC_UNLIKELY(pIfdStart + LongerTagOffest + thumbSize >= (void *)((uint32_t)exifOutBuf + exifOutBufSize))) { + ALOGE("makeExif: error, thumbnail size(%d bytes) is too big ", thumbSize); + return 0; + } +#endif + memcpy(pIfdStart + LongerTagOffest, + thumbBuf, thumbSize); + LongerTagOffest += thumbSize; + } else { + tmp = 0; + /* NEXT IFD offset skipped on 0th IFD */ + memcpy(pNextIfdOffset, &tmp, OFFSET_SIZE); + } + + unsigned char App1Marker[2] = {0xff, 0xe1}; + memcpy(pApp1Start, App1Marker, 2); + pApp1Start += 2; + + uint32_t size = 10 + LongerTagOffest; + /* APP1 Maker isn't counted */ + tmp = size - 2; + unsigned char size_le[2] = {(unsigned char)((tmp >> 8) & 0xFF), (unsigned char)(tmp & 0xFF)}; + memcpy(pApp1Start, size_le, 2); + + ALOGV("makeExif X: size %d byte", size); + return size; +} + +inline void Exif::writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + uint32_t value) +{ + memcpy(*pCur, &tag, 2); + *pCur += 2; + memcpy(*pCur, &type, 2); + *pCur += 2; + memcpy(*pCur, &count, 4); + *pCur += 4; + memcpy(*pCur, &value, 4); + *pCur += 4; +} + +inline void Exif::writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + unsigned char *pValue) +{ + char buf[4] = {0,}; + memcpy(buf, pValue, count); + memcpy(*pCur, &tag, 2); + *pCur += 2; + memcpy(*pCur, &type, 2); + *pCur += 2; + memcpy(*pCur, &count, 4); + *pCur += 4; + memcpy(*pCur, buf, 4); + *pCur += 4; +} + + +inline void Exif::writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + unsigned char *pValue, + unsigned int *offset, + unsigned char *start) +{ + memcpy(*pCur, &tag, 2); + *pCur += 2; + memcpy(*pCur, &type, 2); + *pCur += 2; + memcpy(*pCur, &count, 4); + *pCur += 4; + memcpy(*pCur, offset, 4); + *pCur += 4; + memcpy(start + *offset, pValue, count); + *offset += count; +} + +inline void Exif::writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + rational_t *pValue, + unsigned int *offset, + unsigned char *start) +{ + memcpy(*pCur, &tag, 2); + *pCur += 2; + memcpy(*pCur, &type, 2); + *pCur += 2; + memcpy(*pCur, &count, 4); + *pCur += 4; + memcpy(*pCur, offset, 4); + *pCur += 4; + memcpy(start + *offset, pValue, 8 * count); + *offset += 8 * count; +} + +}; + diff --git a/libcamera_external/Exif.h b/libcamera_external/Exif.h new file mode 100644 index 0000000..48985c9 --- /dev/null +++ b/libcamera_external/Exif.h @@ -0,0 +1,318 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! 
+ * \file Exif.h + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_EXIF_H +#define ANDROID_HARDWARE_EXIF_H + +#include + +#define EXIF_LOG2(x) (log((double)(x))/log(2.0)) +#define ROUND(x, y) ((x) >= 0 ? \ + ((x)*(double)pow(10.0, y) + 0.5)/(double)pow(10.0, y) : \ + ((x)*(double)pow(10.0, y) - 0.5)/(double)pow(10.0, y)) +#define APEX_FNUM_TO_APERTURE(x) ROUND(EXIF_LOG2((double)x)*2, 2) +#define APEX_EXPOSURE_TO_SHUTTER(x) ROUND(EXIF_LOG2((double)x), 2) +#define APEX_ISO_TO_FILMSENSITIVITY(x) ROUND(EXIF_LOG2((x)/3.125), 2) +#define APEX_SHUTTER_TO_EXPOSURE(x) ROUND((double)pow(2.0, x), 0) + +#define NUM_SIZE 2 +#define IFD_SIZE 12 +#define OFFSET_SIZE 4 + +/* Type */ +#define EXIF_TYPE_BYTE 1 +#define EXIF_TYPE_ASCII 2 +#define EXIF_TYPE_SHORT 3 +#define EXIF_TYPE_LONG 4 +#define EXIF_TYPE_RATIONAL 5 +#define EXIF_TYPE_UNDEFINED 7 +#define EXIF_TYPE_SLONG 9 +#define EXIF_TYPE_SRATIONAL 10 + +#define EXIF_MAX_LEN 0x18000 + +/* 0th IFD TIFF Tags */ +#define EXIF_TAG_IMAGE_WIDTH 0x0100 +#define EXIF_TAG_IMAGE_HEIGHT 0x0101 +#define EXIF_TAG_MAKE 0x010f +#define EXIF_TAG_MODEL 0x0110 +#define EXIF_TAG_ORIENTATION 0x0112 +#define EXIF_TAG_SOFTWARE 0x0131 +#define EXIF_TAG_DATE_TIME 0x0132 +#define EXIF_TAG_YCBCR_POSITIONING 0x0213 +#define EXIF_TAG_EXIF_IFD_POINTER 0x8769 +#define EXIF_TAG_GPS_IFD_POINTER 0x8825 + +/* 0th IFD Exif Private Tags */ +#define EXIF_TAG_EXPOSURE_TIME 0x829A +#define EXIF_TAG_FNUMBER 0x829D +#define EXIF_TAG_EXPOSURE_PROGRAM 0x8822 +#define EXIF_TAG_ISO_SPEED_RATING 0x8827 +#define EXIF_TAG_EXIF_VERSION 0x9000 +#define EXIF_TAG_DATE_TIME_ORG 0x9003 +#define EXIF_TAG_DATE_TIME_DIGITIZE 0x9004 +#define EXIF_TAG_SHUTTER_SPEED 0x9201 +#define EXIF_TAG_APERTURE 0x9202 +#define EXIF_TAG_BRIGHTNESS 0x9203 +#define EXIF_TAG_EXPOSURE_BIAS 0x9204 +#define EXIF_TAG_MAX_APERTURE 0x9205 +#define EXIF_TAG_METERING_MODE 0x9207 +#define EXIF_TAG_LIGHT_SOURCE 0x9208 +#define EXIF_TAG_FLASH 0x9209 +#define EXIF_TAG_FOCAL_LENGTH 0x920A +#define EXIF_TAG_FOCAL_35mm_LENGTH 0xA405 +#define EXIF_TAG_USER_COMMENT 0x9286 +#define EXIF_TAG_COLOR_SPACE 0xA001 +#define EXIF_TAG_SENSING_METHOD 0xA217 +#define EXIF_TAG_PIXEL_X_DIMENSION 0xA002 +#define EXIF_TAG_PIXEL_Y_DIMENSION 0xA003 +#define EXIF_TAG_EXPOSURE_MODE 0xA402 +#define EXIF_TAG_WHITE_BALANCE 0xA403 +#define EXIF_TAG_SCENCE_CAPTURE_TYPE 0xA406 +#define EXIF_TAG_IMAGE_UNIQUE_ID 0xA420 +#define EXIF_TAG_CONTINUOUS_SHOT_INFO 0x000B + +/* 0th IFD GPS Info Tags */ +#define EXIF_TAG_GPS_VERSION_ID 0x0000 +#define EXIF_TAG_GPS_LATITUDE_REF 0x0001 +#define EXIF_TAG_GPS_LATITUDE 0x0002 +#define EXIF_TAG_GPS_LONGITUDE_REF 0x0003 +#define EXIF_TAG_GPS_LONGITUDE 0x0004 +#define EXIF_TAG_GPS_ALTITUDE_REF 0x0005 +#define EXIF_TAG_GPS_ALTITUDE 0x0006 +#define EXIF_TAG_GPS_TIMESTAMP 0x0007 +#define EXIF_TAG_GPS_PROCESSING_METHOD 0x001B +#define EXIF_TAG_GPS_DATESTAMP 0x001D + +/* 1th IFD TIFF Tags */ +#define EXIF_TAG_COMPRESSION_SCHEME 0x0103 +#define EXIF_TAG_X_RESOLUTION 0x011A +#define EXIF_TAG_Y_RESOLUTION 0x011B +#define EXIF_TAG_RESOLUTION_UNIT 0x0128 +#define EXIF_TAG_JPEG_INTERCHANGE_FORMAT 0x0201 +#define EXIF_TAG_JPEG_INTERCHANGE_FORMAT_LEN 0x0202 + +namespace android { +typedef enum { + EXIF_ORIENTATION_UP = 1, + EXIF_ORIENTATION_90 = 6, + EXIF_ORIENTATION_180 = 3, + EXIF_ORIENTATION_270 = 8, +} ExifOrientationType; + +typedef enum { + EXIF_SCENE_STANDARD, + EXIF_SCENE_LANDSCAPE, + EXIF_SCENE_PORTRAIT, + EXIF_SCENE_NIGHT, +} 
CamExifSceneCaptureType; + +typedef enum { + EXIF_METERING_UNKNOWN, + EXIF_METERING_AVERAGE, + EXIF_METERING_CENTER, + EXIF_METERING_SPOT, + EXIF_METERING_MULTISPOT, + EXIF_METERING_PATTERN, + EXIF_METERING_PARTIAL, + EXIF_METERING_OTHER = 255, +} CamExifMeteringModeType; + +typedef enum { + EXIF_EXPOSURE_AUTO, + EXIF_EXPOSURE_MANUAL, + EXIF_EXPOSURE_AUTO_BRACKET, +} CamExifExposureModeType; + +typedef enum { + EXIF_WB_AUTO, + EXIF_WB_MANUAL, +} CamExifWhiteBalanceType; + +typedef enum { + EXIF_EXP_PROGRAM_NOT_DEFINE, + EXIF_EXP_PROGRAM_MANUAL, + EXIF_EXP_PROGRAM_NORMAL_PROGRAM, + EXIF_EXP_PROGRAM_APERTURE_PRIO, + EXIF_EXP_PROGRAM_SHUTTER_PRIO, + EXIF_EXP_PROGRAM_CREATIVE_PROGRAM, + EXIF_EXP_PROGRAM_ACTION_PROGRAM, + EXIF_EXP_PROGRAM_PORTRAIT_MODE, + EXIF_EXP_PROGRAM_LANDSCAPE_MODE, +} CamExifExposureProgramType; + +enum { + CAMERA_TYPE_SOC, + CAMERA_TYPE_ISP, +}; + +typedef struct { + uint32_t num; + uint32_t den; +} rational_t; + +typedef struct { + int32_t num; + int32_t den; +} srational_t; + +typedef struct { + bool enableGps; + + unsigned char maker[32]; + unsigned char model[32]; + unsigned char software[32]; + unsigned char exif_version[4]; + unsigned char date_time[20]; + unsigned char user_comment[128]; + unsigned char unique_id[12]; + + uint32_t width; + uint32_t height; + uint32_t widthThumb; + uint32_t heightThumb; + uint32_t cShotInfo; + + uint16_t orientation; + uint16_t ycbcr_positioning; + uint16_t exposure_program; + uint16_t iso_speed_rating; + uint16_t metering_mode; + uint16_t light_source; + uint16_t flash; + uint16_t color_space; + uint16_t exposure_mode; + uint16_t white_balance; + uint16_t scene_capture_type; + uint16_t focal_35mm_length; + uint16_t sensing_method; + + rational_t exposure_time; + rational_t fnumber; + rational_t aperture; + rational_t max_aperture; + rational_t focal_length; + + srational_t shutter_speed; + srational_t brightness; + srational_t exposure_bias; + + unsigned char gps_latitude_ref[2]; + unsigned char gps_longitude_ref[2]; + + uint8_t gps_version_id[4]; + uint8_t gps_altitude_ref; + + rational_t gps_latitude[3]; + rational_t gps_longitude[3]; + rational_t gps_altitude; + rational_t gps_timestamp[3]; + unsigned char gps_datestamp[11]; + unsigned char gps_processing_method[128]; + + rational_t x_resolution; + rational_t y_resolution; + uint16_t resolution_unit; + uint16_t compression_scheme; +} exif_attribute_t; + +class Exif { +public: + Exif(int cameraId, int CameraType = CAMERA_TYPE_ISP); + virtual ~Exif(); + + uint32_t make(void *exifOut, + exif_attribute_t *exifInfo, + unsigned int exifOutBufSize = 0, + unsigned char *thumbBuf = NULL, + unsigned int thumbSize = 0); + + static const char DEFAULT_MAKER[]; + static const char DEFAULT_MODEL[]; + static const char DEFAULT_SOFTWARE[]; + static const char DEFAULT_EXIF_VERSION[]; + static const char DEFAULT_USERCOMMENTS[]; + + static const int DEFAULT_YCBCR_POSITIONING; + static const int DEFAULT_BACK_FNUMBER_NUM; + static const int DEFAULT_BACK_FNUMBER_DEN; + static const int DEFAULT_FRONT_FNUMBER_NUM; + static const int DEFAULT_FRONT_FNUMBER_DEN; + static const int DEFAULT_EXPOSURE_PROGRAM; + static const int DEFAULT_BACK_FOCAL_LEN_NUM; + static const int DEFAULT_BACK_FOCAL_LEN_DEN; + static const int DEFAULT_FRONT_FOCAL_LEN_NUM; + static const int DEFAULT_FRONT_FOCAL_LEN_DEN; + static const int DEFAULT_BACK_FOCAL_LEN_35mm; + static const int DEFAULT_FRONT_FOCAL_LEN_35mm; + static const int DEFAULT_FLASH; + static const int DEFAULT_COLOR_SPACE; + static const int DEFAULT_EXPOSURE_MODE; + 
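+    /* DEFAULT_APEX_DEN below is presumably the denominator used when the
+     * APEX macros at the top of this header are packed into rational_t
+     * values (that pairing is an assumption; the callers are not in this
+     * file). Worked example: APEX_FNUM_TO_APERTURE(2.2) =
+     * ROUND(log2(2.2) * 2, 2) = 2.28, which would then be stored as
+     * 228 / DEFAULT_APEX_DEN, i.e. 228/100, in exif_attribute_t::aperture.
+     */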
static const int DEFAULT_APEX_DEN; + static const int DEFAULT_SENSING_METHOD; + + static const int DEFAULT_COMPRESSION; + static const int DEFAULT_RESOLUTION_NUM; + static const int DEFAULT_RESOLUTION_DEN; + static const int DEFAULT_RESOLUTION_UNIT; + + static const int DEFAULT_CONTINUOUS_SHOT_INFO; + +private: + int mCameraId; + + int mNum0thIfdTiff; + int mNum0thIfdExif; + int mNum0thIfdGps; + int mNum1thIfdTiff; + int mCameraType; /* ISP or SOC */ + + inline void writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + uint32_t value); + inline void writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + unsigned char *pValue); + inline void writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + rational_t *pValue, + unsigned int *offset, + unsigned char *start); + inline void writeExifIfd(unsigned char **pCur, + unsigned short tag, + unsigned short type, + unsigned int count, + unsigned char *pValue, + unsigned int *offset, + unsigned char *start); +}; +}; +#endif /* ANDROID_HARDWARE_EXIF_H */ diff --git a/libcamera_external/ISecCameraHardware.cpp b/libcamera_external/ISecCameraHardware.cpp new file mode 100644 index 0000000..222ae63 --- /dev/null +++ b/libcamera_external/ISecCameraHardware.cpp @@ -0,0 +1,4149 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! + * \file ISecCameraHardware.cpp + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_ISECCAMERAHARDWARE_CPP +#define ANDROID_HARDWARE_ISECCAMERAHARDWARE_CPP + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ISecCameraHardware" + +#include + +namespace android { + +ISecCameraHardware::ISecCameraHardware(int cameraId, camera_device_t *dev) + : mCameraId(cameraId), + mParameters(), + mFlagANWindowRegister(false), + mPreviewHeap(NULL), + mPostviewHeap(NULL), + mPostviewHeapTmp(NULL), + mRawHeap(NULL), + mRecordingHeap(NULL), + mJpegHeap(NULL), + mHDRHeap(NULL), + mYUVHeap(NULL), + mPreviewFormat(CAM_PIXEL_FORMAT_YUV420SP), + mPictureFormat(CAM_PIXEL_FORMAT_JPEG), + mFliteFormat(CAM_PIXEL_FORMAT_YUV420SP), + mflipHorizontal(false), + mflipVertical(false), + mNeedSizeChange(false), + mFastCaptureCalled(false), + mRecordingTrace(false), + mMsgEnabled(0), + mGetMemoryCb(0), + mPreviewWindow(NULL), + mNotifyCb(0), + mDataCb(0), + mDataCbTimestamp(0), + mCallbackCookie(0), + mDisablePostview(false), + mAntibanding60Hz(-1), + + mHalDevice(dev) +{ + const image_rect_type *videoSizes, *previewFLiteSizes, *captureFLiteSizes, *videoSize, *fliteSize; + int videoSizesCount, previewFLiteCount, captureFLiteCount; + + if (mCameraId == CAMERA_ID_BACK) { + mZoomSupport = IsZoomSupported(); + mEnableDZoom = mZoomSupport ? 
IsAPZoomSupported() : false; + mFastCaptureSupport = IsFastCaptureSupportedOnRear(); + + mSensorSize = backSensorSize; + mPreviewSize = backPreviewSizes[0]; + mPictureSize = backPictureSizes[0]; + mThumbnailSize = backThumbSizes[0]; + videoSizes = backRecordingSizes; + videoSizesCount = ARRAY_SIZE(backRecordingSizes); + + previewFLiteSizes = backFLitePreviewSizes; + previewFLiteCount = ARRAY_SIZE(backFLitePreviewSizes); + captureFLiteSizes = backFLiteCaptureSizes; + captureFLiteCount = ARRAY_SIZE(backFLiteCaptureSizes); + } else { + mZoomSupport = IsZoomSupported(); + mEnableDZoom = mZoomSupport ? IsAPZoomSupported() : false; + mFastCaptureSupport = IsFastCaptureSupportedOnFront(); + + mSensorSize = frontSensorSize; + mPreviewSize = frontPreviewSizes[0]; + mPictureSize = frontPictureSizes[0]; + mThumbnailSize = frontThumbSizes[0]; + videoSizes = frontRecordingSizes; + videoSizesCount = ARRAY_SIZE(frontRecordingSizes); + + previewFLiteSizes = frontFLitePreviewSizes; + previewFLiteCount = ARRAY_SIZE(frontFLitePreviewSizes); + captureFLiteSizes = frontFLiteCaptureSizes; + captureFLiteCount = ARRAY_SIZE(frontFLiteCaptureSizes); + } + + mOrgPreviewSize = mPreviewSize; + mPreviewWindowSize.width = mPreviewWindowSize.height = 0; + + videoSize = getFrameSizeRatio(videoSizes, videoSizesCount, mPreviewSize.width, mPreviewSize.height); + mVideoSize = videoSize ? *videoSize : videoSizes[0]; + + fliteSize = getFrameSizeRatio(previewFLiteSizes, previewFLiteCount, mPreviewSize.width, mPreviewSize.height); + mFLiteSize = fliteSize ? *fliteSize : previewFLiteSizes[0]; + fliteSize = getFrameSizeRatio(captureFLiteSizes, captureFLiteCount, mPictureSize.width, mPictureSize.height); + mFLiteCaptureSize = fliteSize ? *fliteSize : captureFLiteSizes[0]; + + mRawSize = mPreviewSize; + mPostviewSize = mPreviewSize; + mCaptureMode = RUNNING_MODE_SINGLE; + +#if FRONT_ZSL + rawImageMem = NULL; + mFullPreviewHeap = NULL; +#endif + + mPreviewWindowSize.width = mPreviewWindowSize.height = 0; + + mFrameRate = 0; + mFps = 0; + mJpegQuality= 96; + mSceneMode = (cam_scene_mode)sceneModes[0].val; + mFlashMode = (cam_flash_mode)flashModes[0].val; + if (mCameraId == CAMERA_ID_BACK) + mFocusMode = (cam_focus_mode)backFocusModes[0].val; + else + mFocusMode = (cam_focus_mode)frontFocusModes[0].val; + + mMaxFrameRate = 30000; + mDropFrameCount = 3; + mbFirst_preview_started = false; + + mIonCameraClient = -1; + mPictureFrameSize = 0; +#if FRONT_ZSL + mFullPreviewFrameSize = 0; +#endif + CLEAR(mAntiBanding); + mAutoFocusExit = false; + mPreviewRunning = false; + mRecordingRunning = false; + mAutoFocusRunning = false; + mPictureRunning = false; + mRecordSrcIndex = -1; + CLEAR(mRecordSrcSlot); + mRecordDstIndex = -1; + CLEAR(mRecordFrameAvailable); + mRecordFrameAvailableCnt = 0; + mFlagFirstFrameDump = false; + mPostRecordIndex = -1; + mRecordTimestamp = 0; + mLastRecordTimestamp = 0; + mPostRecordExit = false; + mPreviewInitialized = false; + mPreviewHeapFd = -1; + mRecordHeapFd = -1; + mPostviewHeapFd = -1; + mPostviewHeapTmpFd = -1; + CLEAR(mFrameMetadata); + CLEAR(mFaces); +#if FRONT_ZSL + mZSLindex = -1; +#endif + mFullPreviewRunning = false; + mPreviewFrameSize = 0; + mRecordingFrameSize = 0; + mRawFrameSize = 0; + mPostviewFrameSize = 0; + mFirstStart = 0; + mTimerSet = 0; + mZoomParamSet = 1; + mZoomSetVal = 0; + mZoomStatus = 0; + mLensStatus = 0; + mZoomStatusBak = 0; + mLensChecked = 0; + mLensStatus = 0; + mCameraPower = true; + + roi_x_pos = 0; + roi_y_pos = 0; + roi_width = 0; + roi_height = 0; + + mBurstShotExit = 
false; + mPreviewFrameReceived = false; +} + +ISecCameraHardware::~ISecCameraHardware() +{ + if (mPreviewHeap) { + mPreviewHeap->release(mPreviewHeap); + mPreviewHeap = 0; + mPreviewHeapFd = -1; + } + + if (mRecordingHeap) { + mRecordingHeap->release(mRecordingHeap); + mRecordingHeap = 0; + } + + if (mRawHeap != NULL) { + mRawHeap->release(mRawHeap); + mRawHeap = 0; + } + + if (mHDRHeap) { + mHDRHeap->release(mHDRHeap); + mHDRHeap = NULL; + } + +#ifndef RCJUNG + if (mYUVHeap) { + mYUVHeap->release(mYUVHeap); + mYUVHeap = NULL; + } +#endif + + if (mJpegHeap) { + mJpegHeap->release(mJpegHeap); + mJpegHeap = 0; + } + + if (mPostviewHeap) { + mPostviewHeap->release(mPostviewHeap); + mPostviewHeap = 0; + } + + if (mPostviewHeapTmp) { + mPostviewHeapTmp->release(mPostviewHeapTmp); + mPostviewHeapTmp = 0; + } + + if (0 < mIonCameraClient) + ion_close(mIonCameraClient); + mIonCameraClient = -1; +} + +bool ISecCameraHardware::init() +{ + mPreviewRunning = false; + mFullPreviewRunning = false; /* for FRONT_ZSL */ + mPreviewInitialized = false; +#ifdef DEBUG_PREVIEW_CALLBACK + mPreviewCbStarted = false; +#endif + mRecordingRunning = false; + mPictureRunning = false; + mPictureStart = false; + mCaptureStarted = false; + mCancelCapture = false; + mAutoFocusRunning = false; + mAutoFocusExit = false; + mFaceDetectionStatus = V4L2_FACE_DETECTION_OFF; + mPreviewFrameReceived = false; + + if (mEnableDZoom) { + /* Thread for zoom */ + mPreviewZoomThread = new CameraThread(this, &ISecCameraHardware::previewZoomThread, "previewZoomThread"); + mPostRecordThread = new CameraThread(this, &ISecCameraHardware::postRecordThread, "postRecordThread"); + mPreviewThread = mRecordingThread = NULL; + } else { + mPreviewThread = new CameraThread(this, &ISecCameraHardware::previewThread, "previewThread"); + mRecordingThread = new CameraThread(this, &ISecCameraHardware::recordingThread, "recordingThread"); + mPreviewZoomThread = mPostRecordThread = NULL; + } + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + mPreviewEnqueueThread = new CameraThread(this, + &ISecCameraHardware::previewEnqueueThread, "previewEnqueueThread"); +#endif + + mPictureThread = new CameraThread(this, &ISecCameraHardware::pictureThread, "pictureThread"); +#if FRONT_ZSL + mZSLPictureThread = new CameraThread(this, &ISecCameraHardware::zslpictureThread, "zslpictureThread"); +#endif + + if (mCameraId == CAMERA_ID_BACK) { + mAutoFocusThread = new CameraThread(this, &ISecCameraHardware::autoFocusThread, "autoFocusThread"); + mAutoFocusThread->run("autoFocusThread", PRIORITY_DEFAULT); + mHDRPictureThread = new CameraThread(this, &ISecCameraHardware::HDRPictureThread); + mRecordingPictureThread = new CameraThread(this, &ISecCameraHardware::RecordingPictureThread); + mDumpPictureThread = new CameraThread(this, &ISecCameraHardware::dumpPictureThread); + } + + mIonCameraClient = ion_open(); + if (mIonCameraClient < 0) { + CLOGE("ERR(%s):ion_open() fail", __func__); + mIonCameraClient = -1; + } + + for (int i = 0; i < REC_BUF_CNT; i++) { + for (int j = 0; j < REC_PLANE_CNT; j++) { + mRecordDstHeap[i][j] = NULL; + mRecordDstHeapFd[i][j] = -1; + } + } + + mInitRecSrcQ(); + mInitRecDstBuf(); + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + m_previewFrameQ.setWaitTime(2000000000); +#endif + + CLOGI("INFO(%s) : out ",__FUNCTION__); + return true; +} + +void ISecCameraHardware::initDefaultParameters() +{ + String8 tempStr; + + /* Preview */ + mParameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height); + + if (mCameraId == CAMERA_ID_BACK) { + 
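+        /* These keys take plain Android CameraParameters strings: size
+         * lists are comma-separated "WxH" pairs (for instance
+         * "1920x1080,1280x720") and the preview fps range keys are fps
+         * scaled by 1000 (for instance "15000,30000"), consistent with
+         * mMaxFrameRate = 30000 set in the constructor. The concrete
+         * B_KEY_* values live elsewhere in the BSP, so the samples here
+         * are illustrative only. */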
mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, + SecCameraParameters::createSizesStr(backPreviewSizes, ARRAY_SIZE(backPreviewSizes)).string()); + + mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, B_KEY_PREVIEW_FPS_RANGE_VALUE); + mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, B_KEY_SUPPORTED_PREVIEW_FPS_RANGE_VALUE); + + mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, B_KEY_SUPPORTED_PREVIEW_FRAME_RATES_VALUE); + mParameters.setPreviewFrameRate(B_KEY_PREVIEW_FRAME_RATE_VALUE); + + mParameters.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, B_KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO_VALUE); + } else { + mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, + SecCameraParameters::createSizesStr(frontPreviewSizes, ARRAY_SIZE(frontPreviewSizes)).string()); + + mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, F_KEY_PREVIEW_FPS_RANGE_VALUE); + mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, F_KEY_SUPPORTED_PREVIEW_FPS_RANGE_VALUE); + + mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, F_KEY_SUPPORTED_PREVIEW_FRAME_RATES_VALUE); + mParameters.setPreviewFrameRate(F_KEY_PREVIEW_FRAME_RATE_VALUE); + + mParameters.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, F_KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO_VALUE); + } + + mParameters.setPreviewFormat(previewPixelFormats[0].desc); + mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, + SecCameraParameters::createValuesStr(previewPixelFormats, ARRAY_SIZE(previewPixelFormats)).string()); + + /* Picture */ + mParameters.setPictureSize(mPictureSize.width, mPictureSize.height); + mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, mThumbnailSize.width); + mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, mThumbnailSize.height); + + if (mCameraId == CAMERA_ID_BACK) { + mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, + SecCameraParameters::createSizesStr(backPictureSizes, ARRAY_SIZE(backPictureSizes)).string()); + + mParameters.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, + SecCameraParameters::createSizesStr(backThumbSizes, ARRAY_SIZE(backThumbSizes)).string()); + } else { + mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, + SecCameraParameters::createSizesStr(frontPictureSizes, ARRAY_SIZE(frontPictureSizes)).string()); + + mParameters.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, + SecCameraParameters::createSizesStr(frontThumbSizes, ARRAY_SIZE(frontThumbSizes)).string()); + } + + mParameters.setPictureFormat(picturePixelFormats[0].desc); + mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, + SecCameraParameters::createValuesStr(picturePixelFormats, ARRAY_SIZE(picturePixelFormats)).string()); + + mParameters.set(CameraParameters::KEY_JPEG_QUALITY, 96); + mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, 100); + + mParameters.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV420SP); + + /* Video */ + mParameters.setVideoSize(mVideoSize.width, mVideoSize.height); + if (mCameraId == CAMERA_ID_BACK) { + mParameters.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, + SecCameraParameters::createSizesStr(backRecordingSizes, ARRAY_SIZE(backRecordingSizes)).string()); + mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, B_KEY_VIDEO_STABILIZATION_SUPPORTED_VALUE); + } else { + mParameters.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, + 
SecCameraParameters::createSizesStr(frontRecordingSizes, ARRAY_SIZE(frontRecordingSizes)).string()); + mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, F_KEY_VIDEO_STABILIZATION_SUPPORTED_VALUE); + } + + mParameters.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, KEY_VIDEO_SNAPSHOT_SUPPORTED_VALUE); + + /* UI settings */ + mParameters.set(CameraParameters::KEY_WHITE_BALANCE, whiteBalances[0].desc); + mParameters.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, + SecCameraParameters::createValuesStr(whiteBalances, ARRAY_SIZE(whiteBalances)).string()); + + mParameters.set(CameraParameters::KEY_EFFECT, effects[0].desc); + mParameters.set(CameraParameters::KEY_SUPPORTED_EFFECTS, + SecCameraParameters::createValuesStr(effects, ARRAY_SIZE(effects)).string()); + + if (mCameraId == CAMERA_ID_BACK) { + mParameters.set(CameraParameters::KEY_SCENE_MODE, sceneModes[0].desc); + mParameters.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, + SecCameraParameters::createValuesStr(sceneModes, ARRAY_SIZE(sceneModes)).string()); + + if (IsFlashSupported()) { + mParameters.set(CameraParameters::KEY_FLASH_MODE, flashModes[0].desc); + mParameters.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, + SecCameraParameters::createValuesStr(flashModes, ARRAY_SIZE(flashModes)).string()); + } + + mParameters.set(CameraParameters::KEY_FOCUS_MODE, backFocusModes[0].desc); + mParameters.set(CameraParameters::KEY_FOCUS_DISTANCES, B_KEY_NORMAL_FOCUS_DISTANCES_VALUE); + mParameters.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, + SecCameraParameters::createValuesStr(backFocusModes, ARRAY_SIZE(backFocusModes)).string()); + + if (IsAutoFocusSupported()) { + /* FOCUS AREAS supported. + * MAX_NUM_FOCUS_AREAS > 0 : supported + * MAX_NUM_FOCUS_AREAS = 0 : not supported + * + * KEY_FOCUS_AREAS = "left,top,right,bottom,weight" + */ + mParameters.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1"); + mParameters.set(CameraParameters::KEY_FOCUS_AREAS, "(0,0,0,0,0)"); + } + } else { + mParameters.set(CameraParameters::KEY_FOCUS_MODE, frontFocusModes[0].desc); + mParameters.set(CameraParameters::KEY_FOCUS_DISTANCES, F_KEY_FOCUS_DISTANCES_VALUE); + mParameters.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, + SecCameraParameters::createValuesStr(frontFocusModes, ARRAY_SIZE(frontFocusModes)).string()); + } + + /* Face Detect */ + if (mCameraId == CAMERA_ID_BACK) { + mParameters.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, + B_KEY_MAX_NUM_DETECTED_FACES_HW_VALUE); + mParameters.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, + B_KEY_MAX_NUM_DETECTED_FACES_SW_VALUE); + } else { + mParameters.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, + F_KEY_MAX_NUM_DETECTED_FACES_HW_VALUE); + mParameters.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, + F_KEY_MAX_NUM_DETECTED_FACES_SW_VALUE); + } + + /* Zoom */ + if (mZoomSupport) { + int maxZoom = getMaxZoomLevel(); + int maxZoomRatio = getMaxZoomRatio(); + + mParameters.set(CameraParameters::KEY_ZOOM_SUPPORTED, + B_KEY_ZOOM_SUPPORTED_VALUE); + mParameters.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, + B_KEY_SMOOTH_ZOOM_SUPPORTED_VALUE); + mParameters.set(CameraParameters::KEY_MAX_ZOOM, maxZoom - 1); + mParameters.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + + tempStr.setTo(""); + if (getZoomRatioList(tempStr, maxZoom, maxZoomRatio, zoomRatioList) == NO_ERROR) + mParameters.set(CameraParameters::KEY_ZOOM_RATIOS, tempStr.string()); + else + mParameters.set(CameraParameters::KEY_ZOOM_RATIOS, "100"); + + 
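+        /* KEY_ZOOM_RATIOS uses the stock Android convention: a
+         * comma-separated list of ratios in units of 1/100, first entry
+         * 100 (so a list like "100,130,160,...,400" spans 1.0x to 4.0x),
+         * KEY_ZOOM is an index into that list and KEY_MAX_ZOOM is the
+         * last valid index, hence maxZoom - 1 above. getZoomRatioList()
+         * is the vendor helper that builds the list from maxZoomRatio;
+         * the sample values here are illustrative only. */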
mParameters.set("constant-growth-rate-zoom-supported", "true"); + + CLOGD("INFO(%s):zoomRatioList=%s", "setDefaultParameter", tempStr.string()); + } else { + mParameters.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); + mParameters.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); + mParameters.set(CameraParameters::KEY_MAX_ZOOM, ZOOM_LEVEL_0); + mParameters.set(CameraParameters::KEY_ZOOM, ZOOM_LEVEL_0); + } + + mParameters.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, "0"); + + mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, 0); + mParameters.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, 4); + mParameters.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, -4); + mParameters.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, 0.5); + + /* AE, AWB Lock */ + if (mCameraId == CAMERA_ID_BACK) { + mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, + B_KEY_AUTO_EXPOSURE_LOCK_SUPPORTED_VALUE); + mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, + B_KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED_VALUE); + } else { + mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, + F_KEY_AUTO_EXPOSURE_LOCK_SUPPORTED_VALUE); + mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, + F_KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED_VALUE); + } + + /* AntiBanding */ + char supportedAntiBanding[20] = {0,}; + sprintf(supportedAntiBanding,"auto,%s", (char *)mAntiBanding); + mParameters.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, supportedAntiBanding); + mParameters.set(CameraParameters::KEY_ANTIBANDING, antibandings[0].desc); + + mParameters.set(CameraParameters::KEY_ISO, CameraParameters::ISO_AUTO); + mParameters.set("iso-values", SecCameraParameters::createValuesStr(isos, ARRAY_SIZE(isos)).string()); + + /* View Angle */ + setHorizontalViewAngle(640, 480); + mParameters.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle()); + mParameters.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, getVerticalViewAngle()); + + /* Burst FPS Value */ +#ifdef BURSTSHOT_MAX_FPS + tempStr.setTo(""); + tempStr = String8::format("(%d,%d)", BURSTSHOT_MAX_FPS, BURSTSHOT_MAX_FPS); + mParameters.set("burstshot-fps-values", tempStr.string()); +#else + mParameters.set("burstshot-fps-values", "(0,0)"); +#endif + + CLOGV("INFO(%s) : out - %s ",__FUNCTION__, mParameters.flatten().string()); + +} + +status_t ISecCameraHardware::setPreviewWindow(preview_stream_ops *w) +{ + mPreviewWindow = w; + + if (CC_UNLIKELY(!w)) { + mPreviewWindowSize.width = mPreviewWindowSize.height = 0; + ALOGE("setPreviewWindow: NULL Surface!"); + return OK; + } + + int halPixelFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL; + + if (mMovieMode) + halPixelFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; + CLOGD("DEBUG(%s) :size(%d/%d)", __FUNCTION__, mPreviewSize.width, mPreviewSize.height); + + /* YV12 */ + CLOGV("setPreviewWindow: halPixelFormat = %s", + halPixelFormat == HAL_PIXEL_FORMAT_YV12 ? "YV12" : + halPixelFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP ? "NV21" : + halPixelFormat == HAL_PIXEL_FORMAT_CUSTOM_YCbCr_420_SP ? "NV21M" : + halPixelFormat == HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL ? 
"NV21 FULL" : + "Others"); + + mPreviewWindowSize = mPreviewSize; + + CLOGD("DEBUG [%s(%d)] setPreviewWindow window Size width=%d height=%d", + __FUNCTION__, __LINE__, mPreviewWindowSize.width, mPreviewWindowSize.height); + if (nativeCreateSurface(mPreviewWindowSize.width, mPreviewWindowSize.height, halPixelFormat) == false) { + CLOGE("setPreviewWindow: error, nativeCreateSurface"); + return UNKNOWN_ERROR; + } + + return NO_ERROR; +} + +status_t ISecCameraHardware::startPreview() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + LOG_PERFORMANCE_START(1); + + Mutex::Autolock lock(mLock); + + if (mPictureRunning) { + CLOGW("startPreview: warning, picture is not completed yet"); + if ((mMsgEnabled & CAMERA_MSG_RAW_IMAGE) || + (mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME)) { + /* upper layer can access the mmaped memory if raw or postview message is enabled + But the data will be changed after preview is started */ + CLOGE("startPreview: error, picture data is not transferred yet"); + return INVALID_OPERATION; + } + } + + status_t ret; + if (mEnableDZoom) + ret = nativeStartPreviewZoom(); + else + ret = nativeStartPreview(); + + if (ret != NO_ERROR) { + CLOGE("startPreview: error, nativeStartPreview"); + + return NO_INIT; + } + + setDropUnstableInitFrames(); + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + m_flagEnqueueThreadStop = false; + ret = mPreviewEnqueueThread->run("previewEnqueueThread", PRIORITY_URGENT_DISPLAY); + if (ret != NO_ERROR) { + CLOGE("startPreview: error, starting previewEnqueueThread"); + return UNKNOWN_ERROR; + } +#endif + + mPreviewFrameReceived = false; + mFlagFirstFrameDump = false; + if (mEnableDZoom) { + ret = mPreviewZoomThread->run("previewZoomThread", PRIORITY_URGENT_DISPLAY); + } else { + ret = mPreviewThread->run("previewThread", PRIORITY_URGENT_DISPLAY); + } + + if (ret != NO_ERROR) { + CLOGE("startPreview: error, Not starting preview"); + return UNKNOWN_ERROR; + } + +#if FRONT_ZSL + if (/*mSamsungApp &&*/ !mMovieMode && mCameraId == CAMERA_ID_FRONT) { + if (nativeStartFullPreview() != NO_ERROR) { + CLOGE("startPreview: error, nativeStartPreview"); + return NO_INIT; + } + + if (mZSLPictureThread->run("zslpictureThread", PRIORITY_URGENT_DISPLAY) != NO_ERROR) { + CLOGE("startPreview: error, Not starting preview"); + return UNKNOWN_ERROR; + } + + mFullPreviewRunning = true; + } +#endif + mPreviewRunning = true; + + LOG_PERFORMANCE_END(1, "total"); + + CLOGI("INFO(%s) : out ",__FUNCTION__); + + return NO_ERROR; +} + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD +bool ISecCameraHardware::previewEnqueueThread() +{ + int ret = 0; + buffer_handle_t * buf_handle = NULL; + + ret = m_previewFrameQ.waitAndPopProcessQ(&buf_handle); + if (m_flagEnqueueThreadStop == true) { + ALOGD("INFO(%s[%d]):m_flagEnqueueThreadStop(%d)", __FUNCTION__, __LINE__, + m_flagEnqueueThreadStop); + + if(buf_handle != NULL && mPreviewWindow != NULL) { + if(mPreviewWindow->cancel_buffer(mPreviewWindow, buf_handle) != 0) { + ALOGE("ERR(%s):Fail to cancel buffer", __func__); + } + } + return false; + } + if (ret < 0) { + ALOGW("WARN(%s[%d]):wait and pop fail, ret(%d)", __FUNCTION__, __LINE__, ret); + goto func_exit; + } + + if (0 != mPreviewWindow->enqueue_buffer(mPreviewWindow, buf_handle)) { + ALOGE("Could not enqueue gralloc buffer!\n"); + goto func_exit; + } + +func_exit: + return true; +} +#endif + +bool ISecCameraHardware::previewThread() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + int index = nativeGetPreview(); + if (CC_UNLIKELY(index < 0)) { + if (mFastCaptureCalled) { + return false; + } + 
CLOGE("previewThread: error, nativeGetPreview"); + + if (!mPreviewThread->exitRequested()) { + mNotifyCb(CAMERA_MSG_ERROR, CAMERA_MSG_ERROR, 0, mCallbackCookie); + CLOGI("previewZoomThread: X, after callback"); + } + return false; + }else if (mPreviewThread->exitRequested()) { + return false; + } + +#ifdef DUMP_LAST_PREVIEW_FRAME + mLastIndex = index; +#endif + + mLock.lock(); + + if (mDropFrameCount > 0) { + mDropFrameCount--; + mLock.unlock(); + nativeReleasePreviewFrame(index); + return true; + } + + mLock.unlock(); + + /* Notify the client of a new frame. */ + if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) { +#ifdef DEBUG_PREVIEW_CALLBACK + if (!mPreviewCbStarted) { + mPreviewCbStarted = true; + ALOGD("preview callback started"); + } +#endif + mDataCb(CAMERA_MSG_PREVIEW_FRAME, mPreviewHeap, index, NULL, mCallbackCookie); + } + + /* Display a new frame */ + if (CC_LIKELY(mFlagANWindowRegister)) { + bool ret = nativeFlushSurface(mPreviewWindowSize.width, mPreviewWindowSize.height, mPreviewFrameSize, index); + if (CC_UNLIKELY(!ret)) + CLOGE("previewThread: error, nativeFlushSurface"); + mPreviewFrameReceived = true; + } + + mLock.lock(); + if (mFirstStart == 0) + mFirstStart = 1; + mLock.unlock(); + +#if DUMP_FILE + static int i = 0; + if (++i % 15 == 0) { + dumpToFile(mPreviewHeap->data + (mPreviewFrameSize*index), mPreviewFrameSize, "/data/media/0/preview.yuv"); + i = 0; + } +#endif + + /* Release the frame */ + int err = nativeReleasePreviewFrame(index); + if (CC_UNLIKELY(err < 0)) { + CLOGE("previewThread: error, nativeReleasePreviewFrame"); + return false; + } + + /* prevent a frame rate from getting higher than the max value */ + mPreviewThread->calcFrameWaitTime(mMaxFrameRate); + + CLOGV("INFO(%s) : out ",__FUNCTION__); + return true; +} + +bool ISecCameraHardware::previewZoomThread() +{ + CLOGV("INFO(%s) : in ",__FUNCTION__); + + int index = nativeGetPreview(); + int err = -1; + + if (CC_UNLIKELY(index < 0)) { + if (mFastCaptureCalled) { + return false; + } + CLOGE("previewZoomThread: error, nativeGetPreview in %s", recordingEnabled() ? 
"recording" : "preview"); + if (!mPreviewZoomThread->exitRequested()) { + mNotifyCb(CAMERA_MSG_ERROR, CAMERA_MSG_ERROR, 0, mCallbackCookie); + CLOGI("INFO(%s): Exit, after callback",__FUNCTION__); + } + + return false; + } else if (mPreviewZoomThread->exitRequested()) { + return false; + } + +#ifdef DUMP_LAST_PREVIEW_FRAME + mLastIndex = index; +#endif + + mPostRecordIndex = index; + + mLock.lock(); + if (mDropFrameCount > 0) { + mDropFrameCount--; + mLock.unlock(); + nativeReleasePreviewFrame(index); + CLOGW("DEBUG [%s(%d)] mDropFrameCount(%d), index(%d)",__FUNCTION__, __LINE__,mDropFrameCount, index); + return true; + } + mLock.unlock(); + + /* first frame dump to jpeg */ + if (mFlagFirstFrameDump == true) { + memcpy(mPictureBuf.virt.extP[0], mFliteNode.buffer[index].virt.extP[0], mPictureBuf.size.extS[0]); + nativeMakeJpegDump(); + mFlagFirstFrameDump = false; + } + + /* when recording mode, push frame of dq from FLite */ + if (mRecordingRunning) { + int videoSlotIndex = getRecSrcBufSlotIndex(); + if (videoSlotIndex < 0) { + CLOGE("ERROR(%s): videoSlotIndex is -1", __func__); + } else { + mRecordSrcSlot[videoSlotIndex].buf = &(mFliteNode.buffer[index]); + mRecordSrcSlot[videoSlotIndex].timestamp = mRecordTimestamp; + /* ALOGV("DEBUG(%s): recording src(%d) adr %p, timestamp %lld", __func__, videoSlotIndex, + (mRecordSrcSlot[videoSlotIndex].buf)->virt.p, mRecordSrcSlot[videoSlotIndex].timestamp); */ + mPushRecSrcQ(&mRecordSrcSlot[videoSlotIndex]); + mPostRecordCondition.signal(); + } + } + + mPreviewRunning = true; + +#if DUMP_FILE + static int j = 0; + + int buftype=ExynosBuffer::BUFFER_TYPE(&(mFliteNode.buffer[index])); + if( buftype & 1 /*BUFFER_TYPE_VIRT*/ ) { + if (++j % 15 == 0) { + dumpToFile(mFliteNode.buffer[index].virt.p, + mFliteNode.buffer[index].size.extS[0], "/data/media/0/preview_1.yuv"); + j = 0; + } + } +#endif + + /* Display a new frame */ + if (CC_LIKELY(mFlagANWindowRegister)) { + bool ret = nativeFlushSurface(mPreviewWindowSize.width, mPreviewWindowSize.height, mPreviewFrameSize, index); + if (CC_UNLIKELY(!ret)) + CLOGE("ERROR(%s): error, nativeFlushSurface", __func__); + mPreviewFrameReceived = true; + } else { + /* if not register ANativeWindow, just prepare callback buffer on CAMERA_MSG_PREVIEW_FRAME */ + if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) + if (nativePreviewCallback(index, NULL) < 0) + CLOGE("ERROR(%s): nativePreviewCallback failed", __func__); + } + + /* Notify the client of a new frame. */ + if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) { +#ifdef DEBUG_PREVIEW_CALLBACK + if (!mPreviewCbStarted) { + mPreviewCbStarted = true; + ALOGD("preview callback started"); + } +#endif + mDataCb(CAMERA_MSG_PREVIEW_FRAME, mPreviewHeap, index, NULL, mCallbackCookie); + } + +#if DUMP_FILE + static int i = 0; + if (++i % 15 == 0) { + dumpToFile(mPreviewHeap->data + (mPreviewFrameSize*index), mPreviewFrameSize, "/data/media/0/preview.yuv"); + i = 0; + } +#endif + + /* Release the frame */ + err = nativeReleasePreviewFrame(index); + if (CC_UNLIKELY(err < 0)) { + CLOGE("ERROR(%s): error, nativeReleasePreviewFrame", __func__); + return false; + } + + CLOGV("INFO(%s) : out ",__FUNCTION__); + return true; +} + +void ISecCameraHardware::stopPreview() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + /* + * try count to wait for stopping previewThread + * maximum wait time = 30 * 10ms + */ + int waitForCnt = 600; // 30 -->. 600 (300ms --> 6sec) because, Polling timeout is 5 sec. 
+ + LOG_PERFORMANCE_START(1); + + { + Mutex::Autolock lock(mLock); + if (!mPreviewRunning) { + CLOGW("stopPreview: warning, preview has been stopped"); + return; + } + } + + nativeDestroySurface(); + /* don't hold the lock while waiting for the thread to quit */ +#if FRONT_ZSL + if (mFullPreviewRunning) { + mZSLPictureThread->requestExitAndWait(); + nativeForceStopFullPreview(); + nativeStopFullPreview(); + mFullPreviewRunning = false; + } +#endif + if (mEnableDZoom) { + mPreviewZoomThread->requestExit(); + /* if previewThread can't finish, wait for 25ms */ + while (waitForCnt > 0 && mPreviewZoomThread->getTid() >= 0) { + usleep(10000); + waitForCnt--; + } + } else { + mPreviewThread->requestExitAndWait(); + } + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + m_flagEnqueueThreadStop = true; + m_previewFrameQ.sendCmd(WAKE_UP); + mPreviewEnqueueThread->requestExitAndWait(); + m_clearPreviewFrameList(&m_previewFrameQ); +#endif + +#ifdef DUMP_LAST_PREVIEW_FRAME + uint32_t offset = mPreviewFrameSize * mLastIndex; + dumpToFile(mPreviewHeap->base() + offset, mPreviewFrameSize, "/data/media/0/preview-last.dump"); +#endif + + Mutex::Autolock lock(mLock); + + nativeStopPreview(); + + if (mEnableDZoom == true && waitForCnt > 0) { + mPreviewZoomThread->requestExitAndWait(); + } + + mPreviewRunning = false; + mPreviewInitialized = false; +#ifdef DEBUG_PREVIEW_CALLBACK + mPreviewCbStarted = false; +#endif + mPreviewFrameReceived = false; + LOG_PERFORMANCE_END(1, "total"); + + if (mPreviewHeap) { + mPreviewHeap->release(mPreviewHeap); + mPreviewHeap = 0; + mPreviewHeapFd = -1; + } + + if (mRecordingHeap) { + mRecordingHeap->release(mRecordingHeap); + mRecordingHeap = 0; + } + + if (mRawHeap != NULL) { + mRawHeap->release(mRawHeap); + mRawHeap = 0; + } + + if (mJpegHeap) { + mJpegHeap->release(mJpegHeap); + mJpegHeap = 0; + } + + if (mHDRHeap) { + mHDRHeap->release(mHDRHeap); + mHDRHeap = 0; + } + + if (mPostviewHeap) { + mPostviewHeap->release(mPostviewHeap); + mPostviewHeap = 0; + } + + if (mPostviewHeapTmp) { + mPostviewHeapTmp->release(mPostviewHeapTmp); + mPostviewHeapTmp = 0; + } + + CLOGI("INFO(%s) : out ",__FUNCTION__); +} + +status_t ISecCameraHardware::storeMetaDataInBuffers(bool enable) +{ + ALOGV("%s", __FUNCTION__); + + if (!enable) { + ALOGE("Non-m_frameMetadata buffer mode is not supported!"); + return INVALID_OPERATION; + } + + return OK; +} + +status_t ISecCameraHardware::startRecording() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + Mutex::Autolock lock(mLock); + + status_t ret; + mLastRecordTimestamp = 0; +#if FRONT_ZSL + if (mFullPreviewRunning) { + nativeForceStopFullPreview(); + mZSLPictureThread->requestExitAndWait(); + nativeStopFullPreview(); + mFullPreviewRunning = false; + } +#endif + + if (mEnableDZoom) { + ret = nativeStartRecordingZoom(); + } else { + ret = nativeStartRecording(); + } + + if (CC_UNLIKELY(ret != NO_ERROR)) { + CLOGE("startRecording X: error, nativeStartRecording"); + return UNKNOWN_ERROR; + } + + if (mEnableDZoom) { + mPostRecordExit = false; + ret = mPostRecordThread->run("postRecordThread", PRIORITY_URGENT_DISPLAY); + } else + ret = mRecordingThread->run("recordingThread", PRIORITY_URGENT_DISPLAY); + + if (CC_UNLIKELY(ret != NO_ERROR)) { + mRecordingTrace = true; + CLOGE("startRecording: error %d, Not starting recording", ret); + return ret; + } + + mRecordingRunning = true; + + CLOGD("DEBUG [%s(%d)] -out-",__FUNCTION__, __LINE__); + return NO_ERROR; +} + +bool ISecCameraHardware::recordingThread() +{ + return true; +} + +bool 
ISecCameraHardware::postRecordThread() +{ + mPostRecordLock.lock(); + mPostRecordCondition.wait(mPostRecordLock); + mPostRecordLock.unlock(); + + if (mSizeOfRecSrcQ() == 0) { + ALOGW("WARN(%s): mSizeOfRecSrcQ size is zero", __func__); + } else { + rec_src_buf_t srcBuf; + + while (mSizeOfRecSrcQ() > 0) { + int index; + + /* get dst video buf index */ + index = getRecDstBufIndex(); + if (index < 0) { + ALOGW("WARN(%s): getRecDstBufIndex(%d) sleep and continue, skip frame buffer", __func__, index); + usleep(13000); + continue; + } + + /* get src video buf */ + if (mPopRecSrcQ(&srcBuf) == false) { + ALOGW("WARN(%s): mPopRecSrcQ(%d) failed", __func__, index); + return false; + } + + /* ALOGV("DEBUG(%s): SrcBuf(%d, %d, %lld), Dst idx(%d)", __func__, + srcBuf.buf->fd.extFd[0], srcBuf.buf->fd.extFd[1], srcBuf.timestamp, index); */ + + /* Notify the client of a new frame. */ + if (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME) { + bool ret; + /* csc from flite to video MHB and callback */ + ret = nativeCSCRecording(&srcBuf, index); + if (ret == false) { + ALOGE("ERROR(%s): nativeCSCRecording failed.. SrcBuf(%d, %d, %lld), Dst idx(%d)", __func__, + srcBuf.buf->fd.extFd[0], srcBuf.buf->fd.extFd[1], srcBuf.timestamp, index); + setAvailDstBufIndex(index); + return false; + } else { + if (0L < srcBuf.timestamp && mLastRecordTimestamp < srcBuf.timestamp) { + mDataCbTimestamp(srcBuf.timestamp, CAMERA_MSG_VIDEO_FRAME, + mRecordingHeap, index, mCallbackCookie); + mLastRecordTimestamp = srcBuf.timestamp; + LOG_RECORD_TRACE("callback returned"); + } else { + ALOGW("WARN(%s): timestamp(%lld) invaild - last timestamp(%lld) systemtime(%lld)", + __func__, srcBuf.timestamp, mLastRecordTimestamp, systemTime(SYSTEM_TIME_MONOTONIC)); + setAvailDstBufIndex(index); + } + } + } + } + } + LOG_RECORD_TRACE("X"); + return true; +} + +void ISecCameraHardware::stopRecording() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + Mutex::Autolock lock(mLock); + if (!mRecordingRunning) { + ALOGW("stopRecording: warning, recording has been stopped"); + return; + } + + /* We request thread to exit. Don't wait. */ + if (mEnableDZoom) { + mPostRecordExit = true; + + /* Change calling order of requestExit...() and signal + * if you want to change requestExit() to requestExitAndWait(). + */ + mPostRecordThread->requestExit(); + mPostRecordCondition.signal(); + nativeStopRecording(); + } else { + mRecordingThread->requestExit(); + nativeStopRecording(); + } + + mRecordingRunning = false; + + ALOGD("stopRecording X"); +} + +void ISecCameraHardware::releaseRecordingFrame(const void *opaque) +{ + status_t ret = NO_ERROR; + bool found = false; + int i; + + /* We does not release frames recorded any longer + * if this function is called after stopRecording(). 
+ */ + if (mEnableDZoom) + ret = mPostRecordThread->exitRequested(); + else + ret = mRecordingThread->exitRequested(); + + if (CC_UNLIKELY(ret)) { + ALOGW("releaseRecordingFrame: warning, we do not release any more!!"); + return; + } + + { + Mutex::Autolock lock(mLock); + if (CC_UNLIKELY(!mRecordingRunning)) { + ALOGW("releaseRecordingFrame: warning, recording is not running"); + return; + } + } + + struct addrs *addrs = (struct addrs *)mRecordingHeap->data; + + /* find MHB handler to match */ + if (addrs) { + for (i = 0; i < REC_BUF_CNT; i++) { + if ((char *)(&(addrs[i].type)) == (char *)opaque) { + found = true; + break; + } + } + } + + mRecordDstLock.lock(); + if (found) { + mRecordFrameAvailableCnt++; + /* ALOGV("DEBUG(%s): found index[%d] FDy(%d), FDcbcr(%d) availableCount(%d)", __func__, + i, addrs[i].fd_y, addrs[i].fd_cbcr, mRecordFrameAvailableCnt); */ + mRecordFrameAvailable[i] = true; + } else + ALOGE("ERR(%s):no matched index(%p)", __func__, (char *)opaque); + + if (mRecordFrameAvailableCnt > REC_BUF_CNT) { + ALOGW("WARN(%s): mRecordFrameAvailableCnt is more than REC_BUF!!", __func__); + mRecordFrameAvailableCnt = REC_BUF_CNT; + } + mRecordDstLock.unlock(); +} + +status_t ISecCameraHardware::autoFocus() +{ + ALOGV("autoFocus EX"); + /* signal autoFocusThread to run once */ + + if (mCameraId != CAMERA_ID_BACK) { + ALOGV("Do not support autoFocus in front camera"); + mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie); + return NO_ERROR; + } + + mAutoFocusCondition.signal(); + return NO_ERROR; +} + +bool ISecCameraHardware::autoFocusThread() +{ + /* block until we're told to start. we don't want to use + * a restartable thread and requestExitAndWait() in cancelAutoFocus() + * because it would cause deadlock between our callbacks and the + * caller of cancelAutoFocus() which both want to grab the same lock + * in CameraServices layer. + */ + mAutoFocusLock.lock(); + mAutoFocusCondition.wait(mAutoFocusLock); + mAutoFocusLock.unlock(); + + /* check early exit request */ + if (mAutoFocusExit) + return false; + + ALOGV("autoFocusThread E"); + LOG_PERFORMANCE_START(1); + + mAutoFocusRunning = true; + + if (!IsAutoFocusSupported()) { + ALOGV("autofocus not supported"); + mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie); + goto out; + } + + if (!autoFocusCheckAndWaitPreview()) { + ALOGI("autoFocusThread: preview not started"); + mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie); + goto out; + } + + /* Start AF operations */ + if (!nativeSetAutoFocus()) { + ALOGE("autoFocusThread X: error, nativeSetAutofocus"); + goto out; + } + + if (!IsAutofocusRunning()) { + ALOGV("autoFocusThread X: AF is canceled"); + nativeSetParameters(CAM_CID_FOCUS_MODE, mFocusMode | FOCUS_MODE_DEFAULT); + goto out; + } + + if (mMsgEnabled & CAMERA_MSG_FOCUS) { + switch (nativeGetAutoFocus()) { + case 0x02: + ALOGV("autoFocusThread X: AF success"); + mNotifyCb(CAMERA_MSG_FOCUS, true, 0, mCallbackCookie); + break; + + case 0x04: + ALOGV("autoFocusThread X: AF cancel"); + nativeSetParameters(CAM_CID_FOCUS_MODE, mFocusMode | FOCUS_MODE_DEFAULT); + break; + + default: + ALOGW("autoFocusThread X: AF fail"); + mNotifyCb(CAMERA_MSG_FOCUS, false, 0, mCallbackCookie); + break; + } + } + +out: + mAutoFocusRunning = false; + + LOG_PERFORMANCE_END(1, "total"); + return true; +} + +status_t ISecCameraHardware::cancelAutoFocus() +{ + ALOGV("cancelAutoFocus: autoFocusThread is %s", + mAutoFocusRunning ? 
"running" : "not running"); + + if (!IsAutoFocusSupported()) + return NO_ERROR; + status_t err = NO_ERROR; + + if (mAutoFocusRunning) { + err = nativeCancelAutoFocus(); + } else { + err = nativeSetParameters(CAM_CID_FOCUS_MODE, mFocusMode | FOCUS_MODE_DEFAULT); + } + + if (CC_UNLIKELY(err != NO_ERROR)) { + ALOGE("cancelAutoFocus: error, nativeCancelAutofocus"); + return UNKNOWN_ERROR; + } + mAutoFocusRunning = false; + return NO_ERROR; +} + +#if FRONT_ZSL +bool ISecCameraHardware::zslpictureThread() +{ + mZSLindex = nativeGetFullPreview(); + + if (CC_UNLIKELY(mZSLindex < 0)) { + CLOGE("zslpictureThread: error, nativeGetFullPreview"); + return true; + } + + nativeReleaseFullPreviewFrame(mZSLindex); + + return true; +} +#endif + +/* --1 */ +status_t ISecCameraHardware::takePicture() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + int facedetVal = 0; + + if (mPreviewRunning == false || mPreviewFrameReceived == false) { + CLOGW("WARN(%s): preview is not initialized", __func__); + int retry = 70; + while (retry > 0 && (mPreviewRunning == false || mPreviewFrameReceived == false)) { + usleep(5000); + retry--; + } + } + +#ifdef RECORDING_CAPTURE + if (mMovieMode || mRecordingRunning) { + if (mPictureRunning) { + CLOGW("WARN(%s): pictureThread is alive. wait to finish it", __func__); + int retry_mov = 50; + while (retry_mov > 0 && mPictureRunning == true) { + usleep(5000); + retry_mov--; + } + if (retry_mov > 0) { + CLOGW("WARN(%s): pictureThread is finished", __func__); + } else { + CLOGW("WARN(%s): wait timeout", __func__); + } + } + } +#endif + + mPictureStart = true; + + if (mCaptureMode != RUNNING_MODE_SINGLE || facedetVal == V4L2_FACE_DETECTION_BLINK) { + if (mPreviewRunning) { + CLOGW("takePicture: warning, preview is running"); + stopPreview(); + } + nativeSetFastCapture(false); + } + mPictureStart = false; + +#ifndef RECORDING_CAPTURE + if (mMovieMode || mRecordingRunning) { + CLOGW("takePicture: warning, not support taking picture in recording mode"); + return NO_ERROR; + } +#endif /* RECORDING_CAPTURE */ + + Mutex::Autolock lock(mLock); + if (mPictureRunning) { + CLOGE("takePicture: error, picture already running"); + return INVALID_OPERATION; + } + + if (mPictureThread->run("pictureThread", PRIORITY_DEFAULT) != NO_ERROR) { + CLOGE("takePicture: error, Not starting take picture"); + return UNKNOWN_ERROR; + } + + CLOGI("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; +} + +/* --2 */ +bool ISecCameraHardware::pictureThread() +{ + mPictureRunning = true; + +#ifdef RECORDING_CAPTURE + if (mMovieMode || mRecordingRunning) { + doRecordingCapture(); + } else +#endif + { + if (mPreviewRunning && !mFullPreviewRunning) { + CLOGW("takePicture: warning, preview is running"); + stopPreview(); + } + + doCameraCapture(); + } + + return false; +} + +status_t ISecCameraHardware::doCameraCapture() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + mPictureLock.lock(); + + LOG_PERFORMANCE_START(1); + + LOG_PERFORMANCE_START(2); + if (!nativeStartSnapshot()) { + CLOGE("doCameraCapture: error, nativeStartSnapshot"); + mPictureLock.unlock(); + goto out; + } + LOG_PERFORMANCE_END(2, "nativeStartSnapshot"); + + if ((mMsgEnabled & CAMERA_MSG_SHUTTER) && (mSceneMode != SCENE_MODE_NIGHTSHOT)) { + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie); + mPictureLock.lock(); + } + + LOG_PERFORMANCE_START(3); + int postviewOffset; + if (!nativeGetSnapshot(1, &postviewOffset)) { + CLOGE("doCameraCapture: error, nativeGetSnapshot"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, 
mCallbackCookie); + goto out; + } + LOG_PERFORMANCE_END(3, "nativeGetSnapshot"); + + mPictureLock.unlock(); + + if ((mMsgEnabled & CAMERA_MSG_SHUTTER) && (mSceneMode == SCENE_MODE_NIGHTSHOT)) + mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie); + + /* Display postview */ + LOG_PERFORMANCE_START(4); + + /* callbacks for capture */ + if ((mMsgEnabled & CAMERA_MSG_RAW_IMAGE) && (mRawHeap != NULL)) + mDataCb(CAMERA_MSG_RAW_IMAGE, mRawHeap, 0, NULL, mCallbackCookie); + + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + + if ((mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) && (mRawHeap != NULL)) + mDataCb(CAMERA_MSG_POSTVIEW_FRAME, mRawHeap, 0, NULL, mCallbackCookie); + + if ((mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) && (mJpegHeap != NULL)) + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJpegHeap, 0, NULL, mCallbackCookie); + + LOG_PERFORMANCE_END(4, "callback functions"); + +#if DUMP_FILE + dumpToFile(mJpegHeap->data, mPictureFrameSize, "/data/media/0/capture01.jpg"); +#endif + +out: + nativeStopSnapshot(); + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + + LOG_PERFORMANCE_END(1, "total"); + + CLOGI("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; +} + +#ifdef RECORDING_CAPTURE +status_t ISecCameraHardware::doRecordingCapture() +{ + CLOGI("INFO(%s) : in ",__FUNCTION__); + + if (!mMovieMode && !mRecordingRunning) { + ALOGI("doRecordingCapture: nothing to do"); + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + return NO_ERROR; + } + + if ((mMsgEnabled & CAMERA_MSG_SHUTTER) && (mSceneMode != SCENE_MODE_NIGHTSHOT)) { + mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie); + } + + mPictureLock.lock(); + + bool ret = false; + int index = mPostRecordIndex; + if (index < 0) { + CLOGW("WARN(%s):(%d)mPostRecordIndex(%d) invalid", __func__, __LINE__, mPostRecordIndex); + index = 0; + } + + ExynosBuffer rawFrame422Buf; + + int dstW = mPictureSize.width; + int dstH = mPictureSize.height; + + + rawFrame422Buf.size.extS[0] = dstW * dstH * 2; + + if (allocMem(mIonCameraClient, &rawFrame422Buf, 1 << 1) == false) { + CLOGE("ERR(%s):(%d)allocMem(rawFrame422Buf) fail", __FUNCTION__, __LINE__); + mPictureLock.unlock(); + goto out; + } + + /* csc start flite(s) -> rawFrame422Buf */ + if (nativeCSCRecordingCapture(&(mFliteNode.buffer[index]), &rawFrame422Buf) != NO_ERROR) + ALOGE("ERR(%s):(%d)nativeCSCRecordingCapture() fail", __FUNCTION__, __LINE__); + + ret = nativeGetRecordingJpeg(&rawFrame422Buf, dstW, dstH); + if (CC_UNLIKELY(!ret)) { + CLOGE("doRecordingCapture: error, nativeGetRecordingJpeg"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } + + mPictureLock.unlock(); + +#if DUMP_FILE + dumpToFile(mJpegHeap->data, mPictureFrameSize, "/data/media/0/capture01.jpg"); +#endif + + if ((mMsgEnabled & CAMERA_MSG_RAW_IMAGE) && (mRawHeap != NULL)) + mDataCb(CAMERA_MSG_RAW_IMAGE, mRawHeap, 0, NULL, mCallbackCookie); + + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + + if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJpegHeap, 0, NULL, mCallbackCookie); + +out: + freeMem(&rawFrame422Buf); + + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + + CLOGI("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; +} +#endif + +bool ISecCameraHardware::HDRPictureThread() +{ + int i, ncnt; + int count = 3; + int postviewOffset; + int err; + + ALOGD("HDRPictureThread E"); + + 
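+    /* HDR capture loop: take "count" (3) consecutive YUV snapshots; each pass re-acquires mPictureLock, pulls one frame via nativeGetYUVSnapshot(), and delivers it through the COMPRESSED_IMAGE callback. */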
mPictureLock.lock(); + + LOG_PERFORMANCE_START(1); + +// nativeSetParameters(CAM_CID_CONTINUESHOT_PROC, V4L2_INT_STATE_START_CAPTURE); + +#if 0 + for ( i = count ; i ; i--) { + ncnt = count-i+1; + ALOGD("HDRPictureThread: AEB %d frame", ncnt); + nativeSetParameters(CAM_CID_CONTINUESHOT_PROC, V4L2_INT_STATE_FRAME_SYNC); + if (mMsgEnabled & CAMERA_MSG_SHUTTER) { + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_SHUTTER, 0, 0, mCallbackCookie); + mPictureLock.lock(); + } + } +#endif + + for ( i = count ; i ; i--) { + + if (i != count) { + mPictureLock.lock(); + } + + if (!nativeStartYUVSnapshot()) { + ALOGE("HDRPictureThread: error, nativeStartYUVSnapshot"); + mPictureLock.unlock(); + goto out; + } + ALOGE("nativeGetYUVSnapshot: count[%d], i[%d]",count,i); + + if (!nativeGetYUVSnapshot(count-i+1, &postviewOffset)) { + ALOGE("HDRPictureThread: error, nativeGetYUVSnapshot"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } else { + ALOGE("HDRPictureThread: success, nativeGetYUVSnapshot"); + } + + mPictureLock.unlock(); + + if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { + ALOGD("YUV mHDRTotalFrameSize(mHDRFrameSize) = %d, %d frame", mHDRFrameSize, i); + mPictureLock.unlock(); + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mHDRHeap, 0, NULL, mCallbackCookie); + } + nativeStopSnapshot(); + } + +out: + nativeStopSnapshot(); + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + + LOG_PERFORMANCE_END(1, "total"); + + ALOGD("HDRPictureThread X"); + return false; + +} + + bool ISecCameraHardware::RecordingPictureThread() +{ + int i, ncnt; + int count = mMultiFullCaptureNum; + int postviewOffset; + int err; + + ALOGD("RecordingPictureThread : count (%d)E", count); + + if (mCaptureMode == RUNNING_MODE_SINGLE) { + if (mPreviewRunning && !mFullPreviewRunning) { + ALOGW("RecordingPictureThread: warning, preview is running"); + stopPreview(); + } + } + + mPictureLock.lock(); + + LOG_PERFORMANCE_START(1); + ALOGD("RecordingPictureThread : postview width(%d), height(%d)", + mOrgPreviewSize.width, mOrgPreviewSize.height); + //nativeSetParameters(CAM_CID_SET_POSTVIEW_SIZE, + // (int)(mOrgPreviewSize.width<<16|mOrgPreviewSize.height)); + //nativeSetParameters(CAM_CID_CONTINUESHOT_PROC, V4L2_INT_STATE_START_CAPTURE); + + for (i = count ; i ; i--) { + ncnt = count-i+1; + ALOGD("RecordingPictureThread: StartPostview %d frame E", ncnt); + if (!nativeStartPostview()) { + ALOGE("RecordingPictureThread: error, nativeStartPostview"); + mPictureLock.unlock(); + goto out; + } else { + ALOGD("RecordingPictureThread: StartPostview %d frame X", ncnt); + } + + if (!nativeGetPostview(ncnt)) { + ALOGE("RecordingPictureThread: error, nativeGetPostview"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } else { + ALOGE("RecordingPictureThread: nativeGetPostview"); + } + + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE) { + ALOGD("RAW mRawFrameSize = %d", mRawFrameSize); + mDataCb(CAMERA_MSG_RAW_IMAGE, mPostviewHeap, 0, NULL, mCallbackCookie); + } + if (mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) { + ALOGD("JPEG mRawFrameSize = %d", mPostviewFrameSize); + mDataCb(CAMERA_MSG_POSTVIEW_FRAME, mPostviewHeap, 0, NULL, mCallbackCookie); + } + + LOG_PERFORMANCE_START(2); + ALOGD("RecordingPictureThread: StartSnapshot %d frame E", ncnt); + if (!nativeStartSnapshot()) { + ALOGE("RecordingPictureThread: error, nativeStartSnapshot"); + mPictureLock.unlock(); + goto out; + } else { + ALOGD("RecordingPictureThread: StartSnapshot %d frame X", ncnt); + } + 
LOG_PERFORMANCE_END(2, "nativeStartSnapshot"); + + LOG_PERFORMANCE_START(3); + if (!nativeGetSnapshot(ncnt, &postviewOffset)) { + ALOGE("RecordingPictureThread: error, nativeGetSnapshot"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } else { + ALOGD("RecordingPictureThread: nativeGetSnapshot %d frame ", ncnt); + } + LOG_PERFORMANCE_END(3, "nativeGetSnapshot"); + + mPictureLock.unlock(); + + LOG_PERFORMANCE_START(4); + + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE) { + mDataCb(CAMERA_MSG_RAW_IMAGE, mJpegHeap, 0, NULL, mCallbackCookie); + } + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) { + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + } + if ((mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) && (mJpegHeap != NULL)) { + ALOGD("RecordingPictureThread: CAMERA_MSG_COMPRESSED_IMAGE (%d) ", ncnt); + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJpegHeap, 0, NULL, mCallbackCookie); + + // This delay is added for waiting until saving jpeg file is completed. + // if it take short time to save jpeg file, this delay can be deleted. + usleep(300*1000); + } + + LOG_PERFORMANCE_END(4, "callback functions"); + } + +out: + nativeStopSnapshot(); + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + + LOG_PERFORMANCE_END(1, "total"); + + ALOGD("RecordingPictureThread X"); + return false; + +} + +bool ISecCameraHardware::dumpPictureThread() +{ + int err; + mPictureRunning = true; + + if (mPreviewRunning && !mFullPreviewRunning) { + ALOGW("takePicture: warning, preview is running"); + stopPreview(); + } + + /* Start fast capture */ + mCaptureStarted = true; +// err = nativeSetParameters(CAM_CID_SET_FAST_CAPTURE, 0); + mCaptureStarted = false; + if (mCancelCapture/* && mSamsungApp*/) { + ALOGD("pictureThread mCancelCapture %d", mCancelCapture); + mCancelCapture = false; + return false; + } + if (err != NO_ERROR) { + ALOGE("%s: Fast capture command is failed.", __func__); + } else { + ALOGD("%s: Mode change command is issued for fast capture.", __func__); + } + + mPictureLock.lock(); + + LOG_PERFORMANCE_START(1); + LOG_PERFORMANCE_START(2); + if (!nativeStartSnapshot()) { + ALOGE("pictureThread: error, nativeStartSnapshot"); + mPictureLock.unlock(); + goto out; + } + LOG_PERFORMANCE_END(2, "nativeStartSnapshot"); + + LOG_PERFORMANCE_START(3); + /* --4 */ + int postviewOffset; + + if (!nativeGetSnapshot(1, &postviewOffset)) { + ALOGE("pictureThread: error, nativeGetSnapshot"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } + LOG_PERFORMANCE_END(3, "nativeGetSnapshot"); + + mPictureLock.unlock(); + + /* Display postview */ + LOG_PERFORMANCE_START(4); + + /* callbacks for capture */ + if (mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) { + ALOGD("Postview mPostviewFrameSize = %d", mPostviewFrameSize); + mDataCb(CAMERA_MSG_POSTVIEW_FRAME, mPostviewHeap, 0, NULL, mCallbackCookie); + } + if ((mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) && (mJpegHeap != NULL)) { + ALOGD("JPEG COMPLRESSED"); + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJpegHeap, 0, NULL, mCallbackCookie); + } + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE) { + ALOGD("RAW mRawFrameSize = %d", mRawFrameSize); + mDataCb(CAMERA_MSG_RAW_IMAGE, mPostviewHeap, 0, NULL, mCallbackCookie); + } + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) { + ALOGD("RAW image notify"); + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + } + + LOG_PERFORMANCE_END(4, "callback functions"); + +#if DUMP_FILE + dumpToFile(mJpegHeap->data, mPictureFrameSize, 
"/data/media/0/capture01.jpg"); +#endif + +out: + nativeStopSnapshot(); + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + LOG_PERFORMANCE_END(1, "total"); + + ALOGD("pictureThread X"); + return false; +} + +status_t ISecCameraHardware::pictureThread_RAW() +{ + int postviewOffset; + int jpegSize; + uint8_t *RAWsrc; + ALOGE("pictureThread_RAW E"); + + mPictureLock.lock(); + +// nativeSetParameters(CAM_CID_MAIN_FORMAT, 5); + + if (!nativeStartSnapshot()) { + ALOGE("pictureThread: error, nativeStartSnapshot"); + mPictureLock.unlock(); + goto out; + } + + if (!nativeGetSnapshot(0, &postviewOffset)) { //if (!nativeGetSnapshot(1, &postviewOffset)) { + ALOGE("pictureThread: error, nativeGetSnapshot"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } + + RAWsrc = (uint8_t *)mPictureBufDummy[0].virt.extP[0]; + + if(mJpegHeap != NULL) + memcpy((uint8_t *)mJpegHeap->data, RAWsrc, mPictureFrameSize ); + + if ((mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME) && (mJpegHeap != NULL)) { //CAMERA_MSG_COMPRESSED_IMAGE + ALOGD("RAW COMPLRESSED"); + mDataCb(CAMERA_MSG_POSTVIEW_FRAME, mJpegHeap, 0, NULL, mCallbackCookie); //CAMERA_MSG_COMPRESSED_IMAGE + } + +// nativeSetParameters(CAM_CID_MAIN_FORMAT, 1); + + if (!nativeStartSnapshot()) { + ALOGE("pictureThread: error, nativeStartSnapshot"); + mPictureLock.unlock(); + goto out; + } + + if (!nativeGetSnapshot(1, &postviewOffset)) { + ALOGE("pictureThread_RAW : error, nativeGetMultiSnapshot"); + mPictureLock.unlock(); + mNotifyCb(CAMERA_MSG_ERROR, -1, 0, mCallbackCookie); + goto out; + } + + if (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { + ALOGD("JPEG COMPRESSED"); + mPictureLock.unlock(); + mDataCb(CAMERA_MSG_COMPRESSED_IMAGE, mJpegHeap, 0, NULL, mCallbackCookie); + mPictureLock.lock(); + } + + if (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) { + ALOGD("RAW image notify"); + mNotifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mCallbackCookie); + } + + mPictureLock.unlock(); + + out: + nativeStopSnapshot(); + mLock.lock(); + mPictureRunning = false; + mLock.unlock(); + + ALOGD("pictureThread_RAW X"); + return false; +} + +status_t ISecCameraHardware::cancelPicture() +{ + mPictureThread->requestExitAndWait(); + + ALOGD("cancelPicture EX"); + return NO_ERROR; +} + +status_t ISecCameraHardware::sendCommand(int32_t command, int32_t arg1, int32_t arg2) +{ + CLOGV("INFO(%s) : in - command %d, arg1 %d, arg2 %d", __FUNCTION__,command, arg1, arg2); + int max; + + switch(command) { + case CAMERA_CMD_DISABLE_POSTVIEW: + mDisablePostview = arg1; + break; + + case CAMERA_CMD_SET_TOUCH_AF_POSITION: + CLOGD("CAMERA_CMD_SET_TOUCH_AF_POSITION X:%d, Y:%d", arg1, arg2); + nativeSetParameters(CAM_CID_SET_TOUCH_AF_POSX, arg1); + nativeSetParameters(CAM_CID_SET_TOUCH_AF_POSY, arg2); + break; + + case CAMERA_CMD_START_STOP_TOUCH_AF: + if (!mPreviewRunning && arg1) { + CLOGW("Preview is not started before Touch AF"); + return NO_INIT; + } + CLOGD("CAMERA_CMD_START_STOP_TOUCH_AF ~~~~~~~~~ arg1 == %d", arg1); + nativeSetParameters(CAM_CID_SET_TOUCH_AF, arg1); + break; + + case CAMERA_CMD_SET_FLIP: + CLOGD("CAMERA_CMD_SET_FLIP arg1 == %d", arg1); + mflipHorizontal = arg1; + break; + + default: + CLOGV("no matching case"); + break; + } + + return NO_ERROR; +} + +void ISecCameraHardware::release() +{ + if (mPreviewThread != NULL) { + mPreviewThread->requestExitAndWait(); + mPreviewThread.clear(); + } + + if (mPreviewZoomThread != NULL) { + mPreviewZoomThread->requestExitAndWait(); + mPreviewZoomThread.clear(); + } + +#ifdef 
USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + if(mPreviewEnqueueThread != NULL) { + mPreviewEnqueueThread->requestExitAndWait(); + mPreviewEnqueueThread.clear(); + } +#endif + +#if FRONT_ZSL + if (mZSLPictureThread != NULL) { + mZSLPictureThread->requestExitAndWait(); + mZSLPictureThread.clear(); + } +#endif + + if (mRecordingThread != NULL) { + mRecordingThread->requestExitAndWait(); + mRecordingThread.clear(); + } + + if (mPostRecordThread != NULL) { + mPostRecordThread->requestExit(); + mPostRecordExit = true; + mPostRecordCondition.signal(); + mPostRecordThread->requestExitAndWait(); + mPostRecordThread.clear(); + } + + if (mAutoFocusThread != NULL) { + /* this thread is normally already in it's threadLoop but blocked + * on the condition variable. signal it so it wakes up and can exit. + */ + mAutoFocusThread->requestExit(); + mAutoFocusExit = true; + mAutoFocusCondition.signal(); + mAutoFocusThread->requestExitAndWait(); + mAutoFocusThread.clear(); + } + + if (mPictureThread != NULL) { + mPictureThread->requestExitAndWait(); + mPictureThread.clear(); + } + + if (mPreviewHeap) { + mPreviewHeap->release(mPreviewHeap); + mPreviewHeap = 0; + mPreviewHeapFd = -1; + } + + if (mRecordingHeap) { + mRecordingHeap->release(mRecordingHeap); + mRecordingHeap = 0; + } + + if (mRawHeap != NULL) { + mRawHeap->release(mRawHeap); + mRawHeap = 0; + } + + if (mJpegHeap) { + mJpegHeap->release(mJpegHeap); + mJpegHeap = 0; + } + + if (mHDRHeap) { + mHDRHeap->release(mHDRHeap); + mHDRHeap = 0; + } + + if (mPostviewHeap) { + mPostviewHeap->release(mPostviewHeap); + mPostviewHeap = 0; + } + + if (mHDRPictureThread != NULL) { + mHDRPictureThread->requestExitAndWait(); + mHDRPictureThread.clear(); + } + + if (mRecordingPictureThread != NULL) { + mRecordingPictureThread->requestExitAndWait(); + mRecordingPictureThread.clear(); + } + + if (mDumpPictureThread != NULL) { + mDumpPictureThread->requestExitAndWait(); + mDumpPictureThread.clear(); + } + + nativeDestroySurface(); +} + +status_t ISecCameraHardware::dump(int fd) const +{ + return NO_ERROR; +} + +int ISecCameraHardware::getCameraId() const +{ + return mCameraId; +} + +status_t ISecCameraHardware::setParameters(const CameraParameters ¶ms) +{ + LOG_PERFORMANCE_START(1); + + if (mPictureRunning) { + CLOGW("WARN(%s): warning, capture is not complete. please wait...", __FUNCTION__); + Mutex::Autolock l(&mPictureLock); + } + + Mutex::Autolock l(&mLock); + + CLOGD("DEBUG(%s): [Before Param] %s", __FUNCTION__, params.flatten().string()); + + status_t rc, final_rc = NO_ERROR; + if ((rc = setRecordingMode(params))) + final_rc = rc; + if ((rc = setVideoSize(params))) + final_rc = rc; + if ((rc = setPreviewSize(params))) + final_rc = rc; + if ((rc = setPreviewFormat(params))) + final_rc = rc; + if ((rc = setPictureSize(params))) + final_rc = rc; + if ((rc = setPictureFormat(params))) + final_rc = rc; + if ((rc = setThumbnailSize(params))) + final_rc = rc; + if ((rc = setJpegThumbnailQuality(params))) + final_rc = rc; + if ((rc = setJpegQuality(params))) + final_rc = rc; + if ((rc = setFrameRate(params))) + final_rc = rc; + if ((rc = setRotation(params))) + final_rc = rc; + if ((rc = setFocusMode(params))) + final_rc = rc; + /* Set anti-banding both rear and front camera if needed. 
*/ + if ((rc = setAntiBanding(params))) + final_rc = rc; + /* UI settings */ + if (mCameraId == CAMERA_ID_BACK) { + if ((rc = setSceneMode(params))) final_rc = rc; + if ((rc = setFocusAreas(params))) final_rc = rc; + if ((rc = setFlash(params))) final_rc = rc; + if ((rc = setMetering(params))) final_rc = rc; + if ((rc = setMeteringAreas(params))) final_rc = rc; + /* Set for Adjust Image */ + if ((rc = setSharpness(params))) final_rc = rc; + if ((rc = setContrast(params))) final_rc = rc; + if ((rc = setSaturation(params))) final_rc = rc; + if ((rc = setAntiShake(params))) final_rc = rc; + /* setCapturemode: Do not set before setPASMmode() */ + if ((rc = setZoom(params))) final_rc = rc; + if ((rc = setDzoom(params))) final_rc = rc; + } else { + if ((rc = setBlur(params))) final_rc = rc; + } + if ((rc = setZoom(params))) final_rc = rc; + if ((rc = setWhiteBalance(params))) final_rc = rc; + if ((rc = setEffect(params))) final_rc = rc; + if ((rc = setBrightness(params))) final_rc = rc; + if ((rc = setIso(params))) final_rc = rc; + if ((rc = setAELock(params))) final_rc = rc; + if ((rc = setAWBLock(params))) final_rc = rc; + if ((rc = setGps(params))) final_rc = rc; + + checkHorizontalViewAngle(); + + LOG_PERFORMANCE_END(1, "total"); + + CLOGD("DEBUG(%s): [After Param] %s", __FUNCTION__, params.flatten().string()); + + CLOGD("DEBUG(%s): out - %s", __FUNCTION__, final_rc == NO_ERROR ? "success" : "failed"); + return final_rc; +} + +void ISecCameraHardware::setDropFrame(int count) +{ + /* should be locked */ + if (mDropFrameCount < count) + mDropFrameCount = count; + + CLOGD("DEBUG(%s): mDropFrameCount = %d", __FUNCTION__, mDropFrameCount); +} + +status_t ISecCameraHardware::setAELock(const CameraParameters ¶ms) +{ + const char *str_support = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED); + if (str_support == NULL || (!strcmp(str_support, "false"))) { + CLOGW("WARN(%s): warning, not supported",__FUNCTION__); + return NO_ERROR; + } + + const char *str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK); + const char *prevStr = mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) { + return NO_ERROR; + } + + CLOGI("DEBUG(%s): %s", __FUNCTION__, str); + if (!(!strcmp(str, "true") || !strcmp(str, "false"))) { + CLOGE("ERR(%s): error, invalid value %s",__FUNCTION__, str); + return BAD_VALUE; + } + + mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, str); + + int val; + if (!strcmp(str, "true")) { + val = AE_LOCK; + } else { + val = AE_UNLOCK; + } + + return nativeSetParameters(CAM_CID_AE_LOCK_UNLOCK, val); +} + +status_t ISecCameraHardware::setAWBLock(const CameraParameters ¶ms) +{ + const char *str_support = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED); + if (str_support == NULL || (!strcmp(str_support, "false"))) { + CLOGW("WARN(%s): warning, not supported",__FUNCTION__); + return NO_ERROR; + } + + const char *str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK); + const char *prevStr = mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) { + return NO_ERROR; + } + + CLOGI("DEBUG(%s): %s", __FUNCTION__, str); + if (!(!strcmp(str, "true") || !strcmp(str, "false"))) { + CLOGE("ERR(%s): error, invalid value %s",__FUNCTION__, str); + + return BAD_VALUE; + } + + mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, str); + + int val; + if (!strcmp(str, "true")) { + val = AWB_LOCK; + } else { + val = AWB_UNLOCK; 
+ } + + return nativeSetParameters(CAM_CID_AWB_LOCK_UNLOCK, val); +} + +/* +* called when starting preview +*/ +inline void ISecCameraHardware::setDropUnstableInitFrames() +{ + int32_t frameCount = 3; + + if (mCameraId == CAMERA_ID_BACK) { + if (mbFirst_preview_started == false) { + /* When camera_start_preview is called for the first time after camera application starts. */ + if (mMovieMode == true) { + frameCount = INITIAL_REAR_SKIP_FRAME + 5; + } else { + frameCount = INITIAL_REAR_SKIP_FRAME; + } + mbFirst_preview_started = true; + } else { + /* When startPreview is called after camera application got started. */ + frameCount = INITIAL_REAR_SKIP_FRAME; + } + } else { + if (mbFirst_preview_started == false) { + /* When camera_start_preview is called for the first time after camera application starts. */ + frameCount = INITIAL_FRONT_SKIP_FRAME; + mbFirst_preview_started = true; + } else { + /* When startPreview is called after camera application got started. */ + frameCount = INITIAL_FRONT_SKIP_FRAME; + } + } + + setDropFrame(frameCount); +} + +status_t ISecCameraHardware::setRecordingMode(const CameraParameters ¶ms) +{ + const char *str = params.get(CameraParameters::KEY_RECORDING_HINT); + const char *prevStr = mParameters.get(CameraParameters::KEY_RECORDING_HINT); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + + mParameters.set(CameraParameters::KEY_RECORDING_HINT, str); + + String8 recordHint(str); + CLOGD("DEBUG(%s): %s", __FUNCTION__, recordHint.string()); + + if (recordHint == "true"){ + mFps = mMaxFrameRate / 1000; + CLOGD("DEBUG(%s): fps(%d) %s ", __FUNCTION__, mFps, recordHint.string()); + + mMovieMode = true; + } else { + mMovieMode = false; + } + + return 0 /*nativeSetParameters(CAM_CID_MOVIE_MODE, mMovieMode)*/; + +} + +status_t ISecCameraHardware::setPreviewSize(const CameraParameters ¶ms) +{ + int width, height; + params.getPreviewSize(&width, &height); + + if ((mPreviewSize.width == (uint32_t)width) && (mPreviewSize.height == (uint32_t)height) +#if defined(USE_RECORDING_FLITE_SIZE) + && (mPrevMovieMode == mMovieMode) +#endif + ) { + return NO_ERROR; + } + + if (width <= 0 || height <= 0) + return BAD_VALUE; + + int count, FLiteCount; + const image_rect_type *sizes, *FLiteSizes, *defaultSizes = NULL, *size = NULL, *fliteSize = NULL;; + const image_rect_type *sameRatioSize = NULL; + + if (mCameraId == CAMERA_ID_BACK) { + count = ARRAY_SIZE(backPreviewSizes); + sizes = backPreviewSizes; +#if defined(USE_RECORDING_FLITE_SIZE) + if (mMovieMode == true) { + FLiteCount = ARRAY_SIZE(backFLiteRecordingSizes); + FLiteSizes = backFLiteRecordingSizes; + } else +#endif + { + FLiteCount = ARRAY_SIZE(backFLitePreviewSizes); + FLiteSizes = backFLitePreviewSizes; + } + mPrevMovieMode = mMovieMode; + } else { + count = ARRAY_SIZE(frontPreviewSizes); + sizes = frontPreviewSizes; + FLiteCount = ARRAY_SIZE(frontFLitePreviewSizes); + FLiteSizes = frontFLitePreviewSizes; + } + +retry: + size = getFrameSizeSz(sizes, count, (uint32_t)width, (uint32_t)height); + + if (CC_UNLIKELY(!size)) { + if (!defaultSizes) { + defaultSizes = sizes; + sameRatioSize = getFrameSizeRatio(sizes, count, (uint32_t)width, (uint32_t)height); + if (mCameraId == CAMERA_ID_BACK) { + count = ARRAY_SIZE(hiddenBackPreviewSizes); + sizes = hiddenBackPreviewSizes; + } else { + count = ARRAY_SIZE(hiddenFrontPreviewSizes); + sizes = hiddenFrontPreviewSizes; + } + goto retry; + } else if (!sameRatioSize) { + sameRatioSize = getFrameSizeRatio(sizes, count, (uint32_t)width, (uint32_t)height); + } + + 
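+        /* Neither the public nor the hidden preview-size table matched exactly: fall back to the closest same-aspect-ratio entry, or to the first default size. */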
CLOGW("WARN(%s): not supported size(%dx%d)", __FUNCTION__, width, height); + size = sameRatioSize ? sameRatioSize : &defaultSizes[0]; + } + + mPreviewSize = *size; + mParameters.setPreviewSize((int)size->width, (int)size->height); + + fliteSize = getFrameSizeRatio(FLiteSizes, FLiteCount, size->width, size->height); + mFLiteSize = fliteSize ? *fliteSize : FLiteSizes[0]; + + /* backup orginal preview size due to ALIGN */ + mOrgPreviewSize = mPreviewSize; + mPreviewSize.width = ALIGN(mPreviewSize.width, 16); + mPreviewSize.height = ALIGN(mPreviewSize.height, 2); + + CLOGD("DEBUG(%s): preview size %dx%d/%dx%d/%dx%d", __FUNCTION__, width, height, + mOrgPreviewSize.width, mOrgPreviewSize.height, mPreviewSize.width, mPreviewSize.height); + CLOGD("DEBUG(%s): Flite size %dx%d", __FUNCTION__,mFLiteSize.width, mFLiteSize.height); + + return NO_ERROR; +} + +status_t ISecCameraHardware::setPreviewFormat(const CameraParameters ¶ms) +{ + const char *str = params.getPreviewFormat(); + const char *prevStr = mParameters.getPreviewFormat(); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + + int val; +retry: + val = SecCameraParameters::lookupAttr(previewPixelFormats, ARRAY_SIZE(previewPixelFormats), str); + if (CC_UNLIKELY(val == NOT_FOUND)) { + CLOGW("setPreviewFormat: warning, not supported value(%s)", str); + str = reinterpret_cast(previewPixelFormats[0].desc); + goto retry; + } + + CLOGD("DEBUG(%s): %s", __FUNCTION__, str); + mPreviewFormat = (cam_pixel_format)val; + CLOGV("DEBUG(%s): mPreviewFormat = %s", + __FUNCTION__, + mPreviewFormat == CAM_PIXEL_FORMAT_YVU420P ? "YV12" : + mPreviewFormat == CAM_PIXEL_FORMAT_YUV420SP ? "NV21" : + "Others"); + mParameters.setPreviewFormat(str); + return NO_ERROR; +} + +status_t ISecCameraHardware::setVideoSize(const CameraParameters ¶ms) +{ + + int width = 0, height = 0; + params.getVideoSize(&width, &height); + + if ((mVideoSize.width == (uint32_t)width) && (mVideoSize.height == (uint32_t)height)) + return NO_ERROR; + + int count; + const image_rect_type *sizes, *defaultSizes = NULL, *size = NULL; + + if (mCameraId == CAMERA_ID_BACK) { + count = ARRAY_SIZE(backRecordingSizes); + sizes = backRecordingSizes; + } else { + count = ARRAY_SIZE(frontRecordingSizes); + sizes = frontRecordingSizes; + } + +retry: + for (int i = 0; i < count; i++) { + if (((uint32_t)width == sizes[i].width) && ((uint32_t)height == sizes[i].height)) { + size = &sizes[i]; + break; + } + } + + if (CC_UNLIKELY(!size)) { + if (!defaultSizes) { + defaultSizes = sizes; + if (mCameraId == CAMERA_ID_BACK) { + count = ARRAY_SIZE(hiddenBackRecordingSizes); + sizes = hiddenBackRecordingSizes; + } else { + count = ARRAY_SIZE(hiddenFrontRecordingSizes); + sizes = hiddenFrontRecordingSizes; + } + goto retry; + } else { + sizes = defaultSizes; + } + + CLOGW("WARN(%s): warning, not supported size (%dx%d)", __FUNCTION__, size->width, size->height); + size = &sizes[0]; + } + + CLOGD("DEBUG(%s): recording %dx%d", __FUNCTION__, size->width, size->height); + + mVideoSize = *size; + mParameters.setVideoSize((int)size->width, (int)size->height); + + /* const char *str = mParameters.get(CameraParameters::KEY_VIDEO_SIZE); */ + return NO_ERROR; +} + +status_t ISecCameraHardware::setPictureSize(const CameraParameters ¶ms) +{ + int width, height; + params.getPictureSize(&width, &height); + int right_ratio = 177; + + if ((mPictureSize.width == (uint32_t)width) && (mPictureSize.height == (uint32_t)height)) + return NO_ERROR; + + int count, FLiteCount; + const image_rect_type *sizes, *FLiteSizes, 
+    const image_rect_type *sameRatioSize = NULL;
+
+    if (mCameraId == CAMERA_ID_BACK) {
+        count = ARRAY_SIZE(backPictureSizes);
+        sizes = backPictureSizes;
+        FLiteCount = ARRAY_SIZE(backFLiteCaptureSizes);
+        FLiteSizes = backFLiteCaptureSizes;
+    } else {
+        count = ARRAY_SIZE(frontPictureSizes);
+        sizes = frontPictureSizes;
+        FLiteCount = ARRAY_SIZE(frontFLiteCaptureSizes);
+        FLiteSizes = frontFLiteCaptureSizes;
+    }
+
+retry:
+    size = getFrameSizeSz(sizes, count, (uint32_t)width, (uint32_t)height);
+
+    if (CC_UNLIKELY(!size)) {
+        if (!defaultSizes) {
+            defaultSizes = sizes;
+            sameRatioSize = getFrameSizeRatio(sizes, count, (uint32_t)width, (uint32_t)height);
+            if (mCameraId == CAMERA_ID_BACK) {
+                count = ARRAY_SIZE(hiddenBackPictureSizes);
+                sizes = hiddenBackPictureSizes;
+            } else {
+                count = ARRAY_SIZE(hiddenFrontPictureSizes);
+                sizes = hiddenFrontPictureSizes;
+            }
+            goto retry;
+        } else if (!sameRatioSize) {
+            sameRatioSize = getFrameSizeRatio(sizes, count, (uint32_t)width, (uint32_t)height);
+        }
+
+        CLOGW("DEBUG(%s): not supported size(%dx%d)", __FUNCTION__, width, height);
+        size = sameRatioSize ? sameRatioSize : &defaultSizes[0];
+    }
+
+    CLOGD("DEBUG(%s): %dx%d", __FUNCTION__, size->width, size->height);
+    mPictureSize = mRawSize = *size;
+    mParameters.setPictureSize((int)size->width, (int)size->height);
+
+    if ((int)(mSensorSize.width * 100 / mSensorSize.height) == right_ratio) {
+        setHorizontalViewAngle(size->width, size->height);
+    }
+    mParameters.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle());
+
+    fliteSize = getFrameSizeRatio(FLiteSizes, FLiteCount, size->width, size->height);
+    mFLiteCaptureSize = fliteSize ? *fliteSize : FLiteSizes[0];
+
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::setPictureFormat(const CameraParameters &params)
+{
+    const char *str = params.getPictureFormat();
+    const char *prevStr = mParameters.getPictureFormat();
+    if (str == NULL || (prevStr && !strcmp(str, prevStr)))
+        return NO_ERROR;
+
+    int val;
+retry:
+    val = SecCameraParameters::lookupAttr(picturePixelFormats,
+        ARRAY_SIZE(picturePixelFormats), str);
+
+    if (CC_UNLIKELY(val == NOT_FOUND)) {
+        ALOGW("setPictureFormat: warning, not supported value(%s)", str);
+#if 0
+        str = reinterpret_cast<const char *>(picturePixelFormats[0].desc);
+        goto retry;
+#else
+        return BAD_VALUE;
+#endif /* FOR HAL TEST */
+    }
+
+    CLOGD("DEBUG(%s) : %s ", __FUNCTION__, str);
+    mPictureFormat = (cam_pixel_format)val;
+    mParameters.setPictureFormat(str);
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::setThumbnailSize(const CameraParameters &params)
+{
+    int width, height;
+    width = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    height = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    if (mThumbnailSize.width == (uint32_t)width && mThumbnailSize.height == (uint32_t)height)
+        return NO_ERROR;
+
+    int count;
+    const image_rect_type *sizes, *size = NULL;
+
+    if (mCameraId == CAMERA_ID_BACK) {
+        count = ARRAY_SIZE(backThumbSizes);
+        sizes = backThumbSizes;
+    } else {
+        count = ARRAY_SIZE(frontThumbSizes);
+        sizes = frontThumbSizes;
+    }
+
+    for (int i = 0; i < count; i++) {
+        if ((uint32_t)width == sizes[i].width && (uint32_t)height == sizes[i].height) {
+            size = &sizes[i];
+            break;
+        }
+    }
+
+    if (!size) {
+        CLOGW("setThumbnailSize: warning, not supported size(%dx%d)", width, height);
+        size = &sizes[0];
+    }
+
+    CLOGD("DEBUG(%s) : %dx%d ", __FUNCTION__, size->width, size->height);
+    mThumbnailSize = *size;
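+    /* Write the (possibly clamped) thumbnail size back so callers read the value actually in use. */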
+    mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, (int)size->width);
+    mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, (int)size->height);
+
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::setJpegThumbnailQuality(const CameraParameters &params)
+{
+    int val = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    int prevVal = mParameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    if (val == -1 || prevVal == val)
+        return NO_ERROR;
+
+    if (CC_UNLIKELY(val < 1 || val > 100)) {
+        CLOGE("setJpegThumbnailQuality: error, invalid value(%d)", val);
+        return BAD_VALUE;
+    }
+
+    //CLOGD("setJpegThumbnailQuality: %d", val);
+    CLOGD("DEBUG(%s) : %d ", __FUNCTION__, val);
+
+    mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, val);
+
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::setJpegQuality(const CameraParameters &params)
+{
+    int val = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
+    int prevVal = mParameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+    if (val == -1 || prevVal == val)
+        return NO_ERROR;
+
+    if (CC_UNLIKELY(val < 1 || val > 100)) {
+        ALOGE("setJpegQuality: error, invalid value(%d)", val);
+        return BAD_VALUE;
+    }
+
+    //ALOGD("setJpegQuality: %d", val);
+    CLOGD("DEBUG(%s) : %d ", __FUNCTION__, val);
+    mJpegQuality = val;
+
+    mParameters.set(CameraParameters::KEY_JPEG_QUALITY, val);
+
+    return nativeSetParameters(CAM_CID_JPEG_QUALITY, mJpegQuality);
+}
+
+status_t ISecCameraHardware::setFrameRate(const CameraParameters &params)
+{
+    int min, max;
+    params.getPreviewFpsRange(&min, &max);
+    int frameRate = params.getPreviewFrameRate();
+    int prevFrameRate = mParameters.getPreviewFrameRate();
+    if ((frameRate != -1) && (frameRate != prevFrameRate))
+        mParameters.setPreviewFrameRate(frameRate);
+
+    if (CC_UNLIKELY(min < 0 || max < 0 || max < min)) {
+        CLOGE("setFrameRate: error, invalid range(%d, %d)", min, max);
+        return BAD_VALUE;
+    }
+
+    /* 0 means auto frame rate */
+    int val = (min == max) ? min : 0;
+    mMaxFrameRate = max;
+
+    if (mMovieMode)
+        mFps = mMaxFrameRate / 1000;
+    else
+        mFps = val / 1000;
+
+    CLOGD("DEBUG(%s) : %d,%d,%d ", __FUNCTION__, min, max, mFps);
+
+    if (mFrameRate == val)
+        return NO_ERROR;
+
+    mFrameRate = val;
+
+    const char *str = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
+    if (CC_LIKELY(str)) {
+        mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, str);
+    } else {
+        CLOGE("setFrameRate: corrupted data (params)");
+        char buffer[32];
+        CLEAR(buffer);
+        snprintf(buffer, sizeof(buffer), "%d,%d", min, max);
+        mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, buffer);
+    }
+
+    mParameters.setPreviewFrameRate(val/1000);
+
+    return nativeSetParameters(CAM_CID_FRAME_RATE, val/1000);
+}
+
+status_t ISecCameraHardware::setRotation(const CameraParameters &params)
+{
+    int val = params.getInt(CameraParameters::KEY_ROTATION);
+    int prevVal = mParameters.getInt(CameraParameters::KEY_ROTATION);
+    if (val == -1 || prevVal == val)
+        return NO_ERROR;
+
+    if (CC_UNLIKELY(val != 0 && val != 90 && val != 180 && val != 270)) {
+        ALOGE("setRotation: error, invalid value(%d)", val);
+        return BAD_VALUE;
+    }
+
+    CLOGD("DEBUG(%s) : %d ", __FUNCTION__, val);
+    mParameters.set(CameraParameters::KEY_ROTATION, val);
+
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::setPreviewFrameRate(const CameraParameters &params)
+{
+    int val = params.getPreviewFrameRate();
+    int prevVal = mParameters.getPreviewFrameRate();
+    if (val == -1 || prevVal == val)
+        return NO_ERROR;
+
+    if (CC_UNLIKELY(val < 0 || val > (mMaxFrameRate / 1000))) {
+        ALOGE("setPreviewFrameRate: error, invalid value(%d)", val);
+        return BAD_VALUE;
+    }
+
+    CLOGD("DEBUG(%s) : %d ", __FUNCTION__, val);
+    mFrameRate = val * 1000;
+    mParameters.setPreviewFrameRate(val);
+
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::setSceneMode(const CameraParameters &params)
+{
+    const char *str = params.get(CameraParameters::KEY_SCENE_MODE);
+    const char *prevStr = mParameters.get(CameraParameters::KEY_SCENE_MODE);
+    if (str == NULL || (prevStr && !strcmp(str, prevStr)))
+        return NO_ERROR;
+
+    int val;
+
+retry:
+    val = SecCameraParameters::lookupAttr(sceneModes, ARRAY_SIZE(sceneModes), str);
+    if (CC_UNLIKELY(val == NOT_FOUND)) {
+        ALOGW("setSceneMode: warning, not supported value(%s)", str);
+#if 0
+        str = reinterpret_cast<const char *>(sceneModes[0].desc);
+        goto retry;
+#else
+        return BAD_VALUE;
+#endif /* FOR HAL TEST */
+    }
+
+    //ALOGD("setSceneMode: %s", str);
+    CLOGD("DEBUG(%s) : %s ", __FUNCTION__, str);
+
+    mSceneMode = (cam_scene_mode)val;
+    mParameters.set(CameraParameters::KEY_SCENE_MODE, str);
+
+    return nativeSetParameters(CAM_CID_SCENE_MODE, val);
+}
+
+/* -------------------Focus Area STARTS here---------------------------- */
+status_t ISecCameraHardware::findCenter(struct FocusArea *focusArea,
+        struct FocusPoint *center)
+{
+    /* range check */
+    if ((focusArea->top > focusArea->bottom) || (focusArea->right < focusArea->left)) {
+        CLOGE("findCenter: Invalid value range");
+        return -EINVAL;
+    }
+
+    center->x = (focusArea->left + focusArea->right) / 2;
+    center->y = (focusArea->top + focusArea->bottom) / 2;
+
+    /* ALOGV("%s: center point (%d, %d)", __func__, center->x, center->y); */
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::normalizeArea(struct FocusPoint *center)
+{
+    struct FocusPoint tmpPoint;
+    size_t hRange, vRange;
+    double hScale, vScale;
+
+    tmpPoint.x = center->x;
+    tmpPoint.y = center->y;
+
+    /* ALOGD("%s: before x = %d, y = %d", __func__, tmpPoint.x, tmpPoint.y); */
+
+    hRange = FOCUS_AREA_RIGHT - FOCUS_AREA_LEFT;
+    vRange = FOCUS_AREA_BOTTOM - FOCUS_AREA_TOP;
+    hScale = (double)mPreviewSize.height / (double) hRange;
+    vScale = (double)mPreviewSize.width / (double) vRange;
+
+    /* Normalization */
+    /* ALOGV("normalizeArea: mPreviewSize.width = %d, mPreviewSize.height = %d",
+        mPreviewSize.width, mPreviewSize.height);
+    */
+
+    tmpPoint.x = (center->x + vRange / 2) * vScale;
+    tmpPoint.y = (center->y + hRange / 2) * hScale;
+
+    center->x = tmpPoint.x;
+    center->y = tmpPoint.y;
+
+    if (center->x == 0 && center->y == 0) {
+        CLOGE("normalizeArea: Invalid focus center point");
+        return -EINVAL;
+    }
+
+    return NO_ERROR;
+}
+
+status_t ISecCameraHardware::checkArea(ssize_t top,
+        ssize_t left,
+        ssize_t bottom,
+        ssize_t right,
+        ssize_t weight)
+{
+    /* Handles the invalid region corner case. */
+    if ((0 == top) && (0 == left) && (0 == bottom) && (0 == right) && (0 == weight)) {
+        ALOGD("checkArea: error, All values are zero");
+        return NO_ERROR;
+    }
+
+    if ((FOCUS_AREA_WEIGHT_MIN > weight) || (FOCUS_AREA_WEIGHT_MAX < weight)) {
+        ALOGE("checkArea: error, Camera area weight is invalid %d", weight);
+        return -EINVAL;
+    }
+
+    if ((FOCUS_AREA_TOP > top) || (FOCUS_AREA_BOTTOM < top)) {
+        ALOGE("checkArea: error, Camera area top coordinate is invalid %d", top );
+        return -EINVAL;
+    }
+
+    if ((FOCUS_AREA_TOP > bottom) || (FOCUS_AREA_BOTTOM < bottom)) {
+        ALOGE("checkArea: error, Camera area bottom coordinate is invalid %d", bottom );
+        return -EINVAL;
+    }
+
+    if ((FOCUS_AREA_LEFT > left) || (FOCUS_AREA_RIGHT < left)) {
+        ALOGE("checkArea: error, Camera area left coordinate is invalid %d", left );
+        return -EINVAL;
+    }
+
+    if ((FOCUS_AREA_LEFT > right) || (FOCUS_AREA_RIGHT < right)) {
+        ALOGE("checkArea: error, Camera area right coordinate is invalid %d", right );
+        return -EINVAL;
+    }
+
+    if (left >= right) {
+        ALOGE("checkArea: error, Camera area left larger than right");
+        return -EINVAL;
+    }
+
+    if (top >= bottom) {
+        ALOGE("checkArea: error, Camera area top larger than bottom");
+        return -EINVAL;
+    }
+
+    return NO_ERROR;
+}
+
+/* TODO : multiple focus area is not supported yet */
+status_t ISecCameraHardware::parseAreas(const char *area,
+        size_t areaLength,
+        struct FocusArea *focusArea,
+        int *num_areas)
+{
+    status_t ret = NO_ERROR;
+    char *ctx;
+    char *pArea = NULL;
+    char *pEnd = NULL;
+    const char *startToken = "(";
+    const char endToken = ')';
+    const char sep = ',';
+    ssize_t left, top, bottom, right, weight;
+    char *tmpBuffer = NULL;
+
+    if (( NULL == area ) || ( 0 >= areaLength)) {
+        ALOGE("parseAreas: error, area is NULL or areaLength is less than 0");
+        return -EINVAL;
+    }
+
+    tmpBuffer = (char *)malloc(areaLength);
+    if (NULL == tmpBuffer) {
+        ALOGE("parseAreas: error, tmpBuffer is NULL");
+        return -ENOMEM;
+    }
+
+    memcpy(tmpBuffer, area, areaLength);
+
+    pArea = strtok_r(tmpBuffer, startToken, &ctx);
+
+    do {
+        char *pStart = NULL;
+        pStart = pArea;
+        if (NULL == pStart) {
+            ALOGE("parseAreas: error, Parsing of the left area coordinate failed!");
+            ret = -EINVAL;
+            break;
+        } else {
+            left = static_cast<ssize_t>(strtol(pStart, &pEnd, 10));
+        }
+
+        if (sep != *pEnd) {
+            ALOGE("parseAreas: error, Parsing of the top area coordinate failed!");
+            ret = -EINVAL;
+            break;
+        } else {
+            top = static_cast<ssize_t>(strtol(pEnd + 1, &pEnd, 10));
+        }
+
+        if (sep != *pEnd) {
+            ALOGE("parseAreas: error, Parsing of the right area coordinate failed!");
+            ret = -EINVAL;
+            break;
+        } else {
+            right = static_cast<ssize_t>(strtol(pEnd + 1, &pEnd, 10));
+        }
+
+        if (sep != *pEnd) {
+            ALOGE("parseAreas: error, Parsing of the bottom area coordinate failed!");
+            ret = -EINVAL;
+            break;
+        } else {
+            bottom = static_cast<ssize_t>(strtol(pEnd + 1, &pEnd, 10));
+        }
+
+        if (sep != *pEnd) {
+            ALOGE("parseAreas: error, Parsing of the weight area coordinate failed!");
+            ret = -EINVAL;
+            break;
+        } else {
+            weight = static_cast<ssize_t>(strtol(pEnd + 1, &pEnd, 10));
+        }
+
+        if (endToken != *pEnd) {
+            ALOGE("parseAreas: error, malformed area!");
+            ret = -EINVAL;
+            break;
+        }
+
+        ret = checkArea(top, left, bottom, right, weight);
+        if (NO_ERROR != ret)
+            break;
+
+        /*
+        ALOGV("parseAreas: Area parsed [%dx%d, %dx%d] %d",
+                ( int ) left,
+                ( int ) top,
+                ( int ) right,
+                ( int ) bottom,
+                ( int ) weight);
+        */
+
+        pArea = strtok_r(NULL, startToken, &ctx);
+
+        focusArea->left = (int)left;
+        focusArea->top = (int)top;
+        focusArea->right = (int)right;
+        focusArea->bottom = (int)bottom;
+        focusArea->weight = (int)weight;
+        (*num_areas)++;
+    } while ( NULL != pArea );
+
+    if (NULL != tmpBuffer)
+        free(tmpBuffer);
+
+    return ret;
+}
+
+/* TODO : multiple focus area is not supported yet */
+status_t ISecCameraHardware::setFocusAreas(const CameraParameters &params)
+{
+    if (!IsAutoFocusSupported())
+        return NO_ERROR;
+
+    const char *str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+    const char *prevStr = mParameters.get(CameraParameters::KEY_FOCUS_AREAS);
+    if ((str == NULL) || (prevStr && !strcmp(str, prevStr)))
+        return NO_ERROR;
+
+    struct FocusArea focusArea;
+    struct FocusPoint center;
+    int err, num_areas = 0;
+    const char *maxFocusAreasStr = params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+    if (!maxFocusAreasStr) {
+        ALOGE("setFocusAreas: error, KEY_MAX_NUM_FOCUS_AREAS is NULL");
+        return NO_ERROR;
+    }
+
+    int maxFocusAreas = atoi(maxFocusAreasStr);
+    if (!maxFocusAreas) {
+        ALOGD("setFocusAreas: FocusAreas is not supported");
+        return NO_ERROR;
+    }
+
+    /* Focus area parse here */
+    err = parseAreas(str, (strlen(str) + 1), &focusArea, &num_areas);
+    if (CC_UNLIKELY(err < 0)) {
+        ALOGE("setFocusAreas: error, parseAreas %s", str);
+        return BAD_VALUE;
+    }
+    if (CC_UNLIKELY(num_areas > maxFocusAreas)) {
+        ALOGE("setFocusAreas: error, the number of areas is more than max");
+        return BAD_VALUE;
+    }
+
+    /* find center point */
+    err = findCenter(&focusArea, &center);
+    if (CC_UNLIKELY(err < 0)) {
+        ALOGE("setFocusAreas: error, findCenter");
+        return BAD_VALUE;
+    }
+
+    /* Normalization */
+    err = normalizeArea(&center);
+    if (err < 0) {
+        ALOGE("setFocusAreas: error, normalizeArea");
+        return BAD_VALUE;
+    }
+
+    ALOGD("setFocusAreas: FocusAreas(%s) to (%d, %d)", str, center.x, center.y);
+
+    mParameters.set(CameraParameters::KEY_FOCUS_AREAS, str);
+
+#if 0//def ENABLE_TOUCH_AF
+    if (CC_UNLIKELY(mFocusArea != V4L2_FOCUS_AREA_TRACKING)) {
+        err = nativeSetParameters(CAM_CID_SET_TOUCH_AF_POSX, center.x);
+        if (CC_UNLIKELY(err < 0)) {
+            ALOGE("setFocusAreas: error, SET_TOUCH_AF_POSX");
+            return UNKNOWN_ERROR;
+        }
+
+        err = nativeSetParameters(CAM_CID_SET_TOUCH_AF_POSY, center.y);
+        if (CC_UNLIKELY(err < 0)) {
+            ALOGE("setFocusAreas: error, SET_TOUCH_AF_POSY");
+            return UNKNOWN_ERROR;
+        }
+
+        return nativeSetParameters(CAM_CID_SET_TOUCH_AF, 1);
+    } else {
+        return nativeSetParameters(CAM_CID_SET_TOUCH_AF, 0);
+    }
+#endif
+
+    return NO_ERROR;
+}
+
+/* -------------------Focus Area ENDS here---------------------------- */
+status_t ISecCameraHardware::setIso(const CameraParameters &params)
+{
+    const char *str = params.get(CameraParameters::KEY_ISO);
+    const char *prevStr = mParameters.get(CameraParameters::KEY_ISO);
+    if (str == NULL || (prevStr && !strcmp(str, prevStr))) {
+        return NO_ERROR;
+    }
+    if (prevStr == NULL && !strcmp(str, isos[0].desc)) { /* default */
+        return NO_ERROR;
+    }
+
+    int val;
+retry:
+    val = SecCameraParameters::lookupAttr(isos, ARRAY_SIZE(isos), str);
+    if (CC_UNLIKELY(val == NOT_FOUND)) {
+        ALOGW("setIso: warning, not supported value(%s)", str);
+        /* str = reinterpret_cast<const char *>(isos[0].desc);
+        goto retry;
+        */
+        return BAD_VALUE;
+    }
+
+    CLOGD("DEBUG(%s) : %s ",__FUNCTION__, str);
+    mParameters.set(CameraParameters::KEY_ISO, str);
+
+    return nativeSetParameters(CAM_CID_ISO, val);
+}
+
+status_t ISecCameraHardware::setBrightness(const CameraParameters &params)
+{
+    int val;
+    if (CC_LIKELY(mSceneMode == SCENE_MODE_NONE)) {
+        val = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+    } else {
+        switch (mSceneMode) {
+        case SCENE_MODE_BEACH_SNOW:
+            val = 2;
+            break;
+
+        default:
+            val = 0;
+            break;
+        }
+    }
+
+    int prevVal = mParameters.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+    int max = mParameters.getInt(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION);
+    int min = mParameters.getInt(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION);
+    if (prevVal == val)
+        return NO_ERROR;
+
+    if (CC_UNLIKELY(val < min || val > max)) {
+        ALOGE("setBrightness: error, invalid value(%d)", val);
+        return BAD_VALUE;
+    }
+
+    ALOGD("setBrightness: %d", val);
+    mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, val);
+
+    return nativeSetParameters(CAM_CID_BRIGHTNESS, val);
+}
+
+status_t ISecCameraHardware::setWhiteBalance(const CameraParameters &params)
+{
+    const char *str;
+
+    if (CC_LIKELY(mSceneMode == SCENE_MODE_NONE)) {
+        str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+    } else {
+        switch (mSceneMode) {
+        case SCENE_MODE_SUNSET:
+        case SCENE_MODE_CANDLE_LIGHT:
+            str = CameraParameters::WHITE_BALANCE_DAYLIGHT;
+            break;
+
+        case SCENE_MODE_DUSK_DAWN:
+            str = CameraParameters::WHITE_BALANCE_FLUORESCENT;
+            break;
+
+        default:
+            str = CameraParameters::WHITE_BALANCE_AUTO;
+            break;
+        }
+    }
+    // str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+
+    const char *prevStr = mParameters.get(CameraParameters::KEY_WHITE_BALANCE);
+    if (str == NULL || (prevStr && !strcmp(str, prevStr)))
+        return NO_ERROR;
+
+    int val;
+retry:
+    val = SecCameraParameters::lookupAttr(whiteBalances, ARRAY_SIZE(whiteBalances), str);
+    if (CC_UNLIKELY(val == NOT_FOUND)) {
+        ALOGW("setWhiteBalance: warning, not supported value(%s)", str);
+        str = reinterpret_cast<const char *>(whiteBalances[0].desc);
+        goto retry;
+    }
+
+    CLOGD("DEBUG(%s) : %s ",__FUNCTION__, str);
+    mParameters.set(CameraParameters::KEY_WHITE_BALANCE, str);
+
+    return nativeSetParameters(CAM_CID_WHITE_BALANCE, val);
+}
+
+status_t ISecCameraHardware::setFlash(const CameraParameters &params)
+{
+    if (!IsFlashSupported())
+        return NO_ERROR;
+
+    const char *str;
+
+    if (CC_LIKELY(mSceneMode == SCENE_MODE_NONE)) {
+        str = params.get(CameraParameters::KEY_FLASH_MODE);
+    } else {
+        switch (mSceneMode) {
+        case SCENE_MODE_PORTRAIT:
+        case SCENE_MODE_PARTY_INDOOR:
+        case SCENE_MODE_BACK_LIGHT:
+        case SCENE_MODE_TEXT:
+            str = params.get(CameraParameters::KEY_FLASH_MODE);
+            break;
+
+        default:
+            str = CameraParameters::FLASH_MODE_OFF;
+            break;
+        }
+    }
+    // str = params.get(CameraParameters::KEY_FLASH_MODE);
+
+    const char *prevStr = mParameters.get(CameraParameters::KEY_FLASH_MODE);
+    if (str == NULL || (prevStr && !strcmp(str, prevStr)))
+        return NO_ERROR;
+
+    int val;
+retry:
+    val = SecCameraParameters::lookupAttr(flashModes, ARRAY_SIZE(flashModes), str);
+    if (CC_UNLIKELY(val == NOT_FOUND)) {
ALOGW("setFlash: warning, not supported value(%s)", str); + return BAD_VALUE; /* return BAD_VALUE if invalid parameter */ + } + + ALOGD("setFlash: %s", str); + mFlashMode = (cam_flash_mode)val; + mParameters.set(CameraParameters::KEY_FLASH_MODE, str); + + return nativeSetParameters(CAM_CID_FLASH, val); +} + +status_t ISecCameraHardware::setMetering(const CameraParameters ¶ms) +{ + const char *str; + if (CC_LIKELY(mSceneMode == SCENE_MODE_NONE)) { + str = params.get(SecCameraParameters::KEY_METERING); + } else { + switch (mSceneMode) { + case SCENE_MODE_LANDSCAPE: + str = SecCameraParameters::METERING_MATRIX; + break; + + case SCENE_MODE_BACK_LIGHT: + if (mFlashMode == V4L2_FLASH_MODE_OFF) + str = SecCameraParameters::METERING_SPOT; + else + str = SecCameraParameters::METERING_CENTER; + break; + + default: + str = SecCameraParameters::METERING_CENTER; + break; + } + } + + const char *prevStr = mParameters.get(SecCameraParameters::KEY_METERING); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + if (prevStr == NULL && !strcmp(str, meterings[0].desc)) /* default */ + return NO_ERROR; + + int val; +retry: + val = SecCameraParameters::lookupAttr(meterings, ARRAY_SIZE(meterings), str); + if (CC_UNLIKELY(val == NOT_FOUND)) { + ALOGW("setMetering: warning, not supported value(%s)", str); + str = reinterpret_cast(meterings[0].desc); + goto retry; + } + + CLOGD("DEBUG(%s) : %s ",__FUNCTION__, str); + mParameters.set(SecCameraParameters::KEY_METERING, str); + + return nativeSetParameters(CAM_CID_METERING, val); +} + +status_t ISecCameraHardware::setMeteringAreas(const CameraParameters ¶ms) +{ + const char *str = params.get(CameraParameters::KEY_METERING_AREAS); + const char *prevStr = mParameters.get(CameraParameters::KEY_METERING_AREAS); + if ((str == NULL) || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + + /* Metering area use same strcut with Focus area */ + struct FocusArea meteringArea; + int err, num_areas = 0; + const char *maxMeteringAreasStr = params.get(CameraParameters::KEY_MAX_NUM_METERING_AREAS); + if (!maxMeteringAreasStr) { + ALOGE("setMeteringAreas: error, KEY_MAX_NUM_METERING_AREAS is NULL"); + return NO_ERROR; + } + + int maxMeteringAreas = atoi(maxMeteringAreasStr); + if (!maxMeteringAreas) { + ALOGD("setMeteringAreas: FocusAreas is not supported"); + return NO_ERROR; + } + + /* Metering area parse and check(max value) here */ + err = parseAreas(str, (strlen(str) + 1), &meteringArea, &num_areas); + if (CC_UNLIKELY(err < 0)) { + ALOGE("setMeteringAreas: error, parseAreas %s", str); + return BAD_VALUE; + } + if (CC_UNLIKELY(num_areas > maxMeteringAreas)) { + ALOGE("setMeteringAreas: error, the number of areas is more than max"); + return BAD_VALUE; + } + + ALOGD("setMeteringAreas = %s\n", str); + mParameters.set(CameraParameters::KEY_METERING_AREAS, str); + + return NO_ERROR; +} + +status_t ISecCameraHardware::setFocusMode(const CameraParameters ¶ms) +{ + status_t Ret = NO_ERROR; + + const char *str = params.get(CameraParameters::KEY_FOCUS_MODE); + const char *prevStr = mParameters.get(CameraParameters::KEY_FOCUS_MODE); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + + int count, val; + const cam_strmap_t *focusModes; + + if (mCameraId == CAMERA_ID_BACK) { + count = ARRAY_SIZE(backFocusModes); + focusModes = backFocusModes; + } else { + count = ARRAY_SIZE(frontFocusModes); + focusModes = frontFocusModes; + } + +retry: + val = SecCameraParameters::lookupAttr(focusModes, count, str); + if (CC_UNLIKELY(val == 
NOT_FOUND)) { + ALOGW("setFocusMode: warning, not supported value(%s)", str); + return BAD_VALUE; /* return BAD_VALUE if invalid parameter */ + } + + CLOGD("DEBUG(%s) : %s ",__FUNCTION__, str); + mFocusMode = (cam_focus_mode)val; + mParameters.set(CameraParameters::KEY_FOCUS_MODE, str); + if (val == V4L2_FOCUS_MODE_MACRO) { + mParameters.set(CameraParameters::KEY_FOCUS_DISTANCES, + B_KEY_MACRO_FOCUS_DISTANCES_VALUE); + } else { + mParameters.set(CameraParameters::KEY_FOCUS_DISTANCES, + B_KEY_NORMAL_FOCUS_DISTANCES_VALUE); + } + + return nativeSetParameters(CAM_CID_FOCUS_MODE, val); +} + +status_t ISecCameraHardware::setEffect(const CameraParameters ¶ms) +{ + const char *str = params.get(CameraParameters::KEY_EFFECT); + const char *prevStr = mParameters.get(CameraParameters::KEY_EFFECT); + if (str == NULL || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + + int val; +retry: + val = SecCameraParameters::lookupAttr(effects, ARRAY_SIZE(effects), str); + if (CC_UNLIKELY(val == NOT_FOUND)) { + ALOGW("setEffect: warning, not supported value(%s)", str); +#if 0 + str = reinterpret_cast(effects[0].desc); + goto retry; +#else + return BAD_VALUE; +#endif /* FOR HAL TEST */ + } + + CLOGD("DEBUG(%s) : %s ",__FUNCTION__, str); + mParameters.set(CameraParameters::KEY_EFFECT, str); + + return nativeSetParameters(CAM_CID_EFFECT, val); +} + +status_t ISecCameraHardware::setZoom(const CameraParameters ¶ms) +{ + if (!mZoomSupport) + return NO_ERROR; + + int val = params.getInt(CameraParameters::KEY_ZOOM); + int prevVal = mParameters.getInt(CameraParameters::KEY_ZOOM); + int err; + + if (val == -1 || prevVal == val) + return NO_ERROR; + + int max = params.getInt(CameraParameters::KEY_MAX_ZOOM); + if (CC_UNLIKELY(val < 0 || val > max)) { + ALOGE("setZoom: error, invalid value(%d)", val); + return BAD_VALUE; + } + + mParameters.set(CameraParameters::KEY_ZOOM, val); + + if (mEnableDZoom) + /* Set AP zoom ratio */ + return nativeSetZoomRatio(val); + else + /* Set ISP/sensor zoom ratio */ + return nativeSetParameters(CAM_CID_ZOOM, val); +} + +status_t ISecCameraHardware::setDzoom(const CameraParameters& params) +{ + int val = params.getInt("dzoom"); + int prevVal = mParameters.getInt("dzoom"); + int err; + + if (prevVal == val) + return NO_ERROR; + if (val < V4L2_ZOOM_LEVEL_0 || val >= 12) { + ALOGE("invalid value for DZOOM val = %d", val); + return BAD_VALUE; + } + + CLOGD("setDZoom LEVEL %d->%d", prevVal, val); + mParameters.set("dzoom", val); + +// err = nativeSetParameters(CAM_CID_DZOOM, val); + if (err < 0) { + CLOGE("%s: setDZoom failed", __func__); + return err; + } + + return NO_ERROR; +} + +status_t ISecCameraHardware::setSharpness(const CameraParameters& params) +{ + int val = params.getInt("sharpness"); + int prevVal = mParameters.getInt("sharpness"); + if (prevVal == val) + return NO_ERROR; + + if (CC_UNLIKELY(val < -2 || val > 2)) { + ALOGE("setSharpness: error, invalid value(%d)", val); + return BAD_VALUE; + } + + ALOGD("setSharpness: %d", val); + mParameters.set("sharpness", val); + + if (mSceneMode == SCENE_MODE_NONE) + return nativeSetParameters(CAM_CID_SHARPNESS, val + 2); + + return NO_ERROR; +} + +status_t ISecCameraHardware::setContrast(const CameraParameters& params) +{ + int val = params.getInt("contrast"); + int prevVal = mParameters.getInt("contrast"); + if (prevVal == val) + return NO_ERROR; + + if (CC_UNLIKELY(val < -2 || val > 2)) { + ALOGE("setContrast: error, invalid value(%d)", val); + return BAD_VALUE; + } + + ALOGD("setContrast: %d", val); + mParameters.set("contrast", val); 
+ + if (mSceneMode == SCENE_MODE_NONE) + return nativeSetParameters(CAM_CID_CONTRAST, val + 2); + + return NO_ERROR; +} + +status_t ISecCameraHardware::setSaturation(const CameraParameters& params) +{ + int val = params.getInt("saturation"); + int prevVal = mParameters.getInt("saturation"); + if (prevVal == val) + return NO_ERROR; + + if (CC_UNLIKELY(val < -2 || val > 2)) { + ALOGE("setSaturation: error, invalid value(%d)", val); + return BAD_VALUE; + } + + ALOGD("setSaturation: %d", val); + mParameters.set("saturation", val); + + if (mSceneMode == SCENE_MODE_NONE) + return nativeSetParameters(CAM_CID_SATURATION, val + 2); + + return NO_ERROR; +} + +status_t ISecCameraHardware::setAntiShake(const CameraParameters ¶ms) +{ + int val = params.getInt(SecCameraParameters::KEY_ANTI_SHAKE); + int prevVal = mParameters.getInt(SecCameraParameters::KEY_ANTI_SHAKE); + if (val == -1 || prevVal == val) + return NO_ERROR; + if (prevVal == -1 && val == 0) /* default */ + return NO_ERROR; + + ALOGD("setAntiShake: %d", val); + mParameters.set(SecCameraParameters::KEY_ANTI_SHAKE, val); + + return nativeSetParameters(CAM_CID_ANTISHAKE, val); +} + +status_t ISecCameraHardware::setBlur(const CameraParameters ¶ms) +{ + int val = params.getInt(SecCameraParameters::KEY_BLUR); + int prevVal = mParameters.getInt(SecCameraParameters::KEY_BLUR); + if (val == -1 || prevVal == val) + return NO_ERROR; + if (prevVal == -1 && val == 0) /* default */ + return NO_ERROR; + + ALOGD("setBlur: %d", val); + mParameters.set(SecCameraParameters::KEY_BLUR, val); + if (val > 0) + setDropFrame(2); + + return nativeSetParameters(CAM_CID_BLUR, val); +} + +int ISecCameraHardware::checkFnumber(int f_val, int zoomLevel) +{ + int err = NO_ERROR; + + if (f_val == 0) { + ALOGD("checkFnumber: f number is set to default value. 
f_val = %d", + f_val); + return err; + } + + switch (zoomLevel) { + case 0: + if (f_val != 31 && f_val != 90) + err = BAD_VALUE; + break; + case 1: + if (f_val != 34 && f_val != 95) + err = BAD_VALUE; + break; + case 2: + if (f_val != 35 && f_val != 100) + err = BAD_VALUE; + break; + case 3: + if (f_val != 37 && f_val != 104) + err = BAD_VALUE; + break; + case 4: + if (f_val != 38 && f_val != 109) + err = BAD_VALUE; + break; + case 5: + if (f_val != 40 && f_val != 113) + err = BAD_VALUE; + break; + case 6: + if (f_val != 41 && f_val != 116) + err = BAD_VALUE; + break; + case 7: + if (f_val != 42 && f_val != 119) + err = BAD_VALUE; + break; + case 8: + if (f_val != 43 && f_val != 122) + err = BAD_VALUE; + break; + case 9: + if (f_val != 44 && f_val != 125) + err = BAD_VALUE; + break; + case 10: + if (f_val != 45 && f_val != 127) + err = BAD_VALUE; + break; + case 11: + if (f_val != 46 && f_val != 129) + err = BAD_VALUE; + break; + case 12: + if (f_val != 46 && f_val != 131) + err = BAD_VALUE; + break; + case 13: + if (f_val != 47 && f_val != 134) + err = BAD_VALUE; + break; + case 14: + if (f_val != 48 && f_val != 136) + err = BAD_VALUE; + break; + case 15: + if (f_val != 49 && f_val != 139) + err = BAD_VALUE; + break; + case 16: + if (f_val != 50 && f_val != 142) + err = BAD_VALUE; + break; + case 17: + if (f_val != 51 && f_val != 145) + err = BAD_VALUE; + break; + case 18: + if (f_val != 52 && f_val != 148) + err = BAD_VALUE; + break; + case 19: + if (f_val != 54 && f_val != 152) + err = BAD_VALUE; + break; + case 20: + if (f_val != 55 && f_val != 156) + err = BAD_VALUE; + break; + case 21: + if (f_val != 56 && f_val != 159) + err = BAD_VALUE; + break; + case 22: + if (f_val != 58 && f_val != 163) + err = BAD_VALUE; + break; + case 23: + if (f_val != 59 && f_val != 167) + err = BAD_VALUE; + break; + case 24: + if (f_val != 60 && f_val != 170) + err = BAD_VALUE; + break; + case 25: + if (f_val != 61 && f_val != 173) + err = BAD_VALUE; + break; + case 26: + if (f_val != 62 && f_val != 176) + err = BAD_VALUE; + break; + case 27: + if (f_val != 63 && f_val != 179) + err = BAD_VALUE; + break; + case 28: + if (f_val != 63 && f_val != 182) + err = BAD_VALUE; + break; + case 29: + if (f_val != 63 && f_val != 184) + err = BAD_VALUE; + break; + default: + err = NO_ERROR; + break; + } + return err; +} + +status_t ISecCameraHardware::setAntiBanding() +{ + status_t ret = NO_ERROR; + const char *prevStr = mParameters.get(CameraParameters::KEY_ANTIBANDING); + + if (prevStr && !strcmp(mAntiBanding, prevStr)) + return NO_ERROR; + +retry: + int val = SecCameraParameters::lookupAttr(antibandings, ARRAY_SIZE(antibandings), mAntiBanding); + if (CC_UNLIKELY(val == NOT_FOUND)) { + ALOGE("setAntiBanding: error, not supported value(%s)", mAntiBanding); + return BAD_VALUE; + } + ALOGD("setAntiBanding: %s", mAntiBanding); + mParameters.set(CameraParameters::KEY_ANTIBANDING, mAntiBanding); + return nativeSetParameters(CAM_CID_ANTIBANDING, val); +} + +status_t ISecCameraHardware::setAntiBanding(const CameraParameters ¶ms) +{ + const char *str = params.get(CameraParameters::KEY_ANTIBANDING); + const char *prevStr = mParameters.get(CameraParameters::KEY_ANTIBANDING); + + if (str == NULL || (prevStr && !strcmp(str, prevStr))) + return NO_ERROR; + + int val; + +retry: + val = SecCameraParameters::lookupAttr(antibandings, ARRAY_SIZE(antibandings), str); + if (CC_UNLIKELY(val == NOT_FOUND)) { + ALOGW("setAntiBanding: warning, not supported value(%s)", str); + str = reinterpret_cast(antibandings[0].desc); + goto retry; + } 
+ + ALOGD("setAntiBanding: %s, val: %d", str, val); + mParameters.set(CameraParameters::KEY_ANTIBANDING, str); + + return nativeSetParameters(CAM_CID_ANTIBANDING, val); +} + +status_t ISecCameraHardware::setGps(const CameraParameters ¶ms) +{ + const char *latitude = params.get(CameraParameters::KEY_GPS_LATITUDE); + const char *logitude = params.get(CameraParameters::KEY_GPS_LONGITUDE); + const char *altitude = params.get(CameraParameters::KEY_GPS_ALTITUDE); + if (latitude && logitude && altitude) { + ALOGV("setParameters: GPS latitude %f, logitude %f, altitude %f", + atof(latitude), atof(logitude), atof(altitude)); + mParameters.set(CameraParameters::KEY_GPS_LATITUDE, latitude); + mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, logitude); + mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, altitude); + } else { + mParameters.remove(CameraParameters::KEY_GPS_LATITUDE); + mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE); + mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE); + } + + const char *timestamp = params.get(CameraParameters::KEY_GPS_TIMESTAMP); + if (timestamp) { + ALOGV("setParameters: GPS timestamp %s", timestamp); + mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, timestamp); + } else { + mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP); + } + + const char *progressingMethod = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD); + if (progressingMethod) { + ALOGV("setParameters: GPS timestamp %s", timestamp); + mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, progressingMethod); + } else { + mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD); + } + + return NO_ERROR; +} + +const image_rect_type *ISecCameraHardware::getFrameSizeSz + (const image_rect_type *sizes, int count, uint32_t width, uint32_t height) +{ + for (int i = 0; i < count; i++) { + if ((sizes[i].width == width) && (sizes[i].height == height)) + return &sizes[i]; + } + + return NULL; +} + +const image_rect_type *ISecCameraHardware::getFrameSizeRatio + (const image_rect_type *sizes, int count, uint32_t width, uint32_t height) +{ + const uint32_t ratio = SIZE_RATIO(width, height); + int found = -ENOENT; + + for (int i = 0; i < count; i++) { + if ((sizes[i].width == width) && (sizes[i].height == height)) + return &sizes[i]; + + if (FRM_RATIO(sizes[i]) == ratio) { + if ((-ENOENT == found) || + ((sizes[i].width < sizes[found].width) && + (sizes[i].width > width))) + found = i; + } + } + + if (found != -ENOENT) { + ALOGD("get_framesize: %dx%d -> %dx%d\n", width, height, + sizes[found].width, sizes[found].height); + return &sizes[found]; + } + + return NULL; +} + +void ISecCameraHardware::setZoomRatioList(int *list, int len, float maxZoomRatio) +{ + float zoom_ratio_delta = pow(maxZoomRatio, 1.0f / len); + + for (int i = 0; i <= len; i++) { + list[i] = (int)(pow(zoom_ratio_delta, i) * 1000); + ALOGD("INFO(%s):list[%d]:(%d), (%f)", + __func__, i, list[i], (float)((float)list[i] / 1000)); + } +} + +status_t ISecCameraHardware::getZoomRatioList(String8 & string8Buf, + int maxZoom, int maxZoomRatio, int *list) +{ + char strBuf[32]; + int cur = 0; + int step = maxZoom - 1; + + setZoomRatioList(list, maxZoom - 1, (float)(maxZoomRatio / 1000)); + + for (int i = 0; i < step; i++) { + cur = (int)(list[i] / 10); + snprintf(strBuf, sizeof(strBuf), "%d", cur); + string8Buf.append(strBuf); + string8Buf.append(","); + } + + snprintf(strBuf, sizeof(strBuf), "%d", (maxZoomRatio / 10)); + string8Buf.append(strBuf); + + /* ex : "100,130,160,190,220,250,280,310,340,360,400" */ + + 
return NO_ERROR; +} + +bool ISecCameraHardware::allocMemSinglePlane(int ionClient, + ExynosBuffer *buf, int index, bool flagCache) +{ + if (ionClient == 0) { + ALOGE("ERR(%s): ionClient is zero (%d)", __func__, ionClient); + return false; + } + + if (buf->size.extS[index] != 0) { + int ret = NO_ERROR; + int flagIon = ((flagCache == true) ? + (ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC ) : 0); + + ret = ion_alloc_fd(ionClient, buf->size.extS[index], 0, + ION_HEAP_SYSTEM_MASK, flagIon, &buf->fd.extFd[index]); + if (ret < 0) { + ALOGE("ERR(%s):ion_alloc_fd(fd=%d) failed(%s)", __FUNCTION__, + buf->fd.extFd[index], strerror(errno)); + buf->fd.extFd[index] = -1; + return false; + } + + buf->virt.extP[index] = (char *)mmap(NULL, buf->size.extS[index], + PROT_READ|PROT_WRITE, MAP_SHARED, buf->fd.extFd[index], 0); + + if (buf->virt.extP[index] == (char *)MAP_FAILED || buf->virt.extP[index] == NULL) { + ALOGE("ERR(%s):ion_map(size=%d) failed", __FUNCTION__, buf->size.extS[index]); + buf->virt.extP[index] = NULL; + return false; + } + } + + return true; +} + +void ISecCameraHardware::freeMemSinglePlane(ExynosBuffer *buf, int index) +{ + if (0 < buf->fd.extFd[index]) { + if (buf->virt.extP[index] != NULL) { + if (munmap(buf->virt.extP[index], buf->size.extS[index]) < 0) { + ALOGE("ERR(%s):munmap failed", __FUNCTION__); + } + } + ion_close(buf->fd.extFd[index]); + } + + buf->fd.extFd[index] = -1; + buf->virt.extP[index] = NULL; + buf->size.extS[index] = 0; +} + +bool ISecCameraHardware::allocMem(int ionClient, ExynosBuffer *buf, int cacheIndex) +{ + for (int i = 0; i < ExynosBuffer::BUFFER_PLANE_NUM_DEFAULT; i++) { + bool flagCache = ((1 << i) & cacheIndex) ? true : false; + if (allocMemSinglePlane(ionClient, buf, i, flagCache) == false) { + freeMem(buf); + ALOGE("ERR(%s): allocMemSinglePlane(%d) fail", __func__, i); + return false; + } + } + + return true; +} + +void ISecCameraHardware::freeMem(ExynosBuffer *buf) +{ + for (int i = 0; i < ExynosBuffer::BUFFER_PLANE_NUM_DEFAULT; i++) + freeMemSinglePlane(buf, i); +} + +void ISecCameraHardware::mInitRecSrcQ(void) +{ + Mutex::Autolock lock(mRecordSrcLock); + mRecordSrcIndex = -1; + + mRecordSrcQ.clear(); +} + +int ISecCameraHardware::getRecSrcBufSlotIndex(void) +{ + Mutex::Autolock lock(mRecordSrcLock); + mRecordSrcIndex++; + mRecordSrcIndex = mRecordSrcIndex % FLITE_BUF_CNT; + return mRecordSrcIndex; +} + +void ISecCameraHardware::mPushRecSrcQ(rec_src_buf_t *buf) +{ + Mutex::Autolock lock(mRecordSrcLock); + mRecordSrcQ.push_back(buf); +} + +bool ISecCameraHardware::mPopRecSrcQ(rec_src_buf_t *buf) +{ + List::iterator r; + + Mutex::Autolock lock(mRecordSrcLock); + + if (mRecordSrcQ.size() == 0) + return false; + + r = mRecordSrcQ.begin()++; + + buf->buf = (*r)->buf; + buf->timestamp = (*r)->timestamp; + mRecordSrcQ.erase(r); + + return true; +} + +int ISecCameraHardware::mSizeOfRecSrcQ(void) +{ + Mutex::Autolock lock(mRecordSrcLock); + + return mRecordSrcQ.size(); +} + +#if 0 +bool ISecCameraHardware::setRecDstBufStatus(int index, enum REC_BUF_STATUS status) +{ + Mutex::Autolock lock(mRecordDstLock); + + if (index < 0 || index >= REC_BUF_CNT) { + ALOGE("ERR(%s): index(%d) out of range, status(%d)", __func__, index, status); + return false; + } + + mRecordDstStatus[index] = status; + return true; +} +#endif + +int ISecCameraHardware::getRecDstBufIndex(void) +{ + Mutex::Autolock lock(mRecordDstLock); + + for (int i = 0; i < REC_BUF_CNT; i++) { + mRecordDstIndex++; + mRecordDstIndex = mRecordDstIndex % REC_BUF_CNT; + + if 
(mRecordFrameAvailable[mRecordDstIndex] == true) { + mRecordFrameAvailableCnt--; + mRecordFrameAvailable[mRecordDstIndex] = false; + return mRecordDstIndex; + } + } + + return -1; +} + +void ISecCameraHardware::setAvailDstBufIndex(int index) +{ + Mutex::Autolock lock(mRecordDstLock); + mRecordFrameAvailableCnt++; + mRecordFrameAvailable[index] = true; + return; +} + +void ISecCameraHardware::mInitRecDstBuf(void) +{ + Mutex::Autolock lock(mRecordDstLock); + + ExynosBuffer nullBuf; + + mRecordDstIndex = -1; + mRecordFrameAvailableCnt = REC_BUF_CNT; + + for (int i = 0; i < REC_BUF_CNT; i++) { +#ifdef BOARD_USE_MHB_ION + for (int j = 0; j < REC_PLANE_CNT; j++) { + if (mRecordDstHeap[i][j] != NULL) + mRecordDstHeap[i][j]->release(mRecordDstHeap[i][j]); + mRecordDstHeap[i][j] = NULL; + mRecordDstHeapFd[i][j] = -1; + } +#else + if (0 < mRecordingDstBuf[i].fd.extFd[0]) + freeMem(&mRecordingDstBuf[i]); +#endif + mRecordingDstBuf[i] = nullBuf; + mRecordFrameAvailable[i] = true; + } +} + +int ISecCameraHardware::getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf, bool flagAndroidColorFormat) +{ + int FrameSize = 0; + ExynosBuffer alignedBuf; + + /* ALOGV("[%s] (%d) colorFormat %d", __func__, __LINE__, colorFormat); */ + switch (colorFormat) { + /* 1p */ + case V4L2_PIX_FMT_RGB565 : + case V4L2_PIX_FMT_YUYV : + case V4L2_PIX_FMT_UYVY : + case V4L2_PIX_FMT_VYUY : + case V4L2_PIX_FMT_YVYU : + alignedBuf.size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h); + if(h==1080){ + alignedBuf.size.extS[0] += 1024*28; + } + /* ALOGV("V4L2_PIX_FMT_YUYV buf->size.extS[0] %d", alignedBuf->size.extS[0]); */ + alignedBuf.size.extS[1] = SPARE_SIZE; + alignedBuf.size.extS[2] = 0; + break; + /* 2p */ + case V4L2_PIX_FMT_NV12 : + case V4L2_PIX_FMT_NV12T : + case V4L2_PIX_FMT_NV21 : + case V4L2_PIX_FMT_NV12M : + case V4L2_PIX_FMT_NV21M : + alignedBuf.size.extS[0] = w * h; + alignedBuf.size.extS[1] = w * h / 2; + alignedBuf.size.extS[2] = SPARE_SIZE; + /* ALOGV("V4L2_PIX_FMT_NV21 buf->size.extS[0] %d buf->size.extS[1] %d", + alignedBuf->size.extS[0], alignedBuf->size.extS[1]); */ + break; + case V4L2_PIX_FMT_NV12MT_16X16 : + if (flagAndroidColorFormat == true) { + alignedBuf.size.extS[0] = w * h; + alignedBuf.size.extS[1] = w * h / 2; + alignedBuf.size.extS[2] = SPARE_SIZE; + } else { + alignedBuf.size.extS[0] = ALIGN_UP(w, 16) * ALIGN_UP(h, 16); + alignedBuf.size.extS[1] = ALIGN(alignedBuf.size.extS[0] / 2, 256); + alignedBuf.size.extS[2] = SPARE_SIZE; + } + /* ALOGV("V4L2_PIX_FMT_NV12M buf->size.extS[0] %d buf->size.extS[1] %d", + alignedBuf->size.extS[0], alignedBuf->size.extS[1]); */ + break; + case V4L2_PIX_FMT_NV16 : + case V4L2_PIX_FMT_NV61 : + alignedBuf.size.extS[0] = ALIGN_UP(w, 16) * ALIGN_UP(h, 16); + alignedBuf.size.extS[1] = ALIGN_UP(w, 16) * ALIGN_UP(h, 16); + alignedBuf.size.extS[2] = SPARE_SIZE; + /* ALOGV("V4L2_PIX_FMT_NV16 buf->size.extS[0] %d buf->size.extS[1] %d", + alignedBuf->size.extS[0], alignedBuf->size.extS[1]); */ + break; + /* 3p */ + case V4L2_PIX_FMT_YUV420 : + case V4L2_PIX_FMT_YVU420 : + /* http://developer.android.com/reference/android/graphics/ImageFormat.html#YV12 */ + alignedBuf.size.extS[0] = ALIGN_UP(w, 16) * h; + alignedBuf.size.extS[1] = ALIGN_UP(w / 2, 16) * h / 2; + alignedBuf.size.extS[2] = ALIGN_UP(w / 2, 16) * h / 2; + alignedBuf.size.extS[3] = SPARE_SIZE; + /* ALOGV("V4L2_PIX_FMT_YUV420 Buf.size.extS[0] %d Buf.size.extS[1] %d Buf.size.extS[2] %d", + alignedBuf.size.extS[0], alignedBuf.size.extS[1], alignedBuf.size.extS[2]); */ + break; + 
case V4L2_PIX_FMT_YUV420M : + case V4L2_PIX_FMT_YVU420M : + if (flagAndroidColorFormat == true) { + alignedBuf.size.extS[0] = ALIGN_UP(w, 16) * h; + alignedBuf.size.extS[1] = ALIGN_UP(w / 2, 16) * h / 2; + alignedBuf.size.extS[2] = ALIGN_UP(w / 2, 16) * h / 2; + alignedBuf.size.extS[3] = SPARE_SIZE; + } else { + alignedBuf.size.extS[0] = ALIGN_UP(w, 32) * ALIGN_UP(h, 16); + alignedBuf.size.extS[1] = ALIGN_UP(w/2, 16) * ALIGN_UP(h/2, 8); + alignedBuf.size.extS[2] = ALIGN_UP(w/2, 16) * ALIGN_UP(h/2, 8); + alignedBuf.size.extS[3] = SPARE_SIZE; + } + /* ALOGV("V4L2_PIX_FMT_YUV420M buf->size.extS[0] %d buf->size.extS[1] %d buf->size.extS[2] %d", + alignedBuf->size.extS[0], alignedBuf->size.extS[1], alignedBuf->size.extS[2]); */ + break; + case V4L2_PIX_FMT_YUV422P : + alignedBuf.size.extS[0] = ALIGN_UP(w, 16) * ALIGN_UP(h, 16); + alignedBuf.size.extS[1] = ALIGN_UP(w/2, 16) * ALIGN_UP(h/2, 8); + alignedBuf.size.extS[2] = ALIGN_UP(w/2, 16) * ALIGN_UP(h/2, 8); + alignedBuf.size.extS[3] = SPARE_SIZE; + /* ALOGV("V4L2_PIX_FMT_YUV422P Buf.size.extS[0] %d Buf.size.extS[1] %d Buf.size.extS[2] %d", + alignedBuf.size.extS[0], alignedBuf.size.extS[1], alignedBuf.size.extS[2]); */ + break; + case V4L2_PIX_FMT_JPEG: + alignedBuf.size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_YUYV), w, h); + alignedBuf.size.extS[1] = SPARE_SIZE; + alignedBuf.size.extS[2] = 0; + ALOGD("V4L2_PIX_FMT_JPEG buf->size.extS[0] = %d", alignedBuf.size.extS[0]); + break; + default: + ALOGE("ERR(%s):unmatched colorFormat(%d)", __func__, colorFormat); + return 0; + break; + } + + for (int i = 0; i < ExynosBuffer::BUFFER_PLANE_NUM_DEFAULT; i++) + FrameSize += alignedBuf.size.extS[i]; + + if (buf != NULL) { + for (int i = 0; i < ExynosBuffer::BUFFER_PLANE_NUM_DEFAULT; i++) { + buf->size.extS[i] = alignedBuf.size.extS[i]; + + /* if buf has vadr, calculate another vadr per plane */ + if (buf->virt.extP[0] != NULL && i > 0) { + if (buf->size.extS[i] != 0) + buf->virt.extP[i] = buf->virt.extP[i - 1] + buf->size.extS[i - 1]; + else + buf->virt.extP[i] = NULL; + } + } + } + return (FrameSize - SPARE_SIZE); +} + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD +status_t ISecCameraHardware::m_clearPreviewFrameList(ExynosCameraList* queue) +{ + buffer_handle_t *curFrame = NULL; + + if(queue->getSizeOfProcessQ() == 0) { + return NO_ERROR; + } + + ALOGD("DEBUG(%s):remaining frame(%d), we remove them all", + __FUNCTION__, queue->getSizeOfProcessQ()); + + while (0 < queue->getSizeOfProcessQ()) { + queue->popProcessQ(&curFrame); + if (curFrame != NULL) { + ALOGD("DEBUG(%s):remove frame", __FUNCTION__); + if (mPreviewWindow) { + if(mPreviewWindow->cancel_buffer(mPreviewWindow, curFrame) != 0) { + ALOGE("ERR(%s):Fail to cancel buffer", __func__); + } + } else { + ALOGW("DEBUG(%s):mPreviewWindow is NULL", __FUNCTION__); + } + curFrame = NULL; + } + } + ALOGD("DEBUG(%s):EXIT ", __FUNCTION__); + + return NO_ERROR; +} +#endif + +void ISecCameraHardware::checkHorizontalViewAngle(void) +{ + mParameters.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, getHorizontalViewAngle()); +} + +void ISecCameraHardware::setHorizontalViewAngle(int pictureW, int pictureH) +{ + double pi_camera = 3.1415926f; + double distance; + double ratio; + double hViewAngle_half_rad = pi_camera / 180 * + (double)findHorizontalViewAngleByRatio(SIZE_RATIO(16, 9)) / 2; + + distance = ((double)mSensorSize.width / (double)mSensorSize.height * 9 / 2) + / tan(hViewAngle_half_rad); + ratio = (double)pictureW / (double)pictureH; + + m_calculatedHorizontalViewAngle = 
(float)(atan(ratio * 9 / 2 / distance) * 2 * 180 / pi_camera); +} + + +float ISecCameraHardware::findHorizontalViewAngleByRatio(uint32_t ratio) +{ + uint32_t i; + float view_angle = 0.0f; + + if (mCameraId == CAMERA_ID_BACK) { + for (i = 0; i < ARRAY_SIZE(backHorizontalViewAngle); i++) { + if (ratio == backHorizontalViewAngle[i].size_ratio) { + view_angle = backHorizontalViewAngle[i].view_angle; + break; + } + } + } else { + for (i = 0; i < ARRAY_SIZE(frontHorizontalViewAngle); i++) { + if (ratio == frontHorizontalViewAngle[i].size_ratio) { + view_angle = frontHorizontalViewAngle[i].view_angle; + break; + } + } + } + + return view_angle; +} + +float ISecCameraHardware::getHorizontalViewAngle(void) +{ + int right_ratio = 177; + + if ((int)(mSensorSize.width * 100 / mSensorSize.height) == right_ratio) { + return m_calculatedHorizontalViewAngle; + } else { + return findHorizontalViewAngleByRatio(SIZE_RATIO(mPictureSize.width, mPictureSize.height)); + } +} + +float ISecCameraHardware::getVerticalViewAngle(void) +{ + if (mCameraId == CAMERA_ID_BACK) { + return backVerticalViewAngle; + } else { + return frontVerticalViewAngle; + } +} + +}; /* namespace android */ +#endif /* ANDROID_HARDWARE_ISECCAMERAHARDWARE_CPP */ diff --git a/libcamera_external/ISecCameraHardware.h b/libcamera_external/ISecCameraHardware.h new file mode 100644 index 0000000..434ca3d --- /dev/null +++ b/libcamera_external/ISecCameraHardware.h @@ -0,0 +1,888 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! 
+ * \file ISecCameraHardware.h + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_ISECCAMERAHARDWARE_H +#define ANDROID_HARDWARE_ISECCAMERAHARDWARE_H + +/* + * Common Header file + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#ifndef HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL +#define HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL (HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL) /* 0x11E, */ +#endif + +#ifndef HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP +#define HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP (HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M) /* 0x11D, */ +#endif + +/* use exynos feature */ +#include +#include "ExynosBuffer.h" +#include "exynos_v4l2.h" +#include "exynos_format.h" +#include "csc.h" +#include "gralloc_priv.h" + +/* + * Model-specific Header file + */ +#include "model/include/SecCameraHardware-model.h" +#include "SecCameraCommonConfig.h" +#include "SecCameraParameters.h" + +#include "ExynosCameraAutoTimer.h" +#include "ExynosCameraList.h" + +/* + * Define debug feature + */ +#ifdef CAMERA_DEBUG_ALL +#define DEBUG_THREAD_EXIT_WAIT +#define DEBUG_PREVIEW_CALLBACK +#define DEBUG_CAPTURE_RETRY +#endif + +//#define SAVE_DUMP + +#define SPARE_SIZE (32 * 1024) +#define SPARE_PLANE (1) + +#define FD_SERVICE_CAMERA_ID 2 + +#define DUMP_FIRST_PREVIEW_FRAME + +#define USE_USERPTR +#define USE_CAPTURE_MODE +//#define USE_VIDIOC_ENUM_FORMAT +//#define USE_VIDIOC_QUERY_CAP +#define NUM_FLITE_BUF_PLANE (1) +#define FLITE_BUF_CNT (8) +#define PREVIEW_BUF_CNT (8) +#define REC_BUF_CNT (8) +#define REC_PLANE_CNT (2) +#define SKIP_CAPTURE_CNT (1) +#define NUM_OF_DETECTED_FACES (16) + +#define NUM_OF_AE_WINDOW_ROW (17) +#define NUM_OF_AE_WINDOW_COLUMN (13) +#define SIZE_OF_AE_WINDOW_ROW (1) +#define SIZE_OF_AE_WINDOW_COLUMN (1) + +#if !defined(LOG_NFPS) || LOG_NFPS +#define LOG_FPS(...) ((void)0) +#else +#define LOG_FPS(...) ((void)ALOG(LOG_DEBUG, LOG_TAG, __VA_ARGS__)) +#endif + +#if !defined(LOG_NPERFORMANCE) || LOG_NPERFORMANCE +#define LOG_PERFORMANCE_START(n) ((void)0) +#define LOG_PERFORMANCE_END(n, tag) ((void)0) +#else +#define LOG_PERFORMANCE_START(n) \ + struct timeval time_start_##n, time_stop_##n; \ + gettimeofday(&time_start_##n, NULL) + +#define LOG_PERFORMANCE_END(n, tag) \ + gettimeofday(&time_stop_##n, NULL); \ + ALOGD("%s: %s %ld us", __FUNCTION__, tag, \ + (time_stop_##n.tv_sec * 1000000 + time_stop_##n.tv_usec) \ + - (time_start_##n.tv_sec * 1000000 + time_start_##n.tv_usec)) +#endif + +#define LOG_RECORD_TRACE(fmt, ...) 
\ + if (CC_UNLIKELY(mRecordingTrace)) { \ + ALOGI("%s: " fmt, __FUNCTION__, ##__VA_ARGS__); \ + } + +#define HDR_BUF_CNT 3 +#define CAP_CNT_MAGIC 32 + +#define HWC_ON + +//#define ALLOCATION_REC_BUF_BY_MEM_CB + +#define CLEAR(x) memset(&(x), 0, sizeof(x)) + +namespace android { + +/* cmdType in sendCommand functions */ +enum { + CAMERA_CMD_SET_TOUCH_AF_POSITION = 1503, // CAMERA_CMD_SET_TOUCH_AF_POSITION = 1103, + HAL_OBJECT_TRACKING_STARTSTOP = 1504, + CAMERA_CMD_START_STOP_TOUCH_AF = 1505, + CAMERA_CMD_DISABLE_POSTVIEW = 1109, + CAMERA_CMD_SET_FLIP = 1510, + CAMERA_CMD_SET_AEAWB_LOCk = 1111, + RECORDING_TAKE_PICTURE = 1201, + + HAL_START_CONTINUOUS_AF = 1551, + HAL_STOP_CONTINUOUS_AF = 1552, + HAL_AF_LAMP_CONTROL = 1555, +}; + +enum { + CAMERA_BURST_MEMORY_NONE= 0, + CAMERA_BURST_MEMORY_ION = 1, + CAMERA_BURST_MEMORY_HEAP = 2, +}; + +enum { + CAMERA_BURST_STOP_NONE=0, + CAMERA_BURST_STOP_REQ, + CAMERA_BURST_STOP_PROC, + CAMERA_BURST_STOP_END, +}; + +enum { + CAMERA_HEAP_PREVIEW = 0, + CAMERA_HEAP_POSTVIEW, +}; + +enum { + PREVIEW_THREAD_NONE = 0, + PREVIEW_THREAD_NORMAL, + PREVIEW_THREAD_CHANGED, +}; + +/* Structure for Focus Area */ +typedef struct FocusArea { + int top; + int left; + int bottom; + int right; + int weight; +} FocusArea; + +typedef struct FocusPoint { + int x; + int y; +} FocusPoint; + +typedef struct node_info { + int fd; + int width; + int height; + int format; + int planes; + int buffers; + enum v4l2_memory memory; + enum v4l2_buf_type type; + int ionClient; + ExynosBuffer buffer[VIDEO_MAX_FRAME]; + bool flagStart; +} node_info_t; + +/* for recording */ +typedef struct rec_src_buf { + ExynosBuffer *buf; + nsecs_t timestamp; +} rec_src_buf_t; + +/* for MC */ +enum { + CAMERA_EXT_PREVIEW, + CAMERA_EXT_CAPTURE_YUV, + CAMERA_EXT_CAPTURE_JPEG +}; + +struct addrs { + /* make sure that this is 4 byte. 
*/ + uint32_t type; + unsigned int fd_y; + unsigned int fd_cbcr; + unsigned int buf_index; + unsigned int reserved; +}; + +struct addrs_cap { + unsigned int addr_y; + unsigned int width; + unsigned int height; +}; + +class ISecCameraHardware : public virtual RefBase { +public: + virtual status_t setPreviewWindow(preview_stream_ops *w); + + virtual void setCallbacks(camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void* user) { + Mutex::Autolock lock(mLock); + mNotifyCb = notify_cb; + mDataCb = data_cb; + mDataCbTimestamp = data_cb_timestamp; + mGetMemoryCb = get_memory; + mCallbackCookie = user; + } + + virtual void enableMsgType(int32_t msgType) { + /* ALOGV("%s: msg=0x%X, ++msg=0x%X", __FUNCTION__, mMsgEnabled, msgType); */ + Mutex::Autolock lock(mLock); + mMsgEnabled |= msgType; + } + virtual void disableMsgType(int32_t msgType) { + /* ALOGV("%s: msg=0x%X, --msg=0x%X", __FUNCTION__, mMsgEnabled, msgType); */ + Mutex::Autolock lock(mLock); +#ifdef DEBUG_PREVIEW_CALLBACK + if (msgType & CAMERA_MSG_PREVIEW_FRAME) { + if (true == mPreviewCbStarted) { + ALOGD("disable ongoing preview callbacks"); + } + mPreviewCbStarted = false; + } +#endif + mMsgEnabled &= ~msgType; + } + virtual bool msgTypeEnabled(int32_t msgType) { + Mutex::Autolock lock(mLock); + return (mMsgEnabled & msgType); + } + + /* Preview */ + virtual status_t startPreview(); + virtual void stopPreview(); + virtual bool previewEnabled() { + Mutex::Autolock lock(mLock); + return mPreviewRunning; + } + + virtual status_t storeMetaDataInBuffers (bool enable); + + /* Recording */ + virtual status_t startRecording(); + virtual void stopRecording(); + virtual bool recordingEnabled() { + Mutex::Autolock lock(mLock); + return mRecordingRunning; + } + + virtual void releaseRecordingFrame(const void *opaque); + + /* Auto Focus*/ + virtual status_t autoFocus(); + virtual status_t cancelAutoFocus(); + + /* Picture */ + virtual status_t takePicture(); + virtual status_t cancelPicture(); + + /* Raw Capture */ + virtual status_t pictureThread_RAW(); + + /* Parameter */ + virtual status_t setParameters(const CameraParameters& params); + virtual CameraParameters getParameters() const { + Mutex::Autolock lock(mLock); + return mParameters; + } + virtual status_t sendCommand(int32_t command, int32_t arg1, int32_t arg2); + + virtual void release(); + virtual status_t dump(int fd) const; + virtual int getCameraId() const; + bool IsAutofocusRunning() { return mAutoFocusRunning; }; + +protected: + int mCameraId; + CameraParameters mParameters; + + bool mFlagANWindowRegister; + bool mPreviewInitialized; +#ifdef DEBUG_PREVIEW_CALLBACK + bool mPreviewCbStarted; /* for debugging */ +#endif + + camera_memory_t *mPreviewHeap; + int mPreviewHeapFd; + camera_memory_t *mPostviewHeap; + int mPostviewHeapFd; + camera_memory_t *mPostviewHeapTmp; + int mPostviewHeapTmpFd; + + camera_memory_t *mRawHeap; + camera_memory_t *mRecordingHeap; + int mRecordHeapFd; + camera_memory_t *mJpegHeap; + ExynosBuffer mPictureBuf; + ExynosBuffer mPictureBufDummy[SKIP_CAPTURE_CNT]; + camera_frame_metadata_t mFrameMetadata; + camera_face_t mFaces[NUM_OF_DETECTED_FACES]; + int mJpegHeapFd; + int mRawHeapFd; + int mHDRHeapFd; + +#if FRONT_ZSL + camera_memory_t *mFullPreviewHeap; + int mZSLindex; + sp rawImageMem; +#endif + bool mFullPreviewRunning; /* for FRONT_ZSL */ + + uint32_t mPreviewFrameSize; + uint32_t mRecordingFrameSize; + uint32_t mRawFrameSize; + uint32_t 
mPostviewFrameSize; + uint32_t mPictureFrameSize; + uint32_t mRawThumbnailSize; +#ifdef RECORDING_CAPTURE + uint32_t mRecordingPictureFrameSize; +#endif +#if FRONT_ZSL + uint32_t mFullPreviewFrameSize; +#endif + camera_memory_t *mHDRHeap; +#ifndef RCJUNG + camera_memory_t *mYUVHeap; +#endif + uint32_t mHDRFrameSize; + + image_rect_type mPreviewSize; + image_rect_type mOrgPreviewSize; + image_rect_type mPreviewWindowSize; + image_rect_type mRawSize; + image_rect_type mPictureSize; + image_rect_type mThumbnailSize; + image_rect_type mVideoSize; + image_rect_type mSensorSize; + image_rect_type mFLiteSize; /* for FLite */ + image_rect_type mFLiteCaptureSize; /* for FLite during capture */ + image_rect_type mPostviewSize; + + cam_pixel_format mPreviewFormat; + cam_pixel_format mPictureFormat; + cam_pixel_format mFliteFormat; + + int mJpegQuality; + int mFrameRate; + int mFps; + int mflipHorizontal; + int mflipVertical; + cam_scene_mode mSceneMode; + cam_flash_mode mFlashMode; + cam_focus_mode mFocusMode; + cam_focus_area mFocusArea; + bool mPrevMovieMode; + bool mMovieMode; + bool mZoomSupport; + bool mEnableDZoom; + bool mFastCaptureSupport; + + bool mNeedSizeChange; + bool mFastCaptureCalled; + + int mMultiFullCaptureNum; + + int mCaptureMode; + int mFirstStart; + int mTimerSet; + int mExifFnum; + int mExifShutterSpeed; + int mExifLightSource; + int mWeather; + long long int mCityId; + + int mZoomParamSet; + int mZoomSetVal; + int mZoomActionMethod; + int mZoomStatus; + int mZoomStatusBak; + int mLensStatus; + int mLensChecked; + + bool mCameraPower; + + char mAntiBanding[10]; + int mIonCameraClient; + int zoomRatioList[MAX_ZOOM_LEVEL]; + + ISecCameraHardware(int cameraId, camera_device_t *dev); + virtual ~ISecCameraHardware(); + + bool initialized(sp heap) const { + return heap != NULL && heap->base() != MAP_FAILED; + } + + virtual bool init(); + virtual void initDefaultParameters(); + + virtual status_t nativeSetParameters(cam_control_id id, int value, + bool recordingMode = false) = 0; + virtual status_t nativeGetParameters(cam_control_id id, int *value, + bool recordingMode = false) = 0; + + virtual image_rect_type nativeGetWindowSize() = 0; + virtual status_t nativeStartPreview() = 0; + virtual status_t nativeStartPreviewZoom() = 0; + virtual int nativeGetPreview() = 0; + virtual int nativeReleasePreviewFrame(int index) = 0; + virtual void nativeStopPreview() = 0; +#if FRONT_ZSL + virtual status_t nativeStartFullPreview() = 0; + virtual int nativeGetFullPreview() = 0; + virtual int nativeReleaseFullPreviewFrame(int index) = 0; + virtual void nativeStopFullPreview() = 0; + virtual void nativeForceStopFullPreview() = 0; +#endif + + virtual status_t nativeSetZoomRatio(int value) = 0; + virtual status_t nativePreviewCallback(int index, ExynosBuffer *grallocBuf) = 0; + virtual status_t nativeCSCPreview(int index, int type) = 0; + virtual status_t nativeStartRecording() = 0; + virtual status_t nativeCSCRecording(rec_src_buf_t *srcBuf, int dstIndex) = 0; + virtual status_t nativeStartRecordingZoom() = 0; + virtual void nativeStopRecording() = 0; +#ifdef RECORDING_CAPTURE + virtual bool nativeGetRecordingJpeg(ExynosBuffer *yuvBuf, + uint32_t width, uint32_t height) = 0; +#endif + virtual bool nativeSetAutoFocus() = 0; + virtual int nativeGetPreAutoFocus() = 0; + virtual int nativeGetAutoFocus() = 0; + virtual status_t nativeCancelAutoFocus() = 0; + + virtual bool nativeStartYUVSnapshot() = 0; + virtual bool nativeGetYUVSnapshot(int numF, int *postviewOffset) = 0; + virtual bool 
nativeStartSnapshot() = 0; + virtual bool nativeStartPostview() = 0; + virtual void nativeMakeJpegDump() = 0; + virtual bool nativeGetSnapshot(int numF, int *postviewOffset) = 0; + virtual bool nativeGetPostview(int numF) = 0; + virtual void nativeStopSnapshot() = 0; + virtual bool nativeStartDualCapture(int numF) = 0; + virtual status_t nativeCSCCapture(ExynosBuffer *srcBuf, ExynosBuffer *dstBuf) = 0; + virtual status_t nativeCSCRecordingCapture(ExynosBuffer *srcBuf, ExynosBuffer *dstBuf) = 0; + + virtual int nativegetWBcustomX() = 0; + virtual int nativegetWBcustomY() = 0; + + virtual int nativeSetFastCapture(bool onOff) = 0; + + virtual bool nativeCreateSurface(uint32_t width, uint32_t height, uint32_t halPixelFormat) = 0; + virtual bool nativeDestroySurface(void) = 0; + virtual bool nativeFlushSurfaceYUV420(uint32_t width, uint32_t height, + uint32_t size, uint32_t index, + int type = CAMERA_HEAP_POSTVIEW) = 0; + virtual bool nativeFlushSurface(uint32_t width, uint32_t height, + uint32_t size, uint32_t index, + int type=CAMERA_HEAP_PREVIEW) = 0; + virtual bool beautyLiveFlushSurface(uint32_t width, uint32_t height, + uint32_t size, uint32_t index, + int type=CAMERA_HEAP_PREVIEW) = 0; + +#ifdef RECORDING_CAPTURE + virtual bool conversion420to422(uint8_t *src, uint8_t *dest, int width, int height) = 0; + virtual bool conversion420Tto422(uint8_t *src, uint8_t *dest, int width, int height) = 0; +#endif + + virtual int dumpToFile(void *buf, uint32_t size, char *filePath) { + FILE *fd = NULL; + + fd = fopen(filePath, "wb"); + if (!fd) { + ALOGE("dumpToFile: error, fail to open %s", filePath); + return -1; + } + size_t nwrite = fwrite(buf, sizeof(char), size, fd); + fclose(fd); + ALOGD("dumped: %s (size=%dbytes)", filePath, nwrite); + + return 0; + } + + virtual int getMaxZoomLevel(void) = 0; + virtual int getMaxZoomRatio(void) = 0; + virtual float getZoomRatio(int zoomLevel) = 0; + +private: + typedef bool (ISecCameraHardware::*thread_loop)(void); + class CameraThread : public Thread { + ISecCameraHardware *mHardware; + thread_loop mThreadLoop; + char mName[32]; +#ifdef DEBUG_THREAD_EXIT_WAIT + pid_t threadId; +#endif + bool mExitRequested; + struct timeval mTimeStart; + struct timeval mTimeStop; + public: + CameraThread(ISecCameraHardware *hw, thread_loop loop, + const char *name = "camera:unamed_thread") : +#ifdef SINGLE_PROCESS + /* In single process mode this thread needs to be a java thread, + since we won't be calling through the binder. 
+ */ + Thread(true), +#else + Thread(false), +#endif + mHardware(hw), + mThreadLoop(loop), +#ifdef DEBUG_THREAD_EXIT_WAIT + threadId(-1), +#endif + mExitRequested(false) { + memset(&mTimeStart, 0, sizeof(mTimeStart)); + memset(&mTimeStop, 0, sizeof(mTimeStop)); + CLEAR(mName); + if (name) + strncpy(mName, name, sizeof(mName) - 1); + } + + virtual status_t run(const char *name = 0, + int32_t priority = PRIORITY_DEFAULT, + size_t stack = 0) { + memset(&mTimeStart, 0, sizeof(mTimeStart)); + memset(&mTimeStop, 0, sizeof(mTimeStop)); + mExitRequested = false; + + status_t err = Thread::run(mName, priority, stack); + if (CC_UNLIKELY(err)) { + mExitRequested = true; + } +#if defined(DEBUG_THREAD_EXIT_WAIT) && defined(HAVE_ANDROID_OS) + else { + threadId = getTid(); + ALOGV("Thread started (%s %d)", mName, threadId); + } +#endif + + return err; + } + + virtual void requestExit() { + mExitRequested = true; + Thread::requestExit(); + } + + status_t requestExitAndWait() { +#if defined(DEBUG_THREAD_EXIT_WAIT) && defined(HAVE_ANDROID_OS) + const int waitSliceTime = 5; /* 5ms */ + int timeOutMsec = 200; /* 200ms */ + + if (timeOutMsec < waitSliceTime) + timeOutMsec = waitSliceTime; + + int waitCnt = timeOutMsec / waitSliceTime; + if (timeOutMsec % waitSliceTime) + waitCnt++; + + requestExit(); + ALOGD("request thread exit (%s)", mName); + + pid_t tid = -1; + int cnt; + + for (cnt = 0; cnt <= waitCnt; cnt++) { + tid = getTid(); + if (-1 == tid) + break; + + if (!((cnt + 1) % 4)) { + ALOGV("wait for thread to be finished (%s). %d %d (%d)", mName, threadId, tid, cnt + 1); + } + usleep(waitSliceTime * 1000); + } + + if (-1 == tid) + ALOGD("thread exit %s (%s)", !cnt ? "already" : "", mName); + else + ALOGD("request thread exit again, blocked (%s)", mName); +#else + mExitRequested = true; +#endif + return Thread::requestExitAndWait(); + } + + bool exitRequested() { + return mExitRequested; + } + void calcFrameWaitTime(int maxFps) { + /* Calculate how long to wait between frames */ + if (mTimeStart.tv_sec == 0 && mTimeStart.tv_usec == 0) { + gettimeofday(&mTimeStart, NULL); + } else { + gettimeofday(&mTimeStop, NULL); + unsigned long timeUs = (mTimeStop.tv_sec * 1000000 + mTimeStop.tv_usec) + - (mTimeStart.tv_sec*1000000 + mTimeStart.tv_usec); + gettimeofday(&mTimeStart, NULL); + unsigned long framerateUs = 1000.0 / maxFps * 1000000; + unsigned long delay = framerateUs > timeUs ? 
framerateUs - timeUs : 0; + LOG_FPS("%s: time %ld us, delay %ld us", mName, timeUs, delay); + usleep(delay); + } + } + + private: + virtual bool threadLoop() { + /* loop until we need to quit */ + return (mHardware->*mThreadLoop)(); + } + }; + + mutable Mutex mLock; + mutable Mutex mPictureLock; + mutable Mutex mBurstThreadLock; + + mutable Mutex mAutoFocusLock; + mutable Condition mAutoFocusCondition; + bool mAutoFocusExit; + + mutable Mutex mBurstShotLock; + mutable Condition mBurstShotCondition; + bool mBurstShotExit; + + bool mPreviewRunning; + bool mRecordingRunning; + bool mAutoFocusRunning; + bool mPictureRunning; + bool mRecordingTrace; + bool mPictureStart; + bool mCaptureStarted; + bool mCancelCapture; + bool mPreviewFrameReceived; + + int roi_x_pos; + int roi_y_pos; + int roi_width; + int roi_height; + + /* protected by mLock */ + sp mPreviewThread; + sp mRecordingThread; + sp mAutoFocusThread; + sp mPictureThread; + + sp mHDRPictureThread; + sp mRecordingPictureThread; + sp mDumpPictureThread; + +#if FRONT_ZSL + sp mZSLPictureThread; +#endif + + /* Thread for zoom */ + sp mPostRecordThread; + sp mPreviewZoomThread; +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + sp mPreviewEnqueueThread; +#endif + +protected: + int32_t mMsgEnabled; + camera_request_memory mGetMemoryCb; + preview_stream_ops *mPreviewWindow; + camera_notify_callback mNotifyCb; + camera_data_callback mDataCb; + camera_data_timestamp_callback mDataCbTimestamp; + void *mCallbackCookie; + node_info_t mFliteNode; + + /* for recording */ + mutable Mutex mRecordSrcLock; + List mRecordSrcQ; + int mRecordSrcIndex; + rec_src_buf_t mRecordSrcSlot[FLITE_BUF_CNT]; + mutable Mutex mRecordDstLock; + int mRecordDstIndex; + camera_memory_t *mRecordDstHeap[REC_BUF_CNT][REC_PLANE_CNT]; + ExynosBuffer mRecordingDstBuf[REC_BUF_CNT]; + int mRecordDstHeapFd[REC_BUF_CNT][REC_PLANE_CNT]; + bool mRecordFrameAvailable[REC_BUF_CNT]; + int mRecordFrameAvailableCnt; + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + bool m_flagEnqueueThreadStop; + ExynosCameraList m_previewFrameQ; +#endif + + /* Focus Areas values */ + static const ssize_t FOCUS_AREA_TOP = -1000; + static const ssize_t FOCUS_AREA_LEFT = -1000; + static const ssize_t FOCUS_AREA_BOTTOM = 1000; + static const ssize_t FOCUS_AREA_RIGHT = 1000; + static const ssize_t FOCUS_AREA_WEIGHT_MIN = 1; + static const ssize_t FOCUS_AREA_WEIGHT_MAX = 1000; + bool allocMemSinglePlane(int ionClient, ExynosBuffer *buf, + int index, bool flagCache = true); + void freeMemSinglePlane(ExynosBuffer *buf, int index); + bool allocMem(int ionClient, ExynosBuffer *buf, int cacheIndex = 0xff); + void freeMem(ExynosBuffer *buf); + + /* for recording */ + void mInitRecSrcQ(void); + void mInitRecDstBuf(void); + int getRecSrcBufSlotIndex(void); + void mPushRecSrcQ(rec_src_buf_t *buf); + bool mPopRecSrcQ(rec_src_buf_t *buf); + int mSizeOfRecSrcQ(void); + int getRecDstBufIndex(void); + void setAvailDstBufIndex(int index); + int getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf, + bool flagAndroidColorFormat = false); + +private: + + int mDropFrameCount; + bool mbFirst_preview_started; + int mMaxFrameRate; + + bool mDisablePostview; + int mLastIndex; +#ifdef DUMP_FIRST_PREVIEW_FRAME + int mFlagFirstFrameDump; +#endif + int mAntibanding60Hz; + int mFaceDetectionStatus; + float m_calculatedHorizontalViewAngle; + + void setDropFrame(int count); + void setDropUnstableInitFrames(); + + bool autoFocusCheckAndWaitPreview() { + for (int i = 0; i < 50; i++) { + if (mPreviewInitialized) + return true; + + 
usleep(10000); /* 10ms*/ + } + + return false; + } + status_t setAELock(const CameraParameters& params); + status_t setAWBLock(const CameraParameters& params); + status_t setRecordingMode(const CameraParameters& params); + status_t setPreviewSize(const CameraParameters& params); + status_t setPreviewFormat(const CameraParameters& params); + status_t setPictureSize(const CameraParameters& params); + status_t setPictureFormat(const CameraParameters& params); + status_t setThumbnailSize(const CameraParameters& params); + status_t setJpegThumbnailQuality(const CameraParameters& params); + status_t setJpegQuality(const CameraParameters& params); + status_t setFrameRate(const CameraParameters& params); + status_t setRotation(const CameraParameters& params); + status_t setVideoSize(const CameraParameters& params); + status_t setPreviewFrameRate(const CameraParameters& params); + + status_t setSceneMode(const CameraParameters& params); + + /* Focus Area start */ + status_t findCenter(struct FocusArea *focusArea, + struct FocusPoint *center); + status_t normalizeArea(struct FocusPoint *center); + status_t checkArea(ssize_t top, ssize_t left, ssize_t bottom, + ssize_t right, ssize_t weight); + status_t parseAreas(const char *area, size_t areaLength, + struct FocusArea *focusArea, int *num_areas); + status_t setFocusAreas(const CameraParameters& params); + /* Focus Area end */ + + status_t setIso(const CameraParameters& params); + status_t setBrightness(const CameraParameters& params); + status_t setWhiteBalance(const CameraParameters& params); + status_t setFlash(const CameraParameters& params); + status_t setMetering(const CameraParameters& params); + status_t setMeteringAreas(const CameraParameters& params); + status_t setFocusMode(const CameraParameters& params); + status_t setEffect(const CameraParameters& params); + status_t setZoom(const CameraParameters& params); + status_t setSharpness(const CameraParameters& params); + status_t setSaturation(const CameraParameters& params); + status_t setContrast(const CameraParameters& params); + status_t setColorAdjust(const CameraParameters& params); + status_t setAntiShake(const CameraParameters& params); + status_t setBlur(const CameraParameters& params); + status_t setAntiBanding(); + status_t setAntiBanding(const CameraParameters& params); + status_t setGps(const CameraParameters& params); + int checkFnumber(int f_val, int zoomLevel); + status_t setDzoom(const CameraParameters& params); + status_t doCameraCapture(); +#ifdef RECORDING_CAPTURE + status_t doRecordingCapture(); +#endif + + const image_rect_type *getFrameSizeSz(const image_rect_type *sizes, + int count, uint32_t width, uint32_t height); + const image_rect_type *getFrameSizeRatio(const image_rect_type *sizes, + int count, uint32_t width, uint32_t height); + + void checkHorizontalViewAngle(void); + void setHorizontalViewAngle(int pictureW, int pictureH); + float findHorizontalViewAngleByRatio(uint32_t ratio); + float getHorizontalViewAngle(void); + float getVerticalViewAngle(void); + + void setZoomRatioList(int *list, int len, float maxZoomRatio); + status_t getZoomRatioList(String8 & string8Buf, int maxZoom, int maxZoomRatio, int *list); + + bool previewThread(); + bool recordingThread(); + bool autoFocusThread(); + bool pictureThread(); + + bool HDRPictureThread(); + bool RecordingPictureThread(); + bool dumpPictureThread(); + +#if FRONT_ZSL + bool zslpictureThread(); +#endif + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + status_t m_clearPreviewFrameList(ExynosCameraList *queue); 
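+    /* under USE_DEDICATED_PREVIEW_ENQUEUE_THREAD, dequeued preview buffers are pushed to
+       m_previewFrameQ for mPreviewEnqueueThread; this helper presumably drains that queue
+       when preview stops */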
+#endif + camera_device_t *mHalDevice; + +public: + int mPostRecordIndex; + nsecs_t mRecordTimestamp; + nsecs_t mLastRecordTimestamp; + + bool mPostRecordExit; + mutable Mutex mPostRecordLock; + mutable Condition mPostRecordCondition; + bool postRecordThread(); + bool previewZoomThread(); + bool previewEnqueueThread(); +}; +}; /* namespace android */ + +#endif /* ANDROID_HARDWARE_ISECCAMERAHARDWARE_H */ diff --git a/libcamera_external/NOTICE b/libcamera_external/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libcamera_external/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libcamera_external/SecCameraCommonConfig.h b/libcamera_external/SecCameraCommonConfig.h new file mode 100644 index 0000000..56e16a2 --- /dev/null +++ b/libcamera_external/SecCameraCommonConfig.h @@ -0,0 +1,299 @@ +#ifndef EXYNOS_CAMERA_COMMON_CONFIG_H__ +#define EXYNOS_CAMERA_COMMON_CONFIG_H__ + +#include + +#include +#include "ISecCameraHardware.h" + + +#define BUILD_DATE() ALOGE("Build Date is (%s) (%s) ", __DATE__, __TIME__) +#define WHERE_AM_I() ALOGE("[(%s)%d] ", __FUNCTION__, __LINE__) +#define LOG_DELAY() usleep(100000) + +#define TARGET_ANDROID_VER_MAJ 4 +#define TARGET_ANDROID_VER_MIN 4 + +/* ---------------------------------------------------------- */ +/* log */ +#define XPaste(s) s +#define Paste2(a, b) XPaste(a)b +#define ID "[CAM_ID(%d)] - " +#define ID_PARM mCameraId + +#define CLOGD(fmt, ...) \ + ALOGD(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGV(fmt, ...) \ + ALOGV(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGW(fmt, ...) \ + ALOGW(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGE(fmt, ...) \ + ALOGE(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGI(fmt, ...) \ + ALOGI(Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__) + +#define CLOGT(cnt, fmt, ...) \ + if (cnt != 0) CLOGI(Paste2("#TRACE#", fmt), ##__VA_ARGS__) \ + +#define CLOG_ASSERT(fmt, ...) 
\ + android_printAssert(NULL, LOG_TAG, Paste2(ID, fmt), ID_PARM, ##__VA_ARGS__); + +/* ---------------------------------------------------------- */ +/* Align */ +#define ROUND_UP(x, a) (((x) + ((a)-1)) / (a) * (a)) +#define ROUND_OFF_HALF(x, dig) ((float)(floor((x) * pow(10.0f, dig) + 0.5) / pow(10.0f, dig))) + +/* ---------------------------------------------------------- */ +/* Node Prefix */ +#define NODE_PREFIX "/dev/video" + +/* ---------------------------------------------------------- */ +/* Max Camera Name Size */ +#define EXYNOS_CAMERA_NAME_STR_SIZE (256) + +/* ---------------------------------------------------------- */ +/* Linux type */ +#ifndef _LINUX_TYPES_H +typedef unsigned char uint8_t; +typedef unsigned short uint16_t; +typedef signed short int16_t; +typedef signed int int32_t; +typedef unsigned int uint32_t; +/*typedef unsigned long long uint64_t;*/ +#endif + +/* ---------------------------------------------------------- */ +/* INCLUDE */ +/* ---------------------------------------------------------- */ + +/* ---------------------------------------------------------- */ +/* SENSOR ENUM */ +/* ---------------------------------------------------------- */ + +typedef enum +{ + SENSOR_NAME_NOTHING = 0, + SENSOR_NAME_S5K3H2 = 1, + SENSOR_NAME_S5K6A3 = 2, + SENSOR_NAME_S5K3H5 = 3, + SENSOR_NAME_S5K3H7 = 4, + SENSOR_NAME_S5K3H7_SUNNY = 5, + SENSOR_NAME_S5K3H7_SUNNY_2M = 6, + SENSOR_NAME_S5K6B2 = 7, + SENSOR_NAME_S5K3L2 = 8, + SENSOR_NAME_S5K4E5 = 9, + SENSOR_NAME_S5K2P2 = 10, + SENSOR_NAME_S5K8B1 = 11, + SENSOR_NAME_S5K1P2 = 12, + SENSOR_NAME_S5K4H5 = 13, + SENSOR_NAME_S5K3M2 = 14, + SENSOR_NAME_S5K2P2_12M = 15, + SENSOR_NAME_S5K6D1 = 16, + SENSOR_NAME_S5K5E3 = 17, + SENSOR_NAME_S5K2T2 = 18, + SENSOR_NAME_S5K2P3 = 19, + SENSOR_NAME_S5K2P8 = 20, + SENSOR_NAME_S5K4E6 = 21, + SENSOR_NAME_S5K5E2 = 22, + SENSOR_NAME_S5K3P3 = 23, + SENSOR_NAME_S5K4H5YC = 24, + SENSOR_NAME_S5K2X8 = 28, + SENSOR_NAME_S5K2L1 = 29, + SENSOR_NAME_S5K4EC = 57, + + SENSOR_NAME_IMX135 = 101, /* 101 ~ 200 Sony sensors */ + SENSOR_NAME_IMX134 = 102, + SENSOR_NAME_IMX175 = 103, + SENSOR_NAME_IMX240 = 104, + SENSOR_NAME_IMX220 = 105, + SENSOR_NAME_IMX228 = 106, + SENSOR_NAME_IMX219 = 107, + SENSOR_NAME_IMX230 = 108, + SENSOR_NAME_IMX260 = 109, + + SENSOR_NAME_SR261 = 201, /* 201 ~ 300 Other vendor sensors */ + SENSOR_NAME_OV5693 = 202, + SENSOR_NAME_SR544 = 203, + SENSOR_NAME_OV5670 = 204, + SENSOR_NAME_DSIM = 205, + SENSOR_NAME_VIRTUAL = 206, + + SENSOR_NAME_CUSTOM = 301, + SENSOR_NAME_SR200 = 302, // SoC Module + SENSOR_NAME_SR352 = 303, + SENSOR_NAME_SR130PC20 = 304, + SENSOR_NAME_S5K5E6 = 305, + SENSOR_NAME_VIRTUAL_ZEBU = 901, + SENSOR_NAME_END, +}IS_SensorNameEnum; + +enum CAMERA_ID { + CAMERA_ID_BACK = 0, + CAMERA_ID_FRONT = 1, + CAMERA_ID_MAX, +}; + +enum YUV_RANGE { + YUV_FULL_RANGE = 0, + YUV_LIMITED_RANGE = 1, +}; + + +enum pipeline { + PIPE_FLITE = 0, + PIPE_3AA, + PIPE_3AC, + PIPE_3AP, + PIPE_ISP, + PIPE_ISPC, + PIPE_ISPP, + PIPE_SCP, + PIPE_3AA_ISP, + PIPE_POST_3AA_ISP, + PIPE_DIS, + PIPE_SCC, + PIPE_GSC, + PIPE_GSC_VIDEO, + PIPE_GSC_PICTURE, + PIPE_JPEG, + MAX_PIPE_NUM, + + /* + * PIPE_XXX_FRONT are deprecated define. + * Don't use this. 
(just let for common code compile) + */ + PIPE_FLITE_FRONT = 100, + PIPE_3AA_FRONT, + PIPE_3AC_FRONT, + PIPE_3AP_FRONT, + PIPE_ISP_FRONT, + PIPE_ISPC_FRONT, + PIPE_ISPP_FRONT, + PIPE_SCP_FRONT, + PIPE_3AA_ISP_FRONT, + PIPE_POST_3AA_ISP_FRONT, + PIPE_DIS_FRONT, + PIPE_SCC_FRONT, + PIPE_GSC_FRONT, + PIPE_GSC_VIDEO_FRONT, + PIPE_GSC_PICTURE_FRONT, + PIPE_JPEG_FRONT, + MAX_PIPE_NUM_FRONT, + + PIPE_FLITE_REPROCESSING = 200, + PIPE_3AA_REPROCESSING, + PIPE_3AC_REPROCESSING, + PIPE_3AP_REPROCESSING, + PIPE_ISP_REPROCESSING, + PIPE_ISPC_REPROCESSING, + PIPE_ISPP_REPROCESSING, + PIPE_SCC_REPROCESSING, + PIPE_SCP_REPROCESSING, + PIPE_GSC_REPROCESSING, + PIPE_JPEG_REPROCESSING, + MAX_PIPE_NUM_REPROCESSING +}; + + +/* ---------------------------------------------------------- */ +/* From Parameter Header */ +namespace CONFIG_MODE { + enum MODE { + NORMAL = 0x00, + HIGHSPEED_60, + HIGHSPEED_120, + HIGHSPEED_240, + MAX + }; +}; + +/* camera errors */ +enum { + SEC_CAMERA_ERROR_PREVIEWFRAME_TIMEOUT = 1001, + SEC_CAMERA_ERROR_DATALINE_FAIL = 2000 +}; + +struct CONFIG_PIPE { + uint32_t prepare[MAX_PIPE_NUM_REPROCESSING]; +}; + +struct CONFIG_BUFFER { + uint32_t num_bayer_buffers; + uint32_t init_bayer_buffers; + uint32_t num_3aa_buffers; + uint32_t num_hwdis_buffers; + uint32_t num_preview_buffers; + uint32_t num_preview_cb_buffers; + uint32_t num_picture_buffers; + uint32_t num_reprocessing_buffers; + uint32_t num_recording_buffers; + uint32_t num_fastaestable_buffer; + uint32_t reprocessing_bayer_hold_count; + uint32_t front_num_bayer_buffers; + uint32_t front_num_picture_buffers; + uint32_t preview_buffer_margin; +#ifdef USE_CAMERA2_API_SUPPORT + uint32_t num_min_block_request; + uint32_t num_max_block_request; +#endif +}; + +struct CONFIG_BUFFER_PIPE { + struct CONFIG_PIPE pipeInfo; + struct CONFIG_BUFFER bufInfo; +}; + +struct ExynosConfigInfo { + struct CONFIG_BUFFER_PIPE *current; + struct CONFIG_BUFFER_PIPE info[CONFIG_MODE::MAX]; + uint32_t mode; +}; + +/* ---------------------------------------------------------- */ +/* Activity Controller */ +enum auto_focus_type { + AUTO_FOCUS_SERVICE = 0, + AUTO_FOCUS_HAL, +}; + +#ifdef SENSOR_NAME_GET_FROM_FILE +#define SENSOR_NAME_PATH_BACK "/sys/class/camera/rear/rear_sensorid" +#define SENSOR_NAME_PATH_FRONT "/sys/class/camera/front/front_sensorid" +#endif + +#ifdef SENSOR_FW_GET_FROM_FILE +#define SENSOR_FW_PATH_BACK "/sys/class/camera/rear/rear_camfw" +#define SENSOR_FW_PATH_FRONT "/sys/class/camera/front/front_camfw" +#endif + +#if defined(SUPPORT_X8_ZOOM) +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_X8_MAX +#define MAX_ZOOM_RATIO (8000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_X8_MAX /* CTS and 3rd-Party */ +#elif defined(SUPPORT_X8_ZOOM_AND_800STEP) +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_X8_800STEP_MAX +#define MAX_ZOOM_RATIO (8000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_X8_MAX /* CTS and 3rd-Party */ +#elif defined(SUPPORT_X4_ZOOM_AND_400STEP) +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_X4_400STEP_MAX +#define MAX_ZOOM_RATIO (4000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_MAX /* CTS and 3rd-Party */ +#else +#define MAX_ZOOM_LEVEL ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO (4000) +#define MAX_ZOOM_LEVEL_FRONT ZOOM_LEVEL_MAX +#define MAX_ZOOM_RATIO_FRONT (4000) +#define MAX_BASIC_ZOOM_LEVEL ZOOM_LEVEL_MAX /* CTS and 3rd-Party */ 
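+/* the *_ZOOM_RATIO values above appear to be in 1/1000 steps
+   (4000 -> 4.0x, 8000 -> 8.0x), i.e. 1000 corresponds to 1.0x zoom */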
+#endif +#endif /* EXYNOS_CAMERA_COMMON_CONFIG_H__ */ + diff --git a/libcamera_external/SecCameraHardware.cpp b/libcamera_external/SecCameraHardware.cpp new file mode 100644 index 0000000..d09e28e --- /dev/null +++ b/libcamera_external/SecCameraHardware.cpp @@ -0,0 +1,5127 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! + * \file SecCameraHardware.cpp + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_SECCAMERAHARDWARE_CPP +#define ANDROID_HARDWARE_SECCAMERAHARDWARE_CPP + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "SecCameraHardware" + +#include "SecCameraHardware.h" + +#define CLEAR(x) memset(&(x), 0, sizeof(x)) + +#define CHECK_ERR(x, log) if (CC_UNLIKELY(x < 0)) { \ + ALOGE log; \ + return false; \ + } + +#define CHECK_ERR_N(x, log) if (CC_UNLIKELY(x < 0)) { \ + ALOGE log; \ + return x; \ + } + +#define CHECK_ERR_GOTO(out, x, log) if (CC_UNLIKELY(x < 0)) { \ + ALOGE log; \ + goto out; \ + } + +#ifdef __GNUC__ +#define __swap16gen(x) __statement({ \ + __uint16_t __swap16gen_x = (x); \ + \ + (__uint16_t)((__swap16gen_x & 0xff) << 8 | \ + (__swap16gen_x & 0xff00) >> 8); \ +}) +#else /* __GNUC__ */ +/* Note that these macros evaluate their arguments several times. */ +#define __swap16gen(x) \ + (__uint16_t)(((__uint16_t)(x) & 0xff) << 8 | ((__uint16_t)(x) & 0xff00) >> 8) +#endif + + + +#define MAX_THUMBNAIL_SIZE (60000) + +#define EXYNOS_MEM_DEVICE_DEV_NAME "/dev/exynos-mem" + +#ifdef SENSOR_NAME_GET_FROM_FILE +int g_rearSensorId = -1; +int g_frontSensorId = -1; +#endif + +namespace android { + +struct record_heap { + uint32_t type; // make sure that this is 4 byte. 
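+    /* physical addresses of the Y and CbCr planes for this record frame */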
+ phyaddr_t y; + phyaddr_t cbcr; + uint32_t buf_index; + uint32_t reserved; +}; + +gralloc_module_t const* SecCameraHardware::mGrallocHal; + +SecCameraHardware::SecCameraHardware(int cameraId, camera_device_t *dev) + : ISecCameraHardware(cameraId, dev) +{ + if (cameraId == CAMERA_ID_BACK) + mFliteFormat = CAM_PIXEL_FORMAT_YUV422I; + else + mFliteFormat = CAM_PIXEL_FORMAT_YUV422I; + + mPreviewFormat = CAM_PIXEL_FORMAT_YUV420SP; + /* set suitably */ + mRecordingFormat = CAM_PIXEL_FORMAT_YUV420; + mPictureFormat = CAM_PIXEL_FORMAT_JPEG; + + mZoomRatio = 1000.00f; + mFimc1CSC = NULL; + mFimc2CSC = NULL; + + CLEAR(mWindowBuffer); + + if (!mGrallocHal) { + int ret = 0; + ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&mGrallocHal); + if (CC_UNLIKELY(ret)) + ALOGE("SecCameraHardware: error, fail on loading gralloc HAL"); + } + + createInstance(cameraId); +} + +SecCameraHardware::~SecCameraHardware() +{ +} + +SecCameraHardware::FLiteV4l2::FLiteV4l2() +{ + mCameraFd = -1; + mBufferCount = 0; + mStreamOn = false; + mCmdStop = 0; + mFastCapture = false; +} + +SecCameraHardware::FLiteV4l2::~FLiteV4l2() +{ +} + +/* HACK */ +#define SENSOR_SCENARIO_MASK 0xF0000000 +#define SENSOR_SCENARIO_SHIFT 28 +#define SENSOR_MODULE_MASK 0x0FFFFFFF +#define SENSOR_MODULE_SHIFT 0 + +int SecCameraHardware::FLiteV4l2::init(const char *devPath, const int cameraId) +{ + int err; + int sensorId = 0; + + mCameraFd = open(devPath, O_RDWR); + mCameraId = cameraId; + CHECK_ERR_N(mCameraFd, ("FLiteV4l2 init: error %d, open %s (%d - %s)", mCameraFd, devPath, errno, strerror(errno))); + CLOGV("DEBUG (%s) : %s, fd(%d) ", devPath, mCameraFd); + +#if defined(USE_VIDIOC_QUERY_CAP) + /* fimc_v4l2_querycap */ + struct v4l2_capability cap; + CLEAR(cap); + err = ioctl(mCameraFd, VIDIOC_QUERYCAP, &cap); + CHECK_ERR_N(err, ("FLiteV4l2 init: error %d, VIDIOC_QUERYCAP", err)); + + if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)) { + ALOGE("FLiteV4l2 init: error, no capture devices"); + return -1; + } +#endif + + /* fimc_v4l2_enuminput */ + struct v4l2_input input; + CLEAR(input); + /* input.index = cameraId; */ + + /* fimc_v4l2_s_input */ + sensorId = getSensorId(cameraId); + CLOGV("DEBUG (FLiteV4l2::init) : sensor Id(%d) ", __func__, sensorId); + + input.index = (2 << SENSOR_SCENARIO_SHIFT) | sensorId; // external camera / sensor domule id + + err = ioctl(mCameraFd, VIDIOC_S_INPUT, &input); + CHECK_ERR_N(err, ("FLiteV4l2 init: error %d, VIDIOC_S_INPUT", err)); + +#ifdef FAKE_SENSOR + fakeIndex = 0; + fakeByteData = 0; +#endif + return 0; +} + +void SecCameraHardware::FLiteV4l2::deinit() +{ + if (mCameraFd >= 0) { + close(mCameraFd); + mCameraFd = -1; + } + mBufferCount = 0; + CLOGV("DEBUG (FLiteV4l2::deinit) : out "); +} + +int SecCameraHardware::FLiteV4l2::startPreview(image_rect_type *fliteSize, + cam_pixel_format format, int numBufs, int fps, bool movieMode, node_info_t *mFliteNode) +{ + /* fimc_v4l2_enum_fmt */ + int err; + bool found = false; + +#if defined(USE_VIDIOC_ENUM_FORMAT) + struct v4l2_fmtdesc fmtdesc; + CLEAR(fmtdesc); + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + fmtdesc.index = 0; + + while ((err = ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0) { + if (fmtdesc.pixelformat == (uint32_t)format) { + ALOGV("FLiteV4l2 start: %s", fmtdesc.description); + found = true; + break; + } + fmtdesc.index++; + } + if (!found) { + ALOGE("FLiteV4l2 start: error, unsupported pixel format (%c%c%c%c)" + " fmtdesc.pixelformat = %d, %s, err=%d", format, format >> 8, format >> 16, format >> 24, + 
fmtdesc.pixelformat, fmtdesc.description, err); + return -1; + } +#endif + +#ifdef USE_CAPTURE_MODE + /* + capture_mode = oprmode + oprmode = 0 (preview) + oprmode = 1 (single capture) + oprmode = 2 (HDR capture) + */ + err = sctrl(CAM_CID_CAPTURE_MODE, false); + CHECK_ERR_N(err, ("FLiteV4l2 sctrl: error %d, CAM_CID_CAPTURE_MODE", err)); +#endif + + v4l2_field field; + if (movieMode) + field = (enum v4l2_field)IS_MODE_PREVIEW_VIDEO; + else + field = (enum v4l2_field)IS_MODE_PREVIEW_STILL; + + /* fimc_v4l2_s_fmt */ + struct v4l2_format v4l2_fmt; + CLEAR(v4l2_fmt); + + CLOGD("DEBUG (FLiteV4l2::startPreview) : setting( size: %dx%d)", fliteSize->width, fliteSize->height); + v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_fmt.fmt.pix_mp.width = fliteSize->width; + v4l2_fmt.fmt.pix_mp.height = fliteSize->height; + v4l2_fmt.fmt.pix_mp.pixelformat = format; + v4l2_fmt.fmt.pix_mp.field = field; + err = ioctl(mCameraFd, VIDIOC_S_FMT, &v4l2_fmt); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_S_FMT", err)); + +#ifdef CACHEABLE + sctrl(V4L2_CID_CACHEABLE, 1); +#endif + /* setting fps */ + CLOGD("DEBUG (FLiteV4l2::startPreview) : setting( fps: %d)", fps); + + struct v4l2_streamparm streamParam; + streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + streamParam.parm.capture.timeperframe.numerator = 1; + streamParam.parm.capture.timeperframe.denominator = fps; + + CLOGI("INFO (FLiteV4l2::startPreview) : set framerate (denominator=%d)", fps); + err = sparm(&streamParam); + + CHECK_ERR_N(err, ("ERR(%s): sctrl V4L2_CID_CAM_FRAME_RATE(%d) value(%d)", __FUNCTION__, err, fps)); + + /* sctrl(V4L2_CID_EMBEDDEDDATA_ENABLE, 0); */ + + /* fimc_v4l2_reqbufs */ + struct v4l2_requestbuffers req; + CLEAR(req); + req.count = numBufs; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; +#ifdef USE_USERPTR + req.memory = V4L2_MEMORY_USERPTR; +#else + req.memory = V4L2_MEMORY_MMAP; +#endif + err = ioctl(mCameraFd, VIDIOC_REQBUFS, &req); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_REQBUFS", err)); + + mBufferCount = (int)req.count; + + /* setting mFliteNode for Q/DQ */ + mFliteNode->width = v4l2_fmt.fmt.pix.width; + mFliteNode->height = v4l2_fmt.fmt.pix.height; + mFliteNode->format = v4l2_fmt.fmt.pix.pixelformat; + mFliteNode->planes = NUM_FLITE_BUF_PLANE + SPARE_PLANE; +#ifdef SUPPORT_64BITS + mFliteNode->memory = (enum v4l2_memory)req.memory; + mFliteNode->type = (enum v4l2_buf_type)req.type; +#else + mFliteNode->memory = req.memory; + mFliteNode->type = req.type; +#endif + mFliteNode->buffers= numBufs; + + return 0; +} + +int SecCameraHardware::FLiteV4l2::startCapture(image_rect_type *img, + cam_pixel_format format, int numBufs, int capKind) +{ + /* fimc_v4l2_enum_fmt */ + int err; +#if defined(USE_VIDIOC_ENUM_FORMAT) + bool found = false; + struct v4l2_fmtdesc fmtdesc; + CLEAR(fmtdesc); + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + fmtdesc.index = 0; + + while ((err = ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0) { + if (fmtdesc.pixelformat == (uint32_t)format) { + ALOGV("FLiteV4l2 start: %s", fmtdesc.description); + found = true; + break; + } + + fmtdesc.index++; + } + + if (!found) { + ALOGE("FLiteV4l2 start: error, unsupported pixel format (%c%c%c%c)" + " fmtdesc.pixelformat = %d, %s, err=%d", format, format >> 8, format >> 16, format >> 24, + fmtdesc.pixelformat, fmtdesc.description, err); + return -1; + } +#endif + + /* fimc_v4l2_s_fmt */ + struct v4l2_format v4l2_fmt; + CLEAR(v4l2_fmt); + +#ifdef USE_CAPTURE_MODE + /* + capture_mode = oprmode + oprmode = 0 (preview) + 
oprmode = 1 (single capture) + oprmode = 2 (HDR capture) + */ + err = sctrl(CAM_CID_CAPTURE_MODE, true); + CHECK_ERR_N(err, ("ERR(%s): sctrl V4L2_CID_CAMERA_CAPTURE(%d) enable failed", __FUNCTION__, err)); +#endif +#ifdef USE_CAPTURE_FPS_CHANGE + /* setting fps */ + struct v4l2_streamparm streamParam; + streamParam.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + streamParam.parm.capture.timeperframe.numerator = 1; + streamParam.parm.capture.timeperframe.denominator = CAPTURE_FRAMERATE; + + CLOGI("INFO (FLiteV4l2::startCapture) : set framerate (denominator=%d)", CAPTURE_FRAMERATE); + err = sparm(&streamParam); + + CHECK_ERR_N(err, ("ERR(%s): set framerate (denominator=%d) value(%d)", __FUNCTION__, CAPTURE_FRAMERATE, err)); +#endif //#if 1 + + CLOGD("DEBUG(FLiteV4l2::startCapture): requested capture size %dx%d %d", img->width, img->height, format); + + v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_fmt.fmt.pix_mp.width = img->width; + v4l2_fmt.fmt.pix_mp.height = img->height; + v4l2_fmt.fmt.pix_mp.pixelformat = format; + + err = ioctl(mCameraFd, VIDIOC_S_FMT, &v4l2_fmt); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_S_FMT", err)); + + /* fimc_v4l2_reqbufs */ + struct v4l2_requestbuffers req; + CLEAR(req); + req.count = SKIP_CAPTURE_CNT; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; +#ifdef USE_USERPTR + req.memory = V4L2_MEMORY_USERPTR; +#else + req.memory = V4L2_MEMORY_MMAP; +#endif + + err = ioctl(mCameraFd, VIDIOC_REQBUFS, &req); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_REQBUFS", err)); + + mBufferCount = (int)req.count; + + return 0; +} + +int SecCameraHardware::FLiteV4l2::startRecord(image_rect_type *img, image_rect_type *videoSize, + cam_pixel_format format, int numBufs) +{ + /* fimc_v4l2_enum_fmt */ + int err; + bool found = false; + struct v4l2_fmtdesc fmtdesc; + CLEAR(fmtdesc); + fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + fmtdesc.index = 0; + + while ((err = ioctl(mCameraFd, VIDIOC_ENUM_FMT, &fmtdesc)) == 0) { + if (fmtdesc.pixelformat == (uint32_t)format) { + CLOGV("FLiteV4l2 start: %s", fmtdesc.description); + found = true; + break; + } + fmtdesc.index++; + } + + if (!found) { + CLOGE("FLiteV4l2 start: error, unsupported pixel format (%c%c%c%c)" + " fmtdesc.pixelformat = %d, %s, err=%d", format, format >> 8, format >> 16, format >> 24, + fmtdesc.pixelformat, fmtdesc.description, err); + return -1; + } + + /* fimc_v4l2_s_fmt */ + struct v4l2_format v4l2_fmt; + CLEAR(v4l2_fmt); + + v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_fmt.fmt.pix.width = videoSize->width; + v4l2_fmt.fmt.pix.height = videoSize->height; + v4l2_fmt.fmt.pix.pixelformat = format; + v4l2_fmt.fmt.pix.priv = V4L2_PIX_FMT_MODE_CAPTURE; + + err = ioctl(mCameraFd, VIDIOC_S_FMT, &v4l2_fmt); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_S_FMT", err)); + + v4l2_fmt.type = V4L2_BUF_TYPE_PRIVATE; + v4l2_fmt.fmt.pix.width = videoSize->width; + v4l2_fmt.fmt.pix.height = videoSize->height; + v4l2_fmt.fmt.pix.pixelformat = format; + v4l2_fmt.fmt.pix.field = (enum v4l2_field)IS_MODE_PREVIEW_STILL; + + CLOGD("Sensor FMT %dx%d", v4l2_fmt.fmt.pix.width, v4l2_fmt.fmt.pix.height); + + err = ioctl(mCameraFd, VIDIOC_S_FMT, &v4l2_fmt); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_S_FMT", err)); + + /* fimc_v4l2_reqbufs */ + struct v4l2_requestbuffers req; + CLEAR(req); + req.count = numBufs; + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; +#ifdef USE_USERPTR + req.memory = V4L2_MEMORY_USERPTR; +#else + req.memory = V4L2_MEMORY_MMAP; +#endif + + err = 
ioctl(mCameraFd, VIDIOC_REQBUFS, &req); + CHECK_ERR_N(err, ("FLiteV4l2 start: error %d, VIDIOC_REQBUFS", err)); + + mBufferCount = (int)req.count; + + return 0; +} + +int SecCameraHardware::FLiteV4l2::reqBufZero(node_info_t *mFliteNode) +{ + int err; + /* fimc_v4l2_reqbufs */ + struct v4l2_requestbuffers req; + CLEAR(req); + req.count = 0; + req.type = mFliteNode->type; + req.memory = mFliteNode->memory; + CLOGV("DEBUG(FLiteV4l2::reqBufZero): type[%d] memory[%d]", req.type, req.memory); + err = ioctl(mCameraFd, VIDIOC_REQBUFS, &req); + CHECK_ERR_N(err, ("FLiteV4l2 reqBufZero: error %d", err)); + return 1; +} + +int SecCameraHardware::FLiteV4l2::querybuf2(unsigned int index, int planeCnt, ExynosBuffer *buf) +{ + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + int err; + + CLEAR(v4l2_buf); + CLEAR(planes); + + /* loop for buffer count */ + v4l2_buf.index = index; + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_buf.memory = V4L2_MEMORY_MMAP; + v4l2_buf.length = planeCnt; + v4l2_buf.m.planes = planes; + + err = ioctl(mCameraFd , VIDIOC_QUERYBUF, &v4l2_buf); + if (err < 0) { + CLOGE("ERR(FLiteV4l2::querybuf2)(%d): error %d, index %d", __LINE__, err, index); + return 0; + } + + /* loop for planes */ + for (int i = 0; i < planeCnt; i++) { + unsigned int length = v4l2_buf.m.planes[i].length; + unsigned int offset = v4l2_buf.m.planes[i].m.mem_offset; + char *vAdr = (char *) mmap(0, + v4l2_buf.m.planes[i].length, + PROT_READ | PROT_WRITE, MAP_SHARED, + mCameraFd, offset); + CLOGV("DEBUG(%s): [%d][%d] length %d, offset %d vadr %p", __FUNCTION__, index, i, length, offset, vAdr); + if (vAdr == NULL) { + CLOGE("ERR(FLiteV4l2::querybuf2)(%d): [%d][%d] vAdr is %p", __LINE__, index, i, vAdr); + return 0; + } else { + buf->virt.extP[i] = vAdr; + buf->size.extS[i] = length; + memset(buf->virt.extP[i], 0x0, buf->size.extS[i]); + } + } + + return 1; +} + +int SecCameraHardware::FLiteV4l2::expBuf(unsigned int index, int planeCnt, ExynosBuffer *buf) +{ + struct v4l2_exportbuffer v4l2_expbuf; + int err; + + for (int i = 0; i < planeCnt; i++) { + memset(&v4l2_expbuf, 0, sizeof(v4l2_expbuf)); + v4l2_expbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_expbuf.index = index; + err = ioctl(mCameraFd, VIDIOC_EXPBUF, &v4l2_expbuf); + if (err < 0) { + CLOGE("ERR(FLiteV4l2::expBuf)(%d): [%d][%d] failed %d", __LINE__, index, i, err); + goto EXP_ERR; + } else { + CLOGV("DEBUG(%s): [%d][%d] fd %d", __FUNCTION__, index, i, v4l2_expbuf.fd); + buf->fd.extFd[i] = v4l2_expbuf.fd; + } + } + return 1; + +EXP_ERR: + for (int i = 0; i < planeCnt; i++) { + if (buf->fd.extFd[i] > 0) + ion_close(buf->fd.extFd[i]); + } + return 0; +} + + +sp SecCameraHardware::FLiteV4l2::querybuf(uint32_t *frmsize) +{ + struct v4l2_buffer v4l2_buf; + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_buf.memory = V4L2_MEMORY_MMAP; + v4l2_buf.length = 0; + + for (int i = 0; i < mBufferCount; i++) { + int err; + v4l2_buf.index = i; + err = ioctl(mCameraFd , VIDIOC_QUERYBUF, &v4l2_buf); + if (err < 0) { + CLOGE("FLiteV4l2 querybufs: error %d, index %d", err, i); + *frmsize = 0; + return NULL; + } + } + + *frmsize = v4l2_buf.length; + + return mBufferCount == 1 ? 
+ new MemoryHeapBase(mCameraFd, v4l2_buf.length, v4l2_buf.m.offset) : NULL; +} + +int SecCameraHardware::FLiteV4l2::qbuf(uint32_t index) +{ + struct v4l2_buffer v4l2_buf; + CLEAR(v4l2_buf); + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_buf.memory = V4L2_MEMORY_MMAP; + v4l2_buf.index = index; + + int err = ioctl(mCameraFd, VIDIOC_QBUF, &v4l2_buf); + CHECK_ERR_N(err, ("FLiteV4l2 qbuf: error %d", err)); + + return 0; +} + +int SecCameraHardware::FLiteV4l2::dqbuf() +{ + struct v4l2_buffer v4l2_buf; + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_buf.memory = V4L2_MEMORY_MMAP; + + int err = ioctl(mCameraFd, VIDIOC_DQBUF, &v4l2_buf); + CHECK_ERR_N(err, ("FLiteV4l2 dqbuf: error %d", err)); + + if (v4l2_buf.index > (uint32_t)mBufferCount) { + CLOGE("FLiteV4l2 dqbuf: error %d, invalid index", v4l2_buf.index); + return -1; + } + + return v4l2_buf.index; +} + +int SecCameraHardware::FLiteV4l2::dqbuf(uint32_t *index) +{ + struct v4l2_buffer v4l2_buf; + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2_buf.memory = V4L2_MEMORY_MMAP; + + int err = ioctl(mCameraFd, VIDIOC_DQBUF, &v4l2_buf); + if (err >= 0) + *index = v4l2_buf.index; + + return err; +} + +int SecCameraHardware::FLiteV4l2::qbuf2(node_info_t *node, uint32_t index) +{ + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + int i; + int ret = 0; + + CLEAR(planes); + CLEAR(v4l2_buf); + + v4l2_buf.m.planes = planes; + v4l2_buf.type = node->type; + v4l2_buf.memory = node->memory; + v4l2_buf.index = index; + v4l2_buf.length = node->planes; + + for(i = 0; i < node->planes; i++){ + if (node->memory == V4L2_MEMORY_DMABUF) + v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]); + else if (node->memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(node->buffer[index].virt.extP[i]); + } else if (node->memory == V4L2_MEMORY_MMAP) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(node->buffer[index].virt.extP[i]); + } else { + CLOGE("ERR(%s):invalid node->memory(%d)", __func__, node->memory); + return -1; + } + v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]); + } + + ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf); + if (ret < 0) { + CLOGE("ERR(FLiteV4l2::qbuf2):exynos_v4l2_qbuf failed (index:%d)(ret:%d)", index, ret); + return -1; + }else{ + CLOGV("DEBUG (FLiteV4l2::qbuf2) : exynos_v4l2_qbuf(index:%d)", index); + } + + return ret; +} + +int SecCameraHardware::FLiteV4l2::qbufForCapture(ExynosBuffer *buf, uint32_t index) +{ + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + unsigned int i; + int ret = 0; + + CLEAR(planes); + CLEAR(v4l2_buf); + + v4l2_buf.m.planes = planes; + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; +#ifdef USE_USERPTR + v4l2_buf.memory = V4L2_MEMORY_USERPTR; +#else + v4l2_buf.memory = V4L2_MEMORY_MMAP; +#endif + v4l2_buf.index = index; + v4l2_buf.length = 2; + + for(i = 0; i < v4l2_buf.length; i++){ + if (v4l2_buf.memory == V4L2_MEMORY_DMABUF) + v4l2_buf.m.planes[i].m.fd = (int)(buf->fd.extFd[i]); + else if (v4l2_buf.memory == V4L2_MEMORY_USERPTR) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(buf->virt.extP[i]); + } else if (v4l2_buf.memory == V4L2_MEMORY_MMAP) { + v4l2_buf.m.planes[i].m.userptr = (unsigned long)(buf->virt.extP[i]); + } else { + ALOGE("ERR(FLiteV4l2::qbufForCapture):invalid node->memory(%d)", v4l2_buf.memory); + return -1; + } + v4l2_buf.m.planes[i].length = (unsigned long)(buf->size.extS[i]); + } + + ret = exynos_v4l2_qbuf(this->getFd(), 
&v4l2_buf); + if (ret < 0) { + CLOGE("ERR(FLiteV4l2::qbufForCapture):exynos_v4l2_qbuf failed (index:%d)(ret:%d)", index, ret); + return -1; + } + + return ret; +} + + +int SecCameraHardware::FLiteV4l2::dqbuf2(node_info_t *node) +{ + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + int ret; + + CLEAR(planes); + CLEAR(v4l2_buf); + + v4l2_buf.type = node->type; + v4l2_buf.memory = node->memory; + v4l2_buf.m.planes = planes; + v4l2_buf.length = node->planes; + + ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); + if (ret < 0) { + if (ret != -EAGAIN) + ALOGE("ERR(%s):VIDIOC_DQBUF failed (%d)", __func__, ret); + return ret; + } + + if (v4l2_buf.flags & V4L2_BUF_FLAG_ERROR) { + ALOGE("ERR(FLiteV4l2::dqbuf2):VIDIOC_DQBUF returned with error (%d)", V4L2_BUF_FLAG_ERROR); + return -1; + }else{ + CLOGV("DEBUG [FLiteV4l2::dqbuf2(%d)] exynos_v4l2_dqbuf(index:%d)", __LINE__, v4l2_buf.index); + } + + return v4l2_buf.index; +} + +int SecCameraHardware::FLiteV4l2::dqbufForCapture(ExynosBuffer *buf) +{ + struct v4l2_buffer v4l2_buf; + struct v4l2_plane planes[VIDEO_MAX_PLANES]; + int ret; + + CLEAR(planes); + CLEAR(v4l2_buf); + + v4l2_buf.m.planes = planes; + v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; +#ifdef USE_USERPTR + v4l2_buf.memory = V4L2_MEMORY_USERPTR; +#else + v4l2_buf.memory = V4L2_MEMORY_MMAP; +#endif + v4l2_buf.length = 2; + + ret = exynos_v4l2_dqbuf(this->getFd(), &v4l2_buf); + if (ret < 0) { + if (ret != -EAGAIN) + ALOGE("ERR(FLiteV4l2::dqbufForCapture):VIDIOC_DQBUF failed (%d)", ret); + return ret; + } + + if (v4l2_buf.flags & V4L2_BUF_FLAG_ERROR) { + ALOGE("ERR(FLiteV4l2::dqbufForCapture):VIDIOC_DQBUF returned with error (%d)", V4L2_BUF_FLAG_ERROR); + return -1; + } + + return v4l2_buf.index; +} + +#ifdef FAKE_SENSOR +int SecCameraHardware::FLiteV4l2::fakeQbuf2(node_info_t *node, uint32_t index) +{ + fakeByteData++; + fakeByteData = fakeByteData % 0xFF; + + for (int i = 0; i < node->planes; i++) { + memset(node->buffer[index].virt.extP[i], fakeByteData, + node->buffer[index].size.extS[i]); + + } + fakeIndex = index; + return fakeIndex; +} + +int SecCameraHardware::FLiteV4l2::fakeDqbuf2(node_info_t *node) +{ + return fakeIndex; +} +#endif + + +int SecCameraHardware::FLiteV4l2::stream(bool on) +{ + enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + int err = 0; + + CLOGV("DEBUG (FLiteV4l2::stream) : in "); + + err = sctrl(V4L2_CID_IS_S_STREAM, on ? IS_ENABLE_STREAM : IS_DISABLE_STREAM); + CHECK_ERR_N(err, ("s_stream: error %d", err)); + + err = ioctl(mCameraFd, on ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type); + CHECK_ERR_N(err, ("FLiteV4l2 stream: error %d", err)); + + mStreamOn = on; + + CLOGD("DEBUG (FLiteV4l2::stream) : stream turning %s", on ? 
"on" : "off"); + + return 0; +} + +int SecCameraHardware::FLiteV4l2::polling(bool recordingMode) +{ + int err = 0; + struct pollfd events; + CLEAR(events); + events.fd = mCameraFd; + events.events = POLLIN | POLLERR; + + const long timeOut = 1500; + + if (recordingMode) { + const long sliceTimeOut = 66; + + for (int i = 0; i < (timeOut / sliceTimeOut); i++) { + if (!mStreamOn) + return 0; + err = poll(&events, 1, sliceTimeOut); + if (err > 0) + break; + } + } else { +#if 1 /* for test fast capture */ + const int count = 40; + for (int i = 0; i < count; i++) { + err = poll(&events, 1, timeOut / count); + if (mFastCapture) { + return 0; + } + } +#else + err = poll(&events, 1, timeOut); +#endif + } + + if (CC_UNLIKELY(err <= 0)) + CLOGE("FLiteV4l2 poll: error %d", err); + + return err; +} + +int SecCameraHardware::FLiteV4l2::gctrl(uint32_t id, int *value) +{ + struct v4l2_control ctrl; + CLEAR(ctrl); + ctrl.id = id; + + int err = ioctl(mCameraFd, VIDIOC_G_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 gctrl: error %d, id %#x", err, id)); + *value = ctrl.value; + + return 0; +} + +int SecCameraHardware::FLiteV4l2::gctrl(uint32_t id, unsigned short *value) +{ + struct v4l2_control ctrl; + CLEAR(ctrl); + ctrl.id = id; + + int err = ioctl(mCameraFd, VIDIOC_G_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 gctrl: error %d, id %#x", err, id)); + *value = (unsigned short)ctrl.value; + + return 0; +} + +int SecCameraHardware::FLiteV4l2::gctrl(uint32_t id, char *value, int size) +{ + struct v4l2_ext_controls ctrls; + struct v4l2_ext_control ctrl; + + CLEAR(ctrls); + ctrls.ctrl_class = V4L2_CTRL_CLASS_CAMERA; + ctrls.count = 1; + ctrls.controls = &ctrl; + + CLEAR(ctrl); + ctrl.id = id; + ctrl.string = value; + ctrl.size=size; + + int err = ioctl(mCameraFd, VIDIOC_G_EXT_CTRLS, &ctrls); + CHECK_ERR_N(err, ("FLiteV4l2 gctrl: error %d, id %#x", err, id)); + + return 0; +} + +int SecCameraHardware::FLiteV4l2::sctrl(uint32_t id, int value) +{ + struct v4l2_control ctrl; + CLEAR(ctrl); + ctrl.id = id; + ctrl.value = value; + + int err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 sctrl: error %d, id %#x value %d", err, id, value)); + + return 0; +} + +int SecCameraHardware::FLiteV4l2::sparm(struct v4l2_streamparm *stream_parm) +{ + int err = ioctl(mCameraFd, VIDIOC_S_PARM, stream_parm); + CHECK_ERR_N(err, ("FLiteV4l2 sparm: error %d, value %d", err, + stream_parm->parm.capture.timeperframe.denominator)); + + return 0; +} + + +int SecCameraHardware::FLiteV4l2::getYuvPhyaddr(int index, + phyaddr_t *y, + phyaddr_t *cbcr) +{ + struct v4l2_control ctrl; + int err; + + if (y) { + CLEAR(ctrl); + ctrl.id = V4L2_CID_PADDR_Y; + ctrl.value = index; + + err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 getYuvPhyaddr: error %d, V4L2_CID_PADDR_Y", err)); + + *y = ctrl.value; + } + + if (cbcr) { + CLEAR(ctrl); + ctrl.id = V4L2_CID_PADDR_CBCR; + ctrl.value = index; + + err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 getYuvPhyaddr: error %d, V4L2_CID_PADDR_CBCR", err)); + + *cbcr = ctrl.value; + } + return 0; +} + +int SecCameraHardware::FLiteV4l2::getYuvPhyaddr(int index, + phyaddr_t *y, + phyaddr_t *cb, + phyaddr_t *cr) +{ + struct v4l2_control ctrl; + int err; + + if (y) { + CLEAR(ctrl); + ctrl.id = V4L2_CID_PADDR_Y; + ctrl.value = index; + + err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 getYuvPhyaddr: error %d, V4L2_CID_PADDR_Y", err)); + + *y = ctrl.value; + } + + if (cb) { + CLEAR(ctrl); + ctrl.id = 
V4L2_CID_PADDR_CB; + ctrl.value = index; + + err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 getYuvPhyaddr: error %d, V4L2_CID_PADDR_CB", err)); + + *cb = ctrl.value; + } + + if (cr) { + CLEAR(ctrl); + ctrl.id = V4L2_CID_PADDR_CR; + ctrl.value = index; + + err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 getYuvPhyaddr: error %d, V4L2_CID_PADDR_CR", err)); + + *cr = ctrl.value; + } + + return 0; +} + +int SecCameraHardware::FLiteV4l2::setFastCaptureFimc(uint32_t IsFastCaptureCalled) +{ + mFastCapture = IsFastCaptureCalled; + ALOGD("Set Fast capture in fimc : %d", mFastCapture); + return 0; +} + +int SecCameraHardware::FLiteV4l2::reset() +{ + struct v4l2_control ctrl; + CLEAR(ctrl); + ctrl.id = V4L2_CID_CAMERA_RESET; + ctrl.value = 0; + + int err = ioctl(mCameraFd, VIDIOC_S_CTRL, &ctrl); + CHECK_ERR_N(err, ("FLiteV4l2 reset: error %d", err)); + + return 0; +} + +void SecCameraHardware::FLiteV4l2::forceStop() +{ + mCmdStop = 1; + return; +} + +int SecCameraHardware::FLiteV4l2::getFd() +{ + return mCameraFd; +} + +bool SecCameraHardware::init() +{ + CLOGD("DEBUG [%s(%d)] -in-", __FUNCTION__, __LINE__); + + LOG_PERFORMANCE_START(1); + CLEAR(mFliteNode); + CSC_METHOD cscMethod = CSC_METHOD_HW; + int err; + + if (mCameraId == CAMERA_ID_BACK) + err = mFlite.init(FLITE0_DEV_PATH, mCameraId); + else + err = mFlite.init(FLITE1_DEV_PATH, mCameraId); + + + if (CC_UNLIKELY(err < 0)) { + ALOGE("initCamera X: error(%d), %s", err, + (mCameraId == CAMERA_ID_BACK) ? FLITE0_DEV_PATH : FLITE1_DEV_PATH); + goto fimc_out; + } + if (mCameraId == CAMERA_ID_BACK) { + char read_prop[92]; + int res_prop = 0; + int value = 0; + + CLEAR(read_prop); + res_prop = property_get("persist.sys.camera.fw_update", read_prop, "0"); + ALOGD("Lens Close Hold persist.sys.camera.fw_update [%s], res %d", read_prop, res_prop); + + // ISP boot option : "0" => Normal, "1" => fw_update + if (!strcmp(read_prop, "1")) + value = 0x1; + else + { + CLEAR(read_prop); + res_prop = property_get("persist.sys.camera.samsung", read_prop, "0"); + ALOGD("Lens Close Hold persist.sys.camera.samsung [%s], res %d", read_prop, res_prop); + + // samsung app : "0" => 3th party app, "1" => samsung app + if (!strcmp(read_prop, "1")) + value = 0x0; + else + value = 0x1; + } + + CLEAR(read_prop); + res_prop = property_get("persist.sys.camera.mem", read_prop, "0"); + ALOGD("ISP mem : property get [%s], res %d", read_prop, res_prop); + + // ISP target memory : "0" => NOR, "1" => EEPROM + if (!strcmp(read_prop, "1")) + value |= (0x1 << 8); + else + value |= 0x0; + + ALOGD("call camera init with value: 0x%02X", value); + if (CC_UNLIKELY(err < 0)) { + ALOGE("camera init error:%d", err); + } + } + mFliteNode.fd = mFlite.getFd(); + ALOGV("mFlite fd %d", mFlite.getFd()); + + setExifFixedAttribute(); + + /* init CSC (fimc1, fimc2) */ + mFimc1CSC = csc_init(cscMethod); + if (mFimc1CSC == NULL) + ALOGE("ERR(%s): mFimc1CSC csc_init() fail", __func__); + err = csc_set_hw_property(mFimc1CSC, CSC_HW_PROPERTY_FIXED_NODE, FIMC1_NODE_NUM); + if (err != 0) + ALOGE("ERR(%s): fimc0 open failed %d", __func__, err); + + mFimc2CSC = csc_init(cscMethod); + if (mFimc2CSC == NULL) + ALOGE("ERR(%s): mFimc2CSC csc_init() fail", __func__); + err = csc_set_hw_property(mFimc2CSC, CSC_HW_PROPERTY_FIXED_NODE, FIMC2_NODE_NUM); + if (err != 0) + ALOGE("ERR(%s): fimc1 open failed %d", __func__, err); + + LOG_PERFORMANCE_END(1, "total"); + return ISecCameraHardware::init(); + +fimc_out: + mFlite.deinit(); + return false; +} + +void 
SecCameraHardware::initDefaultParameters() +{ + char str[16]; + CLEAR(str); + if (mCameraId == CAMERA_FACING_BACK) { + snprintf(str, sizeof(str), "%f", (double)Exif::DEFAULT_BACK_FOCAL_LEN_NUM/ + Exif::DEFAULT_BACK_FOCAL_LEN_DEN); + mParameters.set(CameraParameters::KEY_FOCAL_LENGTH, str); + } else { + snprintf(str, sizeof(str), "%f", (double)Exif::DEFAULT_FRONT_FOCAL_LEN_NUM/ + Exif::DEFAULT_FRONT_FOCAL_LEN_DEN); + mParameters.set(CameraParameters::KEY_FOCAL_LENGTH, str); + } + + ISecCameraHardware::initDefaultParameters(); +} + +void SecCameraHardware::release() +{ + CLOGD("INFO(%s) : in ",__FUNCTION__); + + /* Forced wake up sound interrupt */ + mCameraPower = false; + ExynosBuffer nullBuf; + int i = 0; + + ISecCameraHardware::release(); + + mFlite.deinit(); + + /* flite buffer free */ + for (i = 0; i < FLITE_BUF_CNT; i++) { + freeMem(&mFliteNode.buffer[i]); + mFliteNode.buffer[i] = nullBuf; + } + + /* capture buffer free */ + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + freeMem(&mPictureBufDummy[i]); + mPictureBufDummy[i] = nullBuf; + } + freeMem(&mPictureBuf); + mPictureBuf = nullBuf; + + mInitRecSrcQ(); + mInitRecDstBuf(); + + /* deinit CSC (fimc0, fimc1) */ + if (mFimc1CSC) + csc_deinit(mFimc1CSC); + mFimc1CSC = NULL; + + if (mFimc2CSC) + csc_deinit(mFimc2CSC); + mFimc2CSC = NULL; +} + +int SecCameraHardware::nativeSetFastCapture(bool onOff) +{ + mFastCaptureCalled = onOff; + + mFlite.setFastCaptureFimc(mFastCaptureCalled); + return 0; +} + +bool SecCameraHardware::nativeCreateSurface(uint32_t width, uint32_t height, uint32_t halPixelFormat) +{ + CLOGV("INFO(%s) : in ",__FUNCTION__); + + int min_bufs; + + if (CC_UNLIKELY(mFlagANWindowRegister == true)) { + ALOGI("Surface already exist!!"); + return true; + } + + status_t err; + + if (mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow, &min_bufs)) { + ALOGE("%s: could not retrieve min undequeued buffer count", __FUNCTION__); + return INVALID_OPERATION; + } + + if (min_bufs >= PREVIEW_BUF_CNT) { + ALOGE("%s: warning, min undequeued buffer count %d is too high (expecting at most %d)", __FUNCTION__, + min_bufs, PREVIEW_BUF_CNT - 1); + } + + CLOGD("DEBUG [%s(%d)] setting buffer count to %d", __FUNCTION__, __LINE__,PREVIEW_BUF_CNT); + if (mPreviewWindow->set_buffer_count(mPreviewWindow, PREVIEW_BUF_CNT)) { + ALOGE("%s: could not set buffer count", __FUNCTION__); + return INVALID_OPERATION; + } + + if (mPreviewWindow->set_usage(mPreviewWindow, GRALLOC_SET_USAGE_FOR_CAMERA) != 0) { + ALOGE("ERR(%s):could not set usage on gralloc buffer", __func__); + return INVALID_OPERATION; + } + + CLOGD("DEBUG [%s(%d)] (%dx%d)", __FUNCTION__, __LINE__, width, height); + if (mPreviewWindow->set_buffers_geometry(mPreviewWindow, + width, height, + halPixelFormat)) { + CLOGE("%s: could not set buffers geometry ", __FUNCTION__); + return INVALID_OPERATION; + } + + mFlagANWindowRegister = true; + + return true; +} + +bool SecCameraHardware::nativeDestroySurface(void) +{ + CLOGV("DEBUG [%s(%d)] -in-", __FUNCTION__, __LINE__); + mFlagANWindowRegister = false; + + return true; +} + +int SecCameraHardware::save_dump_path( uint8_t *real_data, int data_size, const char* filePath) +{ + FILE *fp = NULL; + char *buffer = NULL; + + CLOGE("save dump E"); + fp = fopen(filePath, "wb"); + + if (fp == NULL) { + CLOGE("Save dump image open error"); + return -1; + } + + CLOGE("%s: real_data size ========> %d", __func__, data_size); + buffer = (char *) malloc(data_size); + if (buffer == NULL) { + CLOGE("Save buffer alloc failed"); + if (fp) + fclose(fp); + + return 
-1; + } + + memcpy(buffer, real_data, data_size); + + fflush(stdout); + + fwrite(buffer, 1, data_size, fp); + + fflush(fp); + + if (fp) + fclose(fp); + if (buffer) + free(buffer); + + CLOGE("save dump X"); + return 0; +} + +bool SecCameraHardware::nativeFlushSurfaceYUV420(uint32_t width, uint32_t height, uint32_t size, uint32_t index, int type) +{ + //ALOGV("%s: width=%d, height=%d, size=0x%x, index=%d", __FUNCTION__, width, height, size, index); + ExynosBuffer dstBuf; + buffer_handle_t *buf_handle = NULL; + + if (CC_UNLIKELY(!mPreviewWindowSize.width)) { + ALOGE("nativeFlushSurfacePostview: error, Preview window %dx%d", mPreviewWindowSize.width, mPreviewWindowSize.height); + return false; + } + + if (CC_UNLIKELY(mFlagANWindowRegister == false)) { + ALOGE("%s::mFlagANWindowRegister == false fail", __FUNCTION__); + return false; + } + + if (mPreviewWindow && mGrallocHal) { + int stride; + if (0 != mPreviewWindow->dequeue_buffer(mPreviewWindow, &buf_handle, &stride)) { + ALOGE("Could not dequeue gralloc buffer!\n"); + return false; + } else { + if (mPreviewWindow->lock_buffer(mPreviewWindow, buf_handle) != 0) + ALOGE("ERR(%s):Could not lock gralloc buffer !!", __func__ ); + } + +#ifdef SUPPORT_64BITS + void *vaddr[3] = {NULL}; + if (!mGrallocHal->lock(mGrallocHal, + *buf_handle, + GRALLOC_LOCK_FOR_CAMERA, + 0, 0, width, height, vaddr)) { +#else + unsigned int vaddr[3]; + if (!mGrallocHal->lock(mGrallocHal, + *buf_handle, + GRALLOC_LOCK_FOR_CAMERA, + 0, 0, width, height, (void **)vaddr)) { +#endif + + char *src; + char *ptr = (char *)vaddr[0]; + src = (char *)mPostviewHeap->data; + + memcpy(ptr, src, width * height); + src += width * height; + ptr = (char *)vaddr[1]; + memcpy(ptr, src, (width * height ) >> 1); + + mGrallocHal->unlock(mGrallocHal, *buf_handle); + } + + if (0 != mPreviewWindow->enqueue_buffer(mPreviewWindow, buf_handle)) { + ALOGE("Could not enqueue gralloc buffer!\n"); + return false; + } + } else if (!mGrallocHal) { + ALOGE("nativeFlushSurfaceYUV420: gralloc HAL is not loaded"); + return false; + } + + return true; + +CANCEL: + if (mPreviewWindow->cancel_buffer(mPreviewWindow, buf_handle) != 0) + ALOGE("ERR(%s):Fail to cancel buffer", __func__); + + return false; +} + +bool SecCameraHardware::nativeFlushSurface( + uint32_t width, uint32_t height, uint32_t size, uint32_t index, int type) +{ + //ALOGV("%s: width=%d, height=%d, size=0x%x, index=%d", __FUNCTION__, width, height, size, index); + ExynosBuffer dstBuf; + buffer_handle_t *buf_handle = NULL; + + if (CC_UNLIKELY(!mPreviewWindowSize.width)) { + CLOGE("nativeFlushSurface: error, Preview window %dx%d", + mPreviewWindowSize.width, mPreviewWindowSize.height); + return false; + } + + if (CC_UNLIKELY(mFlagANWindowRegister == false)) { + CLOGE("%s::mFlagANWindowRegister == false fail", __FUNCTION__); + return false; + } + + if (mPreviewWindow && mGrallocHal) { + int stride; + if (0 != mPreviewWindow->dequeue_buffer(mPreviewWindow, &buf_handle, &stride)) { + CLOGE("Could not dequeue gralloc buffer!\n"); + return false; + } else { + if (mPreviewWindow->lock_buffer(mPreviewWindow, buf_handle) != 0) + CLOGE("ERR(%s):Could not lock gralloc buffer !!", __func__ ); + } +#ifdef SUPPORT_64BITS + void *vaddr[3] = {NULL}; + if (!mGrallocHal->lock(mGrallocHal, + *buf_handle, + GRALLOC_LOCK_FOR_CAMERA, + 0, 0, width, height, vaddr)) { +#else + unsigned int vaddr[3]; + if (!mGrallocHal->lock(mGrallocHal, + *buf_handle, + GRALLOC_LOCK_FOR_CAMERA, + 0, 0, width, height, (void **)vaddr)) { +#endif + /* csc start flite(s) -> fimc0 -> gralloc(d) 
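+           (the crop window from getCropRect() is applied to the FLite source and the
+           FIMC CSC converts it into the NV21 gralloc buffer)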
*/ + if (mFimc1CSC) { + /* set zoom info */ + bool flag; + flag = getCropRect(mFLiteSize.width, mFLiteSize.height, mPreviewSize.width, + mPreviewSize.height, &mPreviewZoomRect.x, &mPreviewZoomRect.y, + &mPreviewZoomRect.w, &mPreviewZoomRect.h, 2, 2, 2, 2, mZoomRatio); + if(false == flag) { + CLOGE("ERR(%s):mFLiteSize.width = %u mFLiteSize.height = %u " + "mPreviewSize.width = %u mPreviewSize.height = %u ", __func__, + mFLiteSize.width, mFLiteSize.height, mPreviewSize.width, mPreviewSize.height); + CLOGE("ERR(%s):Preview CropRect failed X = %u Y = %u W = %u H = %u ", __func__, + mPreviewZoomRect.x, mPreviewZoomRect.y, mPreviewZoomRect.w, mPreviewZoomRect.h); + } + +#ifdef DEBUG_PREVIEW + CLOGD("DEBUG PREVIEW(%s:%d): src(%d,%d,%d,%d,%d,%d), dst(%d,%d), fmt(%s)" + , __FUNCTION__, __LINE__ + , mFLiteSize.width + , mFLiteSize.height + , mPreviewZoomRect.x + , mPreviewZoomRect.y + , mPreviewZoomRect.w + , mPreviewZoomRect.h + , mPreviewSize.width + , mPreviewSize.height + , mFliteNode.format == CAM_PIXEL_FORMAT_YVU420P ? "YV12" : + mFliteNode.format == CAM_PIXEL_FORMAT_YUV420SP ? "NV21" : + mFliteNode.format == CAM_PIXEL_FORMAT_YUV422I ? "YUYV" : + "Others"); + +#endif + + /* src : FLite */ + csc_set_src_format(mFimc1CSC, + mFLiteSize.width, mFLiteSize.height, + mPreviewZoomRect.x, mPreviewZoomRect.y, + mPreviewZoomRect.w, mPreviewZoomRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + if (type == CAMERA_HEAP_POSTVIEW) { + csc_set_src_buffer(mFimc1CSC, + (void **)mPictureBufDummy[0].fd.extFd, CSC_MEMORY_DMABUF); + } else { + csc_set_src_buffer(mFimc1CSC, + (void **)mFliteNode.buffer[index].fd.extFd, CSC_MEMORY_DMABUF); + } + + //mSaveDump("/data/camera_source%d.yuv", &mFliteNode.buffer[index], index); + + int halPixelFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_FULL; + + /* when recording, narrow color range will be applied */ + if (mMovieMode) + halPixelFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; + + /* dst : GRALLOC */ + csc_set_dst_format(mFimc1CSC, + mPreviewSize.width, mPreviewSize.height, + 0, 0, + mPreviewSize.width, mPreviewSize.height, + halPixelFormat, + 0); + + const private_handle_t *priv_handle = private_handle_t::dynamicCast(*buf_handle); + dstBuf.fd.extFd[0] = priv_handle->fd; + dstBuf.fd.extFd[1] = priv_handle->fd1; + dstBuf.virt.extP[0] = (char *)vaddr[0]; + dstBuf.virt.extP[1] = (char *)vaddr[1]; + dstBuf.size.extS[0] = mPreviewSize.width * mPreviewSize.height; + dstBuf.size.extS[1] = mPreviewSize.width * mPreviewSize.height / 2; + + csc_set_dst_buffer(mFimc1CSC, + (void **)dstBuf.fd.extFd, CSC_MEMORY_DMABUF); + + mFimc1CSCLock.lock(); + if (csc_convert(mFimc1CSC) != 0) { + ALOGE("ERR(%s):csc_convert(mFimc1CSC) fail", __func__); + mFimc1CSCLock.unlock(); + goto CANCEL; + } + mFimc1CSCLock.unlock(); + } + + mGrallocHal->unlock(mGrallocHal, *buf_handle); + } + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + m_previewFrameQ.pushProcessQ(&buf_handle); +#else + if (0 != mPreviewWindow->enqueue_buffer(mPreviewWindow, buf_handle)) { + ALOGE("Could not enqueue gralloc buffer!\n"); + return false; + } +#endif + } else if (!mGrallocHal) { + ALOGE("nativeFlushSurface: gralloc HAL is not loaded"); + return false; + } + +#if 0 + static int count=0; + mSaveDump("/data/camera_flush%d.yuv", &dstBuf, count); + count++; + if(count > 3) count = 0; +#endif + /* if CAMERA_MSG_PREVIEW_METADATA, prepare callback buffer */ + if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) { + if (nativePreviewCallback(index, &dstBuf) < 0) + ALOGE("ERROR(%s): nativePreviewCallback failed", __func__); + } 
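+    /*
+     * Success path of nativeFlushSurface(): at this point the dequeued gralloc
+     * buffer has been filled (FIMC1 CSC from the FLite or postview source,
+     * with the crop/zoom window computed by getCropRect()), unlocked and
+     * enqueued (directly, or via m_previewFrameQ when
+     * USE_DEDICATED_PREVIEW_ENQUEUE_THREAD is defined), and the converted
+     * frame has been forwarded to nativePreviewCallback() when
+     * CAMERA_MSG_PREVIEW_FRAME is enabled.  The CANCEL label below returns
+     * the dequeued buffer to the preview window if the color conversion fails.
+     */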
+ + + return true; + +CANCEL: + if (mPreviewWindow->cancel_buffer(mPreviewWindow, buf_handle) != 0) + ALOGE("ERR(%s):Fail to cancel buffer", __func__); + + return false; +} + +bool SecCameraHardware::beautyLiveFlushSurface(uint32_t width, uint32_t height, uint32_t size, uint32_t index, int type) +{ + //ALOGV("%s: width=%d, height=%d, size=0x%x, index=%d", __FUNCTION__, width, height, size, index); + ExynosBuffer dstBuf; + buffer_handle_t *buf_handle = NULL; + + if (CC_UNLIKELY(!mPreviewWindowSize.width)) { + ALOGE("beautyLiveFlushSurface: error, Preview window %dx%d", mPreviewWindowSize.width, mPreviewWindowSize.height); + return false; + } + + if (CC_UNLIKELY(mFlagANWindowRegister == false)) { + ALOGE("%s::mFlagANWindowRegister == false fail", __FUNCTION__); + return false; + } + + if (mPreviewWindow && mGrallocHal) { + int stride; + if (0 != mPreviewWindow->dequeue_buffer(mPreviewWindow, &buf_handle, &stride)) { + ALOGE("Could not dequeue gralloc buffer!\n"); + return false; + } else { + if (mPreviewWindow->lock_buffer(mPreviewWindow, buf_handle) != 0) + ALOGE("ERR(%s):Could not lock gralloc buffer !!", __func__ ); + } + +#ifdef SUPPORT_64BITS + void *vaddr[3] = {NULL}; + if (!mGrallocHal->lock(mGrallocHal, + *buf_handle, + GRALLOC_LOCK_FOR_CAMERA, + 0, 0, width, height, vaddr)) { +#else + unsigned int vaddr[3]; + if (!mGrallocHal->lock(mGrallocHal, + *buf_handle, + GRALLOC_LOCK_FOR_CAMERA, + 0, 0, width, height, (void **)vaddr)) { +#endif +/////////////////////////////////////////////////////////////////////// + char *frame = NULL; + if ( type==CAMERA_HEAP_PREVIEW) { + frame = ((char *)mPreviewHeap->data) + (mPreviewFrameSize * index); + } else { + frame = ((char *)mPostviewHeap->data); + } + char *src = frame; + char *ptr = (char *)vaddr[0]; + + if (src == NULL || ptr == NULL) + return false; + + // Y + memcpy(ptr, src, width * height); + src += width * height; + if (mPreviewFormat == CAM_PIXEL_FORMAT_YVU420P) { + /* YV12 */ + //ALOGV("%s: yuv420p YV12", __func__); + // V + ptr = (char *)vaddr[1]; + if (src == NULL || ptr == NULL) + return false; + + memcpy(ptr, src, width * height / 4); + src += width * height / 4; + // U + ptr = (char *)vaddr[2]; + if (src == NULL || ptr == NULL) + return false; + + memcpy(ptr, src, width * height / 4); + } else if (mPreviewFormat == CAM_PIXEL_FORMAT_YUV420SP) { + /* NV21 */ + //ALOGV("%s: yuv420sp NV21", __func__); + ptr = (char *)vaddr[1]; + if (src == NULL || ptr == NULL) + return false; + + memcpy(ptr, src, (width * height) >> 1); + } +/////////////////////////////////////////////////////////////////////// + mGrallocHal->unlock(mGrallocHal, *buf_handle); + } + +#ifdef USE_DEDICATED_PREVIEW_ENQUEUE_THREAD + m_previewFrameQ.pushProcessQ(&buf_handle); +#else + if (0 != mPreviewWindow->enqueue_buffer(mPreviewWindow, buf_handle)) { + ALOGE("Could not enqueue gralloc buffer!\n"); + return false; + } +#endif + } else if (!mGrallocHal) { + ALOGE("nativeFlushSurface: gralloc HAL is not loaded"); + return false; + } + + /* if CAMERA_MSG_PREVIEW_METADATA, prepare callback buffer */ + if (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) { + if (nativePreviewCallback(index, &dstBuf) < 0) + ALOGE("ERROR(%s): nativePreviewCallback failed", __func__); + } + + return true; + +CANCEL: + if (mPreviewWindow->cancel_buffer(mPreviewWindow, buf_handle) != 0) + ALOGE("ERR(%s):Fail to cancel buffer", __func__); + + return false; +} + + + +image_rect_type SecCameraHardware::nativeGetWindowSize() +{ + image_rect_type window; + if (!mMovieMode) { + window.width = mPreviewSize.width; 
+ window.height = mPreviewSize.height; + return window; + } + + switch (FRM_RATIO(mPreviewSize)) { + case CAM_FRMRATIO_QCIF: + window.width = 528; + window.height = 432; + break; + case CAM_FRMRATIO_VGA: + window.width = 640; + window.height = 480; + break; + case CAM_FRMRATIO_WVGA: + window.width = 800; + window.height = 480; + break; + case CAM_FRMRATIO_D1: + window.width = 720; + window.height = 480; + break; + case CAM_FRMRATIO_HD: + window.width = 800; + window.height = 450; + break; + default: + ALOGW("nativeGetWindowSize: invalid frame ratio %d", FRM_RATIO(mPreviewSize)); + window.width = mPreviewSize.width; + window.height = mPreviewSize.height; + break; + } + return window; +} + +bool SecCameraHardware::allocatePreviewHeap() +{ + if (mPreviewHeap) { + mPreviewHeap->release(mPreviewHeap); + mPreviewHeap = 0; + mPreviewHeapFd = -1; + } + + /* not need to alloc MHB by mM2MExyMemFd for ion */ +#ifdef BOARD_USE_MHB_ION + if (mEnableDZoom) { + mPreviewHeap = mGetMemoryCb(-1, mPreviewFrameSize, PREVIEW_BUF_CNT, &mPreviewHeapFd); + if (!mPreviewHeap || mPreviewHeapFd < 0) { + ALOGE("allocatePreviewHeap: error, fail to create preview heap(%d)", mPreviewHeap); + return false; + } + } else { + mPreviewHeap = mGetMemoryCb((int)mFlite.getfd(), mPreviewFrameSize, PREVIEW_BUF_CNT, 0); + if (!mPreviewHeap) { + ALOGE("allocatePreviewHeap: error, fail to create preview heap(%d %d)", mPreviewHeap, mPreviewHeapFd); + return false; + } + } +#else + if (mEnableDZoom) { + mPreviewHeap = mGetMemoryCb(-1, mPreviewFrameSize, PREVIEW_BUF_CNT, 0); + if (!mPreviewHeap) { + ALOGE("allocatePreviewHeap: error, fail to create preview heap(%d)", mPreviewHeap); + return false; + } + } else { + mPreviewHeap = mGetMemoryCb((int)mFlite.getfd(), mPreviewFrameSize, PREVIEW_BUF_CNT, 0); + if (!mPreviewHeap) { + ALOGE("allocatePreviewHeap: error, fail to create preview heap(%d)", mPreviewHeap); + return false; + } + } +#endif + + ALOGD("allocatePreviewHeap: %dx%d, frame %dx%d", + mOrgPreviewSize.width, mOrgPreviewSize.height, mPreviewFrameSize, PREVIEW_BUF_CNT); + + return true; +} + +status_t SecCameraHardware::nativeStartPreview() +{ + CLOGV("INFO(%s) : in ",__FUNCTION__); + + int err; + int formatMode; + + /* YV12 */ + CLOGD("DEBUG[%s(%d)]Preview Format = %s", + __FUNCTION__, __LINE__, + mFliteFormat == CAM_PIXEL_FORMAT_YVU420P ? "YV12" : + mFliteFormat == CAM_PIXEL_FORMAT_YUV420SP ? "NV21" : + mFliteFormat == CAM_PIXEL_FORMAT_YUV422I ? "YUYV" : + "Others"); + + err = mFlite.startPreview(&mFLiteSize, mFliteFormat, FLITE_BUF_CNT, mFps, mMovieMode, &mFliteNode); + CHECK_ERR_N(err, ("nativeStartPreview: error, mFlite.start")); + + mFlite.querybuf(&mPreviewFrameSize); + if (mPreviewFrameSize == 0) { + CLOGE("nativeStartPreview: error, mFlite.querybuf"); + return UNKNOWN_ERROR; + } + + if (!allocatePreviewHeap()) { + CLOGE("nativeStartPreview: error, allocatePreviewHeap"); + return NO_MEMORY; + } + + for (int i = 0; i < FLITE_BUF_CNT; i++) { + err = mFlite.qbuf(i); + CHECK_ERR_N(err, ("nativeStartPreview: error %d, mFlite.qbuf(%d)", err, i)); + } + + err = mFlite.stream(true); + CHECK_ERR_N(err, ("nativeStartPreview: error %d, mFlite.stream", err)); + + CLOGV("DEBUG[%s(%d)]-out -", __FUNCTION__, __LINE__); + return NO_ERROR; +} + +status_t SecCameraHardware::nativeStartPreviewZoom() +{ + CLOGV("INFO(%s) : in ",__FUNCTION__); + + int err; + int formatMode; + int i = 0; + ExynosBuffer nullBuf; + + /* YV12 */ + CLOGD("DEBUG[%s(%d)] : Preview Format = %s", + __FUNCTION__, __LINE__, + mFliteFormat == CAM_PIXEL_FORMAT_YVU420P ? 
"YV12" : + mFliteFormat == CAM_PIXEL_FORMAT_YUV420SP ? "NV21" : + mFliteFormat == CAM_PIXEL_FORMAT_YUV422I ? "YUYV" : + "Others"); + + CLOGD("DEBUG[%s(%d)] : size:%dx%d/ fps: %d ", + __FUNCTION__, __LINE__, mFLiteSize.width, mFLiteSize.height, mFps); + + err = mFlite.startPreview(&mFLiteSize, mFliteFormat, FLITE_BUF_CNT, mFps, mMovieMode, &mFliteNode); + CHECK_ERR_N(err, ("nativeStartPreviewZoom: error, mFlite.start")); + mFliteNode.ionClient = mIonCameraClient; + + CLOGI("INFO(%s) : mFliteNode.fd [%d]" , __FUNCTION__, mFliteNode.fd); + CLOGI("INFO(%s) : mFliteNode.width [%d]" , __FUNCTION__, mFliteNode.width); + CLOGI("INFO(%s) : mFliteNode.height [%d]" , __FUNCTION__, mFliteNode.height); + CLOGI("INFO(%s) : mFliteNode.format [%c%c%c%c]" , __FUNCTION__, mFliteNode.format, + mFliteNode.format >> 8, mFliteNode.format >> 16, mFliteNode.format >> 24); + CLOGI("INFO(%s) : mFliteNode.planes [%d]" , __FUNCTION__, mFliteNode.planes); + CLOGI("INFO(%s) : mFliteNode.buffers[%d]" , __FUNCTION__, mFliteNode.buffers); + CLOGI("INFO(%s) : mFliteNode.memory [%d]" , __FUNCTION__, mFliteNode.memory); + CLOGI("INFO(%s) : mFliteNode.type [%d]" , __FUNCTION__, mFliteNode.type); + CLOGI("INFO(%s) : mFliteNode.ionClient [%d]", __FUNCTION__, mFliteNode.ionClient); + +#ifdef USE_USERPTR + /* For FLITE buffer */ + for (i = 0; i < FLITE_BUF_CNT; i++) { + mFliteNode.buffer[i] = nullBuf; + /* YUV size */ + getAlignedYUVSize(mFliteFormat, mFLiteSize.width, mFLiteSize.height, &mFliteNode.buffer[i]); + if (allocMem(mIonCameraClient, &mFliteNode.buffer[i], 0) == false) { + CLOGE("ERR(%s):mFliteNode allocMem() fail", __func__); + return UNKNOWN_ERROR; + } else { + CLOGD("DEBUG(%s): mFliteNode allocMem(%d) adr(%p), size(%d), ion(%d)", __FUNCTION__, + i, mFliteNode.buffer[i].virt.extP[0], mFliteNode.buffer[i].size.extS[0], mIonCameraClient); + memset(mFliteNode.buffer[i].virt.extP[0], 0, mFliteNode.buffer[i].size.extS[0]); + } + } +#else + /* loop for buffer count */ + for (int i = 0; i < mFliteNode.buffers; i++) { + err = mFlite.querybuf2(i, mFliteNode.planes, &mFliteNode.buffer[i]); + CHECK_ERR_N(err, ("nativeStartPreviewZoom: error, mFlite.querybuf2")); + } +#endif + CLOGV("DEBUG(%s) : mMsgEnabled(%d)", __FUNCTION__, mMsgEnabled); + + /* allocate preview callback buffer */ + mPreviewFrameSize = getAlignedYUVSize(mPreviewFormat, mOrgPreviewSize.width, mOrgPreviewSize.height, NULL); + CLOGD("DEBUG(%s) : mPreviewFrameSize(%d)(%dx%d) for callback", __FUNCTION__, + mPreviewFrameSize, mOrgPreviewSize.width, mOrgPreviewSize.height); + + if (!allocatePreviewHeap()) { + CLOGE("ERR(%s)(%d) : allocatePreviewHeap() fail", __FUNCTION__, __LINE__); + goto DESTROYMEM; + } + + mPreviewZoomRect.x = 0; + mPreviewZoomRect.y = 0; + mPreviewZoomRect.w = mFLiteSize.width; + mPreviewZoomRect.h = mFLiteSize.height; + + for (int i = 0; i < FLITE_BUF_CNT; i++) { + err = mFlite.qbuf2(&(mFliteNode), i); + CHECK_ERR_GOTO(DESTROYMEM, err, ("nativeStartPreviewZoom: error %d, mFlite.qbuf(%d)", err, i)); + } + +#if !defined(USE_USERPTR) + /* export FD */ + for (int i = 0; i < mFliteNode.buffers; i++) { + err = mFlite.expBuf(i, mFliteNode.planes, &mFliteNode.buffer[i]); + CHECK_ERR_N(err, ("nativeStartPreviewZoom: error, mFlite.expBuf")); + } +#endif + err = mFlite.stream(true); + CHECK_ERR_GOTO(DESTROYMEM, err, ("nativeStartPreviewZoom: error %d, mFlite.stream", err)); + + CLOGV("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; + +DESTROYMEM: +#ifdef USE_USERPTR + /* flite buffer free */ + for (i = 0; i < FLITE_BUF_CNT; i++) { + 
freeMem(&mFliteNode.buffer[i]); + mFliteNode.buffer[i] = nullBuf; + } +#else + if (mFlite.reqBufZero(&mFliteNode) < 0) + ALOGE("ERR(%s): mFlite.reqBufZero() fail", __func__); + for (i = 0; i < FLITE_BUF_CNT; i++) + mFliteNode.buffer[i] = nullBuf; +#endif + return UNKNOWN_ERROR; +} + +int SecCameraHardware::nativeGetPreview() +{ + int index = -1; + int retries = 10; + int ret = -1; + + CLOGV("INFO(%s) : in ",__FUNCTION__); + + /* All buffers are empty. Request a frame to the device */ +retry: + if (mFlite.polling() == 0) { + if (mFastCaptureCalled) { + CLOGD("DEBUG[%s(%d)] : mFastCaptureCalled!!-",__FUNCTION__, __LINE__); + return -1; + } + CLOGE("ERR[%s(%d)] : no frame, RESET!!!", __FUNCTION__, __LINE__); + if (retries-- <= 0) + return -1; + + mFlite.stream(false); + mFlite.deinit(); + if (mCameraId == CAMERA_ID_BACK) + mFlite.init(FLITE0_DEV_PATH, mCameraId); + else + mFlite.init(FLITE1_DEV_PATH, mCameraId); + + if (mEnableDZoom) + ret = nativeStartPreviewZoom(); + else + ret = nativeStartPreview(); + + goto retry; + } else { + ret = mFlite.dqbuf2(&mFliteNode); + index = ret; + CHECK_ERR_N(index, ("nativeGetPreview: error, mFlite.dqbuf")); + } + + if (mEnableDZoom) + mRecordTimestamp = systemTime(SYSTEM_TIME_MONOTONIC); + + if (!mPreviewInitialized) { + mPreviewInitialized = true; + CLOGD("DEBUG(%s) : preview started",__FUNCTION__); + } + + return index; +} + +status_t SecCameraHardware::nativePreviewCallback(int index, ExynosBuffer *grallocBuf) +{ + /* If orginal size and format(defined by app) is different to + * changed size and format(defined by hal), do CSC again. + * But orginal and changed size and format(defined by app) is same, + * just copy to callback buffer from gralloc buf + */ + + ExynosBuffer dstBuf; + dstBuf.fd.extFd[0] = mPreviewHeapFd; + getAlignedYUVSize(mPreviewFormat, mOrgPreviewSize.width, mOrgPreviewSize.height, &dstBuf); + char *srcAdr = NULL; + srcAdr = (char *)mPreviewHeap->data; + srcAdr += (index * mPreviewFrameSize); + + /* YV12 */ + if (mPreviewFormat == V4L2_PIX_FMT_NV21 || + mPreviewFormat == V4L2_PIX_FMT_NV21M) { + dstBuf.virt.extP[0] = srcAdr; + dstBuf.virt.extP[1] = dstBuf.virt.extP[0] + dstBuf.size.extS[0]; + } else if (mPreviewFormat == V4L2_PIX_FMT_YVU420 || + mPreviewFormat == V4L2_PIX_FMT_YVU420M) { + dstBuf.virt.extP[0] = srcAdr; + dstBuf.virt.extP[1] = dstBuf.virt.extP[0] + dstBuf.size.extS[0]; + dstBuf.virt.extP[2] = dstBuf.virt.extP[1] + dstBuf.size.extS[1]; + } + + if (grallocBuf == NULL || + mOrgPreviewSize.width != mPreviewSize.width || + mOrgPreviewSize.height != mPreviewSize.height || + mOrgPreviewSize.width != mFLiteSize.width || + mOrgPreviewSize.height != mFLiteSize.height || + HAL_PIXEL_FORMAT_CUSTOM_YCbCr_420_SP != V4L2_PIX_2_HAL_PIXEL_FORMAT(mPreviewFormat)) { + + /* csc start flite(s) -> fimc0 -> callback(d) */ + if (mFimc1CSC) { + /* set zoom info */ + bool flag; + flag = getCropRect(mFLiteSize.width, mFLiteSize.height, mPreviewSize.width, + mPreviewSize.height, &mPreviewZoomRect.x, &mPreviewZoomRect.y, + &mPreviewZoomRect.w, &mPreviewZoomRect.h, 2, 2, 2, 2, mZoomRatio); + if(false == flag) { + CLOGE("ERR(%s):mFLiteSize.width = %u mFLiteSize.height = %u " + "mPreviewSize.width = %u mPreviewSize.height = %u ", __func__, + mFLiteSize.width, mFLiteSize.height, mPreviewSize.width, mPreviewSize.height); + CLOGE("ERR(%s):Preview CropRect failed X = %u Y = %u W = %u H = %u ", __func__, + mPreviewZoomRect.x, mPreviewZoomRect.y, mPreviewZoomRect.w, mPreviewZoomRect.h); + } + /* src : FLite */ + CLOGV("DEBUG(%s):SRC size(%u x %u / %u, %u, %u, 
%u) ,Format(%s)", __func__, + mFLiteSize.width, mFLiteSize.height, mPreviewZoomRect.x, mPreviewZoomRect.y,mPreviewZoomRect.w, mPreviewZoomRect.h, + mFliteNode.format == CAM_PIXEL_FORMAT_YVU420P ? "YV12" : + mFliteNode.format == CAM_PIXEL_FORMAT_YUV420SP ? "NV21" : + mFliteNode.format == CAM_PIXEL_FORMAT_YUV422I ? "YUYV" : + "Others"); + + + csc_set_src_format(mFimc1CSC, + mFLiteSize.width, mFLiteSize.height, + mPreviewZoomRect.x, mPreviewZoomRect.y, + mPreviewZoomRect.w, mPreviewZoomRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + +#ifdef USE_USERPTR + csc_set_src_buffer(mFimc1CSC, (void **)mFliteNode.buffer[index].virt.extP, CSC_MEMORY_USERPTR); +#else + csc_set_dst_buffer(mFimc1CSC, (void **)mFliteNode.buffer[index].fd.extFd, CSC_MEMORY_DMABUF); +#endif + + /* dst : callback buffer(MHB) */ + CLOGV("DEBUG(%s):DST size(%u x %u), Format(%s)", __func__, + mOrgPreviewSize.width, mOrgPreviewSize.height, + mPreviewFormat == CAM_PIXEL_FORMAT_YVU420P ? "YV12" : + mPreviewFormat == CAM_PIXEL_FORMAT_YUV420SP ? "NV21" : + mPreviewFormat == CAM_PIXEL_FORMAT_YUV422I ? "YUYV" : + "Others"); + + csc_set_dst_format(mFimc1CSC, + mOrgPreviewSize.width, mOrgPreviewSize.height, + 0, 0, + mOrgPreviewSize.width, mOrgPreviewSize.height, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mPreviewFormat), + 0); + + csc_set_dst_buffer(mFimc1CSC, + (void **)dstBuf.virt.extP, CSC_MEMORY_USERPTR); + + mFimc1CSCLock.lock(); + if (csc_convert(mFimc1CSC) != 0) { + CLOGE("ERR(%s):csc_convert(mFimc1CSC) fail", __func__); + mFimc1CSCLock.unlock(); + return false; + } + mFimc1CSCLock.unlock(); + } else { + CLOGE("ERR(%s): mFimc1CSC == NULL", __func__); + return false; + } + } else { + for (int plane = 0; plane < 2; plane++) { + /* just memcpy only */ + char *srcAddr = NULL; + char *dstAddr = NULL; + srcAddr = grallocBuf->virt.extP[plane]; + dstAddr = dstBuf.virt.extP[plane]; + memcpy(dstAddr, srcAddr, dstBuf.size.extS[plane]); + } + } + /* mSaveDump("/data/camera_preview%d.yuv", &dstBuf, index); */ + + return NO_ERROR; +} + + +int SecCameraHardware::nativeReleasePreviewFrame(int index) +{ +#ifdef FAKE_SENSOR + return mFlite.fakeQbuf2(&mFliteNode, index); +#else + return mFlite.qbuf2(&mFliteNode, index); +#endif +} + +void SecCameraHardware::nativeStopPreview(void) +{ + int err = 0; + int i = 0; + ExynosBuffer nullBuf; + + err = mFlite.stream(false); + if (CC_UNLIKELY(err < 0)) + CLOGE("nativeStopPreview: error, mFlite.stream"); + +#ifdef USE_USERPTR + /* flite buffer free */ + for (i = 0; i < FLITE_BUF_CNT; i++) { + freeMem(&mFliteNode.buffer[i]); + mFliteNode.buffer[i] = nullBuf; + } +#else + for (i = 0; i < FLITE_BUF_CNT; i++) { + for (int j = 0; j < mFliteNode.planes; j++) { + munmap((void *)mFliteNode.buffer[i].virt.extP[j], + mFliteNode.buffer[i].size.extS[j]); + ion_free(mFliteNode.buffer[i].fd.extFd[j]); + } + mFliteNode.buffer[i] = nullBuf; + } + + if (mFlite.reqBufZero(&mFliteNode) < 0) + ALOGE("ERR(%s): mFlite.reqBufZero() fail", __func__); +#endif + CLOGV("INFO(%s) : out ",__FUNCTION__); +} + +bool SecCameraHardware::allocateRecordingHeap() +{ + int framePlaneSize1 = ALIGN(mVideoSize.width, 16) * ALIGN(mVideoSize.height, 16) + MFC_7X_BUFFER_OFFSET; + int framePlaneSize2 = ALIGN(mVideoSize.width, 16) * ALIGN(mVideoSize.height / 2, 16) + MFC_7X_BUFFER_OFFSET;; + + if (mRecordingHeap != NULL) { + mRecordingHeap->release(mRecordingHeap); + mRecordingHeap = 0; + mRecordHeapFd = -1; + } + +#ifdef BOARD_USE_MHB_ION + mRecordingHeap = mGetMemoryCb(-1, sizeof(struct addrs), REC_BUF_CNT, &mRecordHeapFd); + if (mRecordingHeap 
== NULL || mRecordHeapFd < 0) { + ALOGE("ERR(%s): mGetMemoryCb(mRecordingHeap(%d), size(%d) fail [Heap %p, Fd %d]",\ + __func__, REC_BUF_CNT, sizeof(struct addrs), mRecordingHeap, mRecordHeapFd); + return false; + } +#else + mRecordingHeap = mGetMemoryCb(-1, sizeof(struct addrs), REC_BUF_CNT, 0); + if (mRecordingHeap == NULL) { + ALOGE("ERR(%s): mGetMemoryCb(mRecordingHeap(%d), size(%d) fail [Heap %p]",\ + __func__, REC_BUF_CNT, sizeof(struct addrs), mRecordingHeap); + return false; + } +#endif + + for (int i = 0; i < REC_BUF_CNT; i++) { +#ifdef BOARD_USE_MHB_ION + for (int j = 0; j < REC_PLANE_CNT; j++) { + if (mRecordDstHeap[i][j] != NULL) { + mRecordDstHeap[i][j]->release(mRecordDstHeap[i][j]); + mRecordDstHeap[i][j] = 0; + mRecordDstHeapFd[i][j] = -1; + } + } + + mRecordDstHeap[i][0] = mGetMemoryCb(-1, framePlaneSize1, 1, &(mRecordDstHeapFd[i][0])); + mRecordDstHeap[i][1] = mGetMemoryCb(-1, framePlaneSize2, 1, &(mRecordDstHeapFd[i][1])); + ALOGV("DEBUG(%s): mRecordDstHeap[%d][0] adr(%p), fd(%d)", __func__, i, mRecordDstHeap[i][0]->data, mRecordDstHeapFd[i][0]); + ALOGV("DEBUG(%s): mRecordDstHeap[%d][1] adr(%p), fd(%d)", __func__, i, mRecordDstHeap[i][1]->data, mRecordDstHeapFd[i][1]); + + if (mRecordDstHeap[i][0] == NULL || mRecordDstHeapFd[i][0] <= 0 || + mRecordDstHeap[i][1] == NULL || mRecordDstHeapFd[i][1] <= 0) { + ALOGE("ERR(%s): mGetMemoryCb(mRecordDstHeap[%d] size(%d/%d) fail",\ + __func__, i, framePlaneSize1, framePlaneSize2); + goto error; + } + +#ifdef NOTDEFINED + if (mRecordDstHeap[i][j] == NULL) { + ALOGE("ERR(%s): mGetMemoryCb(mRecordDstHeap[%d][%d], size(%d) fail",\ + __func__, i, j, framePlaneSize); + goto error; + } +#endif +#else + freeMem(&mRecordingDstBuf[i]); + + mRecordingDstBuf[i].size.extS[0] = framePlaneSize1; + mRecordingDstBuf[i].size.extS[1] = framePlaneSize2; + + if (allocMem(mIonCameraClient, &mRecordingDstBuf[i], ((1 << 1) | 1)) == false) { + ALOGE("ERR(%s):allocMem(mRecordingDstBuf() fail", __func__); + goto error; + } +#endif + } + + ALOGD("DEBUG(%s): %dx%d, frame plane (%d/%d)x%d", __func__, + ALIGN(mVideoSize.width, 16), ALIGN(mVideoSize.height, 2), framePlaneSize1, framePlaneSize2, REC_BUF_CNT); + + return true; + +error: + if (mRecordingHeap == NULL) { + mRecordingHeap->release(mRecordingHeap); + mRecordingHeap = NULL; + } + + for (int i = 0; i < REC_BUF_CNT; i++) { + for (int j = 0; j < REC_PLANE_CNT; j++) { + if (mRecordDstHeap[i][j] != NULL) { + mRecordDstHeap[i][j]->release(mRecordDstHeap[i][j]); + mRecordDstHeap[i][j] = 0; + } + } + + freeMem(&mRecordingDstBuf[i]); + } + + return false; +} + +#ifdef RECORDING_CAPTURE +bool SecCameraHardware::allocateRecordingSnapshotHeap() +{ + /* init jpeg heap */ + if (mJpegHeap) { + mJpegHeap->release(mJpegHeap); + mJpegHeap = NULL; + } + +#ifdef BOARD_USE_MHB_ION + int jpegHeapFd = -1; + mJpegHeap = mGetMemoryCb(-1, mRecordingPictureFrameSize, 1, jpegHeapFd); + if (mJpegHeap == NULL || jpegHeapFd < 0) { + ALOGE("allocateRecordingSnapshotHeap: error, fail to create jpeg heap"); + return UNKNOWN_ERROR; + } +#else + mJpegHeap = mGetMemoryCb(-1, mRecordingPictureFrameSize, 1, 0); + if (mJpegHeap == NULL) { + ALOGE("allocateRecordingSnapshotHeap: error, fail to create jpeg heap"); + return UNKNOWN_ERROR; + } +#endif + + ALOGD("allocateRecordingSnapshotHeap: jpeg %dx%d, size %d", + mPictureSize.width, mPictureSize.height, mRecordingPictureFrameSize); + + return true; +} +#endif + +status_t SecCameraHardware::nativeStartRecording(void) +{ + CLOGI("INFO(%s) : in ", __FUNCTION__); +#ifdef NOTDEFINED + if (mMirror) 
{ + nativeSetParameters(CAM_CID_HFLIP, 1, 0); + nativeSetParameters(CAM_CID_HFLIP, 1, 1); + } else { + nativeSetParameters(CAM_CID_HFLIP, 0, 0); + nativeSetParameters(CAM_CID_HFLIP, 0, 1); + } + + uint32_t recordingTotalFrameSize; + mFimc1.querybuf(&recordingTotalFrameSize); + if (recordingTotalFrameSize == 0) { + ALOGE("nativeStartPreview: error, mFimc1.querybuf"); + return UNKNOWN_ERROR; + } + + if (!allocateRecordingHeap()) { + ALOGE("nativeStartRecording: error, allocateRecordingHeap"); + return NO_MEMORY; + } + + for (int i = 0; i < REC_BUF_CNT; i++) { + err = mFimc1.qbuf(i); + CHECK_ERR_N(err, ("nativeStartRecording: error, mFimc1.qbuf(%d)", i)); + } + + err = mFimc1.stream(true); + CHECK_ERR_N(err, ("nativeStartRecording: error, mFimc1.stream")); +#endif + CLOGI("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; +} + +/* for zoom recording */ +status_t SecCameraHardware::nativeStartRecordingZoom(void) +{ + int err; + Mutex::Autolock lock(mNativeRecordLock); + + CLOGI("INFO(%s) : in - (%d/%d)", __FUNCTION__, mVideoSize.width, mVideoSize.height); + + /* 1. init zoom size info */ + mRecordZoomRect.x = 0; + mRecordZoomRect.y = 0; + mRecordZoomRect.w = mVideoSize.width; + mRecordZoomRect.h = mVideoSize.height; + + /* 2. init buffer var src and dst */ + mInitRecSrcQ(); + mInitRecDstBuf(); + + /* 3. alloc MHB for recording dst */ + if (!allocateRecordingHeap()) { + ALOGE("nativeStartPostRecording: error, allocateRecordingHeap"); + goto destroyMem; + } + + CLOGI("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; + +destroyMem: + mInitRecSrcQ(); + mInitRecDstBuf(); + return UNKNOWN_ERROR; +} + +#ifdef NOTDEFINED +int SecCameraHardware::nativeGetRecording() +{ + int index; + phyaddr_t y, cbcr; + int err, retries = 3; + +retry: + err = mFimc1.polling(true); + if (CC_UNLIKELY(err <= 0)) { + if (mFimc1.getStreamStatus()) { + if (!err && (retries > 0)) { + ALOGW("nativeGetRecording: warning, wait for input (%d)", retries); + usleep(300000); + retries--; + goto retry; + } + ALOGE("nativeGetRecording: error, mFimc1.polling err=%d cnt=%d", err, retries); + } else { + ALOGV("nativeGetRecording: stop getting a frame"); + } + return UNKNOWN_ERROR; + } + + index = mFimc1.dqbuf(); + CHECK_ERR_N(index, ("nativeGetRecording: error, mFimc1.dqbuf")); + + /* get fimc capture physical addr */ + err = mFimc1.getYuvPhyaddr(index, &y, &cbcr); + CHECK_ERR_N(err, ("nativeGetRecording: error, mFimc1.getYuvPhyaddr")); + + Mutex::Autolock lock(mNativeRecordLock); + + if (!mRecordingHeap) + return NO_MEMORY; + + struct record_heap *heap = (struct record_heap *)mRecordingHeap->data; + heap[index].type = kMetadataBufferTypeCameraSource; + heap[index].y = y; + heap[index].cbcr = cbcr; + heap[index].buf_index = index; + + return index; +} + +int SecCameraHardware::nativeReleaseRecordingFrame(int index) +{ + return mFimc1.qbuf(index); +} + +int SecCameraHardware::nativeReleasePostRecordingFrame(int index) +{ + return NO_ERROR; +} +void SecCameraHardware::nativeStopPostRecording() +{ + Mutex::Autolock lock(mNativeRecordLock); + + ALOGD("nativeStopPostRecording EX"); +} +#endif + +void SecCameraHardware::nativeStopRecording() +{ + Mutex::Autolock lock(mNativeRecordLock); + + mInitRecSrcQ(); + mInitRecDstBuf(); + + ALOGD("nativeStopRecording EX"); +} + +bool SecCameraHardware::getCropRect(unsigned int src_w, unsigned int src_h, + unsigned int dst_w, unsigned int dst_h, unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int align_x, int align_y, int align_w, int align_h, float 
zoomRatio) +{ + bool flag = true; + *crop_w = src_w; + *crop_h = src_h; + + if (src_w == 0 || src_h == 0 || dst_w == 0 || dst_h == 0) { + ALOGE("ERR(%s):width or height valuse is 0, src(%dx%d), dst(%dx%d)", + __func__, src_w, src_h, dst_w, dst_h); + return false; + } + + /* Calculation aspect ratio */ + if (src_w != dst_w + || src_h != dst_h) { + float src_ratio = 1.0f; + float dst_ratio = 1.0f; + + /* ex : 1024 / 768 */ + src_ratio = (float)src_w / (float)src_h; + + /* ex : 352 / 288 */ + dst_ratio = (float)dst_w / (float)dst_h; + + if (dst_ratio <= src_ratio) { + /* shrink w */ + *crop_w = src_h * dst_ratio; + *crop_h = src_h; + } else { + /* shrink h */ + *crop_w = src_w; + *crop_h = src_w / dst_ratio; + } + } + + flag = getRectZoomAlign(src_w, src_h, dst_w, dst_h, crop_x, crop_y, + crop_w, crop_h, align_x, align_y, align_w, align_h, zoomRatio); + if(false == flag) { + ALOGE("ERR(%s):src_w = %u src_h = %u dst_w = %u dst_h = %u " + "crop_x = %u crop_y = %u crop_w = %u crop_h = %u " + "align_w = %d align_h = %d zoom = %f", __func__, + src_w, src_h, dst_w, dst_h, *crop_x, *crop_y, + *crop_w, *crop_h, align_w, align_h, zoomRatio); + } + + return true; +} + +bool SecCameraHardware::getRectZoomAlign(unsigned int src_w, unsigned int src_h, + unsigned int dst_w, unsigned int dst_h, unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int align_x, int align_y, int align_w, int align_h, float zoomRatio) +{ + int x = 0; + int y = 0; + + if (zoomRatio != 0) { + /* calculation zoom */ + *crop_w = (unsigned int)((float)*crop_w * 1000 / zoomRatio); + *crop_h = (unsigned int)((float)*crop_h * 1000 / zoomRatio); + } + + /* Alignment by desired size */ + unsigned int w_remain = (*crop_w & (align_w - 1)); + if (w_remain != 0) { + if ( (unsigned int)(align_w >> 1) <= w_remain + && (unsigned int)(*crop_w + (align_w - w_remain)) <= src_w) { + *crop_w += (align_w - w_remain); + } + else + *crop_w -= w_remain; + } + + unsigned int h_remain = (*crop_h & (align_h - 1)); + if (h_remain != 0) { + if ( (unsigned int)(align_h >> 1) <= h_remain + && (unsigned int)(*crop_h + (align_h - h_remain)) <= src_h) { + *crop_h += (align_h - h_remain); + } + else + *crop_h -= h_remain; + } + + x = ((int)src_w - (int)*crop_w) >> 1; + y = ((int)src_h - (int)*crop_h) >> 1; + + if (x < 0 || y < 0) { + ALOGE("ERR(%s):crop size too big (%u, %u, %u, %u)", + __func__, *crop_x, *crop_y, *crop_w, *crop_h); + return false; + } + + *crop_x = ALIGN_DOWN(x, align_x); + *crop_y = ALIGN_DOWN(y, align_y); + + return true; +} + +status_t SecCameraHardware::nativeCSCPreview(int index, int type) +{ + ExynosBuffer dstBuf; + char *dstAdr = NULL; + dstAdr = (char *)mPreviewHeap->data; + dstAdr += (index * mPreviewFrameSize); + + if (mFimc1CSC) { + /* set zoom info */ + mPreviewZoomRect.w = ALIGN_DOWN((uint32_t)((float)mFLiteSize.width * 1000 / mZoomRatio), 2); + mPreviewZoomRect.h = ALIGN_DOWN((uint32_t)((float)mFLiteSize.height * 1000 / mZoomRatio), 2); + + mPreviewZoomRect.x = ALIGN_DOWN(((mFLiteSize.width - mPreviewZoomRect.w) / 2), 2); + mPreviewZoomRect.y = ALIGN_DOWN(((mFLiteSize.height - mPreviewZoomRect.h) / 2), 2); + +#ifdef DEBUG_PREVIEW + CLOGD("DEBUG PREVIEW(%s) (%d): src(%d,%d,%d,%d,%d,%d), dst(%d,%d), fmt(%d)" + , __FUNCTION__, __LINE__ + , mFLiteSize.width + , mFLiteSize.height + , mPreviewZoomRect.x + , mPreviewZoomRect.y + , mPreviewZoomRect.w + , mPreviewZoomRect.h + , mPreviewSize.width + , mPreviewSize.height + , V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format) + ); +#endif + + /* src : FLite 
*/ + csc_set_src_format(mFimc1CSC, + mFLiteSize.width, mFLiteSize.height, + mPreviewZoomRect.x, mPreviewZoomRect.y, + mPreviewZoomRect.w, mPreviewZoomRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + if (type == CAMERA_HEAP_POSTVIEW) { + csc_set_src_buffer(mFimc1CSC, (void **)mPictureBufDummy[0].virt.extP, CSC_MEMORY_USERPTR); + } else { + csc_set_src_buffer(mFimc1CSC, (void **)mFliteNode.buffer[index].virt.extP, CSC_MEMORY_USERPTR); + } + + /* mSaveDump("/data/camera_preview%d.yuv", &mFliteNode.buffer[index], index); */ + + int halPixelFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL; + + /* dst : GRALLOC */ + csc_set_dst_format(mFimc1CSC, + mPreviewSize.width, mPreviewSize.height, + 0, 0, + mPreviewSize.width, mPreviewSize.height, + halPixelFormat, + 0); + + dstBuf.fd.extFd[0] = mPreviewHeapFd; + dstBuf.virt.extP[0] = dstAdr; + dstBuf.size.extS[0] = mPreviewSize.width * mPreviewSize.height; + dstBuf.virt.extP[1] = dstBuf.virt.extP[0] + dstBuf.size.extS[0]; + dstBuf.size.extS[1] = mPreviewSize.width * mPreviewSize.height / 2; + + csc_set_dst_buffer(mFimc1CSC, (void **) dstBuf.virt.extP, CSC_MEMORY_USERPTR); + + mFimc1CSCLock.lock(); + if (csc_convert(mFimc1CSC) != 0) { + ALOGE("ERR(%s):csc_convert(mFimc1CSC) fail", __func__); + mFimc1CSCLock.unlock(); + return false; + } + mFimc1CSCLock.unlock(); + } + return true; +} + +status_t SecCameraHardware::nativeCSCRecording(rec_src_buf_t *srcBuf, int dstIdx) +{ + Mutex::Autolock lock(mNativeRecordLock); + + /* csc start flite(s) -> fimc1 -> callback(d) */ + if (mFimc2CSC) { + struct addrs *addrs; + bool flag; + /* set zoom info */ + flag = getCropRect(mFLiteSize.width, mFLiteSize.height, mVideoSize.width, + mVideoSize.height, &mRecordZoomRect.x, &mRecordZoomRect.y, + &mRecordZoomRect.w, &mRecordZoomRect.h, 2, 2, 2, 2, mZoomRatio); + if(false == flag) { + ALOGE("ERR(%s):mFLiteSize.width = %u mFLiteSize.height = %u " + "mVideoSize.width = %u mVideoSize.height = %u ", __func__, + mFLiteSize.width, mFLiteSize.height, mVideoSize.width, mVideoSize.height); + ALOGE("ERR(%s):Recording CropRect failed X = %u Y = %u W = %u H = %u ", __func__, + mRecordZoomRect.x, mRecordZoomRect.y, mRecordZoomRect.w, mRecordZoomRect.h); + } + +#ifdef DEBUG_RECORDING + ALOGD("DEBUG RECORDING(%s) (%d): src(%d,%d,%d,%d,%d,%d), dst(%d,%d) %d", __func__, __LINE__ + , mFLiteSize.width + , mFLiteSize.height + , mRecordZoomRect.x + , mRecordZoomRect.y + , mRecordZoomRect.w + , mRecordZoomRect.h + , mVideoSize.width + , mVideoSize.height + , dstIdx + ); +#endif + /* src : FLite */ + csc_set_src_format(mFimc2CSC, + mFLiteSize.width, mFLiteSize.height, + mRecordZoomRect.x, mRecordZoomRect.y, + mRecordZoomRect.w, mRecordZoomRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + csc_set_src_buffer(mFimc2CSC, (void **)srcBuf->buf->fd.extFd, CSC_MEMORY_DMABUF); + //csc_set_src_buffer(mFimc2CSC, (void **)srcBuf->buf->virt.extP, CSC_MEMORY_USERPTR); + + /* dst : MHB(callback */ + csc_set_dst_format(mFimc2CSC, + mVideoSize.width, mVideoSize.height, + 0, 0, mVideoSize.width, mVideoSize.height, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mRecordingFormat), + /* HAL_PIXEL_FORMAT_CUSTOM_YCbCr_420_SP, */ + 0); + + ExynosBuffer dstBuf; + getAlignedYUVSize(mRecordingFormat, mVideoSize.width, mVideoSize.height, &dstBuf); + for (int i = 0; i < REC_PLANE_CNT; i++) { +#if defined(ALLOCATION_REC_BUF_BY_MEM_CB) + dstBuf.virt.extP[i] = (char *)mRecordDstHeap[dstIdx][i]->data; + dstBuf.fd.extFd[i] = mRecordDstHeapFd[dstIdx][i]; +#else + dstBuf.virt.extP[i] = (char 
*)mRecordingDstBuf[dstIdx].virt.extP[i]; + dstBuf.fd.extFd[i] = mRecordingDstBuf[dstIdx].fd.extFd[i]; +#endif + } + +#ifdef DEBUG_RECORDING + ALOGD("DEBUG(%s) (%d): dst(%d,%d,%p,%p,%d,%d) index(%d)", __func__, __LINE__ + , dstBuf.fd.extFd[0] + , dstBuf.fd.extFd[1] + , dstBuf.virt.extP[0] + , dstBuf.virt.extP[1] + , dstBuf.size.extS[0] + , dstBuf.size.extS[1] + , dstIdx + ); +#endif + csc_set_dst_buffer(mFimc2CSC, (void **)dstBuf.fd.extFd, CSC_MEMORY_DMABUF); + + mFimc2CSCLock.lock(); + if (csc_convert_with_rotation(mFimc2CSC, 0, mflipHorizontal, mflipVertical) != 0) { + ALOGE("ERR(%s):csc_convert(mFimc2CSC) fail", __func__); + mFimc2CSCLock.unlock(); + return false; + } + mFimc2CSCLock.unlock(); + + addrs = (struct addrs *)mRecordingHeap->data; + addrs[dstIdx].type = kMetadataBufferTypeCameraSource; + addrs[dstIdx].fd_y = (unsigned int)dstBuf.fd.extFd[0]; + addrs[dstIdx].fd_cbcr = (unsigned int)dstBuf.fd.extFd[1]; + addrs[dstIdx].buf_index = dstIdx; + /* ALOGV("DEBUG(%s): After CSC Camera Meta index %d fd(%d, %d)", __func__, dstIdx, addrs[dstIdx].fd_y, addrs[dstIdx].fd_cbcr); */ + /* mSaveDump("/data/camera_recording%d.yuv", &dstBuf, dstIdx); */ + } else { + ALOGE("ERR(%s): mFimc2CSC == NULL", __func__); + return false; + } + return true; +} + +status_t SecCameraHardware::nativeCSCCapture(ExynosBuffer *srcBuf, ExynosBuffer *dstBuf) +{ + bool flag; + + if (mFimc2CSC) { + /* set zoom info */ + flag = getCropRect(mFLiteCaptureSize.width, mFLiteCaptureSize.height, mPictureSize.width, + mPictureSize.height, &mPictureZoomRect.x, &mPictureZoomRect.y, + &mPictureZoomRect.w, &mPictureZoomRect.h, 2, 2, 2, 2, mZoomRatio); + + if(false == flag) { + ALOGE("ERR(%s):mFLiteCaptureSize.width = %u mFLiteCaptureSize.height = %u " + "mPictureSize.width = %u mPictureSize.height = %u ", __func__, + mFLiteCaptureSize.width, mFLiteCaptureSize.height, mPictureSize.width, mPictureSize.height); + ALOGE("ERR(%s):Capture CropRect failed X = %u Y = %u W = %u H = %u ", __func__, + mPictureZoomRect.x, mPictureZoomRect.y, mPictureZoomRect.w, mPictureZoomRect.h); + } + +#ifdef DEBUG_CAPTURE + ALOGD("DEBUG(%s) (%d): (%d, %d), (%d, %d), (%d, %d, %d, %d)", __func__, __LINE__ + , mFLiteCaptureSize.width + , mFLiteCaptureSize.height + , mPictureSize.width + , mPictureSize.height + , mPictureZoomRect.x + , mPictureZoomRect.y + , mPictureZoomRect.w + , mPictureZoomRect.h + ); +#endif + /* src : FLite */ + csc_set_src_format(mFimc2CSC, + mFLiteCaptureSize.width, mFLiteCaptureSize.height, + mPictureZoomRect.x, mPictureZoomRect.y, + mPictureZoomRect.w, mPictureZoomRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + csc_set_src_buffer(mFimc2CSC, (void **)srcBuf->virt.extP, CSC_MEMORY_USERPTR); + + /* dst : buffer */ +#ifdef USE_NV21_CALLBACK + if (mPictureFormat == CAM_PIXEL_FORMAT_YUV420SP) { + csc_set_dst_format(mFimc2CSC, + mPictureSize.width, mPictureSize.height, + 0, 0, mPictureSize.width, mPictureSize.height, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mPictureFormat), + 0); + + csc_set_dst_buffer(mFimc2CSC, (void **)dstBuf->virt.extP, CSC_MEMORY_USERPTR); + } + else +#endif + { + csc_set_dst_format(mFimc2CSC, + mPictureSize.width, mPictureSize.height, + 0, 0, mPictureSize.width, mPictureSize.height, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + csc_set_dst_buffer(mFimc2CSC, (void **)dstBuf->fd.extFd, CSC_MEMORY_DMABUF); + } + + mFimc2CSCLock.lock(); + if (mCameraId == CAMERA_ID_FRONT) { + if (csc_convert_with_rotation(mFimc2CSC, 0, mflipHorizontal, mflipVertical) != 0) { + 
ALOGE("ERR(%s):csc_convert_with_rotation(mFimc2CSC) fail", __func__); + mFimc2CSCLock.unlock(); + return false; + } + } else { + if (csc_convert(mFimc2CSC) != 0) { + ALOGE("ERR(%s):csc_convert(mFimc2CSC) fail", __func__); + mFimc2CSCLock.unlock(); + return false; + } + } + mFimc2CSCLock.unlock(); + /* mSaveDump("/data/camera_recording%d.yuv", &dstBuf, dstIdx); */ + } else { + ALOGE("ERR(%s): mFimc2CSC == NULL", __func__); + return false; + } + return true; +} + +status_t SecCameraHardware::nativeCSCRecordingCapture(ExynosBuffer *srcBuf, ExynosBuffer *dstBuf) +{ + if (mFimc2CSC) { + ALOGD("DEBUG(%s) (%d) src : mFLiteSize(%d x %d), mPreviewZoomRect(%d, %d, %d, %d)", __FUNCTION__, __LINE__, + mFLiteSize.width, + mFLiteSize.height, + mPreviewZoomRect.x, + mPreviewZoomRect.y, + mPreviewZoomRect.w, + mPreviewZoomRect.h); + + ALOGD("DEBUG(%s) (%d) dst : (%d x %d)", __FUNCTION__, __LINE__, + mPreviewSize.width, mPreviewSize.height); + + int dstW = mPictureSize.width; + int dstH = mPictureSize.height; + + /* src : FLite */ + csc_set_src_format(mFimc2CSC, + mFLiteSize.width, mFLiteSize.height, + mPreviewZoomRect.x, mPreviewZoomRect.y, + mPreviewZoomRect.w, mPreviewZoomRect.h, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + csc_set_src_buffer(mFimc2CSC, (void **)srcBuf->virt.extP, CSC_MEMORY_USERPTR); + + /* dst : buffer */ + csc_set_dst_format(mFimc2CSC, + dstW, dstH, + 0, 0, dstW, dstH, + V4L2_PIX_2_HAL_PIXEL_FORMAT(mFliteNode.format), + 0); + + csc_set_dst_buffer(mFimc2CSC, (void **)dstBuf->fd.extFd, CSC_MEMORY_DMABUF); + + mFimc2CSCLock.lock(); + if (csc_convert(mFimc2CSC) != 0) { + ALOGE("ERR(%s):csc_convert(mFimc2CSC) fail", __func__); + mFimc2CSCLock.unlock(); + return INVALID_OPERATION; + } + mFimc2CSCLock.unlock(); + /* mSaveDump("/data/camera_recording%d.yuv", &dstBuf, dstIdx); */ + } else { + ALOGE("ERR(%s): mFimc1CSC == NULL", __func__); + return INVALID_OPERATION; + } + return NO_ERROR; +} + +int SecCameraHardware::nativegetWBcustomX() +{ + int x_value, err; + + err = mFlite.gctrl(V4L2_CID_CAMERA_WB_CUSTOM_X, &x_value); + CHECK_ERR(err, ("nativegetWBcustomX: error [%d]", x_value)); + + ALOGV("%s res[%d]", __func__, x_value); + + return x_value; +} + +int SecCameraHardware::nativegetWBcustomY() +{ + int y_value, err; + + err = mFlite.gctrl(V4L2_CID_CAMERA_WB_CUSTOM_Y, &y_value); + CHECK_ERR(err, ("nativegetWBcustomY: error [%d]", y_value)); + + ALOGV("%s res[%d]", __func__, y_value); + + return y_value; +} + + + +status_t SecCameraHardware::nativeSetZoomRatio(int value) +{ + /* Calculation of the crop information */ + mZoomRatio = getZoomRatio(value); + + ALOGD("%s: Zoomlevel = %d, mZoomRatio = %f", __FUNCTION__, value, mZoomRatio); + + return NO_ERROR; +} + +#ifdef RECORDING_CAPTURE +bool SecCameraHardware::nativeGetRecordingJpeg(ExynosBuffer *yuvBuf, uint32_t width, uint32_t height) +{ + bool ret = false; + + Exif exif(mCameraId); + + uint8_t *outBuf; + int jpegSize = 0; + int thumbSize = 0; + uint32_t exifSize = 0; + + ExynosBuffer jpegBuf; + jpegBuf.size.extS[0] = width * height * 2; + + ExynosBuffer exifBuf; + exifBuf.size.extS[0] = EXIF_MAX_LEN; + + ExynosBuffer thumbnailYuvBuf; + thumbnailYuvBuf.size.extS[0] = mThumbnailSize.width * mThumbnailSize.height * 2; + + bool thumbnail = false; + + /* Thumbnail */ + LOG_PERFORMANCE_START(1); + + /* use for both thumbnail and main jpeg */ + if (allocMem(mIonCameraClient, &jpegBuf, 1 << 1) == false) { + ALOGE("ERR(%s):(%d)allocMem(jpegBuf) fail", __func__, __LINE__); + goto jpeg_encode_done; + } + + if (mThumbnailSize.width == 0 
|| mThumbnailSize.height == 0) + goto encode_jpeg; + + if (allocMem(mIonCameraClient, &thumbnailYuvBuf, 1 << 1) == false) { + ALOGE("ERR(%s):(%d)allocMem(thumbnailYuvBuf) fail", __func__, __LINE__); + goto encode_jpeg; + } + + LOG_PERFORMANCE_START(3); + + scaleDownYuv422((uint8_t *)yuvBuf->virt.extP[0], (int)width, (int)height, + (uint8_t *)thumbnailYuvBuf.virt.extP[0], (int)mThumbnailSize.width, (int)mThumbnailSize.height); + + LOG_PERFORMANCE_END(3, "scaleDownYuv422"); + + if (this->EncodeToJpeg(&thumbnailYuvBuf, &jpegBuf, + mThumbnailSize.width, mThumbnailSize.height, + CAM_PIXEL_FORMAT_YUV422I, + &thumbSize, + JPEG_THUMBNAIL_QUALITY) != NO_ERROR) { + ALOGE("ERR(%s):(%d)EncodeToJpeg", __func__, __LINE__); + goto encode_jpeg; + } + + outBuf = (uint8_t *)jpegBuf.virt.extP[0]; + + thumbnail = true; + + LOG_PERFORMANCE_END(1, "encode thumbnail"); + +encode_jpeg: + /* EXIF */ + setExifChangedAttribute(); + + if (allocMem(mIonCameraClient, &exifBuf, 1 << 1) == false) { + ALOGE("ERR(%s):(%d)allocMem(exifBuf) fail", __func__, __LINE__); + goto jpeg_encode_done; + } + +#if 0 //##mmkim for test + if (CC_LIKELY(thumbnail)) + exifSize = exif.make(exifBuf.virt.extP[0], &mExifInfo, exifBuf.size.extS[0], outBuf, thumbSize); + else + exifSize = exif.make(exifBuf.virt.extP[0], &mExifInfo); + + if (CC_UNLIKELY(!exifSize)) { + ALOGE("getJpeg: error, fail to make EXIF"); + goto jpeg_encode_done; + } +#endif + /* Jpeg */ + LOG_PERFORMANCE_START(2); + +#ifdef SAMSUNG_JPEG_QUALITY_ADJUST_TARGET + adjustJpegQuality(); +#endif + + if (this->EncodeToJpeg(yuvBuf, &jpegBuf, + width, height, + CAM_PIXEL_FORMAT_YUV422I, + &jpegSize, + mJpegQuality) != NO_ERROR) { + ALOGE("ERR(%s):(%d)EncodeToJpeg", __func__, __LINE__); + goto jpeg_encode_done; + } + + outBuf = (uint8_t *)jpegBuf.virt.extP[0]; + + LOG_PERFORMANCE_END(2, "encode jpeg"); + + LOG_PERFORMANCE_START(4); + mRecordingPictureFrameSize = jpegSize + exifSize; + /* picture frame size is should be calculated before call allocateSnapshotHeap */ + if (!allocateRecordingSnapshotHeap()) { + ALOGE("getJpeg: error, allocateSnapshotHeap"); + return UNKNOWN_ERROR; + } + + memcpy(mJpegHeap->data, outBuf, 2); + memcpy((uint8_t *)mJpegHeap->data + 2, exifBuf.virt.extP[0], exifSize); + memcpy((uint8_t *)mJpegHeap->data + 2 + exifSize, outBuf + 2, jpegSize - 2); + LOG_PERFORMANCE_END(4, "jpeg + exif"); + + ret = true; + +jpeg_encode_done: + + freeMem(&thumbnailYuvBuf); + freeMem(&exifBuf); + freeMem(&jpegBuf); + + return ret; +} +#endif + +#if FRONT_ZSL +bool SecCameraHardware::allocateFullPreviewHeap() +{ + if (mFullPreviewHeap) { + mFullPreviewHeap->release(mFullPreviewHeap); + mFullPreviewHeap = NULL; + } + + mFullPreviewHeap = mGetMemoryCb((int)mFimc1.getfd(), + mFullPreviewFrameSize, kBufferZSLCount, 0); + if (!mFullPreviewHeap || mFullPreviewHeap->data == MAP_FAILED) { + CLOGE("ERR(%s): heap creation fail", __func__); + return false; + } + + CLOGD("allocateFullPreviewHeap: %dx%d, frame %dx%d", + mPictureSize.width, mPictureSize.height, mFullPreviewFrameSize, kBufferZSLCount); + + return true; +} + +status_t SecCameraHardware::nativeStartFullPreview(void) +{ + CLOGD("INFO(%s) : in ",__FUNCTION__); + + int err; + cam_pixel_format captureFormat = CAM_PIXEL_FORMAT_YUV422I; + + err = mFimc1.startCapture(&mPictureSize, captureFormat, kBufferZSLCount, 0); + CHECK_ERR_N(err, ("nativeStartFullPreview: error, mFimc1.start")); + + mFimc1.querybuf(&mFullPreviewFrameSize); + if (mFullPreviewFrameSize == 0) { + CLOGE("nativeStartFullPreview: error, mFimc1.querybuf"); + return 
UNKNOWN_ERROR; + } + + if (!allocateFullPreviewHeap()) { + ALOGE("nativeStartFullPreview: error, allocateFullPreviewHeap"); + return NO_MEMORY; + } + + for (int i = 0; i < kBufferZSLCount; i++) { + err = mFimc1.qbuf(i); + CHECK_ERR_N(err, ("nativeStartFullPreview: error, mFimc1.qbuf(%d)", i)); + } + + rawImageMem = new MemoryHeapBase(mFullPreviewFrameSize); + + err = mFimc1.stream(true); + CHECK_ERR_N(err, ("nativeStartFullPreview: error, mFimc1.stream")); + + CLOGD("INFO(%s) : out ",__FUNCTION__); + return NO_ERROR; +} + +int SecCameraHardware::nativeGetFullPreview() +{ + int index; + phyaddr_t y, cbcr; + int err; + + err = mFimc1.polling(); + CHECK_ERR_N(err, ("nativeGetFullPreview: error, mFimc1.polling")); + + index = mFimc1.dqbuf(); + CHECK_ERR_N(index, ("nativeGetFullPreview: error, mFimc1.dqbuf")); + + mJpegIndex = index; + + return index; +} + +int SecCameraHardware::nativeReleaseFullPreviewFrame(int index) +{ + return mFimc1.qbuf(index); +} + +void SecCameraHardware::nativeStopFullPreview() +{ + if (mFimc1.stream(false) < 0) + CLOGE("nativeStopFullPreview X: error, mFimc1.stream(0)"); + + if (mFullPreviewHeap) { + mFullPreviewHeap->release(mFullPreviewHeap); + mFullPreviewHeap = NULL; + } + + rawImageMem.clear(); + + CLOGD("INFO(%s) : out ",__FUNCTION__); +} + +void SecCameraHardware::nativeForceStopFullPreview() +{ + mFimc1.forceStop(); +} + +bool SecCameraHardware::getZSLJpeg() +{ + int ret; + + ALOGE("%s:: mJpegIndex : %d", __func__, mJpegIndex); + +#ifdef SUPPORT_64BITS + memcpy( (unsigned char *)rawImageMem->base(), + (unsigned char *)((unsigned long)mFullPreviewHeap->data + mJpegIndex * mFullPreviewFrameSize), + mFullPreviewFrameSize ); +#else + memcpy( (unsigned char *)rawImageMem->base(), + (unsigned char *)((unsigned int)mFullPreviewHeap->data + mJpegIndex * mFullPreviewFrameSize), + mFullPreviewFrameSize ); +#endif + + sp thumbnailJpeg; + sp rawThumbnail; + + unsigned char *thumb; + int thumbSize = 0; + + bool thumbnail = false; + /* Thumbnail */ + LOG_PERFORMANCE_START(1); + + if (mThumbnailSize.width == 0 || mThumbnailSize.height == 0) + goto encode_jpeg; + + rawThumbnail = new MemoryHeapBase(mThumbnailSize.width * mThumbnailSize.height * 2); + + LOG_PERFORMANCE_START(3); +#ifdef USE_HW_SCALER + ret = scaleDownYUVByFIMC((unsigned char *)rawImageMem->base(), + (int)mPictureSize.width, + (int)mPictureSize.height, + (unsigned char *)rawThumbnail->base(), + (int)mThumbnailSize.width, + (int)mThumbnailSize.height, + CAM_PIXEL_FORMAT_YUV422I); + + if (!ret) { + CLOGE("Fail to scale down YUV data for thumbnail!\n"); + goto encode_jpeg; + } +#else + ret = scaleDownYuv422((unsigned char *)rawImageMem->base(), + (int)mPictureSize.width, + (int)mPictureSize.height, + (unsigned char *)rawThumbnail->base(), + (int)mThumbnailSize.width, + (int)mThumbnailSize.height); + + if (!ret) { + CLOGE("Fail to scale down YUV data for thumbnail!\n"); + goto encode_jpeg; + } +#endif + LOG_PERFORMANCE_END(3, "scaleDownYuv422"); + + thumbnailJpeg = new MemoryHeapBase(mThumbnailSize.width * mThumbnailSize.height * 2); + +#ifdef CHG_ENCODE_JPEG + ret = EncodeToJpeg((unsigned char*)rawThumbnail->base(), + mThumbnailSize.width, + mThumbnailSize.height, + CAM_PIXEL_FORMAT_YUV422I, + (unsigned char*)thumbnailJpeg->base(), + &thumbSize, + JPEG_THUMBNAIL_QUALITY); + + if (ret != NO_ERROR) { + ALOGE("thumbnail:EncodeToJpeg failed\n"); + goto encode_jpeg; + } +#endif + if (thumbSize > MAX_THUMBNAIL_SIZE) { + ALOGE("thumbnail size is over limit\n"); + goto encode_jpeg; + } + + thumb = (unsigned char 
*)thumbnailJpeg->base(); + thumbnail = true; + + LOG_PERFORMANCE_END(1, "encode thumbnail"); + +encode_jpeg: + + /* EXIF */ + setExifChangedAttribute(); + + Exif exif(mCameraId); + uint32_t exifSize; + + unsigned char *jpeg; + int jpegSize = 0; + int jpegQuality = mParameters.getInt(CameraParameters::KEY_JPEG_QUALITY); + + sp JpegHeap = new MemoryHeapBase(mPictureSize.width * mPictureSize.height * 2); + sp exifHeap = new MemoryHeapBase(EXIF_MAX_LEN); + if (!initialized(exifHeap)) { + ALOGE("getJpeg: error, could not initialize Camera exif heap"); + return false; + } + + if (!thumbnail) + exifSize = exif.make(exifHeap->base(), &mExifInfo); + else + exifSize = exif.make(exifHeap->base(), &mExifInfo, exifHeap->getSize(), thumb, thumbSize); + +#ifdef CHG_ENCODE_JPEG +#ifdef SAMSUNG_JPEG_QUALITY_ADJUST_TARGET + adjustJpegQuality(); +#endif + + ret = EncodeToJpeg((unsigned char*)rawImageMem->base(), + mPictureSize.width, + mPictureSize.height, + CAM_PIXEL_FORMAT_YUV422I, + (unsigned char*)JpegHeap->base(), + &jpegSize, + mJpegQuality); + + if (ret != NO_ERROR) { + ALOGE("EncodeToJpeg failed\n"); + return false; + } +#endif + mPictureFrameSize = jpegSize + exifSize; + + /* picture frame size is should be calculated before call allocateSnapshotHeap */ + if (!allocateSnapshotHeap()) { + CLOGE("getJpeg: error, allocateSnapshotHeap"); + return false; + } + + jpeg = (unsigned char *)JpegHeap->base(); + memcpy((unsigned char *)mJpegHeap->data, jpeg, 2); + memcpy((unsigned char *)mJpegHeap->data + 2, exifHeap->base(), exifSize); + memcpy((unsigned char *)mJpegHeap->data + 2 + exifSize, jpeg + 2, jpegSize - 2); + + return true; +} +#endif + +bool SecCameraHardware::allocatePostviewHeap() +{ + cam_pixel_format postviewFmt; + + CLOGD("INFO(%s) : in ",__FUNCTION__); + if ( mPostviewHeap && mPostviewHeap->size == mPostviewFrameSize ) + return true; + + if (mPostviewHeap) { + mPostviewHeap->release(mPostviewHeap); + mPostviewHeap = 0; + } + + if (mPostviewHeapTmp) { + mPostviewHeapTmp->release(mPostviewHeapTmp); + mPostviewHeapTmp = NULL; + } + + postviewFmt = CAM_PIXEL_FORMAT_YUV422I; + + mPostviewFrameSize = getAlignedYUVSize(postviewFmt, mPostviewSize.width, mPostviewSize.height, NULL); + mPostviewHeap = mGetMemoryCb(-1, mPostviewFrameSize, 1, &mPostviewHeapFd); + if (!mPostviewHeap || mPostviewHeap->data == MAP_FAILED) { + CLOGE("ERR(%s): Virtual postview heap creation fail", __func__); + return false; + } + + mPostviewHeapTmp = mGetMemoryCb(-1, mPostviewFrameSize, 1, &mPostviewHeapTmpFd); + if (!mPostviewHeapTmp || mPostviewHeapTmp->data == MAP_FAILED) { + CLOGE("ERR(%s): Virtual postview heap creation fail", __func__); + return false; + } + + CLOGD("allocatePostviewHeap: postview %dx%d, frame %d", + mPostviewSize.width, mPostviewSize.height, mPostviewFrameSize); + + return true; +} + +bool SecCameraHardware::allocateSnapshotHeap() +{ + /* init jpeg heap */ + if (mJpegHeap) { + mJpegHeap->release(mJpegHeap); + mJpegHeap = 0; + } + + mJpegHeap = mGetMemoryCb(-1, mPictureFrameSize, 1, &mJpegHeapFd); + if (mJpegHeap == NULL || mJpegHeap->data == MAP_FAILED) { + CLOGE("ERR(%s): Jpeg heap creation fail", __func__); + if (mJpegHeap) { + mJpegHeap->release(mJpegHeap); + mJpegHeap = NULL; + } + return false; + } + + CLOGD("allocateSnapshotHeap: jpeg %dx%d, size %d", + mPictureSize.width, mPictureSize.height, mPictureFrameSize); + +#if 0 + /* RAW_IMAGE or POSTVIEW_FRAME heap */ + if ((mMsgEnabled & CAMERA_MSG_RAW_IMAGE) || (mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME)) { + mRawFrameSize = 
getAlignedYUVSize(mFliteFormat, mRawSize.width, mRawSize.height, NULL); + mRawHeap = mGetMemoryCb(-1, mRawFrameSize, 1, &mRawHeapFd); + + ALOGD("allocateSnapshotHeap: postview %dx%d, frame %d", + mRawSize.width, mRawSize.height, mRawFrameSize); + } +#endif + + return true; +} + +bool SecCameraHardware::allocateHDRHeap() +{ + /* init postview heap */ + if (mHDRHeap) { + mHDRHeap->release(mHDRHeap); + mHDRHeap = NULL; + } + + mRawSize = mPictureSize; + mHDRFrameSize = mRawSize.width*mRawSize.height*2; + mHDRHeap = mGetMemoryCb(-1, mHDRFrameSize, 1, &mHDRHeapFd); + if (!mHDRHeap || mHDRHeap->data == MAP_FAILED) { + ALOGE("ERR(%s): HDR heap creation fail", __func__); + goto out; + } + + return true; + + out: + + if (mHDRHeap) { + mHDRHeap->release(mHDRHeap); + mHDRHeap = NULL; + } + + return false; +} + +#ifndef RCJUNG +bool SecCameraHardware::allocateYUVHeap() +{ + /* init YUV main image heap */ + if (mYUVHeap) { + mYUVHeap->release(mYUVHeap); + mYUVHeap = 0; + } + + mYUVHeap = mGetMemoryCb(-1, mRawFrameSize, 1, 0); + if (!mYUVHeap || mYUVHeap->data == MAP_FAILED) { + ALOGE("ERR(%s): YUV heap creation fail", __func__); + goto out; + } + + ALOGD("allocateYUVHeap: YUV %dx%d, frame %d", + mOrgPreviewSize.width, mOrgPreviewSize.height, mRawFrameSize); + + return true; + + out: + + if (mYUVHeap) { + mYUVHeap->release(mYUVHeap); + mYUVHeap = NULL; + } + + return false; +} +#endif + +void SecCameraHardware::nativeMakeJpegDump() +{ + int postviewOffset; + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); + ExynosBuffer jpegBuf; + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); + if (getJpegOnBack(&postviewOffset) >= 0) { + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); + jpegBuf.size.extS[0] = mPictureFrameSize; + jpegBuf.virt.extP[0] = (char *)mJpegHeap->data; + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); + mSaveDump("/data/camera_%d.jpeg", &jpegBuf, 0); + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); + } else { + CLOGV("DEBUG(%s): (%d) fail!!!!", __FUNCTION__, __LINE__); + } +} + +bool SecCameraHardware::nativeStartPostview() +{ + ALOGD("nativeStartPostview E"); + + int err; + int nBufs = 1; + int i; + + /* For YUV postview */ + cam_pixel_format captureFormat = CAM_PIXEL_FORMAT_YUV422I; + + if ( captureFormat==CAM_PIXEL_FORMAT_YUV422I ) + mPostviewFrameSize = mPostviewSize.width * mPostviewSize.height * 2; /* yuv422I */ + else if ( captureFormat == CAM_PIXEL_FORMAT_YUV420SP ) + mPostviewFrameSize = mPostviewSize.width * mPostviewSize.height * 1.5; /* yuv420sp */ + + ALOGD("Postview size : width = %d, height = %d, frame size = %d", + mPostviewSize.width, mPostviewSize.height, mPreviewFrameSize); + err = mFlite.startCapture(&mPostviewSize, captureFormat, nBufs, START_CAPTURE_POSTVIEW); + CHECK_ERR(err, ("nativeStartPostview: error, mFlite.start")); + + ALOGD("nativeStartPostview GC"); + + getAlignedYUVSize(captureFormat, mPostviewSize.width, mPostviewSize.height, &mPictureBuf); + if (allocMem(mIonCameraClient, &mPictureBuf, 1 << 1) == false) { + ALOGE("ERR(%s):mPictureBuf allocMem() fail", __func__); + return UNKNOWN_ERROR; + } else { + ALOGV("DEBUG(%s): mPictureBuf allocMem adr(%p), size(%d), ion(%d) w/h(%d/%d)", __FUNCTION__, + mPictureBuf.virt.extP[0], mPictureBuf.size.extS[0], mIonCameraClient, + mPostviewSize.width, mPostviewSize.height); + memset(mPictureBuf.virt.extP[0], 0, mPictureBuf.size.extS[0]); + } + +#ifdef USE_USERPTR + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + getAlignedYUVSize(captureFormat, mPostviewSize.width, mPostviewSize.height, &mPictureBufDummy[i]); + if 
(allocMem(mIonCameraClient, &mPictureBufDummy[i], 1 << 1) == false) { + ALOGE("ERR(%s):mPictureBuf dummy allocMem() fail", __func__); + return UNKNOWN_ERROR; + } else { + ALOGV("DEBUG(%s): mPictureBuf dummy allocMem adr(%p), size(%d), ion(%d) w/h(%d/%d)", __FUNCTION__, + mPictureBufDummy[i].virt.extP[0], mPictureBufDummy[i].size.extS[0], mIonCameraClient, + mPostviewSize.width, mPostviewSize.height); + memset(mPictureBufDummy[i].virt.extP[0], 0, mPictureBufDummy[i].size.extS[0]); + } + } +#else + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.querybuf2(i, mFliteNode.planes, &mPictureBufDummy[i]); + CHECK_ERR_N(err, ("nativeStartPreviewZoom: error, mFlite.querybuf2")); + } +#endif + + /* qbuf dummy buffer for skip */ + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.qbufForCapture(&mPictureBufDummy[i], i); + CHECK_ERR(err, ("nativeStartPostview: error, mFlite.qbuf(%d)", i)); + } + + ALOGD("Normal Capture Stream on"); + err = mFlite.stream(true); + CHECK_ERR(err, ("nativeStartPostview: error, mFlite.stream")); + + if ( mCaptureMode == RUNNING_MODE_SINGLE ) { + // PlayShutterSound(); +// Fimc_stream_true_part2(); + } + + ALOGD("nativeStartPostview X"); + return true; +} + +bool SecCameraHardware::nativeStartYUVSnapshot() +{ + ALOGD("nativeStartYUVSnapshot E"); + + int err; + int nBufs = 1; + int i = 0; + ExynosBuffer nullBuf; + + cam_pixel_format captureFormat = CAM_PIXEL_FORMAT_YUV422I; + + err = mFlite.startCapture(&mFLiteCaptureSize, captureFormat, nBufs, START_CAPTURE_YUV_MAIN); + CHECK_ERR(err, ("nativeStartYUVSnapshot: error, mFlite.start")); + +#ifdef USE_USERPTR + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + getAlignedYUVSize(captureFormat, mFLiteCaptureSize.width, mFLiteCaptureSize.height, &mPictureBufDummy[i]); + if (allocMem(mIonCameraClient, &mPictureBufDummy[i], 1 << 1) == false) { + ALOGE("ERR(%s):mPictureBuf dummy allocMem() fail", __func__); + return UNKNOWN_ERROR; + } else { + ALOGV("DEBUG(%s): mPictureBuf dummy allocMem adr(%p), size(%d), ion(%d) w/h(%d/%d)", __FUNCTION__, + mPictureBufDummy[i].virt.extP[0], mPictureBufDummy[i].size.extS[0], mIonCameraClient, + mFLiteCaptureSize.width, mFLiteCaptureSize.height); + memset(mPictureBufDummy[i].virt.extP[0], 0, mPictureBufDummy[i].size.extS[0]); + } + } +#else + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.querybuf2(i, mFliteNode.planes, &mPictureBufDummy[i]); + CHECK_ERR_N(err, ("nativeStartYUVSnapshot: error, mFlite.querybuf2")); + } +#endif + + /* qbuf dummy buffer for skip */ + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.qbufForCapture(&mPictureBufDummy[i], i); + CHECK_ERR(err, ("nativeStartYUVSnapshot: error, mFlite.qbuf(%d)", i)); + } + + err = mFlite.stream(true); + CHECK_ERR(err, ("nativeStartYUVSnapshot: error, mFlite.stream")); + + if ( mCaptureMode == RUNNING_MODE_SINGLE ) { + // PlayShutterSound(); +// Fimc_stream_true_part2(); + } + + ALOGD("nativeStartYUVSnapshot X"); + return true; +} + +bool SecCameraHardware::nativeGetYUVSnapshot(int numF, int *postviewOffset) +{ + ALOGD("nativeGetYUVSnapshot E"); + + int err; + int i = 0; + +retry: + +// err = mFlite.sctrl(CAM_CID_TRANSFER, numF); + CHECK_ERR(err, ("nativeGetYUVSnapshot: error, capture start")) + /* + * Put here if Capture Start Command code is additionally needed + * in case of ISP. + */ + + /* + * Waiting for frame(stream) to be input. 
+ * ex) poll() + */ + err = mFlite.polling(); + if (CC_UNLIKELY(err <= 0)) { + LOG_FATAL("nativeGetYUVSnapshot: fail to get a frame!"); + return false; + } + ALOGV("DEBUG(%s): (%d) nativeGetYUVSnapshot dq start", __FUNCTION__, __LINE__); + + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + int ret = mFlite.dqbufForCapture(&mPictureBufDummy[i]); + ALOGV("DEBUG(%s) (%d): dqbufForCapture dq(%d), ret = %d", __FUNCTION__, __LINE__, i, ret); + } + + /* Stop capturing stream(or frame) data. */ + err = mFlite.stream(false); + CHECK_ERR(err, ("nativeGetYUVSnapshot: error, mFlite.stream")); + +#ifdef SAVE_DUMP +#if 0 + save_dump_path((uint8_t*)mPictureBufDummy[0].virt.extP[0], + mPictureBufDummy[0].size.extS[0], "/data/dump_jpeg_only.jpg"); +#endif +#endif + + if (!allocateHDRHeap()) { + ALOGE("getEncodedJpeg: error, allocateSnapshotHeap"); + return false; + } + memcpy((char *)mHDRHeap->data, (uint8_t *)mPictureBufDummy[0].virt.extP[0], mHDRFrameSize); + ALOGD("%s: mHDRHeap memcpy end size (mHDRFrameSize) = %d", __func__, mHDRFrameSize); + //mGetMemoryCb(mPictureBufDummy[0].fd.extFd[0], mHDRFrameSize, 1, mCallbackCookie); + +#if 0 + err = getYUV(numF); + if (mCaptureMode == RUNNING_MODE_HDR) { + if (numF == 1) { + struct record_heap *scrab_heap = (struct record_heap *)mHDRHeap->data; + + scrab_heap[3].type = kMetadataBufferTypeCameraSource; + scrab_heap[3].buf_index = 3; + scrab_heap[3].reserved = (uint32_t)mPictureBufDummy[0].virt.extP[0]; + ALOGE("Scrab memory set to ION. scrab_heap[3].reserved = %08x", scrab_heap[3].reserved); + } + } + CHECK_ERR(err, ("nativeGetYUVSnapshot: error, getYUV")); +#endif + ALOGD("nativeGetYUVSnapshot X"); + return true; +} + +/* --3 */ +bool SecCameraHardware::nativeStartSnapshot() +{ + CLOGV("DEBUG (%s) : in ",__FUNCTION__); + + int err; + int nBufs = 1; + int i = 0; + ExynosBuffer nullBuf; + + cam_pixel_format captureFormat = CAM_PIXEL_FORMAT_YUV422I; + +#if FRONT_ZSL + if (mCameraId == CAMERA_ID_FRONT && ISecCameraHardware::mFullPreviewRunning) + return true; +#endif + + err = mFlite.startCapture(&mFLiteCaptureSize, captureFormat, nBufs, START_CAPTURE_YUV_MAIN); + CHECK_ERR(err, ("nativeStartSnapshot: error, mFlite.start")); + + /* + TODO : The only one buffer should be used + between mPictureBuf and mPictureBufDummy in case of jpeg capture + */ + /* For picture buffer */ +#ifdef USE_NV21_CALLBACK + if(mPictureFormat == CAM_PIXEL_FORMAT_YUV420SP) { + getAlignedYUVSize(mPictureFormat, mPictureSize.width, mPictureSize.height, &mPictureBuf); + mPictureBuf.size.extS[0] = mPictureBuf.size.extS[0] + mPictureBuf.size.extS[1]; + mPictureBuf.size.extS[1] = 0; + } + else +#endif + { + getAlignedYUVSize(captureFormat, mPictureSize.width, mPictureSize.height, &mPictureBuf); + } + + if (allocMem(mIonCameraClient, &mPictureBuf, 1 << 1) == false) { + CLOGE("ERR(%s):mPictureBuf allocMem() fail", __func__); + return UNKNOWN_ERROR; + } else { + CLOGV("DEBUG(%s): mPictureBuf allocMem adr(%p), size(%d), ion(%d) w/h(%d/%d)", __FUNCTION__, + mPictureBuf.virt.extP[0], mPictureBuf.size.extS[0], mIonCameraClient, + mPictureSize.width, mPictureSize.height); + memset(mPictureBuf.virt.extP[0], 0, mPictureBuf.size.extS[0]); + } + +#ifdef USE_USERPTR + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + getAlignedYUVSize(captureFormat, mFLiteCaptureSize.width, mFLiteCaptureSize.height, &mPictureBufDummy[i]); + if (allocMem(mIonCameraClient, &mPictureBufDummy[i], 1 << 1) == false) { + CLOGE("ERR(%s):mPictureBuf dummy allocMem() fail", __func__); + return UNKNOWN_ERROR; + } else { + CLOGD("DEBUG(%s): 
mPictureBuf dummy allocMem adr(%p), size(%d), ion(%d) w/h(%d/%d)", __FUNCTION__, + mPictureBufDummy[i].virt.extP[0], mPictureBufDummy[i].size.extS[0], mIonCameraClient, + mPictureSize.width, mPictureSize.height); + memset(mPictureBufDummy[i].virt.extP[0], 0, mPictureBufDummy[i].size.extS[0]); + } + } +#else + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.querybuf2(i, mFliteNode.planes, &mPictureBufDummy[i]); + CHECK_ERR_N(err, ("nativeStartPreviewZoom: error, mFlite.querybuf2")); + } +#endif + + /* qbuf dummy buffer for skip */ + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.qbufForCapture(&mPictureBufDummy[i], i); + CHECK_ERR(err, ("nativeStartSnapshot: error, mFlite.qbuf(%d)", i)); + } + +#if !defined(USE_USERPTR) + /* export FD */ + for (int i = 0; i < SKIP_CAPTURE_CNT; i++) { + err = mFlite.expBuf(i, mFliteNode.planes, &mPictureBufDummy[i]); + CHECK_ERR_N(err, ("nativeStartSnapshot: error, mFlite.expBuf")); + } +#endif + + err = mFlite.stream(true); + CHECK_ERR(err, ("nativeStartSnapshot: error, mFlite.stream")); + + CLOGV("DEBUG (%s) : out ",__FUNCTION__); + return true; +} + +bool SecCameraHardware::nativeGetPostview(int numF) +{ + int err; + int i = 0; + + ALOGD("nativeGetPostview E"); +retry: + + /* + * Put here if Capture Start Command code is needed in addition */ + #if 0 + if (mCameraId == CAMERA_ID_BACK) { + err = mFlite.sctrl(CAM_CID_POSTVIEW_TRANSFER, numF); + CHECK_ERR(err, ("nativeGetPostview: error, capture start")); + } + #endif + /* + * Waiting for frame(stream) to be input. + * ex) poll() + */ + err = mFlite.polling(); + if (CC_UNLIKELY(err <= 0)) { +#ifdef ISP_LOGWRITE + ALOGE("polling error - SEC_ISP_DBG_logwrite = %s", __func__); + SEC_ISP_DBG_logwrite(); +#endif + ALOGE("nativeGetPostview: error, mFlite.polling"); + return false; + } + + /* + * Get out a filled buffer from driver's queue. */ + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + int ret = mFlite.dqbufForCapture(&mPictureBufDummy[i]); + ALOGV("DEBUG(%s) (%d): dqbufForCapture dq(%d), ret = %d", __FUNCTION__, __LINE__, i, ret); + } + + /* + * Stop capturing stream(or frame) data. */ + err = mFlite.stream(false); + CHECK_ERR(err, ("nativeGetPostview: error, mFlite.stream")); + + if (mCameraId == CAMERA_ID_BACK) { + err = getPostview(numF); + CHECK_ERR(err, ("nativeGetPostview: error, getPostview")); + } + + ALOGD("nativeGetPostview X"); + return true; +} + +/* --4 */ +bool SecCameraHardware::nativeGetSnapshot(int numF, int *postviewOffset) +{ + CLOGV("DEBUG (%s) : in ",__FUNCTION__); + + int err; + int i = 0; + bool retryDone = false; + +#if FRONT_ZSL + if (mCameraId == CAMERA_ID_FRONT && ISecCameraHardware::mFullPreviewRunning) + return getZSLJpeg(); +#endif + +retry: + + /* + * Waiting for frame(stream) to be input. + * ex) poll() + */ + err = mFlite.polling(); + if (CC_UNLIKELY(err <= 0)) { +#ifdef DEBUG_CAPTURE_RETRY + LOG_FATAL("nativeGetSnapshot: fail to get a frame!"); +#else + if (!retryDone) { + CLOGW("nativeGetSnapshot: warning. 
Reset the camera device"); + mFlite.stream(false); + nativeStopSnapshot(); + mFlite.reset(); + nativeStartSnapshot(); + retryDone = true; + goto retry; + } + CLOGE("nativeGetSnapshot: error, mFlite.polling"); +#endif + return false; + } + CLOGV("DEBUG(%s): (%d) nativeGetSnapshot dq start", __FUNCTION__, __LINE__); + + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + if(i > 0) { + err = mFlite.polling(); + } + int ret = mFlite.dqbufForCapture(&mPictureBufDummy[i]); + CLOGD("DEBUG(%s) (%d): dqbufForCapture dq(%d), ret = %d", __FUNCTION__, __LINE__, i, ret); + } + +#ifdef SAVE_DUMP + save_dump_path((uint8_t*)mPictureBufDummy[0].virt.extP[0], + mPictureBufDummy[0].size.extS[0], "/data/dump_raw.yuv"); +#endif + + /* Stop capturing stream(or frame) data. */ + err = mFlite.stream(false); + CHECK_ERR(err, ("nativeGetSnapshot: error, mFlite.stream")); + + /* last capture was stored dummy buffer due to zoom. + * and zoom applied to capture image by fimc */ + err = nativeCSCCapture(&mPictureBufDummy[i-1], &mPictureBuf); + CHECK_ERR_N(err, ("nativeGetSnapshot: error, nativeCSCCapture")); + +#ifdef SAVE_DUMP + save_dump_path((uint8_t*)mPictureBufDummy[0].virt.extP[0], + mPictureBufDummy[0].size.extS[0], "/data/dump_jpeg_only.jpg"); +#endif +#ifdef USE_NV21_CALLBACK + if (mPictureFormat == CAM_PIXEL_FORMAT_YUV420SP) { + mPictureFrameSize = mPictureBuf.size.extS[0]; + if (!allocateSnapshotHeap()) { + ALOGE("%s: error , allocateSnapshotHeap", __func__); + return false; + } + memcpy((unsigned char *)mJpegHeap->data, mPictureBuf.virt.extP[0], mPictureBuf.size.extS[0]); + } + else +#endif + { + if (numF == 0 && mCaptureMode == RUNNING_MODE_RAW ) { //(mCaptureMode == RUNNING_MODE_RAW ) { + int jpegSize; + nativeGetParameters(CAM_CID_JPEG_MAIN_SIZE, &jpegSize ); + mPictureFrameSize = (uint32_t)jpegSize; + } else { + /* Get Jpeg image including EXIF. 
*/ + if (mCameraId == CAMERA_ID_BACK) + err = getJpegOnBack(postviewOffset); + else + err = getJpegOnFront(postviewOffset); + + CHECK_ERR(err, ("nativeGetSnapshot: error, getJpeg")); + } + +#if 0 + if ((mMsgEnabled & CAMERA_MSG_RAW_IMAGE) || (mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME)) { + int copySize = mRawFrameSize; + if (mPictureBuf.size.extS[0] < copySize) + copySize = mPictureBuf.size.extS[0]; + + memcpy((char *)mRawHeap->data, mPictureBuf.virt.extP[0], copySize); + ALOGV("DEBUG(%s): (%d) copied mRawHeap", __FUNCTION__, __LINE__); + } +#endif + +#ifdef DUMP_JPEG_FILE + ExynosBuffer jpegBuf; + CLOGV("DEBUG(%s): (%d) %d", __FUNCTION__, __LINE__, mPictureFrameSize); + jpegBuf.size.extS[0] = mPictureFrameSize; + jpegBuf.virt.extP[0] = (char *)mJpegHeap->data; + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); + mSaveDump("/data/camera_capture%d.jpg", &jpegBuf, 1); + CLOGV("DEBUG(%s): (%d)", __FUNCTION__, __LINE__); +#endif + } + CLOGV("DEBUG (%s) : out ",__FUNCTION__); + return true; +} + +bool SecCameraHardware::nativeStartDualCapture(int numF) +{ + int err; + + ALOGD("nativeStartDualCapture E - frame: %d", numF); + err = mFlite.sctrl(V4L2_CID_CAMERA_SET_DUAL_CAPTURE, numF); + CHECK_ERR(err, ("nativeStartDualCapture: error, capture start")); + + ALOGD("nativeStartDualCapture X"); + + return true; +} + +int SecCameraHardware::getYUV(int fnum) +{ + ALOGE("%s: start, fnum = %d", __func__, fnum); + + struct record_heap *heap = (struct record_heap *)mHDRHeap->data; + int nAddr; + + nAddr = mPictureBufDummy[0].phys.extP[0]; + + heap[fnum - 1].type = kMetadataBufferTypeCameraSource; + heap[fnum - 1].y = nAddr; + heap[fnum - 1].cbcr = nAddr + (mRawSize.width * mRawSize.height); + heap[fnum - 1].buf_index = fnum - 1; + /* + Just 2 buffer of ION memories are allocated for HDR. + Fimc0 memory is used for last one buffer to reduce + the number of ION memory to 2 from 3. + In case of 16M camera, 64M HDR memory is needed instead of 96M. + */ + + ALOGE("Fnum = %d, ION memory using", fnum); +#ifdef SUPPORT_64BITS + heap[fnum - 1].reserved = (unsigned long)mPictureBufDummy[0].virt.extP[0]; +#else + heap[fnum - 1].reserved = (uint32_t)mPictureBufDummy[0].virt.extP[0]; +#endif +#if 0 + if (fnum <= 2) { + ALOGE("Fnum = %d, ION memory using", fnum); + heap[fnum - 1].reserved = (uint32_t*)mPictureBufDummy[0].virt.extP[0]; + } else { + uint32_t last_frame = (uint32_t)mRawHeap->base(); + heap[fnum - 1].reserved = last_frame; + ALOGE("Fnum = %d, Fimc0 memory using", fnum); + } +#endif + + ALOGE("getYUV hdrheappointer(ion) : %x, mHDRFrameSize = %d", heap[fnum - 1].reserved, mHDRFrameSize); + + /* Note : Sensor return main image size, not only JPEG but also YUV. 
*/ + //err = mFlite.gctrl(V4L2_CID_CAM_JPEG_MAIN_SIZE, &yuvSize); + //CHECK_ERR(err, ("getYUV: error %d, jpeg size", err)); + //mRawFrameSize = yuvSize; + mRawFrameSize = mRawSize.width*mRawSize.height*2; /* yuv422I */ + + /* picture frame size is should be calculated before call allocatePostviewHeap */ +#ifdef SUPPORT_64BITS + char *postview = (char *)mPictureBufDummy[0].virt.extP[0]; + memcpy((char *)(unsigned long)heap[fnum - 1].reserved , postview, mRawFrameSize); +#else + uint8_t *postview = (uint8_t *)mPictureBufDummy[0].virt.extP[0]; + memcpy((void*)heap[fnum - 1].reserved , postview, mRawFrameSize); +#endif +#if 0 /* FIMC output data */ + if (fnum == 1) + save_dump_path(postview, mRawFrameSize, "/data/dump_HDR1.yuv"); + if (fnum == 2) + save_dump_path(postview, mRawFrameSize, "/data/dump_HDR2.yuv"); + if (fnum == 3) + save_dump_path(postview, mRawFrameSize, "/data/dump_HDR3.yuv"); +#endif +#if 0 /* ION memory data */ + if (fnum == 1) + save_dump_path((uint8_t*)mPictureBufDummy[0].virt.extP[0], mHDRFrameSize, "/data/dump_HDR1.yuv"); + if (fnum == 2) + save_dump_path((uint8_t*)mPictureBufDummy[0].virt.extP[0], mHDRFrameSize, "/data/dump_HDR2.yuv"); + if (fnum == 3) + save_dump_path((uint8_t*)mPictureBufDummy[0].virt.extP[0], mHDRFrameSize, "/data/dump_HDR3.yuv"); +#endif +#if 0 /* ION memory data through mHDRHeap pointer */ + if (fnum == 1) + save_dump_path((uint8_t*)(((struct record_heap *)mHDRHeap->data)[fnum - 1].reserved), mHDRFrameSize, "/data/dump_HDR1.yuv"); + if (fnum == 2) + save_dump_path((uint8_t*)(((struct record_heap *)mHDRHeap->data)[fnum - 1].reserved), mHDRFrameSize, "/data/dump_HDR2.yuv"); + if (fnum == 3) + save_dump_path((uint8_t*)(((struct record_heap *)mHDRHeap->data)[fnum - 1].reserved), mHDRFrameSize, "/data/dump_HDR3.yuv"); +#endif + + ALOGD("%s: mPostviewHeap memcpy end size (mRawFrameSize) = %d", __func__, mRawFrameSize); + + return 0; +} + +#ifndef RCJUNG +int SecCameraHardware::getOneYUV() +{ + ALOGE("%s: start", __func__); + + int yuvSize = 0; + int err; + + /* Note : Sensor return main image size, not only JPEG but also YUV. 
*/ + err = mFlite.gctrl(V4L2_CID_CAM_JPEG_MAIN_SIZE, &yuvSize); + CHECK_ERR(err, ("getYUV: error %d, jpeg size", err)); + + /* picture frame size is should be calculated before call allocateYUVHeap */ + mRawSize = mPictureSize; + mRawFrameSize = mRawSize.width * mRawSize.height * 2; /* yuv422I */ + + setExifChangedAttribute(); + + uint8_t *YUVmain = (uint8_t *)mPictureBufDummy[0].virt.extP[0]; + + if (!allocateYUVHeap()) { + ALOGE("getYUV: error, allocateYUVHeap"); + return false; + } + + memcpy(mYUVHeap->data, YUVmain, mRawFrameSize); + + ALOGD("%s: mYUVHeap memcpy end size (mRawFrameSize) = %d", __func__, mRawFrameSize); + + return true; +} +#endif + +int SecCameraHardware::getPostview(int num) +{ + ALOGD("%s: start", __func__); + + /* picture frame size is should be calculated before call allocatePostviewHeap */ + if (!allocatePostviewHeap()) { + ALOGE("getPostview: error, allocatePostviewHeap"); + return UNKNOWN_ERROR; + } + + uint8_t *postview = (uint8_t *)mPictureBufDummy[0].virt.extP[0]; + + memcpy((char *)mPostviewHeap->data, postview, mPostviewFrameSize); + +#ifdef SAVE_DUMP +#if 0 + char *fileName = NULL; + sprintf(fileName, "%s_%d.yuv%c", "/data/dump_postview_yuv", num, NULL); + ALOGD("getPostview: dump postview image = %s", fileName); + save_dump_path((uint8_t *)mPictureBufDummy[0].virt.extP[0], mPictureBufDummy[0].size.extS[0], fileName); +#endif +#endif + + ALOGE("%s: Postview memcpy end size = %d", __func__, mPostviewFrameSize); + + return 0; +} + +inline int SecCameraHardware::getJpegOnBack(int *postviewOffset) +{ + status_t ret; + +#if !defined(REAR_USE_YUV_CAPTURE) + if (mCaptureMode == RUNNING_MODE_RAW) + return internalGetJpegForRawWithZoom(postviewOffset); + else + return internalGetJpegForSocYuvWithZoom(postviewOffset); +#else + if (mEnableDZoom) + ret = internalGetJpegForSocYuvWithZoom(postviewOffset); + else + ret = internalGetJpegForSocYuv(postviewOffset); + + return ret; +#endif +} + +inline int SecCameraHardware::getJpegOnFront(int *postviewOffset) +{ + status_t ret; + ret = internalGetJpegForSocYuvWithZoom(postviewOffset); + return ret; +} + +void SecCameraHardware::nativeStopSnapshot() +{ + ExynosBuffer nullBuf; + int i = 0; + + if (mRawHeap != NULL) { + mRawHeap->release(mRawHeap); + mRawHeap = 0; + } + + freeMem(&mPictureBuf); + +#ifdef USE_USERPTR + /* capture buffer free */ + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + freeMem(&mPictureBufDummy[i]); + mPictureBufDummy[i] = nullBuf; + } +#else + for (i = 0; i < SKIP_CAPTURE_CNT; i++) { + for (int j = 0; j < mFliteNode.planes; j++) { + munmap((void *)mPictureBufDummy[i].virt.extP[j], + mPictureBufDummy[i].size.extS[j]); + ion_free(mPictureBufDummy[i].fd.extFd[j]); + } + mPictureBufDummy[i] = nullBuf; + } + + if (mFlite.reqBufZero(&mFliteNode) < 0) + ALOGE("ERR(%s): mFlite.reqBufZero() fail", __func__); +#endif + + + mPictureBuf = nullBuf; + CLOGD("DEBUG (%s) : out ",__FUNCTION__); +} + +bool SecCameraHardware::nativeSetAutoFocus() +{ + ALOGV("nativeSetAutofocus E"); + + int i, waitUs = 10000, tryCount = (900 * 1000) / waitUs; + for (i = 1; i <= tryCount; i++) { + if (mPreviewInitialized) + break; + else + usleep(waitUs); + + if (!(i % 40)) + ALOGD("AF: waiting for preview\n"); + } + + if (CC_UNLIKELY(i > tryCount)) + ALOGI("cancelAutoFocus: cancel timeout"); + + if (!IsAutofocusRunning()) { + ALOGW("nativeSetAutofocus X: AF Cancel is called"); + return true; + } + + int err = mFlite.sctrl(V4L2_CID_CAM_SINGLE_AUTO_FOCUS, AUTO_FOCUS_ON); + CHECK_ERR(err, ("nativeSetAutofocus X: error, mFlite.sctrl")) + + 
ALOGV("nativeSetAutofocus X"); + + return true; +} + +int SecCameraHardware::nativeGetPreAutoFocus() +{ + ALOGV("nativeGetPreAutofocus E"); + int status, i; + + const int tryCount = 500; + + usleep(150000); + + for (i = 0 ; i < tryCount ; i ++) { + int err; + err = mFlite.gctrl(V4L2_CID_CAM_AUTO_FOCUS_RESULT, &status); + CHECK_ERR_N(err, ("nativeGetPreAutoFocus: error %d", err)); + if (status != 0x0) + break; + usleep(10000); + } + + ALOGV("nativeGetPreAutofocus X %d", status); + return status; +} + +int SecCameraHardware::nativeGetAutoFocus() +{ + ALOGV("nativeGetAutofocus E"); + int status, i; + /* AF completion takes more much time in case of night mode. + So be careful if you modify tryCount. */ + const int tryCount = 300; + + for (i = 0; i < tryCount; i ++) { + int err; + usleep(20000); + err = mFlite.gctrl(V4L2_CID_CAMERA_AUTO_FOCUS_DONE, &status); + CHECK_ERR_N(err, ("nativeGetAutofocus: error %d", err)); + if ((status != 0x0) && (status != 0x08)) { + break; + } + } + + if (i == tryCount) + ALOGE("nativeGetAutoFocus: error, AF hasn't been finished yet."); + + ALOGV("nativeGetAutofocus X"); + return status; +} + +status_t SecCameraHardware::nativeCancelAutoFocus() +{ + ALOGV("nativeCancelAutofocus E1"); +//#if NOTDEFINED +// int err = mFlite.sctrl(V4L2_CID_CAMERA_CANCEL_AUTO_FOCUS, 0); + int err = mFlite.sctrl(V4L2_CID_CAM_SINGLE_AUTO_FOCUS, AUTO_FOCUS_OFF); + CHECK_ERR(err, ("nativeCancelAutofocus: error, mFlite.sctrl")) +//#endif + ALOGV("nativeCancelAutofocus X2"); + return NO_ERROR; +} + +inline status_t SecCameraHardware::nativeSetParameters(cam_control_id id, int value, bool recordingMode) +{ + int err = NO_ERROR; + + if (CC_LIKELY(!recordingMode)) + err = mFlite.sctrl(id, value); + else { + if (mCameraId == CAMERA_ID_FRONT) + err = mFlite.sctrl(id, value); + } + + return NO_ERROR; +} + +inline status_t SecCameraHardware::nativeGetParameters(cam_control_id id, int *value, bool recordingMode) +{ + int err = NO_ERROR; + + if (CC_LIKELY(!recordingMode)) + err = mFlite.gctrl(id, value); + + CHECK_ERR_N(err, ("nativeGetParameters X: error %d", err)) + + return NO_ERROR; +} + +void SecCameraHardware::setExifFixedAttribute() +{ + char property[PROPERTY_VALUE_MAX]; + + CLEAR(mExifInfo); + /* 0th IFD TIFF Tags */ + /* Maker */ + property_get("ro.product.manufacturer", property, Exif::DEFAULT_MAKER); + strncpy((char *)mExifInfo.maker, property, + sizeof(mExifInfo.maker) - 1); + mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0'; + + /* Model */ + property_get("ro.product.model", property, Exif::DEFAULT_MODEL); + strncpy((char *)mExifInfo.model, property, + sizeof(mExifInfo.model) - 1); + mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0'; + + /* Software */ + property_get("ro.build.PDA", property, Exif::DEFAULT_SOFTWARE); + strncpy((char *)mExifInfo.software, property, + sizeof(mExifInfo.software) - 1); + mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0'; + + /* YCbCr Positioning */ + mExifInfo.ycbcr_positioning = Exif::DEFAULT_YCBCR_POSITIONING; + + /* 0th IFD Exif Private Tags */ + /* F Number */ + if (mCameraId == CAMERA_ID_BACK) { + mExifInfo.fnumber.num = Exif::DEFAULT_BACK_FNUMBER_NUM; + mExifInfo.fnumber.den = Exif::DEFAULT_BACK_FNUMBER_DEN; + } else { + mExifInfo.fnumber.num = Exif::DEFAULT_FRONT_FNUMBER_NUM; + mExifInfo.fnumber.den = Exif::DEFAULT_FRONT_FNUMBER_DEN; + } + + /* Exposure Program */ + mExifInfo.exposure_program = Exif::DEFAULT_EXPOSURE_PROGRAM; + + /* Exif Version */ + memcpy(mExifInfo.exif_version, Exif::DEFAULT_EXIF_VERSION, 
sizeof(mExifInfo.exif_version)); + + /* Aperture */ + double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den); + mExifInfo.aperture.num = av*Exif::DEFAULT_APEX_DEN; + mExifInfo.aperture.den = Exif::DEFAULT_APEX_DEN; + + /* Maximum lens aperture */ + mExifInfo.max_aperture.num = mExifInfo.aperture.num; + mExifInfo.max_aperture.den = mExifInfo.aperture.den; + + /* Lens Focal Length */ + if (mCameraId == CAMERA_ID_BACK) { + mExifInfo.focal_length.num = Exif::DEFAULT_BACK_FOCAL_LEN_NUM; + mExifInfo.focal_length.den = Exif::DEFAULT_BACK_FOCAL_LEN_DEN; + } else { + mExifInfo.focal_length.num = Exif::DEFAULT_FRONT_FOCAL_LEN_NUM; + mExifInfo.focal_length.den = Exif::DEFAULT_FRONT_FOCAL_LEN_DEN; + } + + /* Lens Focal Length in 35mm film*/ + if (mCameraId == CAMERA_ID_BACK) { + mExifInfo.focal_35mm_length = Exif::DEFAULT_BACK_FOCAL_LEN_35mm; + } else { + mExifInfo.focal_35mm_length = Exif::DEFAULT_FRONT_FOCAL_LEN_35mm; + } + + /* Color Space information */ + mExifInfo.color_space = Exif::DEFAULT_COLOR_SPACE; + + /* Exposure Mode */ + mExifInfo.exposure_mode = Exif::DEFAULT_EXPOSURE_MODE; + + /* Sensing Method */ + mExifInfo.sensing_method = Exif::DEFAULT_SENSING_METHOD; + + /* 0th IFD GPS Info Tags */ + unsigned char gps_version[4] = {0x02, 0x02, 0x00, 0x00}; + memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version)); + + /* 1th IFD TIFF Tags */ + mExifInfo.compression_scheme = Exif::DEFAULT_COMPRESSION; + mExifInfo.x_resolution.num = Exif::DEFAULT_RESOLUTION_NUM; + mExifInfo.x_resolution.den = Exif::DEFAULT_RESOLUTION_DEN; + mExifInfo.y_resolution.num = Exif::DEFAULT_RESOLUTION_NUM; + mExifInfo.y_resolution.den = Exif::DEFAULT_RESOLUTION_DEN; + mExifInfo.resolution_unit = Exif::DEFAULT_RESOLUTION_UNIT; +} + +void SecCameraHardware::setExifChangedAttribute() +{ + /* 0th IFD TIFF Tags */ + /* Width, Height */ + if (!mMovieMode) { + mExifInfo.width = mPictureSize.width; + mExifInfo.height = mPictureSize.height; + } else { + mExifInfo.width = mVideoSize.width; + mExifInfo.height = mVideoSize.height; + } + + /* ISP firmware version */ +#ifdef SENSOR_FW_GET_FROM_FILE + char *camera_fw = NULL; + char *savePtr = NULL; + char sensor_fw[12]; + size_t sizes = sizeof(sensor_fw) / sizeof(sensor_fw[0]); + camera_fw = strtok_r((char *)getSensorFWFromFile(sensor_fw, sizes, mCameraId), " ", &savePtr); + strncpy((char *)mExifInfo.unique_id, camera_fw, sizeof(mExifInfo.unique_id) - 1); + ALOGD("Exif: unique_id = %s", mExifInfo.unique_id); +#else + char unique_id[12] = {'\0',}; + mFlite.gctrl(V4L2_CID_CAM_SENSOR_FW_VER, unique_id, 12); + ALOGD("Exif: unique_id = %s", unique_id); + strncpy((char *)mExifInfo.unique_id, unique_id, sizeof(mExifInfo.unique_id) - 1); + mExifInfo.unique_id[sizeof(mExifInfo.unique_id) - 1] = '\0'; +#endif + + /* Orientation */ + switch (mParameters.getInt(CameraParameters::KEY_ROTATION)) { + case 90: + mExifInfo.orientation = EXIF_ORIENTATION_90; + break; + case 180: + mExifInfo.orientation = EXIF_ORIENTATION_180; + break; + case 270: + mExifInfo.orientation = EXIF_ORIENTATION_270; + break; + case 0: + default: + mExifInfo.orientation = EXIF_ORIENTATION_UP; + break; + } + ALOGD("Exif: setRotation = %d, orientation = %d", mParameters.getInt(CameraParameters::KEY_ROTATION), mExifInfo.orientation); + + /* Date time */ + time_t rawtime; + struct tm *timeinfo; + time(&rawtime); + timeinfo = localtime(&rawtime); + strftime((char *)mExifInfo.date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo); + + /* 0th IFD Exif Private Tags */ + /* Exposure Time */ + int err; + 
int exposureTimeNum; + int exposureTimeDen; + int rt_num, rt_den; + + err = mFlite.gctrl(V4L2_CID_EXIF_EXPOSURE_TIME_NUM, &exposureTimeNum); + if (err < 0) + ALOGE("setExifChangedAttribute: exposure time num err = %d", err); + err = mFlite.gctrl(V4L2_CID_EXIF_EXPOSURE_TIME_DEN, &exposureTimeDen); + if (err < 0) + ALOGE("setExifChangedAttribute: exposure time den err = %d", err); + if (exposureTimeNum > 0) { + mExifInfo.exposure_time.num = exposureTimeNum; + } else { + ALOGE("exposureTimeNum is negative. set to 1"); + mExifInfo.exposure_time.num = 1; + } + mExifInfo.exposure_time.den = exposureTimeDen; + ALOGD("Exif: exposure time num = %d, den = %d", exposureTimeNum, exposureTimeDen); + + /* Shutter Speed */ + double exposure = (double)mExifInfo.exposure_time.den / (double)mExifInfo.exposure_time.num; + mExifInfo.shutter_speed.num = APEX_EXPOSURE_TO_SHUTTER(exposure) * Exif::DEFAULT_APEX_DEN; + if(mExifInfo.shutter_speed.num < 0) + mExifInfo.shutter_speed.num = 0; + mExifInfo.shutter_speed.den = Exif::DEFAULT_APEX_DEN; + ALOGD("Exif: shutter speed num = %d, den = %d", mExifInfo.shutter_speed.num, mExifInfo.shutter_speed.den); + + /* Flash */ + if (mCameraId == CAMERA_ID_BACK) { + err = mFlite.gctrl(V4L2_CID_CAMERA_EXIF_FLASH, (int *)&mExifInfo.flash); + if (err < 0) + ALOGE("setExifChangedAttribute: Flash value err = %d", err); + ALOGD("mEixfInfo.flash = %x", mExifInfo.flash); + } + + /* Color Space information */ + mExifInfo.color_space = Exif::DEFAULT_COLOR_SPACE; + + /* User Comments */ + strncpy((char *)mExifInfo.user_comment, Exif::DEFAULT_USERCOMMENTS, sizeof(mExifInfo.user_comment) - 1); + /* ISO Speed Rating */ + err = mFlite.gctrl(V4L2_CID_CAMERA_EXIF_ISO, (int *)&mExifInfo.iso_speed_rating); + if (err < 0) + ALOGE("setExifChangedAttribute: ISO Speed Rating err = %d", err); + + /* Brightness */ + int bv; + err = mFlite.gctrl(V4L2_CID_CAMERA_EXIF_BV, &bv); + if (err < 0) + ALOGE("setExifChangedAttribute: Brightness value err = %d", err); + mExifInfo.brightness.num = bv; + mExifInfo.brightness.den = Exif::DEFAULT_APEX_DEN; + + if (mCameraId == CAMERA_ID_BACK) { + /* Exposure Bias */ + float exposure = mParameters.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION) * + mParameters.getFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP); + mExifInfo.exposure_bias.num = (exposure * Exif::DEFAULT_APEX_DEN); + mExifInfo.exposure_bias.den = Exif::DEFAULT_APEX_DEN; + } + + /* Metering Mode */ + const char *metering = mParameters.get("metering"); + if (!metering || !strcmp(metering, "center")) + mExifInfo.metering_mode = EXIF_METERING_CENTER; + else if (!strcmp(metering, "spot")) + mExifInfo.metering_mode = EXIF_METERING_SPOT; + else if (!strcmp(metering, "matrix")) + mExifInfo.metering_mode = EXIF_METERING_AVERAGE; + + /* White Balance */ + const char *wb = mParameters.get(CameraParameters::KEY_WHITE_BALANCE); + if (!wb || !strcmp(wb, CameraParameters::WHITE_BALANCE_AUTO)) + mExifInfo.white_balance = EXIF_WB_AUTO; + else + mExifInfo.white_balance = EXIF_WB_MANUAL; + + /* Scene Capture Type */ + switch (mSceneMode) { + case SCENE_MODE_PORTRAIT: + mExifInfo.scene_capture_type = EXIF_SCENE_PORTRAIT; + break; + case SCENE_MODE_LANDSCAPE: + mExifInfo.scene_capture_type = EXIF_SCENE_LANDSCAPE; + break; + case SCENE_MODE_NIGHTSHOT: + mExifInfo.scene_capture_type = EXIF_SCENE_NIGHT; + break; + default: + mExifInfo.scene_capture_type = EXIF_SCENE_STANDARD; + break; + } + + /* 0th IFD GPS Info Tags */ + const char *strLatitude = mParameters.get(CameraParameters::KEY_GPS_LATITUDE); + const char 
*strLogitude = mParameters.get(CameraParameters::KEY_GPS_LONGITUDE); + const char *strAltitude = mParameters.get(CameraParameters::KEY_GPS_ALTITUDE); + + if (strLatitude != NULL && strLogitude != NULL && strAltitude != NULL) { + if (atof(strLatitude) > 0) { + strncpy((char *)mExifInfo.gps_latitude_ref, "N", sizeof(mExifInfo.gps_latitude_ref) - 1); + } else { + strncpy((char *)mExifInfo.gps_latitude_ref, "S", sizeof(mExifInfo.gps_latitude_ref) - 1); + } + mExifInfo.gps_latitude_ref[sizeof(mExifInfo.gps_latitude_ref) - 1] = '\0'; + + if (atof(strLogitude) > 0){ + strncpy((char *)mExifInfo.gps_longitude_ref, "E", sizeof(mExifInfo.gps_longitude_ref) - 1); + } else { + strncpy((char *)mExifInfo.gps_longitude_ref, "W", sizeof(mExifInfo.gps_longitude_ref) - 1); + } + mExifInfo.gps_longitude_ref[sizeof(mExifInfo.gps_longitude_ref) - 1] = '\0'; + + if (atof(strAltitude) > 0) + mExifInfo.gps_altitude_ref = 0; + else + mExifInfo.gps_altitude_ref = 1; + + double latitude = fabs(atof(strLatitude)); + double longitude = fabs(atof(strLogitude)); + double altitude = fabs(atof(strAltitude)); + + mExifInfo.gps_latitude[0].num = (uint32_t)latitude; + mExifInfo.gps_latitude[0].den = 1; + mExifInfo.gps_latitude[1].num = (uint32_t)((latitude - mExifInfo.gps_latitude[0].num) * 60); + mExifInfo.gps_latitude[1].den = 1; + mExifInfo.gps_latitude[2].num = (uint32_t)(round((((latitude - mExifInfo.gps_latitude[0].num) * 60) - + mExifInfo.gps_latitude[1].num) * 60)); + mExifInfo.gps_latitude[2].den = 1; + + mExifInfo.gps_longitude[0].num = (uint32_t)longitude; + mExifInfo.gps_longitude[0].den = 1; + mExifInfo.gps_longitude[1].num = (uint32_t)((longitude - mExifInfo.gps_longitude[0].num) * 60); + mExifInfo.gps_longitude[1].den = 1; + mExifInfo.gps_longitude[2].num = (uint32_t)(round((((longitude - mExifInfo.gps_longitude[0].num) * 60) - + mExifInfo.gps_longitude[1].num) * 60)); + mExifInfo.gps_longitude[2].den = 1; + + mExifInfo.gps_altitude.num = (uint32_t)altitude; + mExifInfo.gps_altitude.den = 1; + + const char *strTimestamp = mParameters.get(CameraParameters::KEY_GPS_TIMESTAMP); + long timestamp = 0; + if (strTimestamp) + timestamp = atol(strTimestamp); + + struct tm tm_data; + gmtime_r(&timestamp, &tm_data); + mExifInfo.gps_timestamp[0].num = tm_data.tm_hour; + mExifInfo.gps_timestamp[0].den = 1; + mExifInfo.gps_timestamp[1].num = tm_data.tm_min; + mExifInfo.gps_timestamp[1].den = 1; + mExifInfo.gps_timestamp[2].num = tm_data.tm_sec; + mExifInfo.gps_timestamp[2].den = 1; + strftime((char *)mExifInfo.gps_datestamp, sizeof(mExifInfo.gps_datestamp), "%Y:%m:%d", &tm_data); + + const char *progressingMethod = mParameters.get(CameraParameters::KEY_GPS_PROCESSING_METHOD); + if (progressingMethod) { + size_t len = strlen(progressingMethod); + if (len >= sizeof(mExifInfo.gps_processing_method)) + len = sizeof(mExifInfo.gps_processing_method) - 1; + + CLEAR(mExifInfo.gps_processing_method); + strncpy((char *)mExifInfo.gps_processing_method, progressingMethod, len); + } + + mExifInfo.enableGps = true; + } else { + mExifInfo.enableGps = false; + } + + /* 1th IFD TIFF Tags */ + mExifInfo.widthThumb = mThumbnailSize.width; + mExifInfo.heightThumb = mThumbnailSize.height; + +} + +int SecCameraHardware::internalGetJpegForSocYuvWithZoom(int *postviewOffset) +{ + ExynosBuffer thumbnailJpeg; + ExynosBuffer rawThumbnail; + ExynosBuffer jpegOutBuf; + ExynosBuffer exifOutBuf; + ExynosBuffer nullBuf; + Exif exif(mCameraId, CAMERA_TYPE_SOC); + int i; + + uint8_t *thumb; + int32_t thumbSize; + bool thumbnail = false; + int err = -1; + int ret 
= UNKNOWN_ERROR; + + s5p_rect zoomRect; + CLOGD("DEBUG(%s)(%d): picture size(%d/%d)", __FUNCTION__, __LINE__, mPictureSize.width, mPictureSize.height); + CLOGD("DEBUG(%s)(%d): thumbnail size(%d/%d)", __FUNCTION__, __LINE__, mThumbnailSize.width, mThumbnailSize.height); + + zoomRect.w = (uint32_t)((float)mPictureSize.width * 1000 / mZoomRatio); + zoomRect.h = (uint32_t)((float)mPictureSize.height * 1000 / mZoomRatio); + + if (zoomRect.w % 2) + zoomRect.w -= 1; + + if (zoomRect.h % 2) + zoomRect.h -= 1; + + zoomRect.x = (mPictureSize.width - zoomRect.w) / 2; + zoomRect.y = (mPictureSize.height - zoomRect.h) / 2; + + if (zoomRect.x % 2) + zoomRect.x -= 1; + + if (zoomRect.y % 2) + zoomRect.y -= 1; + + /* Making thumbnail image */ + if (mThumbnailSize.width == 0 || mThumbnailSize.height == 0) + goto encodeJpeg; + + /* alloc rawThumbnail */ + rawThumbnail.size.extS[0] = mThumbnailSize.width * mThumbnailSize.height * 2; + if (allocMem(mIonCameraClient, &rawThumbnail, 1 << 1) == false) { + CLOGE("ERR(%s): rawThumbnail allocMem() fail", __func__); + goto destroyMem; + } else { + CLOGV("DEBUG(%s): rawThumbnail allocMem adr(%p), size(%d), ion(%d)", __FUNCTION__, + rawThumbnail.virt.extP[0], rawThumbnail.size.extS[0], mIonCameraClient); + memset(rawThumbnail.virt.extP[0], 0, rawThumbnail.size.extS[0]); + } + + if (mPictureBuf.size.extS[0] <= 0) { + goto destroyMem; + } + + LOG_PERFORMANCE_START(3); + + scaleDownYuv422((unsigned char *)mPictureBuf.virt.extP[0], + (int)mPictureSize.width, + (int)mPictureSize.height, + (unsigned char *)rawThumbnail.virt.extP[0], + (int)mThumbnailSize.width, + (int)mThumbnailSize.height); + LOG_PERFORMANCE_END(3, "scaleDownYuv422"); + + /* alloc thumbnailJpeg */ + thumbnailJpeg.size.extS[0] = mThumbnailSize.width * mThumbnailSize.height * 2; + if (allocMem(mIonCameraClient, &thumbnailJpeg, 1 << 1) == false) { + CLOGE("ERR(%s): thumbnailJpeg allocMem() fail", __func__); + goto destroyMem; + } else { + CLOGV("DEBUG(%s): thumbnailJpeg allocMem adr(%p), size(%d), ion(%d)", __FUNCTION__, + thumbnailJpeg.virt.extP[0], thumbnailJpeg.size.extS[0], mIonCameraClient); + memset(thumbnailJpeg.virt.extP[0], 0, thumbnailJpeg.size.extS[0]); + } + + err = EncodeToJpeg(&rawThumbnail, + &thumbnailJpeg, + mThumbnailSize.width, + mThumbnailSize.height, + CAM_PIXEL_FORMAT_YUV422I, + &thumbSize, + JPEG_THUMBNAIL_QUALITY); + CHECK_ERR_GOTO(encodeJpeg, err, ("getJpeg: error, EncodeToJpeg(thumbnail)")); + + thumb = (uint8_t *)thumbnailJpeg.virt.extP[0]; + thumbnail = true; + + LOG_PERFORMANCE_END(1, "encode thumbnail"); + +encodeJpeg: + /* Making EXIF header */ + + setExifChangedAttribute(); + + uint32_t exifSize; + int32_t jpegSize; + uint8_t *jpeg; + + /* alloc exifOutBuf */ + exifOutBuf.size.extS[0] = EXIF_MAX_LEN; + if (allocMem(mIonCameraClient, &exifOutBuf, 1 << 1) == false) { + CLOGE("ERR(%s): exifTmpBuf allocMem() fail", __func__); + goto destroyMem; + } else { + CLOGV("DEBUG(%s): exifTmpBuf allocMem adr(%p), size(%d), ion(%d)", __FUNCTION__, + exifOutBuf.virt.extP[0], exifOutBuf.size.extS[0], mIonCameraClient); + memset(exifOutBuf.virt.extP[0], 0, exifOutBuf.size.extS[0]); + } + + if (!thumbnail) + exifSize = exif.make((void *)exifOutBuf.virt.extP[0], &mExifInfo); + else + exifSize = exif.make((void *)exifOutBuf.virt.extP[0], &mExifInfo, exifOutBuf.size.extS[0], thumb, thumbSize); + if (CC_UNLIKELY(!exifSize)) { + CLOGE("ERR(%s): getJpeg: error, fail to make EXIF", __FUNCTION__); + goto destroyMem; + } + + /* alloc jpegOutBuf */ + jpegOutBuf.size.extS[0] = mPictureSize.width * 
mPictureSize.height * 2; + if (allocMem(mIonCameraClient, &jpegOutBuf, 1 << 1) == false) { + CLOGE("ERR(%s): jpegOutBuf allocMem() fail", __func__); + goto destroyMem; + } else { + CLOGV("DEBUG(%s): jpegOutBuf allocMem adr(%p), size(%d), ion(%d)", __FUNCTION__, + jpegOutBuf.virt.extP[0], jpegOutBuf.size.extS[0], mIonCameraClient); + memset(jpegOutBuf.virt.extP[0], 0, jpegOutBuf.size.extS[0]); + } + + /* Making main jpeg image */ + err = EncodeToJpeg(&mPictureBuf, + &jpegOutBuf, + mPictureSize.width, + mPictureSize.height, + CAM_PIXEL_FORMAT_YUV422I, + &jpegSize, + mJpegQuality); + + CHECK_ERR_GOTO(destroyMem, err, ("getJpeg: error, EncodeToJpeg(Main)")); + + /* Finally, Creating Jpeg image file including EXIF */ + mPictureFrameSize = jpegSize + exifSize; + + /*note that picture frame size is should be calculated before call allocateSnapshotHeap */ + if (!allocateSnapshotHeap()) { + ALOGE("getEncodedJpeg: error, allocateSnapshotHeap"); + goto destroyMem; + } + + jpeg = (unsigned char *)jpegOutBuf.virt.extP[0]; + memcpy((unsigned char *)mJpegHeap->data, jpeg, 2); + memcpy((unsigned char *)mJpegHeap->data + 2, exifOutBuf.virt.extP[0], exifSize); + memcpy((unsigned char *)mJpegHeap->data + 2 + exifSize, jpeg + 2, jpegSize - 2); + CLOGD("DEBUG(%s): success making jpeg(%d) and exif(%d)", __FUNCTION__, jpegSize, exifSize); + + ret = NO_ERROR; + +destroyMem: + freeMem(&thumbnailJpeg); + freeMem(&rawThumbnail); + freeMem(&jpegOutBuf); + freeMem(&exifOutBuf); + + return ret; +} + +int SecCameraHardware::internalGetJpegForRawWithZoom(int *postviewOffset) +{ + ExynosBuffer thumbnailJpeg; + ExynosBuffer rawThumbnail; + ExynosBuffer jpegOutBuf; + ExynosBuffer exifOutBuf; + ExynosBuffer nullBuf; + Exif exif(mCameraId); + int i; + + uint8_t *thumb; + bool thumbnail = false; + int err; + int ret = UNKNOWN_ERROR; + + int32_t jpegSize = -1; + int32_t jpegOffset; + int32_t thumbSize = 0; + int32_t thumbOffset; + + err = mFlite.gctrl(V4L2_CID_CAM_JPEG_MAIN_SIZE, &jpegSize); + CHECK_ERR(err, ("getJpeg: error %d, jpeg size", err)); + + ALOGD("internalgetJpeg: jpegSize = %d", jpegSize); + + err = mFlite.gctrl(V4L2_CID_CAM_JPEG_MAIN_OFFSET, &jpegOffset); + CHECK_ERR(err, ("getJpeg: error %d, jpeg offset", err)); + + + s5p_rect zoomRect; + ALOGV("DEBUG(%s)(%d): picture size(%d/%d)", __FUNCTION__, __LINE__, mPictureSize.width, mPictureSize.height); + + zoomRect.w = (uint32_t)((float)mPictureSize.width * 1000 / mZoomRatio); + zoomRect.h = (uint32_t)((float)mPictureSize.height * 1000 / mZoomRatio); + + if (zoomRect.w % 2) + zoomRect.w -= 1; + + if (zoomRect.h % 2) + zoomRect.h -= 1; + + zoomRect.x = (mPictureSize.width - zoomRect.w) / 2; + zoomRect.y = (mPictureSize.height - zoomRect.h) / 2; + + if (zoomRect.x % 2) + zoomRect.x -= 1; + + if (zoomRect.y % 2) + zoomRect.y -= 1; + + + if (mPictureBufDummy[0].size.extS[0] <= 0) + return UNKNOWN_ERROR; + + LOG_PERFORMANCE_START(3); + +#if 0 + ALOGD("mPostviewFrameSize = %d", mPostviewFrameSize); + save_dump_path((uint8_t*)mPostviewHeap->data, + mPostviewFrameSize, "/data/dump_postview_yuv.yuv"); +#endif + + thumbnail = false; + + LOG_PERFORMANCE_END(1, "encode thumbnail"); +#ifdef SAVE_DUMP +#if 0 + save_dump_path((uint8_t*)thumbnailJpeg.virt.extP[0], + thumbSize, "/data/dump_thumbnail_jpeg.jpg"); +#endif +#endif + +encodeJpeg: + /* Making EXIF header */ + + setExifChangedAttribute(); + + uint32_t exifSize; + uint8_t *jpeg; + + /* alloc exifOutBuf */ + exifOutBuf.size.extS[0] = EXIF_MAX_LEN; + if (allocMem(mIonCameraClient, &exifOutBuf, 1 << 1) == false) { + 
ALOGE("ERR(%s): exifTmpBuf allocMem() fail", __func__); + goto destroyMem; + } else { + ALOGV("DEBUG(%s): exifTmpBuf allocMem adr(%p), size(%d), ion(%d)", __FUNCTION__, + exifOutBuf.virt.extP[0], exifOutBuf.size.extS[0], mIonCameraClient); + memset(exifOutBuf.virt.extP[0], 0, exifOutBuf.size.extS[0]); + } + + if (!thumbnail) + exifSize = exif.make((void *)exifOutBuf.virt.extP[0], &mExifInfo); + else + exifSize = exif.make((void *)exifOutBuf.virt.extP[0], &mExifInfo, exifOutBuf.size.extS[0], thumb, thumbSize); + if (CC_UNLIKELY(!exifSize)) { + ALOGE("ERR(%s): getJpeg: error, fail to make EXIF", __FUNCTION__); + goto destroyMem; + } + + /* Finally, Creating Jpeg image file including EXIF */ + mPictureFrameSize = jpegSize + exifSize; + + { + ALOGD("jpegSize = %d, exifSize = %d", jpegSize, exifSize); + /*note that picture frame size is should be calculated before call allocateSnapshotHeap */ + if (!allocateSnapshotHeap()) { + ALOGE("getEncodedJpeg: error, allocateSnapshotHeap"); + goto destroyMem; + } + + // jpeg = (unsigned char *)jpegOutBuf.virt.extP[0]; + jpeg = (unsigned char *)mPictureBufDummy[0].virt.extP[0]; + memcpy((unsigned char *)mJpegHeap->data, jpeg, 2); + memcpy((unsigned char *)mJpegHeap->data + 2, exifOutBuf.virt.extP[0], exifSize); + memcpy((unsigned char *)mJpegHeap->data + 2 + exifSize, jpeg + 2, jpegSize - 2); + ALOGD("DEBUG(%s): success making jpeg(%d) and exif(%d)", __FUNCTION__, jpegSize, exifSize); + +#ifdef SAVE_DUMP +#if 0 + save_dump_path((uint8_t*)mJpegHeap->data, + mPictureFrameSize, "/data/dump_full_img.jpg"); +#endif +#endif + } + + ret = NO_ERROR; + +destroyMem: + freeMemSinglePlane(&jpegOutBuf, 0); + freeMemSinglePlane(&exifOutBuf, 0); + + return ret; +} + +int SecCameraHardware::EncodeToJpeg(ExynosBuffer *yuvBuf, ExynosBuffer *jpegBuf, + int width, int height, cam_pixel_format srcformat, int* jpegSize, int quality) +{ + ExynosJpegEncoder *jpegEnc = NULL; + int ret = UNKNOWN_ERROR; + + /* 1. ExynosJpegEncoder create */ + jpegEnc = new ExynosJpegEncoder(); + if (jpegEnc == NULL) { + ALOGE("ERR(%s) (%d): jpegEnc is null", __FUNCTION__, __LINE__); + return ret; + } + + ret = jpegEnc->create(); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.create(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* 2. cache on */ + ret = jpegEnc->setCache(JPEG_CACHE_ON); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setCache(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* 3. set quality */ + ret = jpegEnc->setQuality(quality); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setQuality(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + ret = jpegEnc->setSize(width, height); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setSize() fail", __func__); + goto jpeg_encode_done; + } + + /* 4. set yuv format */ + ret = jpegEnc->setColorFormat(srcformat); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setColorFormat(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* 5. set jpeg format */ + ret = jpegEnc->setJpegFormat(V4L2_PIX_FMT_JPEG_422); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setJpegFormat(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* src and dst buffer alloc */ + /* 6. set yuv format(src) from FLITE */ + ret = jpegEnc->setInBuf(((char **)&(yuvBuf->virt.extP[0])), (int *)yuvBuf->size.extS); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setInBuf(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* 6. 
set jpeg format(dest) from FLITE */ + ret = jpegEnc->setOutBuf((char *)jpegBuf->virt.extP[0], jpegBuf->size.extS[0]); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.setOutBuf(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + ret = jpegEnc->updateConfig(); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.updateConfig(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* start encoding */ + ret = jpegEnc->encode(); + if (ret < 0) { + ALOGE("ERR(%s):jpegEnc.encode(%d) fail", __func__, ret); + goto jpeg_encode_done; + } + + /* start encoding */ + *jpegSize = jpegEnc->getJpegSize(); + if ((*jpegSize) <= 0) { + ALOGE("ERR(%s):jpegEnc.getJpegSize(%d) is too small", __func__, *jpegSize); + goto jpeg_encode_done; + } + + ALOGD("DEBUG(%s): jpegEnc success!! size(%d)", __func__, *jpegSize); + ret = NO_ERROR; + +jpeg_encode_done: + jpegEnc->destroy(); + delete jpegEnc; + + if (ret != NO_ERROR) { + ALOGE("ERR(%s): (%d) [yuvBuf->fd.extFd[0] %d][yuvSize[0] %d]", __FUNCTION__, __LINE__, + yuvBuf->fd.extFd[0], yuvBuf->size.extS[0]); + ALOGE("ERR(%s): (%d) [jpegBuf->fd.extFd %d][jpegBuf->size.extS %d]", __FUNCTION__, __LINE__, + jpegBuf->fd.extFd[0], jpegBuf->size.extS[0]); + ALOGE("ERR(%s): (%d) [w %d][h %d][colorFormat %d]", __FUNCTION__, __LINE__, + width, height, srcformat); + } + + return ret; +} + +bool SecCameraHardware::scaleDownYuv422(uint8_t *srcBuf, int srcW, int srcH, + uint8_t *dstBuf, int dstW, int dstH) + +{ + int step_x, step_y; + int dst_pos; + char *src_buf = (char *)srcBuf; + char *dst_buf = (char *)dstBuf; + + if (dstW & 0x01 || dstH & 0x01) { + ALOGE("ERROR(%s): (%d) width or height invalid(%d/%d)", __FUNCTION__, __LINE__, + dstW & 0x01, dstH & 0x01); + return false; + } + + step_x = srcW / dstW; + step_y = srcH / dstH; + + unsigned int srcWStride = srcW * 2; + unsigned int stepXStride = step_x * 2; + + dst_pos = 0; + for (int y = 0; y < dstH; y++) { + int src_y_start_pos; + src_y_start_pos = srcWStride * step_y * y; + for (int x = 0; x < dstW; x += 2) { + int src_pos; + src_pos = src_y_start_pos + (stepXStride * x); + dst_buf[dst_pos++] = src_buf[src_pos]; + dst_buf[dst_pos++] = src_buf[src_pos + 1]; + dst_buf[dst_pos++] = src_buf[src_pos + 2]; + dst_buf[dst_pos++] = src_buf[src_pos + 3]; + } + } + + return true; +} + +bool SecCameraHardware::conversion420to422(uint8_t *srcBuf, uint8_t *dstBuf, int width, int height) +{ + int i, j, k; + int vPos1, vPos2, uPos1, uPos2; + + /* copy y table */ + for (i = 0; i < width * height; i++) { + j = i * 2; + dstBuf[j] = srcBuf[i]; + } + + for (i = 0; i < width * height / 2; i += 2) { + j = width * height; + k = width * 2; + uPos1 = (i / width) * k * 2 + (i % width) * 2 + 1; + uPos2 = uPos1 + k; + vPos1 = uPos1 + 2; + vPos2 = uPos2 + 2; + + if (uPos1 >= 0) { + dstBuf[vPos1] = srcBuf[j + i]; /* V table */ + dstBuf[vPos2] = srcBuf[j + i]; /* V table */ + dstBuf[uPos1] = srcBuf[j + i + 1]; /* U table */ + dstBuf[uPos2] = srcBuf[j + i + 1]; /* U table */ + } + } + + return true; +} + +bool SecCameraHardware::conversion420Tto422(uint8_t *srcBuf, uint8_t *dstBuf, int width, int height) +{ + int i, j, k; + int vPos1, vPos2, uPos1, uPos2; + + /* copy y table */ + for (i = 0; i < width * height; i++) { + j = i * 2; + dstBuf[j] = srcBuf[i]; + } + for (i = 0; i < width * height / 2; i+=2) { + j = width * height; + k = width * 2; + uPos1 = (i / width) * k * 2 + (i % width) * 2 + 1; + uPos2 = uPos1 + k; + vPos1 = uPos1 + 2; + vPos2 = uPos2 + 2; + + if (uPos1 >= 0) { + dstBuf[uPos1] = srcBuf[j + i]; /* V table */ + dstBuf[uPos2] = srcBuf[j + i]; /* V table 
*/ + dstBuf[vPos1] = srcBuf[j + i + 1]; /* U table */ + dstBuf[vPos2] = srcBuf[j + i + 1]; /* U table */ + } + } + + return true; +} + +int SecCameraHardware::mSaveDump(const char *filepath, ExynosBuffer *dstBuf, int index) +{ + FILE *yuv_fp = NULL; + char filename[100]; + char *buffer = NULL; + CLEAR(filename); + + /* file create/open, note to "wb" */ + snprintf(filename, 100, filepath, index); + yuv_fp = fopen(filename, "wb"); + if (yuv_fp == NULL) { + CLOGE("Save jpeg file open error[%s]", filename); + return -1; + } + + int yuv_size = dstBuf->size.extS[0] + dstBuf->size.extS[1] + dstBuf->size.extS[2]; + + buffer = (char *) malloc(yuv_size); + if (buffer == NULL) { + CLOGE("Save YUV] buffer alloc failed"); + if (yuv_fp) + fclose(yuv_fp); + + return -1; + } + + memcpy(buffer, dstBuf->virt.extP[0], dstBuf->size.extS[0]); + if (dstBuf->size.extS[1] > 0) { + memcpy(buffer + dstBuf->size.extS[0], + dstBuf->virt.extP[1], dstBuf->size.extS[1]); + if (dstBuf->size.extS[2] > 0) { + memcpy(buffer + dstBuf->size.extS[0] + dstBuf->size.extS[1], + dstBuf->virt.extP[2], dstBuf->size.extS[2]); + } + } + + fflush(stdout); + + fwrite(buffer, 1, yuv_size, yuv_fp); + + fflush(yuv_fp); + + if (yuv_fp) + fclose(yuv_fp); + if (buffer) + free(buffer); + + return 0; +} + + +int SecCameraHardware::createInstance(int cameraId) +{ + if (!init()) { + ALOGE("createInstance: error, camera cannot be initialiezed"); + mInitialized = false; + return NO_INIT; + } + + initDefaultParameters(); + CLOGD("DEBUG[%s(%d)]-%s camera created", __FUNCTION__, __LINE__, + cameraId == CAMERA_ID_BACK ? "Back" : "Front"); + + mInitialized = true; + return NO_ERROR; +} + +int SecCameraHardware::getMaxZoomLevel(void) +{ + int zoomLevel = 0; + + zoomLevel = MAX_BASIC_ZOOM_LEVEL; + + return zoomLevel; +} + +int SecCameraHardware::getMaxZoomRatio(void) +{ + int maxZoomRatio = 0; + + if (mCameraId == CAMERA_ID_BACK) { + maxZoomRatio = MAX_ZOOM_RATIO; + } else { + maxZoomRatio = MAX_ZOOM_RATIO_FRONT; + } + + return maxZoomRatio; +} + +float SecCameraHardware::getZoomRatio(int zoomLevel) +{ + float zoomRatio = 1.00f; + if (IsZoomSupported() == true) + zoomRatio = (float)zoomRatioList[zoomLevel]; + else + zoomRatio = 1000.00f; + + return zoomRatio; +} + +#ifdef SAMSUNG_JPEG_QUALITY_ADJUST_TARGET +void SecCameraHardware::adjustJpegQuality(void) +{ + mJpegQuality = JPEG_QUALITY_ADJUST_TARGET; + ALOGD("DEBUG(%s):adjusted JpegQuality %d", __func__, mJpegQuality); +} +#endif + +int getSensorId(int camId) +{ + int sensorId = -1; + +#ifdef SENSOR_NAME_GET_FROM_FILE + int &curSensorId = (camId == CAMERA_ID_BACK) ? 
g_rearSensorId : g_frontSensorId; + + if (curSensorId < 0) { + curSensorId = getSensorIdFromFile(camId); + if (curSensorId < 0) { + ALOGE("ERR(%s): invalid sensor ID %d", __FUNCTION__, sensorId); + } + } + + sensorId = curSensorId; +#else + if (camId == CAMERA_ID_BACK) { + sensorId = BACK_CAMERA_SENSOR_NAME; + } else if (camId == CAMERA_ID_FRONT) { + sensorId = FRONT_CAMERA_SENSOR_NAME; + } else { + ALOGE("ERR(%s):Unknown camera ID(%d)", __FUNCTION__, camId); + } +#endif + + return sensorId; +} + +#ifdef SENSOR_NAME_GET_FROM_FILE +int getSensorIdFromFile(int camId) +{ + FILE *fp = NULL; + int numread = -1; + char sensor_name[50]; + int sensorName = -1; + bool ret = true; + + if (camId == CAMERA_ID_BACK) { + fp = fopen(SENSOR_NAME_PATH_BACK, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + } else { + fp = fopen(SENSOR_NAME_PATH_FRONT, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + } + + if (fgets(sensor_name, sizeof(sensor_name), fp) == NULL) { + ALOGE("ERR(%s[%d]):failed to read sysfs entry", __FUNCTION__, __LINE__); + goto err; + } + + numread = strlen(sensor_name); + ALOGD("DEBUG(%s[%d]):Sensor name is %s(%d)", __FUNCTION__, __LINE__, sensor_name, numread); + + /* TODO: strncmp for check sensor name, str is vendor specific sensor name + * ex) + * if (strncmp((const char*)sensor_name, "str", numread - 1) == 0) { + * sensorName = SENSOR_NAME_IMX135; + * } + */ + sensorName = atoi(sensor_name); + +err: + if (fp != NULL) + fclose(fp); + + return sensorName; +} +#endif + +#ifdef SENSOR_FW_GET_FROM_FILE +const char *getSensorFWFromFile(char *sensor_fw, size_t size, int camId) +{ + FILE *fp = NULL; + int numread = -1; + + if (camId == CAMERA_ID_BACK) { + fp = fopen(SENSOR_FW_PATH_BACK, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open SENSOR_FW_PATH_BACK", __FUNCTION__, __LINE__); + goto err; + } + } else { + fp = fopen(SENSOR_FW_PATH_FRONT, "r"); + if (fp == NULL) { + ALOGE("ERR(%s[%d]):failed to open SENSOR_FW_PATH_FRONT", __FUNCTION__, __LINE__); + goto err; + } + } + if (fgets(sensor_fw, size, fp) == NULL) { + ALOGE("ERR(%s[%d]):camId(%d)failed to read sysfs entry", __FUNCTION__, __LINE__, camId); + goto err; + } + + numread = strlen(sensor_fw); + ALOGD("DEBUG(%s[%d]):Sensor fw is %s(%d)", __FUNCTION__, __LINE__, sensor_fw, numread); + +err: + if (fp != NULL) + fclose(fp); + + return (const char*)sensor_fw; +} +#endif + +}; /* namespace android */ + +#endif /* ANDROID_HARDWARE_SECCAMERAHARDWARE_CPP */ diff --git a/libcamera_external/SecCameraHardware.h b/libcamera_external/SecCameraHardware.h new file mode 100644 index 0000000..ff02744 --- /dev/null +++ b/libcamera_external/SecCameraHardware.h @@ -0,0 +1,389 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! 
+ * \file SecCameraHardware.h + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_SECCAMERAHARDWARE_H +#define ANDROID_HARDWARE_SECCAMERAHARDWARE_H + +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include "ISecCameraHardware.h" + +#include +//#include "ExynosExif.h" +#include "Exif.h" + +namespace android { + +typedef uint32_t phyaddr_t; + +typedef struct _s5p_rect { + uint32_t x; + uint32_t y; + uint32_t w; + uint32_t h; +} s5p_rect; + +typedef struct _s5p_img { + uint32_t width; + uint32_t height; + uint32_t format; + uint32_t offset; + uint32_t base; + int memory_id; +} s5p_img; + +typedef struct ADDRS +{ + unsigned int addrY; + unsigned int addrCbCr; + unsigned int addrCr; +} addrs_t; + +typedef struct { + uint16_t soi; + uint8_t dummy0[22]; + struct { + uint16_t jpegWidth; + uint16_t jpegHeight; + uint16_t thumbWidth; + uint16_t thumbHeight; + uint8_t dummy0[11]; + } __attribute__((packed)) resInfo; + uint8_t dummy1[593]; +} __attribute__((packed)) jpegHeader_t; + +typedef struct { + uint16_t sosi; + uint16_t size0; + struct { + uint16_t infoVer; + uint16_t chipId; + uint16_t evtNum; + uint16_t imageWidth; + uint16_t imageHeight; + uint16_t thumbWidth; + uint16_t thumbHeight; + uint32_t expTime; /* usec */ + uint16_t frameTime; /* msec */ + uint16_t analogGain; + uint16_t digitalGain; + struct { + uint16_t red; + uint16_t green; + uint16_t blue; + } __attribute__((packed)) wbGain; + uint16_t brightness; + uint16_t constrast; + uint16_t afPosition; + } __attribute__((packed)) jpegInfo; + uint16_t size1; + uint16_t eosi; +} __attribute__((packed)) jpegInfo_t; + +struct jpeg_encode_param { + int jpegFd; /* input */ + void *srcBuf; /* input */ + uint32_t srcWidth; /* input */ + uint32_t srcHeight; /* input */ + uint32_t srcBufSize; /* input */ + uint32_t srcFormat; /* input */ + + void *destJpegBuf; /* output */ + uint32_t destJpegSize; /* output */ +}; + +class SecCameraHardware : public ISecCameraHardware { +public: + int createInstance(int cameraId); + bool mInitialized; + + virtual void release(); +protected: + virtual bool init(); + virtual void initDefaultParameters(); + + image_rect_type nativeGetWindowSize(); + + virtual status_t nativeStartPreview(); + virtual status_t nativeStartPreviewZoom(); + virtual int nativeGetPreview(); + virtual int nativeReleasePreviewFrame(int index); + virtual void nativeStopPreview(); +#if FRONT_ZSL + virtual status_t nativeStartFullPreview(); + virtual int nativeGetFullPreview(); + virtual int nativeReleaseFullPreviewFrame(int index); + virtual void nativeStopFullPreview(); + virtual void nativeForceStopFullPreview(); + bool getZSLJpeg(); +#endif + + virtual status_t nativeSetZoomRatio(int value); + virtual status_t nativePreviewCallback(int index, ExynosBuffer *grallocBuf); + virtual status_t nativeStartRecording(); + virtual status_t nativeStartRecordingZoom(); + virtual void nativeStopRecording(); + virtual status_t nativeCSCPreview(int index, int type); + virtual status_t nativeCSCRecording(rec_src_buf_t *srcBuf, int dstIdx); + virtual bool getCropRect(unsigned int src_w, unsigned int src_h, + unsigned int dst_w, unsigned int dst_h, + unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int align_x, int align_y,int align_w, int align_h, + float zoomRatio); + virtual bool getRectZoomAlign(unsigned int src_w, unsigned int src_h, + 
unsigned int dst_w, unsigned int dst_h, + unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int align_x, int align_y,int align_w, int align_h, + float zoomRatio); +#ifdef RECORDING_CAPTURE + virtual bool nativeGetRecordingJpeg(ExynosBuffer *yuvBuf, + uint32_t width, uint32_t height); +#endif + virtual bool nativeSetAutoFocus(); + virtual int nativeGetPreAutoFocus(); + virtual int nativeGetAutoFocus(); + virtual status_t nativeCancelAutoFocus(); + virtual bool nativeStartYUVSnapshot(); + virtual bool nativeGetYUVSnapshot(int numF, int *postviewOffset); + + virtual bool nativeStartSnapshot(); + virtual bool nativeStartPostview(); + virtual void nativeMakeJpegDump(); + virtual bool nativeGetSnapshot(int numF, int *postviewOffset); + virtual bool nativeGetPostview(int numF); + virtual void nativeStopSnapshot(); + virtual bool nativeStartDualCapture(int numF); + virtual status_t nativeCSCCapture(ExynosBuffer *srcBuf, ExynosBuffer *dstBuf); + virtual status_t nativeCSCRecordingCapture(ExynosBuffer *srcBuf, ExynosBuffer *dstBuf); + virtual int nativegetWBcustomX(); + virtual int nativegetWBcustomY(); + + virtual int nativeSetFastCapture(bool onOff); + + virtual status_t nativeSetParameters(cam_control_id id, int value, + bool recording = false); + virtual status_t nativeGetParameters(cam_control_id id, int *value, + bool recordingMode = false); + + virtual bool nativeCreateSurface(uint32_t width, uint32_t height, + uint32_t halPixelFormat); + virtual bool nativeDestroySurface(void); + virtual bool nativeFlushSurfaceYUV420(uint32_t width, uint32_t height, + uint32_t size, uint32_t index, + int type = CAMERA_HEAP_POSTVIEW); + virtual bool nativeFlushSurface(uint32_t width, uint32_t height, + uint32_t size, uint32_t index, + int type=CAMERA_HEAP_PREVIEW); + virtual bool beautyLiveFlushSurface(uint32_t width, uint32_t height, + uint32_t size, uint32_t index, + int type=CAMERA_HEAP_PREVIEW); + + virtual bool conversion420to422(uint8_t *src, uint8_t *dest, int width, int height); + virtual bool conversion420Tto422(uint8_t *src, uint8_t *dest, int width, int height); + +public: + SecCameraHardware(int cameraId, camera_device_t *dev); + virtual ~SecCameraHardware(); + int mSaveDump(const char *filepath, ExynosBuffer *dstBuf, int index); + +private: + static gralloc_module_t const* mGrallocHal; + + class FLiteV4l2 { + public: + FLiteV4l2(); + virtual ~FLiteV4l2(); + + int getfd() { + return mCameraFd; + } + + int init(const char *devPath, const int cameraId); + void deinit(); + + /* preview */ + int startPreview(image_rect_type *fliteSize, cam_pixel_format format, + int numBufs, int fps, bool movieMode, node_info_t *mFliteNode); + + /* capture */ + int startCapture(image_rect_type *img, cam_pixel_format format, + int numBufs, int capKind); + int startRecord(image_rect_type *img, image_rect_type *videoSize, + cam_pixel_format format, int numBufs); + sp querybuf(uint32_t *frmsize); + int reqBufZero(node_info_t *mFliteNode); + int querybuf2(unsigned int index, int planeCnt, ExynosBuffer *buf); + int expBuf(unsigned int index, int planeCnt, ExynosBuffer *buf); + + int qbuf(uint32_t index); + int dqbuf(); + int dqbuf(uint32_t *index); + + int qbuf2(node_info_t *node, uint32_t index); + int qbufForCapture(ExynosBuffer *buf, uint32_t index); + int dqbuf2(node_info_t *node); + int dqbufForCapture(ExynosBuffer *buf); + +#ifdef FAKE_SENSOR + int fakeQbuf2(node_info_t *node, uint32_t index); + int fakeDqbuf2(node_info_t *node); +#endif + int stream(bool on); + int 
polling(bool recordingMode=false); + int gctrl(uint32_t id, int *value); + int gctrl(uint32_t id, unsigned short *value); + int gctrl(uint32_t id, char *value, int size); + int sctrl(uint32_t id, int value); + int sparm(struct v4l2_streamparm *stream_parm); + int getYuvPhyaddr(int index, phyaddr_t *y, phyaddr_t *cbcr); + int getYuvPhyaddr(int index, phyaddr_t *y, phyaddr_t *cb, phyaddr_t *cr); + int reset(); + void forceStop(); + bool getStreamStatus() { + return mStreamOn; + } + int setFastCaptureFimc(uint32_t IsFastCaptureCalled); + + int getFd(); + + private: +#ifdef FAKE_SENSOR + int fakeIndex; + int fakeByteData; +#endif + int mCameraId; + int mCameraFd; + int mBufferCount; + bool mStreamOn; + int mCmdStop; + bool mFastCapture; + }; + +#if FRONT_ZSL + static const int kBufferZSLCount = 4; +#endif + + int mJpegIndex; + +#ifdef CHG_ENCODE_JPEG + int EncodeToJpeg(unsigned char* src, int width, int height, + cam_pixel_format srcformat, unsigned char* dst, int* jpegSize, + int quality = ExynosJpegEncoder::QUALITY_LEVEL_1); +#endif + int EncodeToJpeg(ExynosBuffer *yuvBuf, ExynosBuffer *jpegBuf, + int width, int height, cam_pixel_format srcformat, int* jpegSize, + int quality); + + FLiteV4l2 mFlite; + FLiteV4l2 mFimc1; + void *mFimc1CSC; + mutable Mutex mFimc1CSCLock; + + void *mFimc2CSC; + mutable Mutex mFimc2CSCLock; + + // media controller variable + s5p_rect mPreviewZoomRect; + s5p_rect mPictureZoomRect; + s5p_rect mRecordZoomRect; + float mZoomRatio; + + addrs_t mWindowBuffer; + + cam_pixel_format mRecordingFormat; + mutable Mutex mNativeRecordLock; + + exif_attribute_t mExifInfo; + + bool allocatePreviewHeap(); + bool allocateRecordingHeap(); + +#ifdef RECORDING_CAPTURE + bool allocateRecordingSnapshotHeap(); +#endif + bool allocateSnapshotHeap(); + bool allocatePostviewHeap(); +#if FRONT_ZSL + bool allocateFullPreviewHeap(); +#endif + + int save_dump_path(uint8_t *real_data, int data_size, const char* filePath); + bool allocateHDRHeap(); + int getYUV(int fnum); +#ifndef RCJUNG + int getOneYUV(); + bool allocateYUVHeap(); +#endif + int getPostview(int num); + int getJpegOnBack(int *postviewOffset); + int getJpegOnFront(int *postviewOffset); + + void setExifFixedAttribute(); + void setExifChangedAttribute(); + + int internalGetJpegForSocYuvWithZoom(int *postviewOffset); + int internalGetJpegForSocYuv(int *postviewOffset); + int internalGetJpegForRawWithZoom(int *postviewOffset); + bool scaleDownYuv422(uint8_t *srcBuf, int srcW, int srcH, + uint8_t *dstBuf, int dstW, int dstH); + int getMaxZoomLevel(void); + int getMaxZoomRatio(void); + float getZoomRatio(int zoomLevel); + +#ifdef SAMSUNG_JPEG_QUALITY_ADJUST_TARGET + void adjustJpegQuality(void); +#endif + +}; + +/* Helpper function */ +int getSensorId(int camId); + +#ifdef SENSOR_NAME_GET_FROM_FILE +int getSensorIdFromFile(int camId); +#endif + +#ifdef SENSOR_FW_GET_FROM_FILE +const char *getSensorFWFromFile(char *sensor_fw, size_t size, int camId); +#endif + +}; /* namespace android */ + +#endif /* ANDROID_HARDWARE_SECCAMERAHARDWARE_H */ diff --git a/libcamera_external/SecCameraHardware1MetadataConverter.cpp b/libcamera_external/SecCameraHardware1MetadataConverter.cpp new file mode 100644 index 0000000..e2d4d36 --- /dev/null +++ b/libcamera_external/SecCameraHardware1MetadataConverter.cpp @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SecCameraHarewareMetadataConverter" + +#include "SecCameraHardware1MetadataConverter.h" + +namespace android { +#define SET_BIT(x) (1 << x) + +SecCameraHardware1MetadataConverter::SecCameraHardware1MetadataConverter(int cameraId, CameraParameters *parameters) +{ + m_cameraId = cameraId; +} + +SecCameraHardware1MetadataConverter::~SecCameraHardware1MetadataConverter() +{ +} + +status_t SecCameraHardware1MetadataConverter::constructStaticInfo(int cameraId, camera_metadata_t **cameraInfo) +{ + status_t ret = NO_ERROR; + uint8_t flashAvailable = 0x0; + + CameraMetadata info; + + if (CHECK_FLASH_LED(cameraId)) { + flashAvailable = 0x1; + } else { + flashAvailable = 0x0; + } + + ALOGD("DEBUG(%s[%d]): cameraId(%d) flashAvailable ? %d", __FUNCTION__, __LINE__, cameraId, flashAvailable); + + ret = info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1); + if (ret < 0) + ALOGD("DEBUG(%s):ANDROID_FLASH_INFO_AVAILABLE update failed(%d)", __FUNCTION__, ret); + + *cameraInfo = info.release(); + + return OK; +} +}; /* namespace android */ diff --git a/libcamera_external/SecCameraHardware1MetadataConverter.h b/libcamera_external/SecCameraHardware1MetadataConverter.h new file mode 100644 index 0000000..e39ef4d --- /dev/null +++ b/libcamera_external/SecCameraHardware1MetadataConverter.h @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2014, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SEC_CAMERA_HARDWARE_METADATA_CONVERTER_H__ +#define SEC_CAMERA_HARDWARE_METADATA_CONVERTER_H__ + +#include +#include +#include +#include + +#include "SecCameraHardware.h" +#include + +namespace android { +enum rectangle_index { + X1, + Y1, + X2, + Y2, + RECTANGLE_MAX_INDEX, +}; + +class SecCameraHardwareMetadataConverter : public virtual RefBase { +public: + SecCameraHardwareMetadataConverter(){}; + ~SecCameraHardwareMetadataConverter(){}; +}; + +class SecCameraHardware1MetadataConverter : public virtual SecCameraHardwareMetadataConverter { +public: + SecCameraHardware1MetadataConverter(int cameraId, CameraParameters *parameters); + ~SecCameraHardware1MetadataConverter(); + static status_t constructStaticInfo(int cameraId, camera_metadata_t **info); +private: + int m_cameraId; +}; + +}; /* namespace android */ +#endif diff --git a/libcamera_external/SecCameraInterface.cpp b/libcamera_external/SecCameraInterface.cpp new file mode 100644 index 0000000..1426db4 --- /dev/null +++ b/libcamera_external/SecCameraInterface.cpp @@ -0,0 +1,766 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. 
LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + + /*! + * \file SecCameraInterface.cpp + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_SECCAMERAINTERFACE_CPP +#define ANDROID_HARDWARE_SECCAMERAINTERFACE_CPP + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "SecCameraInterface" + +#include "SecCameraInterface.h" +#include "SecCameraHardware.h" + +namespace android { + +static int HAL_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t** device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(id); + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + bool hasFlash = false; + FILE *fp = NULL; + int ret = 0; + char flashFilePath[100] = {'\0',}; + +#ifdef BOARD_FRONT_CAMERA_ONLY_USE + cameraId += 1; +#endif + +#ifdef BOARD_BACK_CAMERA_USES_EXTERNAL_CAMERA + if (cameraId == 0) { + return HAL_ext_camera_device_open_wrapper(module, id, device); + } +#endif + +#ifdef BOARD_FRONT_CAMERA_USES_EXTERNAL_CAMERA + if (cameraId == 1) { + return HAL_ext_camera_device_open_wrapper(module, id, device); + } +#endif + + return 0; +} + +static int HAL_camera_device_close(struct hw_device_t* device) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId; + enum CAMERA_STATE state; + char camid[10]; + + ALOGI("INFO(%s[%d]): in", __FUNCTION__, __LINE__); + + if (!g_cam_device[cameraId]) + ALOGI("camera device already closed"); + + if (device) { + camera_device_t *cam_device = (camera_device_t *)device; + cameraId = obj(cam_device)->getCameraId(); + + ALOGI("INFO(%s[%d]):camera(%d)", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_CLOSED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + delete static_cast(cam_device->priv); + free(cam_device); + g_cam_device[cameraId] = NULL; + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d)", __FUNCTION__, __LINE__, cameraId); + } + + /* Update torch status */ + g_cam_torchEnabled[cameraId] = false; + snprintf(camid, sizeof(camid), "%d\n", cameraId); + g_callbacks->torch_mode_status_change(g_callbacks, camid, TORCH_MODE_STATUS_AVAILABLE_OFF); + + ALOGI("INFO(%s[%d]): out", __FUNCTION__, __LINE__); + + return 0; +} + +static int HAL_camera_device_set_preview_window( + struct camera_device *dev, + struct preview_stream_ops *buf) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + ret = obj(dev)->setPreviewWindow(buf); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); + return ret; +} + +static void HAL_camera_device_set_callbacks(struct camera_device *dev, + camera_notify_callback notify_cb, + 
camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void* user) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + obj(dev)->setCallbacks(notify_cb, data_cb, data_cb_timestamp, + get_memory, + user); +} + +static void HAL_camera_device_enable_msg_type( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGI("INFO(%s):", __FUNCTION__); + obj(dev)->enableMsgType(msg_type); +} + +static void HAL_camera_device_disable_msg_type( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGI("INFO(%s):", __FUNCTION__); + obj(dev)->disableMsgType(msg_type); +} + +static int HAL_camera_device_msg_type_enabled( + struct camera_device *dev, + int32_t msg_type) +{ + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->msgTypeEnabled(msg_type); +} + +static int HAL_camera_device_start_preview(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_PREVIEW; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + ret = obj(dev)->startPreview(); + + ALOGI("INFO(%s[%d]):camera(%d) out from startPreview()", + __FUNCTION__, __LINE__, cameraId); + + if (ret == OK) { + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out (startPreview succeeded)", + __FUNCTION__, __LINE__, cameraId); + } else { + ALOGI("INFO(%s[%d]):camera(%d) out (startPreview FAILED)", + __FUNCTION__, __LINE__, cameraId); + } + + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); + + return ret; +} + +static void HAL_camera_device_stop_preview(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); +/* HACK : If camera in recording state, */ +/* CameraService have to call the stop_recording before the stop_preview */ +#if 1 + if (cam_state[cameraId] == CAMERA_RECORDING) { + ALOGE("ERR(%s[%d]):camera(%d) in RECORDING RUNNING state ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + ALOGE("ERR(%s[%d]):camera(%d) The stop_recording must be called " + "before the stop_preview ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + HAL_camera_device_stop_recording(dev); + ALOGE("ERR(%s[%d]):cameraId=%d out from stop_recording ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + + for (int i=0; i<30; i++) { + ALOGE("ERR(%s[%d]):camera(%d) The stop_recording must be called " + "before the stop_preview ---- INVALID ----", + __FUNCTION__, __LINE__, cameraId); + } + ALOGE("ERR(%s[%d]):camera(%d) sleep 500ms for ---- INVALID ---- state", + __FUNCTION__, __LINE__, cameraId); + usleep(500000); /* to notify, sleep 500ms */ + } +#endif + state = CAMERA_PREVIEWSTOPPED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return; + } + + obj(dev)->stopPreview(); + ALOGI("INFO(%s[%d]):camera(%d) out from stopPreview()", + __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + 
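+    /* Preview is stopped and cam_state[cameraId] has been moved to
+     * CAMERA_PREVIEWSTOPPED under cam_stateLock, so the next transition
+     * (start_preview, release or close) will pass the
+     * check_camera_state() guard. */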
ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_preview_enabled(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->previewEnabled(); +} + +static int HAL_camera_device_store_meta_data_in_buffers( + struct camera_device *dev, + int enable) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return obj(dev)->storeMetaDataInBuffers(enable); +} + +static int HAL_camera_device_start_recording(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + static int ret; + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RECORDING; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return -1; + } + + ret = obj(dev)->startRecording(); + ALOGI("INFO(%s[%d]):camera(%d) out from startRecording()", + __FUNCTION__, __LINE__, cameraId); + + if (ret == OK) { + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out (startRecording succeeded)", + __FUNCTION__, __LINE__, cameraId); + } else { + ALOGI("INFO(%s[%d]):camera(%d) out (startRecording FAILED)", + __FUNCTION__, __LINE__, cameraId); + } + return ret; +} + +static void HAL_camera_device_stop_recording(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RECORDINGSTOPPED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return; + } + + obj(dev)->stopRecording(); + ALOGI("INFO(%s[%d]):camera(%d) out from stopRecording()", + __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_recording_enabled(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->recordingEnabled(); +} + +static void HAL_camera_device_release_recording_frame(struct camera_device *dev, + const void *opaque) +{ + /* ExynosCameraAutoTimer autoTimer(__FUNCTION__); */ + ALOGI("INFO(%s):", __FUNCTION__); + obj(dev)->releaseRecordingFrame(opaque); +} + +static int HAL_camera_device_auto_focus(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->autoFocus(); +} + +static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->cancelAutoFocus(); +} + +static int HAL_camera_device_take_picture(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->takePicture(); +} + +static int HAL_camera_device_cancel_picture(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->cancelPicture(); +} + +static int 
HAL_camera_device_set_parameters( + struct camera_device *dev, + const char *parms) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + String8 str(parms); + CameraParameters p(str); + return obj(dev)->setParameters(p); +} + +char *HAL_camera_device_get_parameters(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + String8 str; + CameraParameters parms = obj(dev)->getParameters(); + str = parms.flatten(); + return strdup(str.string()); +} + +static void HAL_camera_device_put_parameters( + __unused struct camera_device *dev, + char *parms) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + free(parms); +} + +static int HAL_camera_device_send_command( + struct camera_device *dev, + int32_t cmd, + int32_t arg1, + int32_t arg2) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->sendCommand(cmd, arg1, arg2); +} + +static void HAL_camera_device_release(struct camera_device *dev) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + uint32_t cameraId = obj(dev)->getCameraId(); + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + + state = CAMERA_RELEASED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", + __FUNCTION__, cameraId, state); + return; + } + + obj(dev)->release(); + ALOGI("INFO(%s[%d]):camera(%d) out from release()", + __FUNCTION__, __LINE__, cameraId); + + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); +} + +static int HAL_camera_device_dump(struct camera_device *dev, int fd) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGI("INFO(%s):", __FUNCTION__); + return obj(dev)->dump(fd); +} + +static int HAL_getNumberOfCameras() +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + ALOGV("DEBUG(%s):", __FUNCTION__); + return sizeof(sCameraInfo) / sizeof(sCameraInfo[0]); +} + +static int HAL_set_callbacks(const camera_module_callbacks_t *callbacks) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + if (callbacks == NULL) + ALOGE("ERR(%s[%d]):dev is NULL", __FUNCTION__, __LINE__); + + g_callbacks = callbacks; + + return 0; +} + +static int HAL_getCameraInfo(int cameraId, struct camera_info *info) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + status_t ret = NO_ERROR; + + ALOGV("DEBUG(%s):", __FUNCTION__); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s):Invalid camera ID %d", __FUNCTION__, cameraId); + return -EINVAL; + } + + memcpy(info, &sCameraInfo[cameraId], sizeof(CameraInfo)); + info->device_version = HARDWARE_DEVICE_API_VERSION(1, 0); + + if (g_cam_info[cameraId] == NULL) { + ALOGD("DEBUG(%s[%d]):Return static information (%d)", __FUNCTION__, __LINE__, cameraId); + ret = SecCameraHardware1MetadataConverter::constructStaticInfo(cameraId, &g_cam_info[cameraId]); + if (ret != 0) { + ALOGE("ERR(%s[%d]): static information is NULL", __FUNCTION__, __LINE__); + return -EINVAL; + } + info->static_camera_characteristics = g_cam_info[cameraId]; + } else { + ALOGD("DEBUG(%s[%d]):Reuse!! 
Return static information (%d)", __FUNCTION__, __LINE__, cameraId); + info->static_camera_characteristics = g_cam_info[cameraId]; + } + + /* set service arbitration (resource_cost, conflicting_devices, conflicting_devices_length */ + info->resource_cost = sCameraConfigInfo[cameraId].resource_cost; + info->conflicting_devices = sCameraConfigInfo[cameraId].conflicting_devices; + info->conflicting_devices_length = sCameraConfigInfo[cameraId].conflicting_devices_length; + ALOGD("INFO(%s info->resource_cost = %d ", __FUNCTION__, info->resource_cost); + if (info->conflicting_devices_length) { + for (size_t i = 0; i < info->conflicting_devices_length; i++) { + ALOGD("INFO(%s info->conflicting_devices = %s ", __FUNCTION__, info->conflicting_devices[i]); + } + } else { + ALOGD("INFO(%s info->conflicting_devices_length is zero ", __FUNCTION__); + } + + return NO_ERROR; +} + +static int HAL_set_torch_mode(const char* camera_id, bool enabled) +{ + ExynosCameraAutoTimer autoTimer(__FUNCTION__); + + int cameraId = atoi(camera_id); + FILE *fp = NULL; + char flashFilePath[100] = {'\0',}; + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + int ret = 0; + + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { + ALOGE("ERR(%s[%d]):Invalid camera ID %d", __FUNCTION__, __LINE__, cameraId); + return -EINVAL; + } + + /* Check the android.flash.info.available */ + /* If this camera device does not support flash, It have to return -ENOSYS */ + metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + ALOGV("DEBUG(%s[%d]): Flash metadata exist", __FUNCTION__, __LINE__); + } else { + ALOGE("ERR(%s[%d]): Can not find flash metadata", __FUNCTION__, __LINE__); + return -ENOSYS; + } + + ALOGI("INFO(%s[%d]): Current Camera State (state = %d)", __FUNCTION__, __LINE__, cam_state[cameraId]); + + /* Add the check the camera state that camera in use or not */ + if (cam_state[cameraId] > CAMERA_CLOSED) { + ALOGE("ERR(%s[%d]): Camera Device is busy (state = %d)", __FUNCTION__, __LINE__, cam_state[cameraId]); + if (g_callbacks) { + g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_OFF); + ALOGI("INFO(%s[%d]):camera(%d) TORCH_MODE_STATUS_AVAILABLE_OFF", __FUNCTION__, __LINE__, cameraId); + } + return -EBUSY; + } + + /* Add the sysfs file read (sys/class/camera/flash/torch_flash) then set 0 or 1 */ + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open(%s) fail, ret(%d)", + __FUNCTION__, __LINE__, flashFilePath, fp); + return -ENOSYS; + } + + if (enabled) { + fwrite("1", sizeof(char), 1, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + } + + fflush(fp); + + ret = fclose(fp); + if (ret != 0) { + ALOGE("ERR(%s[%d]): file close failed(%d)", __FUNCTION__, __LINE__, ret); + } + + if (enabled) { + g_cam_torchEnabled[cameraId] = true; + if (g_callbacks) { + g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_ON); + ALOGI("INFO(%s[%d]):camera(%d) TORCH_MODE_STATUS_AVAILABLE_ON", __FUNCTION__, __LINE__, cameraId); + } + } else { + g_cam_torchEnabled[cameraId] = false; + if (g_callbacks) { + 
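+            /* Torch was switched off ("0" written to the sysfs node above);
+             * report TORCH_MODE_STATUS_AVAILABLE_OFF so the framework marks
+             * the flash unit as free again. */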
g_callbacks->torch_mode_status_change(g_callbacks, camera_id, TORCH_MODE_STATUS_AVAILABLE_OFF); + ALOGI("INFO(%s[%d]):camera(%d) TORCH_MODE_STATUS_AVAILABLE_OFF", __FUNCTION__, __LINE__, cameraId); + } + } + + ALOGI("INFO(%s[%d]):out =====", __FUNCTION__, __LINE__); + + return NO_ERROR; +} + +static int HAL_init() +{ + ALOGI("INFO(%s[%d]):in =====", __FUNCTION__, __LINE__); + + return OK; +} + +static int HAL_open_legacy(__unused const struct hw_module_t* module, __unused const char* id, + __unused uint32_t halVersion, __unused struct hw_device_t** device) +{ + ALOGV("DEBUG(%s):", __FUNCTION__); + return NO_ERROR; +} + +static void HAL_get_vendor_tag_ops(__unused vendor_tag_ops_t* ops) +{ + ALOGV("INFO(%s):", __FUNCTION__); +} + + +static int HAL_ext_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t **device) +{ + int cameraId = atoi(id); + CameraMetadata metadata; + camera_metadata_entry flashAvailable; + bool hasFlash = false; + FILE *fp = NULL; + int ret = 0; + char *flashFilePath = NULL; + enum CAMERA_STATE state; + + ALOGI("INFO(%s[%d]):camera(%d) in", __FUNCTION__, __LINE__, cameraId); + +#ifdef BOARD_FRONT_CAMERA_ONLY_USE + if (cameraId < 1 || cameraId > HAL_getNumberOfCameras()) +#else + if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) +#endif + { + if (cameraId != 99) { + ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); + return -EINVAL; + } else { + cameraId = 2; + } + } + + state = CAMERA_OPENED; + if (check_camera_state(state, cameraId) == false) { + ALOGE("ERR(%s):camera(%d) state(%d) is INVALID", __FUNCTION__, cameraId, state); + return -1; + } + +#ifdef BOARD_FRONT_CAMERA_ONLY_USE + if ((unsigned int)cameraId <= (sizeof(sCameraInfo) / sizeof(sCameraInfo[0]))) +#else + if ((unsigned int)cameraId < (sizeof(sCameraInfo) / sizeof(sCameraInfo[0]))) +#endif + { + if (g_cam_device[cameraId]) { + ALOGE("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); + goto done; + } + + g_cam_device[cameraId] = (camera_device_t *)malloc(sizeof(camera_device_t)); + if (!g_cam_device[cameraId]) { + ALOGE("DEBUG(%s):camera_device_open: error, fail to get memory", __FUNCTION__, id); + return -ENOMEM; + } + + g_cam_device[cameraId]->common.tag = HARDWARE_DEVICE_TAG; + g_cam_device[cameraId]->common.version = 1; + g_cam_device[cameraId]->common.module = const_cast(module); + g_cam_device[cameraId]->common.close = HAL_camera_device_close; + + g_cam_device[cameraId]->ops = &camera_device_ops; + + ALOGD("DEBUG(%s):open camera %s", __FUNCTION__, id); + g_cam_device[cameraId]->priv = new SecCameraHardware(cameraId, g_cam_device[cameraId]); + if (!obj(g_cam_device[cameraId])->mInitialized) { + ALOGE("Instance is not created"); + if (g_cam_device[cameraId]->priv) { + delete static_cast(g_cam_device[cameraId]->priv); + free(g_cam_device[cameraId]); + g_cam_device[cameraId] = 0; + } + return -ENOSYS; + } + else{ + ALOGI("INFO(%s[%d]):camera(%d) out from new g_cam_device[%d]->priv()", + __FUNCTION__, __LINE__, cameraId, cameraId); + } + } else { + ALOGE("DEBUG(%s):camera(%s) open fail - must front camera open first", + __FUNCTION__, id); + return -EINVAL; + } + +done: + *device = (hw_device_t *)g_cam_device[cameraId]; + cam_stateLock[cameraId].lock(); + cam_state[cameraId] = state; + cam_stateLock[cameraId].unlock(); + + if (g_cam_info[cameraId]) { + metadata = g_cam_info[cameraId]; + flashAvailable = metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + hasFlash = true; + } 
else { + hasFlash = false; + } + } + + if(hasFlash && g_cam_torchEnabled[cameraId]) { + if (cameraId == CAMERA_ID_BACK) { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_REAR_FILE_PATH); + } else { + snprintf(flashFilePath, sizeof(flashFilePath), TORCH_FRONT_FILE_PATH); + } + + fp = fopen(flashFilePath, "w+"); + + if (fp == NULL) { + ALOGE("ERR(%s[%d]):torch file open(%s) fail, ret(%d)", + __FUNCTION__, __LINE__, flashFilePath, fp); + } else { + fwrite("0", sizeof(char), 1, fp); + fflush(fp); + fclose(fp); + + g_cam_torchEnabled[cameraId] = false; + } + } + + g_callbacks->torch_mode_status_change(g_callbacks, id, TORCH_MODE_STATUS_NOT_AVAILABLE); + + ALOGI("INFO(%s[%d]):camera(%d) out", __FUNCTION__, __LINE__, cameraId); + return 0; +} + +int HAL_ext_camera_device_open_wrapper( + const struct hw_module_t* module, + const char *id, + struct hw_device_t **device) +{ + return HAL_ext_camera_device_open(module, id, device); +} + +}; /* namespace android */ + +#endif /* ANDROID_HARDWARE_SECCAMERAINTERFACE_CPP */ diff --git a/libcamera_external/SecCameraInterface.h b/libcamera_external/SecCameraInterface.h new file mode 100644 index 0000000..0e65156 --- /dev/null +++ b/libcamera_external/SecCameraInterface.h @@ -0,0 +1,411 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! 
+ * \file SecCameraInterface.h + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#ifndef ANDROID_HARDWARE_SECCAMERAINTERFACE_H +#define ANDROID_HARDWARE_SECCAMERAINTERFACE_H + +#include +#include +#include +#include +#include +#include +#include + +#include "SecCameraHardware1MetadataConverter.h" +#include "SecCameraInterfaceState.h" + +#ifndef CAMERA_MODULE_VERSION +#define CAMERA_MODULE_VERSION CAMERA_MODULE_API_VERSION_2_4 +#endif + +#define SET_METHOD(m) m : HAL_camera_device_##m + +#define MAX_NUM_OF_CAMERA 2 + +namespace android { + +static CameraInfo sCameraInfo[] = { +#if !defined(BOARD_FRONT_CAMERA_ONLY_USE) + { + CAMERA_FACING_BACK, + BACK_ROTATION /* orientation */ + }, +#endif + { + CAMERA_FACING_FRONT, + FRONT_ROTATION /* orientation */ + } +}; + +/* flashlight control */ +#define TORCH_REAR_FILE_PATH "/sys/class/camera/flash/rear_torch_flash" +#define TORCH_FRONT_FILE_PATH "/sys/class/camera/flash/front_torch_flash" + +/* This struct used in device3.3 service arbitration */ +struct CameraConfigInfo { + int resource_cost; + char** conflicting_devices; + size_t conflicting_devices_length; +}; + +const CameraConfigInfo sCameraConfigInfo[] = { +#if !defined(BOARD_FRONT_CAMERA_ONLY_USE) + { + 51, /* resoruce_cost : [0 , 100] */ + NULL, /* conflicting_devices : NULL, (char *[]){"1"}, (char *[]){"0", "1"} */ + 0, /* conflicting_devices_lenght : The length of the array in the conflicting_devices field */ + }, +#endif + { + 51, /* resoruce_cost : [0, 100] */ + NULL, /* conflicting_devices : NULL, (char *[]){"0"}, (char *[]){"0", "1"} */ + 0, /* conflicting_devices_lenght : The length of the array in the conflicting_devices field */ + } +}; + +static camera_metadata_t *g_cam_info[MAX_NUM_OF_CAMERA] = {NULL, NULL}; +static const camera_module_callbacks_t *g_callbacks = NULL; + +static camera_device_t *g_cam_device[MAX_NUM_OF_CAMERA]; +static bool g_cam_torchEnabled[MAX_NUM_OF_CAMERA] = {false, false}; + +static inline SecCameraHardware *obj(struct camera_device *dev) +{ + return reinterpret_cast(dev->priv); +} + +/** + * Open camera device + */ +static int HAL_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t** device); + +/** + * Close camera device + */ +static int HAL_camera_device_close(struct hw_device_t* device); + +/** + * Set the preview_stream_ops to which preview frames are sent + */ +static int HAL_camera_device_set_preview_window( + struct camera_device *dev, + struct preview_stream_ops *buf); + +/** + * Set the notification and data callbacks + */ +static void HAL_camera_device_set_callbacks( + struct camera_device *dev, + camera_notify_callback notify_cb, + camera_data_callback data_cb, + camera_data_timestamp_callback data_cb_timestamp, + camera_request_memory get_memory, + void* user); + +/** + * The following three functions all take a msg_type, which is a bitmask of + * the messages defined in include/ui/Camera.h + */ + +/** + * Enable a message, or set of messages. + */ +static void HAL_camera_device_enable_msg_type( + struct camera_device *dev, + int32_t msg_type); + +/** + * Disable a message, or a set of messages. + * + * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera + * HAL should not rely on its client to call releaseRecordingFrame() to + * release video recording frames sent out by the cameral HAL before and + * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. 
Camera HAL + * clients must not modify/access any video recording frame after calling + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). + */ +static void HAL_camera_device_disable_msg_type( + struct camera_device *dev, + int32_t msg_type); + +/** + * Query whether a message, or a set of messages, is enabled. Note that + * this is operates as an AND, if any of the messages queried are off, this + * will return false. + */ +static int HAL_camera_device_msg_type_enabled( + struct camera_device *dev, + int32_t msg_type); + +/** + * Start preview mode. + */ +static int HAL_camera_device_start_preview(struct camera_device *dev); + +/** + * Stop a previously started preview. + */ +static void HAL_camera_device_stop_preview(struct camera_device *dev); + +/** + * Returns true if preview is enabled. + */ +static int HAL_camera_device_preview_enabled(struct camera_device *dev); + +/** + * Request the camera HAL to store meta data or real YUV data in the video + * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If + * it is not called, the default camera HAL behavior is to store real YUV + * data in the video buffers. + * + * This method should be called before startRecording() in order to be + * effective. + * + * If meta data is stored in the video buffers, it is up to the receiver of + * the video buffers to interpret the contents and to find the actual frame + * data with the help of the meta data in the buffer. How this is done is + * outside of the scope of this method. + * + * Some camera HALs may not support storing meta data in the video buffers, + * but all camera HALs should support storing real YUV data in the video + * buffers. If the camera HAL does not support storing the meta data in the + * video buffers when it is requested to do do, INVALID_OPERATION must be + * returned. It is very useful for the camera HAL to pass meta data rather + * than the actual frame data directly to the video encoder, since the + * amount of the uncompressed frame data can be very large if video size is + * large. + * + * @param enable if true to instruct the camera HAL to store + * meta data in the video buffers; false to instruct + * the camera HAL to store real YUV data in the video + * buffers. + * + * @return OK on success. + */ +static int HAL_camera_device_store_meta_data_in_buffers( + struct camera_device *dev, + int enable); + +/** + * Start record mode. When a record image is available, a + * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding + * frame. Every record frame must be released by a camera HAL client via + * releaseRecordingFrame() before the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames, + * and the client must not modify/access any video recording frames. + */ +static int HAL_camera_device_start_recording(struct camera_device *dev); + +/** + * Stop a previously started recording. + */ +static void HAL_camera_device_stop_recording(struct camera_device *dev); + +/** + * Returns true if recording is enabled. + */ +static int HAL_camera_device_recording_enabled(struct camera_device *dev); + +/** + * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + * + * It is camera HAL client's responsibility to release video recording + * frames sent out by the camera HAL before the camera HAL receives a call + * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). 
After it receives the call to + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's + * responsibility to manage the life-cycle of the video recording frames. + */ +static void HAL_camera_device_release_recording_frame( + struct camera_device *dev, + const void *opaque); + +/** + * Start auto focus, the notification callback routine is called with + * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be + * called again if another auto focus is needed. + */ +static int HAL_camera_device_auto_focus(struct camera_device *dev); + +/** + * Cancels auto-focus function. If the auto-focus is still in progress, + * this function will cancel it. Whether the auto-focus is in progress or + * not, this function will return the focus position to the default. If + * the camera does not support auto-focus, this is a no-op. + */ +static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev); + +/** + * Take a picture. + */ +static int HAL_camera_device_take_picture(struct camera_device *dev); + +/** + * Cancel a picture that was started with takePicture. Calling this method + * when no picture is being taken is a no-op. + */ +static int HAL_camera_device_cancel_picture(struct camera_device *dev); + +/** + * Set the camera parameters. This returns BAD_VALUE if any parameter is + * invalid or not supported. + */ +static int HAL_camera_device_set_parameters( + struct camera_device *dev, + const char *parms); + +/** + * Return the camera parameters. + */ +char *HAL_camera_device_get_parameters(struct camera_device *dev); + +/** + * Release buffer that used by the camera parameters. + */ +static void HAL_camera_device_put_parameters( + struct camera_device *dev, + char *parms); + +/** + * Send command to camera driver. + */ +static int HAL_camera_device_send_command( + struct camera_device *dev, + int32_t cmd, + int32_t arg1, + int32_t arg2); + +/** + * Release the hardware resources owned by this object. Note that this is + * *not* done in the destructor. + */ +static void HAL_camera_device_release(struct camera_device *dev); + +/** + * Dump state of the camera hardware + */ +static int HAL_camera_device_dump(struct camera_device *dev, int fd); + +/** + * Callback functions for the camera HAL module to use to inform the framework + * of changes to the camera subsystem. These are called only by HAL modules + * implementing version CAMERA_MODULE_API_VERSION_2_1 or higher of the HAL + * module API interface. 
+ */ +static int HAL_set_callbacks(const camera_module_callbacks_t *callbacks); + +/** + * Retrun the camera hardware info + */ +static int HAL_getCameraInfo(int cameraId, struct camera_info *info); + +/** + * Return number of the camera hardware + */ +static int HAL_getNumberOfCameras(); + +static int HAL_open_legacy(const struct hw_module_t* module, const char* id, uint32_t halVersion, struct hw_device_t** device); + +static void HAL_get_vendor_tag_ops(vendor_tag_ops_t* ops); +static int HAL_set_torch_mode(const char* camera_id, bool enabled); +static int HAL_init(); + +static int HAL_ext_camera_device_open( + const struct hw_module_t* module, + const char *id, + struct hw_device_t **device); + +/** + * Open camera device + */ +int HAL_ext_camera_device_open_wrapper( + const struct hw_module_t* module, + const char *id, + struct hw_device_t** device); + +static camera_device_ops_t camera_device_ops = { + SET_METHOD(set_preview_window), + SET_METHOD(set_callbacks), + SET_METHOD(enable_msg_type), + SET_METHOD(disable_msg_type), + SET_METHOD(msg_type_enabled), + SET_METHOD(start_preview), + SET_METHOD(stop_preview), + SET_METHOD(preview_enabled), + SET_METHOD(store_meta_data_in_buffers), + SET_METHOD(start_recording), + SET_METHOD(stop_recording), + SET_METHOD(recording_enabled), + SET_METHOD(release_recording_frame), + SET_METHOD(auto_focus), + SET_METHOD(cancel_auto_focus), + SET_METHOD(take_picture), + SET_METHOD(cancel_picture), + SET_METHOD(set_parameters), + SET_METHOD(get_parameters), + SET_METHOD(put_parameters), + SET_METHOD(send_command), + SET_METHOD(release), + SET_METHOD(dump), +}; + +static hw_module_methods_t ext_camera_module_methods = { + open : HAL_camera_device_open +}; + +extern "C" { + struct camera_module HAL_MODULE_INFO_SYM = { + common : { + tag : HARDWARE_MODULE_TAG, + module_api_version : CAMERA_MODULE_VERSION, + hal_api_version : HARDWARE_HAL_API_VERSION, + id : CAMERA_HARDWARE_MODULE_ID, + name : "Exynos Camera HAL1", + author : "Samsung Corporation", + methods : &ext_camera_module_methods, + dso : NULL, + reserved : {0}, + }, + get_number_of_cameras : HAL_getNumberOfCameras, + get_camera_info : HAL_getCameraInfo, + set_callbacks : HAL_set_callbacks, +#if (TARGET_ANDROID_VER_MAJ >= 4 && TARGET_ANDROID_VER_MIN >= 4) + get_vendor_tag_ops : HAL_get_vendor_tag_ops, + open_legacy : HAL_open_legacy, + set_torch_mode : HAL_set_torch_mode, + init : HAL_init, + reserved : {0} +#endif + }; +} + +}; /* namespace android */ + +#endif /* ANDROID_HARDWARE_SECCAMERAINTERFACE_H */ diff --git a/libcamera_external/SecCameraInterfaceState.h b/libcamera_external/SecCameraInterfaceState.h new file mode 100644 index 0000000..95e79f4 --- /dev/null +++ b/libcamera_external/SecCameraInterfaceState.h @@ -0,0 +1,135 @@ +/* +** +** Copyright 2013, Samsung Electronics Co. LTD +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#include "model/include/SecCameraHardware-model.h" + +namespace android { + +enum CAMERA_STATE { + CAMERA_NONE, + CAMERA_OPENED, + CAMERA_RELEASED, + CAMERA_CLOSED, + CAMERA_PREVIEW, + CAMERA_PREVIEWSTOPPED, + CAMERA_RECORDING, + CAMERA_RECORDINGSTOPPED, +}; + +static const char *camera_state_enum2str[40] = { + "NONE", + "OPENED", + "RELEASED", + "CLOSED", + "PREVIEW_RUNNING", + "PREVIEW_STOPPED", + "RECORDING_RUNNING", + "RECORDING_STOPPED" +}; + +static CAMERA_STATE cam_state[2]; +static Mutex cam_stateLock[2]; + +static int check_camera_state(CAMERA_STATE state, int cameraId) +{ + bool ret = false; + cam_stateLock[cameraId].lock(); + + ALOGD("DEBUG(%s):camera(%d) state(%d) checking...", __FUNCTION__, cameraId, state); + +#ifndef DUAL_CAMERA_SUPPORTED + /* Assuming that only 2 cameras are present */ + if ((state == CAMERA_OPENED) && (cam_state[(cameraId + 1) % 2] != CAMERA_NONE) && + (cam_state[(cameraId + 1) % 2] != CAMERA_CLOSED)) { + ALOGE("ERR(%s):camera(%d) DUAL camera not supported\n", __FUNCTION__, cameraId); + cam_stateLock[cameraId].unlock(); + return ret; + } +#endif + + ALOGV("DEBUG(%s): cam_state[%d] = %d \n",__FUNCTION__, cameraId, cam_state[cameraId]); + + switch (state) { + case CAMERA_NONE: + ret = true; + break; + case CAMERA_OPENED: + if (cam_state[cameraId] == CAMERA_NONE || + cam_state[cameraId] == CAMERA_CLOSED) + ret = true; + break; + case CAMERA_RELEASED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEWSTOPPED) + ret = true; + break; + case CAMERA_CLOSED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEWSTOPPED || + cam_state[cameraId] == CAMERA_RELEASED) + ret = true; + break; + case CAMERA_PREVIEW: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEWSTOPPED || + cam_state[cameraId] == CAMERA_RECORDINGSTOPPED) + ret = true; + break; + case CAMERA_PREVIEWSTOPPED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_OPENED || + cam_state[cameraId] == CAMERA_PREVIEW || + cam_state[cameraId] == CAMERA_RECORDINGSTOPPED) + ret = true; + break; + case CAMERA_RECORDING: + if (cam_state[cameraId] == CAMERA_PREVIEW || + cam_state[cameraId] == CAMERA_RECORDINGSTOPPED) + ret = true; + break; + case CAMERA_RECORDINGSTOPPED: + if (cam_state[cameraId] == state || + cam_state[cameraId] == CAMERA_RECORDING) + ret = true; + break; + default: + ALOGE("ERR(%s):camera(%d) state(%s) is unknown value", + __FUNCTION__, cameraId, camera_state_enum2str[state]); + ret = false; + break; + } + + if (ret == true) { + ALOGD("DEBUG(%s):camera(%d) state(%d:%s->%d:%s) is valid", + __FUNCTION__, cameraId, + cam_state[cameraId], camera_state_enum2str[cam_state[cameraId]], + state, camera_state_enum2str[state]); + } else { + ALOGE("ERR(%s):camera(%d) state(%d:%s->%d:%s) is INVALID", + __FUNCTION__, cameraId, + cam_state[cameraId], camera_state_enum2str[cam_state[cameraId]], + state, camera_state_enum2str[state]); + } + + cam_stateLock[cameraId].unlock(); + return ret; +} + +}; /* namespace android */ diff --git a/libcamera_external/SecCameraParameters.cpp b/libcamera_external/SecCameraParameters.cpp new file mode 100644 index 0000000..d1a3fb5 --- /dev/null +++ b/libcamera_external/SecCameraParameters.cpp @@ -0,0 +1,135 @@ +/* + * Copyright 2008, The Android Open Source Project + * Copyright 2013, Samsung Electronics Co. 
LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + /*! + * \file SecCameraParameters.cpp + * \brief source file for Android Camera Ext HAL + * \author teahyung kim (tkon.kim@samsung.com) + * \date 2013/04/30 + * + */ + +#define LOG_TAG "SecCameraParams" +#include + +#include +#include +#include + +#include "SecCameraParameters.h" + +namespace android { + +/* Parameter keys to communicate between camera application and driver. */ +const char SecCameraParameters::KEY_DTP_MODE[] = "chk_dataline"; + +const char SecCameraParameters::KEY_VT_MODE[] = "vtmode"; +const char SecCameraParameters::KEY_MOVIE_MODE[] = "cam_mode"; + +const char SecCameraParameters::KEY_ISO[] = "iso"; +const char SecCameraParameters::KEY_METERING[] = "metering"; +const char SecCameraParameters::KEY_AUTO_CONTRAST[] = "wdr"; +const char SecCameraParameters::KEY_ANTI_SHAKE[] = "anti-shake"; +const char SecCameraParameters::KEY_FACE_BEAUTY[] = "face_beauty"; +const char SecCameraParameters::KEY_HDR_MODE[] = "hdr-mode"; +const char SecCameraParameters::KEY_BLUR[] = "blur"; +const char SecCameraParameters::KEY_ANTIBANDING[] = "antibanding"; + + +/* for Image Adjust */ +const char SecCameraParameters::KEY_COLOR[] = "color"; +const char SecCameraParameters::KEY_CONTRAST[] = "contrast"; +const char SecCameraParameters::KEY_SHARPNESS[] = "sharpness"; +const char SecCameraParameters::KEY_SATURATION[] = "saturation"; + + +/* Values for scene mode settings. */ +const char SecCameraParameters::SCENE_MODE_DUSK_DAWN[] = "dusk-dawn"; +const char SecCameraParameters::SCENE_MODE_FALL_COLOR[] = "fall-color"; +const char SecCameraParameters::SCENE_MODE_BACK_LIGHT[] = "back-light"; +const char SecCameraParameters::SCENE_MODE_TEXT[] = "text"; + +/* Values for focus mode settings. */ +const char SecCameraParameters::FOCUS_MODE_FACEDETECT[] = "facedetect"; + +/* Values for iso settings. */ +const char SecCameraParameters::ISO_AUTO[] = "auto"; +const char SecCameraParameters::ISO_50[] = "50"; +const char SecCameraParameters::ISO_100[] = "100"; +const char SecCameraParameters::ISO_200[] = "200"; +const char SecCameraParameters::ISO_400[] = "400"; +const char SecCameraParameters::ISO_800[] = "800"; +const char SecCameraParameters::ISO_1600[] = "1600"; +const char SecCameraParameters::ISO_SPORTS[] = "sports"; +const char SecCameraParameters::ISO_NIGHT[] = "night"; + +/* Values for metering settings. 
*/ +const char SecCameraParameters::METERING_CENTER[] = "center"; +const char SecCameraParameters::METERING_MATRIX[] = "matrix"; +const char SecCameraParameters::METERING_SPOT[] = "spot"; + +SecCameraParameters::SecCameraParameters() +{ +} + +SecCameraParameters::~SecCameraParameters() +{ +} + +int SecCameraParameters::lookupAttr(const cam_strmap_t arr[], int len, const char *name) +{ + if (name) { + for (int i = 0; i < len; i++) { + if (!strcmp(arr[i].desc, name)) + return arr[i].val; + } + } + return NOT_FOUND; +} + +String8 SecCameraParameters::createSizesStr(const image_rect_type *sizes, int len) +{ + String8 str; + char buffer[32]; + + if (len > 0) { + snprintf(buffer, sizeof(buffer), "%dx%d", sizes[0].width, sizes[0].height); + str.append(buffer); + } + + for (int i = 1; i < len; i++) { + snprintf(buffer, sizeof(buffer), ",%dx%d", sizes[i].width, sizes[i].height); + str.append(buffer); + } + return str; +} + +String8 SecCameraParameters::createValuesStr(const cam_strmap_t *values, int len) +{ + String8 str; + + if (len > 0) + str.append(values[0].desc); + + for (int i=1; i +#include +#include +#include + +#define NOT_FOUND -1 + +#define ARRAY_SIZE(x) (sizeof(x) / sizeof(*x)) +#define FRM_RATIO(x) ((x).width * 10 / (x).height) +#define SIZE_RATIO(w, h) ((w) * 10 / (h)) + +namespace android { +/** + * The size of image for display. + */ +typedef struct image_rect_struct { + uint32_t width; /* Image width */ + uint32_t height; /* Image height */ +} image_rect_type; + +typedef struct cam_strmap { + const char *desc; + int val; +} cam_strmap_t; + +typedef struct view_angle { + uint32_t size_ratio; + float view_angle; +} view_angle_type; + +typedef enum { + CAM_CID_FW_MODE = V4L2_CID_CAM_UPDATE_FW, + CAM_CID_DTP_MODE = V4L2_CID_CAMERA_CHECK_DATALINE, + CAM_CID_VT_MODE = V4L2_CID_CAMERA_VT_MODE, + CAM_CID_MOVIE_MODE = V4L2_CID_CAMERA_SENSOR_MODE, + CAM_CID_JPEG_QUALITY = V4L2_CID_CAMERA_JPEG_QUALITY, + CAM_CID_ROTATION = V4L2_CID_ROTATION, + CAM_CID_SCENE_MODE = V4L2_CID_CAMERA_SCENE_MODE, + CAM_CID_ISO = V4L2_CID_CAMERA_ISO, + CAM_CID_BRIGHTNESS = V4L2_CID_CAMERA_BRIGHTNESS, + CAM_CID_SHARPNESS = V4L2_CID_CAMERA_SHARPNESS, + CAM_CID_CONTRAST = V4L2_CID_CAMERA_CONTRAST, + CAM_CID_SATURATION = V4L2_CID_CAMERA_SATURATION, + CAM_CID_WHITE_BALANCE = V4L2_CID_WHITE_BALANCE_PRESET, + CAM_CID_FLASH = V4L2_CID_CAMERA_FLASH_MODE, + CAM_CID_METERING = V4L2_CID_CAMERA_METERING, + CAM_CID_EFFECT = V4L2_CID_CAMERA_EFFECT, + CAM_CID_ZOOM = V4L2_CID_CAMERA_ZOOM, + CAM_CID_BLUR = V4L2_CID_CAMERA_VGA_BLUR, + CAM_CID_AUTO_CONTRAST = V4L2_CID_CAMERA_WDR, + CAM_CID_ANTISHAKE = V4L2_CID_CAMERA_ANTI_SHAKE, + CAM_CID_FRAME_RATE = V4L2_CID_CAM_FRAME_RATE, + CAM_CID_CHECK_ESD = V4L2_CID_CAMERA_CHECK_ESD, + CAM_CID_ANTIBANDING = V4L2_CID_CAMERA_ANTI_BANDING, + CAM_CID_OBJ_TRACKING = V4L2_CID_CAMERA_OBJ_TRACKING_START_STOP, + + CAM_CID_FOCUS_MODE = V4L2_CID_CAMERA_FOCUS_MODE, + + CAM_CID_SET_TOUCH_AF_POSX = V4L2_CID_CAMERA_OBJECT_POSITION_X, + CAM_CID_SET_TOUCH_AF_POSY = V4L2_CID_CAMERA_OBJECT_POSITION_Y, + CAM_CID_SET_TOUCH_AF = V4L2_CID_CAMERA_TOUCH_AF_START_STOP, + + CAM_CID_VFLIP = V4L2_CID_VFLIP, + CAM_CID_HFLIP = V4L2_CID_HFLIP, + CAM_CID_AE_LOCK_UNLOCK = V4L2_CID_CAMERA_AE_LOCK_UNLOCK, + CAM_CID_AWB_LOCK_UNLOCK = V4L2_CID_CAMERA_AWB_LOCK_UNLOCK, + CAM_CID_AEAWB_LOCK_UNLOCK = V4L2_CID_CAMERA_AEAWB_LOCK_UNLOCK, + CAM_CID_IS_S_FORMAT_SCENARIO = V4L2_CID_IS_S_FORMAT_SCENARIO, + CAM_CID_ANTI_BANDING = V4L2_CID_CAMERA_ANTI_BANDING, + + CAM_CID_CAPTURE_MODE = V4L2_CID_CAMERA_CAPTURE, + CAM_CID_JPEG_MAIN_SIZE = 
V4L2_CID_CAM_JPEG_MAIN_SIZE, +} cam_control_id; + +typedef enum { + CAM_FRMRATIO_QCIF = 12, /* 11 : 9 */ + CAM_FRMRATIO_VGA = 13, /* 4 : 3 */ + CAM_FRMRATIO_D1 = 15, /* 3 : 2 */ + CAM_FRMRATIO_WVGA = 16, /* 5 : 3 */ + CAM_FRMRATIO_HD = 17, /* 16 : 9 */ + CAM_FRMRATIO_SQUARE = 10, /* 1 : 1 */ +} cam_frmratio; + +typedef enum { + CAM_PIXEL_FORMAT_YUV422SP = V4L2_PIX_FMT_NV61, + CAM_PIXEL_FORMAT_YUV420SP = V4L2_PIX_FMT_NV21, + CAM_PIXEL_FORMAT_YUV420P = V4L2_PIX_FMT_YUV420, +/* CAM_PIXEL_FORMAT_YUV422I = V4L2_PIX_FMT_VYUY, */ + CAM_PIXEL_FORMAT_YUV422I = V4L2_PIX_FMT_YUYV, + CAM_PIXEL_FORMAT_RGB565 = V4L2_PIX_FMT_RGB565, + CAM_PIXEL_FORMAT_JPEG = V4L2_PIX_FMT_JPEG, +/* Only for SAMSUNG */ +/*CAM_PIXEL_FORMAT_YUV420 = V4L2_PIX_FMT_NV12, */ + CAM_PIXEL_FORMAT_YUV420 = V4L2_PIX_FMT_NV12M, + CAM_PIXEL_FORMAT_YVU420P = V4L2_PIX_FMT_YVU420, /* For support YV12 */ +} cam_pixel_format; + +class SecCameraParameters { +public: + SecCameraParameters(); + ~SecCameraParameters(); + + static const char KEY_DTP_MODE[]; + + static const char KEY_VT_MODE[]; + static const char KEY_MOVIE_MODE[]; + + static const char KEY_ISO[]; + static const char KEY_METERING[]; + static const char KEY_AUTO_CONTRAST[]; + static const char KEY_ANTI_SHAKE[]; + static const char KEY_FACE_BEAUTY[]; + static const char KEY_HDR_MODE[]; + static const char KEY_BLUR[]; + static const char KEY_ANTIBANDING[]; + + static const char KEY_COLOR[]; + static const char KEY_CONTRAST[]; + static const char KEY_SHARPNESS[]; + static const char KEY_SATURATION[]; + + // Values for scene mode settings. + static const char SCENE_MODE_DUSK_DAWN[]; + static const char SCENE_MODE_FALL_COLOR[]; + static const char SCENE_MODE_BACK_LIGHT[]; + static const char SCENE_MODE_TEXT[]; + + // Values for focus mode settings. + static const char FOCUS_MODE_FACEDETECT[]; + + // Values for ISO settings. + static const char ISO_AUTO[]; + static const char ISO_50[]; + static const char ISO_100[]; + static const char ISO_200[]; + static const char ISO_400[]; + static const char ISO_800[]; + static const char ISO_1600[]; + static const char ISO_SPORTS[]; + static const char ISO_NIGHT[]; + + // Values for metering settings. + static const char METERING_CENTER[]; + static const char METERING_MATRIX[]; + static const char METERING_SPOT[]; + + static int lookupAttr(const cam_strmap_t arr[], int len, const char *name); + static String8 createSizesStr(const image_rect_type *sizes, int len); + static String8 createValuesStr(const cam_strmap_t *values, int len); +}; + +}; // namespace android + +#endif /* ANDROID_HARDWARE_SEC_CAMERA_PARAMETERS_H */ + diff --git a/libcec/Android.mk b/libcec/Android.mk new file mode 100644 index 0000000..763c204 --- /dev/null +++ b/libcec/Android.mk @@ -0,0 +1,25 @@ +# Copyright (C) 2012 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
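+#
+# The rules below build the CEC helper library for this BSP: the single
+# source file libcec.c is compiled into the libcec shared module, linked
+# against liblog and tagged "eng".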
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE_TAGS := eng + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog +LOCAL_SRC_FILES := libcec.c + +LOCAL_MODULE := libcec +include $(BUILD_SHARED_LIBRARY) diff --git a/libcec/NOTICE b/libcec/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libcec/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + diff --git a/libcec/cec.h b/libcec/cec.h new file mode 100644 index 0000000..96d4d7f --- /dev/null +++ b/libcec/cec.h @@ -0,0 +1,26 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef _LINUX_CEC_H_ +#define _LINUX_CEC_H_ + +#define CEC_IOC_MAGIC 'c' + +/** + * CEC device request code to set logical address. + */ +#define CEC_IOC_SETLADDR _IOW(CEC_IOC_MAGIC, 0, unsigned int) + +#endif /* _LINUX_CEC_H_ */ diff --git a/libcec/libcec.c b/libcec/libcec.c new file mode 100644 index 0000000..d464b5f --- /dev/null +++ b/libcec/libcec.c @@ -0,0 +1,384 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include +#include + +/* drv. header */ +#include "cec.h" + +#include "libcec.h" + +#define CEC_DEBUG 0 + +/** + * @def CEC_DEVICE_NAME + * Defines simbolic name of the CEC device. + */ +#define CEC_DEVICE_NAME "/dev/CEC" + +static struct { + enum CECDeviceType devtype; + unsigned char laddr; +} laddresses[] = { + { CEC_DEVICE_RECODER, 1 }, + { CEC_DEVICE_RECODER, 2 }, + { CEC_DEVICE_TUNER, 3 }, + { CEC_DEVICE_PLAYER, 4 }, + { CEC_DEVICE_AUDIO, 5 }, + { CEC_DEVICE_TUNER, 6 }, + { CEC_DEVICE_TUNER, 7 }, + { CEC_DEVICE_PLAYER, 8 }, + { CEC_DEVICE_RECODER, 9 }, + { CEC_DEVICE_TUNER, 10 }, + { CEC_DEVICE_PLAYER, 11 }, +}; + +static int CECSetLogicalAddr(unsigned int laddr); + +#ifdef CEC_DEBUG +inline static void CECPrintFrame(unsigned char *buffer, unsigned int size); +#endif + +static int fd = -1; + +/** + * Open device driver and assign CEC file descriptor. + * + * @return If success to assign CEC file descriptor, return fd; otherwise, return -1. + */ +int CECOpen() +{ + if (fd != -1) + CECClose(); + + if ((fd = open(CEC_DEVICE_NAME, O_RDWR)) < 0) { + ALOGE("Can't open %s!\n", CEC_DEVICE_NAME); + return -1; + } + + return fd; +} + +/** + * Close CEC file descriptor. + * + * @return If success to close CEC file descriptor, return 1; otherwise, return 0. + */ +int CECClose() +{ + int res = 1; + + if (fd != -1) { + if (close(fd) != 0) { + ALOGE("close() failed!\n"); + res = 0; + } + fd = -1; + } + + return res; +} + +/** + * Allocate logical address. + * + * @param paddr [in] CEC device physical address. + * @param devtype [in] CEC device type. + * + * @return new logical address, or 0 if an error occured. 
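 *
 * Illustrative call sequence (a sketch, not part of the original source;
 * it only uses functions declared in libcec.h):
 *
 *   int fd = CECOpen();                  // opens /dev/CEC
 *   if (fd >= 0) {
 *       int laddr = CECAllocLogicalAddress(paddr, CEC_DEVICE_PLAYER);
 *       if (laddr != 0 && laddr != CEC_LADDR_UNREGISTERED) {
 *           // build frames with laddr as the initiator nibble, then call
 *           // CECSendMessage() / CECReceiveMessage() as needed
 *       }
 *       CECClose();
 *   }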
+ */ +int CECAllocLogicalAddress(int paddr, enum CECDeviceType devtype) +{ + unsigned char laddr = CEC_LADDR_UNREGISTERED; + size_t i = 0; + + if (fd == -1) { + ALOGE("open device first!\n"); + return 0; + } + + if (CECSetLogicalAddr(laddr) < 0) { + ALOGE("CECSetLogicalAddr() failed!\n"); + return 0; + } + + if (paddr == CEC_NOT_VALID_PHYSICAL_ADDRESS) + return CEC_LADDR_UNREGISTERED; + + /* send "Polling Message" */ + while (i < sizeof(laddresses) / sizeof(laddresses[0])) { + if (laddresses[i].devtype == devtype) { + unsigned char _laddr = laddresses[i].laddr; + unsigned char message = ((_laddr << 4) | _laddr); + if (CECSendMessage(&message, 1) != 1) { + laddr = _laddr; + break; + } + } + i++; + } + + if (laddr == CEC_LADDR_UNREGISTERED) { + ALOGE("All LA addresses in use!!!\n"); + return CEC_LADDR_UNREGISTERED; + } + + if (CECSetLogicalAddr(laddr) < 0) { + ALOGE("CECSetLogicalAddr() failed!\n"); + return 0; + } + + /* broadcast "Report Physical Address" */ + unsigned char buffer[5]; + buffer[0] = (laddr << 4) | CEC_MSG_BROADCAST; + buffer[1] = CEC_OPCODE_REPORT_PHYSICAL_ADDRESS; + buffer[2] = (paddr >> 8) & 0xFF; + buffer[3] = paddr & 0xFF; + buffer[4] = devtype; + + if (CECSendMessage(buffer, 5) != 5) { + ALOGE("CECSendMessage() failed!\n"); + return 0; + } + + return laddr; +} + +/** + * Send CEC message. + * + * @param *buffer [in] pointer to buffer address where message located. + * @param size [in] message size. + * + * @return number of bytes written, or 0 if an error occured. + */ +int CECSendMessage(unsigned char *buffer, int size) +{ + if (fd == -1) { + ALOGE("open device first!\n"); + return 0; + } + + if (size > CEC_MAX_FRAME_SIZE) { + ALOGE("size should not exceed %d\n", CEC_MAX_FRAME_SIZE); + return 0; + } + +#if CEC_DEBUG + ALOGI("CECSendMessage() : "); + CECPrintFrame(buffer, size); +#endif + + return write(fd, buffer, size); +} + +/** + * Receive CEC message. + * + * @param *buffer [in] pointer to buffer address where message will be stored. + * @param size [in] buffer size. + * @param timeout [in] timeout in microseconds. + * + * @return number of bytes received, or 0 if an error occured. + */ +int CECReceiveMessage(unsigned char *buffer, int size, long timeout) +{ + int bytes = 0; + fd_set rfds; + struct timeval tv; + int retval; + + if (fd == -1) { + ALOGE("open device first!\n"); + return 0; + } + + tv.tv_sec = 0; + tv.tv_usec = timeout; + + FD_ZERO(&rfds); + FD_SET(fd, &rfds); + + retval = select(fd + 1, &rfds, NULL, NULL, &tv); + + if (retval == -1) { + return 0; + } else if (retval) { + bytes = read(fd, buffer, size); +#if CEC_DEBUG + ALOGI("CECReceiveMessage() : size(%d)", bytes); + if(bytes > 0) + CECPrintFrame(buffer, bytes); +#endif + } + + return bytes; +} + +/** + * Set CEC logical address. + * + * @return 1 if success, otherwise, return 0. + */ +int CECSetLogicalAddr(unsigned int laddr) +{ + if (ioctl(fd, CEC_IOC_SETLADDR, &laddr)) { + ALOGE("ioctl(CEC_IOC_SETLA) failed!\n"); + return 0; + } + + return 1; +} + +#if CEC_DEBUG +/** + * Print CEC frame. + */ +void CECPrintFrame(unsigned char *buffer, unsigned int size) +{ + if (size > 0) { + size_t i; + ALOGI("fsize: %d ", size); + ALOGI("frame: "); + for (i = 0; i < size; i++) + ALOGI("0x%02x ", buffer[i]); + + ALOGI("\n"); + } +} +#endif + +/** + * Check CEC message. + * + * @param opcode [in] pointer to buffer address where message will be stored. + * @param lsrc [in] buffer size. + * + * @return 1 if message should be ignored, otherwise, return 0. 
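 *
 * (Reading the implementation: opcode is the CEC opcode byte of the received
 * frame and lsrc is the initiator's logical address taken from the header
 * block; e.g. <Play> or <Deck Control> arriving from the unregistered
 * address 15 is reported as ignorable.)
 *
 * A receive path would typically combine this with the size/mode checks
 * below, roughly as in this sketch (not part of the original source):
 *
 *   unsigned char buf[CEC_MAX_FRAME_SIZE];
 *   int n = CECReceiveMessage(buf, CEC_MAX_FRAME_SIZE, 100000);  // 100 ms
 *   if (n > 1) {
 *       unsigned char initiator = buf[0] >> 4;
 *       int broadcast = (buf[0] & 0x0F) == CEC_MSG_BROADCAST;
 *       unsigned char opcode = buf[1];
 *       if (CECCheckMessageSize(opcode, n - 1) &&  // opcode + operands only
 *           CECCheckMessageMode(opcode, broadcast) &&
 *           !CECIgnoreMessage(opcode, initiator)) {
 *           // handle the frame
 *       }
 *   }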
+ */ +//TODO: not finished +int CECIgnoreMessage(unsigned char opcode, unsigned char lsrc) +{ + int retval = 0; + + /* if a message coming from address 15 (unregistered) */ + if (lsrc == CEC_LADDR_UNREGISTERED) { + switch (opcode) { + case CEC_OPCODE_DECK_CONTROL: + case CEC_OPCODE_PLAY: + retval = 1; + default: + break; + } + } + + return retval; +} + +/** + * Check CEC message. + * + * @param opcode [in] pointer to buffer address where message will be stored. + * @param size [in] message size. + * + * @return 0 if message should be ignored, otherwise, return 1. + */ +//TODO: not finished +int CECCheckMessageSize(unsigned char opcode, int size) +{ + int retval = 1; + + switch (opcode) { + case CEC_OPCODE_REQUEST_ACTIVE_SOURCE: + if (size != 1) + retval = 0; + break; + case CEC_OPCODE_SET_SYSTEM_AUDIO_MODE: + if (size != 2) + retval = 0; + break; + case CEC_OPCODE_PLAY: + case CEC_OPCODE_DECK_CONTROL: + case CEC_OPCODE_SET_MENU_LANGUAGE: + case CEC_OPCODE_ACTIVE_SOURCE: + case CEC_OPCODE_ROUTING_INFORMATION: + case CEC_OPCODE_SET_STREAM_PATH: + if (size != 3) + retval = 0; + break; + case CEC_OPCODE_FEATURE_ABORT: + case CEC_OPCODE_DEVICE_VENDOR_ID: + case CEC_OPCODE_REPORT_PHYSICAL_ADDRESS: + if (size != 4) + retval = 0; + break; + case CEC_OPCODE_ROUTING_CHANGE: + if (size != 5) + retval = 0; + break; + /* CDC - 1.4 */ + case 0xf8: + if (!(size > 5 && size <= 16)) + retval = 0; + break; + default: + break; + } + + return retval; +} + +/** + * Check CEC message. + * + * @param opcode [in] pointer to buffer address where message will be stored. + * @param broadcast [in] broadcast/direct message. + * + * @return 0 if message should be ignored, otherwise, return 1. + */ +//TODO: not finished +int CECCheckMessageMode(unsigned char opcode, int broadcast) +{ + int retval = 1; + + switch (opcode) { + case CEC_OPCODE_REQUEST_ACTIVE_SOURCE: + case CEC_OPCODE_SET_MENU_LANGUAGE: + case CEC_OPCODE_ACTIVE_SOURCE: + if (!broadcast) + retval = 0; + break; + case CEC_OPCODE_GIVE_PHYSICAL_ADDRESS: + case CEC_OPCODE_DECK_CONTROL: + case CEC_OPCODE_PLAY: + case CEC_OPCODE_FEATURE_ABORT: + case CEC_OPCODE_ABORT: + if (broadcast) + retval = 0; + break; + default: + break; + } + + return retval; +} diff --git a/libcec/libcec.h b/libcec/libcec.h new file mode 100644 index 0000000..9086232 --- /dev/null +++ b/libcec/libcec.h @@ -0,0 +1,173 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _LIBCEC_H_ +#define _LIBCEC_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +/* Maximum CEC frame size */ +#define CEC_MAX_FRAME_SIZE 16 +/* Not valid CEC physical address */ +#define CEC_NOT_VALID_PHYSICAL_ADDRESS 0xFFFF + +/* CEC broadcast address (as destination address) */ +#define CEC_MSG_BROADCAST 0x0F +/* CEC unregistered address (as initiator address) */ +#define CEC_LADDR_UNREGISTERED 0x0F + +/* + * CEC Messages + */ + +/* @name Messages for the One Touch Play Feature */ +#define CEC_OPCODE_ACTIVE_SOURCE 0x82 +#define CEC_OPCODE_IMAGE_VIEW_ON 0x04 +#define CEC_OPCODE_TEXT_VIEW_ON 0x0D + +/* @name Messages for the Routing Control Feature */ +#define CEC_OPCODE_INACTIVE_SOURCE 0x9D +#define CEC_OPCODE_REQUEST_ACTIVE_SOURCE 0x85 +#define CEC_OPCODE_ROUTING_CHANGE 0x80 +#define CEC_OPCODE_ROUTING_INFORMATION 0x81 +#define CEC_OPCODE_SET_STREAM_PATH 0x86 + +/* @name Messages for the Standby Feature */ +#define CEC_OPCODE_STANDBY 0x36 + +/* @name Messages for the One Touch Record Feature */ +#define CEC_OPCODE_RECORD_OFF 0x0B +#define CEC_OPCODE_RECORD_ON 0x09 +#define CEC_OPCODE_RECORD_STATUS 0x0A +#define CEC_OPCODE_RECORD_TV_SCREEN 0x0F + +/* @name Messages for the Timer Programming Feature */ +#define CEC_OPCODE_CLEAR_ANALOGUE_TIMER 0x33 +#define CEC_OPCODE_CLEAR_DIGITAL_TIMER 0x99 +#define CEC_OPCODE_CLEAR_EXTERNAL_TIMER 0xA1 +#define CEC_OPCODE_SET_ANALOGUE_TIMER 0x34 +#define CEC_OPCODE_SET_DIGITAL_TIMER 0x97 +#define CEC_OPCODE_SET_EXTERNAL_TIMER 0xA2 +#define CEC_OPCODE_SET_TIMER_PROGRAM_TITLE 0x67 +#define CEC_OPCODE_TIMER_CLEARED_STATUS 0x43 +#define CEC_OPCODE_TIMER_STATUS 0x35 + +/* @name Messages for the System Information Feature */ +#define CEC_OPCODE_CEC_VERSION 0x9E +#define CEC_OPCODE_GET_CEC_VERSION 0x9F +#define CEC_OPCODE_GIVE_PHYSICAL_ADDRESS 0x83 +#define CEC_OPCODE_GET_MENU_LANGUAGE 0x91 +//#define CEC_OPCODE_POLLING_MESSAGE +#define CEC_OPCODE_REPORT_PHYSICAL_ADDRESS 0x84 +#define CEC_OPCODE_SET_MENU_LANGUAGE 0x32 + +/* @name Messages for the Deck Control Feature */ +#define CEC_OPCODE_DECK_CONTROL 0x42 +#define CEC_OPCODE_DECK_STATUS 0x1B +#define CEC_OPCODE_GIVE_DECK_STATUS 0x1A +#define CEC_OPCODE_PLAY 0x41 + +/* @name Messages for the Tuner Control Feature */ +#define CEC_OPCODE_GIVE_TUNER_DEVICE_STATUS 0x08 +#define CEC_OPCODE_SELECT_ANALOGUE_SERVICE 0x92 +#define CEC_OPCODE_SELECT_DIGITAL_SERVICE 0x93 +#define CEC_OPCODE_TUNER_DEVICE_STATUS 0x07 +#define CEC_OPCODE_TUNER_STEP_DECREMENT 0x06 +#define CEC_OPCODE_TUNER_STEP_INCREMENT 0x05 + +/* @name Messages for the Vendor Specific Commands Feature */ +#define CEC_OPCODE_DEVICE_VENDOR_ID 0x87 +#define CEC_OPCODE_GET_DEVICE_VENDOR_ID 0x8C +#define CEC_OPCODE_VENDOR_COMMAND 0x89 +#define CEC_OPCODE_VENDOR_COMMAND_WITH_ID 0xA0 +#define CEC_OPCODE_VENDOR_REMOTE_BUTTON_DOWN 0x8A +#define CEC_OPCODE_VENDOR_REMOVE_BUTTON_UP 0x8B + +/* @name Messages for the OSD Display Feature */ +#define CEC_OPCODE_SET_OSD_STRING 0x64 + +/* @name Messages for the Device OSD Transfer Feature */ +#define CEC_OPCODE_GIVE_OSD_NAME 0x46 +#define CEC_OPCODE_SET_OSD_NAME 0x47 + +/* @name Messages for the Device Menu Control Feature */ +#define CEC_OPCODE_MENU_REQUEST 0x8D +#define CEC_OPCODE_MENU_STATUS 0x8E +#define CEC_OPCODE_USER_CONTROL_PRESSED 0x44 +#define CEC_OPCODE_USER_CONTROL_RELEASED 0x45 + +/* @name Messages for the Remote Control Passthrough Feature */ + +/* @name Messages for the Power Status Feature */ +#define CEC_OPCODE_GIVE_DEVICE_POWER_STATUS 0x8F +#define CEC_OPCODE_REPORT_POWER_STATUS 0x90 + +/* 
@name Messages for General Protocol messages */ +#define CEC_OPCODE_FEATURE_ABORT 0x00 +#define CEC_OPCODE_ABORT 0xFF + +/* @name Messages for the System Audio Control Feature */ +#define CEC_OPCODE_GIVE_AUDIO_STATUS 0x71 +#define CEC_OPCODE_GIVE_SYSTEM_AUDIO_MODE_STATUS 0x7D +#define CEC_OPCODE_REPORT_AUDIO_STATUS 0x7A +#define CEC_OPCODE_SET_SYSTEM_AUDIO_MODE 0x72 +#define CEC_OPCODE_SYSTEM_AUDIO_MODE_REQUEST 0x70 +#define CEC_OPCODE_SYSTEM_AUDIO_MODE_STATUS 0x7E + +/* @name Messages for the Audio Rate Control Feature */ +#define CEC_OPCODE_SET_AUDIO_RATE 0x9A + +/* @name CEC Operands */ + +/* TODO: not finished */ + +#define CEC_DECK_CONTROL_MODE_STOP 0x03 +#define CEC_PLAY_MODE_PLAY_FORWARD 0x24 + +/* + * @enum CECDeviceType + * Type of CEC device + */ +enum CECDeviceType { + /* TV */ + CEC_DEVICE_TV, + /* Recording Device */ + CEC_DEVICE_RECODER, + /* Tuner */ + CEC_DEVICE_TUNER, + /* Playback Device */ + CEC_DEVICE_PLAYER, + /* Audio System */ + CEC_DEVICE_AUDIO, +}; + +int CECOpen(); +int CECClose(); +int CECAllocLogicalAddress(int paddr, enum CECDeviceType devtype); +int CECSendMessage(unsigned char *buffer, int size); +int CECReceiveMessage(unsigned char *buffer, int size, long timeout); + +int CECIgnoreMessage(unsigned char opcode, unsigned char lsrc); +int CECCheckMessageSize(unsigned char opcode, int size); +int CECCheckMessageMode(unsigned char opcode, int broadcast); + +#ifdef __cplusplus +} +#endif + +#endif /* _LIBCEC_H_ */ diff --git a/libcsc/Android.mk b/libcsc/Android.mk new file mode 100644 index 0000000..7ab1905 --- /dev/null +++ b/libcsc/Android.mk @@ -0,0 +1,47 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE_TAGS := optional + +LOCAL_SRC_FILES := \ + csc.c + +LOCAL_C_INCLUDES := \ + hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libexynosutils + +LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH) + +LOCAL_CFLAGS := + +LOCAL_MODULE := libcsc + +LOCAL_PRELINK_MODULE := false + +LOCAL_ARM_MODE := arm + +LOCAL_STATIC_LIBRARIES := libswconverter +LOCAL_SHARED_LIBRARIES := liblog libexynosutils libexynosscaler + +LOCAL_CFLAGS += -DUSE_SAMSUNG_COLORFORMAT + +ifdef BOARD_DEFAULT_CSC_HW_SCALER +LOCAL_CFLAGS += -DDEFAULT_CSC_HW=$(BOARD_DEFAULT_CSC_HW_SCALER) +else +LOCAL_CFLAGS += -DDEFAULT_CSC_HW=5 +endif + +ifeq ($(BOARD_USES_FIMC), true) +LOCAL_SHARED_LIBRARIES += libexynosfimc +else +LOCAL_SHARED_LIBRARIES += libexynosgscaler +endif + +ifeq ($(BOARD_USES_DEFAULT_CSC_HW_SCALER), true) +LOCAL_CFLAGS += -DUSES_DEFAULT_CSC_HW_SCALER +endif + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libcsc/NOTICE b/libcsc/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libcsc/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libcsc/csc.c b/libcsc/csc.c new file mode 100644 index 0000000..8f1a2b4 --- /dev/null +++ b/libcsc/csc.c @@ -0,0 +1,1312 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * @file csc.c + * + * @brief color space convertion abstract source + * + * @author Pyoungjae Jung(pjet.jung@samsung.com) + * + * @version 1.0.0 + * + * @history + * 2012.1.11 : Create + */ +#define LOG_TAG "libcsc" +#include + +#include +#include +#include +#include +#include + +#include "csc.h" +#include "exynos_format.h" +#include "swconverter.h" + +#ifdef USES_FIMC +#include "exynos_fimc.h" +#endif + +#ifdef USES_GSCALER +#include "exynos_gscaler.h" +#include "exynos_scaler.h" +#endif + +#define GSCALER_IMG_ALIGN 16 +#define FIMC_IMG_ALIGN_WIDTH 16 +#define FIMC_IMG_ALIGN_HEIGHT 2 +#define MFC_IMG_ALIGN_WIDTH 16 + +static CSC_ERRORCODE copy_mfc_data(CSC_HANDLE *handle) { + CSC_ERRORCODE ret = CSC_ErrorNone; + + int i; + char *pSrc = NULL; + char *pDst = NULL; + + ALOGV("%s: convert %x to %x", __FUNCTION__, handle->src_format.color_format, handle->dst_format.color_format); + + switch (handle->src_format.color_format) { + /* Multi FD to Single FD : decoder */ + /* remove a padding data */ + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.crop_height; i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_U_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_U_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + memcpy(pDst + ((handle->src_format.crop_width >> 1) * i), + pSrc + (ALIGN((handle->src_format.crop_width >> 1), MFC_IMG_ALIGN_WIDTH) * i), + (handle->src_format.crop_width >> 1)); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_V_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_V_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + memcpy(pDst + ((handle->src_format.crop_width >> 1) * i), + pSrc + (ALIGN((handle->src_format.crop_width >> 1), MFC_IMG_ALIGN_WIDTH) * i), + (handle->src_format.crop_width >> 1)); + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.crop_height; i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_UV_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + break; + /* Single FD to Multi FD : encoder */ + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_YV12: + /* adding a padding data for u/v plane : 420P */ + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + if (handle->src_format.width == handle->src_format.crop_width) { + memcpy(pDst, pSrc, (handle->src_format.width * handle->src_format.height)); + } else { + for (i = 0; i < 
(int)handle->src_format.height; i++) { + memcpy(pDst + (handle->src_format.width * i), + pSrc + (handle->src_format.crop_width * i), + handle->src_format.crop_width); + } + } + + pSrc = (char *)handle->src_buffer.planes[CSC_U_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_U_PLANE]; + for (i = 0; i < (int)(handle->src_format.height >> 1); i++) { + memcpy(pDst + (ALIGN((handle->src_format.width >> 1), MFC_IMG_ALIGN_WIDTH) * i), + pSrc + ((handle->src_format.crop_width >> 1) * i), + (handle->src_format.crop_width >> 1)); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_V_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_V_PLANE]; + for (i = 0; i < (int)(handle->src_format.height >> 1); i++) { + memcpy(pDst + (ALIGN((handle->src_format.width >> 1), MFC_IMG_ALIGN_WIDTH) * i), + pSrc + ((handle->src_format.crop_width >> 1) * i), + (handle->src_format.crop_width >> 1)); + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + if (handle->src_format.width == handle->src_format.crop_width) { + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + memcpy(pDst, pSrc, (handle->src_format.width * handle->src_format.height)); + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_UV_PLANE]; + memcpy(pDst, pSrc, (handle->src_format.width * (handle->src_format.height >> 1))); + } else { + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.height; i++) { + memcpy(pDst + (handle->src_format.width * i), + pSrc + (handle->src_format.crop_width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_UV_PLANE]; + memcpy(pDst, pSrc, (handle->src_format.width * (handle->src_format.height >> 1))); + for (i = 0; i < (int)(handle->src_format.height >> 1); i++) { + memcpy(pDst + (handle->src_format.width * i), + pSrc + (handle->src_format.crop_width * i), + handle->src_format.crop_width); + } + } + break; + case HAL_PIXEL_FORMAT_BGRA_8888: + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_EXYNOS_ARGB_8888: + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + memcpy(pDst, pSrc, (handle->src_format.width * handle->src_format.height * 4)); + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +/* source is BRGA888 */ +static CSC_ERRORCODE conv_sw_src_argb888( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_BGRA_8888: + if (handle->src_buffer.mem_type == CSC_MEMORY_MFC) { + ret = copy_mfc_data(handle); + } else { + ret = CSC_ErrorUnsupportFormat; + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + csc_BGRA8888_to_YUV420P( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_RGB_PLANE], + handle->src_format.width, + handle->src_format.height); + ret = CSC_ErrorNone; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case 
HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + csc_BGRA8888_to_YUV420SP( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_RGB_PLANE], + handle->src_format.width, + handle->src_format.height); + ret = CSC_ErrorNone; + break; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + csc_BGRA8888_to_YUV420P( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_RGB_PLANE], + handle->src_format.width, + handle->src_format.height); + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +/* source is RGBA888 */ +static CSC_ERRORCODE conv_sw_src_rgba888( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + if (handle->src_buffer.mem_type == CSC_MEMORY_MFC) { + ret = copy_mfc_data(handle); + } else { + ret = CSC_ErrorUnsupportFormat; + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + csc_RGBA8888_to_YUV420P( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_RGB_PLANE], + handle->src_format.width, + handle->src_format.height); + ret = CSC_ErrorNone; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + csc_RGBA8888_to_YUV420SP( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_RGB_PLANE], + handle->src_format.width, + handle->src_format.height); + ret = CSC_ErrorNone; + break; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + csc_RGBA8888_to_YUV420P( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_RGB_PLANE], + handle->src_format.width, + handle->src_format.height); + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + + +/* source is NV12T */ +static CSC_ERRORCODE conv_sw_src_nv12t( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + csc_tiled_to_linear_y( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.crop_width, + handle->src_format.crop_height); + csc_tiled_to_linear_uv_deinterleave( + (unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_UV_PLANE], + handle->src_format.crop_width, + handle->src_format.crop_height / 2); + ret = CSC_ErrorNone; + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case 
HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + csc_tiled_to_linear_y( + (unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.crop_width, + handle->src_format.crop_height); + csc_tiled_to_linear_uv( + (unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_UV_PLANE], + handle->src_format.crop_width, + handle->src_format.crop_height / 2); + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +/* source is YUV420P */ +static CSC_ERRORCODE conv_sw_src_yuv420p( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: /* bypass */ + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + if (handle->src_buffer.mem_type == CSC_MEMORY_MFC) { + ret = copy_mfc_data(handle); + } else { + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.width * handle->src_format.height); + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_U_PLANE], + (handle->src_format.width * handle->src_format.height) >> 2); + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_V_PLANE], + (handle->src_format.width * handle->src_format.height) >> 2); + ret = CSC_ErrorNone; + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.width * handle->src_format.height); + csc_interleave_memcpy( + (unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_V_PLANE], + (handle->src_format.width * handle->src_format.height) >> 2); + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +/* source is YVU420P */ +static CSC_ERRORCODE conv_sw_src_yvu420p( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_YV12: /* bypass */ + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + if (handle->src_buffer.mem_type == CSC_MEMORY_MFC) { + ret = copy_mfc_data(handle); + } else { + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.width * handle->src_format.height); + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_U_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_U_PLANE], + (handle->src_format.width * handle->src_format.height) >> 2); + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_V_PLANE], + (handle->src_format.width * handle->src_format.height) >> 2); + ret = CSC_ErrorNone; + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char 
*)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.width * handle->src_format.height); + csc_interleave_memcpy( + (unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_V_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_U_PLANE], + (handle->src_format.width * handle->src_format.height) >> 2); + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +/* source is YUV420SP */ +static CSC_ERRORCODE conv_sw_src_yuv420sp( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + char *pSrc = NULL; + char *pDst = NULL; + char *pDstU = NULL; + char *pDstV = NULL; + int srcOffset, dstOffset; + int i, j; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: /* bypass */ + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + if (handle->src_buffer.mem_type == CSC_MEMORY_MFC) { + ret = copy_mfc_data(handle); + } else { + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.width * handle->src_format.height); + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_UV_PLANE], + handle->src_format.width * handle->src_format.height >> 1); + ret = CSC_ErrorNone; + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + { + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.crop_height; i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDstU = (char *)handle->dst_buffer.planes[CSC_U_PLANE]; + pDstV = (char *)handle->dst_buffer.planes[CSC_V_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + for (j = 0; j < (int)(handle->src_format.crop_width >> 1); j++) { + srcOffset = (i * handle->src_format.width) + (j * 2); + dstOffset = i * (handle->src_format.crop_width >> 1); + + pDstU[dstOffset + j] = pSrc[srcOffset]; + pDstV[dstOffset + j] = pSrc[srcOffset + 1]; + } + } + ret = CSC_ErrorNone; + } + break; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.crop_height; i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDstU = (char *)handle->dst_buffer.planes[CSC_U_PLANE]; + pDstV = (char *)handle->dst_buffer.planes[CSC_V_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + for (j = 0; j < (int)(handle->src_format.crop_width >> 1); j++) { + srcOffset = (i * handle->src_format.width) + (j * 2); + dstOffset = i * (handle->src_format.crop_width >> 1); + + pDstU[dstOffset + j] = pSrc[srcOffset + 1]; + pDstV[dstOffset + j] = pSrc[srcOffset]; + } + } + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +/* source is YVU420SP */ +static CSC_ERRORCODE conv_sw_src_yvu420sp( + CSC_HANDLE *handle) +{ + 
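    /*
     * Source is a YVU420 semi-planar layout (Y plane followed by an
     * interleaved Cr/Cb plane, NV21-style). The bypass cases below copy the
     * planes unchanged (or strip MFC padding via copy_mfc_data()); the planar
     * cases split the interleaved chroma plane byte-by-byte into the two
     * destination chroma planes, using crop_width/crop_height as the copy
     * extent and width as the source line stride.
     */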
CSC_ERRORCODE ret = CSC_ErrorNone; + + char *pSrc = NULL; + char *pDst = NULL; + char *pDstU = NULL; + char *pDstV = NULL; + int srcOffset, dstOffset; + int i, j; + + switch (handle->dst_format.color_format) { + case HAL_PIXEL_FORMAT_YCrCb_420_SP: /* bypass */ + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + if (handle->src_buffer.mem_type == CSC_MEMORY_MFC) { + ret = copy_mfc_data(handle); + } else { + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_Y_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_Y_PLANE], + handle->src_format.width * handle->src_format.height); + memcpy((unsigned char *)handle->dst_buffer.planes[CSC_UV_PLANE], + (unsigned char *)handle->src_buffer.planes[CSC_UV_PLANE], + handle->src_format.width * handle->src_format.height >> 1); + ret = CSC_ErrorNone; + } + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.crop_height; i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDstU = (char *)handle->dst_buffer.planes[CSC_U_PLANE]; + pDstV = (char *)handle->dst_buffer.planes[CSC_V_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + for (j = 0; j < (int)(handle->src_format.crop_width >> 1); j++) { + srcOffset = (i * handle->src_format.width) + (j * 2); + dstOffset = i * (handle->src_format.crop_width >> 1); + + pDstU[dstOffset + j] = pSrc[srcOffset + 1]; + pDstV[dstOffset + j] = pSrc[srcOffset]; + } + } + ret = CSC_ErrorNone; + break; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + pSrc = (char *)handle->src_buffer.planes[CSC_Y_PLANE]; + pDst = (char *)handle->dst_buffer.planes[CSC_Y_PLANE]; + for (i = 0; i < (int)handle->src_format.crop_height; i++) { + memcpy(pDst + (handle->src_format.crop_width * i), + pSrc + (handle->src_format.width * i), + handle->src_format.crop_width); + } + + pSrc = (char *)handle->src_buffer.planes[CSC_UV_PLANE]; + pDstU = (char *)handle->dst_buffer.planes[CSC_U_PLANE]; + pDstV = (char *)handle->dst_buffer.planes[CSC_V_PLANE]; + for (i = 0; i < (int)(handle->src_format.crop_height >> 1); i++) { + for (j = 0; j < (int)(handle->src_format.crop_width >> 1); j++) { + srcOffset = (i * handle->src_format.width) + (j * 2); + dstOffset = i * (handle->src_format.crop_width >> 1); + + pDstU[dstOffset + j] = pSrc[srcOffset]; + pDstV[dstOffset + j] = pSrc[srcOffset + 1]; + } + } + ret = CSC_ErrorNone; + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +static CSC_ERRORCODE conv_sw( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + + switch (handle->src_format.color_format) { + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED: + ret = conv_sw_src_nv12t(handle); + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + ret = conv_sw_src_yuv420p(handle); + break; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + ret = conv_sw_src_yvu420p(handle); + break; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + 
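    /* 8- and 10-bit NV12 variants (including the private and S10B layouts)
     * all take the NV12 software path selected below. */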
case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + ret = conv_sw_src_yuv420sp(handle); + break; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + ret = conv_sw_src_yvu420sp(handle); + break; + case HAL_PIXEL_FORMAT_BGRA_8888: + ret = conv_sw_src_argb888(handle); + break; + case HAL_PIXEL_FORMAT_RGBA_8888: + ret = conv_sw_src_rgba888(handle); + break; + case HAL_PIXEL_FORMAT_EXYNOS_ARGB_8888: + ret = copy_mfc_data(handle); + break; + default: + ret = CSC_ErrorUnsupportFormat; + break; + } + + return ret; +} + +static CSC_ERRORCODE conv_hw( + CSC_HANDLE *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + switch (handle->csc_hw_type) { +#ifdef USES_FIMC + case CSC_HW_TYPE_FIMC: + if (exynos_fimc_convert(handle->csc_hw_handle) != 0) { + ALOGE("%s:: exynos_fimc_convert() fail", __func__); + ret = CSC_Error; + } + break; +#endif +#ifdef USES_GSCALER + case CSC_HW_TYPE_GSCALER: + if (handle->hw_property.fixed_node < CSC_HW_SC0) { + if (exynos_gsc_convert(handle->csc_hw_handle) != 0) { + ALOGE("%s:: exynos_gsc_convert() fail", __func__); + ret = CSC_Error; + } + } else { + if (exynos_sc_convert(handle->csc_hw_handle) != 0) { + ALOGE("%s:: exynos_sc_convert() fail", __func__); + ret = CSC_Error; + } + } + break; +#endif + default: + ALOGE("%s:: unsupported csc_hw_type(%d)", __func__, handle->csc_hw_type); + ret = CSC_ErrorNotImplemented; + break; + } + + return ret; +} + +static CSC_ERRORCODE csc_init_hw( + void *handle) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + csc_handle = (CSC_HANDLE *)handle; + if (csc_handle->csc_method == CSC_METHOD_HW) { +#ifdef USES_FIMC + csc_handle->csc_hw_type = CSC_HW_TYPE_FIMC; +#endif +#ifdef USES_GSCALER + csc_handle->csc_hw_type = CSC_HW_TYPE_GSCALER; +#endif + switch (csc_handle->csc_hw_type) { +#ifdef USES_FIMC + case CSC_HW_TYPE_FIMC: + if (csc_handle->hw_property.fixed_node >= 0) + csc_handle->csc_hw_handle = exynos_fimc_create_exclusive(csc_handle->hw_property.fixed_node, FIMC_M2M_MODE, 0, 0); + else + csc_handle->csc_hw_handle = exynos_fimc_create(); + ALOGV("%s:: CSC_HW_TYPE_FIMC", __func__); + break; +#endif +#ifdef USES_GSCALER + case CSC_HW_TYPE_GSCALER: + if (csc_handle->hw_property.fixed_node >= 0) { + if (csc_handle->hw_property.fixed_node < CSC_HW_SC0) + csc_handle->csc_hw_handle = exynos_gsc_create_exclusive(csc_handle->hw_property.fixed_node, GSC_M2M_MODE, 0, 0); + else if (csc_handle->hw_property.fixed_node < CSC_HW_MAX) + csc_handle->csc_hw_handle = exynos_sc_create(csc_handle->hw_property.fixed_node - CSC_HW_SC0); + else + csc_handle->csc_hw_handle = NULL; + } else { + csc_handle->csc_hw_handle = exynos_gsc_create(); + } + ALOGV("%s:: CSC_HW_TYPE_GSCALER", __func__); + break; +#endif + default: + ALOGE("%s:: unsupported csc_hw_type, csc use sw", __func__); + csc_handle->csc_hw_handle = NULL; + break; + } + } + + if (csc_handle->csc_method == CSC_METHOD_HW) { + if (csc_handle->csc_hw_handle == NULL) { + ALOGE("%s:: CSC_METHOD_HW can't open HW", __func__); + free(csc_handle); + csc_handle = NULL; + } + } + + ALOGV("%s:: CSC_METHOD=%d", __func__, csc_handle->csc_method); + + return ret; +} + +static CSC_ERRORCODE csc_set_format( + void *handle) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + if (csc_handle->csc_method == CSC_METHOD_HW) { + switch (csc_handle->csc_hw_type) { +#ifdef USES_FIMC + case CSC_HW_TYPE_FIMC: + 
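        /*
         * FIMC needs the full-frame geometry aligned before it is programmed:
         * width to FIMC_IMG_ALIGN_WIDTH (16) and height to
         * FIMC_IMG_ALIGN_HEIGHT (2). The crop rectangle is passed through
         * unchanged.
         */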
exynos_fimc_set_src_format( + csc_handle->csc_hw_handle, + ALIGN(csc_handle->src_format.width, FIMC_IMG_ALIGN_WIDTH), + ALIGN(csc_handle->src_format.height, FIMC_IMG_ALIGN_HEIGHT), + csc_handle->src_format.crop_left, + csc_handle->src_format.crop_top, + csc_handle->src_format.crop_width, + csc_handle->src_format.crop_height, + HAL_PIXEL_FORMAT_2_V4L2_PIX(csc_handle->src_format.color_format), + csc_handle->src_format.cacheable, + csc_handle->hw_property.mode_drm); + + exynos_fimc_set_dst_format( + csc_handle->csc_hw_handle, + ALIGN(csc_handle->dst_format.width, FIMC_IMG_ALIGN_WIDTH), + ALIGN(csc_handle->dst_format.height, FIMC_IMG_ALIGN_HEIGHT), + csc_handle->dst_format.crop_left, + csc_handle->dst_format.crop_top, + csc_handle->dst_format.crop_width, + csc_handle->dst_format.crop_height, + HAL_PIXEL_FORMAT_2_V4L2_PIX(csc_handle->dst_format.color_format), + csc_handle->dst_format.cacheable, + csc_handle->hw_property.mode_drm, + 0); + break; +#endif +#ifdef USES_GSCALER + case CSC_HW_TYPE_GSCALER: + if (csc_handle->hw_property.fixed_node < CSC_HW_SC0) { + exynos_gsc_set_csc_property( + csc_handle->csc_hw_handle, + csc_handle->csc_mode, + csc_handle->csc_range, + csc_handle->colorspace); + + exynos_gsc_set_src_format( + csc_handle->csc_hw_handle, + csc_handle->src_format.width, + csc_handle->src_format.height, + csc_handle->src_format.crop_left, + csc_handle->src_format.crop_top, + csc_handle->src_format.crop_width, + csc_handle->src_format.crop_height, + HAL_PIXEL_FORMAT_2_V4L2_PIX(csc_handle->src_format.color_format), + csc_handle->src_format.cacheable, + csc_handle->hw_property.mode_drm); + + exynos_gsc_set_dst_format( + csc_handle->csc_hw_handle, + csc_handle->dst_format.width, + csc_handle->dst_format.height, + csc_handle->dst_format.crop_left, + csc_handle->dst_format.crop_top, + csc_handle->dst_format.crop_width, + csc_handle->dst_format.crop_height, + HAL_PIXEL_FORMAT_2_V4L2_PIX(csc_handle->dst_format.color_format), + csc_handle->dst_format.cacheable, + csc_handle->hw_property.mode_drm); + } else { + exynos_sc_set_csc_property( + csc_handle->csc_hw_handle, + csc_handle->csc_range, + csc_handle->colorspace, + csc_handle->filter); + + exynos_sc_set_src_format( + csc_handle->csc_hw_handle, + csc_handle->src_format.width, + csc_handle->src_format.height, + csc_handle->src_format.crop_left, + csc_handle->src_format.crop_top, + csc_handle->src_format.crop_width, + csc_handle->src_format.crop_height, + HAL_PIXEL_FORMAT_2_V4L2_PIX(csc_handle->src_format.color_format), + csc_handle->src_format.cacheable, + csc_handle->hw_property.mode_drm, + 1); + + exynos_sc_set_dst_format( + csc_handle->csc_hw_handle, + csc_handle->dst_format.width, + csc_handle->dst_format.height, + csc_handle->dst_format.crop_left, + csc_handle->dst_format.crop_top, + csc_handle->dst_format.crop_width, + csc_handle->dst_format.crop_height, + HAL_PIXEL_FORMAT_2_V4L2_PIX(csc_handle->dst_format.color_format), + csc_handle->dst_format.cacheable, + csc_handle->hw_property.mode_drm, + 1); + } + break; +#endif + default: + ALOGE("%s:: unsupported csc_hw_type", __func__); + break; + } + } + + return ret; +} + +static CSC_ERRORCODE csc_set_buffer( + void *handle) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + if (csc_handle->csc_method == CSC_METHOD_HW) { + switch (csc_handle->csc_hw_type) { +#ifdef USES_FIMC + case CSC_HW_TYPE_FIMC: + exynos_fimc_set_src_addr(csc_handle->csc_hw_handle, csc_handle->src_buffer.planes, 
csc_handle->src_buffer.mem_type, -1); + exynos_fimc_set_dst_addr(csc_handle->csc_hw_handle, csc_handle->dst_buffer.planes, csc_handle->dst_buffer.mem_type, -1); + break; +#endif +#ifdef USES_GSCALER + case CSC_HW_TYPE_GSCALER: + if (csc_handle->hw_property.fixed_node < CSC_HW_SC0) { + exynos_gsc_set_src_addr(csc_handle->csc_hw_handle, csc_handle->src_buffer.planes, csc_handle->src_buffer.mem_type, -1); + exynos_gsc_set_dst_addr(csc_handle->csc_hw_handle, csc_handle->dst_buffer.planes, csc_handle->dst_buffer.mem_type, -1); + } else { + exynos_sc_set_src_addr(csc_handle->csc_hw_handle, csc_handle->src_buffer.planes, csc_handle->src_buffer.mem_type, -1); + exynos_sc_set_dst_addr(csc_handle->csc_hw_handle, csc_handle->dst_buffer.planes, csc_handle->dst_buffer.mem_type, -1); + } + break; +#endif + default: + ALOGE("%s:: unsupported csc_hw_type", __func__); + break; + } + } + + return ret; +} + +void *csc_init( + CSC_METHOD method) +{ + CSC_HANDLE *csc_handle; + csc_handle = (CSC_HANDLE *)malloc(sizeof(CSC_HANDLE)); + if (csc_handle == NULL) + return NULL; + + memset(csc_handle, 0, sizeof(CSC_HANDLE)); +#ifdef USES_DEFAULT_CSC_HW_SCALER + csc_handle->hw_property.fixed_node = DEFAULT_CSC_HW; /* CSC_HW_SC1 == 5 */ +#else + csc_handle->hw_property.fixed_node = -1; +#endif + csc_handle->hw_property.mode_drm = 0; + csc_handle->csc_method = method; + + return (void *)csc_handle; +} + +CSC_ERRORCODE csc_deinit( + void *handle) +{ + CSC_ERRORCODE ret = CSC_ErrorNone; + CSC_HANDLE *csc_handle; + + if (handle == NULL) + return ret; + + csc_handle = (CSC_HANDLE *)handle; + if (csc_handle->csc_method == CSC_METHOD_HW) { + switch (csc_handle->csc_hw_type) { +#ifdef USES_FIMC + case CSC_HW_TYPE_FIMC: + exynos_fimc_destroy(csc_handle->csc_hw_handle); + break; +#endif +#ifdef USES_GSCALER + case CSC_HW_TYPE_GSCALER: + if (csc_handle->hw_property.fixed_node < CSC_HW_SC0) + exynos_gsc_destroy(csc_handle->csc_hw_handle); + else + exynos_sc_destroy(csc_handle->csc_hw_handle); + break; +#endif + default: + ALOGE("%s:: unsupported csc_hw_type", __func__); + break; + } + } + + free(csc_handle); + ret = CSC_ErrorNone; + + return ret; +} + +CSC_ERRORCODE csc_get_method( + void *handle, + CSC_METHOD *method) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + *method = csc_handle->csc_method; + + return ret; +} + +CSC_ERRORCODE csc_set_method( + void *handle, + CSC_METHOD method) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + csc_handle = (CSC_HANDLE *)handle; + + switch (method) { + case CSC_METHOD_SW: + case CSC_METHOD_HW: + csc_handle->csc_method = method; + break; + default: + ret = CSC_Error; + break; + } + + return ret; +} + +CSC_ERRORCODE csc_set_hw_property( + void *handle, + CSC_HW_PROPERTY_TYPE property, + int value) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + switch (property) { + case CSC_HW_PROPERTY_FIXED_NODE: + csc_handle->hw_property.fixed_node = value; + break; + case CSC_HW_PROPERTY_MODE_DRM: + csc_handle->hw_property.mode_drm = value; + break; + default: + ALOGE("%s:: not supported hw property", __func__); + ret = CSC_ErrorUnsupportFormat; + } + + return ret; +} + +CSC_ERRORCODE csc_get_eq_property( + void *handle, + CSC_EQ_MODE *csc_mode, + CSC_EQ_RANGE *csc_range, + CSC_EQ_COLORSPACE *colorspace) +{ + CSC_HANDLE 
*csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + *csc_mode = csc_handle->csc_mode; + *csc_range = csc_handle->csc_range; + *colorspace = csc_handle->colorspace; + + return ret; +} + +CSC_ERRORCODE csc_set_eq_property( + void *handle, + CSC_EQ_MODE csc_mode, + CSC_EQ_RANGE csc_range, + CSC_EQ_COLORSPACE colorspace) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_Error; + + csc_handle = (CSC_HANDLE *)handle; + csc_handle->csc_mode = csc_mode; + csc_handle->csc_range = csc_range; + csc_handle->colorspace = colorspace; + + return ret; +} + +CSC_ERRORCODE csc_set_filter_property( + void *handle, + CSC_HW_FILTER filter) +{ + CSC_HANDLE *csc_handle; + + if (handle == NULL) + return CSC_Error; + + csc_handle = (CSC_HANDLE *)handle; + if (filter >= CSC_FT_MAX) + return CSC_Error; + + csc_handle->filter = filter; + csc_handle->hw_property.fixed_node = CSC_HW_SC1; + + return 0; +} + +CSC_ERRORCODE csc_get_src_format( + void *handle, + unsigned int *width, + unsigned int *height, + unsigned int *crop_left, + unsigned int *crop_top, + unsigned int *crop_width, + unsigned int *crop_height, + unsigned int *color_format, + unsigned int *cacheable) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + *width = csc_handle->src_format.width; + *height = csc_handle->src_format.height; + *crop_left = csc_handle->src_format.crop_left; + *crop_top = csc_handle->src_format.crop_top; + *crop_width = csc_handle->src_format.crop_width; + *crop_height = csc_handle->src_format.crop_height; + *color_format = csc_handle->src_format.color_format; + *cacheable = csc_handle->src_format.cacheable; + + return ret; +} + +CSC_ERRORCODE csc_set_src_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format, + unsigned int cacheable) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + csc_handle->src_format.width = width; + csc_handle->src_format.height = height; + csc_handle->src_format.crop_left = crop_left; + csc_handle->src_format.crop_top = crop_top; + csc_handle->src_format.crop_width = crop_width; + csc_handle->src_format.crop_height = crop_height; + csc_handle->src_format.color_format = color_format; + csc_handle->src_format.cacheable = cacheable; + + return ret; +} + +CSC_ERRORCODE csc_get_dst_format( + void *handle, + unsigned int *width, + unsigned int *height, + unsigned int *crop_left, + unsigned int *crop_top, + unsigned int *crop_width, + unsigned int *crop_height, + unsigned int *color_format, + unsigned int *cacheable) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + *width = csc_handle->dst_format.width; + *height = csc_handle->dst_format.height; + *crop_left = csc_handle->dst_format.crop_left; + *crop_top = csc_handle->dst_format.crop_top; + *crop_width = csc_handle->dst_format.crop_width; + *crop_height = csc_handle->dst_format.crop_height; + *color_format = csc_handle->dst_format.color_format; + *cacheable = csc_handle->dst_format.cacheable; + + return ret; +} + +CSC_ERRORCODE csc_set_dst_format( + void 
*handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format, + unsigned int cacheable) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + csc_handle->dst_format.width = width; + csc_handle->dst_format.height = height; + csc_handle->dst_format.crop_left = crop_left; + csc_handle->dst_format.crop_top = crop_top; + csc_handle->dst_format.crop_width = crop_width; + csc_handle->dst_format.crop_height = crop_height; + csc_handle->dst_format.color_format = color_format; + csc_handle->dst_format.cacheable = cacheable; + + return ret; +} + +CSC_ERRORCODE csc_set_src_buffer( + void *handle, + void *addr[3], + int mem_type) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + csc_handle->src_buffer.planes[CSC_Y_PLANE] = addr[0]; + csc_handle->src_buffer.planes[CSC_U_PLANE] = addr[1]; + csc_handle->src_buffer.planes[CSC_V_PLANE] = addr[2]; + csc_handle->src_buffer.mem_type = mem_type; + + return ret; +} + +CSC_ERRORCODE csc_set_dst_buffer( + void *handle, + void *addr[3], + int mem_type) +{ + CSC_HANDLE *csc_handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (handle == NULL) + return CSC_ErrorNotInit; + + csc_handle = (CSC_HANDLE *)handle; + csc_handle->dst_buffer.planes[CSC_Y_PLANE] = addr[0]; + csc_handle->dst_buffer.planes[CSC_U_PLANE] = addr[1]; + csc_handle->dst_buffer.planes[CSC_V_PLANE] = addr[2]; + csc_handle->dst_buffer.mem_type = mem_type; + + return ret; +} + +CSC_ERRORCODE csc_convert( + void *handle) +{ + CSC_HANDLE *csc_handle = (CSC_HANDLE *)handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (csc_handle == NULL) + return CSC_ErrorNotInit; + + if ((csc_handle->csc_method == CSC_METHOD_HW) && + (csc_handle->csc_hw_handle == NULL)) + csc_init_hw(handle); + + csc_set_format(csc_handle); + csc_set_buffer(csc_handle); + + if (csc_handle->csc_method == CSC_METHOD_HW) + ret = conv_hw(csc_handle); + else + ret = conv_sw(csc_handle); + + return ret; +} + +CSC_ERRORCODE csc_convert_with_rotation( + void *handle, int rotation, int flip_horizontal, int flip_vertical) +{ + CSC_HANDLE *csc_handle = (CSC_HANDLE *)handle; + CSC_ERRORCODE ret = CSC_ErrorNone; + + if (csc_handle == NULL) + return CSC_ErrorNotInit; + + if ((csc_handle->csc_method == CSC_METHOD_HW) && + (csc_handle->csc_hw_handle == NULL)) + csc_init_hw(handle); + + csc_set_format(csc_handle); + csc_set_buffer(csc_handle); + +#ifdef USES_FIMC + exynos_fimc_set_rotation(csc_handle->csc_hw_handle, rotation, flip_horizontal, flip_vertical); +#endif +#ifdef USES_GSCALER + if (csc_handle->hw_property.fixed_node < CSC_HW_SC0) + exynos_gsc_set_rotation(csc_handle->csc_hw_handle, rotation, flip_horizontal, flip_vertical); + else + exynos_sc_set_rotation(csc_handle->csc_hw_handle, rotation, flip_horizontal, flip_vertical); +#endif + + if (csc_handle->csc_method == CSC_METHOD_HW) + ret = conv_hw(csc_handle); + else + ret = conv_sw(csc_handle); + + return ret; +} diff --git a/libdisplay/Android.mk b/libdisplay/Android.mk new file mode 100644 index 0000000..804f3df --- /dev/null +++ b/libdisplay/Android.mk @@ -0,0 +1,57 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libexynosv4l2 libhwcutils libsync libmpp + +ifeq ($(BOARD_USES_FIMC), true) +LOCAL_SHARED_LIBRARIES += libexynosfimc +else +LOCAL_SHARED_LIBRARIES += libexynosgscaler +endif + +ifeq ($(BOARD_USES_FB_PHY_LINEAR),true) + LOCAL_SHARED_LIBRARIES += libfimg + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libfimg4x +endif + +LOCAL_CFLAGS += -DLOG_TAG=\"display\" +LOCAL_CFLAGS += -DHLOG_CODE=1 +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwc \ + $(LOCAL_PATH)/../libhwcutils \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule + +LOCAL_SRC_FILES := \ + ExynosDisplay.cpp \ + ExynosOverlayDisplay.cpp + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libdisplay + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libdisplay/ExynosDisplay.cpp b/libdisplay/ExynosDisplay.cpp new file mode 100644 index 0000000..4df2824 --- /dev/null +++ b/libdisplay/ExynosDisplay.cpp @@ -0,0 +1,202 @@ +#include "ExynosDisplay.h" +#include "ExynosHWCUtils.h" +#include + +bool winConfigChanged(s3c_fb_win_config *c1, s3c_fb_win_config *c2) +{ + return c1->state != c2->state || + c1->fd != c2->fd || + c1->x != c2->x || + c1->y != c2->y || + c1->w != c2->w || + c1->h != c2->h || + c1->format != c2->format || + c1->offset != c2->offset || + c1->stride != c2->stride || + c1->blending != c2->blending || + c1->plane_alpha != c2->plane_alpha; +} + +void dumpConfig(s3c_fb_win_config &c) +{ + ALOGV("\tstate = %u", c.state); + if (c.state == c.S3C_FB_WIN_STATE_BUFFER) { + ALOGV("\t\tfd = %d, offset = %u, stride = %u, " + "x = %d, y = %d, w = %u, h = %u, " +#ifdef USES_DRM_SETTING_BY_DECON + "format = %u, blending = %u, protection = %u", +#else + "format = %u, blending = %u", +#endif + c.fd, c.offset, c.stride, + c.x, c.y, c.w, c.h, +#ifdef USES_DRM_SETTING_BY_DECON + c.format, c.blending, c.protection); +#else + c.format, c.blending); +#endif + } + else if (c.state == c.S3C_FB_WIN_STATE_COLOR) { + ALOGV("\t\tcolor = %u", c.color); + } +} + +enum s3c_fb_pixel_format halFormatToS3CFormat(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + return S3C_FB_PIXEL_FORMAT_RGBA_8888; + case HAL_PIXEL_FORMAT_RGBX_8888: + return S3C_FB_PIXEL_FORMAT_RGBX_8888; + case HAL_PIXEL_FORMAT_RGB_565: + return S3C_FB_PIXEL_FORMAT_RGB_565; + case HAL_PIXEL_FORMAT_BGRA_8888: + return S3C_FB_PIXEL_FORMAT_BGRA_8888; +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: + return S3C_FB_PIXEL_FORMAT_BGRX_8888; +#endif + default: + return S3C_FB_PIXEL_FORMAT_MAX; + } +} + +bool isFormatSupported(int 
format) +{ + return halFormatToS3CFormat(format) < S3C_FB_PIXEL_FORMAT_MAX; +} + +enum s3c_fb_blending halBlendingToS3CBlending(int32_t blending) +{ + switch (blending) { + case HWC_BLENDING_NONE: + return S3C_FB_BLENDING_NONE; + case HWC_BLENDING_PREMULT: + return S3C_FB_BLENDING_PREMULT; + case HWC_BLENDING_COVERAGE: + return S3C_FB_BLENDING_COVERAGE; + + default: + return S3C_FB_BLENDING_MAX; + } +} + +bool isBlendingSupported(int32_t blending) +{ + return halBlendingToS3CBlending(blending) < S3C_FB_BLENDING_MAX; +} + +#define NUMA(a) (sizeof(a) / sizeof(a [0])) +const char *s3cFormat2str(uint32_t format) +{ + const static char *unknown = "unknown"; + + for (unsigned int n1 = 0; n1 < NUMA(s3cFormat); n1++) { + if (format == s3cFormat[n1].format) { + return s3cFormat[n1].desc; + } + } + + return unknown; +} + +ExynosDisplay::ExynosDisplay(int numGSCs) + : mDisplayFd(-1), + mXres(0), + mYres(0), + mXdpi(0), + mYdpi(0), + mVsyncPeriod(0), + mOtfMode(OTF_OFF), + mHasDrmSurface(false), + mAllocDevice(NULL), + mNumMPPs(numGSCs), + mHwc(NULL) +{ +} + +ExynosDisplay::~ExynosDisplay() +{ + if (!mLayerInfos.isEmpty()) { + for (size_t i = 0; i < mLayerInfos.size(); i++) { + delete mLayerInfos[i]; + } + mLayerInfos.clear(); + } +} + +int ExynosDisplay::getDeconWinMap(int overlayIndex, int totalOverlays) +{ + if (totalOverlays == 4 && overlayIndex == 3) + return 4; + return overlayIndex; +} + +int ExynosDisplay::inverseWinMap(int windowIndex, int totalOverlays) +{ + if (totalOverlays == 4 && windowIndex == 4) + return 3; + return windowIndex; +} + +int ExynosDisplay::prepare(hwc_display_contents_1_t *contents) +{ + return 0; +} + +int ExynosDisplay::set(hwc_display_contents_1_t *contents) +{ + return 0; +} + +void ExynosDisplay::dump(android::String8& result) +{ +} + +void ExynosDisplay::freeMPP() +{ +} + +void ExynosDisplay::allocateLayerInfos(hwc_display_contents_1_t* contents) +{ + if (contents == NULL) + return; + + if (!mLayerInfos.isEmpty()) { + for (size_t i = 0; i < mLayerInfos.size(); i++) { + delete mLayerInfos[i]; + } + mLayerInfos.clear(); + } + + for (int i= 0; i < contents->numHwLayers; i++) { + ExynosLayerInfo *layerInfo = new ExynosLayerInfo(); + mLayerInfos.push(layerInfo); + } +} + +void ExynosDisplay::dumpLayerInfo(android::String8& result) +{ + if (!mLayerInfos.isEmpty()) { + result.append( + " type | CheckOverlayFlag | CheckMPPFlag \n" + "------------+------------------+--------------\n"); + for (size_t i = 0; i < mLayerInfos.size(); i++) { + unsigned int type = mLayerInfos[i]->compositionType; + static char const* compositionTypeName[] = { + "GLES", + "HWC", + "BACKGROUND", + "FB TARGET", + "UNKNOWN"}; + + if (type >= NELEM(compositionTypeName)) + type = NELEM(compositionTypeName) - 1; + result.appendFormat( + " %10s | 0x%8x | 0x%8x \n", + compositionTypeName[type], + mLayerInfos[i]->mCheckOverlayFlag, mLayerInfos[i]->mCheckMPPFlag); + } + } + result.append("\n"); +} diff --git a/libdisplay/ExynosDisplay.h b/libdisplay/ExynosDisplay.h new file mode 100644 index 0000000..f337096 --- /dev/null +++ b/libdisplay/ExynosDisplay.h @@ -0,0 +1,106 @@ +#ifndef EXYNOS_DISPLAY_H +#define EXYNOS_DISPLAY_H + +#include +#include +#include "ExynosHWC.h" + +#ifndef MAX_BUF_STRIDE +#define MAX_BUF_STRIDE 4096 +#endif + +class ExynosMPPModule; + +enum { + eSkipLayer = 0x00000001, + eUnsupportedPlaneAlpha = 0x00000002, + eInvalidHandle = 0x00000004, + eHasFloatSrcCrop = 0x00000008, + eUnsupportedDstWidth = 0x00000010, + eUnsupportedCoordinate = 0x00000020, + eUnsupportedFormat = 0x00000040, + 
eUnsupportedBlending = 0x00000080, + eDynamicRecomposition = 0x00000100, + eForceFbEnabled = 0x00000200, + eSandwitchedBetweenGLES = 0x00000400, + eHasPopupVideo = 0x00000800, + eHasDRMVideo = 0x00001000, + eInsufficientBandwidth = 0x00002000, + eInsufficientOverlapCount = 0x00004000, + eInsufficientWindow = 0x00008000, + eInsufficientMPP = 0x00010000, + eSwitchingLocalPath = 0x00020000, + eRGBLayerDuringVideoPlayback = 0x00040000, + eSkipStaticLayer = 0x00080000, + eNotAlignedDstPosition = 0x00100000, + eUnSupportedUseCase = 0x00200000, + eExceedHStrideMaximum = 0x00400000, + eMPPUnsupported = 0x40000000, + eUnknown = 0x80000000, +}; + +const struct s3cFormat { + uint32_t format; + const char *desc; +} s3cFormat[] = { + {S3C_FB_PIXEL_FORMAT_RGBA_8888, "RGBA8888"}, + {S3C_FB_PIXEL_FORMAT_RGBX_8888, "RGBX8888"}, + {S3C_FB_PIXEL_FORMAT_RGBA_5551, "RGBA5551"}, + {S3C_FB_PIXEL_FORMAT_RGB_565, "RGB565"}, + {S3C_FB_PIXEL_FORMAT_BGRA_8888, "BGRA8888"}, + {S3C_FB_PIXEL_FORMAT_BGRX_8888, "BGRX8888"}, +}; + +class ExynosLayerInfo { + public: + int32_t compositionType; + uint32_t mCheckOverlayFlag; + uint32_t mCheckMPPFlag; +}; + +bool winConfigChanged(s3c_fb_win_config *c1, s3c_fb_win_config *c2); +void dumpConfig(s3c_fb_win_config &c); +enum s3c_fb_pixel_format halFormatToS3CFormat(int format); +bool isFormatSupported(int format); +enum s3c_fb_blending halBlendingToS3CBlending(int32_t blending); +bool isBlendingSupported(int32_t blending); +const char *s3cFormat2str(uint32_t format); + +class ExynosDisplay { + public: + /* Methods */ + ExynosDisplay(int numGSCs); + virtual ~ExynosDisplay(); + + virtual int getDeconWinMap(int overlayIndex, int totalOverlays); + virtual int inverseWinMap(int windowIndex, int totalOverlays); + virtual int prepare(hwc_display_contents_1_t *contents); + virtual int set(hwc_display_contents_1_t *contents); + virtual void dump(android::String8& result); + virtual void freeMPP(); + virtual void allocateLayerInfos(hwc_display_contents_1_t* contents); + virtual void dumpLayerInfo(android::String8& result); + virtual int32_t getDisplayAttributes(const uint32_t __unused attribute, uint32_t __unused config = 0) {return 0;}; + virtual int getActiveConfig() {return 0;}; + virtual int setActiveConfig(int __unused index) {return 0;}; + + /* Fields */ + int mDisplayFd; + int32_t mXres; + int32_t mYres; + + int32_t mXdpi; + int32_t mYdpi; + int32_t mVsyncPeriod; + + int mOtfMode; + bool mHasDrmSurface; + alloc_device_t *mAllocDevice; + int mNumMPPs; + android::Mutex mLayerInfoMutex; + android::Vector mLayerInfos; + + struct exynos5_hwc_composer_device_1_t *mHwc; +}; + +#endif diff --git a/libdisplay/ExynosOverlayDisplay.cpp b/libdisplay/ExynosOverlayDisplay.cpp new file mode 100644 index 0000000..b9283c5 --- /dev/null +++ b/libdisplay/ExynosOverlayDisplay.cpp @@ -0,0 +1,2005 @@ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS +#include "ExynosOverlayDisplay.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include + +#ifdef G2D_COMPOSITION +#include "ExynosG2DWrapper.h" +#endif + +ExynosOverlayDisplay::ExynosOverlayDisplay(int numMPPs, struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(numMPPs), + mLastFbWindow(NO_FB_NEEDED), + mGrallocModule(NULL), + mLastOverlayWindowIndex(-1), + mLastOverlayLayerIndex(-1), + mVirtualOverlayFlag(0), + mForceFbYuvLayer(0), + mCountSameConfig(0), + mConfigMode(0), + mGscLayers(0), + mPopupPlayYuvContents(false), + mHasCropSurface(false), + mYuvLayers(0), + mBypassSkipStaticLayer(false), + mGscUsed(false), + mCurrentGscIndex(0), + 
mFbNeeded(false), + mFirstFb(0), + mLastFb(0), + mForceFb(false), + mForceOverlayLayerIndex(-1), + mRetry(false), + mBlanked(true), + mMaxWindowOverlapCnt(NUM_HW_WINDOWS), + mAllowedOverlays(5) +{ + mMPPs = new ExynosMPPModule*[mNumMPPs]; + for (int i = 0; i < mNumMPPs; i++) + mMPPs[i] = new ExynosMPPModule(this, i); + + for (size_t i = 0; i < MAX_NUM_FIMD_DMA_CH; i++) { + mDmaChannelMaxBandwidth[i] = 2560 * 1600; + mDmaChannelMaxOverlapCount[i] = 1; + } + + memset(&mPostData, 0, sizeof(mPostData)); + memset(&mLastConfigData, 0, sizeof(mLastConfigData)); + memset(mLastGscMap, 0, sizeof(mLastGscMap)); + memset(mLastHandles, 0, sizeof(mLastHandles)); + memset(mLastLayerHandles, 0, sizeof(mLastLayerHandles)); + memset(mPrevDstConfig, 0, sizeof(mPrevDstConfig)); + + mCurrentRGBMPPIndex = 0; + memset(&mFbUpdateRegion, 0, sizeof(mFbUpdateRegion)); + + mOtfMode = OTF_OFF; + this->mHwc = pdev; +} + +ExynosOverlayDisplay::~ExynosOverlayDisplay() +{ + for (int i = 0; i < mNumMPPs; i++) + delete mMPPs[i]; + delete[] mMPPs; +} + +bool ExynosOverlayDisplay::isOverlaySupported(hwc_layer_1_t &layer, size_t i) +{ + int mMPPIndex = 0; + int ret = 0; + + if (layer.flags & HWC_SKIP_LAYER) { + mLayerInfos[i]->mCheckOverlayFlag |= eSkipLayer; + ALOGV("\tlayer %u: skipping", i); + return false; + } + + if (!layer.planeAlpha) + return true; + + if (i == 0 && layer.planeAlpha < 255) { + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedPlaneAlpha; + return false; + } + + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (!handle) { + ALOGV("\tlayer %u: handle is NULL", i); + mLayerInfos[i]->mCheckOverlayFlag |= eInvalidHandle; + return false; + } + + if (isFormatRgb(handle->format) && handle->stride >= MAX_BUF_STRIDE) { + mLayerInfos[i]->mCheckOverlayFlag |= eExceedHStrideMaximum; + return false; + } + + mMPPIndex = getMPPForUHD(layer); + if (isFormatRgb(handle->format)) + mMPPIndex = getRGBMPPIndex(0); + + if (!(mMPPs[mMPPIndex]->formatRequiresGsc(handle->format)) && + (formatToBpp(handle->format) == 16) && ((layer.displayFrame.left % 2 != 0) || (layer.displayFrame.right % 2 != 0))) + return false; + + if (isSrcCropFloat(layer.sourceCropf) && + getDrmMode(handle->flags) == NO_DRM) { + ALOGV("\tlayer %u: has floating crop info [%7.1f %7.1f %7.1f %7.1f]", i, + layer.sourceCropf.left, layer.sourceCropf.top, layer.sourceCropf.right, + layer.sourceCropf.bottom); + mLayerInfos[i]->mCheckOverlayFlag |= eHasFloatSrcCrop; + return false; + } + + if (visibleWidth(mMPPs[mMPPIndex], layer, handle->format, this->mXres) < BURSTLEN_BYTES) { + ALOGV("\tlayer %u: visible area is too narrow", i); + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedDstWidth; + return false; + } + + if (mMPPs[mMPPIndex]->isProcessingRequired(layer, handle->format)) { + int downNumerator, downDenominator; + int downError = mMPPs[mMPPIndex]->getDownscaleRatio(&downNumerator, &downDenominator); + /* Check whether GSC can handle using local or M2M */ + ret = mMPPs[mMPPIndex]->isProcessingSupported(layer, handle->format, false); + if (ret < 0) { + ALOGV("\tlayer %u: MPP M2M required but not supported", i); + mLayerInfos[i]->mCheckOverlayFlag |= eMPPUnsupported; + mLayerInfos[i]->mCheckMPPFlag |= -ret; + + ret = mMPPs[mMPPIndex]->isProcessingSupported(layer, handle->format, true, downNumerator, downDenominator); + if (ret < 0) { + ALOGV("\tlayer %u: MPP OTF required but not supported", i); + mLayerInfos[i]->mCheckOverlayFlag |= eMPPUnsupported; + mLayerInfos[i]->mCheckMPPFlag |= -ret; + return false; + } + } + } else { +#ifdef 
USE_FB_PHY_LINEAR +#ifdef G2D_COMPOSITION + if ((this->mG2dComposition) && (this->mG2dLayers < (int)(NUM_HW_WIN_FB_PHY - 1))) + this->mG2d.ovly_lay_idx[this->mG2dLayers++] = i; + else +#endif + return false; +#endif + if (!isFormatSupported(handle->format)) { + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedFormat; + ALOGV("\tlayer %u: pixel format %u not supported", i, handle->format); + return false; + } + } + + if (!isBlendingSupported(layer.blending)) { + ALOGV("\tlayer %u: blending %d not supported", i, layer.blending); + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedBlending; + return false; + } + +#ifdef USE_FB_PHY_LINEAR + if (CC_UNLIKELY(isOffscreen(layer, mXres, mYres))) { + ALOGW("\tlayer %u: off-screen", i); + return false; + } +#endif + + return true; +} + +int ExynosOverlayDisplay::prepare(hwc_display_contents_1_t* contents) +{ + ATRACE_CALL(); + ALOGV("preparing %u layers for FIMD", contents->numHwLayers); + + memset(mPostData.gsc_map, 0, sizeof(mPostData.gsc_map)); + + mForceFb = mHwc->force_gpu; + + checkTVBandwidth(); + + mForceFbYuvLayer = 0; + mConfigMode = 0; + mRetry = false; + + ExynosDisplay::allocateLayerInfos(contents); + /* + * check whether same config or different config, + * should be waited until meeting the NUM_COFIG)STABLE + * before stablizing config, should be composed by GPU + * faster stablizing config, should be returned by OVERLAY + */ + forceYuvLayersToFb(contents); + + if (mHwc->force_gpu == true) + mForceFb = true; + + do { +#ifdef G2D_COMPOSITION + int tot_pixels = 0; + mG2dLayers= 0; + memset(&mG2d, 0, sizeof(mG2d)); + mG2dComposition = 0; + + if (contents->flags & HWC_GEOMETRY_CHANGED) { + mG2dBypassCount = 4; + goto SKIP_G2D_OVERLAY; + } + + if (mG2dBypassCount > 0) + goto SKIP_G2D_OVERLAY; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if ((layer.compositionType == HWC_FRAMEBUFFER_TARGET) || + (layer.compositionType == HWC_BACKGROUND && !mForceFb)) + continue; + + if (is_transformed(layer) || is_scaled(layer) || (!layer.handle)) + goto SKIP_G2D_OVERLAY; + + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (!exynos5_format_is_rgb(handle->format)) + goto SKIP_G2D_OVERLAY; + + tot_pixels += WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + } + + if ((tot_pixels == mXres * mYres) && + (contents->numHwLayers <= NUM_HW_WINDOWS + 1)) + mG2dComposition = 1; + else + mG2dComposition = 0; + + SKIP_G2D_OVERLAY: +#endif + + determineYuvOverlay(contents); + determineSupportedOverlays(contents); + determineBandwidthSupport(contents); + assignWindows(contents); + } while (mRetry); + +#ifdef G2D_COMPOSITION + int alloc_fail = 0; +CHANGE_COMPOS_MODE: + if (mG2dComposition) { + if (mGscUsed || mFbNeeded || alloc_fail) { + //use SKIP_STATIC_LAYER_COMP + for (size_t i = 0; i < (size_t)mG2dLayers; i++) { + int lay_idx = mG2d.ovly_lay_idx[i]; + hwc_layer_1_t &layer = contents->hwLayers[lay_idx]; + layer.compositionType = HWC_FRAMEBUFFER; + mFbNeeded = true; + for (size_t j = 0; j < NUM_HW_WIN_FB_PHY; j++) { + if (mPostData.overlay_map[j] == lay_idx) { + mPostData.overlay_map[j] = -1; + break; + } + } + } + if (mFbNeeded) + mFirstFb = min(mFirstFb, (size_t)mG2d.ovly_lay_idx[0]); + mG2dLayers= 0; + mG2dComposition = 0; + } + } + + if (mG2dComposition) { + alloc_fail = exynos5_g2d_buf_alloc(mHwc, contents); + if (alloc_fail) + goto CHANGE_COMPOS_MODE; + } + + if (contents->flags & HWC_GEOMETRY_CHANGED) { + for (int i = 0; i < (int)NUM_HW_WIN_FB_PHY; i++) + 
mLastG2dLayerHandle[i] = 0; + } +#endif + + if (mPopupPlayYuvContents) + mVirtualOverlayFlag = 0; + else + skipStaticLayers(contents); + if (mVirtualOverlayFlag) + mFbNeeded = 0; + + if (!mFbNeeded) + mPostData.fb_window = NO_FB_NEEDED; + else + if (mPopupPlayYuvContents) + mPostData.fb_window = 1; + + return 0; +} + +void ExynosOverlayDisplay::configureOtfWindow(hwc_rect_t &displayFrame, + int32_t blending, int32_t planeAlpha, int fence_fd, int format, s3c_fb_win_config &cfg) +{ + uint8_t bpp = formatToBpp(format); + + cfg.state = cfg.S3C_FB_WIN_STATE_OTF; + cfg.fd = -1; + cfg.x = displayFrame.left; + cfg.y = displayFrame.top; + cfg.w = WIDTH(displayFrame); + cfg.h = HEIGHT(displayFrame); + cfg.format = halFormatToS3CFormat(format); + cfg.offset = 0; + cfg.stride = cfg.w * bpp / 8; + cfg.blending = halBlendingToS3CBlending(blending); + cfg.fence_fd = fence_fd; + cfg.plane_alpha = 255; + if (planeAlpha && (planeAlpha < 255)) { + cfg.plane_alpha = planeAlpha; + } +} + +void ExynosOverlayDisplay::configureHandle(private_handle_t *handle, + hwc_frect_t &sourceCrop, hwc_rect_t &displayFrame, + int32_t blending, int32_t planeAlpha, int fence_fd, s3c_fb_win_config &cfg) +{ + uint32_t x, y; + uint32_t w = WIDTH(displayFrame); + uint32_t h = HEIGHT(displayFrame); + uint8_t bpp = formatToBpp(handle->format); + uint32_t offset = ((uint32_t)sourceCrop.top * handle->stride + (uint32_t)sourceCrop.left) * bpp / 8; + + if (displayFrame.left < 0) { + unsigned int crop = -displayFrame.left; + ALOGV("layer off left side of screen; cropping %u pixels from left edge", + crop); + x = 0; + w -= crop; + offset += crop * bpp / 8; + } else { + x = displayFrame.left; + } + + if (displayFrame.right > this->mXres) { + unsigned int crop = displayFrame.right - this->mXres; + ALOGV("layer off right side of screen; cropping %u pixels from right edge", + crop); + w -= crop; + } + + if (displayFrame.top < 0) { + unsigned int crop = -displayFrame.top; + ALOGV("layer off top side of screen; cropping %u pixels from top edge", + crop); + y = 0; + h -= crop; + offset += handle->stride * crop * bpp / 8; + } else { + y = displayFrame.top; + } + + if (displayFrame.bottom > this->mYres) { + int crop = displayFrame.bottom - this->mYres; + ALOGV("layer off bottom side of screen; cropping %u pixels from bottom edge", + crop); + h -= crop; + } + + cfg.state = cfg.S3C_FB_WIN_STATE_BUFFER; + cfg.fd = handle->fd; + cfg.x = x; + cfg.y = y; + cfg.w = w; + cfg.h = h; + cfg.format = halFormatToS3CFormat(handle->format); + cfg.offset = offset; + cfg.stride = handle->stride * bpp / 8; + cfg.blending = halBlendingToS3CBlending(blending); + cfg.fence_fd = fence_fd; + cfg.plane_alpha = 255; + if (planeAlpha && (planeAlpha < 255)) { + cfg.plane_alpha = planeAlpha; + } +#ifdef USES_DRM_SETTING_BY_DECON + if (getDrmMode(handle->flags) != NO_DRM) + cfg.protection = 1; + else + cfg.protection = 0; +#endif +} + +void ExynosOverlayDisplay::configureOverlay(hwc_layer_1_t *layer, s3c_fb_win_config &cfg) +{ + hwc_rect_t displayFrame; + + displayFrame.left = layer->displayFrame.left; + displayFrame.top = layer->displayFrame.top; + displayFrame.right = layer->displayFrame.right; + displayFrame.bottom = layer->displayFrame.bottom; + + if (layer->compositionType == HWC_BACKGROUND) { + hwc_color_t color = layer->backgroundColor; + cfg.state = cfg.S3C_FB_WIN_STATE_COLOR; + cfg.color = (color.r << 16) | (color.g << 8) | color.b; + cfg.x = 0; + cfg.y = 0; + cfg.w = this->mXres; + cfg.h = this->mYres; + return; + } + if ((layer->acquireFenceFd >= 0) && 
this->mForceFbYuvLayer) { + if (sync_wait(layer->acquireFenceFd, 1000) < 0) + ALOGE("sync_wait error"); + close(layer->acquireFenceFd); + layer->acquireFenceFd = -1; + } + private_handle_t *handle = private_handle_t::dynamicCast(layer->handle); + hwc_frect_t originalCrop = layer->sourceCropf; + if (layer->compositionType == HWC_FRAMEBUFFER_TARGET) { + layer->sourceCropf.left = (double)mFbUpdateRegion.left; + layer->sourceCropf.top = (double)mFbUpdateRegion.top; + layer->sourceCropf.right = (double)mFbUpdateRegion.right; + layer->sourceCropf.bottom = (double)mFbUpdateRegion.bottom; + displayFrame.left = mFbUpdateRegion.left; + displayFrame.top = mFbUpdateRegion.top; + displayFrame.right = mFbUpdateRegion.right; + displayFrame.bottom = mFbUpdateRegion.bottom; + } + configureHandle(handle, layer->sourceCropf, displayFrame, + layer->blending, layer->planeAlpha, layer->acquireFenceFd, cfg); + layer->sourceCropf = originalCrop; +} + +void ExynosOverlayDisplay::configureDummyOverlay(hwc_layer_1_t *layer, s3c_fb_win_config &cfg) +{ + hwc_rect_t displayFrame; + + displayFrame.left = layer->displayFrame.left; + displayFrame.top = layer->displayFrame.top; + displayFrame.right = layer->displayFrame.right; + displayFrame.bottom = layer->displayFrame.bottom; + + if (layer->acquireFenceFd >= 0) { + close(layer->acquireFenceFd); + layer->acquireFenceFd = -1; + } + + uint32_t x, y; + uint32_t w = WIDTH(displayFrame); + uint32_t h = HEIGHT(displayFrame); + + if (displayFrame.left < 0) { + unsigned int crop = -displayFrame.left; + x = 0; + w -= crop; + } else { + x = displayFrame.left; + } + + if (displayFrame.right > this->mXres) { + unsigned int crop = displayFrame.right - this->mXres; + w -= crop; + } + + if (displayFrame.top < 0) { + unsigned int crop = -displayFrame.top; + y = 0; + h -= crop; + } else { + y = displayFrame.top; + } + + if (displayFrame.bottom > this->mYres) { + int crop = displayFrame.bottom - this->mYres; + h -= crop; + } + + cfg.state = cfg.S3C_FB_WIN_STATE_COLOR; + cfg.color = 0; + + cfg.x = x; + cfg.y = y; + cfg.w = w; + cfg.h = h; + cfg.fence_fd = -1; + + return; +} + +int ExynosOverlayDisplay::handleWindowUpdate(hwc_display_contents_1_t* contents, + struct s3c_fb_win_config *config) +{ + exynos5_hwc_post_data_t *pdata = &mPostData; + int layerIdx = -1; + int updatedWinCnt = 0; + int totalWinCnt = 0; + int bytePerPixel = 0; + size_t winUpdateInfoIdx; + hwc_rect updateRect = {this->mXres, this->mYres, 0, 0}; + hwc_rect currentRect = {0, 0, 0, 0}; + bool burstLengthCheckDone = false; + int alignAdjustment = 0; + int intersectionWidth = 0; + + char value[PROPERTY_VALUE_MAX]; + property_get("debug.hwc.winupdate", value, NULL); + + if (!(!strcmp(value, "1") || !strcmp(value, "true"))) + return 0; + + if (S3C_WIN_UPDATE_IDX < 0) + return 0; + winUpdateInfoIdx = S3C_WIN_UPDATE_IDX; + + if (contents->flags & HWC_GEOMETRY_CHANGED) + return 0; + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if (config[i].state != config[i].S3C_FB_WIN_STATE_DISABLED) { + if (mLastGscMap[i].mode == exynos5_gsc_map_t::GSC_LOCAL) { + HLOGV("[WIN_UPDATE] disabled by GSC-OTF in window(%d)", i); + return 0; + } + totalWinCnt++; + layerIdx = pdata->overlay_map[totalWinCnt - 1]; + + if (winConfigChanged(&config[i], &this->mLastConfigData.config[i])) { + updatedWinCnt++; + + currentRect.left = config[i].x; + currentRect.right = config[i].x + config[i].w; + currentRect.top = config[i].y; + currentRect.bottom = config[i].y + config[i].h; + + if (hwcHasApiVersion((hwc_composer_device_1_t*)mHwc, 
HWC_DEVICE_API_VERSION_1_5)) + { + private_handle_t *handle = NULL; + hwc_rect damageRect = {0, 0, 0, 0}; + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + unsigned int damageRegionMod = getLayerRegion(layer, damageRect, eDamageRegion); + + if (damageRegionMod == eDamageRegionSkip) + continue; + + if (handle && !isScaled(layer) && !isRotated(layer) + && (damageRegionMod == eDamageRegionPartial)) { + HLOGD("[WIN_UPDATE][surfaceDamage] layer w(%4d) h(%4d), dirty (%4d, %4d) - (%4d, %4d)", + handle->width, handle->height, damageRect.left, damageRect.top, damageRect.right, damageRect.bottom); + + currentRect.left = config[i].x - (int32_t)layer.sourceCropf.left + damageRect.left; + currentRect.right = config[i].x - (int32_t)layer.sourceCropf.left + damageRect.right; + currentRect.top = config[i].y - (int32_t)layer.sourceCropf.top + damageRect.top; + currentRect.bottom = config[i].y - (int32_t)layer.sourceCropf.top + damageRect.bottom; + adjustRect(currentRect, mXres, mYres); + + } + } + if ((currentRect.left > currentRect.right) || (currentRect.top > currentRect.bottom)) { + HLOGD("[WIN_UPDATE] window(%d) layer(%d) invalid region (%4d, %4d) - (%4d, %4d)", + i, layerIdx, currentRect.left, currentRect.top, currentRect.right, currentRect.bottom); + return 0; + } + HLOGD("[WIN_UPDATE] Updated Window(%d) Layer(%d) (%4d, %4d) - (%4d, %4d)", + i, layerIdx, currentRect.left, currentRect.top, currentRect.right, currentRect.bottom); + updateRect = expand(updateRect, currentRect); + } + } + } + if (updatedWinCnt == 0) + return 0; + + updateRect.left = ALIGN_DOWN(updateRect.left, WINUPDATE_X_ALIGNMENT); + updateRect.right = updateRect.left + ALIGN_UP(WIDTH(updateRect), WINUPDATE_W_ALIGNMENT); + + if ((100 * (WIDTH(updateRect) * HEIGHT(updateRect)) / (this->mXres * this->mYres)) > WINUPDATE_THRESHOLD) + return 0; + + alignAdjustment = max(WINUPDATE_X_ALIGNMENT, WINUPDATE_W_ALIGNMENT); + + while (1) { + burstLengthCheckDone = true; + + for (int i = 0; i < NUM_HW_WINDOWS; i++) { + if (config[i].state != config[i].S3C_FB_WIN_STATE_DISABLED) { + if (config[i].format == S3C_FB_PIXEL_FORMAT_RGB_565) + bytePerPixel = 2; + else + bytePerPixel = 4; + + currentRect.left = config[i].x; + currentRect.right = config[i].x + config[i].w; + currentRect.top = config[i].y; + currentRect.bottom = config[i].y + config[i].h; + + intersectionWidth = WIDTH(intersection(currentRect, updateRect)); + + HLOGV("[WIN_UPDATE] win[%d] left(%d) right(%d) intersection(%d)", i, currentRect.left, currentRect.right, intersectionWidth); + + if (intersectionWidth != 0 && intersectionWidth * bytePerPixel < BURSTLEN_BYTES) { + HLOGV("[WIN_UPDATE] win[%d] insufficient burst length (%d)*(%d) < %d", i, intersectionWidth, bytePerPixel, BURSTLEN_BYTES); + burstLengthCheckDone = false; + break; + } + } + } + if (burstLengthCheckDone) + break; + HLOGD("[WIN_UPDATE] Adjusting update width. 
current left(%d) right(%d)", updateRect.left, updateRect.right); + if (updateRect.left >= alignAdjustment) { + updateRect.left -= alignAdjustment; + } else if (updateRect.right + alignAdjustment <= this->mXres) { + updateRect.right += alignAdjustment; + } else { + ALOGD("[WIN_UPDATE] Error during update width adjustment"); + return 0; + } + } + + config[winUpdateInfoIdx].state = config[winUpdateInfoIdx].S3C_FB_WIN_STATE_UPDATE; + config[winUpdateInfoIdx].x = updateRect.left; + config[winUpdateInfoIdx].y = updateRect.top; + config[winUpdateInfoIdx].w = ALIGN_UP(WIDTH(updateRect), WINUPDATE_W_ALIGNMENT); + config[winUpdateInfoIdx].h = HEIGHT(updateRect); + + HLOGD("[WIN_UPDATE] UpdateRegion cfg (%4d, %4d) w(%4d) h(%4d) updatedWindowCnt(%d)", + config[winUpdateInfoIdx].x, config[winUpdateInfoIdx].y, config[winUpdateInfoIdx].w, config[winUpdateInfoIdx].h, updatedWinCnt); + + return 1; +} + +int ExynosOverlayDisplay::winconfigIoctl(s3c_fb_win_config_data *win_data) +{ + ATRACE_CALL(); + return ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, win_data); +} + +int ExynosOverlayDisplay::postFrame(hwc_display_contents_1_t* contents) +{ + ATRACE_CALL(); + exynos5_hwc_post_data_t *pdata = &mPostData; + struct s3c_fb_win_config_data win_data; + struct s3c_fb_win_config *config = win_data.config; + int win_map = 0; + int tot_ovly_wins = 0; + +#ifdef G2D_COMPOSITION + int num_g2d_overlayed = 0; +#endif + + memset(config, 0, sizeof(win_data.config)); + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) + config[i].fence_fd = -1; + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if ( pdata->overlay_map[i] != -1) + tot_ovly_wins++; + } + if (mVirtualOverlayFlag) + tot_ovly_wins++; + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + int layer_idx = pdata->overlay_map[i]; + if (layer_idx != -1) { + hwc_layer_1_t &layer = contents->hwLayers[layer_idx]; + win_map = getDeconWinMap(i, tot_ovly_wins); + if (pdata->gsc_map[i].mode == exynos5_gsc_map_t::GSC_M2M) { + if (contents->flags & HWC_GEOMETRY_CHANGED) + mMPPs[pdata->gsc_map[i].idx]->mLastGSCLayerHandle = -1; + if (postGscM2M(layer, config, win_map, i, !!(i == 0)) < 0) + continue; + } else if (pdata->gsc_map[i].idx == FIMD_GSC_IDX && this->mOtfMode == OTF_RUNNING && + pdata->gsc_map[i].mode == exynos5_gsc_map_t::GSC_LOCAL) { + postGscOtf(layer, config, win_map, i); +#ifdef G2D_COMPOSITION + } else if (this->mG2dComposition && (num_g2d_overlayed < this->mG2dLayers)){ + waitForRenderFinish(&layer.handle, 1); + private_handle_t dstHandle(*(private_handle_t::dynamicCast(layer.handle))); + hwc_frect_t sourceCrop = {0, 0, WIDTH(layer.displayFrame), HEIGHT(layer.displayFrame)}; + int fence = exynos5_config_g2d(mHwc, layer, &dstHandle, config[win_map], num_g2d_overlayed, i); + if (fence < 0) { + ALOGE("config_g2d failed for layer %u", i); + continue; + } + configureHandle(&dstHandle, sourceCrop, layer.displayFrame, + layer.blending, layer.planeAlpha, fence, *config); + num_g2d_overlayed++; + this->mG2d.win_used[i] = num_g2d_overlayed; +#endif + } else { + waitForRenderFinish(&layer.handle, 1); + configureOverlay(&layer, config[win_map]); + } + } + if (i == 0 && config[i].blending != S3C_FB_BLENDING_NONE) { + ALOGV("blending not supported on window 0; forcing BLENDING_NONE"); + config[i].blending = S3C_FB_BLENDING_NONE; + } + if ((i < S3C_WIN_UPDATE_IDX) && + (config[i].state != config[i].S3C_FB_WIN_STATE_DISABLED) && + (config[i].w == 0 || config[i].h == 0)) { + config[i].state = config[i].S3C_FB_WIN_STATE_DISABLED; + } + + ALOGV("window %u configuration:", i); + 
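+        /* win_map comes from getDeconWinMap(); with four total overlays the fourth
+         * overlay is steered to hw window 4, so config[win_map] may differ from config[i]. */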
dumpConfig(config[win_map]); + } + + if (this->mVirtualOverlayFlag) { + handleStaticLayers(contents, win_data, tot_ovly_wins); + } + + if (!handleWindowUpdate(contents, config)) + HLOGD("[WIN_UPDATE] Not Triggered"); + + int ret = winconfigIoctl(&win_data); + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) + if (config[i].fence_fd != -1) + close(config[i].fence_fd); + if (ret < 0) { + ALOGE("ioctl S3CFB_WIN_CONFIG failed: %s", strerror(errno)); + return ret; + } + if (contents->numHwLayers == 1) { + hwc_layer_1_t &layer = contents->hwLayers[0]; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + } + + if (!mGscLayers || this->mOtfMode == OTF_TO_M2M) { + cleanupGscs(); + } + +#ifdef G2D_COMPOSITION + if (!this->mG2dComposition) + exynos5_cleanup_g2d(mHwc, 0); + + this->mG2dBypassCount = max( this->mG2dBypassCount-1, 0); +#endif + + memcpy(&(this->mLastConfigData), &win_data, sizeof(win_data)); + memcpy(this->mLastGscMap, pdata->gsc_map, sizeof(pdata->gsc_map)); + if (!this->mVirtualOverlayFlag) + this->mLastFbWindow = pdata->fb_window; + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + int layer_idx = pdata->overlay_map[i]; + if (layer_idx != -1) { + hwc_layer_1_t &layer = contents->hwLayers[layer_idx]; + win_map = getDeconWinMap(i, tot_ovly_wins); + this->mLastHandles[win_map] = layer.handle; + } + } + + return win_data.fence; +} + +int ExynosOverlayDisplay::clearDisplay() +{ + struct s3c_fb_win_config_data win_data; + memset(&win_data, 0, sizeof(win_data)); + + int ret = ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, &win_data); + LOG_ALWAYS_FATAL_IF(ret < 0, + "ioctl S3CFB_WIN_CONFIG failed to clear screen: %s", + strerror(errno)); + // the causes of an empty config failing are all unrecoverable + + return win_data.fence; +} + +int ExynosOverlayDisplay::set(hwc_display_contents_1_t* contents) +{ + hwc_layer_1_t *fb_layer = NULL; + int err = 0; + + if (this->mPostData.fb_window != NO_FB_NEEDED) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == + HWC_FRAMEBUFFER_TARGET) { + this->mPostData.overlay_map[this->mPostData.fb_window] = i; + fb_layer = &contents->hwLayers[i]; + break; + } + } + + if (CC_UNLIKELY(!fb_layer)) { + ALOGE("framebuffer target expected, but not provided"); + err = -EINVAL; + } else { + ALOGV("framebuffer target buffer:"); + dumpLayer(fb_layer); + } + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == HWC_OVERLAY && + contents->hwLayers[i].planeAlpha == 0) { + close(contents->hwLayers[i].acquireFenceFd); + contents->hwLayers[i].acquireFenceFd = -1; + contents->hwLayers[i].releaseFenceFd = -1; + } + } + + int fence; + if (!err) { + fence = postFrame(contents); + if (fence < 0) + err = fence; + } + + if (err) + fence = clearDisplay(); + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if (this->mPostData.overlay_map[i] != -1) { + hwc_layer_1_t &layer = + contents->hwLayers[this->mPostData.overlay_map[i]]; + int dup_fd = dup(fence); + if (dup_fd < 0) + ALOGW("release fence(%d) dup failed : %s", fence, strerror(errno)); + if (this->mPostData.gsc_map[i].mode == exynos5_gsc_map_t::GSC_M2M) { + int gsc_idx = this->mPostData.gsc_map[i].idx; + ExynosMPP &gsc = *mMPPs[gsc_idx]; + if (gsc.mDstBufFence[gsc.mCurrentBuf] >= 0) { + close (gsc.mDstBufFence[gsc.mCurrentBuf]); + gsc.mDstBufFence[gsc.mCurrentBuf] = -1; + } + gsc.mDstBufFence[gsc.mCurrentBuf] = dup_fd; + gsc.mCurrentBuf = (gsc.mCurrentBuf + 1) % gsc.mNumAvailableDstBuffers; + } else if 
(this->mPostData.gsc_map[i].mode == exynos5_gsc_map_t::GSC_LOCAL) { + layer.releaseFenceFd = dup_fd; +#ifdef G2D_COMPOSITION + } else if (this->mG2dComposition && this->mG2d.win_used[i]) { + int idx = this->mG2d.win_used[i] - 1; + this->mWinBufFence[idx][this->mG2dCurrentBuffer[idx]] = dup_fd; + this->mG2dCurrentBuffer[idx] = (this->mG2dCurrentBuffer[idx] + 1) % NUM_GSC_DST_BUFS; +#endif + } else { + layer.releaseFenceFd = dup_fd; + } + } + } + + if (contents->numHwLayers >= 1) { + hwc_layer_1_t &layer = contents->hwLayers[contents->numHwLayers-1]; + + if (mVirtualOverlayFlag && layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + int dup_fd = dup(fence); + if (dup_fd < 0) { + ALOGW("release fence(%d) dup failed in FB_TARGET: %s", fence, strerror(errno)); + } else { + layer.releaseFenceFd = dup_fd; + } + } + } + + contents->retireFenceFd = fence; + + mHwc->hwc_ctrl.max_num_ovly = mAllowedOverlays; + + return err; +} + +int ExynosOverlayDisplay::getCompModeSwitch() +{ + unsigned int tot_win_size = 0, updateFps = 0; + unsigned int lcd_size = this->mXres * this->mYres; + uint64_t TimeStampDiff; + float Temp; + + if (!mHwc->hwc_ctrl.dynamic_recomp_mode) { + mHwc->LastModeSwitchTimeStamp = 0; + mHwc->CompModeSwitch = NO_MODE_SWITCH; + return 0; + } + + /* initialize the Timestamps */ + if (!mHwc->LastModeSwitchTimeStamp) { + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + mHwc->CompModeSwitch = NO_MODE_SWITCH; + return 0; + } + + /* If video layer is there, skip the mode switch */ + if (mYuvLayers || mGscLayers >= 2 ) { + if (mHwc->CompModeSwitch != HWC_2_GLES) { + return 0; + } else { + mHwc->CompModeSwitch = GLES_2_HWC; + mHwc->updateCallCnt = 0; + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + ALOGI("[DYNAMIC_RECOMP] GLES_2_HWC by video layer"); + return GLES_2_HWC; + } + } + + /* Mode Switch is not required if total pixels are not more than the threshold */ + if ((uint32_t)mHwc->incomingPixels <= lcd_size * HWC_FIMD_BW_TH) { + if (mHwc->CompModeSwitch != HWC_2_GLES) { + return 0; + } else { + mHwc->CompModeSwitch = GLES_2_HWC; + mHwc->updateCallCnt = 0; + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + ALOGI("[DYNAMIC_RECOMP] GLES_2_HWC by BW check"); + return GLES_2_HWC; + } + } + + /* + * There will be at least one composition call per one minute (because of time update) + * To minimize the analysis overhead, just analyze it once in a second + */ + TimeStampDiff = systemTime(SYSTEM_TIME_MONOTONIC) - mHwc->LastModeSwitchTimeStamp; + + /* check fps every 250ms from LastModeSwitchTimeStamp*/ + if (TimeStampDiff < (VSYNC_INTERVAL * 15)) { + return 0; + } + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + Temp = (VSYNC_INTERVAL * 60) / TimeStampDiff; + updateFps = (int)(mHwc->updateCallCnt * Temp + 0.5); + mHwc->updateCallCnt = 0; + /* + * FPS estimation. 
+ * If FPS is lower than HWC_FPS_TH, try to switch the mode to GLES + */ + if (updateFps < HWC_FPS_TH) { + if (mHwc->CompModeSwitch != HWC_2_GLES) { + mHwc->CompModeSwitch = HWC_2_GLES; + ALOGI("[DYNAMIC_RECOMP] HWC_2_GLES by low FPS(%d)", updateFps); + return HWC_2_GLES; + } else { + return 0; + } + } else { + if (mHwc->CompModeSwitch == HWC_2_GLES) { + mHwc->CompModeSwitch = GLES_2_HWC; + ALOGI("[DYNAMIC_RECOMP] GLES_2_HWC by high FPS(%d)", updateFps); + return GLES_2_HWC; + } else { + return 0; + } + } + + return 0; +} + +int32_t ExynosOverlayDisplay::getDisplayAttributes(const uint32_t attribute) +{ + switch(attribute) { + case HWC_DISPLAY_VSYNC_PERIOD: + return this->mVsyncPeriod; + + case HWC_DISPLAY_WIDTH: + return this->mXres; + + case HWC_DISPLAY_HEIGHT: + return this->mYres; + + case HWC_DISPLAY_DPI_X: + return this->mXdpi; + + case HWC_DISPLAY_DPI_Y: + return this->mYdpi; + + default: + ALOGE("unknown display attribute %u", attribute); + return -EINVAL; + } +} + +void ExynosOverlayDisplay::skipStaticLayers(hwc_display_contents_1_t* contents) +{ + static int init_flag = 0; + int last_ovly_lay_idx = -1; + + mVirtualOverlayFlag = 0; + mLastOverlayWindowIndex = -1; + + if (!mHwc->hwc_ctrl.skip_static_layer_mode) + return; + + if (mBypassSkipStaticLayer) + return; + + if (contents->flags & HWC_GEOMETRY_CHANGED) { + init_flag = 0; + return; + } + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if (mPostData.overlay_map[i] != -1) { + last_ovly_lay_idx = mPostData.overlay_map[i]; + mLastOverlayWindowIndex = i; + } + } + + if ((last_ovly_lay_idx == -1) || !mFbNeeded || + ((mLastFb - mFirstFb + 1) > NUM_VIRT_OVER)) { + init_flag = 0; + return; + } + mLastOverlayLayerIndex = last_ovly_lay_idx; + + if (init_flag == 1) { + for (size_t i = mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (!layer.handle || (layer.flags & HWC_SKIP_LAYER) || (mLastLayerHandles[i - mFirstFb] != layer.handle)) { + init_flag = 0; + return; + } + } + + mVirtualOverlayFlag = 1; + for (size_t i = 0; i < contents->numHwLayers-1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_FRAMEBUFFER) { + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSkipStaticLayer; + } + } + mLastFbWindow = mPostData.fb_window; + return; + } + + init_flag = 1; + for (size_t i = 0; i < NUM_VIRT_OVER; i++) + mLastLayerHandles[i] = 0; + + for (size_t i = mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + mLastLayerHandles[i - mFirstFb] = layer.handle; + } + + return; +} + +void ExynosOverlayDisplay::forceYuvLayersToFb(hwc_display_contents_1_t *contents) +{ +} + +void ExynosOverlayDisplay::handleOffscreenRendering(hwc_layer_1_t &layer, hwc_display_contents_1_t *contents, int index) +{ +} + +void ExynosOverlayDisplay::determineYuvOverlay(hwc_display_contents_1_t *contents) +{ + mPopupPlayYuvContents = false; + mForceOverlayLayerIndex = -1; + mHasDrmSurface = false; + mYuvLayers = 0; + mHasCropSurface = false; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(handle->flags) != NO_DRM) { + this->mHasDrmSurface = true; + mForceOverlayLayerIndex = i; + } + + /* check yuv surface */ + if (mMPPs[0]->formatRequiresGsc(handle->format)) { + layer.sourceCropf.top = (int)layer.sourceCropf.top; + 
layer.sourceCropf.left = (int)layer.sourceCropf.left; + layer.sourceCropf.bottom = (int)(layer.sourceCropf.bottom + 0.9); + layer.sourceCropf.right = (int)(layer.sourceCropf.right + 0.9); + if (isOverlaySupported(contents->hwLayers[i], i)) { + this->mYuvLayers++; + if (this->mHasDrmSurface == false) + mForceOverlayLayerIndex = i; + } else if (i > 0 && layer.planeAlpha > 0) { + mForceFb = true; + } + } + + if (handle->flags & GRALLOC_USAGE_HW_CAMERA_MASK) { + mAllowedOverlays = mHwc->hwc_ctrl.max_num_ovly; + mHwc->hwc_ctrl.max_num_ovly = 3; + } + + handleOffscreenRendering(layer, contents, i); + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) || defined(USES_VIRTUAL_DISPLAY) + /* during rotation, it use gpu comosition */ + if (layer.flags & HWC_SCREENSHOT_ANIMATOR_LAYER) + mForceFb = true; +#endif + } + mPopupPlayYuvContents = !!(((mYuvLayers == 1) || (this->mHasDrmSurface)) && (mForceOverlayLayerIndex > 0)); + if (mYuvLayers > 1) { + mHwc->allowOTF = false; + if (mMPPs[0]->isOTF()) + mForceFb = true; + } else { + mHwc->allowOTF = true; + } + +#ifdef USE_FB_PHY_LINEAR + if ((!mHasDrmSurface) && +#ifdef G2D_COMPOSITION + (mG2dBypassCount > 0) && +#endif + (contents->flags & HWC_GEOMETRY_CHANGED)) + mForceFb = true; +#endif +} + +void ExynosOverlayDisplay::determineSupportedOverlays(hwc_display_contents_1_t *contents) +{ + bool videoLayer = false; + + mFbNeeded = false; + mFirstFb = ~0; + mLastFb = 0; + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) + mPostData.overlay_map[i] = -1; + + // find unsupported overlays + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + ALOGV("\tlayer %u: framebuffer target", i); + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + + if (layer.compositionType == HWC_BACKGROUND && !mForceFb) { + ALOGV("\tlayer %u: background supported", i); + dumpLayer(&contents->hwLayers[i]); + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if ((int)get_yuv_planes(halFormatToV4L2Format(handle->format)) > 0) { + videoLayer = true; + if (!mHwc->hdmi_hpd && mHwc->mS3DMode == S3D_MODE_READY) + mHwc->mS3DMode = S3D_MODE_RUNNING; + } + if ((!mPopupPlayYuvContents) + || ((uint32_t)mForceOverlayLayerIndex == i)) { + mHwc->incomingPixels += WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + if ((!mHasCropSurface || mPopupPlayYuvContents) || + ((mHasDrmSurface) && ((uint32_t)mForceOverlayLayerIndex == i))) { + if (isOverlaySupported(contents->hwLayers[i], i) && + !(mForceFb && (getDrmMode(handle->flags) == NO_DRM)) && (!mHwc->hwc_ctrl.dynamic_recomp_mode || + ((mHwc->CompModeSwitch != HWC_2_GLES) || + (getDrmMode(handle->flags) != NO_DRM)))) { + mHwc->totPixels += WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + ALOGV("\tlayer %u: overlay supported", i); + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + if (mPopupPlayYuvContents) + layer.hints = HWC_HINT_CLEAR_FB; + dumpLayer(&contents->hwLayers[i]); + continue; + } else { + if (mForceFb) + mLayerInfos[i]->mCheckOverlayFlag |= eForceFbEnabled; + else if (mHwc->hwc_ctrl.dynamic_recomp_mode && mHwc->CompModeSwitch == HWC_2_GLES) + mLayerInfos[i]->mCheckOverlayFlag |= eDynamicRecomposition; + else if (isOverlaySupported(contents->hwLayers[i], i)) + mLayerInfos[i]->mCheckOverlayFlag |= eUnknown; + } + } else { + if 
(mHasCropSurface == true) + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedCoordinate; + else + mLayerInfos[i]->mCheckOverlayFlag |= eUnknown; + } + } else { + /* There is popup video but this layer is not popup video layer */ + mLayerInfos[i]->mCheckOverlayFlag |= eHasPopupVideo; + } + } else { + mLayerInfos[i]->mCheckOverlayFlag |= eInvalidHandle; + } + + if (!mFbNeeded) { + mFirstFb = i; + mFbNeeded = true; + } + mLastFb = i; + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + + dumpLayer(&contents->hwLayers[i]); + } + + if (!mHwc->hdmi_hpd && mHwc->mS3DMode == S3D_MODE_RUNNING && !videoLayer && !mForceFb) + mHwc->mS3DMode = S3D_MODE_DISABLED; + hwc_rect_t base_rect; + hwc_rect_t intersect_rect; + private_handle_t *handle = NULL; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + if (layer.compositionType == HWC_OVERLAY) { + if (i == 0) { + base_rect = layer.displayFrame; + } else if (hasPlaneAlpha(layer) || (handle && hasAlpha(handle->format) && mMPPs[0]->isProcessingRequired(layer, handle->format))) { + //if alpha layer is not completely overlapped with base layer, bypass the alpha layer to GLES. + intersect_rect = intersection(base_rect, layer.displayFrame); + if (!rectEqual(intersect_rect, layer.displayFrame)) { + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedBlending; + mFirstFb = min(mFirstFb, i); + mLastFb = max(mLastFb, i); + mFbNeeded = true; + break; + } + } + } else { + // if one of the bottom layer is HWC_FRAMEBUFFER type, no need to force the alpha layer to FRAMEBUFFER type. + break; + } + } + mFirstFb = min(mFirstFb, (size_t)NUM_HW_WINDOWS-1); + // can't composite overlays sandwiched between framebuffers + if (mFbNeeded) { + for (size_t i = mFirstFb; i < mLastFb; i++) { + if (mPopupPlayYuvContents && ((uint32_t)mForceOverlayLayerIndex == i)) { + mFirstFb = 1; + break; + } + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(handle->flags) == NO_DRM) { + contents->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = contents->hwLayers[i].compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSandwitchedBetweenGLES; + } + } + } + } +} + +void ExynosOverlayDisplay::determineBandwidthSupport(hwc_display_contents_1_t *contents) +{ + // Incrementally try to add our supported layers to hardware windows. + // If adding a layer would violate a hardware constraint, force it + // into the framebuffer and try again. (Revisiting the entire list is + // necessary because adding a layer to the framebuffer can cause other + // windows to retroactively violate constraints.) 
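+    // In addition to per-channel bandwidth, the retry loop below tracks how many
+    // of the accepted rects still share a common intersection (windowIntersection,
+    // intersectionCnt); if that count exceeds mMaxWindowOverlapCnt, the pass is
+    // repeated with reduceAvailableWindowCnt set so the usable window count is clamped.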
+ bool changed; + this->mBypassSkipStaticLayer = false; + unsigned int cannotComposeFlag = 0; + bool addFB = true; + hwc_rect windowIntersection; + windowIntersection.left = 0; + windowIntersection.top = 0; + windowIntersection.right = mXres; + windowIntersection.bottom = mYres; + int intersectionCnt = 0; + bool reduceAvailableWindowCnt = false; + // Initialize to inverse values so that + // min(left, l) = l, min(top, t) = t + // max(right, r) = r, max(bottom, b) = b + // for all l, t, r, b + mFbUpdateRegion.left = mXres; + mFbUpdateRegion.top = mYres; + mFbUpdateRegion.right = 0; + mFbUpdateRegion.bottom = 0; + + uint32_t pixel_used[MAX_NUM_FIMD_DMA_CH]; + do { + android::Vector rects[MAX_NUM_FIMD_DMA_CH]; + android::Vector overlaps[MAX_NUM_FIMD_DMA_CH]; + int dma_ch_idx; + size_t win_idx = 0; + size_t windows_left; + memset(&pixel_used[0], 0, sizeof(pixel_used)); + mGscUsed = false; + windowIntersection.left = 0; + windowIntersection.top = 0; + windowIntersection.right = mXres; + windowIntersection.bottom = mYres; + intersectionCnt = 0; + + if (mFbNeeded) { + hwc_rect_t fb_rect; + fb_rect.top = fb_rect.left = 0; + fb_rect.right = this->mXres - 1; + fb_rect.bottom = this->mYres - 1; + dma_ch_idx = FIMD_DMA_CH_IDX[mFirstFb]; + pixel_used[dma_ch_idx] = (uint32_t) (this->mXres * this->mYres); + win_idx = (win_idx == mFirstFb) ? (win_idx + 1) : win_idx; +#ifdef USE_FB_PHY_LINEAR + windows_left = 1; +#ifdef G2D_COMPOSITION + if (this->mG2dComposition) + windows_left = NUM_HW_WIN_FB_PHY - 1; +#endif +#else + windows_left = mHwc->hwc_ctrl.max_num_ovly - 1; +#endif + if (reduceAvailableWindowCnt) + windows_left = min(windows_left, mMaxWindowOverlapCnt - 1); + rects[dma_ch_idx].push_back(fb_rect); + if (addFB) { + addFB = false; + mHwc->totPixels += mXres * mYres; + } + } + else { +#ifdef USE_FB_PHY_LINEAR + windows_left = 1; +#ifdef G2D_COMPOSITION + if (this->mG2dComposition) + windows_left = NUM_HW_WIN_FB_PHY; +#endif +#else + windows_left = mHwc->hwc_ctrl.max_num_ovly; +#endif + if (reduceAvailableWindowCnt) + windows_left = min(windows_left, mMaxWindowOverlapCnt); + } + + changed = false; + mGscLayers = 0; + mCurrentGscIndex = 0; + mCurrentRGBMPPIndex = 0; + bool switchingOTF = false; + reduceAvailableWindowCnt = false; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if ((layer.flags & HWC_SKIP_LAYER) || + layer.compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + + private_handle_t *handle = private_handle_t::dynamicCast( + layer.handle); + + if (!layer.planeAlpha) + continue; + + // we've already accounted for the framebuffer above + if (layer.compositionType == HWC_FRAMEBUFFER) + continue; + + // only layer 0 can be HWC_BACKGROUND, so we can + // unconditionally allow it without extra checks + if (layer.compositionType == HWC_BACKGROUND) { + windows_left--; + continue; + } + dma_ch_idx = FIMD_DMA_CH_IDX[win_idx]; + + size_t pixels_needed = 0; + if (getDrmMode(handle->flags) != SECURE_DRM) + pixels_needed = getRequiredPixels(layer, mXres, mYres); + else + pixels_needed = WIDTH(layer.displayFrame) * + HEIGHT(layer.displayFrame); + + cannotComposeFlag = 0; + bool can_compose = windows_left && (win_idx < NUM_HW_WINDOWS) && + ((pixel_used[dma_ch_idx] + pixels_needed) <= + (uint32_t)this->mDmaChannelMaxBandwidth[dma_ch_idx]); + if (windows_left <= 0 || (win_idx >= NUM_HW_WINDOWS)) + cannotComposeFlag |= eInsufficientWindow; + if ((pixel_used[dma_ch_idx] + pixels_needed) > (uint32_t)this->mDmaChannelMaxBandwidth[dma_ch_idx]) + 
cannotComposeFlag |= eInsufficientBandwidth; + + int gsc_index = getMPPForUHD(layer); + + bool gsc_required = mMPPs[gsc_index]->isProcessingRequired(layer, handle->format); + if (gsc_required) { + if (mHasDrmSurface && getDrmMode(handle->flags) == NO_DRM && MAX_VIDEO_LAYERS == 1) { + can_compose = false; + cannotComposeFlag = eHasDRMVideo; + } + if (mGscLayers >= MAX_VIDEO_LAYERS) { + can_compose = can_compose && !mGscUsed; + if (mGscUsed) + cannotComposeFlag |= eInsufficientMPP; + } + if (mHwc->hwc_ctrl.num_of_video_ovly <= mGscLayers) { + can_compose = false; + cannotComposeFlag |= eInsufficientMPP; + } + if (switchingOTF) { + can_compose = false; + cannotComposeFlag |= eSwitchingLocalPath; + } + if (mOtfMode == OTF_RUNNING && win_idx > 0) + switchingOTF = true; + if (multipleRGBScaling(handle->format)) { + can_compose = false; + cannotComposeFlag |= eRGBLayerDuringVideoPlayback; + } + } + + // hwc_rect_t right and bottom values are normally exclusive; + // the intersection logic is simpler if we make them inclusive + hwc_rect_t visible_rect = layer.displayFrame; + visible_rect.right--; visible_rect.bottom--; + + if (can_compose) { + switch (this->mDmaChannelMaxOverlapCount[dma_ch_idx]) { + case 1: // It means, no layer overlap is allowed + for (size_t j = 0; j < rects[dma_ch_idx].size(); j++) + if (intersect(visible_rect, rects[dma_ch_idx].itemAt(j))) { + can_compose = false; + cannotComposeFlag |= eInsufficientOverlapCount; + } + break; + case 2: //It means, upto 2 layer overlap is allowed. + for (size_t j = 0; j < overlaps[dma_ch_idx].size(); j++) + if (intersect(visible_rect, overlaps[dma_ch_idx].itemAt(j))) { + can_compose = false; + cannotComposeFlag |= eInsufficientOverlapCount; + } + break; + default: + break; + } + if (!can_compose) + this->mBypassSkipStaticLayer = true; + } + + if (!can_compose) { + if (getDrmMode(handle->flags) != NO_DRM) { + mFirstFb = 1; + changed = true; + break; + } else { + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= cannotComposeFlag; + if (!mFbNeeded) { + mFirstFb = mLastFb = i; + mFbNeeded = true; + } + else { + mFirstFb = min(i, mFirstFb); + mLastFb = max(i, mLastFb); + } + changed = true; + mFirstFb = min(mFirstFb, (size_t)NUM_HW_WINDOWS-1); + mHwc->totPixels -= WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + break; + } + } + + for (size_t j = 0; j < rects[dma_ch_idx].size(); j++) { + const hwc_rect_t &other_rect = rects[dma_ch_idx].itemAt(j); + if (intersect(visible_rect, other_rect)) + overlaps[dma_ch_idx].push_back(intersection(visible_rect, other_rect)); + } + + rects[dma_ch_idx].push_back(visible_rect); + pixel_used[dma_ch_idx] += pixels_needed; + win_idx++; + win_idx = (win_idx == mFirstFb) ? 
(win_idx + 1) : win_idx; + win_idx = min(win_idx, NUM_HW_WINDOWS - 1); + windows_left--; + if (gsc_required) { + mGscUsed = true; + mGscLayers++; + } + + if (intersect(visible_rect, windowIntersection)) { + windowIntersection = intersection(visible_rect, windowIntersection); + intersectionCnt++; + } else { + windowIntersection = {0, 0, 0, 0}; + intersectionCnt = 0; + } + } + if (mFbNeeded && intersectionCnt != 0) + intersectionCnt++; + if (changed) { + for (size_t i = mFirstFb; i < mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(handle->flags) == NO_DRM) { + contents->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = contents->hwLayers[i].compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSandwitchedBetweenGLES; + } + } + } + } + handleTotalBandwidthOverload(contents); + if (intersectionCnt > mMaxWindowOverlapCnt) { + HLOGD("Total Overlap Cnt(%d) > Max Overlap Cnt(%d)", intersectionCnt, mMaxWindowOverlapCnt); + changed = true; + reduceAvailableWindowCnt = true; + } + } while(changed); + + if (mGscLayers == 0) + mHwc->allowOTF = true; + + if (mPopupPlayYuvContents) { + mFbUpdateRegion.left = 0; + mFbUpdateRegion.top = 0; + mFbUpdateRegion.right = mXres; + mFbUpdateRegion.bottom = mYres; + return; + } + + for (size_t i = mFirstFb; i <= mLastFb; i++) + mFbUpdateRegion = expand(mFbUpdateRegion, contents->hwLayers[i].displayFrame); + + for (size_t i = mLastFb + 1; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY && layer.planeAlpha > 0 && layer.planeAlpha < 255) + mFbUpdateRegion = expand(mFbUpdateRegion, contents->hwLayers[i].displayFrame); + } + + int minWidth = BURSTLEN_BYTES * 8 / formatToBpp(HAL_PIXEL_FORMAT_RGBA_8888); + int w = WIDTH(mFbUpdateRegion); + if (w < minWidth) { + if (mFbUpdateRegion.left + minWidth <= mXres) + mFbUpdateRegion.right = mFbUpdateRegion.left + minWidth; + else + mFbUpdateRegion.left = mFbUpdateRegion.right - minWidth; + } +} + +void ExynosOverlayDisplay::assignWindows(hwc_display_contents_1_t *contents) +{ + unsigned int nextWindow = 0; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (!layer.planeAlpha) + continue; + + if (!mPopupPlayYuvContents) { + if (mFbNeeded && i == mFirstFb) { + ALOGV("assigning framebuffer to window %u\n", + nextWindow); + mPostData.fb_window = nextWindow; + nextWindow++; + continue; + } + } + + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (ExynosMPP::isFormatSupportedByGscOtf(handle->format)) { + /* in case of changing compostiontype form GSC to FRAMEBUFFER for yuv layer */ + if ((mConfigMode == 1) && (layer.compositionType == HWC_FRAMEBUFFER)) { + mForceFbYuvLayer = 1; + mConfigMode = 0; + mCountSameConfig = 0; + /* for prepare */ + mForceFb = 1; + mRetry = true; + return; + } + } + } + if (layer.compositionType != HWC_FRAMEBUFFER && + layer.compositionType != HWC_FRAMEBUFFER_TARGET) { + ALOGV("assigning layer %u to window %u", i, nextWindow); + this->mPostData.overlay_map[nextWindow] = i; + if (layer.compositionType == HWC_OVERLAY) { + private_handle_t *handle = + private_handle_t::dynamicCast(layer.handle); + if (mMPPs[0]->isProcessingRequired(layer, handle->format)) { + if (assignGscLayer(layer, i, nextWindow)) + mCurrentGscIndex++; + } + } + nextWindow++; + } + } +} + +bool 
ExynosOverlayDisplay::assignGscLayer(hwc_layer_1_t &layer, int index, int nextWindow) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + size_t gscIndex = 0; + int downNumerator, downDenominator; + int downError = mMPPs[0]->getDownscaleRatio(&downNumerator, &downDenominator); + bool ret = true; + if (nextWindow == 0 && + !(handle->flags & GRALLOC_USAGE_HW_CAMERA_MASK) && + !downError && + (mMPPs[0]->isProcessingSupported(layer, handle->format, true, downNumerator, downDenominator) > 0)) { + gscIndex = mCurrentGscIndex; + ALOGV("\tusing gscaler %u in LOCAL-PATH", AVAILABLE_GSC_UNITS[gscIndex]); + if (!mMPPs[gscIndex]->isOTF()) + mMPPs[gscIndex]->cleanupM2M(true); + mPostData.gsc_map[nextWindow].mode = exynos5_gsc_map_t::GSC_LOCAL; + mMPPs[gscIndex]->setMode(exynos5_gsc_map_t::GSC_LOCAL); + mPostData.gsc_map[nextWindow].idx = FIMD_GSC_IDX; + if (mOtfMode == OTF_OFF) + mOtfMode = OTF_RUNNING; + mMPPs[0]->mNeedReqbufs = false; + } else { + gscIndex = getMPPForUHD(layer); + if (gscIndex == FIMD_GSC_IDX) { + gscIndex = mCurrentGscIndex; + ret = true; + } else { + ret = false; + } + if (isFormatRgb(handle->format)) { + gscIndex = getRGBMPPIndex(mCurrentRGBMPPIndex); + if (gscIndex != FIMD_GSC_IDX && gscIndex != FIMD_GSC_SEC_IDX) + mCurrentRGBMPPIndex++; + } + ALOGV("\tusing gscaler %u in M2M", AVAILABLE_GSC_UNITS[gscIndex]); + mPostData.gsc_map[nextWindow].mode = exynos5_gsc_map_t::GSC_M2M; + mMPPs[gscIndex]->setMode(exynos5_gsc_map_t::GSC_M2M); + mPostData.gsc_map[nextWindow].idx = gscIndex; + if ((gscIndex == FIMD_GSC_IDX || (mMPPs[gscIndex]->isUsingMSC() && mCurrentGscIndex == 0)) && mOtfMode == OTF_RUNNING) { + ALOGV("change from OTF to M2M"); + mOtfMode = OTF_TO_M2M; + if (mMPPs[gscIndex]->isUsingMSC()) { + mMPPs[0]->setMode(exynos5_gsc_map_t::GSC_LOCAL); + } else { + mMPPs[gscIndex]->setMode(exynos5_gsc_map_t::GSC_LOCAL); + mMPPs[FIMD_GSC_SEC_IDX]->setMode(exynos5_gsc_map_t::GSC_M2M); + mPostData.gsc_map[nextWindow].idx = FIMD_GSC_SEC_IDX; + } + } + } + ALOGV("\tusing gscaler %u", + AVAILABLE_GSC_UNITS[gscIndex]); + return ret; +} + +int ExynosOverlayDisplay::waitForRenderFinish(buffer_handle_t *handle, int buffers) +{ + return 0; +} + +int ExynosOverlayDisplay::postGscM2M(hwc_layer_1_t &layer, struct s3c_fb_win_config *config, int win_map, int index, bool isBottom) +{ + exynos5_hwc_post_data_t *pdata = &mPostData; + int gsc_idx = pdata->gsc_map[index].idx; + int dst_format = HAL_PIXEL_FORMAT_RGBX_8888; + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + int32_t deconBlending = HWC_BLENDING_NONE; + int32_t originalBlending = HWC_BLENDING_NONE; + + hwc_frect_t sourceCrop = { 0, 0, + (float)WIDTH(layer.displayFrame), (float)HEIGHT(layer.displayFrame) }; + if (mHwc->mS3DMode == S3D_MODE_READY || mHwc->mS3DMode == S3D_MODE_RUNNING) { + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + mMPPs[gsc_idx]->mS3DMode = S3D_SBS; + else if (S3DFormat == S3D_TB) + mMPPs[gsc_idx]->mS3DMode = S3D_TB; + } else { + mMPPs[gsc_idx]->mS3DMode = S3D_NONE; + } + + if (isFormatRgb(handle->format)) + waitForRenderFinish(&layer.handle, 1); + /* OFF_Screen to ON_Screen changes */ + if (getDrmMode(handle->flags) == SECURE_DRM) + recalculateDisplayFrame(layer, mXres, mYres); + + if (handle->format == HAL_PIXEL_FORMAT_RGB_565) + dst_format = HAL_PIXEL_FORMAT_RGB_565; + + if (isBottom) { + originalBlending = layer.blending; + layer.blending = HWC_BLENDING_NONE; + } + + int err = mMPPs[gsc_idx]->processM2M(layer, dst_format, &sourceCrop); + + 
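/* restore the original blending mode; it was forced to HWC_BLENDING_NONE for the bottom-most layer before the M2M run */ +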
if (isBottom) + layer.blending = originalBlending; + + if (err < 0) { + ALOGE("failed to configure gscaler %u for layer %u", + gsc_idx, index); + pdata->gsc_map[index].mode = exynos5_gsc_map_t::GSC_NONE; + return -1; + } + + buffer_handle_t dst_buf = mMPPs[gsc_idx]->mDstBuffers[mMPPs[gsc_idx]->mCurrentBuf]; + private_handle_t *dst_handle = + private_handle_t::dynamicCast(dst_buf); + int prevFormat = -1; + if (((handle->format == HAL_PIXEL_FORMAT_RGBA_8888) || (handle->format == HAL_PIXEL_FORMAT_RGBX_8888) || (handle->format == HAL_PIXEL_FORMAT_RGB_565)) + && dst_handle->format == HAL_PIXEL_FORMAT_BGRA_8888) { + prevFormat = dst_handle->format; + dst_handle->format = handle->format; + } + int fence = mMPPs[gsc_idx]->mDstConfig.releaseFenceFd; + + if (layer.blending == HWC_BLENDING_PREMULT) + deconBlending = HWC_BLENDING_COVERAGE; + else + deconBlending = layer.blending; + + configureHandle(dst_handle, sourceCrop, + layer.displayFrame, deconBlending, layer.planeAlpha, fence, + config[win_map]); + +#ifdef USES_DRM_SETTING_BY_DECON + if (getDrmMode(handle->flags) != NO_DRM) + config[win_map].protection = 1; + else + config[win_map].protection = 0; +#endif + + if (prevFormat >= 0) + dst_handle->format = prevFormat; + return 0; +} + +void ExynosOverlayDisplay::postGscOtf(hwc_layer_1_t &layer, struct s3c_fb_win_config *config, int win_map, int index) +{ +#ifdef USES_DRM_SETTING_BY_DECON + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); +#endif + exynos5_hwc_post_data_t *pdata = &mPostData; + int gsc_idx = pdata->gsc_map[index].idx; + if (mHwc->mS3DMode == S3D_MODE_READY || mHwc->mS3DMode == S3D_MODE_RUNNING) { + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + mMPPs[gsc_idx]->mS3DMode = S3D_SBS; + else if (S3DFormat == S3D_TB) + mMPPs[gsc_idx]->mS3DMode = S3D_TB; + } else { + mMPPs[gsc_idx]->mS3DMode = S3D_NONE; + } + + int err = mMPPs[gsc_idx]->processOTF(layer); + + if (err < 0) { + ALOGE("failed to config_gsc_localout %u input for layer %u", gsc_idx, index); + mOtfMode = OTF_TO_M2M; + configureDummyOverlay(&layer, config[win_map]); + } else { + configureOtfWindow(layer.displayFrame, layer.blending, layer.planeAlpha, + layer.acquireFenceFd, HAL_PIXEL_FORMAT_RGBX_8888, config[win_map]); + } +#ifdef USES_DRM_SETTING_BY_DECON + if (getDrmMode(handle->flags) != NO_DRM) + config[win_map].protection = 1; + else + config[win_map].protection = 0; +#endif + return; +} + +void ExynosOverlayDisplay::handleStaticLayers(hwc_display_contents_1_t *contents, struct s3c_fb_win_config_data &win_data, int tot_ovly_wins) +{ + int win_map = 0; + if (mLastFbWindow >= NUM_HW_WINDOWS) { + ALOGE("handleStaticLayers:: invalid mLastFbWindow(%d)", mLastFbWindow); + return; + } + win_map = getDeconWinMap(mLastFbWindow, tot_ovly_wins); + ALOGV("[USE] SKIP_STATIC_LAYER_COMP, mLastFbWindow(%d), win_map(%d)\n", mLastFbWindow, win_map); + + memcpy(&win_data.config[win_map], + &mLastConfigData.config[win_map], sizeof(struct s3c_fb_win_config)); + win_data.config[win_map].fence_fd = -1; + + for (size_t i = mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY) { + ALOGV("[SKIP_STATIC_LAYER_COMP] layer.handle: 0x%p, layer.acquireFenceFd: %d\n", layer.handle, layer.acquireFenceFd); + layer.releaseFenceFd = layer.acquireFenceFd; + } + } +} + +int ExynosOverlayDisplay::getMPPForUHD(hwc_layer_1_t &layer) +{ + return FIMD_GSC_IDX; +} + +void ExynosOverlayDisplay::cleanupGscs() +{ + if (mMPPs[FIMD_GSC_IDX]->isM2M()) 
{ + mMPPs[FIMD_GSC_IDX]->cleanupM2M(); + mMPPs[FIMD_GSC_IDX]->setMode(exynos5_gsc_map_t::GSC_NONE); + } else if (mMPPs[FIMD_GSC_IDX]->isOTF()) { + mMPPs[FIMD_GSC_IDX]->cleanupOTF(); + } else if (mMPPs[FIMD_GSC_SEC_IDX]->isM2M()) { + mMPPs[FIMD_GSC_SEC_IDX]->cleanupM2M(); + mMPPs[FIMD_GSC_SEC_IDX]->setMode(exynos5_gsc_map_t::GSC_NONE); + } + for (int i = FIMD_GSC_SEC_IDX + 1; i < mNumMPPs; i++) { + if (mMPPs[i]->isM2M()) { + mMPPs[i]->cleanupM2M(); + mMPPs[i]->setMode(exynos5_gsc_map_t::GSC_NONE); + } + } +} + +void ExynosOverlayDisplay::freeMPP() +{ + bool invalidate = false; + for (int i = 0; i < mNumMPPs; i++) { + if (mMPPs[i]->needsReqbufs()) { + mMPPs[i]->free(); + invalidate = true; + } + } + if (invalidate) + mHwc->procs->invalidate(mHwc->procs); +} + +void ExynosOverlayDisplay::refreshGscUsage(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + int gsc_index = getMPPForUHD(layer); + bool gsc_required = mMPPs[gsc_index]->isProcessingRequired(layer, handle->format); + if (gsc_required) { + if (mGscLayers >= 1) + mGscLayers--; + else + ALOGW("refreshGscUsage: GSC layer count is invalid"); + if (!mGscLayers) + mGscUsed = false; + } +} + +void ExynosOverlayDisplay::handleTotalBandwidthOverload(hwc_display_contents_1_t *contents) +{ + if (mHwc->totPixels >= FIMD_TOTAL_BW_LIMIT) { + if (mFbNeeded) { + for (int i = mFirstFb - 1; i >= 0; i--) { + if (mForceOverlayLayerIndex == 0 && i == 0) + break; + hwc_layer_1_t &layer = contents->hwLayers[i]; + refreshGscUsage(layer); + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eInsufficientBandwidth; + mFirstFb = i; + mHwc->totPixels -= WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + if (mHwc->totPixels < FIMD_TOTAL_BW_LIMIT) + break; + } + if (mHwc->totPixels >= FIMD_TOTAL_BW_LIMIT) { + for (int i = mLastFb + 1; i < contents->numHwLayers - 1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + refreshGscUsage(layer); + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eInsufficientBandwidth; + mLastFb = i; + mHwc->totPixels -= WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + if (mHwc->totPixels < FIMD_TOTAL_BW_LIMIT) + break; + } + } + } else { + bool fbAdded = false; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY && + mForceOverlayLayerIndex != i) { + refreshGscUsage(layer); + layer.compositionType = HWC_FRAMEBUFFER; + mLastFb = max(mLastFb, i); + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eInsufficientBandwidth; + + if (!fbAdded) { + fbAdded = true; + mHwc->totPixels += mXres * mYres; + } + if (mHwc->totPixels < FIMD_TOTAL_BW_LIMIT) + break; + } + } + if (mPopupPlayYuvContents || mForceOverlayLayerIndex == 0) + mFirstFb = 1; + else + mFirstFb = 0; + } + mFbNeeded = true; + } +} + +bool ExynosOverlayDisplay::switchOTF(bool enable) +{ + bool ret; + if (mGscLayers > 1) + return false; + ret = mHwc->allowOTF != enable; + mHwc->allowOTF = enable; + return ret; +} + +int ExynosOverlayDisplay::getRGBMPPIndex(int index) +{ + return index; +} + +bool ExynosOverlayDisplay::multipleRGBScaling(int format) +{ + return isFormatRgb(format) && + mGscLayers >= 1; +} + +void ExynosOverlayDisplay::checkTVBandwidth() +{ +} + +void 
ExynosOverlayDisplay::dump(android::String8& result) +{ + result.append( + " type | handle | color | blend | pa | format | position | size | gsc \n" + "----------+----------|----------+-------+----|----------+---------------+---------------------\n"); + // 8_______ | 8_______ | 8_______ | 5____ | 2_ | 8_______ | [5____,5____] | [5____,5____] | 3__ \n" + + int windowsUsed = 0; + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + struct s3c_fb_win_config &config = mLastConfigData.config[i]; + if (config.state != config.S3C_FB_WIN_STATE_DISABLED) + windowsUsed++; + } + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + struct s3c_fb_win_config &config = mLastConfigData.config[i]; + size_t inverse = inverseWinMap(i, windowsUsed); + if ((config.state == config.S3C_FB_WIN_STATE_DISABLED) && + (mLastGscMap[inverse].mode == exynos5_gsc_map_t::GSC_NONE)){ + result.appendFormat(" %8s | %8s | %8s | %5s | %2s | %8s | %13s | %13s", + "OVERLAY", "-", "-", "-", "-", "-", "-", "-"); + } + else { + if (config.state == config.S3C_FB_WIN_STATE_COLOR) + result.appendFormat(" %8s | %8s | %8x | %5s | %2s | %8s", "COLOR", + "-", config.color, "-", "-", "-"); + else + result.appendFormat(" %8s | %8x | %8s | %5x | %2x | %8s", + mLastFbWindow == inverse ? "FB" : "OVERLAY", + intptr_t(mLastHandles[i]), + "-", config.blending, config.plane_alpha, s3cFormat2str(config.format)); + + result.appendFormat(" | [%5d,%5d] | [%5u,%5u]", config.x, config.y, + config.w, config.h); + } + if (mLastGscMap[inverse].mode == exynos5_gsc_map_t::GSC_NONE) { + result.appendFormat(" | %3s", "-"); + } else { + result.appendFormat(" | %3d", + AVAILABLE_GSC_UNITS[mLastGscMap[inverse].idx]); + if (mLastGscMap[inverse].mode == exynos5_gsc_map_t::GSC_M2M) + result.appendFormat(" | %10s","GSC_M2M"); + else + result.appendFormat(" | %10s","GSC_LOCAL"); + } + result.append("\n"); + } +} + +unsigned int ExynosOverlayDisplay::getLayerRegion(hwc_layer_1_t &layer, hwc_rect &rect_area, uint32_t regionType) { + hwc_rect_t const *hwcRects = NULL; + unsigned int numRects = 0; + + switch (regionType) { + case eDamageRegion: + hwcRects = layer.surfaceDamage.rects; + numRects = layer.surfaceDamage.numRects; + break; + default: + ALOGE("%s:: Invalid regionType (%d)", __func__, regionType); + return eDamageRegionError; + } + if ((numRects == 0) || (hwcRects == NULL)) + return eDamageRegionFull; + + if ((numRects == 1) && (hwcRects[0].left == 0) && (hwcRects[0].top == 0) && + (hwcRects[0].right == 0) && (hwcRects[0].bottom == 0)) + return eDamageRegionSkip; + + rect_area.left = INT_MAX; + rect_area.top = INT_MAX; + rect_area.right = rect_area.bottom = 0; + if (hwcRects != NULL) { + for (size_t j = 0; j < numRects; j++) { + hwc_rect_t rect; + rect.left = hwcRects[j].left; + rect.top = hwcRects[j].top; + rect.right = hwcRects[j].right; + rect.bottom = hwcRects[j].bottom; + adjustRect(rect, INT_MAX, INT_MAX); + /* Get sums of rects */ + rect_area = expand(rect_area, rect); + } + } + + return eDamageRegionPartial; +} diff --git a/libdisplay/ExynosOverlayDisplay.h b/libdisplay/ExynosOverlayDisplay.h new file mode 100644 index 0000000..d3d4824 --- /dev/null +++ b/libdisplay/ExynosOverlayDisplay.h @@ -0,0 +1,140 @@ +#ifndef EXYNOS_PRIMARY_DISPLAY_H +#define EXYNOS_PRIMARY_DISPLAY_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +#define S3D_ERROR -1 +#define HDMI_PRESET_DEFAULT V4L2_DV_1080P60 +#define HDMI_PRESET_ERROR -1 + +class ExynosMPPModule; + +enum regionType { + eTransparentRegion = 0, + eCoveredOpaqueRegion = 1, + eDamageRegion = 2, +}; + +enum { + 
eDamageRegionFull = 0, + eDamageRegionPartial, + eDamageRegionSkip, + eDamageRegionError, +}; + +class ExynosOverlayDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosOverlayDisplay(int numGSCs, struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosOverlayDisplay(); + + virtual int prepare(hwc_display_contents_1_t *contents); + virtual int set(hwc_display_contents_1_t *contents); + virtual void dump(android::String8& result); + virtual void freeMPP(); + virtual void handleTotalBandwidthOverload(hwc_display_contents_1_t *contents); + + int clearDisplay(); + int getCompModeSwitch(); + int32_t getDisplayAttributes(const uint32_t attribute); + + bool switchOTF(bool enable); + + /* Fields */ + ExynosMPPModule **mMPPs; + + exynos5_hwc_post_data_t mPostData; + const private_module_t *mGrallocModule; + +#ifdef USE_FB_PHY_LINEAR + buffer_handle_t mWinBuf[NUM_HW_WINDOWS][NUM_GSC_DST_BUFS]; +#ifdef G2D_COMPOSITION + int mG2dComposition; + exynos5_g2d_data_t mG2d; + int mG2dLayers; + int mAllocatedLayers; + uint32_t mWinBufVirtualAddress[NUM_HW_WINDOWS][NUM_GSC_DST_BUFS]; + int mWinBufFence[NUM_HW_WINDOWS][NUM_GSC_DST_BUFS]; + int mG2dCurrentBuffer[NUM_HW_WINDOWS]; + uint32_t mLastG2dLayerHandle[NUM_HW_WINDOWS]; + uint32_t mWinBufMapSize[NUM_HW_WINDOWS]; + int mG2dMemoryAllocated; + int mG2dBypassCount; +#endif +#endif + + struct s3c_fb_win_config_data mLastConfigData; + size_t mLastFbWindow; + const void *mLastHandles[NUM_HW_WINDOWS]; + exynos5_gsc_map_t mLastGscMap[NUM_HW_WINDOWS]; + const void *mLastLayerHandles[NUM_VIRT_OVER]; + int mLastOverlayWindowIndex; + int mLastOverlayLayerIndex; + int mVirtualOverlayFlag; + + bool mForceFbYuvLayer; + int mCountSameConfig; + /* g3d = 0, gsc = 1 */ + int mConfigMode; + video_layer_config mPrevDstConfig[MAX_VIDEO_LAYERS]; + + int mGscLayers; + + bool mPopupPlayYuvContents; + bool mHasCropSurface; + int mYuvLayers; + + bool mBypassSkipStaticLayer; + uint32_t mDmaChannelMaxBandwidth[MAX_NUM_FIMD_DMA_CH]; + uint32_t mDmaChannelMaxOverlapCount[MAX_NUM_FIMD_DMA_CH]; + + bool mGscUsed; + int mCurrentGscIndex; + int mCurrentRGBMPPIndex; + bool mBlanked; + hwc_rect mFbUpdateRegion; + bool mFbNeeded; + size_t mFirstFb; + size_t mLastFb; + bool mForceFb; + int mForceOverlayLayerIndex; + bool mRetry; + int mAllowedOverlays; + size_t mMaxWindowOverlapCnt; + + protected: + /* Methods */ + void configureOtfWindow(hwc_rect_t &displayFrame, + int32_t blending, int32_t planeAlpha, int fence_fd, int format, s3c_fb_win_config &cfg); + void configureHandle(private_handle_t *handle, hwc_frect_t &sourceCrop, + hwc_rect_t &displayFrame, int32_t blending, int32_t planeAlpha, int fence_fd, s3c_fb_win_config &cfg); + void skipStaticLayers(hwc_display_contents_1_t *contents); + void determineSupportedOverlays(hwc_display_contents_1_t *contents); + void determineBandwidthSupport(hwc_display_contents_1_t *contents); + void assignWindows(hwc_display_contents_1_t *contents); + bool assignGscLayer(hwc_layer_1_t &layer, int index, int nextWindow); + void postGscOtf(hwc_layer_1_t &layer, struct s3c_fb_win_config *config, int win_map, int index); + void handleStaticLayers(hwc_display_contents_1_t *contents, struct s3c_fb_win_config_data &win_data, int tot_ovly_wins); + void cleanupGscs(); + int handleWindowUpdate(hwc_display_contents_1_t *contents, struct s3c_fb_win_config *config); + unsigned int getLayerRegion(hwc_layer_1_t &layer, hwc_rect &rect_area, uint32_t regionType); + + virtual void determineYuvOverlay(hwc_display_contents_1_t *contents); + virtual int 
postGscM2M(hwc_layer_1_t &layer, struct s3c_fb_win_config *config, int win_map, int index, bool isBottom); + virtual void forceYuvLayersToFb(hwc_display_contents_1_t *contents); + virtual void configureOverlay(hwc_layer_1_t *layer, s3c_fb_win_config &cfg); + virtual void configureDummyOverlay(hwc_layer_1_t *layer, s3c_fb_win_config &cfg); + virtual bool isOverlaySupported(hwc_layer_1_t &layer, size_t i); + virtual void refreshGscUsage(hwc_layer_1_t &layer); + virtual int postFrame(hwc_display_contents_1_t *contents); + virtual int winconfigIoctl(s3c_fb_win_config_data *win_data); + virtual int waitForRenderFinish(buffer_handle_t *handle, int buffers); + virtual void handleOffscreenRendering(hwc_layer_1_t &layer, hwc_display_contents_1_t *contents, int index); + virtual int getMPPForUHD(hwc_layer_1_t &layer); + virtual int getRGBMPPIndex(int index); + virtual bool multipleRGBScaling(int format); + virtual void checkTVBandwidth(); +}; + +#endif diff --git a/libdisplay/NOTICE b/libdisplay/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libdisplay/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libexynosutils/Android.mk b/libexynosutils/Android.mk new file mode 100644 index 0000000..12bb080 --- /dev/null +++ b/libexynosutils/Android.mk @@ -0,0 +1,38 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils + +LOCAL_CFLAGS += -DEXYNOS_PLATFORM_ON_ANDROID + +LOCAL_C_INCLUDES := $(LOCAL_PATH) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../include +LOCAL_C_INCLUDES += framework/base/include + +LOCAL_SRC_FILES := ExynosMutex.cpp \ + Exynos_log.c + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libexynosutils + +LOCAL_SRC_FILES += exynos_format_v4l2.c +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../include + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libexynosutils/ExynosMutex.cpp b/libexynosutils/ExynosMutex.cpp new file mode 100644 index 0000000..b310b94 --- /dev/null +++ b/libexynosutils/ExynosMutex.cpp @@ -0,0 +1,273 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+/*!
+ * \file ExynosMutex.cpp
+ * \brief source file for ExynosMutex
+ * \author Sangwoo, Park(sw5771.park@samsung.com)
+ * \date 2011/06/15
+ *
+ * Revision History:
+ * - 2010/06/15 : Sangwoo, Park(sw5771.park@samsung.com) \n
+ *   Initial version
+ *
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ExynosMutex"
+/* The original #include directives lost their header names in this import;
+ * the headers below are assumed from what the file uses (ALOG*, android::Mutex, libc). */
+#include <cutils/log.h>
+
+#include <utils/threads.h>
+using namespace android;
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <fcntl.h>
+
+#include "ExynosMutex.h"
+
+//#define EXYNOS_MUTEX_DEBUG
+
+ExynosMutex::ExynosMutex()
+{
+    m_mutex = NULL;
+    m_flagCreate = false;
+    m_type = TYPE_BASE;
+    memset(m_name, 0, 128);
+}
+
+ExynosMutex::~ExynosMutex()
+{
+    if (m_flagCreate == true)
+        this->destroy();
+}
+
+bool ExynosMutex::create(int type, char* name)
+{
+    if (m_flagCreate == true) {
+        ALOGE("%s::Already created", __func__);
+        return false;
+    }
+
+    int androidMutexType = 0;
+
+    m_type = TYPE_BASE;
+
+    switch (type) {
+    case TYPE_PRIVATE:
+        androidMutexType = Mutex::PRIVATE;
+        break;
+    case TYPE_SHARED:
+        androidMutexType = Mutex::SHARED;
+        break;
+    default:
+        ALOGE("%s::unmatched type(%d) fail", __func__, type);
+        return false;
+    }
+
+    m_mutex = new Mutex(androidMutexType, name);
+    if (m_mutex == NULL) {
+        ALOGE("%s::Mutex create fail", __func__);
+        return false;
+    }
+
+    m_type = type;
+    strncpy(m_name, name, 128 - 1);
+
+    m_flagCreate = true;
+
+    return true;
+}
+
+void ExynosMutex::destroy(void)
+{
+    if (m_flagCreate == false) {
+        ALOGE("%s::Not yet created", __func__);
+        return;
+    }
+
+    if (m_mutex)
+        delete ((Mutex *)m_mutex);
+    m_mutex = NULL;
+
+    m_flagCreate = false;
+}
+
+bool ExynosMutex::getCreatedStatus(void)
+{
+    return m_flagCreate;
+}
+
+bool ExynosMutex::lock(void)
+{
+    if (m_flagCreate == false) {
+        ALOGE("%s::Not yet created", __func__);
+        return false;
+    }
+
+#ifdef EXYNOS_MUTEX_DEBUG
+    ALOGD("%s::%s'lock() start", __func__, m_name);
+#endif
+
+    if (((Mutex *)m_mutex)->lock() != 0) {
+        ALOGE("%s::m_mutex->lock() fail", __func__);
+        return false;
+    }
+
+#ifdef EXYNOS_MUTEX_DEBUG
+    ALOGD("%s::%s'lock() end", __func__, m_name);
+#endif
+
+    return true;
+}
+
+bool ExynosMutex::unLock(void)
+{
+    if (m_flagCreate == false) {
+        ALOGE("%s::Not yet created", __func__);
+        return false;
+    }
+
+#ifdef EXYNOS_MUTEX_DEBUG
+    ALOGD("%s::%s'unlock() start", __func__, m_name);
+#endif
+
+    ((Mutex *)m_mutex)->unlock();
+
+#ifdef EXYNOS_MUTEX_DEBUG
+    ALOGD("%s::%s'unlock() end", __func__, m_name);
+#endif
+
+    return true;
+}
+
+bool ExynosMutex::tryLock(void)
+{
+    if (m_flagCreate == false) {
+        ALOGE("%s::Not yet created", __func__);
+        return false;
+    }
+
+    int ret = 0;
+
+#ifdef EXYNOS_MUTEX_DEBUG
+    ALOGD("%s::%s'trylock() start", __func__, m_name);
+#endif
+
+    ret = ((Mutex *)m_mutex)->tryLock();
+
+#ifdef EXYNOS_MUTEX_DEBUG
+    ALOGD("%s::%s'trylock() end", __func__, m_name);
+#endif
+
+    return (ret == 0) ?
true : false; +} + +int ExynosMutex::getType(void) +{ + return m_type; +} + +void *exynos_mutex_create( + int type, + char *name) +{ + ExynosMutex *mutex = new ExynosMutex(); + + if (mutex->create(type, name) == false) { + ALOGE("%s::mutex->create() fail", __func__); + delete mutex; + mutex = NULL; + } + + return (void*)mutex; +} + +bool exynos_mutex_destroy( + void *handle) +{ + if (handle == NULL) { + ALOGE("%s::handle is null", __func__); + return false; + } + + if (((ExynosMutex *)handle)->getCreatedStatus() == true) + ((ExynosMutex *)handle)->destroy(); + + delete (ExynosMutex *)handle; + + return true; +} + +bool exynos_mutex_lock( + void *handle) +{ + if (handle == NULL) { + ALOGE("%s::handle is null", __func__); + return false; + } + + return ((ExynosMutex *)handle)->lock(); + +} + +bool exynos_mutex_unlock( + void *handle) +{ + if (handle == NULL) { + ALOGE("%s::handle is null", __func__); + return false; + } + + return ((ExynosMutex *)handle)->unLock(); + +} + +bool exynos_mutex_trylock( + void *handle) +{ + if (handle == NULL) { + ALOGE("%s::handle is null", __func__); + return false; + } + + return ((ExynosMutex *)handle)->tryLock(); + +} + +int exynos_mutex_get_type( + void *handle) +{ + if (handle == NULL) { + ALOGE("%s::handle is null", __func__); + return false; + } + + return ((ExynosMutex *)handle)->getType(); +} + +bool exynos_mutex_get_created_status( + void *handle) +{ + if (handle == NULL) { + ALOGE("%s::handle is null", __func__); + return false; + } + + return ((ExynosMutex *)handle)->getCreatedStatus(); +} + diff --git a/libexynosutils/ExynosMutex.h b/libexynosutils/ExynosMutex.h new file mode 100644 index 0000000..6ce7a2a --- /dev/null +++ b/libexynosutils/ExynosMutex.h @@ -0,0 +1,139 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +/*! + * \file ExynosMutex.h + * \brief header file for ExynosMutex + * \author Sangwoo, Park(sw5771.park@samsung.com) + * \date 2011/06/15 + * + * Revision History: + * - 2010/06/15 : Sangwoo, Park(sw5771.park@samsung.com) \n + * Initial version + * + */ + +#ifndef __EXYNOS_MUTEX_H__ +#define __EXYNOS_MUTEX_H__ + +#ifdef __cplusplus + +//! ExynosMutex +/*! + * \ingroup Exynos + */ +class ExynosMutex +{ +public: + enum TYPE { + TYPE_BASE = 0, + TYPE_PRIVATE, //!< within this process + TYPE_SHARED, //!< within whole system + TYPE_MAX, + }; + +public: + //! Constructor. + ExynosMutex(); + + //! Destructor + virtual ~ExynosMutex(); + + //! Create Mutex + bool create(int type, char* name); + + //! Destroy Mutex + void destroy(void); + + //! Get Mutex created status + bool getCreatedStatus(void); + + //! Lock Mutex + bool lock(void); + + //! Unlock Mutex + bool unLock(void); + + //! trylock Mutex + bool tryLock(void); + + //! Get Mutex type + int getType(void); + +private: + void *m_mutex; + bool m_flagCreate; + + int m_type; + char m_name[128]; + +public: + //! Autolock + /*! + * \ingroup ExynosMutex + */ + class Autolock { + public: + //! 
Lock on constructor + inline Autolock(ExynosMutex& mutex) : mLock(mutex) { mLock.lock(); } + + //! Lock on constructor + inline Autolock(ExynosMutex* mutex) : mLock(*mutex) { mLock.lock(); } + + //! Unlock on destructor + inline ~Autolock() { mLock.unLock(); } + private: + ExynosMutex& mLock; + }; +}; + +extern "C" { +#endif + +enum EXYNOS_MUTEX_TYPE { + EXYNOS_MUTEX_TYPE_BASE = 0, + EXYNOS_MUTEX_TYPE_PRIVATE, //!< within this process + EXYNOS_MUTEX_TYPE_SHARED, //!< within whole system + EXYNOS_MUTEX_TYPE_MAX, +}; + +void *exynos_mutex_create( + int type, + char *name); + +bool exynos_mutex_destroy( + void *handle); + +bool exynos_mutex_lock( + void *handle); + +bool exynos_mutex_unlock( + void *handle); + +bool exynos_mutex_trylock( + void *handle); + +int exynos_mutex_type( + void *handle); + +bool exynos_mutex_get_created_status( + void *handle); + +#ifdef __cplusplus +} +#endif + +#endif //__EXYNOS_MUTEX_H__ diff --git a/libexynosutils/Exynos_log.c b/libexynosutils/Exynos_log.c new file mode 100644 index 0000000..a8c96c0 --- /dev/null +++ b/libexynosutils/Exynos_log.c @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * + * @author Yongbae, Song(yb.song@samsung.com) + * + * @date 2012-04-02 + * + */ + +#include + +#include "Exynos_log.h" + +void Exynos_Log(EXYNOS_DEV_LOG_LEVEL logLevel, const char *tag, const char *msg, ...) +{ + va_list argptr; + + va_start(argptr, msg); + + switch (logLevel) { + case EXYNOS_DEV_LOG_DEBUG: + __android_log_vprint(ANDROID_LOG_DEBUG, tag, msg, argptr); + break; + case EXYNOS_DEV_LOG_WARNING: + __android_log_vprint(ANDROID_LOG_WARN, tag, msg, argptr); + break; + case EXYNOS_DEV_LOG_ERROR: + __android_log_vprint(ANDROID_LOG_ERROR, tag, msg, argptr); + break; + default: + __android_log_vprint(ANDROID_LOG_VERBOSE, tag, msg, argptr); + } + + va_end(argptr); +} diff --git a/libexynosutils/Exynos_log.h b/libexynosutils/Exynos_log.h new file mode 100644 index 0000000..8e90219 --- /dev/null +++ b/libexynosutils/Exynos_log.h @@ -0,0 +1,48 @@ +/* + * + * Copyright 2010 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * @file Exynos_log.h + * @brief + * @author Yongbae, Song(yb.songsamsung.com) + * @version 1.0.0 + * @history + * 2012.4.02 : Create + */ + +#ifndef EXYNOS_LOG +#define EXYNOS_LOG + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum +{ + EXYNOS_DEV_LOG_DEBUG, + EXYNOS_DEV_LOG_INFO, + EXYNOS_DEV_LOG_WARNING, + EXYNOS_DEV_LOG_ERROR +} EXYNOS_DEV_LOG_LEVEL; + +void Exynos_Log(EXYNOS_DEV_LOG_LEVEL logLevel, const char *tag, const char *msg, ...); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/libexynosutils/NOTICE b/libexynosutils/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libexynosutils/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libexynosutils/exynos_format_v4l2.c b/libexynosutils/exynos_format_v4l2.c new file mode 100644 index 0000000..fec1e71 --- /dev/null +++ b/libexynosutils/exynos_format_v4l2.c @@ -0,0 +1,438 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +/* + * file exynos5_format_v4l2.c + * author Sangwoo, Park(sw5771.park@samsung.com) + * date 2011/06/02 + * + * Revision History: + * - 2010/06/03 : Sangwoo, Park(sw5771.park@samsung.com) + * Initial version + * + * - 2011/12/07 : Hyeonmyeong Choi( hyeon.choi@samsung.com) + * Add V4L2_PIX_FMT_YVU420M + * + * - 2012/03/06 : shinwon lee(shinwon.lee@samsung.com) + * Merge to libexynosutils + * + */ + +#ifndef __EXYNOS5_FORMAT_V4L2_H__ +#define __EXYNOS5_FORMAT_V4L2_H__ + +//---------------------------------------------------------// +// Include +//---------------------------------------------------------// +#include +#include "exynos_format.h" +#include "s5p_fimc_v4l2.h" +#include +#include "videodev2.h" +#include "videodev2_exynos_media.h" + +int HAL_PIXEL_FORMAT_2_V4L2_PIX( + int hal_pixel_format) +{ + int v4l2_pixel_format = -1; + + switch (hal_pixel_format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + v4l2_pixel_format = V4L2_PIX_FMT_RGB32; + break; + + case HAL_PIXEL_FORMAT_RGB_888: + v4l2_pixel_format = V4L2_PIX_FMT_RGB24; + break; + + case HAL_PIXEL_FORMAT_RGB_565: + v4l2_pixel_format = V4L2_PIX_FMT_RGB565; + break; + + case HAL_PIXEL_FORMAT_BGRA_8888: + v4l2_pixel_format = V4L2_PIX_FMT_BGR32; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + v4l2_pixel_format = V4L2_PIX_FMT_YVU420M; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + v4l2_pixel_format = V4L2_PIX_FMT_YUV420M; + break; + + case HAL_PIXEL_FORMAT_YV12: + v4l2_pixel_format = V4L2_PIX_FMT_YVU420; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + v4l2_pixel_format = V4L2_PIX_FMT_YUV420; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + v4l2_pixel_format = V4L2_PIX_FMT_YUV420N; + break; + + case HAL_PIXEL_FORMAT_YCbCr_422_SP: + v4l2_pixel_format = V4L2_PIX_FMT_NV16; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + v4l2_pixel_format = V4L2_PIX_FMT_NV12; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + v4l2_pixel_format = V4L2_PIX_FMT_NV12N; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + v4l2_pixel_format = V4L2_PIX_FMT_NV12M; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B: + v4l2_pixel_format = V4L2_PIX_FMT_NV12N_10B; + break; + + case HAL_PIXEL_FORMAT_YCbCr_422_I: + v4l2_pixel_format = V4L2_PIX_FMT_YUYV; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_CbYCrY_422_I: + v4l2_pixel_format = V4L2_PIX_FMT_UYVY; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_SP: + v4l2_pixel_format = V4L2_PIX_FMT_NV61; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + v4l2_pixel_format = V4L2_PIX_FMT_NV21M; + break; + + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + v4l2_pixel_format = V4L2_PIX_FMT_NV21; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: +#ifdef USES_FIMC + v4l2_pixel_format = V4L2_PIX_FMT_NV12MT; +#else + v4l2_pixel_format = V4L2_PIX_FMT_NV12MT_16X16; +#endif + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED: + v4l2_pixel_format = V4L2_PIX_FMT_NV12NT; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_I: + v4l2_pixel_format = V4L2_PIX_FMT_YVYU; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_CrYCbY_422_I: + v4l2_pixel_format = V4L2_PIX_FMT_VYUY; + break; + + default: + ALOGE("%s::unmatched HAL_PIXEL_FORMAT color_space(0x%x)\n", + __func__, hal_pixel_format); + break; + } + + return v4l2_pixel_format; +} + +int V4L2_PIX_2_HAL_PIXEL_FORMAT( + int 
v4l2_pixel_format) +{ + int hal_pixel_format = -1; + + switch (v4l2_pixel_format) { + case V4L2_PIX_FMT_RGB32: + hal_pixel_format = HAL_PIXEL_FORMAT_RGBA_8888; + break; + + case V4L2_PIX_FMT_RGB24: + hal_pixel_format = HAL_PIXEL_FORMAT_RGB_888; + break; + + case V4L2_PIX_FMT_RGB565: + hal_pixel_format = HAL_PIXEL_FORMAT_RGB_565; + break; + + case V4L2_PIX_FMT_BGR32: + hal_pixel_format = HAL_PIXEL_FORMAT_BGRA_8888; + break; + + case V4L2_PIX_FMT_YUV420: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P; + break; + + case V4L2_PIX_FMT_YUV420N: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN; + break; + + case V4L2_PIX_FMT_YUV420M: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M; + break; + + case V4L2_PIX_FMT_YVU420: + hal_pixel_format = HAL_PIXEL_FORMAT_YV12; + break; + + case V4L2_PIX_FMT_YVU420M: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YV12_M; + break; + + case V4L2_PIX_FMT_NV16: + hal_pixel_format = HAL_PIXEL_FORMAT_YCbCr_422_SP; + break; + + case V4L2_PIX_FMT_NV12: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP; + break; + + case V4L2_PIX_FMT_NV12N: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN; + break; + + case V4L2_PIX_FMT_NV12M: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M; + break; + + case V4L2_PIX_FMT_NV21M: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + break; + + case V4L2_PIX_FMT_YUYV: + hal_pixel_format = HAL_PIXEL_FORMAT_YCbCr_422_I; + break; + + case V4L2_PIX_FMT_UYVY: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_CbYCrY_422_I; + break; + + case V4L2_PIX_FMT_NV21: + hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_420_SP; + break; + + case V4L2_PIX_FMT_NV12MT: + case V4L2_PIX_FMT_NV12MT_16X16: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED; + break; + + case V4L2_PIX_FMT_NV12NT: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED; + break; + + case V4L2_PIX_FMT_NV61: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_SP; + break; + + case V4L2_PIX_FMT_YVYU: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_I; + break; + + case V4L2_PIX_FMT_VYUY: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_CrYCbY_422_I; + break; + + case V4L2_PIX_FMT_NV12N_10B: + hal_pixel_format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B; + break; + + default: + ALOGE("%s::unmatched V4L2_PIX color_space(%d)\n", + __func__, v4l2_pixel_format); + break; + } + + return hal_pixel_format; +} + +int NUM_PLANES(int hal_pixel_format) +{ + switch(hal_pixel_format) { + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + return 3; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + return 2; + break; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_YCbCr_422_I: + case HAL_PIXEL_FORMAT_YCbCr_422_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_I: + case HAL_PIXEL_FORMAT_EXYNOS_CbYCrY_422_I: + case HAL_PIXEL_FORMAT_EXYNOS_CrYCbY_422_I: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + return 1; + default: 
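+        /*
+         * Formats not handled above (e.g. the RGB formats) default to a
+         * single plane.  Callers typically pair this with FRAME_SIZE()
+         * when sizing a single allocation; an illustrative sketch, not
+         * part of the original source:
+         *
+         *   int planes = NUM_PLANES(fmt);
+         *   unsigned int bytes = FRAME_SIZE(fmt, width, height);
+         */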
+ return 1; + } +} + +unsigned int FRAME_SIZE( + int hal_pixel_format, + int width, + int height) +{ + unsigned int frame_size = 0; + unsigned int size = 0; + + switch (hal_pixel_format) { + // 16bpp + case HAL_PIXEL_FORMAT_RGB_565: + frame_size = GET_16BPP_FRAME_SIZE(width, height); + break; + + // 24bpp + case HAL_PIXEL_FORMAT_RGB_888: + frame_size = GET_24BPP_FRAME_SIZE(width, height); + break; + + // 32bpp + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_BGRA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + frame_size = GET_32BPP_FRAME_SIZE(width, height); + break; + + // 12bpp + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P: + case HAL_PIXEL_FORMAT_YV12: + size = ALIGN(width, 16) * height; + frame_size = size + ALIGN(width / 2, 16) * height; + break; + + + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + frame_size = width * height * 3 / 2; + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED: + frame_size = (ALIGN_UP(width, 16) * ALIGN_UP(height, 16) + 256) + + (ALIGN_UP((ALIGN_UP(width, 16) * (ALIGN_UP(height, 16) / 2) + 256), 16)); + break; + + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + frame_size = (ALIGN_UP(width, 16) * ALIGN_UP(height, 16) + 256) + + 2 * (ALIGN_UP((ALIGN_UP(width / 2, 16) * (ALIGN_UP(height, 16) / 2) + 256), 16)); + break; + + // 16bpp + case HAL_PIXEL_FORMAT_YCbCr_422_SP: + case HAL_PIXEL_FORMAT_YCbCr_422_I: + case HAL_PIXEL_FORMAT_EXYNOS_CbYCrY_422_I: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_I: + case HAL_PIXEL_FORMAT_EXYNOS_CrYCbY_422_I: + frame_size = GET_16BPP_FRAME_SIZE(width, height); + break; + + default: + ALOGD("%s::no matching source colorformat(0x%x), width(%d), h(%d) fail\n", + __func__, hal_pixel_format, width, height); + break; + } + + return frame_size; +} + +int V4L2_PIX_2_YUV_INFO(unsigned int v4l2_pixel_format, unsigned int * bpp, unsigned int * planes) +{ + switch (v4l2_pixel_format) { + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + case V4L2_PIX_FMT_NV12N: + case V4L2_PIX_FMT_NV12NT: + case V4L2_PIX_FMT_YUV420: + case V4L2_PIX_FMT_YVU420: + case V4L2_PIX_FMT_YUV420N: + *bpp = 12; + *planes = 1; + break; + + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV21M: + case V4L2_PIX_FMT_NV12MT: + case V4L2_PIX_FMT_NV12MT_16X16: + *bpp = 12; + *planes = 2; + break; + + case V4L2_PIX_FMT_YUV420M: + case V4L2_PIX_FMT_YVU420M: + *bpp = 12; + *planes = 3; + break; + + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_YVYU: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_VYUY: + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + *bpp = 16; + *planes = 1; + break; + break; + default: + return -1; + break; + } + + return 0; +} + +int get_yuv_bpp(unsigned int v4l2_pixel_format) +{ + unsigned int bpp, planes; + + if (V4L2_PIX_2_YUV_INFO(v4l2_pixel_format, &bpp, &planes) < 0) + bpp = -1; + + return bpp; +} + +int get_yuv_planes(unsigned int v4l2_pixel_format) +{ + unsigned int bpp, planes; + + if (V4L2_PIX_2_YUV_INFO(v4l2_pixel_format, &bpp, &planes) < 0) + planes = -1; + + return planes; +} +#endif diff --git a/libfimg4x/Android.mk b/libfimg4x/Android.mk new file mode 100644 index 0000000..881f3e1 --- /dev/null +++ b/libfimg4x/Android.mk @@ -0,0 +1,41 @@ +# +# Copyright 2012, Samsung Electronics Co. LTD +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +ifeq ($(BOARD_USES_SKIA_FIMGAPI),true) +LOCAL_MODULE_TAGS := optional + +LOCAL_SRC_FILES:= \ + FimgApi.cpp \ + FimgExynos5.cpp + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../include \ + hardware/samsung_slsi/$(TARGET_SOC)/include \ + hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include + +LOCAL_SHARED_LIBRARIES:= liblog libutils libbinder + +LOCAL_MODULE:= libfimg + +LOCAL_PRELINK_MODULE := false + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + +endif diff --git a/libfimg4x/FimgApi.cpp b/libfimg4x/FimgApi.cpp new file mode 100644 index 0000000..2c524a4 --- /dev/null +++ b/libfimg4x/FimgApi.cpp @@ -0,0 +1,494 @@ +/* +** +** Copyright 2009 Samsung Electronics Co, Ltd. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +** +** +*/ + +#define LOG_NDEBUG 0 +#define LOG_TAG "SKIA" +#include + +#include "FimgApi.h" + +pthread_mutex_t s_g2d_lock = PTHREAD_MUTEX_INITIALIZER; + +struct blitinfo_table optbl[] = { + { (int)BLIT_OP_SOLID_FILL, "FILL" }, + { (int)BLIT_OP_CLR, "CLR" }, + { (int)BLIT_OP_SRC, "SRC" }, + { (int)BLIT_OP_DST, "DST" }, + { (int)BLIT_OP_SRC_OVER, "SRC_OVER" }, + { (int)BLIT_OP_DST_OVER, "DST_OVER" }, + { (int)BLIT_OP_SRC_IN, "SRC_IN" }, + { (int)BLIT_OP_DST_IN, "DST_IN" }, + { (int)BLIT_OP_SRC_OUT, "SRC_OUT" }, + { (int)BLIT_OP_DST_OUT, "DST_OUT" }, + { (int)BLIT_OP_SRC_ATOP, "SRC_ATOP" }, + { (int)BLIT_OP_DST_ATOP, "DST_ATOP" }, + { (int)BLIT_OP_XOR, "XOR" }, + { (int)BLIT_OP_ADD, "ADD" }, + { (int)BLIT_OP_MULTIPLY, "MULTIPLY" }, + { (int)BLIT_OP_SCREEN, "SCREEN" }, + { (int)BLIT_OP_DARKEN, "DARKEN" }, + { (int)BLIT_OP_LIGHTEN, "LIGHTEN" }, + { (int)BLIT_OP_DISJ_SRC_OVER, "DISJ_SRC_OVER" }, + { (int)BLIT_OP_DISJ_DST_OVER, "DISJ_DST_OVER" }, + { (int)BLIT_OP_DISJ_SRC_IN, "DISJ_SRC_IN" }, + { (int)BLIT_OP_DISJ_DST_IN, "DISJ_DST_IN" }, + { (int)BLIT_OP_DISJ_SRC_OUT, "DISJ_SRC_OUT" }, + { (int)BLIT_OP_DISJ_DST_OUT, "DISJ_DST_OUT" }, + { (int)BLIT_OP_DISJ_SRC_ATOP, "DISJ_SRC_ATOP" }, + { (int)BLIT_OP_DISJ_DST_ATOP, "DISJ_DST_ATOP" }, + { (int)BLIT_OP_DISJ_XOR, "DISJ_XOR" }, + { (int)BLIT_OP_CONJ_SRC_OVER, "CONJ_SRC_OVER" }, + { (int)BLIT_OP_CONJ_DST_OVER, "CONJ_DST_OVER" }, + { (int)BLIT_OP_CONJ_SRC_IN, "CONJ_SRC_IN" }, + { (int)BLIT_OP_CONJ_DST_IN, "CONJ_DST_IN" }, + { (int)BLIT_OP_CONJ_SRC_OUT, "CONJ_SRC_OUT" }, + { (int)BLIT_OP_CONJ_DST_OUT, "CONJ_DST_OUT" }, + { (int)BLIT_OP_CONJ_SRC_ATOP, "CONJ_SRC_ATOP" }, + { (int)BLIT_OP_CONJ_DST_ATOP, "CONJ_DST_ATOP" }, + { (int)BLIT_OP_CONJ_XOR, "CONJ_XOR" }, + { (int)BLIT_OP_USER_COEFF, "USER_COEFF" }, + { 
(int)BLIT_OP_END, "" }, +}; + +struct blitinfo_table repeat_tbl[] = { + { (int)NO_REPEAT, "NON" }, + { (int)REPEAT_NORMAL, "DEFAULT" }, + { (int)REPEAT_PAD, "PAD" }, + { (int)REPEAT_REFLECT, "REFLECT, MIRROR" }, + { (int)REPEAT_CLAMP, "CLAMP" }, +}; + +#ifndef REAL_DEBUG + void VOID_FUNC(const char *format, ...) + {} +#endif + +FimgApi::FimgApi() +{ + m_flagCreate = false; +} + +FimgApi::~FimgApi() +{ + if (m_flagCreate == true) + PRINT("%s::this is not Destroyed fail\n", __func__); +} + +bool FimgApi::Create(void) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto CREATE_DONE; + } + + if (m_flagCreate == true) { + PRINT("%s::Already Created fail\n", __func__); + goto CREATE_DONE; + } + + if (t_Create() == false) { + PRINT("%s::t_Create() fail\n", __func__); + goto CREATE_DONE; + } + + m_flagCreate = true; + + ret = true; + +CREATE_DONE : + + t_UnLock(); + + return ret; +} + +bool FimgApi::Destroy(void) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto DESTROY_DONE; + } + + if (m_flagCreate == false) { + PRINT("%s::Already Destroyed fail\n", __func__); + goto DESTROY_DONE; + } + + if (t_Destroy() == false) { + PRINT("%s::t_Destroy() fail\n", __func__); + goto DESTROY_DONE; + } + + m_flagCreate = false; + + ret = true; + +DESTROY_DONE : + + t_UnLock(); + + return ret; +} + +bool FimgApi::Stretch(struct fimg2d_blit *cmd) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto STRETCH_DONE; + } + + if (m_flagCreate == false) { + PRINT("%s::This is not Created fail\n", __func__); + goto STRETCH_DONE; + } + + if (t_Stretch(cmd) == false) { + goto STRETCH_DONE; + } + + ret = true; + +STRETCH_DONE : + + t_UnLock(); + + return ret; +} + +bool FimgApi::Sync(void) +{ + bool ret = false; + + if (m_flagCreate == false) { + PRINT("%s::This is not Created fail\n", __func__); + goto SYNC_DONE; + } + + if (t_Sync() == false) + goto SYNC_DONE; + + ret = true; + +SYNC_DONE : + + return ret; +} + +bool FimgApi::t_Create(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Destroy(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Stretch(struct fimg2d_blit *cmd) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Sync(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Lock(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_UnLock(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +//---------------------------------------------------------------------------// +// extern function +//---------------------------------------------------------------------------// +extern "C" int stretchFimgApi(struct fimg2d_blit *cmd) +{ + pthread_mutex_lock(&s_g2d_lock); + + FimgApi * fimgApi = createFimgApi(); + + if (fimgApi == NULL) { + PRINT("%s::createFimgApi() fail\n", __func__); + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi->Stretch(cmd) == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return 0; +} + +extern "C" int stretchFimgApi_fast(struct fimg2d_blit *cmd, unsigned 
long tmpbuf_addr, int tmpbuf_size) +{ + if (tmpbuf_addr == 0 || tmpbuf_size <= 0) + return stretchFimgApi(cmd); + + /* scaling & rotation only */ + if (cmd->param.rotate == ORIGIN || cmd->param.scaling.mode == NO_SCALING) + return stretchFimgApi(cmd); + + /* src & dst only */ + if (cmd->src == NULL || cmd->msk != NULL) + return stretchFimgApi(cmd); + + /* a(x)rgb8888 src only */ + if (cmd->src->fmt >= CF_RGB_565) + return stretchFimgApi(cmd); + + FimgApi * fimgApi = createFimgApi(); + + if (fimgApi == NULL) { + PRINT("%s::createFimgApi() fail\n", __func__); + return -1; + } + + bool is_scaledown, sr_w, sr_h; + struct fimg2d_image tmpbuf; + struct fimg2d_blit cmd1st, cmd2nd; + struct fimg2d_param *p; + + /* check is_scaledown */ + p = &cmd->param; + sr_w = p->scaling.src_w - p->scaling.dst_w; + sr_h = p->scaling.src_h - p->scaling.dst_h; + is_scaledown = (sr_w + sr_h > 0) ? true : false; + + if (is_scaledown) { + /* set temp buffer */ + tmpbuf.width = cmd->dst->rect.y2 - cmd->dst->rect.y1; + tmpbuf.height = cmd->dst->rect.x2 - cmd->dst->rect.x1; + tmpbuf.stride = tmpbuf.width * 4; + tmpbuf.order = cmd->src->order; + tmpbuf.fmt = cmd->src->fmt; + tmpbuf.addr.type = cmd->src->addr.type; + tmpbuf.addr.start = tmpbuf_addr; + tmpbuf.plane2.type = ADDR_NONE; + tmpbuf.rect.x1 = 0; + tmpbuf.rect.y1 = 0; + tmpbuf.rect.x2 = tmpbuf.width; + tmpbuf.rect.y2 = tmpbuf.height; + tmpbuf.need_cacheopr = false; + + /* 1st step : copy with scaling down */ + p = &cmd1st.param; + memcpy(p, &cmd->param, sizeof(cmd->param)); + p->rotate = ORIGIN; + p->g_alpha = 0xff; + p->dither = false; + cmd1st.op = BLIT_OP_SRC; + cmd1st.src = cmd->src; + cmd1st.dst = &tmpbuf; + cmd1st.msk = NULL; + cmd1st.tmp = NULL; + cmd1st.sync = BLIT_SYNC; + cmd1st.seq_no = cmd->seq_no; + + /* 2nd step : op with rotation */ + p = &cmd2nd.param; + memcpy(p, &cmd->param, sizeof(cmd->param)); + p->scaling.mode = NO_SCALING; + cmd2nd.op = cmd->op; + cmd2nd.src = &tmpbuf; + cmd2nd.dst = cmd->dst; + cmd2nd.msk = NULL; + cmd2nd.tmp = NULL; + cmd2nd.sync = BLIT_SYNC; + cmd2nd.seq_no = cmd->seq_no; + } else { + /* set temp buffer */ + tmpbuf.width = cmd->src->rect.y2 - cmd->src->rect.y1; + tmpbuf.height = cmd->src->rect.x2 - cmd->src->rect.x1; + tmpbuf.stride = tmpbuf.width * 4; + tmpbuf.order = cmd->src->order; + tmpbuf.fmt = cmd->src->fmt; + tmpbuf.addr.type = cmd->src->addr.type; + tmpbuf.addr.start = tmpbuf_addr; + tmpbuf.plane2.type = ADDR_NONE; + tmpbuf.rect.x1 = 0; + tmpbuf.rect.y1 = 0; + tmpbuf.rect.x2 = tmpbuf.width; + tmpbuf.rect.y2 = tmpbuf.height; + tmpbuf.need_cacheopr = false; + + /* 1st step : copy with rotation */ + p = &cmd1st.param; + memcpy(p, &cmd->param, sizeof(cmd->param)); + p->scaling.mode = NO_SCALING; + p->g_alpha = 0xff; + p->dither = false; + cmd1st.op = BLIT_OP_SRC; + cmd1st.src = cmd->src; + cmd1st.dst = &tmpbuf; + cmd1st.msk = NULL; + cmd1st.tmp = NULL; + cmd1st.sync = BLIT_SYNC; + cmd1st.seq_no = cmd->seq_no; + + /* 2nd step : op with scaling up */ + p = &cmd2nd.param; + memcpy(p, &cmd->param, sizeof(cmd->param)); + p->rotate = ORIGIN; + cmd2nd.op = cmd->op; + cmd2nd.src = &tmpbuf; + cmd2nd.dst = cmd->dst; + cmd2nd.msk = NULL; + cmd2nd.tmp = NULL; + cmd2nd.sync = BLIT_SYNC; + cmd2nd.seq_no = cmd->seq_no; + } + + /* 1st step blit */ + if (fimgApi->Stretch(&cmd1st) == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + return -1; + } + + /* 2nd step blit */ + if (fimgApi->Stretch(&cmd2nd) == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + return -1; + } + + if (fimgApi != NULL) + 
destroyFimgApi(fimgApi); + + return 0; +} + +extern "C" int SyncFimgApi(void) +{ + pthread_mutex_lock(&s_g2d_lock); + FimgApi * fimgApi = createFimgApi(); + if (fimgApi == NULL) { + PRINT("%s::createFimgApi() fail\n", __func__); + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi->Sync() == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return 0; +} + +void printDataBlit(char *title, const char *called, struct fimg2d_blit *cmd) +{ + SLOGI("%s (From %s)\n", title, called); + SLOGI(" sequence_no. = %u\n", cmd->seq_no); + SLOGI(" blit_op = %d(%s)\n", cmd->op, optbl[cmd->op].str); + SLOGI(" fill_color = %X\n", cmd->param.solid_color); + SLOGI(" global_alpha = %u\n", (unsigned int)cmd->param.g_alpha); + SLOGI(" PREMULT = %s\n", cmd->param.premult == PREMULTIPLIED ? "PREMULTIPLIED" : "NON-PREMULTIPLIED"); + SLOGI(" do_dither = %s\n", cmd->param.dither == true ? "dither" : "no-dither"); + SLOGI(" repeat = %d(%s)\n", cmd->param.repeat.mode, + repeat_tbl[cmd->param.repeat.mode].str); + + printDataBlitRotate(cmd->param.rotate); + + printDataBlitScale(&cmd->param.scaling); + + printDataBlitImage("SRC", cmd->src); + printDataBlitImage("DST", cmd->dst); + + if (cmd->src != NULL) + printDataBlitRect("SRC", &cmd->src->rect); + if (cmd->dst != NULL) + printDataBlitRect("DST", &cmd->dst->rect); +} + +void printDataBlitImage(const char *title, struct fimg2d_image *image) +{ + if (NULL != image) { + SLOGI(" Image_%s\n", title); + SLOGI(" addr = %X\n", image->addr.start); + SLOGI(" format = %d\n", image->fmt); + SLOGI(" size = (%d, %d)\n", image->width, image->height); + } else { + SLOGI(" Image_%s : NULL\n", title); + } +} + +void printDataBlitRect(const char *title, struct fimg2d_rect *rect) +{ + if (NULL != rect) { + SLOGI(" RECT_%s\n", title); + SLOGI(" (x1, y1) = (%d, %d)\n", rect->x1, rect->y1); + SLOGI(" (x2, y2) = (%d, %d)\n", rect->x2, rect->y2); + SLOGI(" (width, height) = (%d, %d)\n", rect->x2 - rect->x1, rect->y2 - rect->y1); + } else + SLOGI(" RECT_%s : NULL\n", title); +} + +void printDataBlitRotate(int rotate) +{ + SLOGI(" ROTATE : %d\n", rotate); +} + +void printDataBlitScale(struct fimg2d_scale *scaling) +{ + SLOGI(" SCALING\n"); + if (scaling->mode != 0) { + SLOGI(" scale_mode : %s\n", (scaling->mode == 1 ? "SCALING_NEAREST" : "SCALING_BILINEAR")); + SLOGI(" src : (src_w, src_h) = (%d, %d)\n", scaling->src_w, scaling->src_h); + SLOGI(" dst : (dst_w, dst_h) = (%d, %d)\n", scaling->dst_w, scaling->dst_h); + SLOGI(" scaling_factor : (scale_w, scale_y) = (%3.2f, %3.2f)\n", + (double)scaling->dst_w / scaling->src_w, (double)scaling->dst_h / scaling->src_h); + } else { + SLOGI(" scale_mode : NO_SCALING\n"); + } +} diff --git a/libfimg4x/FimgExynos5.cpp b/libfimg4x/FimgExynos5.cpp new file mode 100644 index 0000000..f96a778 --- /dev/null +++ b/libfimg4x/FimgExynos5.cpp @@ -0,0 +1,331 @@ +/* +** +** Copyright 2009 Samsung Electronics Co, Ltd. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+** See the License for the specific language governing permissions and +** limitations under the License. +** +** +*/ + +#define LOG_NDEBUG 0 +#define LOG_TAG "FimgExynos5" +#include + +#include "FimgExynos5.h" + +extern pthread_mutex_t s_g2d_lock; + +namespace android +{ +unsigned FimgV4x::m_curFimgV4xIndex = 0; +int FimgV4x::m_numOfInstance = 0; +FimgApi * FimgV4x::m_ptrFimgApiList[NUMBER_FIMG_LIST] = {NULL, }; + +//---------------------------------------------------------------------------// + +FimgV4x::FimgV4x() + : m_g2dFd(0), + m_g2dVirtAddr(NULL), + m_g2dSize(0), + m_g2dSrcVirtAddr(NULL), + m_g2dSrcSize(0), + m_g2dDstVirtAddr(NULL), + m_g2dDstSize(0) +{ + memset(&(m_g2dPoll), 0, sizeof(struct pollfd)); +} + +FimgV4x::~FimgV4x() +{ +} + +FimgApi *FimgV4x::CreateInstance() +{ + FimgApi *ptrFimg = NULL; + + for(int i = m_curFimgV4xIndex; i < NUMBER_FIMG_LIST; i++) { + if (m_ptrFimgApiList[i] == NULL) + m_ptrFimgApiList[i] = new FimgV4x; + + if (m_ptrFimgApiList[i]->FlagCreate() == false) { + if (m_ptrFimgApiList[i]->Create() == false) { + PRINT("%s::Create(%d) fail\n", __func__, i); + goto CreateInstance_End; + } + else + m_numOfInstance++; + } + + if (i < NUMBER_FIMG_LIST - 1) + m_curFimgV4xIndex = i + 1; + else + m_curFimgV4xIndex = 0; + + ptrFimg = m_ptrFimgApiList[i]; + goto CreateInstance_End; + } + +CreateInstance_End : + + return ptrFimg; +} + +void FimgV4x::DestroyInstance(FimgApi * ptrFimgApi) +{ + pthread_mutex_lock(&s_g2d_lock); + + for(int i = 0; i < NUMBER_FIMG_LIST; i++) { + if (m_ptrFimgApiList[i] != NULL && m_ptrFimgApiList[i] == ptrFimgApi) { + if (m_ptrFimgApiList[i]->FlagCreate() == true && m_ptrFimgApiList[i]->Destroy() == false) { + PRINT("%s::Destroy() fail\n", __func__); + } else { + FimgV4x * tempFimgV4x = (FimgV4x *)m_ptrFimgApiList[i]; + delete tempFimgV4x; + m_ptrFimgApiList[i] = NULL; + + m_numOfInstance--; + } + + break; + } + } + pthread_mutex_unlock(&s_g2d_lock); +} + +void FimgV4x::DestroyAllInstance(void) +{ + pthread_mutex_lock(&s_g2d_lock); + + for (int i = 0; i < NUMBER_FIMG_LIST; i++) { + if (m_ptrFimgApiList[i] != NULL) { + if (m_ptrFimgApiList[i]->FlagCreate() == true + && m_ptrFimgApiList[i]->Destroy() == false) { + PRINT("%s::Destroy() fail\n", __func__); + } else { + FimgV4x * tempFimgV4x = (FimgV4x *)m_ptrFimgApiList[i]; + delete tempFimgV4x; + m_ptrFimgApiList[i] = NULL; + } + } + } + pthread_mutex_unlock(&s_g2d_lock); +} + +bool FimgV4x::t_Create(void) +{ + bool ret = true; + + if (m_CreateG2D() == false) { + PRINT("%s::m_CreateG2D() fail \n", __func__); + + if (m_DestroyG2D() == false) + PRINT("%s::m_DestroyG2D() fail \n", __func__); + + ret = false; + } + + return ret; +} + +bool FimgV4x::t_Destroy(void) +{ + bool ret = true; + + if (m_DestroyG2D() == false) { + PRINT("%s::m_DestroyG2D() fail \n", __func__); + ret = false; + } + + return ret; +} + +bool FimgV4x::t_Stretch(struct fimg2d_blit *cmd) +{ +#ifdef CHECK_FIMGV4x_PERFORMANCE +#define NUM_OF_STEP (10) + StopWatch stopWatch("CHECK_FIMGV4x_PERFORMANCE"); + const char *stopWatchName[NUM_OF_STEP]; + nsecs_t stopWatchTime[NUM_OF_STEP]; + int stopWatchIndex = 0; +#endif // CHECK_FIMGV4x_PERFORMANCE + + if (m_DoG2D(cmd) == false) + goto STRETCH_FAIL; + +#ifdef G2D_NONE_BLOCKING_MODE + if (m_PollG2D(&m_g2dPoll) == false) { + PRINT("%s::m_PollG2D() fail\n", __func__); + goto STRETCH_FAIL; + } +#endif + +#ifdef CHECK_FIMGV4x_PERFORMANCE + m_PrintFimgV4xPerformance(src, dst, stopWatchIndex, stopWatchName, stopWatchTime); +#endif // CHECK_FIMGV4x_PERFORMANCE + + return true; + +STRETCH_FAIL: + 
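+    /*
+     * Reached when the FIMG2D_BITBLT_BLIT ioctl issued by m_DoG2D() fails,
+     * or (with G2D_NONE_BLOCKING_MODE) when m_PollG2D() times out waiting
+     * for the blitter to report completion.
+     */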
return false; + +} + +bool FimgV4x::t_Sync(void) +{ + if (m_PollG2D(&m_g2dPoll) == false) { + PRINT("%s::m_PollG2D() fail\n", __func__); + goto SYNC_FAIL; + } + return true; + +SYNC_FAIL: + return false; + +} + +bool FimgV4x::t_Lock(void) +{ + if (&m_lock != NULL) { + status_t ret = m_lock.lock(); + return true; + } + PRINT("%s::m_lock is NULL",__func__); + return false; +} + +bool FimgV4x::t_UnLock(void) +{ + if (&m_lock != NULL) { + m_lock.unlock(); + return true; + } + PRINT("%s::m_lock is NULL",__func__); + return false; +} + +bool FimgV4x::m_CreateG2D(void) +{ + void * mmap_base; + int val = 0; + + if (m_g2dFd != 0) { + PRINT("%s::m_g2dFd(%d) is not 0 fail\n", __func__, m_g2dFd); + return false; + } + +#ifdef G2D_NONE_BLOCKING_MODE + m_g2dFd = open(SEC_G2D_DEV_NAME, O_RDWR | O_NONBLOCK); +#else + m_g2dFd = open(SEC_G2D_DEV_NAME, O_RDWR); +#endif + if (m_g2dFd < 0) { + PRINT("%s::open(%s) fail(%s)\n", __func__, SEC_G2D_DEV_NAME, strerror(errno)); + m_g2dFd = 0; + return false; + } + val = fcntl(m_g2dFd, F_GETFD, 0); + if (val < 0) { + PRINT("%s::GETFD(%s) fail\n", __func__, SEC_G2D_DEV_NAME); + close(m_g2dFd); + m_g2dFd = 0; + return false; + } + val = fcntl(m_g2dFd, F_SETFD, val | FD_CLOEXEC); + if (val < 0) { + PRINT("%s::SETFD(%s) fail\n", __func__, SEC_G2D_DEV_NAME); + close(m_g2dFd); + m_g2dFd = 0; + return false; + } + + memset(&m_g2dPoll, 0, sizeof(m_g2dPoll)); + m_g2dPoll.fd = m_g2dFd; + m_g2dPoll.events = POLLOUT | POLLERR; + + return true; +} + +bool FimgV4x::m_DestroyG2D(void) +{ + if (m_g2dVirtAddr != NULL) { + munmap(m_g2dVirtAddr, m_g2dSize); + m_g2dVirtAddr = NULL; + m_g2dSize = 0; + } + + if (0 < m_g2dFd) + close(m_g2dFd); + + m_g2dFd = 0; + + return true; +} + +bool FimgV4x::m_DoG2D(struct fimg2d_blit *cmd) +{ + if (ioctl(m_g2dFd, FIMG2D_BITBLT_BLIT, cmd) < 0) + return false; + + return true; +} + +inline bool FimgV4x::m_PollG2D(struct pollfd * events) +{ +#define G2D_POLL_TIME (1000) + + int ret; + + ret = poll(events, 1, G2D_POLL_TIME); + + if (ret < 0) { + PRINT("%s::poll fail \n", __func__); + return false; + } + else if (ret == 0) { + PRINT("%s::No data in %d milli secs..\n", __func__, G2D_POLL_TIME); + return false; + } + + return true; +} + +//---------------------------------------------------------------------------// +// extern function +//---------------------------------------------------------------------------// +extern "C" struct FimgApi * createFimgApi() +{ + if (fimgApiAutoFreeThread == 0) + fimgApiAutoFreeThread = new FimgApiAutoFreeThread(); + else + fimgApiAutoFreeThread->SetOneMoreSleep(); + + return FimgV4x::CreateInstance(); +} + +extern "C" void destroyFimgApi(FimgApi * ptrFimgApi) +{ + // Dont' call DestroyInstance. +} + +extern "C" bool checkScaleFimgApi(Fimg *fimg) +{ + unsigned int i; + for (i = 0; i < sizeof(compare_size) / sizeof(compare_size[0]); i++) { + if ((fimg->srcW == compare_size[i][0]) && (fimg->srcH == compare_size[i][1]) && + (fimg->dstW == compare_size[i][2]) && (fimg->dstH == compare_size[i][3])) + return true; + } + return false; +} + +}; // namespace android diff --git a/libfimg4x/FimgExynos5.h b/libfimg4x/FimgExynos5.h new file mode 100644 index 0000000..530c9b4 --- /dev/null +++ b/libfimg4x/FimgExynos5.h @@ -0,0 +1,165 @@ +/* +** +** Copyright 2008, The Android Open Source Project +** Copyright 2009 Samsung Electronics Co, Ltd. All Rights Reserved. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +** +** +*/ + +#ifndef FIMG_EXYNOS5_H +#define FIMG_EXYNOS5_H + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "FimgApi.h" + +#include "sec_g2d_4x.h" + +namespace android +{ + +#define NUMBER_FIMG_LIST (1) +#define GET_RECT_SIZE(rect) ((rect->full_w) * (rect->h) * (rect->bytes_per_pixel)) +#define GET_REAL_SIZE(rect) ((rect->full_w) * (rect->h) * (rect->bytes_per_pixel)) +#define GET_START_ADDR(rect) (rect->virt_addr + ((rect->y * rect->full_w) * rect->bytes_per_pixel)) +#define SLEEP_TIME (100000) // 100 msec + +int compare_size[][4] = { {160, 97, 240, 146}, {228, 228, 266, 266}, {160, 97, 280, 170} }; + +//---------------------------------------------------------------------------// +// class FimgV4x : public FimgBase +//---------------------------------------------------------------------------// +class FimgV4x : public FimgApi +{ +private : + int m_g2dFd; + + unsigned char *m_g2dVirtAddr; + unsigned int m_g2dSize; + unsigned char *m_g2dSrcVirtAddr; + unsigned int m_g2dSrcSize; + unsigned char *m_g2dDstVirtAddr; + unsigned int m_g2dDstSize; + struct pollfd m_g2dPoll; + + Mutex m_lock; + + static unsigned m_curFimgV4xIndex; + static int m_numOfInstance; + + static FimgApi *m_ptrFimgApiList[NUMBER_FIMG_LIST]; + +protected : + FimgV4x(); + virtual ~FimgV4x(); + +public: + static FimgApi *CreateInstance(); + static void DestroyInstance(FimgApi *ptrFimgApi); + static void DestroyAllInstance(void); + +protected: + virtual bool t_Create(void); + virtual bool t_Destroy(void); + virtual bool t_Stretch(struct fimg2d_blit *cmd); + virtual bool t_Sync(void); + virtual bool t_Lock(void); + virtual bool t_UnLock(void); + +private: + bool m_CreateG2D(void); + bool m_DestroyG2D(void); + + bool m_DoG2D(struct fimg2d_blit *cmd); + + inline bool m_PollG2D(struct pollfd *events); + + inline int m_ColorFormatFimgApi2FimgHw(int colorFormat); +}; + +class FimgApiAutoFreeThread; + +static sp fimgApiAutoFreeThread = 0; + +class FimgApiAutoFreeThread : public Thread +{ +private: + bool mOneMoreSleep; + bool mDestroyed; + +public: + FimgApiAutoFreeThread(void): + Thread(false), + mOneMoreSleep(true), + mDestroyed(false) + { } + ~FimgApiAutoFreeThread(void) + { + if (mDestroyed == false) { + FimgV4x::DestroyAllInstance(); + mDestroyed = true; + } + } + + virtual void onFirstRef() + { + run("FimgApiAutoFreeThread", PRIORITY_BACKGROUND); + } + + virtual bool threadLoop() + { + + if (mOneMoreSleep == true) { + mOneMoreSleep = false; + usleep(SLEEP_TIME); + + return true; + } + else { + if (mDestroyed == false) { + FimgV4x::DestroyAllInstance(); + mDestroyed = true; + } + + fimgApiAutoFreeThread = 0; + + return false; + } + } + + void SetOneMoreSleep(void) + { + mOneMoreSleep = true; + } +}; + +}; // namespace android + +#endif // FIMG_EXYNOS5_H diff --git a/libfimg4x/NOTICE b/libfimg4x/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libfimg4x/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 
2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libfimg5x/Android.mk b/libfimg5x/Android.mk new file mode 100644 index 0000000..0589963 --- /dev/null +++ b/libfimg5x/Android.mk @@ -0,0 +1,42 @@ +# +# Copyright 2012, Samsung Electronics Co. LTD +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE_TAGS := optional + +LOCAL_SRC_FILES:= \ + FimgApi.cpp \ + FimgExynos5.cpp + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../include \ + hardware/samsung_slsi/$(TARGET_SOC)/include \ + hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include + +ifeq ($(filter 3.18, $(TARGET_LINUX_KERNEL_VERSION)), 3.18) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/kernel-3.18-headers +endif + +LOCAL_SHARED_LIBRARIES:= liblog libutils libbinder + +LOCAL_MODULE:= libfimg + +LOCAL_PRELINK_MODULE := false + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libfimg5x/FimgApi.cpp b/libfimg5x/FimgApi.cpp new file mode 100644 index 0000000..80b524b --- /dev/null +++ b/libfimg5x/FimgApi.cpp @@ -0,0 +1,793 @@ +/* +** +** Copyright 2009 Samsung Electronics Co, Ltd. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+** +** +*/ + +#define LOG_NDEBUG 0 +#define LOG_TAG "SKIA" +#include + +#include "FimgApi.h" + +#ifdef FIMG2D_USE_M2M1SHOT2 +#include "videodev2.h" +#include + +/* include freq leveling and compromise table */ +#include "sec_g2d_comp.h" +unsigned int fmt_m2m1shot1_format[] = { + V4L2_PIX_FMT_RGB565, + V4L2_PIX_FMT_ARGB32, + V4L2_PIX_FMT_ABGR32, +}; + +unsigned int xfer_m2m1shot2_format[] = { + M2M1SHOT2_BLEND_CLEAR, + M2M1SHOT2_BLEND_SRC, + M2M1SHOT2_BLEND_DST, + M2M1SHOT2_BLEND_SRCOVER, +}; + +unsigned int filter_m2m1shot2_format[] = { + M2M1SHOT2_SCFILTER_BILINEAR, + M2M1SHOT2_SCFILTER_BILINEAR, +}; + +unsigned int repeat_m2m1shot2_format[] = { + M2M1SHOT2_REPEAT_CLAMP, + M2M1SHOT2_REPEAT_REPEAT, + M2M1SHOT2_REPEAT_REFLECT, + M2M1SHOT2_REPEAT_NONE, +}; +#endif + +pthread_mutex_t s_g2d_lock = PTHREAD_MUTEX_INITIALIZER; + +struct blitinfo_table optbl[] = { + { (int)BLIT_OP_NONE, "NONE" }, + { (int)BLIT_OP_SOLID_FILL, "FILL" }, + { (int)BLIT_OP_CLR, "CLR" }, + { (int)BLIT_OP_SRC, "SRC" }, + { (int)BLIT_OP_DST, "DST" }, + { (int)BLIT_OP_SRC_OVER, "SRC_OVER" }, + { (int)BLIT_OP_DST_OVER, "DST_OVER" }, + { (int)BLIT_OP_SRC_IN, "SRC_IN" }, + { (int)BLIT_OP_DST_IN, "DST_IN" }, + { (int)BLIT_OP_SRC_OUT, "SRC_OUT" }, + { (int)BLIT_OP_DST_OUT, "DST_OUT" }, + { (int)BLIT_OP_SRC_ATOP, "SRC_ATOP" }, + { (int)BLIT_OP_DST_ATOP, "DST_ATOP" }, + { (int)BLIT_OP_XOR, "XOR" }, + { (int)BLIT_OP_ADD, "ADD" }, + { (int)BLIT_OP_MULTIPLY, "MULTIPLY" }, + { (int)BLIT_OP_SCREEN, "SCREEN" }, + { (int)BLIT_OP_DARKEN, "DARKEN" }, + { (int)BLIT_OP_LIGHTEN, "LIGHTEN" }, + { (int)BLIT_OP_DISJ_SRC_OVER, "DISJ_SRC_OVER" }, + { (int)BLIT_OP_DISJ_DST_OVER, "DISJ_DST_OVER" }, + { (int)BLIT_OP_DISJ_SRC_IN, "DISJ_SRC_IN" }, + { (int)BLIT_OP_DISJ_DST_IN, "DISJ_DST_IN" }, + { (int)BLIT_OP_DISJ_SRC_OUT, "DISJ_SRC_OUT" }, + { (int)BLIT_OP_DISJ_DST_OUT, "DISJ_DST_OUT" }, + { (int)BLIT_OP_DISJ_SRC_ATOP, "DISJ_SRC_ATOP" }, + { (int)BLIT_OP_DISJ_DST_ATOP, "DISJ_DST_ATOP" }, + { (int)BLIT_OP_DISJ_XOR, "DISJ_XOR" }, + { (int)BLIT_OP_CONJ_SRC_OVER, "CONJ_SRC_OVER" }, + { (int)BLIT_OP_CONJ_DST_OVER, "CONJ_DST_OVER" }, + { (int)BLIT_OP_CONJ_SRC_IN, "CONJ_SRC_IN" }, + { (int)BLIT_OP_CONJ_DST_IN, "CONJ_DST_IN" }, + { (int)BLIT_OP_CONJ_SRC_OUT, "CONJ_SRC_OUT" }, + { (int)BLIT_OP_CONJ_DST_OUT, "CONJ_DST_OUT" }, + { (int)BLIT_OP_CONJ_SRC_ATOP, "CONJ_SRC_ATOP" }, + { (int)BLIT_OP_CONJ_DST_ATOP, "CONJ_DST_ATOP" }, + { (int)BLIT_OP_CONJ_XOR, "CONJ_XOR" }, + { (int)BLIT_OP_USER_COEFF, "USER_COEFF" }, + { (int)BLIT_OP_END, "" }, +}; + +struct blitinfo_table repeat_tbl[] = { + { (int)NO_REPEAT, "NON" }, + { (int)REPEAT_NORMAL, "DEFAULT" }, + { (int)REPEAT_PAD, "PAD" }, + { (int)REPEAT_REFLECT, "REFLECT, MIRROR" }, + { (int)REPEAT_CLAMP, "CLAMP" }, +}; + +#ifndef REAL_DEBUG + void VOID_FUNC(__attribute__((__unused__)) const char *format, ...) 
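+    /*
+     * No-op logging stub compiled in when REAL_DEBUG is not defined;
+     * debug PRINT() output is presumably routed here and discarded.
+     */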
+ {} +#endif + +FimgApi::FimgApi() +{ + m_flagCreate = false; +} + +FimgApi::~FimgApi() +{ + if (m_flagCreate == true) + PRINT("%s::this is not Destroyed fail\n", __func__); +} + +bool FimgApi::Create(void) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto CREATE_DONE; + } + + if (m_flagCreate == true) { + PRINT("%s::Already Created fail\n", __func__); + goto CREATE_DONE; + } + + if (t_Create() == false) { + PRINT("%s::t_Create() fail\n", __func__); + goto CREATE_DONE; + } + + m_flagCreate = true; + + ret = true; + +CREATE_DONE : + + t_UnLock(); + + return ret; +} + +bool FimgApi::Destroy(void) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto DESTROY_DONE; + } + + if (m_flagCreate == false) { + PRINT("%s::Already Destroyed fail\n", __func__); + goto DESTROY_DONE; + } + + if (t_Destroy() == false) { + PRINT("%s::t_Destroy() fail\n", __func__); + goto DESTROY_DONE; + } + + m_flagCreate = false; + + ret = true; + +DESTROY_DONE : + + t_UnLock(); + + return ret; +} +#ifdef FIMG2D_USE_M2M1SHOT2 +bool FimgApi::Stretch_v5(struct m2m1shot2 *cmd) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto STRETCH_DONE; + } + + if (m_flagCreate == false) { + PRINT("%s::This is not Created fail\n", __func__); + goto STRETCH_DONE; + } + + if (t_Stretch_v5(cmd) == false) { + goto STRETCH_DONE; + } + + ret = true; + +STRETCH_DONE : + + t_UnLock(); + + return ret; +} +#endif +bool FimgApi::Stretch(struct fimg2d_blit *cmd) +{ + bool ret = false; + + if (t_Lock() == false) { + PRINT("%s::t_Lock() fail\n", __func__); + goto STRETCH_DONE; + } + + if (m_flagCreate == false) { + PRINT("%s::This is not Created fail\n", __func__); + goto STRETCH_DONE; + } + + if (t_Stretch(cmd) == false) { + goto STRETCH_DONE; + } + + ret = true; + +STRETCH_DONE : + + t_UnLock(); + + return ret; +} + +bool FimgApi::Sync(void) +{ + bool ret = false; + + if (m_flagCreate == false) { + PRINT("%s::This is not Created fail\n", __func__); + goto SYNC_DONE; + } + + if (t_Sync() == false) + goto SYNC_DONE; + + ret = true; + +SYNC_DONE : + + return ret; +} + +bool FimgApi::t_Create(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Destroy(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Stretch(__attribute__((__unused__)) struct fimg2d_blit *cmd) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} +#ifdef FIMG2D_USE_M2M1SHOT2 +bool FimgApi::t_Stretch_v5(__attribute__((__unused__)) struct m2m1shot2 *cmd) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} +#endif +bool FimgApi::t_Sync(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_Lock(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +bool FimgApi::t_UnLock(void) +{ + PRINT("%s::This is empty virtual function fail\n", __func__); + return false; +} + +//---------------------------------------------------------------------------// +// extern function +//---------------------------------------------------------------------------// +extern "C" int stretchFimgApi(struct fimg2d_blit *cmd) +{ + pthread_mutex_lock(&s_g2d_lock); + + FimgApi * fimgApi = createFimgApi(); + + if (fimgApi == NULL) { + PRINT("%s::createFimgApi() fail\n", __func__); + 
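+        /* Release the global G2D lock taken at the top of stretchFimgApi()
+           before reporting the failure to the caller. */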
pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi->Stretch(cmd) == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return 0; +} + +extern "C" int SyncFimgApi(void) +{ + pthread_mutex_lock(&s_g2d_lock); + FimgApi * fimgApi = createFimgApi(); + if (fimgApi == NULL) { + PRINT("%s::createFimgApi() fail\n", __func__); + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi->Sync() == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return 0; +} + +#ifdef FIMG2D_USE_M2M1SHOT2 +extern "C" int stretchFimgApi_v5(struct m2m1shot2 *cmd) +{ + pthread_mutex_lock(&s_g2d_lock); + + FimgApi * fimgApi = createFimgApi(); + + if (fimgApi == NULL) { + PRINT("%s::createFimgApi() fail\n", __func__); + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi->Stretch_v5(cmd) == false) { + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return -1; + } + + if (fimgApi != NULL) + destroyFimgApi(fimgApi); + + pthread_mutex_unlock(&s_g2d_lock); + return 0; +} + +#ifdef FIMGAPI_HAL_FREQLEVELING +int FimgApiFreqleveling(Fimg *fimg) +{ + static struct timeval prev; + static int prev_level = -1; + struct timeval current; + unsigned int size = (unsigned int)(fimg->clipR - fimg->clipL) * + (unsigned int)(fimg->clipB - fimg->clipT); + int result = MIN_LEVEL; + unsigned long time = 0; + + gettimeofday(¤t, NULL); + if ((current.tv_sec - prev.tv_sec) * 1000000 + + (current.tv_usec - prev.tv_usec) < 20000 && prev_level != -1) { + if (prev_level > 0) + prev_level--; + prev = current; + + return prev_level; + } + + for (int i = 0; i < MIN_LEVEL; i++) { + if (fimg_standard_size[i] < size) + result = i; + } + prev = current; + prev_level = result; + +#ifdef FIMGAPI_HAL_DEBUG + ALOGE("freq leveling : %d", result); +#endif + return result; +} +#endif + +#ifdef FIMGAPI_HAL_COMPROMISE +bool FimgApiCompromise(Fimg *fimg) +{ + struct compromise_param param; + + /* source format setting*/ + param.src_fmt = (fimg->srcColorFormat == 0)? 0 : 1; + param.dst_fmt = (fimg->dstColorFormat == 0)? 
0 : 1; + + /* scaling setting */ + if (fimg->srcW == fimg->dstW && fimg->srcH == fimg->dstH) + param.isScaling = 0; + else if (fimg->srcW * fimg->srcH < fimg->dstW * fimg->dstH) + param.isScaling = 1; + else + param.isScaling = 2; + + /* filter_mode setting */ + param.isFilter = fimg->isFilter; + + /* blending mode setting */ + if (fimg->xfermode == 1) + param.isSrcOver = 0; + else if (fimg->xfermode == 3) + param.isSrcOver = 1; + else + return false; + + param.clipW = (fimg->clipR - fimg->clipL) * 1.2; + param.clipH = (fimg->clipB - fimg->clipT) * 0.8; +#ifdef FIMGAPI_HAL_DEBUG + ALOGE("compromise [%d %d %d %d %d] [comp %d and %d]", param.src_fmt, param.dst_fmt, + param.isScaling, param.isFilter, param.isSrcOver, param.clipW * param.clipH, + comp_value[param.src_fmt][param.dst_fmt][param.isScaling][param.isFilter][param.isSrcOver]); +#endif + + if ((param.clipW * param.clipH) < comp_value[param.src_fmt][param.dst_fmt][param.isScaling][param.isFilter][param.isSrcOver]) + return false; + return true; +} +#endif + +void printDataBlitImage_v5(struct m2m1shot2_image* image) +{ + /* image feature */ + SLOGI("flags : %u", image->flags); + SLOGI("memory : %u", image->memory); + /* image.buffer : address and buffer size */ + + struct m2m1shot2_buffer &buffer = image->plane[0]; + SLOGI("address %x", buffer.userptr); + SLOGI("length %u", buffer.length); + + /* image.format : color format and coordinate */ + struct m2m1shot2_format &image_fmt = image->fmt; + SLOGI("width : %u", image_fmt.width); + SLOGI("height: %u", image_fmt.height); + SLOGI("format: %u", image_fmt.pixelformat); + + /* image.format : color format and coordinate */ + SLOGI("crop : %d, %d (%u x %u)", image_fmt.crop.left, + image_fmt.crop.top, image_fmt.crop.width, image_fmt.crop.height); + SLOGI("widnow : %d, %d (%u x %u)", image_fmt.window.left, + image_fmt.window.top, image_fmt.window.width, image_fmt.window.height); + /* image.extra : parameter (only source image) */ + struct m2m1shot2_extra &extra = image->ext; + SLOGI("scaler_filter : %u", extra.scaler_filter); + SLOGI("composite : %u",extra.composit_mode); + SLOGI("g_alpha : %u", extra.galpha); + SLOGI("repeat : %u", extra.xrepeat); +} + +void printDataBlit_v5(const char *title, int called, struct m2m1shot2 cmd) +{ + SLOGI("%s (from %d)\n", title, called); + SLOGI("cmd flag %x", cmd.flags); + SLOGI("- - - - - - - destination - - - - - -"); + printDataBlitImage_v5(&cmd.target); + SLOGI("- - - - - - - destination (source[0]-"); + printDataBlitImage_v5(&cmd.sources[0]); + SLOGI("- - - - - - - source - - - - - - - - "); + printDataBlitImage_v5(&cmd.sources[1]); +} + +void copy_m2m1shot2_image(struct m2m1shot2_image *dst, struct m2m1shot2_image *src) +{ + /* initialize the image */ + memset(dst, 0, sizeof(struct m2m1shot2_image)); + + /* image feature */ + dst->flags = src->flags; + dst->fence = src->fence; + dst->memory = src->memory; + dst->num_planes = src->num_planes; + + /* image.buffer : address and buffer size */ + struct m2m1shot2_buffer &d_buffer = dst->plane[0]; + struct m2m1shot2_buffer &s_buffer = src->plane[0]; + d_buffer.userptr = s_buffer.userptr; + d_buffer.length = s_buffer.length; + d_buffer.offset = s_buffer.offset; + + /* image.format : color format and coordinate */ + struct m2m1shot2_format &d_format = dst->fmt; + struct m2m1shot2_format &s_format = src->fmt; + d_format.width = s_format.width; + d_format.height = s_format.height; + d_format.pixelformat = s_format.pixelformat; + + d_format.crop.left = s_format.crop.left; + d_format.crop.top = s_format.crop.top; 
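Editor's note: FimgApiCompromise above gates the hardware path on how much work a blit represents. It classifies source/destination formats, scaling direction, filter and blend mode, fudges the clip area (width scaled by 1.2, height by 0.8), and only allows G2D when that area exceeds a tuned entry in comp_value[] from sec_g2d_comp.h. A generic standalone sketch of the same area-threshold dispatch follows; the threshold numbers and table shape are illustrative assumptions, not values from this patch:

#include <cstdint>
#include <cstdio>

// Hypothetical thresholds (pixels), indexed by [isScaling][isFilter];
// the real comp_value table in sec_g2d_comp.h has more dimensions.
static const uint32_t kMinHwArea[3][2] = {
    { 40000, 60000 },   // no scaling
    { 30000, 50000 },   // upscale
    { 35000, 55000 },   // downscale
};

// True when the blit is large enough to be worth the G2D setup cost.
static bool worthUsingG2d(uint32_t clipW, uint32_t clipH, int isScaling, bool isFilter) {
    uint32_t area = static_cast<uint32_t>(clipW * 1.2) *
                    static_cast<uint32_t>(clipH * 0.8);
    return area >= kMinHwArea[isScaling][isFilter ? 1 : 0];
}

int main() {
    std::printf("64x64 copy   -> %s\n", worthUsingG2d(64, 64, 0, false) ? "G2D" : "CPU");
    std::printf("512x512 blit -> %s\n", worthUsingG2d(512, 512, 1, true) ? "G2D" : "CPU");
    return 0;
}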
+ d_format.crop.width = s_format.crop.width; + d_format.crop.height = s_format.crop.height; + + d_format.window.left = s_format.window.left; + d_format.window.top = s_format.window.top; + d_format.window.width = s_format.window.width; + d_format.window.height = s_format.window.height; + + /* image.extra : parameter (only source image) */ + struct m2m1shot2_extra &d_extra = dst->ext; + struct m2m1shot2_extra &s_extra = src->ext; + d_extra.scaler_filter = s_extra.scaler_filter; + d_extra.fillcolor = s_extra.fillcolor; + d_extra.transform = s_extra.transform; + d_extra.composit_mode = s_extra.composit_mode; + d_extra.galpha_red = d_extra.galpha_green = + d_extra.galpha = d_extra.galpha_blue = s_extra.galpha; + d_extra.xrepeat = s_extra.xrepeat; + d_extra.yrepeat = s_extra.yrepeat; +} + +unsigned int scale_factor_to_fixed(float m) { + unsigned int value = m * (1 << 16); + return value & 0xFFFFFFFF; +} + +int requestFimgApi_v5(Fimg *fimg) +{ + struct m2m1shot2 cmd; + +#ifdef FIMGAPI_HAL_COMPROMISE + if (FimgApiCompromise(fimg) == false) + return -1; +#endif +#ifdef FIMGAPI_G2D_NEAREST_UNSUPPORT + if (fimg->isFilter == false) + return -1; +#endif + /* initialize the command */ + memset(&cmd, 0, sizeof(cmd)); + cmd.sources = new m2m1shot2_image[3]; + + /* m2m1shot2 */ + struct m2m1shot2_image &srcImage = cmd.sources[1]; + struct m2m1shot2_image &dstImage = cmd.sources[0]; + cmd.num_sources = 2; + cmd.flags = (fimg->isDither) & 0x1; /* dither | nonblock | error response */ + + /* initialize the image */ + memset(&srcImage, 0, sizeof(srcImage)); + memset(&dstImage, 0, sizeof(dstImage)); + + /* image feature */ + srcImage.flags = M2M1SHOT2_IMGFLAG_PREMUL_ALPHA | M2M1SHOT2_IMGFLAG_GLOBAL_ALPHA; + srcImage.fence = 0; /* no use */ + srcImage.memory = M2M1SHOT2_BUFTYPE_USERPTR; + srcImage.num_planes = 1; + + /* image.buffer : address and buffer size */ + struct m2m1shot2_buffer &s_buffer = srcImage.plane[0]; + s_buffer.userptr = (unsigned long) fimg->srcAddr; + s_buffer.length = s_buffer.payload = fimg->srcFH * fimg->srcFWStride; + s_buffer.offset = 0; + + /* image.format : color format and coordinate */ + struct m2m1shot2_format &s_format = srcImage.fmt; + s_format.width = fimg->srcFWStride / fimg->srcBPP; + s_format.height = fimg->srcFH; + s_format.pixelformat = fmt_m2m1shot1_format[fimg->srcColorFormat]; + + s_format.crop.left = fimg->srcX; + s_format.crop.top = fimg->srcY; + s_format.crop.width = fimg->srcW; + s_format.crop.height = fimg->srcH; + + s_format.window.left = fimg->dstX; + s_format.window.top = fimg->dstY; + s_format.window.width = fimg->dstW; + s_format.window.height = fimg->dstH; + + /* image.extra : parameter (only source image) */ + struct m2m1shot2_extra &s_extra = srcImage.ext; + if (fimg->matrixSw != 1.0f || fimg->matrixSh != 1.0f) { + /* caculate factor and enable factor */ + /* or not, draw from source size to destination size */ + s_extra.scaler_filter = M2M1SHOT2_SCFILTER_BILINEAR; + /* set the scaling ratio */ + float Sw = 1.0f / fimg->matrixSw; + float Sh = 1.0f / fimg->matrixSh; + s_extra.horizontal_factor = scale_factor_to_fixed(Sw); + s_extra.vertical_factor = scale_factor_to_fixed(Sh); + srcImage.flags |= M2M1SHOT2_IMGFLAG_XSCALE_FACTOR; + srcImage.flags |= M2M1SHOT2_IMGFLAG_YSCALE_FACTOR; + } else { + s_extra.scaler_filter = M2M1SHOT2_SCFILTER_NONE; + cmd.flags |= M2M1SHOT2_IMGFLAG_NO_RESCALING; + } + + s_extra.fillcolor = fimg->fillcolor; + s_extra.transform = 0; + s_extra.composit_mode = xfer_m2m1shot2_format[fimg->xfermode]; + s_extra.galpha_red = 
s_extra.galpha_green = + s_extra.galpha_blue = s_extra.galpha = fimg->alpha; + s_extra.xrepeat = s_extra.yrepeat = repeat_m2m1shot2_format[fimg->tileModeX]; + + /* image feature */ + dstImage.flags = M2M1SHOT2_IMGFLAG_PREMUL_ALPHA; + dstImage.fence = 0; + dstImage.memory = M2M1SHOT2_BUFTYPE_USERPTR; + dstImage.num_planes = 1; + + /* image.buffer : address and buffer size */ + struct m2m1shot2_buffer &d_buffer = dstImage.plane[0]; + d_buffer.userptr = (unsigned long) fimg->dstAddr; + d_buffer.length = d_buffer.payload = fimg->dstFH * fimg->dstFWStride; + d_buffer.offset = 0; + + /* image.format : color format and coordinate */ + struct m2m1shot2_format &d_format = dstImage.fmt; + d_format.width = fimg->dstFWStride / fimg->dstBPP; + d_format.height = fimg->dstFH; + d_format.pixelformat = fmt_m2m1shot1_format[fimg->dstColorFormat]; + + d_format.crop.left = d_format.window.left = fimg->clipL; + d_format.crop.top = d_format.window.top = fimg->clipT; + d_format.crop.width = d_format.window.width = fimg->clipR - fimg->clipL; + d_format.crop.height = d_format.window.height = fimg->clipB - fimg->clipT; + + /* image.extra : parameter (only source image) */ + struct m2m1shot2_extra &d_extra = dstImage.ext; + d_extra.scaler_filter = M2M1SHOT2_SCFILTER_NONE; + d_extra.fillcolor = fimg->fillcolor; + d_extra.transform = 0; + d_extra.composit_mode = M2M1SHOT2_BLEND_NONE; + d_extra.galpha_red = d_extra.galpha_green = + d_extra.galpha = d_extra.galpha_blue = 0x0; + d_extra.xrepeat = d_extra.yrepeat = M2M1SHOT2_REPEAT_NONE; + + copy_m2m1shot2_image(&cmd.target, &dstImage); + /* src img[0] need to set xfermode::src for drawing dst */ + d_extra.composit_mode = M2M1SHOT2_BLEND_SRC; + + /* freq supports 4 level as frequency and size*/ +#ifdef FIMGAPI_HAL_FREQLEVELING + static int prev_level[4]; + static int level_count = -1; + static int current_level = 4; + int level; + + level = FimgApiFreqleveling(fimg); + if (level_count == -1) { + for (int i = 0; i < 4; i++) + prev_level[i] = 4; + level_count++; + } + prev_level[level_count % 4] = level; + level_count++; + fimg->level = prev_level[0]; + for (int i = 1; i < 4; i++) { + if (prev_level[i] < fimg->level) + fimg->level = prev_level[i]; + } +#endif + +#ifdef FIMGAPI_HAL_DEBUG + printDataBlit_v5(__func__, fimg->called, cmd); +#endif + /* if it does not success, need to debug */ + if (stretchFimgApi_v5(&cmd) < 0) { + ALOGE("%s : g2d hardware operation failed", __func__); + return -1; + } + + return 0; +} +#endif +void printDataBlit(char *title, const char *called, struct fimg2d_blit *cmd) +{ + struct fimg2d_image *srcImage; + struct fimg2d_image *dstImage; + struct fimg2d_param *srcParam; + srcImage = cmd->src[1]; + dstImage = cmd->dst; + srcParam = &srcImage->param; + + SLOGI("%s (From %s)\n", title, called); + SLOGI(" sequence_no. = %u\n", cmd->seq_no); + SLOGI(" blit_op = %d(%s)\n", srcImage->op, optbl[srcImage->op].str); + SLOGI(" fill_color = %X\n", srcParam->solid_color); + SLOGI(" global_alpha = %u\n", (unsigned int)srcParam->g_alpha); + SLOGI(" PREMULT = %s\n", srcParam->premult == PREMULTIPLIED ? "PREMULTIPLIED" : "NON-PREMULTIPLIED"); + SLOGI(" do_dither = %s\n", cmd->dither == true ? 
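Editor's note: requestFimgApi_v5 above hands scale ratios to the driver as 16.16 fixed-point values: scale_factor_to_fixed multiplies the float by 2^16, and what gets encoded is the inverse ratio (1/matrixSw, 1/matrixSh). A standalone illustration of that encoding and its precision, independent of the driver:

#include <cstdint>
#include <cstdio>

// 16.16 fixed point: 16 integer bits, 16 fractional bits.
static uint32_t toFixed16_16(float m) {
    return static_cast<uint32_t>(m * (1 << 16));   // same formula as scale_factor_to_fixed
}

static float fromFixed16_16(uint32_t v) {
    return static_cast<float>(v) / (1 << 16);
}

int main() {
    // A 1.5x upscale is sent as the inverse ratio, as in requestFimgApi_v5.
    float ratio = 1.0f / 1.5f;
    uint32_t fx = toFixed16_16(ratio);
    std::printf("ratio=%f fixed=0x%08x decoded=%f\n",
                (double)ratio, (unsigned)fx, (double)fromFixed16_16(fx));
    // Precision floor: anything below 1/65536 truncates to zero.
    std::printf("0.00001 -> 0x%08x\n", (unsigned)toFixed16_16(0.00001f));
    return 0;
}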
"dither" : "no-dither"); + SLOGI(" repeat = %d(%s)\n", srcParam->repeat.mode, + repeat_tbl[srcParam->repeat.mode].str); + + printDataBlitRotate(srcParam->rotate); + + printDataBlitScale(&srcParam->scaling); + + printDataBlitImage("SRC", srcImage); + printDataBlitImage("DST", dstImage); + + if (srcImage != NULL) + printDataBlitRect("SRC", &srcImage->rect); + if (srcParam != NULL) + printDataBlitRect("SRC_CLIP(same as DST)", &srcParam->clipping); + if (dstImage != NULL) + printDataBlitRect("DST_CLIP", &dstImage->rect); +} + +void printDataBlitImage(const char *title, struct fimg2d_image *image) +{ + if (NULL != image) { + SLOGI(" Image_%s\n", title); + SLOGI(" addr = %lx\n", image->addr.start); + SLOGI(" format = %s\n", image->fmt == 0 ? "ARGB_8888" : "RGB_565"); + SLOGI(" size = (%d, %d)\n", image->width, image->height); + } else { + SLOGI(" Image_%s : NULL\n", title); + } +} + +void printDataBlitRect(const char *title, struct fimg2d_clip *clipping) +{ + if (NULL != clipping && clipping->enable == 1) { + SLOGI(" RECT_%s\n", title); + SLOGI(" (x1, y1) = (%d, %d)\n", clipping->x1, clipping->y1); + SLOGI(" (x2, y2) = (%d, %d)\n", clipping->x2, clipping->y2); + SLOGI(" (width, height) = (%d, %d)\n", clipping->x2 - clipping->x1, clipping->y2 - clipping->y1); + } else { + SLOGI(" RECT_%s : NULL\n", title); + } +} + +void printDataBlitRect(const char *title, struct fimg2d_rect *rect) +{ + if (NULL != rect) { + SLOGI(" RECT_%s\n", title); + SLOGI(" (x1, y1) = (%d, %d)\n", rect->x1, rect->y1); + SLOGI(" (x2, y2) = (%d, %d)\n", rect->x2, rect->y2); + SLOGI(" (width, height) = (%d, %d)\n", rect->x2 - rect->x1, rect->y2 - rect->y1); + } else { + SLOGI(" RECT_%s : NULL\n", title); + } +} + +void printDataBlitRotate(int rotate) +{ + SLOGI(" ROTATE : %d\n", rotate); +} + +void printDataBlitScale(struct fimg2d_scale *scaling) +{ + SLOGI(" SCALING\n"); + if (scaling->mode != 0) { + SLOGI(" scale_mode : %s\n", + (scaling->mode == 1 ? "SCALING_BILINEAR" : "NO_SCALING")); + SLOGI(" src : (src_w, src_h) = (%d, %d)\n", scaling->src_w, scaling->src_h); + SLOGI(" dst : (dst_w, dst_h) = (%d, %d)\n", scaling->dst_w, scaling->dst_h); + SLOGI(" scaling_factor : (scale_w, scale_y) = (%3.2f, %3.2f)\n", (double)scaling->dst_w / scaling->src_w, (double)scaling->dst_h / scaling->src_h); + } else { + SLOGI(" scale_mode : NO_SCALING\n"); + } +} diff --git a/libfimg5x/FimgExynos5.cpp b/libfimg5x/FimgExynos5.cpp new file mode 100644 index 0000000..2d5a865 --- /dev/null +++ b/libfimg5x/FimgExynos5.cpp @@ -0,0 +1,378 @@ +/* +** +** Copyright 2009 Samsung Electronics Co, Ltd. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+** +** +*/ + +#define LOG_NDEBUG 0 +#define LOG_TAG "FimgExynos5" +#include + +#include +#include "FimgExynos5.h" + +extern pthread_mutex_t s_g2d_lock; + +namespace android +{ +unsigned FimgV4x::m_curFimgV4xIndex = 0; +int FimgV4x::m_numOfInstance = 0; +FimgApi * FimgV4x::m_ptrFimgApiList[NUMBER_FIMG_LIST] = {NULL, }; + +//---------------------------------------------------------------------------// + +FimgV4x::FimgV4x() + : m_g2dFd(0), + m_g2dVirtAddr(NULL), + m_g2dSize(0), + m_g2dSrcVirtAddr(NULL), + m_g2dSrcSize(0), + m_g2dDstVirtAddr(NULL), + m_g2dDstSize(0) +{ + memset(&(m_g2dPoll), 0, sizeof(struct pollfd)); +} + +FimgV4x::~FimgV4x() +{ +} + +FimgApi *FimgV4x::CreateInstance() +{ + FimgApi *ptrFimg = NULL; + + for(int i = m_curFimgV4xIndex; i < NUMBER_FIMG_LIST; i++) { + if (m_ptrFimgApiList[i] == NULL) + m_ptrFimgApiList[i] = new FimgV4x; + + if (m_ptrFimgApiList[i]->FlagCreate() == false) { + if (m_ptrFimgApiList[i]->Create() == false) { + PRINT("%s::Create(%d) fail\n", __func__, i); + goto CreateInstance_End; + } + else + m_numOfInstance++; + } + + if (i < NUMBER_FIMG_LIST - 1) + m_curFimgV4xIndex = i + 1; + else + m_curFimgV4xIndex = 0; + + ptrFimg = m_ptrFimgApiList[i]; + goto CreateInstance_End; + } + +CreateInstance_End : + + return ptrFimg; +} + +void FimgV4x::DestroyInstance(FimgApi * ptrFimgApi) +{ + pthread_mutex_lock(&s_g2d_lock); + + for(int i = 0; i < NUMBER_FIMG_LIST; i++) { + if (m_ptrFimgApiList[i] != NULL && m_ptrFimgApiList[i] == ptrFimgApi) { + if (m_ptrFimgApiList[i]->FlagCreate() == true && m_ptrFimgApiList[i]->Destroy() == false) { + PRINT("%s::Destroy() fail\n", __func__); + } else { + FimgV4x * tempFimgV4x = (FimgV4x *)m_ptrFimgApiList[i]; + delete tempFimgV4x; + m_ptrFimgApiList[i] = NULL; + + m_numOfInstance--; + } + + break; + } + } + pthread_mutex_unlock(&s_g2d_lock); +} + +void FimgV4x::DestroyAllInstance(void) +{ + pthread_mutex_lock(&s_g2d_lock); + + for (int i = 0; i < NUMBER_FIMG_LIST; i++) { + if (m_ptrFimgApiList[i] != NULL) { + if (m_ptrFimgApiList[i]->FlagCreate() == true + && m_ptrFimgApiList[i]->Destroy() == false) { + PRINT("%s::Destroy() fail\n", __func__); + } else { + FimgV4x * tempFimgV4x = (FimgV4x *)m_ptrFimgApiList[i]; + delete tempFimgV4x; + m_ptrFimgApiList[i] = NULL; + } + } + } + pthread_mutex_unlock(&s_g2d_lock); +} + +bool FimgV4x::t_Create(void) +{ + bool ret = true; + + if (m_CreateG2D() == false) { + PRINT("%s::m_CreateG2D() fail \n", __func__); + + if (m_DestroyG2D() == false) + PRINT("%s::m_DestroyG2D() fail \n", __func__); + + ret = false; + } + + return ret; +} + +bool FimgV4x::t_Destroy(void) +{ + bool ret = true; + + if (m_DestroyG2D() == false) { + PRINT("%s::m_DestroyG2D() fail \n", __func__); + ret = false; + } + + return ret; +} +#ifdef FIMG2D_USE_M2M1SHOT2 +bool FimgV4x::t_Stretch_v5(struct m2m1shot2 *cmd) +{ +#ifdef CHECK_FIMGV4x_PERFORMANCE +#define NUM_OF_STEP (10) + StopWatch stopWatch("CHECK_FIMGV4x_PERFORMANCE"); + const char *stopWatchName[NUM_OF_STEP]; + nsecs_t stopWatchTime[NUM_OF_STEP]; + int stopWatchIndex = 0; +#endif // CHECK_FIMGV4x_PERFORMANCE + + if (m_DoG2D_v5(cmd) == false) + goto STRETCH_FAIL; + +#ifdef G2D_NONE_BLOCKING_MODE + if (m_PollG2D(&m_g2dPoll) == false) { + PRINT("%s::m_PollG2D() fail\n", __func__); + goto STRETCH_FAIL; + } +#endif + +#ifdef CHECK_FIMGV4x_PERFORMANCE + m_PrintFimgV4xPerformance(src, dst, stopWatchIndex, stopWatchName, stopWatchTime); +#endif // CHECK_FIMGV4x_PERFORMANCE + + return true; + +STRETCH_FAIL: + return false; + +} +#endif +bool FimgV4x::t_Stretch(struct 
fimg2d_blit *cmd) +{ +#ifdef CHECK_FIMGV4x_PERFORMANCE +#define NUM_OF_STEP (10) + StopWatch stopWatch("CHECK_FIMGV4x_PERFORMANCE"); + const char *stopWatchName[NUM_OF_STEP]; + nsecs_t stopWatchTime[NUM_OF_STEP]; + int stopWatchIndex = 0; +#endif // CHECK_FIMGV4x_PERFORMANCE + + if (m_DoG2D(cmd) == false) + goto STRETCH_FAIL; + +#ifdef G2D_NONE_BLOCKING_MODE + if (m_PollG2D(&m_g2dPoll) == false) { + PRINT("%s::m_PollG2D() fail\n", __func__); + goto STRETCH_FAIL; + } +#endif + +#ifdef CHECK_FIMGV4x_PERFORMANCE + m_PrintFimgV4xPerformance(src, dst, stopWatchIndex, stopWatchName, stopWatchTime); +#endif // CHECK_FIMGV4x_PERFORMANCE + + return true; + +STRETCH_FAIL: + return false; + +} + +bool FimgV4x::t_Sync(void) +{ + if (m_PollG2D(&m_g2dPoll) == false) { + PRINT("%s::m_PollG2D() fail\n", __func__); + goto SYNC_FAIL; + } + return true; + +SYNC_FAIL: + return false; + +} + +bool FimgV4x::t_Lock(void) +{ + if (&m_lock != NULL) { + status_t ret = m_lock.lock(); + return true; + } + PRINT("%s::m_lock is NULL",__func__); + return false; +} + +bool FimgV4x::t_UnLock(void) +{ + if (&m_lock != NULL) { + m_lock.unlock(); + return true; + } + PRINT("%s::m_lock is NULL",__func__); + return false; +} + +bool FimgV4x::m_CreateG2D(void) +{ + void * mmap_base; + int val = 0; + + if (m_g2dFd != 0) { + PRINT("%s::m_g2dFd(%d) is not 0 fail\n", __func__, m_g2dFd); + return false; + } + +#ifdef G2D_NONE_BLOCKING_MODE + m_g2dFd = open(SEC_G2D_DEV_NAME, O_RDWR | O_NONBLOCK); +#else + m_g2dFd = open(SEC_G2D_DEV_NAME, O_RDWR); +#endif + if (m_g2dFd < 0) { + PRINT("%s::open(%s) fail(%s)\n", __func__, SEC_G2D_DEV_NAME, strerror(errno)); + m_g2dFd = 0; + return false; + } + val = fcntl(m_g2dFd, F_GETFD, 0); + if (val < 0) { + PRINT("%s::GETFD(%s) fail\n", __func__, SEC_G2D_DEV_NAME); + close(m_g2dFd); + m_g2dFd = 0; + return false; + } + val = fcntl(m_g2dFd, F_SETFD, val | FD_CLOEXEC); + if (val < 0) { + PRINT("%s::SETFD(%s) fail\n", __func__, SEC_G2D_DEV_NAME); + close(m_g2dFd); + m_g2dFd = 0; + return false; + } + + memset(&m_g2dPoll, 0, sizeof(m_g2dPoll)); + m_g2dPoll.fd = m_g2dFd; + m_g2dPoll.events = POLLOUT | POLLERR; + + return true; +} + +bool FimgV4x::m_DestroyG2D(void) +{ + if (m_g2dVirtAddr != NULL) { + munmap(m_g2dVirtAddr, m_g2dSize); + m_g2dVirtAddr = NULL; + m_g2dSize = 0; + } + + if (0 < m_g2dFd) + close(m_g2dFd); + + m_g2dFd = 0; + + return true; +} +#ifdef FIMG2D_USE_M2M1SHOT2 +bool FimgV4x::m_DoG2D_v5(struct m2m1shot2 *cmd) +{ + if (ioctl(m_g2dFd, M2M1SHOT2_IOC_PROCESS, cmd) < 0) + return false; + /* support the error handling */ + if (cmd->flags & M2M1SHOT2_FLAG_ERROR) { + ALOGE("%s: hardware operation failed", __func__); + return false; + } + return true; +} +#endif +bool FimgV4x::m_DoG2D(struct fimg2d_blit *cmd) +{ + if (ioctl(m_g2dFd, FIMG2D_BITBLT_BLIT, cmd) < 0) + return false; + + /* wait to complte the blit */ + ioctl(m_g2dFd, FIMG2D_BITBLT_SYNC); + + return true; +} + +inline bool FimgV4x::m_PollG2D(struct pollfd * events) +{ +#define G2D_POLL_TIME (1000) + + int ret; + + ret = poll(events, 1, G2D_POLL_TIME); + + if (ret < 0) { + PRINT("%s::poll fail \n", __func__); + return false; + } + else if (ret == 0) { + PRINT("%s::No data in %d milli secs..\n", __func__, G2D_POLL_TIME); + return false; + } + + return true; +} + +//---------------------------------------------------------------------------// +// extern function +//---------------------------------------------------------------------------// +extern "C" struct FimgApi * createFimgApi() +{ + if (fimgApiAutoFreeThread == 0) + 
fimgApiAutoFreeThread = new FimgApiAutoFreeThread(); + else + fimgApiAutoFreeThread->SetOneMoreSleep(); + + return FimgV4x::CreateInstance(); +} + +extern "C" void destroyFimgApi(__attribute__((__unused__)) FimgApi * ptrFimgApi) +{ + // Dont' call DestroyInstance. +} + +extern "C" bool checkScaleFimgApi(Fimg *fimg) +{ + unsigned int i; + for (i = 0; i < sizeof(compare_size) / sizeof(compare_size[0]); i++) { + if ((fimg->srcW == compare_size[i][0]) && (fimg->srcH == compare_size[i][1]) && + (fimg->dstW == compare_size[i][2]) && (fimg->dstH == compare_size[i][3])) + return true; + } + return false; +} + +}; // namespace android diff --git a/libfimg5x/FimgExynos5.h b/libfimg5x/FimgExynos5.h new file mode 100644 index 0000000..03f2c66 --- /dev/null +++ b/libfimg5x/FimgExynos5.h @@ -0,0 +1,170 @@ +/* +** +** Copyright 2008, The Android Open Source Project +** Copyright 2009 Samsung Electronics Co, Ltd. All Rights Reserved. +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +** +** +*/ + +#ifndef FIMG_EXYNOS5_H +#define FIMG_EXYNOS5_H + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "FimgApi.h" + +#include "sec_g2d_5x.h" + +namespace android +{ + +#define NUMBER_FIMG_LIST (1) +#define GET_RECT_SIZE(rect) ((rect->full_w) * (rect->h) * (rect->bytes_per_pixel)) +#define GET_REAL_SIZE(rect) ((rect->full_w) * (rect->h) * (rect->bytes_per_pixel)) +#define GET_START_ADDR(rect) (rect->virt_addr + ((rect->y * rect->full_w) * rect->bytes_per_pixel)) +#define SLEEP_TIME (100000) // 100 msec + +int compare_size[][4] = { {160, 97, 240, 146} }; + +//---------------------------------------------------------------------------// +// class FimgV4x : public FimgBase +//---------------------------------------------------------------------------// +class FimgV4x : public FimgApi +{ +private : + int m_g2dFd; + + unsigned char *m_g2dVirtAddr; + unsigned int m_g2dSize; + unsigned char *m_g2dSrcVirtAddr; + unsigned int m_g2dSrcSize; + unsigned char *m_g2dDstVirtAddr; + unsigned int m_g2dDstSize; + struct pollfd m_g2dPoll; + + Mutex m_lock; + + static unsigned m_curFimgV4xIndex; + static int m_numOfInstance; + + static FimgApi *m_ptrFimgApiList[NUMBER_FIMG_LIST]; + +protected : + FimgV4x(); + virtual ~FimgV4x(); + +public: + static FimgApi *CreateInstance(); + static void DestroyInstance(FimgApi *ptrFimgApi); + static void DestroyAllInstance(void); + +protected: + virtual bool t_Create(void); + virtual bool t_Destroy(void); + virtual bool t_Stretch(struct fimg2d_blit *cmd); +#ifdef FIMG2D_USE_M2M1SHOT2 + virtual bool t_Stretch_v5(struct m2m1shot2 *cmd); +#endif + virtual bool t_Sync(void); + virtual bool t_Lock(void); + virtual bool t_UnLock(void); + +private: + bool m_CreateG2D(void); + bool m_DestroyG2D(void); + + bool m_DoG2D(struct fimg2d_blit *cmd); +#ifdef FIMG2D_USE_M2M1SHOT2 + bool m_DoG2D_v5(struct m2m1shot2 *cmd); +#endif + inline bool m_PollG2D(struct pollfd *events); + + inline int 
m_ColorFormatFimgApi2FimgHw(int colorFormat); +}; + +class FimgApiAutoFreeThread; + +static sp fimgApiAutoFreeThread = 0; + +class FimgApiAutoFreeThread : public Thread +{ +private: + bool mOneMoreSleep; + bool mDestroyed; + +public: + FimgApiAutoFreeThread(void): + Thread(false), + mOneMoreSleep(true), + mDestroyed(false) + { } + ~FimgApiAutoFreeThread(void) + { + if (mDestroyed == false) { + FimgV4x::DestroyAllInstance(); + mDestroyed = true; + } + } + + virtual void onFirstRef() + { + run("FimgApiAutoFreeThread", PRIORITY_BACKGROUND); + } + + virtual bool threadLoop() + { + + if (mOneMoreSleep == true) { + mOneMoreSleep = false; + usleep(SLEEP_TIME); + + return true; + } + else { + if (mDestroyed == false) { + FimgV4x::DestroyAllInstance(); + mDestroyed = true; + } + + fimgApiAutoFreeThread = 0; + + return false; + } + } + + void SetOneMoreSleep(void) + { + mOneMoreSleep = true; + } +}; + +}; // namespace android + +#endif // FIMG_EXYNOS5_H diff --git a/libfimg5x/NOTICE b/libfimg5x/NOTICE new file mode 100644 index 0000000..c1c71a2 --- /dev/null +++ b/libfimg5x/NOTICE @@ -0,0 +1,189 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
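Editor's note on lifetime, referring back to FimgExynos5.h above: destroyFimgApi() is deliberately a no-op, while createFimgApi() keeps a FimgApiAutoFreeThread alive and each new request calls SetOneMoreSleep(), so the FimgV4x instances are only torn down after roughly SLEEP_TIME of inactivity. A rough standalone sketch of that idle-timeout release pattern using std::thread; names and timing are illustrative only:

#include <atomic>
#include <chrono>
#include <cstdio>
#include <thread>

// One worker frees the shared resource once nobody has "pinged" it for a
// full sleep interval, mirroring FimgApiAutoFreeThread::threadLoop().
static std::atomic<bool> g_oneMoreSleep{true};

static void autoFreeLoop() {
    using namespace std::chrono_literals;
    while (g_oneMoreSleep.exchange(false)) {
        std::this_thread::sleep_for(100ms);   // stand-in for SLEEP_TIME
    }
    std::printf("idle: release hardware instances here\n");
}

int main() {
    std::thread autoFree(autoFreeLoop);
    g_oneMoreSleep = true;   // what SetOneMoreSleep() does on each new request
    autoFree.join();
    return 0;
}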
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/libg2d/Android.mk b/libg2d/Android.mk new file mode 100644 index 0000000..53d1cb1 --- /dev/null +++ b/libg2d/Android.mk @@ -0,0 +1,32 @@ +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils + +LOCAL_SRC_FILES := exynos_blender.cpp exynos_blender_obj.cpp libg2d.cpp + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libexynosg2d + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libg2d/NOTICE b/libg2d/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libg2d/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libg2d/exynos_blender.cpp b/libg2d/exynos_blender.cpp new file mode 100644 index 0000000..6d31cbb --- /dev/null +++ b/libg2d/exynos_blender.cpp @@ -0,0 +1,289 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file exynos_blender.cpp + * \brief User's API for Exynos Blender library + * \author Eunseok Choi (es10.choi@samsung.com) + * \date 2013/09/21 + * + * Revision History: + * - 2013.09.21 : Eunseok Choi (eunseok.choi@samsung.com) \n + * Create + * + */ +#include "exynos_blender.h" +#include "exynos_blender_obj.h" +#include "libg2d_obj.h" + +static CBlender *GetExynosBlender(bl_handle_t handle) +{ + if (handle == NULL) { + BL_LOGE("Handle is null"); + return NULL; + } + + CBlender *bl = reinterpret_cast(handle); + if (!bl->Valid()) { + BL_LOGE("Handle is invalid %p", handle); + return NULL; + } + + return bl; +} + +bl_handle_t exynos_bl_create(struct bl_property *prop) +{ + CBlender *bl; + + if (prop->devid == DEV_UNSPECIFIED) { + BL_LOGE("Reserved device id %d\n", prop->devid); + return NULL; + + } else if (prop->devid < DEVID_G2D_END) { + bl = new CFimg2d(prop->devid, prop->nonblock); + if (!bl) { + BL_LOGE("Failed to create Fimg2d handle\n"); + return NULL; + } + + if (!bl->Valid()) { + BL_LOGE("Fimg2d handle %p is not valid\n", bl); + delete bl; + return NULL; + } + return reinterpret_cast(bl); + + } else { + BL_LOGE("Uknown device id %d\n", prop->devid); + return NULL; + } +} + +void exynos_bl_destroy(bl_handle_t handle) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return; + + if (bl->DoStop()) { + BL_LOGE("Failed to stop Blender (handle %p)", handle); + return; + } + + delete bl; +} + +int exynos_bl_deactivate(bl_handle_t handle, bool deact) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->Deactivate(deact); +} + +int exynos_bl_set_color_fill( + bl_handle_t handle, + bool enable, + uint32_t color_argb8888) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetColorFill(enable, color_argb8888); +} + +int exynox_bl_set_rotate( + bl_handle_t handle, + enum BL_ROTATE rot, + bool hflip, + bool vflip) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetRotate(rot, hflip, vflip); +} + +int exynos_bl_set_blend( + bl_handle_t handle, + enum BL_OP_TYPE op, + bool premultiplied) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetBlend(op, premultiplied); +} + +int exynos_bl_set_galpha( + bl_handle_t handle, + bool enable, + unsigned char g_alpha) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetGlobalAlpha(enable, g_alpha); +} + +int exynos_bl_set_dither(bl_handle_t handle, bool enable) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetDither(enable); +} + +int exynos_bl_set_scale( + bl_handle_t handle, + enum BL_SCALE mode, + uint32_t src_w, + uint32_t dst_w, + uint32_t src_h, + uint32_t dst_h) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetScale(mode, src_w, dst_w, src_h, dst_h); +} + +int exynos_bl_set_repeat(bl_handle_t handle, enum BL_REPEAT mode) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetRepeat(mode); +} + +int exynos_bl_set_clip( + bl_handle_t handle, + bool enable, + uint32_t x, + uint32_t y, + uint32_t width, + uint32_t height) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetClipRect(enable, x, y, width, height); +} + +int exynos_bl_set_csc_spec( + bl_handle_t handle, + bool enable, + enum v4l2_colorspace space, + bool wide) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetCscSpec(enable, 
space, wide); +} + +int exynos_bl_set_src_format( + bl_handle_t handle, + unsigned int width, + unsigned int height, + unsigned int crop_x, + unsigned int crop_y, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetImageFormat(CBlender::SRC, width, height, + crop_x, crop_y, crop_width, crop_height, v4l2_colorformat); +} + +int exynos_bl_set_dst_format( + bl_handle_t handle, + unsigned int width, + unsigned int height, + unsigned int crop_x, + unsigned int crop_y, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetImageFormat(CBlender::DST, width, height, + crop_x, crop_y, crop_width, crop_height, v4l2_colorformat); +} + +int exynos_bl_set_src_addr( + bl_handle_t handle, + void *addr[BL_MAX_PLANES], + enum v4l2_memory type) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetAddr(CBlender::SRC, addr, type); +} + +int exynos_bl_set_dst_addr( + bl_handle_t handle, + void *addr[BL_MAX_PLANES], + enum v4l2_memory type) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + return bl->SetAddr(CBlender::DST, addr, type); +} + +int exynos_bl_do_blend(bl_handle_t handle) +{ + CBlender *bl = GetExynosBlender(handle); + if (!bl) + return -1; + + int ret = bl->DoStart(); + if (ret) + return ret; + + bl->DoStop(); + return 0; +} + +int exynos_bl_do_blend_fast(bl_handle_t handle) +{ + BL_LOGE("Unimplemented Operation (handle %p)", handle); + return -1; +} + diff --git a/libg2d/exynos_blender_obj.cpp b/libg2d/exynos_blender_obj.cpp new file mode 100644 index 0000000..18aa27a --- /dev/null +++ b/libg2d/exynos_blender_obj.cpp @@ -0,0 +1,184 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file exynos_blender_obj.cpp + * \brief source file for Exynos Blender library + * \author Eunseok Choi (es10.choi@samsung.com) + * \date 2013/09/21 + * + * Revision History: + * - 2013.09.21 : Eunseok Choi (eunseok.choi@samsung.com) \n + * Create + * + */ +#include +#include +#include +#include + +#include "exynos_blender.h" +#include "exynos_blender_obj.h" + +const char *CBlender::m_cszPortName[CBlender::NUM_PORTS] = { "source", "destination" }; + +int CBlender::SetColorFill(bool enable, uint32_t color_argb8888) +{ + m_Ctrl.fill.enable = enable; + m_Ctrl.fill.color_argb8888 = color_argb8888; + SetFlag(F_FILL); + return 0; +} + +int CBlender::SetRotate(BL_ROTATE rot, bool hflip, bool vflip) +{ + m_Ctrl.rot = rot; + m_Ctrl.hflip = hflip; + m_Ctrl.vflip = vflip; + SetFlag(F_ROTATE); + return 0; +} + +int CBlender::SetBlend(BL_OP_TYPE op, bool premultiplied) +{ + m_Ctrl.op = op; + m_Ctrl.premultiplied = premultiplied; + SetFlag(F_BLEND); + return 0; +} + +int CBlender::SetGlobalAlpha(bool enable, unsigned char g_alpha) +{ + m_Ctrl.global_alpha.enable = enable; + m_Ctrl.global_alpha.val = g_alpha; + SetFlag(F_GALPHA); + return 0; +} + +int CBlender::SetDither(bool enable) +{ + m_Ctrl.dither = enable; + SetFlag(F_DITHER); + return 0; +} + +int CBlender::SetBluescreen(BL_BLUESCREEN mode, uint32_t bg_color, uint32_t bs_color) +{ + m_Ctrl.bluescreen.mode = mode; + m_Ctrl.bluescreen.bg_color = bg_color; + m_Ctrl.bluescreen.bs_color = bs_color; + SetFlag(F_BLUSCR); + return 0; +} + +int CBlender::SetScale(BL_SCALE mode, + uint32_t src_w, uint32_t dst_w, uint32_t src_h, uint32_t dst_h) +{ + if (mode && (!src_w || !dst_w || !src_h || !dst_h)) { + BL_LOGE("Invalid zero scale ratio: %d %d %d %d", + src_w, dst_w, src_h, dst_h); + return -1; + } + + m_Ctrl.scale.mode = mode; + m_Ctrl.scale.src_w = src_w; + m_Ctrl.scale.dst_w = dst_w; + m_Ctrl.scale.src_h = src_h; + m_Ctrl.scale.dst_h = dst_h; + + SetFlag(F_SCALE); + return 0; +} + +int CBlender::SetRepeat(BL_REPEAT mode) +{ + m_Ctrl.repeat = mode; + SetFlag(F_REPEAT); + return 0; +} + +int CBlender::SetClipRect( + bool enable, + uint32_t x, + uint32_t y, + uint32_t width, + uint32_t height) +{ + m_Ctrl.clip.enable = enable; + m_Ctrl.clip.x = x; + m_Ctrl.clip.y = y; + m_Ctrl.clip.width = width; + m_Ctrl.clip.height = height; + + SetFlag(F_CLIP); + return 0; +} + +int CBlender::SetCscSpec(bool enable, enum v4l2_colorspace space, bool wide) +{ + if (enable) { + BL_LOGD("Following user-defined csc mode (enable=true)\n"); + } else { + BL_LOGD("Following driver default csc mode (enable=false)\n"); + } + + if (space != V4L2_COLORSPACE_SMPTE170M && space != V4L2_COLORSPACE_REC709) { + BL_LOGE("Invalid colorspace %d", space); + return -1; + } + + m_Ctrl.csc_spec.enable = enable; + m_Ctrl.csc_spec.space = space; + m_Ctrl.csc_spec.wide = wide; + + SetFlag(F_CSC_SPEC); + return 0; +} + +int CBlender::SetImageFormat( + BL_PORT port, + unsigned int width,unsigned int height, + unsigned int crop_x, unsigned int crop_y, + unsigned int crop_width, unsigned int crop_height, + unsigned int v4l2_colorformat) +{ + m_Frame.port[port].type = (port == SRC) ? 
SRC_BUFTYPE : DST_BUFTYPE; + m_Frame.port[port].color_format = v4l2_colorformat; + m_Frame.port[port].width = width; + m_Frame.port[port].height = height; + m_Frame.port[port].crop_x = crop_x; + m_Frame.port[port].crop_y = crop_y; + m_Frame.port[port].crop_width = crop_width; + m_Frame.port[port].crop_height = crop_height; + + SetFlag(F_SRC_FMT + port); + return 0; +} + +int CBlender::SetAddr( + BL_PORT port, + void *addr[BL_MAX_PLANES], + v4l2_memory type) +{ + for (int i = 0; i < BL_MAX_PLANES; i++) + m_Frame.port[port].addr[i] = addr[i]; + + m_Frame.port[port].memory = type; + + SetFlag(F_SRC_MEMORY + port); + return 0; +} diff --git a/libg2d/libg2d.cpp b/libg2d/libg2d.cpp new file mode 100644 index 0000000..0df0806 --- /dev/null +++ b/libg2d/libg2d.cpp @@ -0,0 +1,627 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file libg2d.cpp + * \brief source file for G2D library + * \author Eunseok Choi (es10.choi@samsung.com) + * \date 2013/09/21 + * + * Revision History: + * - 2013.09.21 : Eunseok Choi (eunseok.choi@samsung.com) \n + * Create + * + */ +#include +#include +#include +#include + +#include "exynos_blender.h" +#include "exynos_blender_obj.h" +#include "videodev2_exynos_media.h" +#include "exynos_v4l2.h" +#include "libg2d_obj.h" + +void CFimg2d::Initialize(BL_DEVID devid, bool nonblock) +{ + m_iDeviceID = devid; + int dev_num = m_iDeviceID - DEV_G2D0; + + snprintf(m_cszNode, BL_MAX_NODENAME, G2D_DEV_NODE "%d", G2D_NODE(dev_num)); + + m_fdBlender = exynos_v4l2_open(m_cszNode, + nonblock ? 
(O_RDWR | O_NONBLOCK) : (O_RDWR)); + if (m_fdBlender < 0) { + BL_LOGERR("Failed to open '%s'", m_cszNode); + return; + } + + unsigned int cap = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_OUTPUT_MPLANE | + V4L2_CAP_VIDEO_CAPTURE_MPLANE; + if (!exynos_v4l2_querycap(m_fdBlender, cap)) { + BL_LOGERR("Failed to query capabilities on '%s'", m_cszNode); + close(m_fdBlender); + m_fdBlender = -1; + } else { + m_fdValidate = -m_fdBlender; + } +} + +void CFimg2d::ResetPort(BL_PORT port) +{ + BL_LOGD("Current m_Flags 0x%lx\n", m_Flags); + + if (IsFlagSet(F_SRC_QBUF + port)) + DQBuf(static_cast(port)); + + if (IsFlagSet(F_SRC_STREAMON + port)) { + if (exynos_v4l2_streamoff(m_fdBlender, m_Frame.port[port].type)) { + BL_LOGERR("Failed STREAMOFF for the %s", m_cszPortName[port]); + } else { + BL_LOGD("VIDIC_STREAMOFF is successful for the %s", m_cszPortName[port]); + } + } + + if (IsFlagSet(F_SRC_REQBUFS + port)) { + v4l2_requestbuffers reqbufs; + memset(&reqbufs, 0, sizeof(reqbufs)); + reqbufs.type = m_Frame.port[port].type; + reqbufs.memory = m_Frame.port[port].memory; + if (exynos_v4l2_reqbufs(m_fdBlender, &reqbufs)) { + BL_LOGERR("Failed REQBUFS(0) for the %s", m_cszPortName[port]); + } else { + BL_LOGD("VIDIC_REQBUFS(0) is successful for the %s", m_cszPortName[port]); + } + } + + if (port == SRC && IsFlagSet(F_FILL) && IsFlagSet(F_SRC_MEMORY)) { + for (int i = 0; i < G2D_NUM_OF_PLANES; i++) { + if (m_Frame.port[SRC].addr[i]) { + free(m_Frame.port[SRC].addr[i]); + BL_LOGD("Succeeded free for source buffer %d plane\n", port); + } + } + } +} + +CFimg2d::CFimg2d(BL_DEVID devid, bool nonblock) +{ + Initialize(devid, nonblock); + if (Valid()) { + BL_LOGD("Succeeded opened '%s'. fd %d", m_cszNode, m_fdBlender); + } +} + +CFimg2d::~CFimg2d() +{ + if (m_fdBlender >= 0) + close(m_fdBlender); + m_fdBlender = -1; +} + +int CFimg2d::SetCtrl() +{ + if (!IsFlagSet(F_CTRL_ANY)) { + BL_LOGD("Skipped S_CTRL by No change"); + return 0; + } + + int ret; + + if (IsFlagSet(F_FILL)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_COLOR_FILL, m_Ctrl.fill.enable); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_COLOR_FILL"); + goto err; + } + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_SRC_COLOR, m_Ctrl.fill.color_argb8888); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_SRC_COLOR"); + goto err; + } + } + + if (IsFlagSet(F_ROTATE)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_ROTATE, m_Ctrl.rot); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_ROTATE"); + goto err; + } + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_HFLIP, m_Ctrl.hflip); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_HFLIP"); + goto err; + } + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_VFLIP, m_Ctrl.vflip); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_VFLIP"); + goto err; + } + } + + if (IsFlagSet(F_BLEND)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_BLEND_OP, m_Ctrl.op); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_BLEND_OP"); + goto err; + } + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_FMT_PREMULTI, m_Ctrl.premultiplied); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_FMT_PREMULTI"); + goto err; + } + } + + if (IsFlagSet(F_GALPHA)) { + if (m_Ctrl.global_alpha.enable) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_GLOBAL_ALPHA, m_Ctrl.global_alpha.val); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_GLOBAL_ALPHA"); + goto err; + } + } else { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_GLOBAL_ALPHA, 0xff); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_GLOBAL_ALPHA 0xff"); + goto err; 
+ } + } + } + + if (IsFlagSet(F_DITHER)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_DITH, m_Ctrl.dither); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_DITH"); + goto err; + } + } + + if (IsFlagSet(F_BLUSCR)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_BLUESCREEN, m_Ctrl.bluescreen.mode); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_BLUESCREEN"); + goto err; + } + if (m_Ctrl.bluescreen.mode) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_BG_COLOR, m_Ctrl.bluescreen.bg_color); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_BG_COLOR"); + goto err; + } + } + if (m_Ctrl.bluescreen.mode == BLUSCR) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_BS_COLOR, m_Ctrl.bluescreen.bs_color); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_BS_COLOR"); + goto err; + } + } + } + + if (IsFlagSet(F_SCALE)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_SCALE_MODE, m_Ctrl.scale.mode); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_SCALE_MODE"); + goto err; + } + + int Wratio = m_Ctrl.scale.src_w << 16 | m_Ctrl.scale.dst_w; + int Hratio = m_Ctrl.scale.src_h << 16 | m_Ctrl.scale.dst_h; + + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_SCALE_WIDTH, Wratio); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_SCALE_WIDTH"); + goto err; + } + + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_SCALE_HEIGHT, Hratio); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_SCALE_HEIGHT"); + goto err; + } + } + + if (IsFlagSet(F_REPEAT)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_REPEAT, m_Ctrl.repeat); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_REPEAT"); + goto err; + } + } + + if (IsFlagSet(F_CLIP)) { + int val = 0; + v4l2_rect clip_rect; + + if (m_Ctrl.clip.enable) { + clip_rect.left = m_Ctrl.clip.x; + clip_rect.top = m_Ctrl.clip.y; + clip_rect.width = m_Ctrl.clip.width; + clip_rect.height = m_Ctrl.clip.height; + + val = reinterpret_cast(&clip_rect); + } + + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_CLIP, val); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_CLIP"); + goto err; + } + } + + if (IsFlagSet(F_CSC_SPEC)) { + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_CSC_EQ_MODE, m_Ctrl.csc_spec.enable); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_CSC_EQ_MODE"); + goto err; + } + + if (m_Ctrl.csc_spec.enable) { + bool is_bt709 = (m_Ctrl.csc_spec.space == V4L2_COLORSPACE_REC709)? true : false; + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_CSC_EQ, is_bt709); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_CSC_EQ"); + goto err; + } + ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_CSC_RANGE, m_Ctrl.csc_spec.wide); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_CSC_RANGE"); + goto err; + } + } + } + + BL_LOGD("Succeeded S_CTRL flags(0x%lx)\n", m_Flags); + return 0; + +err: + BL_LOGE("Failed S_CTRL flags(0x%lx)\n", m_Flags); + return ret; +} + +int CFimg2d::SetFormat() +{ + if (!IsFlagSet(F_DST_FMT)) { + BL_LOGE("No found destination foramt\n"); + return -1; + } + + if (!IsFlagSet(F_FILL) && !IsFlagSet(F_SRC_FMT)) { + BL_LOGE("No found source foramt\n"); + return -1; + } + + //! 
dummy min buffer(16x16) for src color fill + if (IsFlagSet(F_FILL)) { + m_Frame.port[SRC].type = SRC_BUFTYPE; + m_Frame.port[SRC].color_format = m_Frame.port[DST].color_format; + m_Frame.port[SRC].width = 16; + m_Frame.port[SRC].height = 16; + m_Frame.port[SRC].crop_x = 0; + m_Frame.port[SRC].crop_y = 0; + m_Frame.port[SRC].crop_width = m_Frame.port[SRC].width; + m_Frame.port[SRC].crop_height = m_Frame.port[SRC].height; + SetFlag(F_SRC_FMT); + + for (int i = 0; i < G2D_NUM_OF_PLANES; i++) { + m_Frame.port[SRC].addr[i] = calloc(1, 16 * 16 * 4); + if (m_Frame.port[SRC].addr[i]) + BL_LOGD("Succeeded alloc for source color %d plane\n", i); + else { + BL_LOGE("Failed alloc for source color %d plane\n", i); + return -1; + } + } + + m_Frame.port[SRC].memory = V4L2_MEMORY_USERPTR; + + SetFlag(F_SRC_MEMORY); + } + + for (int port = 0; port < NUM_PORTS; port++) { + v4l2_format fmt; + fmt.type = m_Frame.port[port].type; + fmt.fmt.pix_mp.pixelformat = m_Frame.port[port].color_format; + fmt.fmt.pix_mp.width = m_Frame.port[port].width; + fmt.fmt.pix_mp.height = m_Frame.port[port].height; + + if (exynos_v4l2_s_fmt(m_fdBlender, &fmt) < 0) { + BL_LOGE("Failed S_FMT(fmt: %d, w:%d, h:%d) for the %s", + fmt.fmt.pix_mp.pixelformat, fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height, + m_cszPortName[port]); + return -1; + } + + // returned fmt.fmt.pix_mp.num_planes and fmt.fmt.pix_mp.plane_fmt[i].sizeimage + m_Frame.port[port].out_num_planes = fmt.fmt.pix_mp.num_planes; + + for (int i = 0; i < m_Frame.port[port].out_num_planes; i++) + m_Frame.port[port].out_plane_size[i] = fmt.fmt.pix_mp.plane_fmt[i].sizeimage; + + v4l2_crop crop; + crop.type = m_Frame.port[port].type; + crop.c.left = m_Frame.port[port].crop_x; + crop.c.top = m_Frame.port[port].crop_y; + crop.c.width = m_Frame.port[port].crop_width; + crop.c.height = m_Frame.port[port].crop_height; + + if (exynos_v4l2_s_crop(m_fdBlender, &crop) < 0) { + BL_LOGE("Failed S_CROP(fmt: %d, l:%d, t:%d, w:%d, h:%d) for the %s", + crop.type, crop.c.left, crop.c.top, crop.c.width, crop.c.height, + m_cszPortName[port]); + return -1; + } + + if (m_Frame.port[port].out_num_planes > G2D_NUM_OF_PLANES) { + BL_LOGE("Number of planes exceeds %d", m_Frame.port[port].out_num_planes); + return -1; + } + + BL_LOGD("Succeeded S_FMT and S_CROP for the %s", m_cszPortName[port]); + } + + return 0; +} + +int CFimg2d::ReqBufs() +{ + v4l2_requestbuffers reqbufs; + + for (int port = 0; port < NUM_PORTS; port++) { + if (!IsFlagSet(F_SRC_FMT + port)) { + BL_LOGE("No found format for the %s", m_cszPortName[port]); + return -1; + } + if (!IsFlagSet(F_SRC_MEMORY + port)) { + BL_LOGE("No found buffer for the %s", m_cszPortName[port]); + return -1; + } + + memset(&reqbufs, 0, sizeof(reqbufs)); + + reqbufs.type = m_Frame.port[port].type; + reqbufs.memory = m_Frame.port[port].memory; + reqbufs.count = 1; + + if (exynos_v4l2_reqbufs(m_fdBlender, &reqbufs) < 0) { + BL_LOGE("Failed REQBUFS for the %s", m_cszPortName[port]); + return -1; + } + + BL_LOGD("Succeeded REQBUFS for the %s", m_cszPortName[port]); + SetFlag(F_SRC_REQBUFS + port); + } + + return 0; +} + +int CFimg2d::QBuf() +{ + v4l2_buffer buffer; + v4l2_plane planes[G2D_NUM_OF_PLANES]; + + for (int port = 0; port < NUM_PORTS; port++) { + if (!IsFlagSet(F_SRC_REQBUFS + port)) { + BL_LOGE("No found reqbufs for the %s", m_cszPortName[port]); + return -1; + } + + memset(&buffer, 0, sizeof(buffer)); + memset(&planes, 0, sizeof(planes)); + + buffer.type = m_Frame.port[port].type; + buffer.memory = m_Frame.port[port].memory; + buffer.index = 0; + 
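// Queue exactly one buffer (index 0) per port; each plane address is +
// handed to the driver as a dma-buf fd or a userptr in the loop below. +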
buffer.length = m_Frame.port[port].out_num_planes; + + buffer.m.planes = planes; + for (unsigned long i = 0; i < buffer.length; i++) { + planes[i].length = m_Frame.port[port].out_plane_size[i]; + if (V4L2_TYPE_IS_OUTPUT(buffer.type)) + planes[i].bytesused = planes[i].length; + if (buffer.memory == V4L2_MEMORY_DMABUF) + planes[i].m.fd = reinterpret_cast(m_Frame.port[port].addr[i]); + else + planes[i].m.userptr = reinterpret_cast(m_Frame.port[port].addr[i]); + } + + if (exynos_v4l2_qbuf(m_fdBlender, &buffer) < 0) { + BL_LOGE("Failed QBUF for the %s", m_cszPortName[port]); + return -1; + } + + BL_LOGD("Succeeded QBUF for the %s", m_cszPortName[port]); + SetFlag(F_SRC_QBUF + port); + } + + return 0; +} + +int CFimg2d::DQBuf(BL_PORT port) +{ + v4l2_buffer buffer; + v4l2_plane plane[G2D_NUM_OF_PLANES]; + + if (!IsFlagSet(F_SRC_QBUF + port)) { + BL_LOGE("No found queued buffer for the %s", m_cszPortName[port]); + return -1; + } + + memset(&buffer, 0, sizeof(buffer)); + + buffer.type = m_Frame.port[port].type; + buffer.memory = m_Frame.port[port].memory; + + if (V4L2_TYPE_IS_MULTIPLANAR(buffer.type)) { + memset(plane, 0, sizeof(plane)); + + buffer.length = m_Frame.port[port].out_num_planes; + buffer.m.planes = plane; + } + + if (exynos_v4l2_dqbuf(m_fdBlender, &buffer) < 0 ) { + BL_LOGE("Failed DQBuf the %s", m_cszPortName[port]); + return -1; + } + + if (buffer.flags & V4L2_BUF_FLAG_ERROR) { + BL_LOGE("Error occurred while processing streaming data"); + return -1; + } + + ClearFlag(F_SRC_QBUF + port); + BL_LOGD("Succeeded VIDIOC_DQBUF for the %s", m_cszPortName[port]); + return 0; +} + +int CFimg2d::DQBuf() +{ + for (int port = 0; port < NUM_PORTS; port++) { + if (DQBuf(static_cast(port))) + return -1; + } + return 0; +} + +int CFimg2d::StreamOn() +{ + for (int port = 0; port < NUM_PORTS; port++) { + if (exynos_v4l2_streamon(m_fdBlender, m_Frame.port[port].type)) { + BL_LOGE("Failed StreamOn for the %s", m_cszPortName[port]); + return errno; + } + + SetFlag(F_SRC_STREAMON + port); + BL_LOGD("Succeeded VIDIOC_STREAMON for the %s", m_cszPortName[port]); + } + return 0; +} + +int CFimg2d::DoStart() +{ + int ret; + +#ifdef BL_DEBUG + DebugParam(); +#endif + + ret = SetCtrl(); + if (ret) + goto err; + + ret = SetFormat(); + if (ret) + goto err; + + ret = ReqBufs(); + if (ret) + goto err; + + ret = QBuf(); + if (ret) + goto err; + + ret = StreamOn(); + if (ret) + goto err; + + ret = DQBuf(); + if (ret) + goto err; + + return 0; + +err: + DebugParam(); + DoStop(); + return ret; +} + +int CFimg2d::DoStop() +{ + for (int port = 0; port < NUM_PORTS; port++) + ResetPort(static_cast(port)); + + ResetFlag(); + return 0; +} + +int CFimg2d::Deactivate(bool deact) +{ + int ret = exynos_v4l2_s_ctrl(m_fdBlender, V4L2_CID_2D_DEACTIVATE, deact); + if (ret) { + BL_LOGERR("Failed S_CTRL V4L2_CID_2D_DEACTIVATE"); + return ret; + } + + BL_LOGERR("Succeeded S_CTRL V4L2_CID_2D_DEACTIVATE"); + return 0; +} + +void CFimg2d::DebugParam() +{ + const char *port[2] = { "SRC", "DST" }; + + BL_LOGE("G2D HAL Parameter Debug!!\n"); + ALOGE("Handle %p DeviceId %d\n", this, GetDeviceID()); + for (int i = 0; i < NUM_PORTS; i++) { + ALOGE("[%s] V4L2 Buf Type %d\n", port[i], m_Frame.port[i].type); + ALOGE("[%s] Full WxH %d x %d\n", port[i], + m_Frame.port[i].width, m_Frame.port[i].height); + ALOGE("[%s] Crop(XYWH) %d, %d, %d, %d\n", port[i], + m_Frame.port[i].crop_x, + m_Frame.port[i].crop_y, + m_Frame.port[i].crop_width, + m_Frame.port[i].crop_height); + ALOGE("[%s] Format %d\n", port[i], m_Frame.port[i].color_format); + ALOGE("[%s] 
Addr [0] %p [1] %p\n", port[i], + m_Frame.port[i].addr[0], + m_Frame.port[i].addr[1]); + ALOGE("[%s] Addr Type %d\n", port[i], m_Frame.port[i].memory); + ALOGE("[%s] out_num_planes %d\n", port[i], m_Frame.port[i].out_num_planes); + ALOGE("[%s] out_planes_size [0] %ld [1] %ld\n", port[i], + m_Frame.port[i].out_plane_size[0], + m_Frame.port[i].out_plane_size[1]); + } + ALOGE("Flags 0x%lx\n", m_Flags); + ALOGE("fill: enable %d color 0x%x\n", + m_Ctrl.fill.enable, m_Ctrl.fill.color_argb8888); + ALOGE("rotate: %d hflip: %d vflip: %d\n", + m_Ctrl.rot, m_Ctrl.hflip, m_Ctrl.vflip); + ALOGE("blend: %d premultiplied: %d\n", + m_Ctrl.op, m_Ctrl.premultiplied); + ALOGE("galpha: enable %d val 0x%x\n", + m_Ctrl.global_alpha.enable, m_Ctrl.global_alpha.val); + ALOGE("dither: enable %d\n", m_Ctrl.dither); + ALOGE("bluescreen: mode %d bgcolor 0x%x bscolor 0x%x\n", + m_Ctrl.bluescreen.mode, + m_Ctrl.bluescreen.bg_color, + m_Ctrl.bluescreen.bs_color); + ALOGE("scale: mode %d Width(src:dst %d, %d) Height(src:dst %d, %d)\n", + m_Ctrl.scale.mode, + m_Ctrl.scale.src_w, m_Ctrl.scale.dst_w, + m_Ctrl.scale.src_h, m_Ctrl.scale.dst_h); + ALOGE("repeat: mode %d\n", m_Ctrl.repeat); + ALOGE("clip: enable %d Clip(XYWH) %d, %d, %d %d\n", m_Ctrl.clip.enable, + m_Ctrl.clip.x, m_Ctrl.clip.y, m_Ctrl.clip.width, m_Ctrl.clip.height); + ALOGE("csc spec: enable %d space %d wide %d\n", + m_Ctrl.csc_spec.enable, + m_Ctrl.csc_spec.space, + m_Ctrl.csc_spec.wide); +} + diff --git a/libg2d/libg2d_obj.h b/libg2d/libg2d_obj.h new file mode 100644 index 0000000..9c4cf74 --- /dev/null +++ b/libg2d/libg2d_obj.h @@ -0,0 +1,65 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
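+ * Note: CFimg2d drives the FIMG2D (G2D) blitter through its V4L2 mem-to-mem
+ * video node (G2D_DEV_NODE plus G2D_NODE(dev)); DoStart() runs the
+ * s_ctrl/s_fmt/reqbufs/qbuf/streamon/dqbuf sequence implemented in libg2d.cpp.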
+ * \file libg2d_obj.h + * \brief header file for G2D library + * \author Eunseok Choi (es10.choi@samsung.com) + * \date 2013/09/21 + * + * Revision History: + * - 2013.09.21 : Eunseok Choi (eunseok.choi@samsung.com) \n + * Create + * + */ +#ifndef __LIBG2D_H__ +#define __LIBG2D_H__ + +#include "exynos_blender.h" +#include "exynos_blender_obj.h" + +#define G2D_DEV_NODE "/dev/video" +#define G2D_NODE(x) (55 + x) + +class CBlender; + +class CFimg2d : public CBlender { +public: + enum { G2D_NUM_OF_PLANES = 2 }; + + CFimg2d(BL_DEVID devid, bool nonblock = false); + ~CFimg2d(); + + int DoStart(); + int DoStop(); + int Deactivate(bool deact); + +private: + void Initialize(BL_DEVID devid, bool nonblock); + void ResetPort(BL_PORT port); + void DebugParam(); + + int SetCtrl(); + int SetFormat(); + int ReqBufs(); + int QBuf(); + int DQBuf(); + int DQBuf(BL_PORT port); + int StreamOn(); +}; + +#endif // __LIBG2D_H__ diff --git a/libgscaler/Android.mk b/libgscaler/Android.mk new file mode 100644 index 0000000..13e0cb7 --- /dev/null +++ b/libgscaler/Android.mk @@ -0,0 +1,39 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 libexynosscaler + +# to talk to secure side +# LOCAL_SHARED_LIBRARIES += libMcClient +# LOCAL_STATIC_LIBRARIES := libsecurepath + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos5/include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp + +LOCAL_SRC_FILES := \ + libgscaler_obj.cpp \ + libgscaler.cpp + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libexynosgscaler + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libgscaler/NOTICE b/libgscaler/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libgscaler/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libgscaler/libgscaler.cpp b/libgscaler/libgscaler.cpp new file mode 100644 index 0000000..8073e80 --- /dev/null +++ b/libgscaler/libgscaler.cpp @@ -0,0 +1,646 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
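+ * Note: the exynos_gsc_* entry points below dispatch to a real G-Scaler
+ * instance for dev_num < NUM_OF_GSC_HW and fall through to the
+ * libexynosscaler (exynos_sc_*) path when dev_num >= HW_SCAL0.
+ * Typical m2m flow (sketch): exynos_gsc_create() -> set_src/dst_format() ->
+ * set_src/dst_addr() -> exynos_gsc_convert() -> exynos_gsc_destroy().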
+ * \file libgscaler.cpp + * \brief source file for Gscaler HAL + * \author Sungchun Kang (sungchun.kang@samsung.com) + * \date 2013/06/01 + * + * Revision History: + * - 2013.06.01 : Sungchun Kang (sungchun.kang@samsung.com) \n + * Create + */ + +#include "libgscaler_obj.h" + +void *exynos_gsc_create(void) +{ + CGscaler *gsc = new CGscaler(GSC_M2M_MODE); + if (!gsc) { + ALOGE("%s:: failed to allocate Gscaler handle", __func__); + return NULL; + } + if (gsc->m_gsc_find_and_create(gsc) == false) { + ALOGE("%s::m_exynos_gsc_find_and_create() fail", __func__); + delete gsc; + return NULL; + } + + return reinterpret_cast(gsc); +} + +void *exynos_gsc_create_exclusive( + int dev_num, + int mode, + int out_mode, + int allow_drm) +{ + int i = 0; + int op_id = 0; + unsigned int total_sleep_time = 0; + int ret = 0; + + Exynos_gsc_In(); + + if ((dev_num < 0) || (dev_num >= HW_SCAL_MAX)) { + ALOGE("%s::fail:: dev_num is not valid(%d) ", __func__, dev_num); + return NULL; + } + + if ((dev_num >= NUM_OF_GSC_HW) && (dev_num < HW_SCAL_MAX)) { + CGscaler *gsc = new CGscaler(mode, out_mode, dev_num, allow_drm); + if (!gsc) { + ALOGE("%s:: failed to allocate Gscaler handle", __func__); + return NULL; + } + + gsc->scaler = exynos_sc_create_exclusive(dev_num - HW_SCAL0, + allow_drm); + if (!gsc->scaler) { + delete(gsc); + ALOGE("%s::exynos_sc_create fail", __func__); + return NULL; + } + Exynos_gsc_Out(); + return reinterpret_cast(gsc); + } + + if ((mode < 0) || (mode >= NUM_OF_GSC_HW)) { + ALOGE("%s::fail:: mode is not valid(%d) ", __func__, mode); + return NULL; + } + + CGscaler *gsc = new CGscaler(mode, out_mode, dev_num, allow_drm); + if (!gsc) { + ALOGE("%s:: failed to allocate Gscaler handle", __func__); + return NULL; + } + + if (mode == GSC_M2M_MODE) { + gsc->gsc_fd = gsc->m_gsc_m2m_create(dev_num); + if (gsc->gsc_fd < 0) { + ALOGE("%s::m_gsc_m2m_create(%i) fail", __func__, dev_num); + goto err; + } + } else if (mode == GSC_OUTPUT_MODE) { + ret = gsc->m_gsc_output_create(gsc, dev_num, out_mode); + if (ret < 0) { + ALOGE("%s::m_gsc_output_create(%i) fail", __func__, dev_num); + goto err; + } + } else if (mode == GSC_CAPTURE_MODE) { + ret = gsc->m_gsc_capture_create(gsc, dev_num, out_mode); + if (ret < 0) { + ALOGE("%s::m_gsc_capture_create(%i) fail", __func__, dev_num); + goto err; + } + } else { + ALOGE("%s::Unsupported Mode(%i) fail", __func__, dev_num); + goto err; + } + + Exynos_gsc_Out(); + + return reinterpret_cast(gsc); +err: + switch (mode) { + case GSC_M2M_MODE: + gsc->m_gsc_m2m_destroy(gsc); + break; + case GSC_OUTPUT_MODE: + gsc->m_gsc_out_destroy(gsc); + break; + case GSC_CAPTURE_MODE: + gsc->m_gsc_cap_destroy(gsc); + break; + } + + delete(gsc); + + Exynos_gsc_Out(); + + return NULL; +} + +void exynos_gsc_destroy(void *handle) +{ + Exynos_gsc_In(); + + int i = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return; + } + + if (gsc->mode == GSC_OUTPUT_MODE) + gsc->m_gsc_out_destroy(gsc); + else if (gsc->mode ==GSC_CAPTURE_MODE) + gsc->m_gsc_cap_destroy(gsc); + else + gsc->m_gsc_m2m_destroy(gsc); + + delete(gsc); + + Exynos_gsc_Out(); +} + +int exynos_gsc_set_csc_property( + void *handle, + unsigned int eq_auto, + unsigned int range_full, + unsigned int v4l2_colorspace) +{ + Exynos_gsc_In(); + + CGscaler *gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->gsc_id >= HW_SCAL0) { + int ret; + ret = exynos_sc_csc_exclusive(gsc->scaler, + range_full, 
v4l2_colorspace); + Exynos_gsc_Out(); + return ret; + } + gsc->eq_auto = eq_auto; + gsc->range_full = range_full; + gsc->v4l2_colorspace = v4l2_colorspace; + + Exynos_gsc_Out(); + + return 0; +} + +int exynos_gsc_set_src_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm) +{ + Exynos_gsc_In(); + + CGscaler *gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + gsc->src_info.width = width; + gsc->src_info.height = height; + gsc->src_info.crop_left = crop_left; + gsc->src_info.crop_top = crop_top; + gsc->src_info.crop_width = crop_width; + gsc->src_info.crop_height = crop_height; + gsc->src_info.v4l2_colorformat = v4l2_colorformat; + gsc->src_info.cacheable = cacheable; + gsc->src_info.mode_drm = mode_drm; + gsc->src_info.dirty = true; + + Exynos_gsc_Out(); + + return 0; +} + +int exynos_gsc_set_dst_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm) +{ + Exynos_gsc_In(); + + CGscaler *gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + gsc->dst_info.width = width; + gsc->dst_info.height = height; + gsc->dst_info.crop_left = crop_left; + gsc->dst_info.crop_top = crop_top; + gsc->dst_info.crop_width = crop_width; + gsc->dst_info.crop_height = crop_height; + gsc->dst_info.v4l2_colorformat = v4l2_colorformat; + gsc->dst_info.dirty = true; + gsc->dst_info.cacheable = cacheable; + gsc->dst_info.mode_drm = mode_drm; + + Exynos_gsc_Out(); + + return 0; +} + +int exynos_gsc_set_rotation( + void *handle, + int rotation, + int flip_horizontal, + int flip_vertical) +{ + CGscaler *gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + int new_rotation = rotation % 360; + + if (new_rotation % 90 != 0) { + ALOGE("%s::rotation(%d) cannot be acceptable fail", __func__, + rotation); + return -1; + } + + if(new_rotation < 0) + new_rotation = -new_rotation; + + gsc->dst_info.rotation = new_rotation; + gsc->dst_info.flip_horizontal = flip_horizontal; + gsc->dst_info.flip_vertical = flip_vertical; + + return 0; +} + +int exynos_gsc_set_src_addr( + void *handle, + void *addr[3], + int mem_type, + int acquireFenceFd) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + gsc->src_info.buf.addr[0] = addr[0]; + gsc->src_info.buf.addr[1] = addr[1]; + gsc->src_info.buf.addr[2] = addr[2]; + gsc->src_info.acquireFenceFd = acquireFenceFd; + gsc->src_info.buf.mem_type = (enum v4l2_memory)mem_type; + + Exynos_gsc_Out(); + + return 0; +} + +int exynos_gsc_set_dst_addr( + void *handle, + void *addr[3], + int mem_type, + int acquireFenceFd) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + gsc->dst_info.buf.addr[0] = addr[0]; + gsc->dst_info.buf.addr[1] = addr[1]; + gsc->dst_info.buf.addr[2] = addr[2]; + gsc->dst_info.acquireFenceFd = acquireFenceFd; + gsc->dst_info.buf.mem_type = (enum v4l2_memory)mem_type; + + Exynos_gsc_Out(); + + return 0; 
+} + +int exynos_gsc_convert(void *handle) +{ + Exynos_gsc_In(); + + int ret = -1; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return ret; + } + + if (gsc->m_gsc_m2m_run_core(handle) < 0) { + ALOGE("%s::exynos_gsc_run_core fail", __func__); + goto done; + } + + if (gsc->m_gsc_m2m_wait_frame_done(handle) < 0) { + ALOGE("%s::exynos_gsc_m2m_wait_frame_done", __func__); + goto done; + } + + if (gsc->src_info.releaseFenceFd >= 0) { + close(gsc->src_info.releaseFenceFd); + gsc->src_info.releaseFenceFd = -1; + } + + if (gsc->dst_info.releaseFenceFd >= 0) { + close(gsc->dst_info.releaseFenceFd); + gsc->dst_info.releaseFenceFd = -1; + } + + if (gsc->m_gsc_m2m_stop(handle) < 0) { + ALOGE("%s::m_gsc_m2m_stop", __func__); + goto done; + } + + ret = 0; + +done: + Exynos_gsc_Out(); + + return ret; +} + +int exynos_gsc_subdev_s_crop(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + struct v4l2_subdev_crop sd_crop; + CGscaler *gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + sd_crop.pad = GSCALER_SUBDEV_PAD_SOURCE; + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_crop.rect.left = dst_img->x; + sd_crop.rect.top = dst_img->y; + sd_crop.rect.width = dst_img->w; + sd_crop.rect.height = dst_img->h; + + return exynos_subdev_s_crop(gsc->mdev.gsc_sd_entity->fd, &sd_crop); +} + +int exynos_gsc_config_exclusive(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + Exynos_gsc_In(); + + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + if (gsc->gsc_id >= HW_SCAL0) { + ret = exynos_sc_config_exclusive(gsc->scaler, + (exynos_sc_img *)src_img, (exynos_sc_img *)dst_img); + Exynos_gsc_Out(); + return ret; + } + + switch (gsc->mode) { + case GSC_M2M_MODE: + ret = gsc->m_gsc_m2m_config(handle, src_img, dst_img); + break; + case GSC_OUTPUT_MODE: + ret = gsc->m_gsc_out_config(handle, src_img, dst_img); + break; + case GSC_CAPTURE_MODE: + ret = gsc->m_gsc_cap_config(handle, src_img, dst_img); + break; + default: + break; + } + + Exynos_gsc_Out(); + + return ret; +} + +int exynos_gsc_run_exclusive(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + Exynos_gsc_In(); + + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (handle == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->gsc_id >= HW_SCAL0) { + ret = exynos_sc_run_exclusive(gsc->scaler, + (exynos_sc_img *)src_img, (exynos_sc_img *)dst_img); + Exynos_gsc_Out(); + return ret; + } + + switch (gsc->mode) { + case GSC_M2M_MODE: + ret = gsc->m_gsc_m2m_run(handle, src_img, dst_img); + break; + case GSC_OUTPUT_MODE: + ret = gsc->m_gsc_out_run(handle, src_img); + break; + case GSC_CAPTURE_MODE: + ret = gsc->m_gsc_cap_run(handle, dst_img); + break; + default: + break; + } + + Exynos_gsc_Out(); + + return ret; +} + +void *exynos_gsc_create_blend_exclusive(int dev_num, int mode, int out_mode, + int allow_drm) +{ + int i = 0; + int op_id = 0; + unsigned int total_sleep_time = 0; + int ret = 0; + + Exynos_gsc_In(); + + if ((dev_num < 0) || (dev_num >= HW_SCAL_MAX)) { + ALOGE("%s::fail:: dev_num is not valid(%d) ", __func__, dev_num); + return NULL; + } + + if ((dev_num >= NUM_OF_GSC_HW) && (dev_num < HW_SCAL_MAX)) { + CGscaler *gsc = new CGscaler(mode, out_mode, dev_num, allow_drm); + if (!gsc) { + ALOGE("%s:: failed to allocate Gscaler handle", __func__); + return 
NULL; + } + + gsc->scaler = exynos_sc_create_blend_exclusive(dev_num - HW_SCAL0, allow_drm); + if (!gsc->scaler) { + Exynos_gsc_Out(); + delete(gsc); + ALOGE("%s::exynos_sc_create_blend_exclusive failed", __func__); + return NULL; + } + Exynos_gsc_Out(); + + return reinterpret_cast(gsc); + } + + Exynos_gsc_Out(); + + return NULL; +} + +int exynos_gsc_config_blend_exclusive(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img, + struct SrcBlendInfo *srcblendinfo) +{ + Exynos_gsc_In(); + + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + if (gsc->gsc_id >= HW_SCAL0) { + ret = exynos_sc_config_blend_exclusive(gsc->scaler, + (exynos_sc_img *)src_img, + (exynos_sc_img *)dst_img, + srcblendinfo); + Exynos_gsc_Out(); + return ret; + } + Exynos_gsc_Out(); + return ret; +} + +int exynos_gsc_wait_frame_done_exclusive(void *handle) +{ + Exynos_gsc_In(); + + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (handle == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->gsc_id >= HW_SCAL0) { + ret = exynos_sc_wait_frame_done_exclusive(gsc->scaler); + Exynos_gsc_Out(); + return ret; + } + + if (gsc->mode == GSC_M2M_MODE) + ret = gsc->m_gsc_m2m_wait_frame_done(handle); + + Exynos_gsc_Out(); + + return ret; +} + +int exynos_gsc_stop_exclusive(void *handle) +{ + Exynos_gsc_In(); + + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (handle == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->gsc_id >= HW_SCAL0) { + ret = exynos_sc_stop_exclusive(gsc->scaler); + Exynos_gsc_Out(); + return ret; + } + + switch (gsc->mode) { + case GSC_M2M_MODE: + ret = gsc->m_gsc_m2m_stop(handle); + break; + case GSC_OUTPUT_MODE: + ret = gsc->m_gsc_out_stop(handle); + break; + case GSC_CAPTURE_MODE: + ret = gsc->m_gsc_cap_stop(handle); + break; + default: + break; + } + + Exynos_gsc_Out(); + + return ret; +} + +int exynos_gsc_free_and_close(void *handle) +{ + Exynos_gsc_In(); + + struct v4l2_requestbuffers reqbuf; + struct v4l2_buffer buf; + struct v4l2_plane planes[NUM_OF_GSC_PLANES]; + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + + if (gsc->gsc_id >= HW_SCAL0) { + ret = exynos_sc_free_and_close(gsc->scaler); + Exynos_gsc_Out(); + return ret; + } + + memset(&reqbuf, 0, sizeof(struct v4l2_requestbuffers)); + if (gsc->mode == GSC_OUTPUT_MODE) + reqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + else + reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + + reqbuf.memory = V4L2_MEMORY_DMABUF; + reqbuf.count = 0; + + if (exynos_v4l2_reqbufs(gsc->mdev.gsc_vd_entity->fd, &reqbuf) < 0) { + ALOGE("%s::request buffers failed", __func__); + return -1; + } + + exynos_gsc_destroy(gsc); + Exynos_gsc_Out(); + + return 0; +} diff --git a/libgscaler/libgscaler_obj.cpp b/libgscaler/libgscaler_obj.cpp new file mode 100644 index 0000000..19f0b5e --- /dev/null +++ b/libgscaler/libgscaler_obj.cpp @@ -0,0 +1,2073 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file libgscaler_obj.cpp + * \brief source file for Gscaler HAL + * \author Sungchun Kang (sungchun.kang@samsung.com) + * \date 2013/06/01 + * + * Revision History: + * - 2013.06.01 : Sungchun Kang (sungchun.kang@samsung.com) \n + * Create + */ + +#include +#include "libgscaler_obj.h" +#include "content_protect.h" + +int CGscaler::m_gsc_output_create(void *handle, int dev_num, int out_mode) +{ + Exynos_gsc_In(); + + struct media_device *media0; + struct media_entity *gsc_sd_entity; + struct media_entity *gsc_vd_entity; + struct media_entity *sink_sd_entity; + char node[32]; + char devname[32]; + unsigned int cap; + int i; + int fd = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if ((out_mode != GSC_OUT_FIMD) && + (out_mode != GSC_OUT_TV)) + return -1; + + gsc->out_mode = out_mode; + /* GSCX => FIMD_WINX : arbitrary linking is not allowed */ + if ((out_mode == GSC_OUT_FIMD) && +#ifndef USES_ONLY_GSC0_GSC1 + (dev_num > 2)) +#else + (dev_num > 1)) +#endif + return -1; + + /* media0 */ + snprintf(node, sizeof(node), "%s%d", PFX_NODE_MEDIADEV, 0); + media0 = exynos_media_open(node); + if (media0 == NULL) { + ALOGE("%s::exynos_media_open failed (node=%s)", __func__, node); + return false; + } + gsc->mdev.media0 = media0; + + /* Get the sink subdev entity by name and make the node of sink subdev*/ + if (out_mode == GSC_OUT_FIMD) + snprintf(devname, sizeof(devname), PFX_FIMD_ENTITY, dev_num); + else + snprintf(devname, sizeof(devname), PFX_MXR_ENTITY, 0); + + sink_sd_entity = exynos_media_get_entity_by_name(media0, devname, + strlen(devname)); + if (!sink_sd_entity) { + ALOGE("%s:: failed to get the sink sd entity", __func__); + goto gsc_output_err; + } + gsc->mdev.sink_sd_entity = sink_sd_entity; + + sink_sd_entity->fd = exynos_subdev_open_devname(devname, O_RDWR); + if (sink_sd_entity->fd < 0) { + ALOGE("%s:: failed to open sink subdev node", __func__); + goto gsc_output_err; + } + + /* get GSC video dev & sub dev entity by name*/ +#if defined(USES_DT) + switch (dev_num) { + case 0: + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY0); + break; + case 1: + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY1); + break; + case 2: + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY2); + break; + } +#else + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY, dev_num); +#endif + gsc_vd_entity = exynos_media_get_entity_by_name(media0, devname, + strlen(devname)); + if (!gsc_vd_entity) { + ALOGE("%s:: failed to get the gsc vd entity", __func__); + goto gsc_output_err; + } + gsc->mdev.gsc_vd_entity = gsc_vd_entity; + + snprintf(devname, sizeof(devname), PFX_GSC_SUBDEV_ENTITY, dev_num); + gsc_sd_entity = exynos_media_get_entity_by_name(media0, devname, + strlen(devname)); + if (!gsc_sd_entity) { + ALOGE("%s:: failed to get the gsc sd entity", __func__); + goto gsc_output_err; + } + gsc->mdev.gsc_sd_entity = gsc_sd_entity; + + /* gsc sub-dev open */ + snprintf(devname, sizeof(devname), PFX_GSC_SUBDEV_ENTITY, dev_num); + gsc_sd_entity->fd = 
exynos_subdev_open_devname(devname, O_RDWR); + if (gsc_sd_entity->fd < 0) { + ALOGE("%s: gsc sub-dev open fail", __func__); + goto gsc_output_err; + } + + /* gsc video-dev open */ +#if defined(USES_DT) + switch (dev_num) { + case 0: + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY0); + break; + case 1: + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY1); + break; + case 2: + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY2); + break; + } +#else + snprintf(devname, sizeof(devname), PFX_GSC_VIDEODEV_ENTITY, dev_num); +#endif + gsc_vd_entity->fd = exynos_v4l2_open_devname(devname, O_RDWR | O_NONBLOCK); + if (gsc_vd_entity->fd < 0) { + ALOGE("%s: gsc video-dev open fail", __func__); + goto gsc_output_err; + } + + cap = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_OUTPUT_MPLANE; + + if (exynos_v4l2_querycap(gsc_vd_entity->fd, cap) == false) { + ALOGE("%s::exynos_v4l2_querycap() fail", __func__); + goto gsc_output_err; + } + + Exynos_gsc_Out(); + + return 0; + +gsc_output_err: + gsc->m_gsc_out_destroy(handle); + + return -1; +} + +int CGscaler::m_gsc_capture_create(void *handle, int dev_num, int out_mode) +{ + Exynos_gsc_In(); + + struct media_device *media1; + struct media_entity *gsc_sd_entity; + struct media_entity *gsc_vd_entity; + struct media_entity *sink_sd_entity; + struct media_link *links; + char node[32]; + char devname[32]; + unsigned int cap; + int i; + int fd = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + gsc->out_mode = out_mode; + + if (dev_num != 2) + return -1; + + /* media1 */ + snprintf(node, sizeof(node), "%s%d", PFX_NODE_MEDIADEV, 1); + media1 = exynos_media_open(node); + if (media1 == NULL) { + ALOGE("%s::exynos_media_open failed (node=%s)", __func__, node); + return false; + } + gsc->mdev.media1 = media1; + + /* DECON-TV sub-device Open */ + snprintf(devname, sizeof(devname), DEX_WB_SD_NAME); + + sink_sd_entity = exynos_media_get_entity_by_name(media1, devname, + strlen(devname)); + if (!sink_sd_entity) { + ALOGE("%s:: failed to get the sink sd entity", __func__); + goto gsc_cap_err; + } + gsc->mdev.sink_sd_entity = sink_sd_entity; + + sink_sd_entity->fd = exynos_subdev_open_devname(devname, O_RDWR); + if (sink_sd_entity->fd < 0) { + ALOGE("%s:: failed to open sink subdev node", __func__); + goto gsc_cap_err; + } + + /* Gscaler2 capture video-device Open */ + snprintf(devname, sizeof(devname), PFX_GSC_CAPTURE_ENTITY); + gsc_vd_entity = exynos_media_get_entity_by_name(media1, devname, + strlen(devname)); + if (!gsc_vd_entity) { + ALOGE("%s:: failed to get the gsc vd entity", __func__); + goto gsc_cap_err; + } + gsc->mdev.gsc_vd_entity = gsc_vd_entity; + + gsc_vd_entity->fd = exynos_v4l2_open_devname(devname, O_RDWR); + if (gsc_vd_entity->fd < 0) { + ALOGE("%s: gsc video-dev open fail", __func__); + goto gsc_cap_err; + } + + /* Gscaler2 capture sub-device Open */ + snprintf(devname, sizeof(devname), GSC_WB_SD_NAME); + gsc_sd_entity = exynos_media_get_entity_by_name(media1, devname, + strlen(devname)); + if (!gsc_sd_entity) { + ALOGE("%s:: failed to get the gsc sd entity", __func__); + goto gsc_cap_err; + } + gsc->mdev.gsc_sd_entity = gsc_sd_entity; + + gsc_sd_entity->fd = exynos_subdev_open_devname(devname, O_RDWR); + if (gsc_sd_entity->fd < 0) { + ALOGE("%s: gsc sub-dev open fail", __func__); + goto gsc_cap_err; + } + + if (exynos_media_setup_link(media1, sink_sd_entity->pads, + gsc_sd_entity->pads, MEDIA_LNK_FL_ENABLED) < 0) { + ALOGE("%s::exynos_media_setup_link 
failed", __func__); + goto gsc_cap_err; + } + + cap = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_CAPTURE_MPLANE; + + if (exynos_v4l2_querycap(gsc_vd_entity->fd, cap) == false) { + ALOGE("%s::exynos_v4l2_querycap() fail", __func__); + goto gsc_cap_err; + } + + Exynos_gsc_Out(); + + return 0; + +gsc_cap_err: + gsc->m_gsc_cap_destroy(handle); + + return -1; +} + +int CGscaler::m_gsc_out_stop(void *handle) +{ + Exynos_gsc_In(); + + struct v4l2_requestbuffers reqbuf; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->src_info.stream_on == false) { + /* to handle special scenario.*/ + gsc->src_info.qbuf_cnt = 0; + ALOGD("%s::GSC is already stopped", __func__); + goto SKIP_STREAMOFF; + } + gsc->src_info.qbuf_cnt = 0; + gsc->src_info.stream_on = false; + + if (exynos_v4l2_streamoff(gsc->mdev.gsc_vd_entity->fd, + V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) < 0) { + ALOGE("%s::stream off failed", __func__); + return -1; + } + +SKIP_STREAMOFF: + Exynos_gsc_Out(); + + return 0; +} + +int CGscaler::m_gsc_cap_stop(void *handle) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->dst_info.stream_on == false) { + /* to handle special scenario.*/ + gsc->dst_info.qbuf_cnt = 0; + ALOGD("%s::GSC is already stopped", __func__); + goto SKIP_STREAMOFF; + } + gsc->dst_info.qbuf_cnt = 0; + gsc->dst_info.stream_on = false; + + if (exynos_v4l2_streamoff(gsc->mdev.gsc_vd_entity->fd, + V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) < 0) { + ALOGE("%s::stream off failed", __func__); + return -1; + } + +SKIP_STREAMOFF: + Exynos_gsc_Out(); + + return 0; +} + +bool CGscaler::m_gsc_out_destroy(void *handle) +{ + Exynos_gsc_In(); + + int i; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return false; + } + + if (gsc->src_info.stream_on == true) { + if (gsc->m_gsc_out_stop(gsc) < 0) + ALOGE("%s::m_gsc_out_stop() fail", __func__); + + gsc->src_info.stream_on = false; + } + + if (gsc->mdev.gsc_vd_entity && gsc->mdev.gsc_vd_entity->fd > 0) { + close(gsc->mdev.gsc_vd_entity->fd); + gsc->mdev.gsc_vd_entity->fd = -1; + } + + if (gsc->mdev.gsc_sd_entity && gsc->mdev.gsc_sd_entity->fd > 0) { + close(gsc->mdev.gsc_sd_entity->fd); + gsc->mdev.gsc_sd_entity->fd = -1; + } + + if (gsc->mdev.sink_sd_entity && gsc->mdev.sink_sd_entity->fd > 0) { + close(gsc->mdev.sink_sd_entity->fd); + gsc->mdev.sink_sd_entity->fd = -1; + } + + if (gsc->mdev.media0) + exynos_media_close(gsc->mdev.media0); + + gsc->mdev.media0 = NULL; + gsc->mdev.gsc_sd_entity = NULL; + gsc->mdev.gsc_vd_entity = NULL; + gsc->mdev.sink_sd_entity = NULL; + + Exynos_gsc_Out(); + return true; +} + +bool CGscaler::m_gsc_cap_destroy(void *handle) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return false; + } + + if (gsc->dst_info.stream_on == true) { + if (gsc->m_gsc_cap_stop(gsc) < 0) + ALOGE("%s::m_gsc_cap_stop() fail", __func__); + + gsc->dst_info.stream_on = false; + } + + if (!gsc->mdev.media1 || !gsc->mdev.gsc_sd_entity || + !gsc->mdev.gsc_vd_entity || !gsc->mdev.sink_sd_entity) { + ALOGE("%s::gsc->mdev information is null", __func__); + return false; + } + + if (exynos_media_setup_link(gsc->mdev.media1, + gsc->mdev.sink_sd_entity->pads, + gsc->mdev.gsc_sd_entity->pads, 0) < 0) { + ALOGE("%s::exynos_media_setup_unlin failed", __func__); + } + + if 
(gsc->mdev.gsc_vd_entity && gsc->mdev.gsc_vd_entity->fd > 0) { + close(gsc->mdev.gsc_vd_entity->fd); + gsc->mdev.gsc_vd_entity->fd = -1; + } + + if (gsc->mdev.gsc_sd_entity && gsc->mdev.gsc_sd_entity->fd > 0) { + close(gsc->mdev.gsc_sd_entity->fd); + gsc->mdev.gsc_sd_entity->fd = -1; + } + + if (gsc->mdev.sink_sd_entity && gsc->mdev.sink_sd_entity->fd > 0) { + close(gsc->mdev.sink_sd_entity->fd); + gsc->mdev.sink_sd_entity->fd = -1; + } + + if (gsc->mdev.media1) + exynos_media_close(gsc->mdev.media1); + + gsc->mdev.media1 = NULL; + gsc->mdev.gsc_sd_entity = NULL; + gsc->mdev.gsc_vd_entity = NULL; + gsc->mdev.sink_sd_entity = NULL; + + Exynos_gsc_Out(); + return true; +} + +int CGscaler::m_gsc_m2m_create(int dev) +{ + Exynos_gsc_In(); + + int fd = 0; + int video_node_num; + unsigned int cap; + char node[32]; + + switch(dev) { + case 0: + video_node_num = NODE_NUM_GSC_0; + break; + case 1: + video_node_num = NODE_NUM_GSC_1; + break; +#ifndef USES_ONLY_GSC0_GSC1 + case 2: + video_node_num = NODE_NUM_GSC_2; + break; + case 3: + video_node_num = NODE_NUM_GSC_3; + break; +#endif + default: + ALOGE("%s::unexpected dev(%d) fail", __func__, dev); + return -1; + break; + } + + snprintf(node, sizeof(node), "%s%d", PFX_NODE_GSC, video_node_num); + fd = exynos_v4l2_open(node, O_RDWR); + if (fd < 0) { + ALOGE("%s::exynos_v4l2_open(%s) fail", __func__, node); + return -1; + } + + cap = V4L2_CAP_STREAMING | + V4L2_CAP_VIDEO_OUTPUT_MPLANE | + V4L2_CAP_VIDEO_CAPTURE_MPLANE; + + if (exynos_v4l2_querycap(fd, cap) == false) { + ALOGE("%s::exynos_v4l2_querycap() fail", __func__); + close(fd); + fd = 0; + return -1; + } + + Exynos_gsc_Out(); + + return fd; +} + +bool CGscaler::m_gsc_find_and_create(void *handle) +{ + Exynos_gsc_In(); + + int i = 0; + bool flag_find_new_gsc = false; + unsigned int total_sleep_time = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return false; + } + + do { + for (i = 0; i < NUM_OF_GSC_HW; i++) { +#ifndef USES_ONLY_GSC0_GSC1 + if (i == 0 || i == 3) +#else + if (i == 0) +#endif + continue; + + gsc->gsc_id = i; + gsc->gsc_fd = gsc->m_gsc_m2m_create(i); + if (gsc->gsc_fd < 0) { + gsc->gsc_fd = 0; + continue; + } + + flag_find_new_gsc = true; + break; + } + + if (flag_find_new_gsc == false) { + usleep(GSC_WAITING_TIME_FOR_TRYLOCK); + total_sleep_time += GSC_WAITING_TIME_FOR_TRYLOCK; + ALOGV("%s::waiting for the gscaler availability", __func__); + } + + } while(flag_find_new_gsc == false + && total_sleep_time < MAX_GSC_WAITING_TIME_FOR_TRYLOCK); + + if (flag_find_new_gsc == false) + ALOGE("%s::we don't have any available gsc.. fail", __func__); + + Exynos_gsc_Out(); + + return flag_find_new_gsc; +} + +bool CGscaler::m_gsc_m2m_destroy(void *handle) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return false; + } + + /* + * just in case, we call stop here because we cannot afford to leave + * secure side protection on if things failed. 
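+ * m_gsc_m2m_stop() below also resets V4L2_CID_CONTENT_PROTECTION and
+ * releases any queued buffers via REQBUFS(count = 0).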
+ */ + gsc->m_gsc_m2m_stop(handle); + + if (gsc->gsc_id >= HW_SCAL0) { + bool ret = exynos_sc_free_and_close(gsc->scaler); + Exynos_gsc_Out(); + return ret; + } + + if (0 < gsc->gsc_fd) + close(gsc->gsc_fd); + gsc->gsc_fd = 0; + + Exynos_gsc_Out(); + + return true; +} + +int CGscaler::m_gsc_m2m_stop(void *handle) +{ + Exynos_gsc_In(); + + struct v4l2_requestbuffers req_buf; + int ret = 0; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (!gsc->src_info.stream_on && !gsc->dst_info.stream_on) { + /* wasn't streaming, return success */ + return 0; + } else if (gsc->src_info.stream_on != gsc->dst_info.stream_on) { + ALOGE("%s: invalid state, queue stream state doesn't match \ + (%d != %d)", __func__, gsc->src_info.stream_on, + gsc->dst_info.stream_on); + ret = -1; + } + + /* + * we need to plow forward on errors below to make sure that if we had + * turned on content protection on secure side, we turn it off. + * + * also, if we only failed to turn on one of the streams, we'll turn + * the other one off correctly. + */ + if (gsc->src_info.stream_on == true) { + if (exynos_v4l2_streamoff(gsc->gsc_fd, + gsc->src_info.buf.buf_type) < 0) { + ALOGE("%s::exynos_v4l2_streamoff(src) fail", __func__); + ret = -1; + } + gsc->src_info.stream_on = false; + } + + if (gsc->dst_info.stream_on == true) { + if (exynos_v4l2_streamoff(gsc->gsc_fd, + gsc->dst_info.buf.buf_type) < 0) { + ALOGE("%s::exynos_v4l2_streamoff(dst) fail", __func__); + ret = -1; + } + gsc->dst_info.stream_on = false; + } + + /* if drm is enabled */ + if (gsc->allow_drm && gsc->protection_enabled) { + unsigned int protect_id = 0; + + if (gsc->gsc_id == 0) + protect_id = CP_PROTECT_GSC0; + else if (gsc->gsc_id == 1) + protect_id = CP_PROTECT_GSC1; + else if (gsc->gsc_id == 2) + protect_id = CP_PROTECT_GSC2; + else if (gsc->gsc_id == 3) + protect_id = CP_PROTECT_GSC3; + + /* CP_Disable_Path_Protection(protect_id); */ + gsc->protection_enabled = false; + } + + if (exynos_v4l2_s_ctrl(gsc->gsc_fd, + V4L2_CID_CONTENT_PROTECTION, 0) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CONTENT_PROTECTION) fail", + __func__); + ret = -1; + } + + /* src: clear_buf */ + req_buf.count = 0; + req_buf.type = gsc->src_info.buf.buf_type; + req_buf.memory = gsc->src_info.buf.mem_type; + if (exynos_v4l2_reqbufs(gsc->gsc_fd, &req_buf) < 0) { + ALOGE("%s::exynos_v4l2_reqbufs():src: fail", __func__); + ret = -1; + } + + /* dst: clear_buf */ + req_buf.count = 0; + req_buf.type = gsc->dst_info.buf.buf_type; + req_buf.memory = gsc->dst_info.buf.mem_type;; + if (exynos_v4l2_reqbufs(gsc->gsc_fd, &req_buf) < 0) { + ALOGE("%s::exynos_v4l2_reqbufs():dst: fail", __func__); + ret = -1; + } + + Exynos_gsc_Out(); + + return ret; +} + +int CGscaler::m_gsc_m2m_run_core(void *handle) +{ + Exynos_gsc_In(); + + unsigned int rotate, hflip, vflip; + bool is_dirty; + bool is_drm; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + is_dirty = gsc->src_info.dirty || gsc->dst_info.dirty; + is_drm = gsc->src_info.mode_drm; + + if (is_dirty && (gsc->src_info.mode_drm != gsc->dst_info.mode_drm)) { + ALOGE("%s: drm mode mismatch between src and dst, \ + gsc%d (s=%d d=%d)", __func__, gsc->gsc_id, + gsc->src_info.mode_drm, gsc->dst_info.mode_drm); + return -1; + } else if (is_drm && !gsc->allow_drm) { + ALOGE("%s: drm mode is not supported on gsc%d", __func__, + gsc->gsc_id); + return -1; + } + + 
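+	/*
+	 * Descriptive note (editor-added comment): map the destination HAL
+	 * transform to GSC rotate/flip values, then validate the source and
+	 * destination geometry against the Gscaler hardware minimum sizes
+	 * before any V4L2 state is modified.
+	 */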
CGscaler::rotateValueHAL2GSC(gsc->dst_img.rot, &rotate, &hflip, &vflip); + + if (CGscaler::m_gsc_check_src_size(&gsc->src_info.width, + &gsc->src_info.height, &gsc->src_info.crop_left, + &gsc->src_info.crop_top, &gsc->src_info.crop_width, + &gsc->src_info.crop_height, gsc->src_info.v4l2_colorformat, + (rotate == 90 || rotate == 270)) == false) { + ALOGE("%s::m_gsc_check_src_size() fail", __func__); + return -1; + } + + if (CGscaler::m_gsc_check_dst_size(&gsc->dst_info.width, + &gsc->dst_info.height, &gsc->dst_info.crop_left, + &gsc->dst_info.crop_top, &gsc->dst_info.crop_width, + &gsc->dst_info.crop_height, gsc->dst_info.v4l2_colorformat, + gsc->dst_info.rotation) == false) { + ALOGE("%s::m_gsc_check_dst_size() fail", __func__); + return -1; + } + + /* dequeue buffers from previous work if necessary */ + if (gsc->src_info.stream_on == true) { + if (gsc->m_gsc_m2m_wait_frame_done(handle) < 0) { + ALOGE("%s::exynos_gsc_m2m_wait_frame_done fail", __func__); + return -1; + } + } + + /* + * need to set the content protection flag before doing reqbufs + * in set_format + */ + if (is_dirty && gsc->allow_drm && is_drm) { + if (exynos_v4l2_s_ctrl(gsc->gsc_fd, + V4L2_CID_CONTENT_PROTECTION, is_drm) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl() fail", __func__); + return -1; + } + } + + /* + * from this point on, we have to ensure to call stop to clean up + * whatever state we have set. + */ + + if (gsc->src_info.dirty) { + if (CGscaler::m_gsc_set_format(gsc->gsc_fd, &gsc->src_info) == false) { + ALOGE("%s::m_gsc_set_format(src) fail", __func__); + goto done; + } + gsc->src_info.dirty = false; + } + + if (gsc->dst_info.dirty) { + if (CGscaler::m_gsc_set_format(gsc->gsc_fd, &gsc->dst_info) == false) { + ALOGE("%s::m_gsc_set_format(dst) fail", __func__); + goto done; + } + gsc->dst_info.dirty = false; + } + + /* + * set up csc equation property + */ + if (is_dirty) { + if (exynos_v4l2_s_ctrl(gsc->gsc_fd, + V4L2_CID_CSC_EQ_MODE, gsc->eq_auto) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_EQ_MODE) fail", __func__); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->gsc_fd, + V4L2_CID_CSC_EQ, gsc->v4l2_colorspace) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_EQ) fail", __func__); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->gsc_fd, + V4L2_CID_CSC_RANGE, gsc->range_full) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_RANGE) fail", __func__); + return -1; + } + } + + /* if we are enabling drm, make sure to enable hw protection. + * Need to do this before queuing buffers so that the mmu is reserved + * and power domain is kept on. 
+ */ + if (is_dirty && gsc->allow_drm && is_drm) { + unsigned int protect_id = 0; + + if (gsc->gsc_id == 0) { + protect_id = CP_PROTECT_GSC0; + } else if (gsc->gsc_id == 1) { + protect_id = CP_PROTECT_GSC1; + } else if (gsc->gsc_id == 2) { + protect_id = CP_PROTECT_GSC2; + } else if (gsc->gsc_id == 3) { + protect_id = CP_PROTECT_GSC3; + } else { + ALOGE("%s::invalid gscaler id %d for content protection", + __func__, gsc->gsc_id); + goto done; + } + + /* if (CP_Enable_Path_Protection(protect_id) != 0) { + ALOGE("%s::CP_Enable_Path_Protection failed", __func__); + goto done; + } */ + gsc->protection_enabled = true; + } + + if (gsc->m_gsc_set_addr(gsc->gsc_fd, &gsc->src_info) == false) { + ALOGE("%s::m_gsc_set_addr(src) fail", __func__); + goto done; + } + + if (gsc->m_gsc_set_addr(gsc->gsc_fd, &gsc->dst_info) == false) { + ALOGE("%s::m_gsc_set_addr(dst) fail", __func__); + goto done; + } + + if (gsc->src_info.stream_on == false) { + if (exynos_v4l2_streamon(gsc->gsc_fd, gsc->src_info.buf.buf_type) < 0) { + ALOGE("%s::exynos_v4l2_streamon(src) fail", __func__); + goto done; + } + gsc->src_info.stream_on = true; + } + + if (gsc->dst_info.stream_on == false) { + if (exynos_v4l2_streamon(gsc->gsc_fd, gsc->dst_info.buf.buf_type) < 0) { + ALOGE("%s::exynos_v4l2_streamon(dst) fail", __func__); + goto done; + } + gsc->dst_info.stream_on = true; + } + + Exynos_gsc_Out(); + + return 0; + +done: + gsc->m_gsc_m2m_stop(handle); + return -1; +} + +bool CGscaler::m_gsc_check_src_size( + unsigned int *w, unsigned int *h, + unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int v4l2_colorformat, bool rotation) +{ + unsigned int minWidth, minHeight, shift = 0; + if (v4l2_colorformat == V4L2_PIX_FMT_RGB32 || v4l2_colorformat == V4L2_PIX_FMT_RGB565) + shift = 1; + if (rotation) { + minWidth = GSC_MIN_SRC_H_SIZE >> shift; + minHeight = GSC_MIN_SRC_W_SIZE >> shift; + } else { + minWidth = GSC_MIN_SRC_W_SIZE >> shift; + minHeight = GSC_MIN_SRC_H_SIZE >> shift; + } + + if (*w < minWidth || *h < minHeight) { + ALOGE("%s::too small size (w : %d < %d) (h : %d < %d)", + __func__, GSC_MIN_SRC_W_SIZE, *w, GSC_MIN_SRC_H_SIZE, *h); + return false; + } + + if (*crop_w < minWidth || *crop_h < minHeight) { + ALOGE("%s::too small size (w : %d < %d) (h : %d < %d)", + __func__, GSC_MIN_SRC_W_SIZE,* crop_w, GSC_MIN_SRC_H_SIZE, *crop_h); + return false; + } + + return true; +} + +bool CGscaler::m_gsc_check_dst_size( + unsigned int *w, unsigned int *h, + unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int v4l2_colorformat, + int rotation) +{ + if (*w < GSC_MIN_DST_W_SIZE || *h < GSC_MIN_DST_H_SIZE) { + ALOGE("%s::too small size (w : %d < %d) (h : %d < %d)", + __func__, GSC_MIN_DST_W_SIZE, *w, GSC_MIN_DST_H_SIZE, *h); + return false; + } + + if (*crop_w < GSC_MIN_DST_W_SIZE || *crop_h < GSC_MIN_DST_H_SIZE) { + ALOGE("%s::too small size (w : %d < %d) (h : %d < %d)", + __func__, GSC_MIN_DST_W_SIZE,* crop_w, GSC_MIN_DST_H_SIZE, *crop_h); + return false; + } + + return true; +} + + +int CGscaler::m_gsc_multiple_of_n(int number, int N) +{ + int result = number; + switch (N) { + case 1: + case 2: + case 4: + case 8: + case 16: + case 32: + case 64: + case 128: + case 256: + result = (number - (number & (N-1))); + break; + default: + result = number - (number % N); + break; + } + return result; +} + +int CGscaler::m_gsc_m2m_wait_frame_done(void *handle) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle 
== NULL() fail", __func__); + return -1; + } + + if ((gsc->src_info.stream_on == false) || + (gsc->dst_info.stream_on == false)) { + ALOGE("%s:: src_strean_on or dst_stream_on are false", __func__); + return -1; + } + + if (gsc->src_info.buf.buffer_queued) { + if (exynos_v4l2_dqbuf(gsc->gsc_fd, &gsc->src_info.buf.buffer) < 0) { + ALOGE("%s::exynos_v4l2_dqbuf(src) fail", __func__); + return -1; + } + gsc->src_info.buf.buffer_queued = false; + } + + if (gsc->dst_info.buf.buffer_queued) { + if (exynos_v4l2_dqbuf(gsc->gsc_fd, &gsc->dst_info.buf.buffer) < 0) { + ALOGE("%s::exynos_v4l2_dqbuf(dst) fail", __func__); + return -1; + } + gsc->dst_info.buf.buffer_queued = false; + } + + Exynos_gsc_Out(); + + return 0; +} + +bool CGscaler::m_gsc_set_format(int fd, GscInfo *info) +{ + Exynos_gsc_In(); + + struct v4l2_requestbuffers req_buf; + int plane_count; + + plane_count = m_gsc_get_plane_count(info->v4l2_colorformat); + if (plane_count < 0) { + ALOGE("%s::not supported v4l2_colorformat", __func__); + return false; + } + + if (exynos_v4l2_s_ctrl(fd, V4L2_CID_ROTATE, info->rotation) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_ROTATE) fail", __func__); + return false; + } + + if (exynos_v4l2_s_ctrl(fd, V4L2_CID_VFLIP, info->flip_horizontal) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_VFLIP) fail", __func__); + return false; + } + + if (exynos_v4l2_s_ctrl(fd, V4L2_CID_HFLIP, info->flip_vertical) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_HFLIP) fail", __func__); + return false; + } + + info->format.type = info->buf.buf_type; + info->format.fmt.pix_mp.width = info->width; + info->format.fmt.pix_mp.height = info->height; + info->format.fmt.pix_mp.pixelformat = info->v4l2_colorformat; + info->format.fmt.pix_mp.field = V4L2_FIELD_ANY; + info->format.fmt.pix_mp.num_planes = plane_count; + + if (exynos_v4l2_s_fmt(fd, &info->format) < 0) { + ALOGE("%s::exynos_v4l2_s_fmt() fail", __func__); + return false; + } + + info->crop.type = info->buf.buf_type; + info->crop.c.left = info->crop_left; + info->crop.c.top = info->crop_top; + info->crop.c.width = info->crop_width; + info->crop.c.height = info->crop_height; + + if (exynos_v4l2_s_crop(fd, &info->crop) < 0) { + ALOGE("%s::exynos_v4l2_s_crop() fail", __func__); + return false; + } + + if (exynos_v4l2_s_ctrl(fd, V4L2_CID_CACHEABLE, info->cacheable) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl() fail", __func__); + return false; + } + + req_buf.count = 1; + req_buf.type = info->buf.buf_type; + req_buf.memory = info->buf.mem_type; + if (exynos_v4l2_reqbufs(fd, &req_buf) < 0) { + ALOGE("%s::exynos_v4l2_reqbufs() fail", __func__); + return false; + } + + Exynos_gsc_Out(); + + return true; +} + +unsigned int CGscaler::m_gsc_get_plane_count(int v4l_pixel_format) +{ + int plane_count = 0; + + switch (v4l_pixel_format) { + case V4L2_PIX_FMT_RGB32: + case V4L2_PIX_FMT_BGR32: + case V4L2_PIX_FMT_RGB24: + case V4L2_PIX_FMT_RGB565: + case V4L2_PIX_FMT_RGB555X: + case V4L2_PIX_FMT_RGB444: + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + case V4L2_PIX_FMT_YVU420: + case V4L2_PIX_FMT_YUV420: + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + case V4L2_PIX_FMT_YUV422P: + plane_count = 1; + break; + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV12MT_16X16: + case V4L2_PIX_FMT_NV21M: + plane_count = 2; + break; + case V4L2_PIX_FMT_YVU420M: + case V4L2_PIX_FMT_YUV420M: + plane_count = 3; + break; + default: + ALOGE("%s::unmatched v4l_pixel_format color_space(0x%x)\n", + __func__, v4l_pixel_format); + plane_count = 
-1; + break; + } + + return plane_count; +} + +bool CGscaler::m_gsc_set_addr(int fd, GscInfo *info) +{ + unsigned int i; + unsigned int plane_size[NUM_OF_GSC_PLANES]; + + CGscaler::m_gsc_get_plane_size(plane_size, info->width, + info->height, info->v4l2_colorformat); + + info->buf.buffer.index = 0; + info->buf.buffer.flags = V4L2_BUF_FLAG_USE_SYNC; + info->buf.buffer.type = info->buf.buf_type; + info->buf.buffer.memory = info->buf.mem_type; + info->buf.buffer.m.planes = info->buf.planes; + info->buf.buffer.length = info->format.fmt.pix_mp.num_planes; + info->buf.buffer.reserved = info->acquireFenceFd; + + for (i = 0; i < info->format.fmt.pix_mp.num_planes; i++) { + if (info->buf.buffer.memory == V4L2_MEMORY_DMABUF) + info->buf.buffer.m.planes[i].m.fd = (long)info->buf.addr[i]; + else + info->buf.buffer.m.planes[i].m.userptr = + (unsigned long)info->buf.addr[i]; + info->buf.buffer.m.planes[i].length = plane_size[i]; + info->buf.buffer.m.planes[i].bytesused = 0; + } + + if (exynos_v4l2_qbuf(fd, &info->buf.buffer) < 0) { + ALOGE("%s::exynos_v4l2_qbuf() fail", __func__); + return false; + } + info->buf.buffer_queued = true; + + info->releaseFenceFd = info->buf.buffer.reserved; + + return true; +} + +unsigned int CGscaler::m_gsc_get_plane_size( + unsigned int *plane_size, + unsigned int width, + unsigned int height, + int v4l_pixel_format) +{ + switch (v4l_pixel_format) { + /* 1 plane */ + case V4L2_PIX_FMT_RGB32: + case V4L2_PIX_FMT_BGR32: + plane_size[0] = width * height * 4; + plane_size[1] = 0; + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_RGB24: + plane_size[0] = width * height * 3; + plane_size[1] = 0; + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_RGB565: + case V4L2_PIX_FMT_RGB555X: + case V4L2_PIX_FMT_RGB444: + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_UYVY: + plane_size[0] = width * height * 2; + plane_size[1] = 0; + plane_size[2] = 0; + break; + /* 2 planes */ + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV21M: + plane_size[0] = width * height; + plane_size[1] = width * (height / 2); + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + plane_size[0] = width * height * 3 / 2; + plane_size[1] = 0; + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + case V4L2_PIX_FMT_YUV422P: + plane_size[0] = width * height * 2; + plane_size[1] = 0; + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_NV12MT_16X16: + plane_size[0] = ALIGN(width, 16) * ALIGN(height, 16); + plane_size[1] = ALIGN(width, 16) * ALIGN(height / 2, 8); + plane_size[2] = 0; + break; + /* 3 planes */ + case V4L2_PIX_FMT_YUV420M: + plane_size[0] = width * height; + plane_size[1] = (width / 2) * (height / 2); + plane_size[2] = (width / 2) * (height / 2); + break; + case V4L2_PIX_FMT_YVU420: + plane_size[0] = ALIGN(width, 16) * height + ALIGN(width / 2, 16) * height; + plane_size[1] = 0; + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_YUV420: + plane_size[0] = width * height * 3 / 2; + plane_size[1] = 0; + plane_size[2] = 0; + break; + case V4L2_PIX_FMT_YVU420M: + plane_size[0] = ALIGN(width, 16) * height; + plane_size[1] = ALIGN(width / 2, 16) * (height / 2); + plane_size[2] = plane_size[1]; + break; + default: + ALOGE("%s::unmatched v4l_pixel_format color_space(0x%x)\n", + __func__, v4l_pixel_format); + return -1; + } + + return 0; +} + +int CGscaler::m_gsc_m2m_config(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + Exynos_gsc_In(); + + int32_t src_color_space; + int32_t dst_color_space; + int ret; + unsigned int rotate; + unsigned int 
hflip; + unsigned int vflip; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if ((src_img->drmMode && !gsc->allow_drm) || + (src_img->drmMode != dst_img->drmMode)) { + ALOGE("%s::invalid drm state request for gsc%d (s=%d d=%d)", + __func__, gsc->gsc_id, src_img->drmMode, dst_img->drmMode); + return -1; + } + + src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(src_img->format); + dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(dst_img->format); + CGscaler::rotateValueHAL2GSC(dst_img->rot, &rotate, &hflip, &vflip); + exynos_gsc_set_rotation(gsc, rotate, hflip, vflip); + + ret = exynos_gsc_set_src_format(gsc, src_img->fw, src_img->fh, + src_img->x, src_img->y, src_img->w, src_img->h, + src_color_space, src_img->cacheable, src_img->drmMode); + if (ret < 0) { + ALOGE("%s: fail: exynos_gsc_set_src_format \ + [fw %d fh %d x %d y %d w %d h %d f %x rot %d]", + __func__, src_img->fw, src_img->fh, src_img->x, src_img->y, + src_img->w, src_img->h, src_color_space, src_img->rot); + return -1; + } + + ret = exynos_gsc_set_dst_format(gsc, dst_img->fw, dst_img->fh, + dst_img->x, dst_img->y, dst_img->w, dst_img->h, + dst_color_space, dst_img->cacheable, dst_img->drmMode); + if (ret < 0) { + ALOGE("%s: fail: exynos_gsc_set_dst_format \ + [fw %d fh %d x %d y %d w %d h %d f %x rot %d]", + __func__, dst_img->fw, dst_img->fh, dst_img->x, dst_img->y, + dst_img->w, dst_img->h, src_color_space, dst_img->rot); + return -1; + } + + Exynos_gsc_Out(); + + return 0; +} + +int CGscaler::m_gsc_out_config(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + Exynos_gsc_In(); + + struct v4l2_format fmt; + struct v4l2_crop crop; + struct v4l2_requestbuffers reqbuf; + struct v4l2_subdev_format sd_fmt; + struct v4l2_subdev_crop sd_crop; + int i; + unsigned int rotate; + unsigned int hflip; + unsigned int vflip; + unsigned int plane_size[NUM_OF_GSC_PLANES]; + bool rgb; + + struct v4l2_rect dst_rect; + int32_t src_color_space; + int32_t dst_color_space; + int32_t src_planes; + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + if (gsc->src_info.stream_on != false) { + ALOGE("Error: Src is already streamed on !!!!"); + return -1; + } + + memcpy(&gsc->src_img, src_img, sizeof(exynos_mpp_img)); + memcpy(&gsc->dst_img, dst_img, sizeof(exynos_mpp_img)); + src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(src_img->format); + dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(dst_img->format); + src_planes = m_gsc_get_plane_count(src_color_space); + src_planes = (src_planes == -1) ? 1 : src_planes; + rgb = get_yuv_planes(dst_color_space) == -1; + CGscaler::rotateValueHAL2GSC(dst_img->rot, &rotate, &hflip, &vflip); + + if (CGscaler::m_gsc_check_src_size(&gsc->src_img.fw, + &gsc->src_img.fh, &gsc->src_img.x, &gsc->src_img.y, + &gsc->src_img.w, &gsc->src_img.h, src_color_space, + (rotate == 90 || rotate == 270)) == false) { + ALOGE("%s::m_gsc_check_src_size() fail", __func__); + return -1; + } + + /*set: src v4l2_buffer*/ + gsc->src_info.buf.buf_idx = 0; + gsc->src_info.qbuf_cnt = 0; + /* set format: src pad of GSC sub-dev*/ + sd_fmt.pad = GSCALER_SUBDEV_PAD_SOURCE; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + if (gsc->out_mode == GSC_OUT_FIMD) { + sd_fmt.format.width = gsc->dst_img.fw; + sd_fmt.format.height = gsc->dst_img.fh; + } else { + sd_fmt.format.width = gsc->dst_img.w; + sd_fmt.format.height = gsc->dst_img.h; + } + sd_fmt.format.code = rgb ? 
V4L2_MBUS_FMT_XRGB8888_4X8_LE : + V4L2_MBUS_FMT_YUV8_1X24; + if (exynos_subdev_s_fmt(gsc->mdev.gsc_sd_entity->fd, &sd_fmt) < 0) { + ALOGE("%s::GSC subdev set format failed", __func__); + return -1; + } + + /* set crop: src crop of GSC sub-dev*/ + sd_crop.pad = GSCALER_SUBDEV_PAD_SOURCE; + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + if (gsc->out_mode == GSC_OUT_FIMD) { + sd_crop.rect.left = gsc->dst_img.x; + sd_crop.rect.top = gsc->dst_img.y; + sd_crop.rect.width = gsc->dst_img.w; + sd_crop.rect.height = gsc->dst_img.h; + } else { + sd_crop.rect.left = 0; + sd_crop.rect.top = 0; + sd_crop.rect.width = gsc->dst_img.w; + sd_crop.rect.height = gsc->dst_img.h; + } + + /* sink pad is connected to GSC out */ + /* set format: sink sub-dev */ + if (gsc->out_mode == GSC_OUT_FIMD) { + sd_fmt.pad = FIMD_SUBDEV_PAD_SINK; + sd_fmt.format.width = gsc->dst_img.w; + sd_fmt.format.height = gsc->dst_img.h; + } else { + sd_fmt.pad = MIXER_V_SUBDEV_PAD_SINK; + sd_fmt.format.width = gsc->dst_img.w + gsc->dst_img.x*2; + sd_fmt.format.height = gsc->dst_img.h + gsc->dst_img.y*2; + } + + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.code = rgb ? V4L2_MBUS_FMT_XRGB8888_4X8_LE : + V4L2_MBUS_FMT_YUV8_1X24; + if (exynos_subdev_s_fmt(gsc->mdev.sink_sd_entity->fd, &sd_fmt) < 0) { + ALOGE("%s::sink:set format failed (PAD=%d)", __func__, + sd_fmt.pad); + return -1; + } + + /* set crop: sink sub-dev */ + if (gsc->out_mode == GSC_OUT_FIMD) + sd_crop.pad = FIMD_SUBDEV_PAD_SINK; + else + sd_crop.pad = MIXER_V_SUBDEV_PAD_SINK; + + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + if (gsc->out_mode == GSC_OUT_FIMD) { + sd_crop.rect.left = gsc->dst_img.x; + sd_crop.rect.top = gsc->dst_img.y; + sd_crop.rect.width = gsc->dst_img.w; + sd_crop.rect.height = gsc->dst_img.h; + } else { + sd_crop.rect.left = 0; + sd_crop.rect.top = 0; + sd_crop.rect.width = gsc->dst_img.w; + sd_crop.rect.height = gsc->dst_img.h; + } + + if (gsc->out_mode != GSC_OUT_FIMD) { + sd_fmt.pad = MIXER_V_SUBDEV_PAD_SOURCE; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.width = gsc->dst_img.w + gsc->dst_img.x*2; + sd_fmt.format.height = gsc->dst_img.h + gsc->dst_img.y*2; + sd_fmt.format.code = V4L2_MBUS_FMT_XRGB8888_4X8_LE; + if (exynos_subdev_s_fmt(gsc->mdev.sink_sd_entity->fd, &sd_fmt) < 0) { + ALOGE("%s::sink:set format failed (PAD=%d)", __func__, + sd_fmt.pad); + return -1; + } + + sd_fmt.pad = MIXER_V_SUBDEV_PAD_SOURCE; + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_crop.rect.left = gsc->dst_img.x; + sd_crop.rect.top = gsc->dst_img.y; + sd_crop.rect.width = gsc->dst_img.w; + sd_crop.rect.height = gsc->dst_img.h; + if (exynos_subdev_s_crop(gsc->mdev.sink_sd_entity->fd, &sd_crop) < 0) { + ALOGE("%s::sink: subdev set crop failed(PAD=%d)", __func__, + sd_crop.pad); + return -1; + } + } + + /*set GSC ctrls */ + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, V4L2_CID_ROTATE, + rotate) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_ROTATE: %d) failed", + __func__, rotate); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, V4L2_CID_HFLIP, + vflip) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_HFLIP: %d) failed", + __func__, vflip); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, V4L2_CID_VFLIP, + hflip) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_VFLIP: %d) failed", + __func__, hflip); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CACHEABLE, 1) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_CACHEABLE: 1) failed", + __func__); + return 
-1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CONTENT_PROTECTION, gsc->src_img.drmMode) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CONTENT_PROTECTION) fail", + __func__); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CSC_EQ_MODE, gsc->eq_auto) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_EQ_MODE) fail", __func__); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CSC_EQ, gsc->v4l2_colorspace) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_EQ) fail", __func__); + return -1; + } + + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CSC_RANGE, gsc->range_full) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_RANGE) fail", __func__); + return -1; + } + + /* set src format :GSC video dev*/ + fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + fmt.fmt.pix_mp.width = gsc->src_img.fw; + fmt.fmt.pix_mp.height = gsc->src_img.fh; + fmt.fmt.pix_mp.pixelformat = src_color_space; + fmt.fmt.pix_mp.field = V4L2_FIELD_NONE; + fmt.fmt.pix_mp.num_planes = src_planes; + + if (exynos_v4l2_s_fmt(gsc->mdev.gsc_vd_entity->fd, &fmt) < 0) { + ALOGE("%s::videodev set format failed", __func__); + return -1; + } + + /* set src crop info :GSC video dev*/ + crop.type = fmt.type; + crop.c.left = gsc->src_img.x; + crop.c.top = gsc->src_img.y; + crop.c.width = gsc->src_img.w; + crop.c.height = gsc->src_img.h; + + if (exynos_v4l2_s_crop(gsc->mdev.gsc_vd_entity->fd, &crop) < 0) { + ALOGE("%s::videodev set crop failed", __func__); + return -1; + } + + reqbuf.type = fmt.type; + reqbuf.memory = V4L2_MEMORY_DMABUF; + reqbuf.count = MAX_BUFFERS_GSCALER_OUT; + + if (exynos_v4l2_reqbufs(gsc->mdev.gsc_vd_entity->fd, &reqbuf) < 0) { + ALOGE("%s::request buffers failed", __func__); + return -1; + } + + Exynos_gsc_Out(); + + return 0; +} + +int CGscaler::m_gsc_cap_config(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + Exynos_gsc_In(); + + struct v4l2_format fmt; + struct v4l2_crop crop; + struct v4l2_requestbuffers reqbuf; + struct v4l2_subdev_format sd_fmt; + struct v4l2_subdev_crop sd_crop; + int i; + unsigned int rotate; + unsigned int hflip; + unsigned int vflip; + unsigned int plane_size[NUM_OF_GSC_PLANES]; + bool rgb; + + struct v4l2_rect dst_rect; + int32_t src_color_space; + int32_t dst_color_space; + int32_t dst_planes; + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + memcpy(&gsc->src_img, src_img, sizeof(exynos_mpp_img)); + memcpy(&gsc->dst_img, dst_img, sizeof(exynos_mpp_img)); + src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(src_img->format); + dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(dst_img->format); + dst_planes = m_gsc_get_plane_count(dst_color_space); + dst_planes = (dst_planes == -1) ? 
1 : dst_planes; + rgb = get_yuv_planes(src_color_space) == -1; + CGscaler::rotateValueHAL2GSC(src_img->rot, &rotate, &hflip, &vflip); + + if (CGscaler::m_gsc_check_src_size(&gsc->src_img.fw, + &gsc->src_img.fh, &gsc->src_img.x, &gsc->src_img.y, + &gsc->src_img.w, &gsc->src_img.h, src_color_space, + (rotate == 90 || rotate == 270)) == false) { + ALOGE("%s::m_gsc_check_src_size() fail", __func__); + return -1; + } + + /*set GSC ctrls */ + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, V4L2_CID_ROTATE, + rotate) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_ROTATE: %d) failed", + __func__, rotate); + return -1; + } + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, V4L2_CID_HFLIP, + vflip) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_HFLIP: %d) failed", + __func__, vflip); + return -1; + } + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, V4L2_CID_VFLIP, + hflip) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_VFLIP: %d) failed", + __func__, hflip); + return -1; + } + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CACHEABLE, 1) < 0) { + ALOGE("%s:: exynos_v4l2_s_ctrl (V4L2_CID_CACHEABLE: 1) failed", + __func__); + return -1; + } + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CONTENT_PROTECTION, gsc->src_img.drmMode) < 0) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CONTENT_PROTECTION) fail", + __func__); + return -1; + } + if (exynos_v4l2_s_ctrl(gsc->mdev.gsc_vd_entity->fd, + V4L2_CID_CSC_RANGE, gsc->range_full)) { + ALOGE("%s::exynos_v4l2_s_ctrl(V4L2_CID_CSC_RANGE: %d) fail", + __func__, gsc->range_full); + return -1; + } + /* set format: source pad of Decon-TV sub-dev*/ + sd_fmt.pad = DECON_TV_WB_PAD; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.width = gsc->src_img.w; + sd_fmt.format.height = gsc->src_img.h; + sd_fmt.format.code = WB_PATH_FORMAT; + if (exynos_subdev_s_fmt(gsc->mdev.sink_sd_entity->fd, &sd_fmt) < 0) { + ALOGE("%s::Decon-TV subdev set format failed", __func__); + return -1; + } + + if (!gsc->dst_info.stream_on) { + /* set src format: GSC video dev*/ + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + fmt.fmt.pix_mp.width = gsc->dst_img.fw; + fmt.fmt.pix_mp.height = gsc->dst_img.fh; + fmt.fmt.pix_mp.pixelformat = dst_color_space; + fmt.fmt.pix_mp.field = V4L2_FIELD_NONE; + fmt.fmt.pix_mp.num_planes = dst_planes; + + if (exynos_v4l2_s_fmt(gsc->mdev.gsc_vd_entity->fd, &fmt) < 0) { + ALOGE("%s::videodev set format failed", __func__); + return -1; + } + gsc->dst_info.buf.buf_idx = 0; + gsc->dst_info.qbuf_cnt = 0; + } + + /* set format: sink pad of GSC sub-dev*/ + sd_fmt.pad = GSCALER_SUBDEV_PAD_SINK; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.width = gsc->src_img.w; + sd_fmt.format.height = gsc->src_img.h; + sd_fmt.format.code = WB_PATH_FORMAT; + if (exynos_subdev_s_fmt(gsc->mdev.gsc_sd_entity->fd, &sd_fmt) < 0) { + ALOGE("%s::GSC subdev set format failed", __func__); + return -1; + } + + /* set src crop info :GSC video dev*/ + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + crop.c.left = gsc->dst_img.x; + crop.c.top = gsc->dst_img.y; + crop.c.width = gsc->dst_img.w; + crop.c.height = gsc->dst_img.h; + if (exynos_v4l2_s_crop(gsc->mdev.gsc_vd_entity->fd, &crop) < 0) { + ALOGE("%s::videodev set crop failed", __func__); + return -1; + } + + /* set crop: src crop of GSC sub-dev*/ + sd_crop.pad = GSCALER_SUBDEV_PAD_SINK; + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_crop.rect.left = 0; + sd_crop.rect.top = 0; + sd_crop.rect.width = gsc->src_img.w; + sd_crop.rect.height = gsc->src_img.h; + + if 
(exynos_subdev_s_crop(gsc->mdev.gsc_sd_entity->fd, &sd_crop) < 0) { + ALOGE("%s::GSC subdev set crop failed(PAD=%d)", __func__, + sd_crop.pad); + return -1; + } + reqbuf.type = fmt.type; + reqbuf.memory = V4L2_MEMORY_DMABUF; + reqbuf.count = MAX_BUFFERS_GSCALER_CAP; + + if (!gsc->dst_info.stream_on) { + if (exynos_v4l2_reqbufs(gsc->mdev.gsc_vd_entity->fd, &reqbuf) < 0) { + ALOGE("%s::request buffers failed", __func__); + return -1; + } + } + + Exynos_gsc_Out(); + + return 0; +} + + +void CGscaler::rotateValueHAL2GSC(unsigned int transform, + unsigned int *rotate, unsigned int *hflip, unsigned int *vflip) +{ + int rotate_flag = transform & 0x7; + *rotate = 0; + *hflip = 0; + *vflip = 0; + + switch (rotate_flag) { + case HAL_TRANSFORM_ROT_90: + *rotate = 90; + break; + case HAL_TRANSFORM_ROT_180: + *rotate = 180; + break; + case HAL_TRANSFORM_ROT_270: + *rotate = 270; + break; + case HAL_TRANSFORM_FLIP_H | HAL_TRANSFORM_ROT_90: + *rotate = 90; + *vflip = 1; /* set vflip to compensate the rot & flip order. */ + break; + case HAL_TRANSFORM_FLIP_V | HAL_TRANSFORM_ROT_90: + *rotate = 90; + *hflip = 1; /* set hflip to compensate the rot & flip order. */ + break; + case HAL_TRANSFORM_FLIP_H: + *hflip = 1; + break; + case HAL_TRANSFORM_FLIP_V: + *vflip = 1; + break; + default: + break; + } +} + +int CGscaler::m_gsc_m2m_run(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + Exynos_gsc_In(); + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + void *addr[3] = {NULL, NULL, NULL}; + int ret = 0; + + addr[0] = (void *)src_img->yaddr; + addr[1] = (void *)src_img->uaddr; + addr[2] = (void *)src_img->vaddr; + ret = exynos_gsc_set_src_addr(handle, addr, src_img->mem_type, + src_img->acquireFenceFd); + if (ret < 0) { + ALOGE("%s::fail: exynos_gsc_set_src_addr[%p %p %p]", __func__, + addr[0], addr[1], addr[2]); + return -1; + } + + addr[0] = (void *)dst_img->yaddr; + addr[1] = (void *)dst_img->uaddr; + addr[2] = (void *)dst_img->vaddr; + ret = exynos_gsc_set_dst_addr(handle, addr, dst_img->mem_type, + dst_img->acquireFenceFd); + if (ret < 0) { + ALOGE("%s::fail: exynos_gsc_set_dst_addr[%p %p %p]", __func__, + addr[0], addr[1], addr[2]); + return -1; + } + + ret = gsc->m_gsc_m2m_run_core(handle); + if (ret < 0) { + ALOGE("%s::fail: m_gsc_m2m_run_core", __func__); + return -1; + } + + if (src_img->acquireFenceFd >= 0) { + close(src_img->acquireFenceFd); + src_img->acquireFenceFd = -1; + } + + if (dst_img->acquireFenceFd >= 0) { + close(dst_img->acquireFenceFd); + dst_img->acquireFenceFd = -1; + } + + src_img->releaseFenceFd = gsc->src_info.releaseFenceFd; + dst_img->releaseFenceFd = gsc->dst_info.releaseFenceFd; + + Exynos_gsc_Out(); + + return 0; +} + +int CGscaler::m_gsc_out_run(void *handle, exynos_mpp_img *src_img) +{ + struct v4l2_plane planes[NUM_OF_GSC_PLANES]; + struct v4l2_buffer buf; + int32_t src_color_space; + int32_t src_planes; + unsigned int i; + unsigned int plane_size[NUM_OF_GSC_PLANES]; + int ret = 0; + unsigned int dq_retry_cnt = 0; + + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + /* All buffers have been queued, dequeue one */ + if (gsc->src_info.qbuf_cnt == MAX_BUFFERS_GSCALER_OUT) { + memset(&buf, 0, sizeof(struct v4l2_buffer)); + for (i = 0; i < NUM_OF_GSC_PLANES; i++) + memset(&planes[i], 0, sizeof(struct v4l2_plane)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.memory = V4L2_MEMORY_DMABUF; + 
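+		/*
+		 * Descriptive note (editor-added comment): the output queue already
+		 * holds MAX_BUFFERS_GSCALER_OUT buffers, so attach the plane array,
+		 * derive the plane count from the source format, and dequeue one
+		 * completed buffer (retrying on -EAGAIN a bounded number of times)
+		 * to free a slot before the new frame is queued below.
+		 */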
buf.m.planes = planes; + + src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(gsc->src_img.format); + src_planes = m_gsc_get_plane_count(src_color_space); + src_planes = (src_planes == -1) ? 1 : src_planes; + buf.length = src_planes; + + + do { + ret = exynos_v4l2_dqbuf(gsc->mdev.gsc_vd_entity->fd, &buf); + if (ret == -EAGAIN) { + ALOGE("%s::Retry DQbuf(index=%d)", __func__, buf.index); + usleep(10000); + dq_retry_cnt++; + continue; + } + break; + } while (dq_retry_cnt <= 10); + + if (ret < 0) { + ALOGE("%s::dq buffer failed (index=%d)", __func__, buf.index); + return -1; + } + gsc->src_info.qbuf_cnt--; + } + + memset(&buf, 0, sizeof(struct v4l2_buffer)); + for (i = 0; i < NUM_OF_GSC_PLANES; i++) + memset(&planes[i], 0, sizeof(struct v4l2_plane)); + + src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(gsc->src_img.format); + src_planes = m_gsc_get_plane_count(src_color_space); + src_planes = (src_planes == -1) ? 1 : src_planes; + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.memory = V4L2_MEMORY_DMABUF; + buf.flags = 0; + buf.length = src_planes; + buf.index = gsc->src_info.buf.buf_idx; + buf.m.planes = planes; + buf.reserved = -1; + + gsc->src_info.buf.addr[0] = (void*)src_img->yaddr; + gsc->src_info.buf.addr[1] = (void*)src_img->uaddr; + gsc->src_info.buf.addr[2] = (void*)src_img->vaddr; + + if (CGscaler::tmp_get_plane_size(src_color_space, plane_size, + gsc->src_img.fw, gsc->src_img.fh, src_planes) != true) { + ALOGE("%s:get_plane_size:fail", __func__); + return -1; + } + + for (i = 0; i < buf.length; i++) { + buf.m.planes[i].m.fd = (long)gsc->src_info.buf.addr[i]; + buf.m.planes[i].length = plane_size[i]; + buf.m.planes[i].bytesused = plane_size[i]; + } + + /* Queue the buf */ + if (exynos_v4l2_qbuf(gsc->mdev.gsc_vd_entity->fd, &buf) < 0) { + ALOGE("%s::queue buffer failed (index=%d)(mSrcBufNum=%d)", + __func__, gsc->src_info.buf.buf_idx, + MAX_BUFFERS_GSCALER_OUT); + return -1; + } + gsc->src_info.buf.buf_idx++; + gsc->src_info.buf.buf_idx = + gsc->src_info.buf.buf_idx % MAX_BUFFERS_GSCALER_OUT; + gsc->src_info.qbuf_cnt++; + + if (gsc->src_info.stream_on == false) { + if (exynos_v4l2_streamon(gsc->mdev.gsc_vd_entity->fd, + (v4l2_buf_type)buf.type) < 0) { + ALOGE("%s::stream on failed", __func__); + return -1; + } + gsc->src_info.stream_on = true; + } + + return 0; +} + +int CGscaler::m_gsc_cap_run(void *handle, exynos_mpp_img *dst_img) +{ + struct v4l2_plane planes[NUM_OF_GSC_PLANES]; + struct v4l2_buffer buf; + int32_t dst_color_space; + int32_t dst_planes; + unsigned int i; + unsigned int plane_size[NUM_OF_GSC_PLANES]; + CGscaler* gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + /* All buffers have been queued, dequeue one */ + if (gsc->dst_info.qbuf_cnt == MAX_BUFFERS_GSCALER_CAP) { + memset(&buf, 0, sizeof(struct v4l2_buffer)); + for (i = 0; i < NUM_OF_GSC_PLANES; i++) + memset(&planes[i], 0, sizeof(struct v4l2_plane)); + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.memory = V4L2_MEMORY_DMABUF; + buf.m.planes = planes; + + dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(gsc->dst_img.format); + dst_planes = m_gsc_get_plane_count(dst_color_space); + dst_planes = (dst_planes == -1) ? 
1 : dst_planes; + buf.length = dst_planes; + + + if (exynos_v4l2_dqbuf(gsc->mdev.gsc_vd_entity->fd, &buf) < 0) { + ALOGE("%s::dequeue buffer failed (index=%d)(mSrcBufNum=%d)", + __func__, gsc->src_info.buf.buf_idx, + MAX_BUFFERS_GSCALER_CAP); + return -1; + } + gsc->dst_info.qbuf_cnt--; + } + + memset(&buf, 0, sizeof(struct v4l2_buffer)); + for (i = 0; i < NUM_OF_GSC_PLANES; i++) + memset(&planes[i], 0, sizeof(struct v4l2_plane)); + + dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(gsc->dst_img.format); + dst_planes = m_gsc_get_plane_count(dst_color_space); + dst_planes = (dst_planes == -1) ? 1 : dst_planes; + + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.memory = V4L2_MEMORY_DMABUF; + buf.flags = V4L2_BUF_FLAG_USE_SYNC; + buf.length = dst_planes; + buf.index = gsc->dst_info.buf.buf_idx; + buf.m.planes = planes; + buf.reserved = dst_img->acquireFenceFd; + + gsc->dst_info.buf.addr[0] = (void*)dst_img->yaddr; + gsc->dst_info.buf.addr[1] = (void*)dst_img->uaddr; + gsc->dst_info.buf.addr[2] = (void*)dst_img->vaddr; + + if (CGscaler::tmp_get_plane_size(dst_color_space, plane_size, + gsc->dst_img.fw, gsc->dst_img.fh, dst_planes) != true) { + ALOGE("%s:get_plane_size:fail", __func__); + return -1; + } + + for (i = 0; i < buf.length; i++) { + buf.m.planes[i].m.fd = (int)(long)gsc->dst_info.buf.addr[i]; + buf.m.planes[i].length = plane_size[i]; + buf.m.planes[i].bytesused = plane_size[i]; + } + + /* Queue the buf */ + if (exynos_v4l2_qbuf(gsc->mdev.gsc_vd_entity->fd, &buf) < 0) { + ALOGE("%s::queue buffer failed (index=%d)(mDstBufNum=%d)", + __func__, gsc->dst_info.buf.buf_idx, + MAX_BUFFERS_GSCALER_CAP); + return -1; + } + + gsc->dst_info.buf.buf_idx++; + gsc->dst_info.buf.buf_idx = + gsc->dst_info.buf.buf_idx % MAX_BUFFERS_GSCALER_CAP; + gsc->dst_info.qbuf_cnt++; + + if (gsc->dst_info.stream_on == false) { + if (exynos_v4l2_streamon(gsc->mdev.gsc_vd_entity->fd, + (v4l2_buf_type)buf.type) < 0) { + ALOGE("%s::stream on failed", __func__); + return -1; + } + gsc->dst_info.stream_on = true; + } + + dst_img->releaseFenceFd = buf.reserved; + return 0; +} + +bool CGscaler::tmp_get_plane_size(int V4L2_PIX, + unsigned int * size, unsigned int width, unsigned int height, int src_planes) +{ + unsigned int frame_ratio = 1; + int src_bpp = get_yuv_bpp(V4L2_PIX); + unsigned int frame_size = width * height; + + src_planes = (src_planes == -1) ? 
1 : src_planes; + frame_ratio = 8 * (src_planes -1) / (src_bpp - 8); + + switch (src_planes) { + case 1: + switch (V4L2_PIX) { + case V4L2_PIX_FMT_BGR32: + case V4L2_PIX_FMT_RGB32: + size[0] = frame_size << 2; + break; + case V4L2_PIX_FMT_RGB565: + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_VYUY: + case V4L2_PIX_FMT_YVYU: + size[0] = frame_size << 1; + break; + case V4L2_PIX_FMT_YUV420: + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + case V4L2_PIX_FMT_NV21M: + size[0] = (frame_size * 3) >> 1; + break; + case V4L2_PIX_FMT_YVU420: + size[0] = frame_size + (ALIGN((width >> 1), 16) * ((height >> 1) * 2)); + break; + default: + ALOGE("%s::invalid color type (%x)", __func__, V4L2_PIX); + return false; + break; + } + size[1] = 0; + size[2] = 0; + break; + case 2: + size[0] = frame_size; + size[1] = frame_size / frame_ratio; + size[2] = 0; + break; + case 3: + size[0] = frame_size; + size[1] = frame_size / frame_ratio; + size[2] = frame_size / frame_ratio; + break; + default: + ALOGE("%s::invalid color foarmt", __func__); + return false; + break; + } + + return true; +} + +int CGscaler::ConfigMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst) +{ + return exynos_gsc_config_exclusive(handle, src, dst); +} + +int CGscaler::ConfigBlendMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst, + SrcBlendInfo *srcblendinfo) +{ + return exynos_gsc_config_blend_exclusive(handle, src, dst, srcblendinfo); +} + +int CGscaler::RunMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst) +{ + return exynos_gsc_run_exclusive(handle, src, dst); +} + +int CGscaler::StopMpp(void *handle) +{ + return exynos_gsc_stop_exclusive(handle); +} + +void CGscaler::DestroyMpp(void *handle) +{ + return exynos_gsc_destroy(handle); +} + +int CGscaler::SetCSCProperty(void *handle, unsigned int eqAuto, + unsigned int fullRange, unsigned int colorspace) +{ + return exynos_gsc_set_csc_property(handle, eqAuto, fullRange, + colorspace); +} + +int CGscaler::FreeMpp(void *handle) +{ + return exynos_gsc_free_and_close(handle); +} + +int CGscaler::SetInputCrop(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + struct v4l2_crop crop; + int ret = 0; + CGscaler *gsc = GetGscaler(handle); + if (gsc == NULL) { + ALOGE("%s::handle == NULL() fail", __func__); + return -1; + } + + crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + crop.c.left = src_img->x; + crop.c.top = src_img->y; + crop.c.width = src_img->w; + crop.c.height = src_img->h; + + return exynos_v4l2_s_crop(gsc->mdev.gsc_vd_entity->fd, &crop); +} diff --git a/libgscaler/libgscaler_obj.h b/libgscaler/libgscaler_obj.h new file mode 100644 index 0000000..fa8c8e0 --- /dev/null +++ b/libgscaler/libgscaler_obj.h @@ -0,0 +1,270 @@ +/* + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+*/ + +#ifndef LIBGSCALER_OBJ_H_ +#define LIBGSCALER_OBJ_H_ + +#ifdef __cplusplus +extern "C" { +#endif + +//#define LOG_NDEBUG 0 +#define LOG_TAG "libexynosgscaler" +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "exynos_gscaler.h" + +#include "exynos_format.h" +#include "exynos_v4l2.h" +#include "LibMpp.h" + +#include "exynos_scaler.h" + +#define NUM_OF_GSC_PLANES (3) +#define MAX_BUFFERS_GSCALER_OUT (10) +#define MAX_BUFFERS_GSCALER_CAP (1) +#define GSCALER_SUBDEV_PAD_SINK (0) +#define GSCALER_SUBDEV_PAD_SOURCE (1) +#define MIXER_V_SUBDEV_PAD_SINK (0) +#define MIXER_V_SUBDEV_PAD_SOURCE (3) +#define FIMD_SUBDEV_PAD_SINK (0) +#define DECON_TV_WB_PAD (0) +#define MAX_BUFFERS (6) + +#define NUM_OF_GSC_HW (4) +#define NODE_NUM_GSC_0 (23) +#define NODE_NUM_GSC_1 (26) +#define NODE_NUM_GSC_2 (29) +#define NODE_NUM_GSC_3 (32) + +#define PFX_NODE_GSC "/dev/video" +#define PFX_NODE_MEDIADEV "/dev/media" +#define PFX_MXR_ENTITY "s5p-mixer%d" +#define PFX_FIMD_ENTITY "s3c-fb-window%d" +#if defined(USES_DT) +#define PFX_GSC_VIDEODEV_ENTITY0 "13c00000.gsc.output" +#define PFX_GSC_VIDEODEV_ENTITY1 "13c10000.gsc.output" +#define PFX_GSC_VIDEODEV_ENTITY2 "13c20000.gsc.output" +#else +#define PFX_GSC_VIDEODEV_ENTITY "exynos-gsc.%d.output" +#endif +#define PFX_GSC_CAPTURE_ENTITY "13c20000.gsc.capture" +#define PFX_GSC_SUBDEV_ENTITY "exynos-gsc-sd.%d" +#define PFX_SUB_DEV "/dev/v4l-subdev%d" +#define GSC_WB_SD_NAME "gsc-wb-sd" +#define DEX_WB_SD_NAME "dex-wb-sd" +#define GSC_VD_PAD_SOURCE 0 +#define GSC_SD_PAD_SINK 0 +#define GSC_SD_PAD_SOURCE 1 +#define GSC_OUT_PAD_SINK 0 +#define WB_PATH_FORMAT 0x100D; + +#define GSC_MIN_SRC_W_SIZE (64) +#define GSC_MIN_SRC_H_SIZE (32) +#define GSC_MIN_DST_W_SIZE (32) +#define GSC_MIN_DST_H_SIZE (16) + +#define MAX_GSC_WAITING_TIME_FOR_TRYLOCK (16000) // 16msec +#define GSC_WAITING_TIME_FOR_TRYLOCK (8000) // 8msec + +typedef struct GscalerInfo { + unsigned int width; + unsigned int height; + unsigned int crop_left; + unsigned int crop_top; + unsigned int crop_width; + unsigned int crop_height; + unsigned int v4l2_colorformat; + unsigned int mode_drm; + unsigned int cacheable; + int rotation; + int flip_horizontal; + int flip_vertical; + int qbuf_cnt; + int acquireFenceFd; + int releaseFenceFd; + bool stream_on; + bool dirty; + struct v4l2_format format; + struct v4l2_crop crop; + struct Buffer_Info { + enum v4l2_memory mem_type; + enum v4l2_buf_type buf_type; + void *addr[NUM_OF_GSC_PLANES]; + struct v4l2_plane planes[NUM_OF_GSC_PLANES]; + bool buffer_queued; + struct v4l2_buffer buffer; + int buf_idx; + }buf; +}GscInfo; + +struct MediaDevice { + struct media_device *media0; + struct media_device *media1; + struct media_entity *gsc_sd_entity; + struct media_entity *gsc_vd_entity; + struct media_entity *sink_sd_entity; +}; + +class CGscaler : public LibMpp { +public: + GscInfo src_info; + GscInfo dst_info; + exynos_mpp_img src_img; + exynos_mpp_img dst_img; + MediaDevice mdev; + int out_mode; + int gsc_id; + bool allow_drm; + bool protection_enabled; + int gsc_fd; + int mode; + unsigned int eq_auto; /* 0: user, 1: auto */ + unsigned int range_full; /* 0: narrow, 1: full */ + unsigned int v4l2_colorspace; /* 1: 601, 3: 709, see csc.h or videodev2.h */ + void *scaler; + + void __InitMembers(int __mode, int __out_mode, int __gsc_id,int __allow_drm) + { + memset(&mdev, 0, sizeof(mdev)); + scaler = NULL; + + mode = __mode; + out_mode = __out_mode; + gsc_id = __gsc_id; + allow_drm = 
__allow_drm; + } + + CGscaler(int __mode) + { + memset(&src_info, 0, sizeof(GscInfo)); + memset(&dst_info, 0, sizeof(GscInfo)); + memset(&src_img, 0, sizeof(exynos_mpp_img)); + memset(&dst_img, 0, sizeof(exynos_mpp_img)); + mode = __mode; + protection_enabled = false; + gsc_fd = -1; + src_info.buf.buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + dst_info.buf.buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + eq_auto = 0; /* user mode */ + range_full = 0; /* narrow */ + v4l2_colorspace = 1; /* SMPTE170M (601) */ + __InitMembers(__mode, 0, 0, 0); + } + CGscaler(int __mode, int __out_mode, int __gsc_id, int __allow_drm) + { + memset(&src_info, 0, sizeof(GscInfo)); + memset(&dst_info, 0, sizeof(GscInfo)); + memset(&src_img, 0, sizeof(exynos_mpp_img)); + memset(&dst_img, 0, sizeof(exynos_mpp_img)); + protection_enabled = false; + gsc_fd = -1; + src_info.buf.buf_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + dst_info.buf.buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + eq_auto = 0; /* user mode */ + range_full = 0; /* narrow */ + v4l2_colorspace = 1; /* SMPTE170M (601) */ + __InitMembers(__mode, __out_mode, __gsc_id, __allow_drm); + } + + ~CGscaler() + { + ALOGD("%s", __func__); + } + virtual int ConfigMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst); + virtual int ConfigBlendMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst, + SrcBlendInfo *srcblendinfo); + virtual int RunMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst); + virtual int StopMpp(void *handle); + virtual void DestroyMpp(void *handle); + virtual int SetCSCProperty(void *handle, unsigned int eqAuto, + unsigned int fullRange, unsigned int colorspace); + virtual int FreeMpp(void *handle); + virtual int SetInputCrop(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst); + bool m_gsc_find_and_create(void *handle); + bool m_gsc_out_destroy(void *handle); + bool m_gsc_cap_destroy(void *handle); + bool m_gsc_m2m_destroy(void *handle); + int m_gsc_m2m_create(int dev); + int m_gsc_output_create(void *handle, int dev_num, int out_mode); + int m_gsc_capture_create(void *handle, int dev_num, int out_mode); + int m_gsc_out_stop(void *handle); + int m_gsc_cap_stop(void *handle); + int m_gsc_m2m_stop(void *handle); + int m_gsc_m2m_run_core(void *handle); + int m_gsc_m2m_wait_frame_done(void *handle); + int m_gsc_m2m_config(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img); + int m_gsc_out_config(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img); + int m_gsc_cap_config(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img); + int m_gsc_m2m_run(void *handle, + exynos_mpp_img *src_img, exynos_mpp_img *dst_img); + int m_gsc_out_run(void *handle, exynos_mpp_img *src_img); + int m_gsc_cap_run(void *handle, exynos_mpp_img *dst_img); + static bool m_gsc_set_format(int fd, GscInfo *info); + static unsigned int m_gsc_get_plane_count(int v4l_pixel_format); + static bool m_gsc_set_addr(int fd, GscInfo *info); + static unsigned int m_gsc_get_plane_size( + unsigned int *plane_size, unsigned int width, + unsigned int height, int v4l_pixel_format); + static bool m_gsc_check_src_size(unsigned int *w, unsigned int *h, + unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int v4l2_colorformat, bool rotation); + static bool m_gsc_check_dst_size(unsigned int *w, unsigned int *h, + unsigned int *crop_x, unsigned int *crop_y, + unsigned int *crop_w, unsigned int *crop_h, + int v4l2_colorformat, int rotation); + static int m_gsc_multiple_of_n(int number, int 
N); + static void rotateValueHAL2GSC(unsigned int transform, + unsigned int *rotate, unsigned int *hflip, unsigned int *vflip); + static bool tmp_get_plane_size(int V4L2_PIX, + unsigned int * size, unsigned int width, unsigned int height, int src_planes); +}; + +inline CGscaler *GetGscaler(void* handle) +{ + if (handle == NULL) { + ALOGE("%s::NULL Scaler handle", __func__); + return NULL; + } + + CGscaler *gsc = reinterpret_cast(handle); + + return gsc; +} +#ifdef __cplusplus +} +#endif + +#endif diff --git a/libhdmi/Android.mk b/libhdmi/Android.mk new file mode 100644 index 0000000..7727d14 --- /dev/null +++ b/libhdmi/Android.mk @@ -0,0 +1,53 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 libhwcutils libdisplay libmpp + +LOCAL_CFLAGS += -DLOG_TAG=\"hdmi\" +LOCAL_CFLAGS += -DHLOG_CODE=2 + +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(LOCAL_PATH)/../libdisplay \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp \ + $(TOP)/system/core/libsync/include + +ifeq ($(BOARD_USES_VIRTUAL_DISPLAY_DECON_EXT_WB), true) +LOCAL_SHARED_LIBRARIES += libvirtualdisplay +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libvirtualdisplay +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libvirtualdisplaymodule +endif + +LOCAL_SRC_FILES := \ + ExynosExternalDisplay.cpp dv_timings.c + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhdmimodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libhdmi + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libhdmi/ExynosExternalDisplay.cpp b/libhdmi/ExynosExternalDisplay.cpp new file mode 100644 index 0000000..82eea97 --- /dev/null +++ b/libhdmi/ExynosExternalDisplay.cpp @@ -0,0 +1,1341 @@ +#include "ExynosHWC.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosExternalDisplay.h" +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB +#include "ExynosVirtualDisplayModule.h" +#endif +#include "decon_tv.h" +#include + +extern struct v4l2_dv_timings dv_timings[]; +bool is_same_dv_timings(const struct v4l2_dv_timings *t1, + const struct v4l2_dv_timings *t2) +{ + if (t1->type == t2->type && + t1->bt.width == t2->bt.width && + t1->bt.height == t2->bt.height && + t1->bt.interlaced == t2->bt.interlaced && + t1->bt.polarities == t2->bt.polarities && + t1->bt.pixelclock == t2->bt.pixelclock && + t1->bt.hfrontporch == t2->bt.hfrontporch && + t1->bt.vfrontporch == t2->bt.vfrontporch && + 
t1->bt.vsync == t2->bt.vsync && + t1->bt.vbackporch == t2->bt.vbackporch && + (!t1->bt.interlaced || + (t1->bt.il_vfrontporch == t2->bt.il_vfrontporch && + t1->bt.il_vsync == t2->bt.il_vsync && + t1->bt.il_vbackporch == t2->bt.il_vbackporch))) + return true; + return false; +} + +int ExynosExternalDisplay::getDVTimingsIndex(int preset) +{ + for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + if (preset == preset_index_mappings[i].preset) + return preset_index_mappings[i].dv_timings_index; + } + return -1; +} + +ExynosExternalDisplay::ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(1), + mEnabled(false), + mBlanked(false), + mVirtualOverlayFlag(0), + mRetry(false), + mForceOverlayLayerIndex(-1), + mYuvLayers(0), + mFbNeeded(false), + mFirstFb(0), + mLastFb(0), + mGscUsed(false), + mCurrentGscIndex(0), + mBypassSkipStaticLayer(false), + mLastOverlayWindowIndex(-1), + mLastOverlayLayerIndex(-1), + mGscLayers(0), + mLastFbWindow(NO_FB_NEEDED), + mUseSubtitles(false) +{ + this->mHwc = pdev; + mMPPs[0] = new ExynosMPPModule(this, HDMI_GSC_IDX); + mXres = 0; + mYres = 0; + mXdpi = 0; + mYdpi = 0; + mVsyncPeriod = 0; + mNumMPPs = 1; + mOtfMode = OTF_OFF; + mUseProtectedLayer = false; +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + mUseScreenshootLayer = false; + mLocalExternalDisplayPause = false; +#endif + + memset(mLastLayerHandles, 0, sizeof(mLastLayerHandles)); + memset(&mPostData, 0, sizeof(mPostData)); + memset(&mLastConfigData, 0, sizeof(mLastConfigData)); + memset(mLastGscMap, 0, sizeof(mLastGscMap)); + memset(mLastHandles, 0, sizeof(mLastHandles)); + + for (size_t i = 0; i < MAX_NUM_HDMI_DMA_CH; i++) { + mDmaChannelMaxBandwidth[i] = HDMI_DMA_CH_BW_SET[i]; + mDmaChannelMaxOverlapCount[i] = HDMI_DMA_CH_OVERLAP_CNT_SET[i]; + } +} + +ExynosExternalDisplay::~ExynosExternalDisplay() +{ + delete mMPPs[0]; +} + +bool ExynosExternalDisplay::isOverlaySupported(hwc_layer_1_t &layer, size_t i) +{ + int ret = 0; + + if (layer.flags & HWC_SKIP_LAYER) { + mLayerInfos[i]->mCheckOverlayFlag |= eSkipLayer; + ALOGV("\tlayer %u: skipping", i); + return false; + } + + if (!layer.planeAlpha) { + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedPlaneAlpha; + return false; + } + + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (!handle) { + ALOGV("\tlayer %u: handle is NULL", i); + mLayerInfos[i]->mCheckOverlayFlag |= eInvalidHandle; + return false; + } + + if (visibleWidth(mMPPs[0], layer, handle->format, this->mXres) < BURSTLEN_BYTES) { + ALOGV("\tlayer %u: visible area is too narrow", i); + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedDstWidth; + return false; + } + + if (mMPPs[0]->isProcessingRequired(layer, handle->format)) { + /* Check whether GSC can handle using local or M2M, local is not used */ + ret = mMPPs[0]->isProcessingSupported(layer, handle->format, false); + if (ret < 0) { + ALOGV("\tlayer %u: MPP M2M required but not supported", i); + mLayerInfos[i]->mCheckOverlayFlag |= eMPPUnsupported; + mLayerInfos[i]->mCheckMPPFlag |= -ret; + return false; + } + } else { + if (!isFormatSupported(handle->format)) { + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedFormat; + ALOGV("\tlayer %u: pixel format %u not supported", i, handle->format); + return false; + } + } + if ((layer.blending != HWC_BLENDING_NONE) && + mMPPs[0]->isFormatSupportedByGsc(handle->format) && + !isFormatRgb(handle->format)) { + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedBlending; + return false; + } + + if (!isBlendingSupported(layer.blending)) { + 
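+        /* Descriptive note (editor-added comment): the layer's blending mode
+         * cannot be handled by an HDMI hardware window, so the overlay path
+         * is rejected for this layer. */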
ALOGV("\tlayer %u: blending %d not supported", i, layer.blending); + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedBlending; + return false; + } + if (CC_UNLIKELY(isOffscreen(layer, mXres, mYres))) { + ALOGW("\tlayer %u: off-screen", i); + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedCoordinate; + return false; + } + + return true; +} + +int ExynosExternalDisplay::prepare(hwc_display_contents_1_t* contents) +{ + memset(mPostData.gsc_map, 0, sizeof(mPostData.gsc_map)); + mRetry = false; + + ExynosDisplay::allocateLayerInfos(contents); + + checkGrallocFlags(contents); + + do { + determineYuvOverlay(contents); + determineSupportedOverlays(contents); + determineBandwidthSupport(contents); + assignWindows(contents); + } while (mRetry); +#if defined(GSC_VIDEO) + if ((mHwc->mS3DMode != S3D_MODE_DISABLED) && (this->mYuvLayers == 1) && !mUseSubtitles) { + // UI layers will be skiped when S3D video is playing + mPostData.fb_window = NO_FB_NEEDED; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.compositionType = HWC_OVERLAY; + } + } +#endif + + skipStaticLayers(contents); + + if (mVirtualOverlayFlag) + mFbNeeded = 0; + + if (!mFbNeeded) + mPostData.fb_window = NO_FB_NEEDED; + + /* + * Usually, layer cnt must be more than 1 for HWC 1.1. + * But, Surfaceflinger is passing some special case to HWC with layer cnt is equal to 1. + * To handle that special case: enable fb window if the layer cnt is 1. + */ + if (contents->numHwLayers <= 1) + mPostData.fb_window = 0; + + return 0; +} + + + +void ExynosExternalDisplay::configureHandle(private_handle_t *handle, + hwc_frect_t &sourceCrop, hwc_rect_t &displayFrame, + int32_t blending, int32_t planeAlpha, int fence_fd, s3c_fb_win_config &cfg) +{ + uint32_t x, y; + uint32_t w = WIDTH(displayFrame); + uint32_t h = HEIGHT(displayFrame); + uint8_t bpp = formatToBpp(handle->format); + uint32_t offset = (sourceCrop.top * handle->stride + sourceCrop.left) * bpp / 8; + + if (displayFrame.left < 0) { + unsigned int crop = -displayFrame.left; + ALOGV("layer off left side of screen; cropping %u pixels from left edge", + crop); + x = 0; + w -= crop; + offset += crop * bpp / 8; + } else { + x = displayFrame.left; + } + + if (displayFrame.right > this->mXres) { + unsigned int crop = displayFrame.right - this->mXres; + ALOGV("layer off right side of screen; cropping %u pixels from right edge", + crop); + w -= crop; + } + + if (displayFrame.top < 0) { + unsigned int crop = -displayFrame.top; + ALOGV("layer off top side of screen; cropping %u pixels from top edge", + crop); + y = 0; + h -= crop; + offset += handle->stride * crop * bpp / 8; + } else { + y = displayFrame.top; + } + + if (displayFrame.bottom > this->mYres) { + int crop = displayFrame.bottom - this->mYres; + ALOGV("layer off bottom side of screen; cropping %u pixels from bottom edge", + crop); + h -= crop; + } + cfg.state = cfg.S3C_FB_WIN_STATE_BUFFER; + cfg.fd = handle->fd; + cfg.x = x; + cfg.y = y; + cfg.w = w; + cfg.h = h; + cfg.format = halFormatToS3CFormat(handle->format); + cfg.offset = offset; + cfg.stride = handle->stride * bpp / 8; + cfg.blending = halBlendingToS3CBlending(blending); + cfg.fence_fd = fence_fd; + cfg.plane_alpha = 255; + if (planeAlpha && (planeAlpha < 255)) { + cfg.plane_alpha = planeAlpha; + } + +#ifdef USES_DRM_SETTING_BY_DECON + if (getDrmMode(handle->flags) != NO_DRM) + cfg.protection = 1; + else + cfg.protection = 0; +#endif +} + +void ExynosExternalDisplay::configureOverlay(hwc_layer_1_t *layer, s3c_fb_win_config 
&cfg) +{ + if (layer->compositionType == HWC_BACKGROUND) { + hwc_color_t color = layer->backgroundColor; + cfg.state = cfg.S3C_FB_WIN_STATE_COLOR; + cfg.color = (color.r << 16) | (color.g << 8) | color.b; + cfg.x = 0; + cfg.y = 0; + cfg.w = this->mXres; + cfg.h = this->mYres; + return; + } + + private_handle_t *handle = private_handle_t::dynamicCast(layer->handle); + configureHandle(handle, layer->sourceCropf, layer->displayFrame, + layer->blending, layer->planeAlpha, layer->acquireFenceFd, cfg); + + +} + +int ExynosExternalDisplay::postFrame(hwc_display_contents_1_t* contents) +{ + exynos5_hwc_post_data_t *pdata = &mPostData; + struct s3c_fb_win_config_data win_data; + struct s3c_fb_win_config *config = win_data.config; + int win_map = 0; + int tot_ovly_wins = 0; + + memset(config, 0, sizeof(win_data.config)); + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) + config[i].fence_fd = -1; + + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) { + if ( pdata->overlay_map[i] != -1) + tot_ovly_wins++; + } + if (mVirtualOverlayFlag) + tot_ovly_wins++; + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (mLocalExternalDisplayPause) { + for (int i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.acquireFenceFd > 0) + close(layer.acquireFenceFd); + layer.releaseFenceFd = -1; + } + return 0; + } +#endif + + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) { + int layer_idx = pdata->overlay_map[i]; + if (layer_idx != -1) { + hwc_layer_1_t &layer = contents->hwLayers[layer_idx]; + private_handle_t *handle = + private_handle_t::dynamicCast(layer.handle); + win_map = i + 1; /* Window 0 is background layer, We can't use */ + if (handle == NULL) { + ALOGE("compositionType is OVERLAY but handle is NULL"); + continue; + } +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (layer.flags & HWC_SKIP_HDMI_RENDERING) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } +#endif + if (pdata->gsc_map[i].mode == exynos5_gsc_map_t::GSC_M2M) { + if (mHwc->mS3DMode != S3D_MODE_DISABLED && mHwc->mHdmiResolutionChanged) { + if (isPresetSupported(mHwc->mHdmiPreset)) { + mHwc->mS3DMode = S3D_MODE_RUNNING; + setPreset(mHwc->mHdmiPreset); + } else { + mHwc->mS3DMode = S3D_MODE_RUNNING; + mHwc->mHdmiResolutionChanged = false; + mHwc->mHdmiResolutionHandled = true; + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + mMPPs[0]->mS3DMode = S3D_SBS; + else if (S3DFormat == S3D_TB) + mMPPs[0]->mS3DMode = S3D_TB; + } + } + if (postGscM2M(layer, config, win_map, i) < 0) + continue; + } else { + configureOverlay(&layer, config[win_map]); + } + } + if (i == 0 && config[i].blending != S3C_FB_BLENDING_NONE) { + ALOGV("blending not supported on window 0; forcing BLENDING_NONE"); + config[i].blending = S3C_FB_BLENDING_NONE; + } + + dumpConfig(config[win_map]); + } + +#if defined(GSC_VIDEO) + if ((mHwc->mS3DMode != S3D_MODE_DISABLED) && (this->mYuvLayers == 1) && !mUseSubtitles) + skipUILayers(contents); +#endif + + if (this->mVirtualOverlayFlag) { + handleStaticLayers(contents, win_data, tot_ovly_wins); + } + + if (checkConfigChanged(win_data, mLastConfigData) == false) + { + ExynosMPP &gsc = *mMPPs[0]; + gsc.mCurrentBuf = (gsc.mCurrentBuf + 1) % gsc.mNumAvailableDstBuffers; + return 0; + } + + int ret = ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, &win_data); + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) + if (config[i].fence_fd != -1) + close(config[i].fence_fd); + if (ret < 0) { + ALOGE("ioctl S3CFB_WIN_CONFIG failed: %s", strerror(errno)); + return ret; + } + + if 
(mGscLayers < MAX_HDMI_VIDEO_LAYERS) { + cleanupGscs(); + } + + memcpy(&(this->mLastConfigData), &win_data, sizeof(win_data)); + memcpy(this->mLastGscMap, pdata->gsc_map, sizeof(pdata->gsc_map)); + this->mLastFbWindow = pdata->fb_window; + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) { + int layer_idx = pdata->overlay_map[i]; + if (layer_idx != -1) { + hwc_layer_1_t &layer = contents->hwLayers[layer_idx]; + this->mLastHandles[i] = layer.handle; + } + } + + return win_data.fence; +} + +int ExynosExternalDisplay::clearDisplay() +{ + if (!mEnabled) + return 0; + + struct s3c_fb_win_config_data win_data; + memset(&win_data, 0, sizeof(win_data)); + + int ret = ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, &win_data); + LOG_ALWAYS_FATAL_IF(ret < 0, + "ioctl S3CFB_WIN_CONFIG failed to clear screen: %s", + strerror(errno)); + // the causes of an empty config failing are all unrecoverable + + return win_data.fence; +} + +int ExynosExternalDisplay::set(hwc_display_contents_1_t* contents) +{ + if (!mEnabled || mBlanked) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.acquireFenceFd >= 0) { + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + } + } + return 0; + } + + hwc_layer_1_t *fb_layer = NULL; + int err = 0; + + if (this->mPostData.fb_window != NO_FB_NEEDED) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == + HWC_FRAMEBUFFER_TARGET) { + this->mPostData.overlay_map[this->mPostData.fb_window] = i; + fb_layer = &contents->hwLayers[i]; + break; + } + } + + if (CC_UNLIKELY(!fb_layer)) { + ALOGE("framebuffer target expected, but not provided"); + err = -EINVAL; + } else { + if (fb_layer != NULL) { + dumpLayer(fb_layer); + } + } + } + + int fence; + if (!err) { + fence = postFrame(contents); + if (fence < 0) + err = fence; + } + + if (err) + fence = clearDisplay(); + if (fence == 0) { + //Only happens in mLocalExternalPause scenario, not error + fence = -1; + } else { + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) { + if (this->mPostData.overlay_map[i] != -1) { + hwc_layer_1_t &layer = + contents->hwLayers[this->mPostData.overlay_map[i]]; + int dup_fd = dup(fence); + if (dup_fd < 0) + ALOGW("release fence dup failed: %s", strerror(errno)); + if (this->mPostData.gsc_map[i].mode == exynos5_gsc_map_t::GSC_M2M) { + //int gsc_idx = this->mPostData.gsc_map[i].idx; + ExynosMPP &gsc = *mMPPs[0]; + if (gsc.mDstBufFence[gsc.mCurrentBuf] >= 0) { + close (gsc.mDstBufFence[gsc.mCurrentBuf]); + gsc.mDstBufFence[gsc.mCurrentBuf] = -1; + } + gsc.mDstBufFence[gsc.mCurrentBuf] = dup_fd; + gsc.mCurrentBuf = (gsc.mCurrentBuf + 1) % gsc.mNumAvailableDstBuffers; + } else { + layer.releaseFenceFd = dup_fd; + } + } + } + } + contents->retireFenceFd = fence; + + if (this->mYuvLayers == 0 && !mHwc->local_external_display_pause) { + if (mHwc->mS3DMode == S3D_MODE_RUNNING && contents->numHwLayers > 1) { + int preset = convert3DTo2D(mHwc->mHdmiCurrentPreset); + if (isPresetSupported(preset)) { + setPreset(preset); + mHwc->mS3DMode = S3D_MODE_STOPPING; + mHwc->mHdmiPreset = preset; + if (mHwc->procs) + mHwc->procs->invalidate(mHwc->procs); + } else { + mHwc->mS3DMode = S3D_MODE_DISABLED; + mHwc->mHdmiPreset = mHwc->mHdmiCurrentPreset; + } + } + } + + return err; +} + +void ExynosExternalDisplay::skipStaticLayers(hwc_display_contents_1_t* contents) +{ + static int init_flag = 0; + int last_ovly_lay_idx = -1; + + mVirtualOverlayFlag = 0; + mLastOverlayWindowIndex = -1; + + if 
(!mHwc->hwc_ctrl.skip_static_layer_mode) + return; + + if (mBypassSkipStaticLayer) + return; + + if (contents->flags & HWC_GEOMETRY_CHANGED) { + init_flag = 0; + return; + } + + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) { + if (mPostData.overlay_map[i] != -1) { + last_ovly_lay_idx = mPostData.overlay_map[i]; + mLastOverlayWindowIndex = i; + } + } + + if ((last_ovly_lay_idx == -1) || ((uint32_t)last_ovly_lay_idx >= (contents->numHwLayers - 2)) || + ((contents->numHwLayers - last_ovly_lay_idx - 1) >= NUM_VIRT_OVER)) { + init_flag = 0; + return; + } + mLastOverlayLayerIndex = last_ovly_lay_idx; + last_ovly_lay_idx++; + if (init_flag == 1) { + for (size_t i = last_ovly_lay_idx; i < contents->numHwLayers -1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (!layer.handle || (layer.flags & HWC_SKIP_LAYER) || (mLastLayerHandles[i - last_ovly_lay_idx] != layer.handle)) { + init_flag = 0; + return; + } + } + + mVirtualOverlayFlag = 1; + for (size_t i = last_ovly_lay_idx; i < contents->numHwLayers-1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_FRAMEBUFFER) { + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSkipStaticLayer; + } + } + return; + } + + init_flag = 1; + for (size_t i = last_ovly_lay_idx; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + mLastLayerHandles[i - last_ovly_lay_idx] = layer.handle; + } + for (size_t i = contents->numHwLayers - last_ovly_lay_idx; i < NUM_VIRT_OVER; i++) + mLastLayerHandles[i] = 0; + + return; +} + +void ExynosExternalDisplay::determineYuvOverlay(hwc_display_contents_1_t *contents) +{ + mForceOverlayLayerIndex = -1; + mHasDrmSurface = false; + mYuvLayers = 0; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(handle->flags) != NO_DRM) { + this->mHasDrmSurface = true; + mForceOverlayLayerIndex = i; + break; + } + + /* check yuv surface */ + if (((int)get_yuv_planes(halFormatToV4L2Format(handle->format)) > 0) && + (this->mYuvLayers < MAX_HDMI_VIDEO_LAYERS)) { + this->mYuvLayers++; + mForceOverlayLayerIndex = i; + + } + } + } +} + + +void ExynosExternalDisplay::determineSupportedOverlays(hwc_display_contents_1_t *contents) +{ + bool videoLayer = false; + + mFbNeeded = false; + mFirstFb = mLastFb = 0; + + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) + mPostData.overlay_map[i] = -1; + + // find unsupported overlays + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + ALOGV("\tlayer %u: framebuffer target", i); + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + + if (layer.compositionType == HWC_BACKGROUND) { + ALOGV("\tlayer %u: background supported", i); + dumpLayer(&contents->hwLayers[i]); + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (mLocalExternalDisplayPause) { + layer.compositionType = HWC_OVERLAY; + layer.flags = HWC_SKIP_HDMI_RENDERING; + continue; + } +#endif + + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); +#if defined(GSC_VIDEO) + if ((getDrmMode(handle->flags) != NO_DRM) + || ((!mHwc->force_mirror_mode) && 
((int)get_yuv_planes(halFormatToV4L2Format(handle->format)) > 0) && + (mMPPs[0]->isProcessingSupported(layer, handle->format, false) > 0))) { +#else + if (getDrmMode(handle->flags) != NO_DRM) { +#endif + if (((mUseProtectedLayer == true) && (getDrmMode(handle->flags) != NO_DRM)) || + ((mUseProtectedLayer == false) && !videoLayer)) { + videoLayer = true; + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + ALOGV("\tlayer %u: video layer", i); + dumpLayer(&layer); + continue; + } + } + layer.compositionType = HWC_FRAMEBUFFER; + dumpLayer(&layer); + } else { + mLayerInfos[i]->mCheckOverlayFlag |= eInvalidHandle; + } + + if (!mFbNeeded) { + mFirstFb = i; + mFbNeeded = true; + } + mLastFb = i; + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eUnSupportedUseCase; + + dumpLayer(&contents->hwLayers[i]); + } + + mFirstFb = min(mFirstFb, (size_t)NUM_HDMI_WINDOWS-1); + // can't composite overlays sandwiched between framebuffers + if (mFbNeeded) { + for (size_t i = mFirstFb; i < mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle) { + handle = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(handle->flags) != NO_DRM) + continue; + } + contents->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = contents->hwLayers[i].compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSandwitchedBetweenGLES; + } + } +} + +void ExynosExternalDisplay::determineBandwidthSupport(hwc_display_contents_1_t *contents) +{ + bool changed; + this->mBypassSkipStaticLayer = false; + unsigned int cannotComposeFlag = 0; + + uint32_t pixel_used[MAX_NUM_HDMI_DMA_CH]; + do { + android::Vector rects[MAX_NUM_HDMI_DMA_CH]; + android::Vector overlaps[MAX_NUM_HDMI_DMA_CH]; + int dma_ch_idx; + uint32_t win_idx = 0; + size_t windows_left; + memset(&pixel_used[0], 0, sizeof(pixel_used)); + mGscUsed = false; + + if (mFbNeeded) { + hwc_rect_t fb_rect; + fb_rect.top = fb_rect.left = 0; + fb_rect.right = this->mXres - 1; + fb_rect.bottom = this->mYres - 1; + dma_ch_idx = HDMI_DMA_CH_IDX[mFirstFb]; + pixel_used[dma_ch_idx] = (uint32_t) (this->mXres * this->mYres); + win_idx = (win_idx == mFirstFb) ? 
(win_idx + 1) : win_idx; + windows_left = NUM_HDMI_WINDOWS - 1; + rects[dma_ch_idx].push_back(fb_rect); + } + else { + windows_left = NUM_HDMI_WINDOWS; + } + changed = false; + mGscLayers = 0; + mCurrentGscIndex = 0; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if ((layer.flags & HWC_SKIP_LAYER) || + layer.compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + + private_handle_t *handle = private_handle_t::dynamicCast( + layer.handle); + + // we've already accounted for the framebuffer above + if (layer.compositionType == HWC_FRAMEBUFFER) + continue; + + // only layer 0 can be HWC_BACKGROUND, so we can + // unconditionally allow it without extra checks + if (layer.compositionType == HWC_BACKGROUND) { + windows_left--; + continue; + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (layer.flags & HWC_SKIP_HDMI_RENDERING) + continue; +#endif + + dma_ch_idx = HDMI_DMA_CH_IDX[win_idx]; + + size_t pixels_needed = 0; + if (getDrmMode(handle->flags) != SECURE_DRM) + pixels_needed = getRequiredPixels(layer, mXres, mYres); + else + pixels_needed = WIDTH(layer.displayFrame) * + HEIGHT(layer.displayFrame); + + cannotComposeFlag = 0; + bool can_compose = windows_left && (win_idx < NUM_HDMI_WINDOWS) && + ((pixel_used[dma_ch_idx] + pixels_needed) <= + (uint32_t)this->mDmaChannelMaxBandwidth[dma_ch_idx]); + + if (windows_left <= 0 || (win_idx >= NUM_HDMI_WINDOWS)) + cannotComposeFlag |= eInsufficientWindow; + if ((pixel_used[dma_ch_idx] + pixels_needed) > (uint32_t)this->mDmaChannelMaxBandwidth[dma_ch_idx]) + cannotComposeFlag |= eInsufficientBandwidth; + + bool gsc_required = mMPPs[0]->isProcessingRequired(layer, handle->format); + if (gsc_required) { + if (mGscLayers >= MAX_HDMI_VIDEO_LAYERS) { + can_compose = can_compose && !mGscUsed; + if (mGscUsed) + cannotComposeFlag |= eInsufficientMPP; + } +#if 0 + if (mHwc->hwc_ctrl.num_of_video_ovly <= mGscLayers) + can_compose = false; +#endif + } + // hwc_rect_t right and bottom values are normally exclusive; + // the intersection logic is simpler if we make them inclusive + hwc_rect_t visible_rect = layer.displayFrame; + visible_rect.right--; visible_rect.bottom--; + + if (can_compose) { + switch (this->mDmaChannelMaxOverlapCount[dma_ch_idx]) { + case 1: // It means, no layer overlap is allowed + for (size_t j = 0; j < rects[dma_ch_idx].size(); j++) { + if (intersect(visible_rect, rects[dma_ch_idx].itemAt(j))) { + can_compose = false; + cannotComposeFlag |= eInsufficientOverlapCount; + } + } + break; + case 2: //It means, upto 2 layer overlap is allowed. 
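+                    // overlaps[] holds the pairwise intersections of the rects already
+                    // assigned to this DMA channel, so an intersection here means a
+                    // third layer would overlap the same region.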
+ for (size_t j = 0; j < overlaps[dma_ch_idx].size(); j++) { + if (intersect(visible_rect, overlaps[dma_ch_idx].itemAt(j))) { + can_compose = false; + cannotComposeFlag |= eInsufficientOverlapCount; + } + } + break; + default: + break; + } + if (!can_compose) + this->mBypassSkipStaticLayer = true; + } + + if (!can_compose) { + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= cannotComposeFlag; + if (!mFbNeeded) { + mFirstFb = mLastFb = i; + mFbNeeded = true; + } + else { + mFirstFb = min(i, mFirstFb); + mLastFb = max(i, mLastFb); + } + changed = true; + mFirstFb = min(mFirstFb, (size_t)NUM_HDMI_WINDOWS-1); + break; + } + + for (size_t j = 0; j < rects[dma_ch_idx].size(); j++) { + const hwc_rect_t &other_rect = rects[dma_ch_idx].itemAt(j); + if (intersect(visible_rect, other_rect)) + overlaps[dma_ch_idx].push_back(intersection(visible_rect, other_rect)); + } + + rects[dma_ch_idx].push_back(visible_rect); + pixel_used[dma_ch_idx] += pixels_needed; + win_idx++; + win_idx = (win_idx == mFirstFb) ? (win_idx + 1) : win_idx; + win_idx = min(win_idx, (uint32_t)(NUM_HDMI_WINDOWS - 1)); + windows_left--; + if (gsc_required) { + mGscUsed = true; + mGscLayers++; + } + } + + if (changed) + for (size_t i = mFirstFb; i < mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if ((handle != NULL) && (getDrmMode(handle->flags) != NO_DRM)) + continue; + contents->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = contents->hwLayers[i].compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSandwitchedBetweenGLES; + } + } while(changed); + +} + +void ExynosExternalDisplay::assignWindows(hwc_display_contents_1_t *contents) +{ + unsigned int nextWindow = 0; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (mFbNeeded && i == mFirstFb) { + mPostData.fb_window = nextWindow; + nextWindow++; + continue; + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (layer.flags & HWC_SKIP_HDMI_RENDERING) + continue; +#endif + + if (layer.compositionType != HWC_FRAMEBUFFER && + layer.compositionType != HWC_FRAMEBUFFER_TARGET) { + this->mPostData.overlay_map[nextWindow] = i; + if (layer.compositionType == HWC_OVERLAY) { + private_handle_t *handle = + private_handle_t::dynamicCast(layer.handle); + if (mMPPs[0]->isProcessingRequired(layer, handle->format)) { + if (assignGscLayer(layer, i, nextWindow)) + mCurrentGscIndex++; + } + } + nextWindow++; + } + } +} + +bool ExynosExternalDisplay::assignGscLayer(hwc_layer_1_t __unused &layer, int __unused index, int nextWindow) +{ + mPostData.gsc_map[nextWindow].mode = exynos5_gsc_map_t::GSC_M2M; + mMPPs[0]->setMode(exynos5_gsc_map_t::GSC_M2M); + mPostData.gsc_map[nextWindow].idx = HDMI_GSC_IDX; + return true; +} + +int ExynosExternalDisplay::postGscM2M(hwc_layer_1_t &layer, struct s3c_fb_win_config *config, int win_map, int index) +{ + exynos5_hwc_post_data_t *pdata = &mPostData; + //int gsc_idx = pdata->gsc_map[index].idx; + int gsc_idx = 0; + int dst_format = HAL_PIXEL_FORMAT_RGBX_8888; + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + hwc_frect_t sourceCrop = { 0, 0, + (float)WIDTH(layer.displayFrame), (float)HEIGHT(layer.displayFrame) }; + + /* OFF_Screen to ON_Screen changes */ + if (getDrmMode(handle->flags) == SECURE_DRM) + recalculateDisplayFrame(layer, mXres, mYres); + + int err = 
mMPPs[0]->processM2M(layer, dst_format, &sourceCrop); + if (err < 0) { + ALOGE("failed to configure gscaler %u for layer %u", + gsc_idx, index); + pdata->gsc_map[index].mode = exynos5_gsc_map_t::GSC_NONE; + return -1; + } + + buffer_handle_t dst_buf = mMPPs[0]->mDstBuffers[mMPPs[0]->mCurrentBuf]; + private_handle_t *dst_handle = + private_handle_t::dynamicCast(dst_buf); + int fence = mMPPs[0]->mDstConfig.releaseFenceFd; + configureHandle(dst_handle, sourceCrop, + layer.displayFrame, layer.blending, layer.planeAlpha, fence, + config[win_map]); + +#ifdef USES_DRM_SETTING_BY_DECON + if (getDrmMode(handle->flags) != NO_DRM) + config[win_map].protection = 1; + else + config[win_map].protection = 0; +#endif + return 0; +} + +void ExynosExternalDisplay::handleStaticLayers(hwc_display_contents_1_t *contents, struct s3c_fb_win_config_data &win_data, int __unused tot_ovly_wins) +{ + ALOGV("[USE] SKIP_STATIC_LAYER_COMP\n"); + int last_ovly_win_map = mLastOverlayWindowIndex + 2; + memcpy(&win_data.config[last_ovly_win_map], + &mLastConfigData.config[last_ovly_win_map], sizeof(struct s3c_fb_win_config)); + win_data.config[last_ovly_win_map].fence_fd = -1; + for (size_t i = mLastOverlayLayerIndex + 1; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY) { + ALOGV("[SKIP_STATIC_LAYER_COMP] layer.handle: 0x%p, layer.acquireFenceFd: %d\n", layer.handle, layer.acquireFenceFd); + layer.releaseFenceFd = layer.acquireFenceFd; + } + } +} + +void ExynosExternalDisplay::skipUILayers(hwc_display_contents_1_t *contents) +{ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.flags & HWC_SKIP_LAYER) { + ALOGV("HDMI skipping layer %d", i); + continue; + } + + if (layer.compositionType == HWC_OVERLAY) { + if (!layer.handle) + continue; + + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if ((int)get_yuv_planes(halFormatToV4L2Format(handle->format)) < 0) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + } + } +} + +void ExynosExternalDisplay::cleanupGscs() +{ + mMPPs[0]->cleanupM2M(); + mMPPs[0]->setMode(exynos5_gsc_map_t::GSC_NONE); +} + +int ExynosExternalDisplay::openHdmi() +{ + int ret = 1; + int sw_fd; + + mHwc->externalDisplay->mDisplayFd = open("/dev/graphics/fb1", O_RDWR); + if (mHwc->externalDisplay->mDisplayFd < 0) { + ALOGE("failed to open framebuffer for externalDisplay"); + ret = mHwc->externalDisplay->mDisplayFd; + return ret; + } + + ALOGD("Open fd for HDMI"); + + return ret; +} + +void ExynosExternalDisplay::closeHdmi() +{ + if (mDisplayFd > 0) { + close(mDisplayFd); + ALOGD("Close fd for HDMI"); + } + mDisplayFd = -1; +} + +void ExynosExternalDisplay::setHdmiStatus(bool status) +{ +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + ExynosVirtualDisplayModule *virDisplay = (ExynosVirtualDisplayModule *)mHwc->virtualDisplay; + if (virDisplay->mDisplayFd < 0) { +#endif + if (status) { + char value[PROPERTY_VALUE_MAX]; + property_get("wlan.wfd.status", value, "disconnected"); + bool bWFDDisconnected = !strcmp(value, "disconnected"); + + if (bWFDDisconnected) { + enable(); + } + } else { + disable(); + closeHdmi(); + } +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + } +#endif +} + +bool ExynosExternalDisplay::isPresetSupported(unsigned int preset) +{ + bool found = false; + int index = 0; + int ret = 0; + exynos_hdmi_data hdmi_data; + int dv_timings_index = getDVTimingsIndex(preset); + + if (dv_timings_index < 0) { + ALOGE("%s: unsupported 
preset, %d", __func__, preset); + return -1; + } + + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_ENUM_PRESET; + while (true) { + hdmi_data.etimings.index = index++; + ret = ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data); + + if (ret < 0) { + if (errno == EINVAL) + break; + ALOGE("%s: enum_dv_timings error, %d", __func__, errno); + return -1; + } + + ALOGV("%s: %d width=%d height=%d", + __func__, hdmi_data.etimings.index, + hdmi_data.etimings.timings.bt.width, hdmi_data.etimings.timings.bt.height); + + if (is_same_dv_timings(&hdmi_data.etimings.timings, &dv_timings[dv_timings_index])) { + mXres = hdmi_data.etimings.timings.bt.width; + mYres = hdmi_data.etimings.timings.bt.height; + found = true; + mHwc->mHdmiCurrentPreset = preset; + break; + } + } + return found; +} + +int ExynosExternalDisplay::getDisplayConfigs(uint32_t *configs, size_t *numConfigs) +{ + *numConfigs = 1; + configs[0] = 0; + getConfig(); + return 0; +} + +int ExynosExternalDisplay::getConfig() +{ + if (!mHwc->hdmi_hpd) + return -1; + + exynos_hdmi_data hdmi_data; + int dv_timings_index = 0; + + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_PRESET; + if (ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: g_dv_timings error, %d", __func__, errno); + return -1; + } + + for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + dv_timings_index = preset_index_mappings[i].dv_timings_index; + if (is_same_dv_timings(&hdmi_data.timings, &dv_timings[dv_timings_index])) { + mXres = hdmi_data.timings.bt.width; + mYres = hdmi_data.timings.bt.height; + mHwc->mHdmiCurrentPreset = preset_index_mappings[i].preset; + break; + } + } + ALOGD("HDMI resolution is (%d x %d)", mXres, mYres); + + return 0; +} + +int ExynosExternalDisplay::enable() +{ + if (mEnabled) + return 0; + + if (mBlanked) + return 0; + + char value[PROPERTY_VALUE_MAX]; + property_get("persist.hdmi.hdcp_enabled", value, "1"); + int hdcp_enabled = atoi(value); + + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_HDCP; + hdmi_data.hdcp = hdcp_enabled; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to set HDCP status %d", __func__, errno); + } + + /* "2" is RGB601_16_235 */ + property_get("persist.hdmi.color_range", value, "2"); + int color_range = atoi(value); + +#if 0 // This should be changed + if (exynos_v4l2_s_ctrl(mMixerLayers[mUiIndex].fd, V4L2_CID_TV_SET_COLOR_RANGE, + color_range) < 0) + ALOGE("%s: s_ctrl(CID_TV_COLOR_RANGE) failed %d", __func__, errno); +#endif + + int err = ioctl(mDisplayFd, FBIOBLANK, FB_BLANK_UNBLANK); + if (err < 0) { + if (errno == EBUSY) + ALOGI("unblank ioctl failed (display already unblanked)"); + else + ALOGE("unblank ioctl failed: %s", strerror(errno)); + return -errno; + } + + mEnabled = true; + return 0; +} + +void ExynosExternalDisplay::disable() +{ + if (!mEnabled) + return; + + blank(); + + mMPPs[0]->cleanupM2M(); + mEnabled = false; +} + +void ExynosExternalDisplay::setPreset(int preset) +{ + mHwc->mHdmiResolutionChanged = false; + mHwc->mHdmiResolutionHandled = false; + mHwc->hdmi_hpd = false; + int dv_timings_index = getDVTimingsIndex(preset); + if (dv_timings_index < 0) { + ALOGE("invalid preset(%d)", preset); + return; + } + + disable(); + + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_PRESET; + hdmi_data.timings = dv_timings[dv_timings_index]; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) != -1) { + if (mHwc->procs) + mHwc->procs->hotplug(mHwc->procs, 
HWC_DISPLAY_EXTERNAL, false); + } +} + +int ExynosExternalDisplay::convert3DTo2D(int preset) +{ + switch (preset) { + case V4L2_DV_720P60_FP: + case V4L2_DV_720P60_SB_HALF: + case V4L2_DV_720P60_TB: + return V4L2_DV_720P60; + case V4L2_DV_720P50_FP: + case V4L2_DV_720P50_SB_HALF: + case V4L2_DV_720P50_TB: + return V4L2_DV_720P50; + case V4L2_DV_1080P60_SB_HALF: + case V4L2_DV_1080P60_TB: + return V4L2_DV_1080P60; + case V4L2_DV_1080P30_FP: + case V4L2_DV_1080P30_SB_HALF: + case V4L2_DV_1080P30_TB: + return V4L2_DV_1080P30; + default: + return HDMI_PRESET_ERROR; + } +} + +void ExynosExternalDisplay::setHdcpStatus(int status) +{ + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_HDCP; + hdmi_data.hdcp = !!status; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to set HDCP status %d", __func__, errno); + } +} + +void ExynosExternalDisplay::setAudioChannel(uint32_t channels) +{ + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_AUDIO; + hdmi_data.audio_info = channels; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to set audio channels %d", __func__, errno); + } +} + +uint32_t ExynosExternalDisplay::getAudioChannel() +{ + int channels = 0; + + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_AUDIO; + if (ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to get audio channels %d", __func__, errno); + } + channels = hdmi_data.audio_info; + + return channels; +} + +int ExynosExternalDisplay::getCecPaddr() +{ + if (!mHwc->hdmi_hpd) + return -1; + + exynos_hdmi_data hdmi_data; + + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_CEC_ADDR; + if (ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: g_dv_timings error, %d", __func__, errno); + return -1; + } + + return (int)hdmi_data.cec_addr; +} + +int ExynosExternalDisplay::blank() +{ + int fence = clearDisplay(); + if (fence >= 0) + close(fence); + + int err = ioctl(mDisplayFd, FBIOBLANK, FB_BLANK_POWERDOWN); + if (err < 0) { + if (errno == EBUSY) + ALOGI("blank ioctl failed (display already blanked)"); + else + ALOGE("blank ioctl failed: %s", strerror(errno)); + return -errno; + } + + return 0; +} + +void ExynosExternalDisplay::checkGrallocFlags(hwc_display_contents_1_t *contents) +{ + mUseProtectedLayer = false; +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + mUseScreenshootLayer = false; + /* it can get from HWCService */ + mLocalExternalDisplayPause = mHwc->external_display_pause; +#endif + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (h->flags & GRALLOC_USAGE_PROTECTED) + mUseProtectedLayer = false; + } +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (layer.flags & HWC_SCREENSHOT_ANIMATOR_LAYER) + mUseScreenshootLayer = true; +#endif + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (mUseScreenshootLayer) + mLocalExternalDisplayPause = true; + else + mLocalExternalDisplayPause = false; +#endif +} + +bool ExynosExternalDisplay::checkConfigChanged(struct s3c_fb_win_config_data lastConfigData, struct s3c_fb_win_config_data newConfigData) +{ + for (size_t i = 0; i < NUM_HDMI_WINDOWS; i++) { + if ((lastConfigData.config[i].state != newConfigData.config[i].state) || + (lastConfigData.config[i].fd != newConfigData.config[i].fd) || + (lastConfigData.config[i].x != 
newConfigData.config[i].x) || + (lastConfigData.config[i].y != newConfigData.config[i].y) || + (lastConfigData.config[i].w != newConfigData.config[i].w) || + (lastConfigData.config[i].h != newConfigData.config[i].h) || + (lastConfigData.config[i].format != newConfigData.config[i].format) || + (lastConfigData.config[i].offset != newConfigData.config[i].offset) || + (lastConfigData.config[i].blending != newConfigData.config[i].blending) || + (lastConfigData.config[i].plane_alpha != newConfigData.config[i].plane_alpha)) + return true; + } + return false; +} diff --git a/libhdmi/ExynosExternalDisplay.h b/libhdmi/ExynosExternalDisplay.h new file mode 100644 index 0000000..1592ec2 --- /dev/null +++ b/libhdmi/ExynosExternalDisplay.h @@ -0,0 +1,143 @@ +#ifndef EXYNOS_LEGACY_HDMI_H +#define EXYNOS_LEGACY_HDMI_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" +#include "../../exynos/kernel-3.10-headers/videodev2.h" +#include "../../exynos/kernel-3.10-headers/v4l2-dv-timings.h" + +#define NUM_VIRT_OVER_HDMI 5 +#define MAX_HDMI_VIDEO_LAYERS 1 + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) +#define HWC_SKIP_HDMI_RENDERING 0x80000000 +#endif + +#define SUPPORTED_DV_TIMINGS_NUM 28 +struct preset_index_mapping { + int preset; + int dv_timings_index; +}; + +const struct preset_index_mapping preset_index_mappings[SUPPORTED_DV_TIMINGS_NUM] = { + {V4L2_DV_480P59_94, 0}, + {V4L2_DV_576P50, 1}, + {V4L2_DV_720P50, 2}, + {V4L2_DV_720P60, 3}, + {V4L2_DV_1080I50, 4}, + {V4L2_DV_1080I60, 5}, + {V4L2_DV_1080P24, 6}, + {V4L2_DV_1080P25, 7}, + {V4L2_DV_1080P30, 8}, + {V4L2_DV_1080P50, 9}, + {V4L2_DV_1080P60, 10}, + {V4L2_DV_2160P24, 11}, + {V4L2_DV_2160P25, 12}, + {V4L2_DV_2160P30, 13}, + {V4L2_DV_2160P24_1, 14}, + {V4L2_DV_720P60_SB_HALF, 15}, + {V4L2_DV_720P60_TB, 16}, + {V4L2_DV_720P50_SB_HALF, 17}, + {V4L2_DV_720P50_TB, 18}, + {V4L2_DV_1080P24_FP, 19}, + {V4L2_DV_1080P24_SB_HALF, 20}, + {V4L2_DV_1080P24_TB, 21}, + {V4L2_DV_1080I60_SB_HALF, 22}, + {V4L2_DV_1080I50_SB_HALF, 23}, + {V4L2_DV_1080P60_SB_HALF, 24}, + {V4L2_DV_1080P60_TB, 25}, + {V4L2_DV_1080P30_SB_HALF, 26}, + {V4L2_DV_1080P30_TB, 27} +}; + +class ExynosExternalDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosExternalDisplay(); + + void setHdmiStatus(bool status); + + bool isPresetSupported(unsigned int preset); + int getConfig(); + int getDisplayConfigs(uint32_t *configs, size_t *numConfigs); + int enable(); + void disable(); + void setPreset(int preset); + int convert3DTo2D(int preset); + void setHdcpStatus(int status); + void setAudioChannel(uint32_t channels); + uint32_t getAudioChannel(); + int getCecPaddr(); + + virtual int openHdmi(); + virtual void closeHdmi(); + virtual int blank(); + virtual int prepare(hwc_display_contents_1_t* contents); + virtual int set(hwc_display_contents_1_t* contents); + void checkGrallocFlags(hwc_display_contents_1_t *contents); + int clearDisplay(); + + /* Fields */ + ExynosMPPModule *mMPPs[1]; + + bool mEnabled; + bool mBlanked; + + const void *mLastLayerHandles[NUM_VIRT_OVER_HDMI]; + int mVirtualOverlayFlag; + + exynos5_hwc_post_data_t mPostData; + bool mRetry; + int mForceOverlayLayerIndex; + int mYuvLayers; + bool mFbNeeded; + size_t mFirstFb; + size_t mLastFb; + bool mGscUsed; + int mCurrentGscIndex; + bool mBypassSkipStaticLayer; + int mLastOverlayWindowIndex; + int mLastOverlayLayerIndex; + int mGscLayers; + size_t mLastFbWindow; + + uint32_t mDmaChannelMaxBandwidth[MAX_NUM_HDMI_DMA_CH]; + uint32_t 
mDmaChannelMaxOverlapCount[MAX_NUM_HDMI_DMA_CH]; + struct s3c_fb_win_config_data mLastConfigData; + exynos5_gsc_map_t mLastGscMap[NUM_HW_WINDOWS]; + const void *mLastHandles[NUM_HDMI_WINDOWS]; + bool mUseSubtitles; + bool mLocalExternalDisplayPause; + bool mIsCameraStarted; + uint32_t mFBT_Transform; + bool mUseProtectedLayer; +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + bool mUseScreenshootLayer; +#endif + + protected: + void determineYuvOverlay(hwc_display_contents_1_t *contents); + void determineSupportedOverlays(hwc_display_contents_1_t *contents); + void determineBandwidthSupport(hwc_display_contents_1_t *contents); + void assignWindows(hwc_display_contents_1_t *contents); + bool assignGscLayer(hwc_layer_1_t &layer, int index, int nextWindow); + int postGscM2M(hwc_layer_1_t &layer, struct s3c_fb_win_config *config, int win_map, int index); + void configureHandle(private_handle_t *handle, hwc_frect_t &sourceCrop, + hwc_rect_t &displayFrame, int32_t blending, int32_t planeAlpha, int fence_fd, s3c_fb_win_config &cfg); + void skipStaticLayers(hwc_display_contents_1_t *contents); + void handleStaticLayers(hwc_display_contents_1_t *contents, struct s3c_fb_win_config_data &win_data, int tot_ovly_wins); + void skipUILayers(hwc_display_contents_1_t *contents); + void cleanupGscs(); + int getDVTimingsIndex(int preset); + + //virtual void configureOverlay(hwc_layer_1_t *layer, s3c_fb_win_config &cfg); + //virtual bool isOverlaySupported(hwc_layer_1_t &layer, size_t i); + //virtual int postFrame(hwc_display_contents_1_t *contents); + void configureOverlay(hwc_layer_1_t *layer, s3c_fb_win_config &cfg); + bool isOverlaySupported(hwc_layer_1_t &layer, size_t i); + int postFrame(hwc_display_contents_1_t *contents); + bool checkConfigChanged(struct s3c_fb_win_config_data lastConfigData, struct s3c_fb_win_config_data newConfigData); +}; + +#endif diff --git a/libhdmi/NOTICE b/libhdmi/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhdmi/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + diff --git a/libhdmi/decon_tv.h b/libhdmi/decon_tv.h new file mode 100644 index 0000000..b5b6e49 --- /dev/null +++ b/libhdmi/decon_tv.h @@ -0,0 +1,24 @@ +#ifndef SAMSUNG_DEOCN_TV_H +#define SAMSUNG_DECON_TV_H + +struct exynos_hdmi_data { + enum { + EXYNOS_HDMI_STATE_PRESET = 0, + EXYNOS_HDMI_STATE_ENUM_PRESET, + EXYNOS_HDMI_STATE_CEC_ADDR, + EXYNOS_HDMI_STATE_HDCP, + EXYNOS_HDMI_STATE_AUDIO, + } state; + struct v4l2_dv_timings timings; + struct v4l2_enum_dv_timings etimings; + __u32 cec_addr; + __u32 audio_info; + int hdcp; +}; + +#define EXYNOS_GET_HDMI_CONFIG _IOW('F', 220, \ + struct exynos_hdmi_data) +#define EXYNOS_SET_HDMI_CONFIG _IOW('F', 221, \ + struct exynos_hdmi_data) + +#endif /* SAMSUNG_DECON_TV_H */ diff --git a/libhdmi/dv_timings.c b/libhdmi/dv_timings.c new file mode 100644 index 0000000..729b85d --- /dev/null +++ b/libhdmi/dv_timings.c @@ -0,0 +1,35 @@ +#include "../../exynos/kernel-3.10-headers/videodev2.h" +#include "../../exynos/kernel-3.10-headers/v4l2-dv-timings.h" +#include "decon_tv.h" + +const struct v4l2_dv_timings dv_timings[] = { + V4L2_DV_BT_CEA_720X480P59_94, + V4L2_DV_BT_CEA_720X576P50, + V4L2_DV_BT_CEA_1280X720P50, + V4L2_DV_BT_CEA_1280X720P60, + V4L2_DV_BT_CEA_1920X1080I50, + V4L2_DV_BT_CEA_1920X1080I60, + V4L2_DV_BT_CEA_1920X1080P24, + V4L2_DV_BT_CEA_1920X1080P25, + V4L2_DV_BT_CEA_1920X1080P30, + V4L2_DV_BT_CEA_1920X1080P50, + V4L2_DV_BT_CEA_1920X1080P60, + V4L2_DV_BT_CEA_3840X2160P24, + V4L2_DV_BT_CEA_3840X2160P25, + V4L2_DV_BT_CEA_3840X2160P30, + V4L2_DV_BT_CEA_4096X2160P24, + V4L2_DV_BT_CEA_1280X720P60_SB_HALF, + V4L2_DV_BT_CEA_1280X720P60_TB, + V4L2_DV_BT_CEA_1280X720P50_SB_HALF, + V4L2_DV_BT_CEA_1280X720P50_TB, + V4L2_DV_BT_CEA_1920X1080P24_FP, + V4L2_DV_BT_CEA_1920X1080P24_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P24_TB, + V4L2_DV_BT_CEA_1920X1080I60_SB_HALF, + V4L2_DV_BT_CEA_1920X1080I50_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P60_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P60_TB, + V4L2_DV_BT_CEA_1920X1080P30_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P30_TB, +}; + diff --git a/libhdmi_dummy/Android.mk b/libhdmi_dummy/Android.mk new file mode 100644 index 0000000..996e62e --- /dev/null +++ b/libhdmi_dummy/Android.mk @@ -0,0 +1,51 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
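+
+# Stub variant of libhdmi: builds the same LOCAL_MODULE name, but every
+# ExynosExternalDisplay method in this directory is a no-op stub
+# (see ExynosExternalDisplay.cpp).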
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libsync libexynosv4l2 libhwcutils libdisplay \ + libmpp + +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp + +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule +else +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libdisplay +endif + +LOCAL_SRC_FILES := \ + ExynosExternalDisplay.cpp + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhdmimodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libhdmi + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libhdmi_dummy/ExynosExternalDisplay.cpp b/libhdmi_dummy/ExynosExternalDisplay.cpp new file mode 100644 index 0000000..9bba2ed --- /dev/null +++ b/libhdmi_dummy/ExynosExternalDisplay.cpp @@ -0,0 +1,121 @@ +#include "ExynosMPPModule.h" +#include "ExynosExternalDisplay.h" + +ExynosExternalDisplay::ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev) : + ExynosDisplay(1), + mFlagIONBufferAllocated(false) +{ + this->mHwc = pdev; + mMPPs[0] = NULL; + mEnabled = false; + mBlanked = false; + mUseSubtitles = false; + +} + +ExynosExternalDisplay::~ExynosExternalDisplay() +{ +} + +int ExynosExternalDisplay::prepare(__unused hwc_display_contents_1_t *contents) +{ + return 0; +} + +int ExynosExternalDisplay::clearDisplay() +{ + return -1; +} + +int ExynosExternalDisplay::set(__unused hwc_display_contents_1_t *contents) +{ + return 0; +} + +int ExynosExternalDisplay::openHdmi() +{ + return 0; +} + +void ExynosExternalDisplay::setHdmiStatus(__unused bool status) +{ +} + +bool ExynosExternalDisplay::isPresetSupported(__unused unsigned int preset) +{ + return false; +} + +int ExynosExternalDisplay::getConfig() +{ + return 0; +} + +int ExynosExternalDisplay::getDisplayConfigs(__unused uint32_t *configs, size_t *numConfigs) +{ + *numConfigs = 0; + return -1; +} + +int ExynosExternalDisplay::enableLayer(__unused hdmi_layer_t &hl) +{ + return 0; +} + +void ExynosExternalDisplay::disableLayer(__unused hdmi_layer_t &hl) +{ +} + +int ExynosExternalDisplay::enable() +{ + return 0; +} + +void ExynosExternalDisplay::disable() +{ +} + +int ExynosExternalDisplay::output(__unused hdmi_layer_t &hl, __unused hwc_layer_1_t &layer, __unused private_handle_t *h, __unused int acquireFenceFd, __unused int *releaseFenceFd) +{ + return 0; +} + +void ExynosExternalDisplay::skipStaticLayers(__unused hwc_display_contents_1_t *contents, __unused int ovly_idx) +{ +} + +void ExynosExternalDisplay::setPreset(__unused int preset) +{ +} + +int ExynosExternalDisplay::convert3DTo2D(__unused int preset) +{ + return 0; +} + +void ExynosExternalDisplay::calculateDstRect(__unused int src_w, __unused int src_h, __unused int dst_w, __unused int dst_h, __unused struct v4l2_rect *dst_rect) +{ +} + +void ExynosExternalDisplay::setHdcpStatus(__unused int status) +{ +} + +void ExynosExternalDisplay::setAudioChannel(__unused uint32_t channels) +{ +} + +uint32_t 
ExynosExternalDisplay::getAudioChannel() +{ + return 0; +} + +int ExynosExternalDisplay::blank() +{ + return 0; +} + +int ExynosExternalDisplay::waitForRenderFinish(__unused private_module_t *grallocModule, __unused buffer_handle_t *handle, __unused int buffers) +{ + return 0; +} diff --git a/libhdmi_dummy/ExynosExternalDisplay.h b/libhdmi_dummy/ExynosExternalDisplay.h new file mode 100644 index 0000000..4d87e4a --- /dev/null +++ b/libhdmi_dummy/ExynosExternalDisplay.h @@ -0,0 +1,51 @@ +#ifndef EXYNOS_DUMMY_HDMI_H +#define EXYNOS_DUMMY_HDMI_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +#define NUM_VIRT_OVER_HDMI 5 + +class ExynosExternalDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosExternalDisplay(); + + void setHdmiStatus(bool status); + + bool isPresetSupported(unsigned int preset); + int getConfig(); + int getDisplayConfigs(uint32_t *configs, size_t *numConfigs); + int enableLayer(hdmi_layer_t &hl); + void disableLayer(hdmi_layer_t &hl); + int enable(); + void disable(); + int output(hdmi_layer_t &hl, hwc_layer_1_t &layer, private_handle_t *h, int acquireFenceFd, int *releaseFenceFd); + void skipStaticLayers(hwc_display_contents_1_t *contents, int ovly_idx); + void setPreset(int preset); + int convert3DTo2D(int preset); + void calculateDstRect(int src_w, int src_h, int dst_w, int dst_h, struct v4l2_rect *dst_rect); + void setHdcpStatus(int status); + void setAudioChannel(uint32_t channels); + uint32_t getAudioChannel(); + bool isIONBufferAllocated() {return mFlagIONBufferAllocated;}; + + virtual int openHdmi(); + virtual int blank(); + virtual int prepare(hwc_display_contents_1_t* contents); + virtual int set(hwc_display_contents_1_t* contents); + int clearDisplay(); + virtual void freeExtVideoBuffers() {} + virtual int waitForRenderFinish(private_module_t *grallocModule, buffer_handle_t *handle, int buffers); + + /* Fields */ + ExynosMPPModule *mMPPs[1]; + + bool mEnabled; + bool mBlanked; + bool mUseSubtitles; + bool mFlagIONBufferAllocated; +}; + +#endif diff --git a/libhdmi_dummy/NOTICE b/libhdmi_dummy/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhdmi_dummy/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libhdmi_legacy/Android.mk b/libhdmi_legacy/Android.mk new file mode 100644 index 0000000..5fe094b --- /dev/null +++ b/libhdmi_legacy/Android.mk @@ -0,0 +1,58 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
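+# libhdmi_legacy builds the legacy mixer/v4l2 HDMI backend as libhdmi from
+# ExynosExternalDisplay.cpp; dv_timings.c and -DUSE_DV_TIMINGS are only added
+# for 3.10 kernels via the TARGET_LINUX_KERNEL_VERSION conditional below.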
+ + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 libhwcutils libdisplay libmpp + +LOCAL_CFLAGS += -DLOG_TAG=\"hdmi\" +LOCAL_CFLAGS += -DHLOG_CODE=2 + +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(LOCAL_PATH)/../libdisplay \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp \ + $(TOP)/system/core/libsync/include + +LOCAL_SRC_FILES := \ + ExynosExternalDisplay.cpp + +ifneq ($(filter 3.10, $(TARGET_LINUX_KERNEL_VERSION)),) +LOCAL_SRC_FILES += \ + dv_timings.c +LOCAL_CFLAGS += -DUSE_DV_TIMINGS +endif + +ifeq ($(TARGET_BOARD_PLATFORM),exynos4) + LOCAL_CFLAGS += -DNOT_USE_TRIPLE_BUFFER +endif + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhdmimodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libhdmi + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libhdmi_legacy/ExynosExternalDisplay.cpp b/libhdmi_legacy/ExynosExternalDisplay.cpp new file mode 100644 index 0000000..e8d8022 --- /dev/null +++ b/libhdmi_legacy/ExynosExternalDisplay.cpp @@ -0,0 +1,1337 @@ +#include "ExynosHWC.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosOverlayDisplay.h" +#include "ExynosExternalDisplay.h" +#include + +#if defined(USE_DV_TIMINGS) +extern struct v4l2_dv_timings dv_timings[]; + +bool is_same_dv_timings(const struct v4l2_dv_timings *t1, + const struct v4l2_dv_timings *t2) +{ + if (t1->type == t2->type && + t1->bt.width == t2->bt.width && + t1->bt.height == t2->bt.height && + t1->bt.interlaced == t2->bt.interlaced && + t1->bt.polarities == t2->bt.polarities && + t1->bt.pixelclock == t2->bt.pixelclock && + t1->bt.hfrontporch == t2->bt.hfrontporch && + t1->bt.vfrontporch == t2->bt.vfrontporch && + t1->bt.vsync == t2->bt.vsync && + t1->bt.vbackporch == t2->bt.vbackporch && + (!t1->bt.interlaced || + (t1->bt.il_vfrontporch == t2->bt.il_vfrontporch && + t1->bt.il_vsync == t2->bt.il_vsync && + t1->bt.il_vbackporch == t2->bt.il_vbackporch))) + return true; + return false; +} +#endif +int ExynosExternalDisplay::getDVTimingsIndex(int preset) +{ + for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + if (preset == preset_index_mappings[i].preset) + return preset_index_mappings[i].dv_timings_index; + } + return -1; +} + +inline bool hdmi_src_cfg_changed(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return c1.format != c2.format || + c1.rot != c2.rot || + c1.cacheable != c2.cacheable || + c1.drmMode != c2.drmMode || + c1.fw != c2.fw || + c1.fh != c2.fh; +} + +ExynosExternalDisplay::ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(1), + mMixer(-1), + mEnabled(false), + mBlanked(false), + mIsFbLayer(false), + mIsVideoLayer(false), + mFbStarted(false), + mVideoStarted(false), + mHasFbComposition(false), + mHasSkipLayer(false), + mUiIndex(0), + mVideoIndex(1), + mVirtualOverlayFlag(0) +{ + mIsCameraStarted = false; + mFBT_Transform = 0; + mUseProtectedLayer = false; +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + mUseScreenshootLayer = false; + mLocalExternalDisplayPause = false; +#endif + mNumMPPs = 1; 
+ this->mHwc = pdev; + mOtfMode = OTF_OFF; + mUseSubtitles = false; + + mMPPs[0] = new ExynosMPPModule(this, HDMI_GSC_IDX); + memset(mMixerLayers, 0, sizeof(mMixerLayers)); + memset(mLastLayerHandles, 0, sizeof(mLastLayerHandles)); +} + +ExynosExternalDisplay::~ExynosExternalDisplay() +{ + delete mMPPs[0]; +} + +int ExynosExternalDisplay::openHdmi() +{ + int ret = 0; + int sw_fd; + + mMixer = exynos_subdev_open_devname("s5p-mixer0", O_RDWR); + if (mMixer < 0) { + ALOGE("failed to open hdmi mixer0 subdev"); + ret = mMixer; + return ret; + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + mUiIndex = 1; + mVideoIndex = 0; +#else + mUiIndex = 0; + mVideoIndex = 1; +#endif + + mMixerLayers[0].id = 0; + mMixerLayers[0].fd = open("/dev/video16", O_RDWR); + if (mMixerLayers[0].fd < 0) { + ALOGE("failed to open hdmi layer0 device"); + ret = mMixerLayers[0].fd; + close(mMixer); + return ret; + } + + mMixerLayers[1].id = 1; + mMixerLayers[1].fd = open("/dev/video17", O_RDWR); + if (mMixerLayers[1].fd < 0) { + ALOGE("failed to open hdmi layer1 device"); + ret = mMixerLayers[1].fd; + close(mMixerLayers[0].fd); + return ret; + } + +#if defined(VP_VIDEO) + mMixerLayers[2].id = VIDEO_LAYER_INDEX; + mMixerLayers[2].fd = open("/dev/video20", O_RDWR); + if (mMixerLayers[2].fd < 0) { + ALOGE("failed to open hdmi video layer device"); + ret = mMixerLayers[2].fd; + close(mMixerLayers[0].fd); + close(mMixerLayers[1].fd); + return ret; + } +#else + mMixerLayers[2].id = VIDEO_LAYER_INDEX; + mMixerLayers[2].fd = -1; +#endif + return ret; +} + +void ExynosExternalDisplay::setHdmiStatus(bool status) +{ + if (status) { + enable(); + } else { + disable(); + } +} + +bool ExynosExternalDisplay::isPresetSupported(unsigned int preset) +{ +#if defined(USE_DV_TIMINGS) + struct v4l2_enum_dv_timings enum_timings; + int dv_timings_index = getDVTimingsIndex(preset); +#else + struct v4l2_dv_enum_preset enum_preset; +#endif + bool found = false; + int index = 0; + int ret; + + if (preset <= V4L2_DV_INVALID || preset > V4L2_DV_1080P30_TB) { + ALOGE("%s: invalid preset, %d", __func__, preset); + return -1; + } +#if defined(USE_DV_TIMINGS) + if (dv_timings_index < 0) { + ALOGE("%s: unsupported preset, %d", __func__, preset); + return -1; + } +#endif + + while (true) { +#if defined(USE_DV_TIMINGS) + enum_timings.index = index++; + ret = ioctl(mMixerLayers[0].fd, VIDIOC_ENUM_DV_TIMINGS, &enum_timings); + + if (ret < 0) { + if (errno == EINVAL) + break; + ALOGE("%s: enum_dv_timings error, %d", __func__, errno); + return -1; + } + + ALOGV("%s: %d width=%d height=%d", + __func__, enum_timings.index, + enum_timings.timings.bt.width, enum_timings.timings.bt.height); + + if (is_same_dv_timings(&enum_timings.timings, &dv_timings[dv_timings_index])) { + mXres = enum_timings.timings.bt.width; + mYres = enum_timings.timings.bt.height; + found = true; + mHwc->mHdmiCurrentPreset = preset; + } +#else + enum_preset.index = index++; + ret = ioctl(mMixerLayers[0].fd, VIDIOC_ENUM_DV_PRESETS, &enum_preset); + + if (ret < 0) { + if (errno == EINVAL) + break; + ALOGE("%s: enum_dv_presets error, %d", __func__, errno); + return -1; + } + + ALOGV("%s: %d preset=%02d width=%d height=%d name=%s", + __func__, enum_preset.index, enum_preset.preset, + enum_preset.width, enum_preset.height, enum_preset.name); + + if (preset == enum_preset.preset) { + mXres = enum_preset.width; + mYres = enum_preset.height; + found = true; + mHwc->mHdmiCurrentPreset = preset; + } +#endif + } + return found; +} + +int ExynosExternalDisplay::getConfig() +{ +#if defined(USE_DV_TIMINGS) + 
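+    /* USE_DV_TIMINGS path: read the current mode with VIDIOC_G_DV_TIMINGS and
+     * map it back to a preset via preset_index_mappings before validating it. */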
struct v4l2_dv_timings timings; + int dv_timings_index = 0; +#endif + struct v4l2_dv_preset preset; + int ret; + + if (!mHwc->hdmi_hpd) + return -1; + +#if defined(USE_DV_TIMINGS) + if (ioctl(mMixerLayers[0].fd, VIDIOC_G_DV_TIMINGS, &timings) < 0) { + ALOGE("%s: g_dv_timings error, %d", __func__, errno); + return -1; + } + for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + dv_timings_index = preset_index_mappings[i].dv_timings_index; + if (is_same_dv_timings(&timings, &dv_timings[dv_timings_index])) { + preset.preset = preset_index_mappings[i].preset; + break; + } + } +#else + if (ioctl(mMixerLayers[0].fd, VIDIOC_G_DV_PRESET, &preset) < 0) { + ALOGE("%s: g_dv_preset error, %d", __func__, errno); + return -1; + } +#endif + + return isPresetSupported(preset.preset) ? 0 : -1; +} + +int ExynosExternalDisplay::getDisplayConfigs(uint32_t *configs, size_t *numConfigs) +{ + *numConfigs = 1; + configs[0] = 0; + getConfig(); + return 0; +} + +int ExynosExternalDisplay::enableLayer(hdmi_layer_t &hl) +{ + if (hl.enabled) + return 0; + + struct v4l2_requestbuffers reqbuf; + memset(&reqbuf, 0, sizeof(reqbuf)); + reqbuf.count = NUM_HDMI_BUFFERS; + reqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + reqbuf.memory = V4L2_MEMORY_DMABUF; + if (exynos_v4l2_reqbufs(hl.fd, &reqbuf) < 0) { + ALOGE("%s: layer%d: reqbufs failed %d", __func__, hl.id, errno); + return -1; + } + + if (reqbuf.count != NUM_HDMI_BUFFERS) { + ALOGE("%s: layer%d: didn't get buffer", __func__, hl.id); + return -1; + } + + if (hl.id == mUiIndex) { + if (exynos_v4l2_s_ctrl(hl.fd, V4L2_CID_TV_PIXEL_BLEND_ENABLE, 1) < 0) { + ALOGE("%s: layer%d: PIXEL_BLEND_ENABLE failed %d", __func__, + hl.id, errno); + return -1; + } + } else { + if (exynos_v4l2_s_ctrl(hl.fd, V4L2_CID_TV_PIXEL_BLEND_ENABLE, 0) < 0) { + ALOGE("%s: layer%d: PIXEL_BLEND_DISABLE failed %d", __func__, + hl.id, errno); + return -1; + } + } + + ALOGV("%s: layer%d enabled", __func__, hl.id); + hl.enabled = true; + return 0; +} + +void ExynosExternalDisplay::disableLayer(hdmi_layer_t &hl) +{ + if (!hl.enabled) + return; + + if (hl.streaming) { + if (exynos_v4l2_streamoff(hl.fd, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) < 0) + ALOGE("%s: layer%d: streamoff failed %d", __func__, hl.id, errno); + hl.streaming = false; + } + + struct v4l2_requestbuffers reqbuf; + memset(&reqbuf, 0, sizeof(reqbuf)); + reqbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + reqbuf.memory = V4L2_MEMORY_DMABUF; + if (exynos_v4l2_reqbufs(hl.fd, &reqbuf) < 0) + ALOGE("%s: layer%d: reqbufs failed %d", __func__, hl.id, errno); + + memset(&hl.cfg, 0, sizeof(hl.cfg)); + hl.current_buf = 0; + hl.queued_buf = 0; + hl.enabled = false; + + ALOGV("%s: layer%d disabled", __func__, hl.id); +} + +int ExynosExternalDisplay::enable() +{ + if (mEnabled) + return 0; + + if (mBlanked) + return 0; + + struct v4l2_subdev_format sd_fmt; + memset(&sd_fmt, 0, sizeof(sd_fmt)); + sd_fmt.pad = MIXER_G0_SUBDEV_PAD_SINK; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.width = mXres; + sd_fmt.format.height = mYres; + sd_fmt.format.code = V4L2_MBUS_FMT_XRGB8888_4X8_LE; + if (exynos_subdev_s_fmt(mMixer, &sd_fmt) < 0) { + ALOGE("%s: s_fmt failed pad=%d", __func__, sd_fmt.pad); + return -1; + } + + struct v4l2_subdev_crop sd_crop; + memset(&sd_crop, 0, sizeof(sd_crop)); + sd_crop.pad = MIXER_G0_SUBDEV_PAD_SINK; + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_crop.rect.width = mXres; + sd_crop.rect.height = mYres; + if (exynos_subdev_s_crop(mMixer, &sd_crop) < 0) { + ALOGE("%s: s_crop failed pad=%d", __func__, sd_crop.pad); + return -1; + 
} + + memset(&sd_fmt, 0, sizeof(sd_fmt)); + sd_fmt.pad = MIXER_G0_SUBDEV_PAD_SOURCE; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.width = mXres; + sd_fmt.format.height = mYres; + sd_fmt.format.code = V4L2_MBUS_FMT_XRGB8888_4X8_LE; + if (exynos_subdev_s_fmt(mMixer, &sd_fmt) < 0) { + ALOGE("%s: s_fmt failed pad=%d", __func__, sd_fmt.pad); + return -1; + } + + memset(&sd_crop, 0, sizeof(sd_crop)); + sd_crop.pad = MIXER_G0_SUBDEV_PAD_SOURCE; + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_crop.rect.width = mXres; + sd_crop.rect.height = mYres; + if (exynos_subdev_s_crop(mMixer, &sd_crop) < 0) { + ALOGE("%s: s_crop failed pad=%d", __func__, sd_crop.pad); + return -1; + } + + char value[PROPERTY_VALUE_MAX]; + property_get("persist.hdmi.hdcp_enabled", value, "1"); + int hdcp_enabled = atoi(value); + + if (exynos_v4l2_s_ctrl(mMixerLayers[mUiIndex].fd, V4L2_CID_TV_HDCP_ENABLE, + hdcp_enabled) < 0) + ALOGE("%s: s_ctrl(CID_TV_HDCP_ENABLE) failed %d", __func__, errno); + + /* "2" is RGB601_16_235 */ + property_get("persist.hdmi.color_range", value, "2"); + int color_range = atoi(value); + + if (exynos_v4l2_s_ctrl(mMixerLayers[mUiIndex].fd, V4L2_CID_TV_SET_COLOR_RANGE, + color_range) < 0) + ALOGE("%s: s_ctrl(CID_TV_COLOR_RANGE) failed %d", __func__, errno); + + enableLayer(mMixerLayers[mUiIndex]); + + mEnabled = true; + return 0; +} + +void ExynosExternalDisplay::disable() +{ + if (!mEnabled) + return; + + disableLayer(mMixerLayers[0]); + disableLayer(mMixerLayers[1]); +#if defined(VP_VIDEO) + disableLayer(mMixerLayers[2]); +#endif + + blank(); + + mMPPs[0]->cleanupM2M(); + mEnabled = false; +} + +int ExynosExternalDisplay::output(hdmi_layer_t &hl, + hwc_layer_1_t &layer, + private_handle_t *h, + int acquireFenceFd, + int *releaseFenceFd) +{ + int ret = 0; + + exynos_mpp_img src_cfg; + memset(&src_cfg, 0, sizeof(src_cfg)); + + if (hl.id == VIDEO_LAYER_INDEX) { + src_cfg.x = layer.displayFrame.left; + src_cfg.y = layer.displayFrame.top; + src_cfg.w = WIDTH(layer.displayFrame); + src_cfg.h = HEIGHT(layer.displayFrame); +#ifdef USES_DRM_SETTING_BY_DECON + src_cfg.drmMode = !!(getDrmMode(h->flags) == SECURE_DRM); +#endif + if (isVPSupported(layer, h->format)) { + src_cfg.fw = h->stride; + src_cfg.fh = h->vstride; + } else { + src_cfg.fw = ALIGN(mXres, 16); + src_cfg.fh = ALIGN(mYres, 16); + } + } + + exynos_mpp_img cfg; + memset(&cfg, 0, sizeof(cfg)); + cfg.x = layer.displayFrame.left; + cfg.y = layer.displayFrame.top; + cfg.w = WIDTH(layer.displayFrame); + cfg.h = HEIGHT(layer.displayFrame); +#ifdef USES_DRM_SETTING_BY_DECON + cfg.drmMode = !!(getDrmMode(h->flags) == SECURE_DRM); +#endif + + if ((signed int)cfg.x < 0 || (signed int)cfg.y < 0 || (cfg.w > (uint32_t)mXres) || (cfg.h > (uint32_t)mYres)) { + *releaseFenceFd = -1; + if (acquireFenceFd >= 0) + close(acquireFenceFd); + return ret; + } + + if ((hl.id == VIDEO_LAYER_INDEX && ExynosMPP::isSrcConfigChanged(hl.cfg, src_cfg)) || + (hl.id != VIDEO_LAYER_INDEX && ExynosMPP::isSrcConfigChanged(hl.cfg, cfg)) || mFbStarted || mVideoStarted) { + struct v4l2_subdev_crop sd_crop; + memset(&sd_crop, 0, sizeof(sd_crop)); + if (hl.id == 0) + sd_crop.pad = MIXER_G0_SUBDEV_PAD_SOURCE; + else if (hl.id == 1) + sd_crop.pad = MIXER_G1_SUBDEV_PAD_SOURCE; + + if ((hl.id == VIDEO_LAYER_INDEX && hdmi_src_cfg_changed(hl.cfg, src_cfg)) || + (hl.id != VIDEO_LAYER_INDEX && hdmi_src_cfg_changed(hl.cfg, cfg)) || mFbStarted || mVideoStarted) { + disableLayer(hl); + + /* Set source image size */ + struct v4l2_format fmt; + memset(&fmt, 0, sizeof(fmt)); + fmt.type 
= V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + + if (hl.id == VIDEO_LAYER_INDEX) { + if (isVPSupported(layer, h->format)) { + fmt.fmt.pix_mp.width = h->stride; + fmt.fmt.pix_mp.height = h->vstride; + } else { + fmt.fmt.pix_mp.width = ALIGN(mXres, 16); + fmt.fmt.pix_mp.height = ALIGN(mYres, 16); + } + } else if (hl.id == mVideoIndex) { + fmt.fmt.pix_mp.width = ALIGN(mXres, 16); + fmt.fmt.pix_mp.height = ALIGN(mYres, 16); + } else { + fmt.fmt.pix_mp.width = h->stride; + fmt.fmt.pix_mp.height = h->vstride; + } + + if (hl.id == VIDEO_LAYER_INDEX) { + if (isVPSupported(layer, h->format)) + fmt.fmt.pix_mp.pixelformat = HAL_PIXEL_FORMAT_2_V4L2_PIX(h->format); + else + fmt.fmt.pix_mp.pixelformat = HAL_PIXEL_FORMAT_2_V4L2_PIX(HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED); + fmt.fmt.pix_mp.num_planes = 2; + } else { + fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_BGR32; + fmt.fmt.pix_mp.num_planes = 1; + } + fmt.fmt.pix_mp.field = V4L2_FIELD_ANY; + + ret = exynos_v4l2_s_fmt(hl.fd, &fmt); + if (ret < 0) { + ALOGE("%s: layer%d: s_fmt failed %d", __func__, hl.id, errno); + goto err; + } + + if (hl.id != VIDEO_LAYER_INDEX) { + struct v4l2_subdev_format sd_fmt; + memset(&sd_fmt, 0, sizeof(sd_fmt)); + sd_fmt.pad = sd_crop.pad; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_fmt.format.width = mXres; + sd_fmt.format.height = mYres; + sd_fmt.format.code = V4L2_MBUS_FMT_XRGB8888_4X8_LE; + if (exynos_subdev_s_fmt(mMixer, &sd_fmt) < 0) { + ALOGE("%s: s_fmt failed pad=%d", __func__, sd_fmt.pad); + return -1; + } + } + + enableLayer(hl); + + mFbStarted = false; + mVideoStarted = false; + } + + /* Set source crop size */ + struct v4l2_crop crop; + memset(&crop, 0, sizeof(crop)); + crop.type = V4L2_BUF_TYPE_VIDEO_OVERLAY; + crop.c.width = cfg.w; + crop.c.height = cfg.h; + + if (hl.id == VIDEO_LAYER_INDEX) { + if (isVPSupported(layer, h->format)) { + crop.c.left = layer.sourceCropf.left; + crop.c.top = layer.sourceCropf.top; + crop.c.width = WIDTH(layer.sourceCropf); + crop.c.height = HEIGHT(layer.sourceCropf); + } else { + crop.c.left = 0; + crop.c.top = 0; + } + } else if (hl.id == mVideoIndex) { + crop.c.left = 0; + crop.c.top = 0; + } else { + crop.c.left = cfg.x; + crop.c.top = cfg.y; + } + + if (exynos_v4l2_s_crop(hl.fd, &crop) < 0) { + ALOGE("%s: v4l2_s_crop failed ", __func__); + goto err; + } + + /* Set destination position & scaling size */ + if (hl.id == VIDEO_LAYER_INDEX) { + crop.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + crop.c.left = cfg.x; + crop.c.top = cfg.y; + crop.c.width = cfg.w; + crop.c.height = cfg.h; + + if (exynos_v4l2_s_crop(hl.fd, &crop) < 0) { + ALOGE("%s: v4l2_s_crop (mixer output) failed ", __func__); + goto err; + } + } else { + sd_crop.which = V4L2_SUBDEV_FORMAT_ACTIVE; + sd_crop.rect.left = cfg.x; + sd_crop.rect.top = cfg.y; + sd_crop.rect.width = cfg.w; + sd_crop.rect.height = cfg.h; + if (exynos_subdev_s_crop(mMixer, &sd_crop) < 0) { + ALOGE("%s: s_crop failed pad=%d", __func__, sd_crop.pad); + goto err; + } + } + + ALOGV("HDMI layer%d configuration:", hl.id); + dumpMPPImage(cfg); + if (hl.id == VIDEO_LAYER_INDEX) + hl.cfg = src_cfg; + else + hl.cfg = cfg; + } + + struct v4l2_buffer buffer; + struct v4l2_plane planes[3]; + + if (hl.queued_buf == NUM_HDMI_BUFFERS) { + memset(&buffer, 0, sizeof(buffer)); + memset(planes, 0, sizeof(planes)); + buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buffer.memory = V4L2_MEMORY_DMABUF; + if(hl.id == VIDEO_LAYER_INDEX) + buffer.length = 2; + else + buffer.length = 1; + buffer.m.planes = planes; + ret = exynos_v4l2_dqbuf(hl.fd, &buffer); + if (ret < 
0) { + ALOGE("%s: layer%d: dqbuf failed %d", __func__, hl.id, errno); + goto err; + } + hl.queued_buf--; + } + + memset(&buffer, 0, sizeof(buffer)); + memset(planes, 0, sizeof(planes)); + buffer.index = hl.current_buf; + buffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buffer.memory = V4L2_MEMORY_DMABUF; + buffer.flags = V4L2_BUF_FLAG_USE_SYNC; + buffer.reserved = acquireFenceFd; + buffer.m.planes = planes; + + + if (hl.id == VIDEO_LAYER_INDEX) { + buffer.length = 2; + buffer.m.planes[0].m.fd = h->fd; + buffer.m.planes[1].m.fd = h->fd1; + } else { + buffer.length = 1; + buffer.m.planes[0].m.fd = h->fd; + } + + if (exynos_v4l2_qbuf(hl.fd, &buffer) < 0) { + ALOGE("%s: layer%d: qbuf failed %d", __func__, hl.id, errno); + ret = -1; + goto err; + } + + if (releaseFenceFd) + *releaseFenceFd = buffer.reserved; + else + close(buffer.reserved); + + hl.queued_buf++; + hl.current_buf = (hl.current_buf + 1) % NUM_HDMI_BUFFERS; + + if (!hl.streaming) { +#ifdef USES_DRM_SETTING_BY_DECON + if (exynos_v4l2_s_ctrl(hl.fd, V4L2_CID_CONTENT_PROTECTION, hl.cfg.drmMode) < 0) + ALOGE("%s: s_ctrl(V4L2_CID_CONTENT_PROTECTION) failed %d", __func__, errno);; +#endif + if (exynos_v4l2_streamon(hl.fd, (v4l2_buf_type)buffer.type) < 0) { + ALOGE("%s: layer%d: streamon failed %d", __func__, hl.id, errno); + ret = -1; + goto err; + } + hl.streaming = true; + } + +err: + if (acquireFenceFd >= 0) + close(acquireFenceFd); + + return ret; +} + +void ExynosExternalDisplay::skipStaticLayers(hwc_display_contents_1_t *contents, int ovly_idx) +{ + static int init_flag = 0; + mVirtualOverlayFlag = 0; + mHasSkipLayer = false; + + if (contents->flags & HWC_GEOMETRY_CHANGED) { + init_flag = 0; + return; + } + + if ((ovly_idx == -1) || (ovly_idx >= ((int)contents->numHwLayers - 2)) || + ((contents->numHwLayers - ovly_idx - 1) >= NUM_VIRT_OVER_HDMI)) { + init_flag = 0; + return; + } + + ovly_idx++; + if (init_flag == 1) { + for (size_t i = ovly_idx; i < contents->numHwLayers - 1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (!layer.handle || (mLastLayerHandles[i - ovly_idx] != layer.handle)) { + init_flag = 0; + return; + } + } + + mVirtualOverlayFlag = 1; + for (size_t i = ovly_idx; i < contents->numHwLayers - 1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_FRAMEBUFFER) { + layer.compositionType = HWC_OVERLAY; + mHasSkipLayer = true; + } + } + return; + } + + init_flag = 1; + for (size_t i = ovly_idx; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + mLastLayerHandles[i - ovly_idx] = layer.handle; + } + + for (size_t i = contents->numHwLayers - ovly_idx; i < NUM_VIRT_OVER_HDMI; i++) + mLastLayerHandles[i - ovly_idx] = 0; + + return; +} + +void ExynosExternalDisplay::setPreset(int preset) +{ + mHwc->mHdmiResolutionChanged = false; + mHwc->mHdmiResolutionHandled = false; + mHwc->hdmi_hpd = false; +#if !defined(USE_DV_TIMINGS) + v4l2_dv_preset v_preset; + v_preset.preset = preset; +#else + struct v4l2_dv_timings dv_timing; + int dv_timings_index = getDVTimingsIndex(preset); + if (dv_timings_index < 0) { + ALOGE("invalid preset(%d)", preset); + return; + } + memcpy(&dv_timing, &dv_timings[dv_timings_index], sizeof(v4l2_dv_timings)); +#endif + + if (preset <= V4L2_DV_INVALID || preset > V4L2_DV_1080P30_TB) { + ALOGE("%s: invalid preset, %d", __func__, preset); + return; + } + + disable(); +#if defined(USE_DV_TIMINGS) + if (ioctl(mMixerLayers[0].fd, VIDIOC_S_DV_TIMINGS, &dv_timing) != -1) { + if (mHwc->procs) + mHwc->procs->hotplug(mHwc->procs, 
HWC_DISPLAY_EXTERNAL, false); + } +#else + if (ioctl(mMixerLayers[0].fd, VIDIOC_S_DV_PRESET, &v_preset) != -1) { + if (mHwc->procs) + mHwc->procs->hotplug(mHwc->procs, HWC_DISPLAY_EXTERNAL, false); + } +#endif +} + +int ExynosExternalDisplay::convert3DTo2D(int preset) +{ + switch (preset) { + case V4L2_DV_720P60_FP: + case V4L2_DV_720P60_SB_HALF: + case V4L2_DV_720P60_TB: + return V4L2_DV_720P60; + case V4L2_DV_720P50_FP: + case V4L2_DV_720P50_SB_HALF: + case V4L2_DV_720P50_TB: + return V4L2_DV_720P50; + case V4L2_DV_1080P60_SB_HALF: + case V4L2_DV_1080P60_TB: + return V4L2_DV_1080P60; + case V4L2_DV_1080P30_FP: + case V4L2_DV_1080P30_SB_HALF: + case V4L2_DV_1080P30_TB: + return V4L2_DV_1080P30; + default: + return HDMI_PRESET_ERROR; + } +} + +void ExynosExternalDisplay::calculateDstRect(int src_w, int src_h, int dst_w, int dst_h, struct v4l2_rect *dst_rect) +{ + if (dst_w * src_h <= dst_h * src_w) { + dst_rect->left = 0; + dst_rect->top = (dst_h - ((dst_w * src_h) / src_w)) >> 1; + dst_rect->width = dst_w; + dst_rect->height = ((dst_w * src_h) / src_w); + } else { + dst_rect->left = (dst_w - ((dst_h * src_w) / src_h)) >> 1; + dst_rect->top = 0; + dst_rect->width = ((dst_h * src_w) / src_h); + dst_rect->height = dst_h; + } +} + +bool ExynosExternalDisplay::isVPSupported(hwc_layer_1_t &layer, int format) +{ +#if defined(VP_VIDEO) + int min_source_width = 32; + int min_source_height = 4; + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if((layer.transform == 0) && + (format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED || + format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M || + format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV || + format == HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M)) { + if (h->stride >= min_source_width && + h->height >= min_source_height && + (WIDTH(layer.sourceCropf) == WIDTH(layer.displayFrame)) && + (HEIGHT(layer.sourceCropf) == HEIGHT(layer.displayFrame))) { + return true; + } + } +#endif + return false; +} + +bool ExynosExternalDisplay::isVideoOverlaySupported(hwc_layer_1_t &layer, int format) +{ +#if defined(VP_VIDEO) + if (isVPSupported(layer, format)) + return true; +#endif + if (mMPPs[0]->isProcessingSupported(layer, format, false) > 0) + return true; + + return false; +} + +int ExynosExternalDisplay::prepare(hwc_display_contents_1_t* contents) +{ + hwc_layer_1_t *video_layer = NULL; + uint32_t numVideoLayers = 0; + uint32_t videoIndex = 0; + + mHwc->force_mirror_mode = false; + checkGrallocFlags(contents); + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + ALOGV("\tlayer %u: framebuffer target", i); + continue; + } + + if (layer.compositionType == HWC_BACKGROUND) { + ALOGV("\tlayer %u: background layer", i); + dumpLayer(&layer); + continue; + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (mLocalExternalDisplayPause) { + layer.compositionType = HWC_OVERLAY; + layer.flags = HWC_SKIP_HDMI_RENDERING; + continue; + } +#endif + + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if ((int)get_yuv_planes(halFormatToV4L2Format(h->format)) > 0) { + if (mHwc->mS3DMode != S3D_MODE_DISABLED && mHwc->mHdmiResolutionChanged) + mHwc->mS3DMode = S3D_MODE_RUNNING; + } + + if ((mHwc->force_mirror_mode) && getDrmMode(h->flags) == NO_DRM) { + layer.compositionType = HWC_FRAMEBUFFER; + continue; + } else { +#if defined(GSC_VIDEO) || defined(VP_VIDEO) + if (((getDrmMode(h->flags) != NO_DRM) + || 
((int)get_yuv_planes(halFormatToV4L2Format(h->format)) > 0)) && + isVideoOverlaySupported(layer, h->format)) { +#else + if (getDrmMode(h->flags) != NO_DRM) { +#endif +#if !defined(GSC_VIDEO) && !defined(VP_VIDEO) + if (((mUseProtectedLayer == true) && (getDrmMode(handle->flags) != NO_DRM)) || + ((mUseProtectedLayer == false) && !video_layer)) { +#endif + video_layer = &layer; + layer.compositionType = HWC_OVERLAY; +#if defined(GSC_VIDEO) || defined(VP_VIDEO) + videoIndex = i; + numVideoLayers++; +#endif + ALOGV("\tlayer %u: video layer", i); + dumpLayer(&layer); + continue; +#if !defined(GSC_VIDEO) && !defined(VP_VIDEO) + } +#endif + } + } + layer.compositionType = HWC_FRAMEBUFFER; + dumpLayer(&layer); + } else { + layer.compositionType = HWC_FRAMEBUFFER; + } + } +#if defined(GSC_VIDEO) || defined(VP_VIDEO) + if (numVideoLayers == 1) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (!mUseSubtitles || i == videoIndex) { + if (mHwc->mS3DMode != S3D_MODE_DISABLED) + layer.compositionType = HWC_OVERLAY; + } + + if (i == videoIndex) { + struct v4l2_rect dest_rect; + if (mHwc->mS3DMode != S3D_MODE_DISABLED) { + layer.displayFrame.left = 0; + layer.displayFrame.top = 0; + layer.displayFrame.right = mXres; + layer.displayFrame.bottom = mYres; + } + } + } +#if !defined(USE_GRALLOC_FLAG_FOR_HDMI) + skipStaticLayers(contents, videoIndex); +#endif + } else if (numVideoLayers > 1) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle) { + handle = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(handle->flags) != NO_DRM) + continue; + } + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET || + layer.compositionType == HWC_BACKGROUND) + continue; + layer.compositionType = HWC_FRAMEBUFFER; + } + } +#endif + mHasFbComposition = false; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_FRAMEBUFFER) + mHasFbComposition = true; + } + return 0; +} + +int ExynosExternalDisplay::clearDisplay() +{ + return -1; +} + +int ExynosExternalDisplay::set(hwc_display_contents_1_t* contents) +{ + hwc_layer_1_t *fb_layer = NULL; + hwc_layer_1_t *video_layer = NULL; + + if (!mEnabled || mBlanked) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.acquireFenceFd >= 0) { + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + } + } + return 0; + } + +#if !defined(USE_GRALLOC_FLAG_FOR_HDMI) +#if defined(VP_VIDEO) + mVideoIndex = VIDEO_LAYER_INDEX; +#else + mVideoIndex = 1; +#endif + mUiIndex = 0; +#endif + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.flags & HWC_SKIP_LAYER) { + ALOGV("HDMI skipping layer %d", i); + continue; + } + + if (layer.compositionType == HWC_OVERLAY) { + if (!layer.handle) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (layer.flags & HWC_SKIP_HDMI_RENDERING) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } else { +#endif +#if defined(GSC_VIDEO) || defined(VP_VIDEO) + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if ((int)get_yuv_planes(halFormatToV4L2Format(handle->format)) < 0) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } +#endif +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + } 
+#endif + + ALOGV("HDMI video layer:"); + dumpLayer(&layer); + + int gsc_idx = HDMI_GSC_IDX; + bool changedPreset = false; + if (mHwc->mS3DMode != S3D_MODE_DISABLED && mHwc->mHdmiResolutionChanged) { + if (isPresetSupported(mHwc->mHdmiPreset)) { + mHwc->mS3DMode = S3D_MODE_RUNNING; + setPreset(mHwc->mHdmiPreset); + changedPreset = true; + } else { + mHwc->mS3DMode = S3D_MODE_RUNNING; + mHwc->mHdmiResolutionChanged = false; + mHwc->mHdmiResolutionHandled = true; + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + mMPPs[0]->mS3DMode = S3D_SBS; + else if (S3DFormat == S3D_TB) + mMPPs[0]->mS3DMode = S3D_TB; + } + } + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + +#if defined(GSC_VIDEO) || defined(VP_VIDEO) + if ((getDrmMode(h->flags) != NO_DRM) || + ((int)get_yuv_planes(halFormatToV4L2Format(h->format)) > 0)) { +#else + if (getDrmMode(h->flags) != NO_DRM) { +#endif + if (getDrmMode(h->flags) == SECURE_DRM) + recalculateDisplayFrame(layer, mXres, mYres); + + video_layer = &layer; + + if (mIsVideoLayer == false) + mVideoStarted = true; + else + mVideoStarted = false; + mIsVideoLayer = true; + +#if defined(VP_VIDEO) + if ((mMPPs[0]->mS3DMode == S3D_MODE_DISABLED) && + (isVPSupported(layer, h->format))) { + enableLayer(mMixerLayers[mVideoIndex]); + output(mMixerLayers[mVideoIndex], layer, h, layer.acquireFenceFd, + &layer.releaseFenceFd); + } else { +#endif + ExynosMPPModule &gsc = *mMPPs[0]; +#if defined(VP_VIDEO) + int ret = gsc.processM2M(layer, HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED, NULL); +#else + int ret = gsc.processM2M(layer, HAL_PIXEL_FORMAT_BGRA_8888, NULL); +#endif + if (ret < 0) { + ALOGE("failed to configure gscaler for video layer"); + continue; + } + + buffer_handle_t dst_buf = gsc.mDstBuffers[gsc.mCurrentBuf]; + private_handle_t *h = private_handle_t::dynamicCast(dst_buf); + + int acquireFenceFd = gsc.mDstConfig.releaseFenceFd; + int releaseFenceFd = -1; + + enableLayer(mMixerLayers[mVideoIndex]); + + output(mMixerLayers[mVideoIndex], layer, h, acquireFenceFd, + &releaseFenceFd); + + if (gsc.mDstBufFence[gsc.mCurrentBuf] >= 0) { + close (gsc.mDstBufFence[gsc.mCurrentBuf]); + gsc.mDstBufFence[gsc.mCurrentBuf] = -1; + } + gsc.mDstBufFence[gsc.mCurrentBuf] = releaseFenceFd; + gsc.mCurrentBuf = (gsc.mCurrentBuf + 1) % gsc.mNumAvailableDstBuffers; +#if defined(VP_VIDEO) + } +#endif + } + + } + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + if (!layer.handle) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + + if (!mHasFbComposition && !mHasSkipLayer) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + + dumpLayer(&layer); + + /* HDMI rotation for camera preview */ + bool camera_preview_started = false; + bool camera_connected = false; + char value[PROPERTY_VALUE_MAX]; + property_get("persist.sys.camera.preview", value, "0"); + camera_preview_started = !!(atoi(value)); + property_get("persist.sys.camera.connect", value, "0"); + camera_connected = !!(atoi(value)); + + if (camera_preview_started && camera_connected) + mIsCameraStarted = true; + else if (!camera_preview_started && !camera_connected) + mIsCameraStarted = false; + + if (((mFBT_Transform == HAL_TRANSFORM_FLIP_V) || + (mFBT_Transform == HAL_TRANSFORM_FLIP_H) || + (mFBT_Transform == HAL_TRANSFORM_ROT_90) || + (mFBT_Transform == HAL_TRANSFORM_ROT_180) || + (mFBT_Transform == HAL_TRANSFORM_ROT_270))) { + if (mHasFbComposition && mIsCameraStarted && camera_connected) { + struct v4l2_rect dest_rect; + bool rot90or270 = 
!!((mFBT_Transform) & HAL_TRANSFORM_ROT_90); + + if (rot90or270) + calculateDstRect(HEIGHT(layer.sourceCropf), WIDTH(layer.sourceCropf), + mXres, mYres, &dest_rect); + else + calculateDstRect(WIDTH(layer.sourceCropf), HEIGHT(layer.sourceCropf), + mXres, mYres, &dest_rect); + + layer.displayFrame.left = dest_rect.left; + layer.displayFrame.top = dest_rect.top; + layer.displayFrame.right = dest_rect.width + dest_rect.left; + layer.displayFrame.bottom = dest_rect.height + dest_rect.top; + layer.transform = mFBT_Transform; + + ExynosMPPModule &gsc = *mMPPs[0]; + int ret = gsc.processM2M(layer, HAL_PIXEL_FORMAT_BGRA_8888, NULL); + if (ret < 0) { + ALOGE("failed to configure gscaler for video layer"); + continue; + } + + buffer_handle_t dst_buf = gsc.mDstBuffers[gsc.mCurrentBuf]; + private_handle_t *h = private_handle_t::dynamicCast(dst_buf); + + int acquireFenceFd = gsc.mDstConfig.releaseFenceFd; + int releaseFenceFd = -1; + + if (mIsVideoLayer == false) + mVideoStarted = true; + else + mVideoStarted = false; + mIsVideoLayer = true; + + enableLayer(mMixerLayers[mVideoIndex]); + output(mMixerLayers[mVideoIndex], layer, h, acquireFenceFd, + &releaseFenceFd); + if (gsc.mDstBufFence[gsc.mCurrentBuf] >= 0) { + close (gsc.mDstBufFence[gsc.mCurrentBuf]); + gsc.mDstBufFence[gsc.mCurrentBuf] = -1; + } + gsc.mDstBufFence[gsc.mCurrentBuf] = releaseFenceFd; + gsc.mCurrentBuf = (gsc.mCurrentBuf + 1) % NUM_GSC_DST_BUFS; + + } else + layer.releaseFenceFd = layer.acquireFenceFd; + video_layer = &layer; + } else { + if (mHasFbComposition && ((mIsCameraStarted && camera_connected) || !mIsCameraStarted)) { + if (mIsFbLayer == false) + mFbStarted = true; + else + mFbStarted = false; + mIsFbLayer = true; + + layer.displayFrame.left = 0; + layer.displayFrame.right = mXres; + layer.displayFrame.top = 0; + layer.displayFrame.bottom = mYres; + layer.transform = 0; + + enableLayer(mMixerLayers[mUiIndex]); + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + private_module_t *grallocModule = (private_module_t *)((ExynosOverlayDisplay *)mHwc->primaryDisplay)->mGrallocModule; + waitForRenderFinish(grallocModule, &layer.handle, 1); + output(mMixerLayers[mUiIndex], layer, h, layer.acquireFenceFd, + &layer.releaseFenceFd); + } else + layer.releaseFenceFd = layer.acquireFenceFd; + fb_layer = &layer; + } + } + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (!mLocalExternalDisplayPause) { +#endif + if (!video_layer) { + disableLayer(mMixerLayers[mVideoIndex]); +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + mIsVideoLayer = false; +#endif + mMPPs[0]->cleanupM2M(); + if (mHwc->mS3DMode == S3D_MODE_RUNNING && contents->numHwLayers > 1) { + int preset = convert3DTo2D(mHwc->mHdmiCurrentPreset); + if (isPresetSupported(preset)) { + setPreset(preset); + mHwc->mS3DMode = S3D_MODE_STOPPING; + mHwc->mHdmiPreset = preset; + if (mHwc->procs) + mHwc->procs->invalidate(mHwc->procs); + } else { + mHwc->mS3DMode = S3D_MODE_DISABLED; + mHwc->mHdmiPreset = mHwc->mHdmiCurrentPreset; + } + } + } + + if (!fb_layer) { + disableLayer(mMixerLayers[mUiIndex]); + mIsFbLayer = false; + } +#if !defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (!video_layer) { + disableLayer(mMixerLayers[mVideoIndex]); + mIsVideoLayer = false; + } +#endif + + /* MIXER_UPDATE */ + if (exynos_v4l2_s_ctrl(mMixerLayers[mUiIndex].fd, V4L2_CID_TV_UPDATE, 1) < 0) { + ALOGE("%s: s_ctrl(CID_TV_UPDATE) failed %d", __func__, errno); + return -1; + } +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + } +#endif + + return 0; +} + +void ExynosExternalDisplay::setHdcpStatus(int status) +{ + 
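+    /* Any non-zero status enables HDCP (V4L2_CID_TV_HDCP_ENABLE) on the mixer. */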
if (exynos_v4l2_s_ctrl(mMixerLayers[1].fd, V4L2_CID_TV_HDCP_ENABLE, + !!status) < 0) + ALOGE("%s: s_ctrl(CID_TV_HDCP_ENABLE) failed %d", __func__, errno); +} + +void ExynosExternalDisplay::setAudioChannel(uint32_t channels) +{ + if (exynos_v4l2_s_ctrl(mMixerLayers[0].fd, + V4L2_CID_TV_SET_NUM_CHANNELS, channels) < 0) + ALOGE("%s: failed to set audio channels", __func__); +} + +uint32_t ExynosExternalDisplay::getAudioChannel() +{ + int channels; + if (exynos_v4l2_g_ctrl(mMixerLayers[0].fd, + V4L2_CID_TV_MAX_AUDIO_CHANNELS, &channels) < 0) + ALOGE("%s: failed to get audio channels", __func__); + return channels; +} + +void ExynosExternalDisplay::checkGrallocFlags(hwc_display_contents_1_t *contents) +{ + mUseProtectedLayer = false; +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + mUseScreenshootLayer = false; + + /* it can get from HWCService */ + mLocalExternalDisplayPause = mHwc->external_display_pause; + mFBT_Transform = mHwc->ext_fbt_transform; +#endif + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (h->flags & GRALLOC_USAGE_PROTECTED) + mUseProtectedLayer = false; + } +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (layer.flags & HWC_SCREENSHOT_ANIMATOR_LAYER) + mUseScreenshootLayer = true; +#endif + } + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + if (mUseScreenshootLayer) + mLocalExternalDisplayPause = true; + else + mLocalExternalDisplayPause = false; +#endif +} + +int ExynosExternalDisplay::getCecPaddr() +{ + if (!mHwc->hdmi_hpd) + return -1; + + int cecPaddr = -1;; + + if (exynos_v4l2_g_ctrl(mMixerLayers[0].fd, V4L2_CID_TV_SOURCE_PHY_ADDR, &cecPaddr) < 0) + return -1; + + return cecPaddr; +} + +int ExynosExternalDisplay::blank() +{ +/* USE_HDMI_BLANK */ + /* + * V4L2_CID_TV_BLANK becomes effective + * only if it is called before disable() : STREAMOFF + */ + if (exynos_v4l2_s_ctrl(mMixerLayers[mUiIndex].fd, V4L2_CID_TV_BLANK, 1) < 0) { + ALOGE("%s: s_ctrl(CID_TV_BLANK) failed %d", __func__, errno); + return -1; + } + return 0; +} + +int ExynosExternalDisplay::waitForRenderFinish(private_module_t *grallocModule, buffer_handle_t *handle, int buffers) +{ + return 0; +} diff --git a/libhdmi_legacy/ExynosExternalDisplay.h b/libhdmi_legacy/ExynosExternalDisplay.h new file mode 100644 index 0000000..3d4a326 --- /dev/null +++ b/libhdmi_legacy/ExynosExternalDisplay.h @@ -0,0 +1,109 @@ +#ifndef EXYNOS_LEGACY_HDMI_H +#define EXYNOS_LEGACY_HDMI_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +#define VIDEO_LAYER_INDEX 2 +#define NUM_VIRT_OVER_HDMI 5 + +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) +#define HWC_SKIP_HDMI_RENDERING 0x80000000 +#endif +#define SUPPORTED_DV_TIMINGS_NUM 24 +struct preset_index_mapping { + int preset; + int dv_timings_index; +}; +const struct preset_index_mapping preset_index_mappings[SUPPORTED_DV_TIMINGS_NUM] = { + {V4L2_DV_480P59_94, 0}, + {V4L2_DV_576P50, 1}, + {V4L2_DV_720P50, 2}, + {V4L2_DV_720P60, 3}, + {V4L2_DV_1080I50, 4}, + {V4L2_DV_1080I60, 5}, + {V4L2_DV_1080P24, 6}, + {V4L2_DV_1080P25, 7}, + {V4L2_DV_1080P30, 8}, + {V4L2_DV_1080P50, 9}, + {V4L2_DV_1080P60, 10}, + {V4L2_DV_720P60_SB_HALF, 11}, + {V4L2_DV_720P60_TB, 12}, + {V4L2_DV_720P50_SB_HALF, 13}, + {V4L2_DV_720P50_TB, 14}, + {V4L2_DV_1080P24_FP, 15}, + {V4L2_DV_1080P24_SB_HALF, 16}, + {V4L2_DV_1080P24_TB, 17}, + {V4L2_DV_1080I60_SB_HALF, 18}, + {V4L2_DV_1080I50_SB_HALF, 19}, + {V4L2_DV_1080P60_SB_HALF, 20}, + {V4L2_DV_1080P60_TB, 21}, + {V4L2_DV_1080P30_SB_HALF, 
22}, + {V4L2_DV_1080P30_TB, 23} +}; + +class ExynosExternalDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosExternalDisplay(); + + void setHdmiStatus(bool status); + + bool isPresetSupported(unsigned int preset); + int getConfig(); + int getDisplayConfigs(uint32_t *configs, size_t *numConfigs); + int enableLayer(hdmi_layer_t &hl); + void disableLayer(hdmi_layer_t &hl); + int enable(); + void disable(); + int output(hdmi_layer_t &hl, hwc_layer_1_t &layer, private_handle_t *h, int acquireFenceFd, int *releaseFenceFd); + void skipStaticLayers(hwc_display_contents_1_t *contents, int ovly_idx); + void setPreset(int preset); + int convert3DTo2D(int preset); + void calculateDstRect(int src_w, int src_h, int dst_w, int dst_h, struct v4l2_rect *dst_rect); + void setHdcpStatus(int status); + void setAudioChannel(uint32_t channels); + uint32_t getAudioChannel(); + void checkGrallocFlags(hwc_display_contents_1_t *contents); + int getDVTimingsIndex(int preset); + int getCecPaddr(); + bool isVideoOverlaySupported(hwc_layer_1_t &layer, int format); + bool isVPSupported(hwc_layer_1_t &layer, int format); + + virtual int openHdmi(); + virtual int blank(); + virtual int prepare(hwc_display_contents_1_t* contents); + virtual int set(hwc_display_contents_1_t* contents); + + virtual int waitForRenderFinish(private_module_t *grallocModule, buffer_handle_t *handle, int buffers); + int clearDisplay(); + + /* Fields */ + ExynosMPPModule *mMPPs[1]; + + int mMixer; + bool mEnabled; + bool mBlanked; + hdmi_layer_t mMixerLayers[3]; + int mIsFbLayer; + int mIsVideoLayer; + int mFbStarted; + int mVideoStarted; + bool mHasFbComposition; + bool mHasSkipLayer; + int mUiIndex; + int mVideoIndex; + bool mUseSubtitles; + const void *mLastLayerHandles[NUM_VIRT_OVER_HDMI]; + int mVirtualOverlayFlag; + bool mIsCameraStarted; + uint32_t mFBT_Transform; + bool mUseProtectedLayer; +#if defined(USE_GRALLOC_FLAG_FOR_HDMI) + bool mLocalExternalDisplayPause; + bool mUseScreenshootLayer; +#endif +}; + +#endif diff --git a/libhdmi_legacy/NOTICE b/libhdmi_legacy/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhdmi_legacy/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + diff --git a/libhdmi_legacy/dv_timings.c b/libhdmi_legacy/dv_timings.c new file mode 100644 index 0000000..00ad814 --- /dev/null +++ b/libhdmi_legacy/dv_timings.c @@ -0,0 +1,30 @@ +#include "../../exynos/kernel-3.10-headers/videodev2.h" +#include "../../exynos/kernel-3.10-headers/v4l2-dv-timings.h" + +const struct v4l2_dv_timings dv_timings[] = { + V4L2_DV_BT_CEA_720X480P59_94, + V4L2_DV_BT_CEA_720X576P50, + V4L2_DV_BT_CEA_1280X720P50, + V4L2_DV_BT_CEA_1280X720P60, + V4L2_DV_BT_CEA_1920X1080I50, + V4L2_DV_BT_CEA_1920X1080I60, + V4L2_DV_BT_CEA_1920X1080P24, + V4L2_DV_BT_CEA_1920X1080P25, + V4L2_DV_BT_CEA_1920X1080P30, + V4L2_DV_BT_CEA_1920X1080P50, + V4L2_DV_BT_CEA_1920X1080P60, + V4L2_DV_BT_CEA_1280X720P60_SB_HALF, + V4L2_DV_BT_CEA_1280X720P60_TB, + V4L2_DV_BT_CEA_1280X720P50_SB_HALF, + V4L2_DV_BT_CEA_1280X720P50_TB, + V4L2_DV_BT_CEA_1920X1080P24_FP, + V4L2_DV_BT_CEA_1920X1080P24_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P24_TB, + V4L2_DV_BT_CEA_1920X1080I60_SB_HALF, + V4L2_DV_BT_CEA_1920X1080I50_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P60_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P60_TB, + V4L2_DV_BT_CEA_1920X1080P30_SB_HALF, + V4L2_DV_BT_CEA_1920X1080P30_TB, +}; + diff --git a/libhwc/Android.mk b/libhwc/Android.mk new file mode 100644 index 0000000..36c7fec --- /dev/null +++ b/libhwc/Android.mk @@ -0,0 +1,123 @@ +# Copyright (C) 2012 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
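+
+# The rules below are driven entirely by BoardConfig variables
+# (BOARD_USES_VPP, BOARD_USES_HWC_SERVICES, BOARD_USES_VIRTUAL_DISPLAY,
+# BOARD_USES_CEC, BOARD_USES_FIMC, ...). As a purely illustrative sketch
+# (the values below are hypothetical and not part of this import), a
+# device's BoardConfig.mk might enable the optional paths like so:
+#
+#   BOARD_USES_VPP := true
+#   BOARD_USES_HWC_SERVICES := true
+#   BOARD_USES_VIRTUAL_DISPLAY := true
+#   BOARD_USES_CEC := true
+#
+# Any variable left unset (or set to anything other than "true") simply
+# skips the corresponding ifeq block.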
+ +LOCAL_PATH:= $(call my-dir) +# HAL module implemenation, not prelinked and stored in +# hw/..so + +ifneq ($(BOARD_DISABLE_HWC_DEBUG),true) +include $(CLEAR_VARS) +LOCAL_SRC_FILES := ExynosHWCDebug.c +LOCAL_MODULE := hwcdebug +include $(BUILD_EXECUTABLE) +endif + +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_MODULE_RELATIVE_PATH := hw +LOCAL_SHARED_LIBRARIES := liblog libcutils libEGL libGLESv1_CM libhardware \ + libhardware_legacy libutils libsync \ + libexynosv4l2 libexynosutils libhwcutils libdisplay libhdmi \ + libmpp +#libMcClient + +ifeq ($(BOARD_USES_FIMC), true) + LOCAL_SHARED_LIBRARIES += libexynosfimc +else + LOCAL_SHARED_LIBRARIES += libexynosgscaler +endif + +ifeq ($(BOARD_USES_IP_SERVICE), true) + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/libipService + LOCAL_SHARED_LIBRARIES += libExynosIPService +endif + +ifeq ($(BOARD_USES_HWC_SERVICES),true) + LOCAL_SHARED_LIBRARIES += libExynosHWCService + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhwcService + +ifeq ($(BOARD_USES_WFD),true) +# LOCAL_SHARED_LIBRARIES += libfimg +# LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libfimg4x +endif +endif + +ifeq ($(BOARD_USES_VIRTUAL_DISPLAY), true) +ifeq ($(BOARD_USES_VPP), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppvirtualdisplay +else + LOCAL_SHARED_LIBRARIES += libfimg + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libfimg4x + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvirtualdisplay +endif + LOCAL_SHARED_LIBRARIES += libvirtualdisplay + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libvirtualdisplaymodule +endif + +ifeq ($(BOARD_USES_FB_PHY_LINEAR),true) + LOCAL_SHARED_LIBRARIES += libfimg + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libfimg4x +endif + +ifeq ($(BOARD_HDMI_INCAPABLE), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libhdmi_dummy +else +ifeq ($(BOARD_USES_VPP), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvpphdmi +else +ifeq ($(BOARD_USES_NEW_HDMI), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libhdmi +else + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libhdmi_legacy +endif +endif +ifeq ($(BOARD_USES_CEC),true) + LOCAL_SHARED_LIBRARIES += libcec +endif +ifeq ($(TARGET_BOARD_PLATFORM),exynos4) + LOCAL_CFLAGS += -DCHANGE_POWEROFF_SEQ +endif +endif + +LOCAL_CFLAGS += -DLOG_TAG=\"hwcomposer\" +LOCAL_CFLAGS += -DHLOG_CODE=0 + +LOCAL_C_INCLUDES += \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/exynos/libcec \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhdmimodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp + +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay +else +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libdisplay +endif + +LOCAL_SRC_FILES := ExynosHWC.cpp + +LOCAL_MODULE := hwcomposer.$(TARGET_BOARD_PLATFORM) +LOCAL_MODULE_TAGS := optional + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libhwc/ExynosHWC.cpp b/libhwc/ExynosHWC.cpp new file mode 100644 index 0000000..338ab8d --- /dev/null +++ b/libhwc/ExynosHWC.cpp @@ -0,0 +1,1587 @@ +/* + * Copyright (C) 
2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS + +#include +#include +#include +#include + +#if defined(USES_CEC) +#include "libcec.h" +#endif + +#ifdef HWC_SERVICES +#include "ExynosHWCService.h" +namespace android { +class ExynosHWCService; +} +#endif + +#ifdef IP_SERVICE +#include "ExynosIPService.h" +#endif + +#include "ExynosHWC.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosOverlayDisplay.h" +#include "ExynosExternalDisplayModule.h" +#include "ExynosPrimaryDisplay.h" +#if defined(USES_DUAL_DISPLAY) +#include "ExynosSecondaryDisplayModule.h" +#endif +#ifdef USES_VIRTUAL_DISPLAY +#include "ExynosVirtualDisplayModule.h" +#endif + +void doPSRExit(struct exynos5_hwc_composer_device_1_t *pdev) +{ + int val; + int ret; + if (pdev->psrMode != PSR_NONE && pdev->notifyPSRExit) { + pdev->notifyPSRExit = false; + ret = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_WIN_PSR_EXIT, &val); + } +} + +#if defined(USES_CEC) +void handle_cec(exynos5_hwc_composer_device_1_t *pdev) +{ + unsigned char buffer[16]; + int size; + unsigned char lsrc, ldst, opcode; + + size = CECReceiveMessage(buffer, CEC_MAX_FRAME_SIZE, 1000); + + /* no data available or ctrl-c */ + if (!size) + return; + + /* "Polling Message" */ + if (size == 1) + return; + + lsrc = buffer[0] >> 4; + + /* ignore messages with src address == mCecLaddr */ + if (lsrc == pdev->mCecLaddr) + return; + + opcode = buffer[1]; + + if (CECIgnoreMessage(opcode, lsrc)) { + ALOGE("### ignore message coming from address 15 (unregistered)"); + return; + } + + if (!CECCheckMessageSize(opcode, size)) { + /* + * For some reason the TV sometimes sends messages that are too long + * Dropping these causes the connect process to fail, so for now we + * simply ignore the extra data and process the message as if it had + * the correct size + */ + ALOGD("### invalid message size: %d(opcode: 0x%x) ###", size, opcode); + } + + /* check if message broadcasted/directly addressed */ + if (!CECCheckMessageMode(opcode, (buffer[0] & 0x0F) == CEC_MSG_BROADCAST ? 
1 : 0)) { + ALOGE("### invalid message mode (directly addressed/broadcast) ###"); + return; + } + + ldst = lsrc; + + /* TODO: macros to extract src and dst logical addresses */ + /* TODO: macros to extract opcode */ + + switch (opcode) { + case CEC_OPCODE_GIVE_PHYSICAL_ADDRESS: + /* respond with "Report Physical Address" */ + buffer[0] = (pdev->mCecLaddr << 4) | CEC_MSG_BROADCAST; + buffer[1] = CEC_OPCODE_REPORT_PHYSICAL_ADDRESS; + buffer[2] = (pdev->mCecPaddr >> 8) & 0xFF; + buffer[3] = pdev->mCecPaddr & 0xFF; + buffer[4] = 3; + size = 5; + break; + + case CEC_OPCODE_SET_STREAM_PATH: + case CEC_OPCODE_REQUEST_ACTIVE_SOURCE: + /* respond with "Active Source" */ + buffer[0] = (pdev->mCecLaddr << 4) | CEC_MSG_BROADCAST; + buffer[1] = CEC_OPCODE_ACTIVE_SOURCE; + buffer[2] = (pdev->mCecPaddr >> 8) & 0xFF; + buffer[3] = pdev->mCecPaddr & 0xFF; + size = 4; + break; + + case CEC_OPCODE_GIVE_DEVICE_POWER_STATUS: + /* respond with "Report Power Status" */ + buffer[0] = (pdev->mCecLaddr << 4) | ldst; + buffer[1] = CEC_OPCODE_REPORT_POWER_STATUS; + buffer[2] = 0; + size = 3; + break; + + case CEC_OPCODE_REPORT_POWER_STATUS: + /* send Power On message */ + buffer[0] = (pdev->mCecLaddr << 4) | ldst; + buffer[1] = CEC_OPCODE_USER_CONTROL_PRESSED; + buffer[2] = 0x6D; + size = 3; + break; + + case CEC_OPCODE_USER_CONTROL_PRESSED: + buffer[0] = (pdev->mCecLaddr << 4) | ldst; + size = 1; + break; + case CEC_OPCODE_GIVE_DECK_STATUS: + /* respond with "Deck Status" */ + buffer[0] = (pdev->mCecLaddr << 4) | ldst; + buffer[1] = CEC_OPCODE_DECK_STATUS; + buffer[2] = 0x11; + size = 3; + break; + + case CEC_OPCODE_ABORT: + case CEC_OPCODE_FEATURE_ABORT: + default: + /* send "Feature Abort" */ + buffer[0] = (pdev->mCecLaddr << 4) | ldst; + buffer[1] = CEC_OPCODE_FEATURE_ABORT; + buffer[2] = CEC_OPCODE_ABORT; + buffer[3] = 0x04; + size = 4; + break; + } + + if (CECSendMessage(buffer, size) != size) + ALOGE("CECSendMessage() failed!!!"); +} + +void start_cec(exynos5_hwc_composer_device_1_t *pdev) +{ + unsigned char buffer[CEC_MAX_FRAME_SIZE]; + int size; + pdev->mCecFd = CECOpen(); + pdev->mCecPaddr = CEC_NOT_VALID_PHYSICAL_ADDRESS; + pdev->mCecPaddr = pdev->externalDisplay->getCecPaddr(); + if (pdev->mCecPaddr < 0) { + ALOGE("Error getting physical address"); + return; + } + pdev->mCecLaddr = CECAllocLogicalAddress(pdev->mCecPaddr, CEC_DEVICE_PLAYER); + /* Request power state from TV */ + buffer[0] = (pdev->mCecLaddr << 4); + buffer[1] = CEC_OPCODE_GIVE_DEVICE_POWER_STATUS; + size = 2; + if (CECSendMessage(buffer, size) != size) + ALOGE("CECSendMessage(%#x) failed!!!", buffer[0]); +} +#endif + +void exynos5_boot_finished(exynos5_hwc_composer_device_1_t *dev) +{ + ALOGD("Boot Finished"); + int sw_fd; + exynos5_hwc_composer_device_1_t *pdev = + (exynos5_hwc_composer_device_1_t *)dev; + if (pdev == NULL) { + ALOGE("%s:: dev is NULL", __func__); + return; + } + sw_fd = open("/sys/class/switch/hdmi/state", O_RDONLY); + + if (sw_fd >= 0) { + char val; + if (read(sw_fd, &val, 1) == 1 && val == '1') { + if (pdev->hdmi_hpd != 1) { + pdev->hdmi_hpd = true; + if ((pdev->externalDisplay->openHdmi() > 0) && pdev->externalDisplay->getConfig()) { + ALOGE("Error reading HDMI configuration"); + pdev->hdmi_hpd = false; + } + pdev->externalDisplay->mBlanked = false; +#if defined(USES_CEC) + start_cec(pdev); +#endif + if (pdev->procs) { + pdev->procs->hotplug(pdev->procs, HWC_DISPLAY_EXTERNAL, true); + pdev->procs->invalidate(pdev->procs); + } + } + } + close(sw_fd); + } +} + +int exynos5_prepare(hwc_composer_device_1_t *dev, + size_t 
numDisplays, hwc_display_contents_1_t** displays) +{ + ATRACE_CALL(); + if (!numDisplays || !displays) + return 0; + + exynos5_hwc_composer_device_1_t *pdev = + (exynos5_hwc_composer_device_1_t *)dev; +#if defined(USES_DUAL_DISPLAY) + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY0]; + hwc_display_contents_1_t *fimd_contents1 = displays[HWC_DISPLAY_PRIMARY1]; +#else + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY]; +#endif + + hwc_display_contents_1_t *hdmi_contents = displays[HWC_DISPLAY_EXTERNAL]; +#ifdef USES_VIRTUAL_DISPLAY + hwc_display_contents_1_t *virtual_contents = displays[HWC_DISPLAY_VIRTUAL]; + if (virtual_contents == NULL) + pdev->virtualDisplay->deInit(); +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + if (virtual_contents) + pdev->virtualDisplay->init(virtual_contents); +#endif +#endif + pdev->updateCallCnt++; + pdev->update_event_cnt++; + pdev->LastUpdateTimeStamp = systemTime(SYSTEM_TIME_MONOTONIC); + pdev->primaryDisplay->getCompModeSwitch(); + pdev->totPixels = 0; + pdev->incomingPixels = 0; + +#if defined(USES_DUAL_DISPLAY) + if (pdev->hdmi_hpd || (fimd_contents1 == NULL) || + (fimd_contents1->numHwLayers <= 1)) { + if (pdev->secondaryDisplay->mEnabled) + pdev->secondaryDisplay->disable(); + } +#endif + + pdev->externalDisplay->setHdmiStatus(pdev->hdmi_hpd); + +#if defined(USES_DUAL_DISPLAY) + if (!pdev->hdmi_hpd && fimd_contents1 && + (fimd_contents1->numHwLayers > 1)) { + if (!pdev->secondaryDisplay->mEnabled) + pdev->secondaryDisplay->enable(); + } +#endif + + if (pdev->hwc_ctrl.dynamic_recomp_mode == true && + pdev->update_stat_thread_flag == false && + pdev->primaryDisplay->mBlanked == false) { + exynos5_create_update_stat_thread(pdev); + } + +#ifdef USES_VPP + pdev->mDisplayResourceManager->assignResources(numDisplays, displays); +#endif + + if (fimd_contents) { + android::Mutex::Autolock lock(pdev->primaryDisplay->mLayerInfoMutex); + int err = pdev->primaryDisplay->prepare(fimd_contents); + if (err) + return err; + } + if (hdmi_contents) { + android::Mutex::Autolock lock(pdev->externalDisplay->mLayerInfoMutex); + int err = 0; + err = pdev->externalDisplay->prepare(hdmi_contents); + if (err) + return err; + } +#if defined(USES_DUAL_DISPLAY) + if ((pdev->hdmi_hpd == false) && fimd_contents1) { + android::Mutex::Autolock lock(pdev->secondaryDisplay->mLayerInfoMutex); + int err = pdev->secondaryDisplay->prepare(fimd_contents1); + if (err) + return err; + } +#endif + + +#ifdef USES_VIRTUAL_DISPLAY + if (virtual_contents) { +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + ExynosVirtualDisplayModule *virDisplay = (ExynosVirtualDisplayModule *)pdev->virtualDisplay; + virDisplay->setPriContents(fimd_contents); +#endif + int err = pdev->virtualDisplay->prepare(virtual_contents); + if (err) + return err; + } +#endif + + return 0; +} + +int exynos5_set(struct hwc_composer_device_1 *dev, + size_t numDisplays, hwc_display_contents_1_t** displays) +{ + ATRACE_CALL(); + if (!numDisplays || !displays) + return 0; + + exynos5_hwc_composer_device_1_t *pdev = + (exynos5_hwc_composer_device_1_t *)dev; +#if defined(USES_DUAL_DISPLAY) + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY0]; + hwc_display_contents_1_t *fimd_contents1 = displays[HWC_DISPLAY_PRIMARY1]; +#else + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY]; +#endif + hwc_display_contents_1_t *hdmi_contents = displays[HWC_DISPLAY_EXTERNAL]; + int fimd_err = 0, hdmi_err = 0; +#ifdef USES_VIRTUAL_DISPLAY + int virtual_err = 0; + 
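+    /* Contents for HWC_DISPLAY_VIRTUAL; this may be NULL when no virtual
+     * display is active for this frame, in which case the virtual display
+     * set() below is skipped. */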
hwc_display_contents_1_t *virtual_contents = displays[HWC_DISPLAY_VIRTUAL]; +#endif + +#if defined(USES_DUAL_DISPLAY) + if ((pdev->hdmi_hpd == false) && fimd_contents1) { + hdmi_err = pdev->secondaryDisplay->set(fimd_contents1); + } +#endif + + if (fimd_contents) { + android::Mutex::Autolock lock(pdev->primaryDisplay->mLayerInfoMutex); +#if defined(USES_DUAL_DISPLAY) + fimd_err = pdev->primaryDisplay->set_dual(fimd_contents, fimd_contents1); +#else + fimd_err = pdev->primaryDisplay->set(fimd_contents); +#endif + } + + if (pdev->mS3DMode != S3D_MODE_STOPPING && !pdev->mHdmiResolutionHandled) { + pdev->mHdmiResolutionHandled = true; + pdev->hdmi_hpd = true; + pdev->externalDisplay->enable(); + if (pdev->procs) { + pdev->procs->hotplug(pdev->procs, HWC_DISPLAY_EXTERNAL, true); + pdev->procs->invalidate(pdev->procs); + } + } + + if (hdmi_contents && fimd_contents) { + android::Mutex::Autolock lock(pdev->externalDisplay->mLayerInfoMutex); + hdmi_err = pdev->externalDisplay->set(hdmi_contents); + } + + if (pdev->hdmi_hpd && pdev->mHdmiResolutionChanged) { + if (pdev->mS3DMode == S3D_MODE_DISABLED && pdev->externalDisplay->isPresetSupported(pdev->mHdmiPreset)) + pdev->externalDisplay->setPreset(pdev->mHdmiPreset); + } + if (pdev->mS3DMode == S3D_MODE_STOPPING) { + pdev->mS3DMode = S3D_MODE_DISABLED; +#ifndef USES_VPP + for (int i = 0; i < pdev->primaryDisplay->mNumMPPs; i++) + pdev->primaryDisplay->mMPPs[i]->mS3DMode = S3D_NONE; + + if (pdev->externalDisplay->mMPPs[0] != NULL) + pdev->externalDisplay->mMPPs[0]->mS3DMode = S3D_NONE; +#endif + } + +#ifdef USES_VIRTUAL_DISPLAY + if (virtual_contents && fimd_contents) + virtual_err = pdev->virtualDisplay->set(virtual_contents); +#endif + +#ifdef EXYNOS_SUPPORT_PSR_EXIT + pdev->notifyPSRExit = true; +#else + pdev->notifyPSRExit = false; +#endif + + pdev->primaryDisplay->freeMPP(); + +#ifdef USES_VPP + pdev->mDisplayResourceManager->cleanupMPPs(); +#endif + + if (fimd_err) + return fimd_err; + +#ifndef USES_VIRTUAL_DISPLAY + return hdmi_err; +#else + if (hdmi_err) + return hdmi_err; + + return virtual_err; +#endif +} + +void exynos5_registerProcs(struct hwc_composer_device_1* dev, + hwc_procs_t const* procs) +{ + struct exynos5_hwc_composer_device_1_t* pdev = + (struct exynos5_hwc_composer_device_1_t*)dev; + pdev->procs = procs; +} + +int exynos5_query(struct hwc_composer_device_1* dev, int what, int *value) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + switch (what) { + case HWC_BACKGROUND_LAYER_SUPPORTED: + // we support the background layer + value[0] = 1; + break; + case HWC_VSYNC_PERIOD: + // vsync period in nanosecond + value[0] = pdev->primaryDisplay->mVsyncPeriod; + break; + default: + // unsupported query + return -EINVAL; + } + return 0; +} + +int exynos5_eventControl(struct hwc_composer_device_1 *dev, int __unused dpy, + int event, int enabled) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + switch (event) { + case HWC_EVENT_VSYNC: + __u32 val = !!enabled; + pdev->VsyncInterruptStatus = val; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_SET_VSYNC_INT, &val); + if (err < 0) { + ALOGE("vsync ioctl failed"); + return -errno; + } + return 0; + } + + return -EINVAL; +} + +void handle_hdmi_uevent(struct exynos5_hwc_composer_device_1_t *pdev, + const char *buff, int len) +{ + const char *s = buff; + s += strlen(s) + 1; + + while (*s) { + if (!strncmp(s, "SWITCH_STATE=", strlen("SWITCH_STATE="))) + pdev->hdmi_hpd = atoi(s + 
strlen("SWITCH_STATE=")) == 1; + + s += strlen(s) + 1; + if (s - buff >= len) + break; + } + + if (pdev->hdmi_hpd) { + if ((pdev->externalDisplay->openHdmi() > 0) && pdev->externalDisplay->getConfig()) { + ALOGE("Error reading HDMI configuration"); + pdev->hdmi_hpd = false; + return; + } + + pdev->externalDisplay->mBlanked = false; +#if defined(USES_CEC) + start_cec(pdev); + } else { + CECClose(); + pdev->mCecFd = -1; + } +#else + } +#endif + + ALOGV("HDMI HPD changed to %s", pdev->hdmi_hpd ? "enabled" : "disabled"); + if (pdev->hdmi_hpd) + ALOGI("HDMI Resolution changed to %dx%d", + pdev->externalDisplay->mXres, pdev->externalDisplay->mYres); + + /* hwc_dev->procs is set right after the device is opened, but there is + * still a race condition where a hotplug event might occur after the open + * but before the procs are registered. */ + if (pdev->procs) + pdev->procs->hotplug(pdev->procs, HWC_DISPLAY_EXTERNAL, pdev->hdmi_hpd); +} + +void handle_tui_uevent(struct exynos5_hwc_composer_device_1_t *pdev, + const char *buff, int len) +{ +#ifdef USES_VPP +#ifdef DISABLE_IDMA_SECURE + return; +#else + const char *s = buff; + unsigned int tui_disabled = 1; + bool useSecureDMA = true; + s += strlen(s) + 1; + + while (*s) { + if (!strncmp(s, "SWITCH_STATE=", strlen("SWITCH_STATE="))) + tui_disabled = atoi(s + strlen("SWITCH_STATE=")) == 0; + + s += strlen(s) + 1; + if (s - buff >= len) + break; + } + + if (tui_disabled) + useSecureDMA = true; + else + useSecureDMA = false; + + ALOGI("TUI mode is %s", tui_disabled ? "disabled" : "enabled"); + + if (pdev->primaryDisplay->mUseSecureDMA != useSecureDMA) { + pdev->primaryDisplay->mUseSecureDMA = useSecureDMA; + if ((pdev->procs) && (pdev->procs->invalidate)) + pdev->procs->invalidate(pdev->procs); + } +#endif +#endif +} + +void handle_vsync_event(struct exynos5_hwc_composer_device_1_t *pdev) +{ + if (!pdev->procs) + return; + + int err = lseek(pdev->vsync_fd, 0, SEEK_SET); + if (err < 0) { + ALOGE("error seeking to vsync timestamp: %s", strerror(errno)); + return; + } + + char buf[4096]; + err = read(pdev->vsync_fd, buf, sizeof(buf)); + if (err < 0) { + ALOGE("error reading vsync timestamp: %s", strerror(errno)); + return; + } + buf[sizeof(buf) - 1] = '\0'; + + errno = 0; + uint64_t timestamp = strtoull(buf, NULL, 0); + if (!errno) + pdev->procs->vsync(pdev->procs, 0, timestamp); +} + +void *hwc_update_stat_thread(void *data) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)data; + int event_cnt = 0; + + while (pdev->update_stat_thread_flag) { + event_cnt = pdev->update_event_cnt; + /* + * If there is no update for more than 100ms, favor the 3D composition mode. + * If all other conditions are met, mode will be switched to 3D composition. + */ + usleep(100000); + if (event_cnt == pdev->update_event_cnt) { + if (pdev->primaryDisplay->getCompModeSwitch() == HWC_2_GLES) { + if ((pdev->procs) && (pdev->procs->invalidate)) { + pdev->update_event_cnt = 0; + pdev->procs->invalidate(pdev->procs); + } + } + } + } + return NULL; +} + +void exynos5_create_update_stat_thread(struct exynos5_hwc_composer_device_1_t *dev) +{ + /* pthread_create shouldn't have ben failed. 
But, ignore even if some error */ + if (pthread_create(&dev->update_stat_thread, NULL, hwc_update_stat_thread, dev) != 0) { + ALOGE("%s: failed to start update_stat thread:", __func__); + dev->update_stat_thread_flag = false; + } else { + dev->update_stat_thread_flag = true; + } +} + +void *hwc_vsync_thread(void *data) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)data; + char uevent_desc[4096]; + memset(uevent_desc, 0, sizeof(uevent_desc)); + + setpriority(PRIO_PROCESS, 0, HAL_PRIORITY_URGENT_DISPLAY); + + uevent_init(); + + char temp[4096]; + int err = read(pdev->vsync_fd, temp, sizeof(temp)); + if (err < 0) { + ALOGE("error reading vsync timestamp: %s", strerror(errno)); + return NULL; + } + +#if defined(USES_CEC) + struct pollfd fds[3]; +#else + struct pollfd fds[2]; +#endif + fds[0].fd = pdev->vsync_fd; + fds[0].events = POLLPRI; + fds[1].fd = uevent_get_fd(); + fds[1].events = POLLIN; +#if defined(USES_CEC) + fds[2].fd = pdev->mCecFd; + fds[2].events = POLLIN; +#endif + + while (true) { +#if defined(USES_CEC) + int err; + fds[2].fd = pdev->mCecFd; + if (fds[2].fd > 0) + err = poll(fds, 3, -1); + else + err = poll(fds, 2, -1); +#else + int err = poll(fds, 2, -1); +#endif + + if (err > 0) { + if (fds[0].revents & POLLPRI) { + handle_vsync_event(pdev); + } + else if (fds[1].revents & POLLIN) { + int len = uevent_next_event(uevent_desc, + sizeof(uevent_desc) - 2); + + bool hdmi = !strcmp(uevent_desc, + "change@/devices/virtual/switch/hdmi"); + bool tui_status = !strcmp(uevent_desc, + "change@/devices/virtual/switch/tui"); + + if (hdmi) + handle_hdmi_uevent(pdev, uevent_desc, len); + else if (tui_status) + handle_tui_uevent(pdev, uevent_desc, len); +#if defined(USES_CEC) + } else if (pdev->hdmi_hpd && fds[2].revents & POLLIN) { + handle_cec(pdev); +#endif + } + } + else if (err == -1) { + if (errno == EINTR) + break; + ALOGE("error in vsync thread: %s", strerror(errno)); + } + } + + return NULL; +} + +int exynos5_blank(struct hwc_composer_device_1 *dev, int disp, int blank) +{ + ATRACE_CALL(); + int fence = 0; + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; +#ifdef SKIP_DISPLAY_BLANK_CTRL + return 0; +#endif + ALOGI("%s:: disp(%d), blank(%d)", __func__, disp, blank); + switch (disp) { + case HWC_DISPLAY_PRIMARY: { + int fb_blank = blank ? FB_BLANK_POWERDOWN : FB_BLANK_UNBLANK; + if (fb_blank == FB_BLANK_POWERDOWN) { + int fence = pdev->primaryDisplay->clearDisplay(); + if (fence < 0) { + HLOGE("error clearing primary display"); + } else { +#ifndef USES_VPP + if (pdev->primaryDisplay->mGscUsed && pdev->primaryDisplay->mMPPs[FIMD_GSC_IDX]->isOTF()) + pdev->primaryDisplay->mMPPs[FIMD_GSC_IDX]->cleanupOTF(); +#endif + close(fence); + } + } +#if !defined(HDMI_ON_IN_SUSPEND) && defined(CHANGE_POWEROFF_SEQ) + /* + * LCD power block shouldn't be turned off + * before TV power block is turned off in Exynos4. 
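+ * (This ordering is compiled in only when CHANGE_POWEROFF_SEQ is defined;
+ * the Android.mk in this patch defines it only for
+ * TARGET_BOARD_PLATFORM = exynos4.)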
+ */ + if (pdev->hdmi_hpd) { + if (blank && !pdev->externalDisplay->mBlanked) { + pdev->externalDisplay->disable(); + } + pdev->externalDisplay->mBlanked = !!blank; + } +#endif + pdev->primaryDisplay->mBlanked = !!blank; + + if (pthread_kill(pdev->update_stat_thread, 0) != ESRCH) { //check if the thread is alive + if (fb_blank == FB_BLANK_POWERDOWN) { + pdev->update_stat_thread_flag = false; + pthread_join(pdev->update_stat_thread, 0); + } + } else { // thread is not alive + if (fb_blank == FB_BLANK_UNBLANK && pdev->hwc_ctrl.dynamic_recomp_mode == true) + exynos5_create_update_stat_thread(pdev); + } +#if defined(USES_DUAL_DISPLAY) + struct decon_dual_display_blank_data blank_data; + memset(&blank_data, 0, sizeof(blank_data)); + blank_data.display_type = blank_data.DECON_PRIMARY_DISPLAY; + blank_data.blank = fb_blank; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_DUAL_DISPLAY_BLANK, &blank_data); +#else + int err = ioctl(pdev->primaryDisplay->mDisplayFd, FBIOBLANK, fb_blank); +#endif + if (err < 0) { + if (errno == EBUSY) + ALOGI("%sblank ioctl failed (display already %sblanked)", + blank ? "" : "un", blank ? "" : "un"); + else + ALOGE("%sblank ioctl failed: %s", blank ? "" : "un", + strerror(errno)); + return -errno; + } + break; + } +#if defined(USES_DUAL_DISPLAY) + case HWC_DISPLAY_PRIMARY1: { + int fb_blank = blank ? FB_BLANK_POWERDOWN : FB_BLANK_UNBLANK; + /* To do: Should be implemented */ + pdev->secondaryDisplay->mBlanked = !!blank; + struct decon_dual_display_blank_data blank_data; + memset(&blank_data, 0, sizeof(blank_data)); + blank_data.display_type = blank_data.DECON_SECONDARY_DISPLAY; + blank_data.blank = fb_blank; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_DUAL_DISPLAY_BLANK, &blank_data); + break; + } +#endif + case HWC_DISPLAY_EXTERNAL: +#if !defined(HDMI_ON_IN_SUSPEND) && !defined(CHANGE_POWEROFF_SEQ) + if (pdev->hdmi_hpd) { + if (blank && !pdev->externalDisplay->mBlanked) { + pdev->externalDisplay->disable(); + } + pdev->externalDisplay->mBlanked = !!blank; + } +#else + fence = pdev->externalDisplay->clearDisplay(); + if (fence >= 0) + close(fence); + pdev->externalDisplay->mBlanked = !!blank; +#endif + break; + + default: + return -EINVAL; + + } + + return 0; +} + +void exynos5_dump(hwc_composer_device_1* dev, char *buff, int buff_len) +{ + if (buff_len <= 0) + return; + + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + android::String8 result; + + result.appendFormat("\n hdmi_enabled=%u\n", pdev->externalDisplay->mEnabled); + if (pdev->externalDisplay->mEnabled) + result.appendFormat(" w=%u, h=%u\n", pdev->externalDisplay->mXres, pdev->externalDisplay->mYres); + + result.append("Primary device's config information\n"); + pdev->primaryDisplay->dump(result); + +#ifdef USES_VPP + if (pdev->hdmi_hpd) { + result.append("\n"); + result.append("External device's config information\n"); + pdev->externalDisplay->dump(result); + } +#if defined(USES_DUAL_DISPLAY) + else { + result.append("\n"); + result.append("Secondary device's config information\n"); + pdev->secondaryDisplay->dump(result); + } +#endif +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + if (pdev->virtualDisplay->mIsWFDState) { + result.append("\n"); + result.append("Virtual device's config information\n"); + pdev->virtualDisplay->dump(result); + } +#endif +#endif + { + android::Mutex::Autolock lock(pdev->primaryDisplay->mLayerInfoMutex); + result.append("\n"); + result.append("Primary device's layer information\n"); + 
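+        /* Guarded by mLayerInfoMutex, the same lock prepare() takes above,
+         * so the layer dump cannot race an in-flight prepare(). */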
pdev->primaryDisplay->dumpLayerInfo(result); + } + + if (pdev->hdmi_hpd) { + android::Mutex::Autolock lock(pdev->externalDisplay->mLayerInfoMutex); + result.append("\n"); + result.append("External device's layer information\n"); + pdev->externalDisplay->dumpLayerInfo(result); + } +#if defined(USES_DUAL_DISPLAY) + else { + android::Mutex::Autolock lock(pdev->secondaryDisplay->mLayerInfoMutex); + result.append("\n"); + result.append("Secondary device's layer information\n"); + pdev->secondaryDisplay->dumpLayerInfo(result); + } +#endif +#if USES_VIRTUAL_DISPLAY_DECON_EXT_WB + if (pdev->virtualDisplay->mIsWFDState) { + android::Mutex::Autolock lock(pdev->virtualDisplay->mLayerInfoMutex); + result.append("\n"); + result.append("Virtual device's layer information\n"); + pdev->virtualDisplay->dumpLayerInfo(result); + } +#endif + strlcpy(buff, result.string(), buff_len); +} + +int exynos5_getDisplayConfigs(struct hwc_composer_device_1 *dev, + int disp, uint32_t *configs, size_t *numConfigs) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + int err = 0; + + if (*numConfigs == 0) + return 0; + +#if defined(USES_DUAL_DISPLAY) + if (disp == HWC_DISPLAY_PRIMARY0) { + configs[0] = 0; + *numConfigs = 1; + return 0; + } else if (disp == HWC_DISPLAY_PRIMARY1) { + configs[0] = 0; + *numConfigs = 1; + return 0; +#else + if (disp == HWC_DISPLAY_PRIMARY) { + configs[0] = 0; + *numConfigs = 1; + return 0; +#endif + } else if (disp == HWC_DISPLAY_EXTERNAL) { + if (!pdev->hdmi_hpd) { + return -EINVAL; + } + if (hwcHasApiVersion((hwc_composer_device_1_t*)dev, HWC_DEVICE_API_VERSION_1_4)) + err = pdev->externalDisplay->getDisplayConfigs(configs, numConfigs); + else { + err = pdev->externalDisplay->getConfig(); + configs[0] = 0; + *numConfigs = 1; + } + if (err) { + return -EINVAL; + } + return 0; +#ifdef USES_VIRTUAL_DISPLAY + } else if (disp == HWC_DISPLAY_VIRTUAL) { + int err = pdev->virtualDisplay->getConfig(); + if (err) { + return -EINVAL; + } + configs[0] = 0; + *numConfigs = 1; + return 0; +#endif + } + + return -EINVAL; +} + +int32_t exynos5_hdmi_attribute(struct exynos5_hwc_composer_device_1_t *pdev, + const uint32_t attribute) +{ + switch(attribute) { + case HWC_DISPLAY_VSYNC_PERIOD: + return pdev->primaryDisplay->mVsyncPeriod; + + case HWC_DISPLAY_WIDTH: + return pdev->externalDisplay->mXres; + + case HWC_DISPLAY_HEIGHT: + return pdev->externalDisplay->mYres; + + case HWC_DISPLAY_DPI_X: + case HWC_DISPLAY_DPI_Y: + return 0; // unknown + + default: + ALOGE("unknown display attribute %u", attribute); + return -EINVAL; + } +} + +int exynos5_getDisplayAttributes(struct hwc_composer_device_1 *dev, + int disp, uint32_t config, const uint32_t *attributes, int32_t *values) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + for (int i = 0; attributes[i] != HWC_DISPLAY_NO_ATTRIBUTE; i++) { +#if defined(USES_DUAL_DISPLAY) + if (disp == HWC_DISPLAY_PRIMARY0) + values[i] = pdev->primaryDisplay->getDisplayAttributes(attributes[i]); + else if (disp == HWC_DISPLAY_PRIMARY1) + values[i] = pdev->secondaryDisplay->getDisplayAttributes(attributes[i]); +#else + if (disp == HWC_DISPLAY_PRIMARY) + values[i] = pdev->primaryDisplay->getDisplayAttributes(attributes[i]); +#endif + else if (disp == HWC_DISPLAY_EXTERNAL) { + if (hwcHasApiVersion((hwc_composer_device_1_t*)dev, HWC_DEVICE_API_VERSION_1_4)) + values[i] = pdev->externalDisplay->getDisplayAttributes(attributes[i], config); + else + values[i] = 
exynos5_hdmi_attribute(pdev, attributes[i]); + } +#ifdef USES_VIRTUAL_DISPLAY + else if (disp == HWC_DISPLAY_VIRTUAL) + values[i] = pdev->virtualDisplay->getDisplayAttributes(attributes[i]); +#endif + else { + ALOGE("unknown display type %u", disp); + return -EINVAL; + } + } + + return 0; +} + +int exynos_getActiveConfig(struct hwc_composer_device_1* dev, int disp) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + if (disp == HWC_DISPLAY_PRIMARY) + return 0; + else if (disp == HWC_DISPLAY_EXTERNAL) { + if (pdev->hdmi_hpd) { + if (hwcHasApiVersion((hwc_composer_device_1_t*)dev, HWC_DEVICE_API_VERSION_1_4)) + return pdev->externalDisplay->getActiveConfig(); + else + return 0; + } else { + ALOGE("%s::External device is not connected", __func__); + return -1; + } + } else if (disp == HWC_DISPLAY_VIRTUAL) + return 0; + else { + ALOGE("%s:: unknown display type %u", __func__, disp); + return -EINVAL; + } +} + +int exynos_setActiveConfig(struct hwc_composer_device_1* dev, int disp, int index) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + ALOGI("%s:: disp(%d), index(%d)", __func__, disp, index); + if (disp == HWC_DISPLAY_PRIMARY) { + if (index != 0) { + ALOGE("%s::Primary display doen't support index(%d)", __func__, index); + return -1; + } + return 0; + } + else if (disp == HWC_DISPLAY_EXTERNAL) { + if (pdev->hdmi_hpd) { + if (hwcHasApiVersion((hwc_composer_device_1_t*)dev, HWC_DEVICE_API_VERSION_1_4)) { + return pdev->externalDisplay->setActiveConfig(index); + } else { + if (index != 0) { + ALOGE("%s::External display doen't support index(%d)", __func__, index); + return -1; + } else { + return 0; + } + } + } else { + ALOGE("%s::External device is not connected", __func__); + return -1; + } + } else if (disp == HWC_DISPLAY_VIRTUAL) + return 0; + + return -1; +} + +int exynos_setCursorPositionAsync(struct hwc_composer_device_1 *dev, int disp, int x_pos, int y_pos) +{ + return 0; +} + +int exynos_setPowerMode(struct hwc_composer_device_1* dev, int disp, int mode) +{ + ATRACE_CALL(); + int fence = 0; + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; +#ifdef SKIP_DISPLAY_BLANK_CTRL + return 0; +#endif + ALOGI("%s:: disp(%d), mode(%d)", __func__, disp, mode); + int fb_blank = 0; + int blank = 0; + if (mode == HWC_POWER_MODE_OFF) { + fb_blank = FB_BLANK_POWERDOWN; + blank = 1; + } else { + fb_blank = FB_BLANK_UNBLANK; + blank = 0; + } + + switch (disp) { + case HWC_DISPLAY_PRIMARY: { +#ifdef USES_VPP + if ((mode == HWC_POWER_MODE_DOZE) || (mode == HWC_POWER_MODE_DOZE_SUSPEND)) { + if (pdev->primaryDisplay->mBlanked == 0) { + fb_blank = FB_BLANK_POWERDOWN; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, FBIOBLANK, fb_blank); + if (err < 0) { + ALOGE("blank ioctl failed: %s, mode(%d)", strerror(errno), mode); + return -errno; + } + } + pdev->primaryDisplay->mBlanked = 1; + return pdev->primaryDisplay->setPowerMode(mode); + } +#endif + if (fb_blank == FB_BLANK_POWERDOWN) { + int fence = -1; +#if defined(USES_DUAL_DISPLAY) + if (pdev->secondaryDisplay->mBlanked) +#endif + fence = pdev->primaryDisplay->clearDisplay(); + if (fence < 0) { + HLOGE("error clearing primary display"); + } else { +#ifndef USES_VPP + if (pdev->primaryDisplay->mGscUsed && pdev->primaryDisplay->mMPPs[FIMD_GSC_IDX]->isOTF()) + pdev->primaryDisplay->mMPPs[FIMD_GSC_IDX]->cleanupOTF(); +#endif + close(fence); + } + } +#if !defined(HDMI_ON_IN_SUSPEND) && 
defined(CHANGE_POWEROFF_SEQ) + /* + * LCD power block shouldn't be turned off + * before TV power block is turned off in Exynos4. + */ + if (pdev->hdmi_hpd) { + if (blank && !pdev->externalDisplay->mBlanked) { + pdev->externalDisplay->disable(); + } + pdev->externalDisplay->mBlanked = !!blank; + } +#endif + pdev->primaryDisplay->mBlanked = !!blank; + + if (pthread_kill(pdev->update_stat_thread, 0) != ESRCH) { //check if the thread is alive + if (fb_blank == FB_BLANK_POWERDOWN) { + pdev->update_stat_thread_flag = false; + pthread_join(pdev->update_stat_thread, 0); + } + } else { // thread is not alive + if (fb_blank == FB_BLANK_UNBLANK && pdev->hwc_ctrl.dynamic_recomp_mode == true) + exynos5_create_update_stat_thread(pdev); + } +#if defined(USES_DUAL_DISPLAY) + struct decon_dual_display_blank_data blank_data; + memset(&blank_data, 0, sizeof(blank_data)); + blank_data.display_type = blank_data.DECON_PRIMARY_DISPLAY; + blank_data.blank = fb_blank; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_DUAL_DISPLAY_BLANK, &blank_data); +#else + int err = ioctl(pdev->primaryDisplay->mDisplayFd, FBIOBLANK, fb_blank); +#endif + if (err < 0) { + if (errno == EBUSY) + ALOGI("%sblank ioctl failed (display already %sblanked)", + blank ? "" : "un", blank ? "" : "un"); + else + ALOGE("%sblank ioctl failed: %s", blank ? "" : "un", + strerror(errno)); + return -errno; + } + break; + } +#if defined(USES_DUAL_DISPLAY) + case HWC_DISPLAY_PRIMARY1: { + if (fb_blank == FB_BLANK_POWERDOWN) { + int fence = -1; + if (pdev->primaryDisplay->mBlanked) + fence = pdev->primaryDisplay->clearDisplay(); + if (fence < 0) { + HLOGE("error clearing primary display"); + } else { + close(fence); + } + } + /* To do: Should be implemented */ + pdev->secondaryDisplay->mBlanked = !!blank; + struct decon_dual_display_blank_data blank_data; + memset(&blank_data, 0, sizeof(blank_data)); + blank_data.display_type = blank_data.DECON_SECONDARY_DISPLAY; + blank_data.blank = fb_blank; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_DUAL_DISPLAY_BLANK, &blank_data); + break; + } +#endif + case HWC_DISPLAY_EXTERNAL: +#if !defined(HDMI_ON_IN_SUSPEND) && !defined(CHANGE_POWEROFF_SEQ) + if (pdev->hdmi_hpd) { + if (blank && !pdev->externalDisplay->mBlanked) { + pdev->externalDisplay->disable(); + } + pdev->externalDisplay->mBlanked = !!blank; + } +#else + fence = pdev->externalDisplay->clearDisplay(); + if (fence >= 0) + close(fence); + pdev->externalDisplay->mBlanked = !!blank; +#endif + break; + + default: + return -EINVAL; + + } + + return 0; +} + +int exynos5_close(hw_device_t* device); + +int exynos5_open(const struct hw_module_t *module, const char *name, + struct hw_device_t **device) +{ + int ret = 0; + int refreshRate; + int sw_fd; + + if (strcmp(name, HWC_HARDWARE_COMPOSER)) { + return -EINVAL; + } + + struct exynos5_hwc_composer_device_1_t *dev; + dev = (struct exynos5_hwc_composer_device_1_t *)malloc(sizeof(*dev)); + memset(dev, 0, sizeof(*dev)); + + dev->primaryDisplay = new ExynosPrimaryDisplay(NUM_GSC_UNITS, dev); + dev->externalDisplay = new ExynosExternalDisplayModule(dev); +#ifdef USES_VIRTUAL_DISPLAY + dev->virtualDisplay = new ExynosVirtualDisplayModule(dev); +#endif + if (hw_get_module(GRALLOC_HARDWARE_MODULE_ID, + (const struct hw_module_t **)&dev->primaryDisplay->mGrallocModule)) { + ALOGE("failed to get gralloc hw module"); + ret = -EINVAL; + goto err_get_module; + } + + if (gralloc_open((const hw_module_t *)dev->primaryDisplay->mGrallocModule, + &dev->primaryDisplay->mAllocDevice)) { + ALOGE("failed to 
open gralloc"); + ret = -EINVAL; + goto err_get_module; + } + dev->externalDisplay->mAllocDevice = dev->primaryDisplay->mAllocDevice; +#ifdef USES_VIRTUAL_DISPLAY + dev->virtualDisplay->mAllocDevice = dev->primaryDisplay->mAllocDevice; +#endif + + dev->primaryDisplay->mDisplayFd = open("/dev/graphics/fb0", O_RDWR); + if (dev->primaryDisplay->mDisplayFd < 0) { + ALOGE("failed to open framebuffer"); + ret = dev->primaryDisplay->mDisplayFd; + goto err_open_fb; + } + + struct fb_var_screeninfo info; + if (ioctl(dev->primaryDisplay->mDisplayFd, FBIOGET_VSCREENINFO, &info) == -1) { + ALOGE("FBIOGET_VSCREENINFO ioctl failed: %s", strerror(errno)); + ret = -errno; + goto err_ioctl; + } + + if (info.reserved[0] == 0 && info.reserved[1] == 0) { + /* save physical lcd width, height to reserved[] */ + info.reserved[0] = info.xres; + info.reserved[1] = info.yres; + + if (ioctl(dev->primaryDisplay->mDisplayFd, FBIOPUT_VSCREENINFO, &info) == -1) { + ALOGE("FBIOPUT_VSCREENINFO ioctl failed: %s", strerror(errno)); + ret = -errno; + goto err_ioctl; + } + } + + char value[PROPERTY_VALUE_MAX]; + property_get("debug.hwc.force_gpu", value, "0"); + dev->force_gpu = atoi(value); + + /* restore physical lcd width, height from reserved[] */ + int lcd_xres, lcd_yres; + lcd_xres = info.reserved[0]; + lcd_yres = info.reserved[1]; + + refreshRate = 1000000000000LLU / + ( + uint64_t( info.upper_margin + info.lower_margin + lcd_yres + info.vsync_len ) + * ( info.left_margin + info.right_margin + lcd_xres + info.hsync_len ) + * info.pixclock + ); + + if (refreshRate == 0) { + ALOGW("invalid refresh rate, assuming 60 Hz"); + refreshRate = 60; + } + +#if defined(USES_DUAL_DISPLAY) + dev->primaryDisplay->mXres = 2 * lcd_xres; +#else + dev->primaryDisplay->mXres = lcd_xres; +#endif + dev->primaryDisplay->mYres = lcd_yres; + dev->primaryDisplay->mXdpi = 1000 * (lcd_xres * 25.4f) / info.width; + dev->primaryDisplay->mYdpi = 1000 * (lcd_yres * 25.4f) / info.height; + dev->primaryDisplay->mVsyncPeriod = 1000000000 / refreshRate; + + ALOGD("using\n" + "xres = %d px\n" + "yres = %d px\n" + "width = %d mm (%f dpi)\n" + "height = %d mm (%f dpi)\n" + "refresh rate = %d Hz\n", + dev->primaryDisplay->mXres, dev->primaryDisplay->mYres, info.width, dev->primaryDisplay->mXdpi / 1000.0, + info.height, dev->primaryDisplay->mYdpi / 1000.0, refreshRate); +#ifndef USES_VPP +#ifdef FIMD_BW_OVERLAP_CHECK + fimd_bw_overlap_limits_init(dev->primaryDisplay->mXres, dev->primaryDisplay->mYres, + dev->primaryDisplay->mDmaChannelMaxBandwidth, dev->primaryDisplay->mDmaChannelMaxOverlapCount); +#else + for (size_t i = 0; i < MAX_NUM_FIMD_DMA_CH; i++) { + dev->primaryDisplay->mDmaChannelMaxBandwidth[i] =2560 * 1600; + dev->primaryDisplay->mDmaChannelMaxOverlapCount[i] = 1; + } +#endif +#endif + +#ifdef FIMD_WINDOW_OVERLAP_CHECK + /* + * Trivial implementation. 
+ * Effective only for checking the case that + * mMaxWindowOverlapCnt = (NUM_HW_WINDOWS - 1) + */ + dev->primaryDisplay->mMaxWindowOverlapCnt = + fimd_window_overlap_limits_init(dev->primaryDisplay->mXres, dev->primaryDisplay->mYres); +#else + dev->primaryDisplay->mMaxWindowOverlapCnt = NUM_HW_WINDOWS; +#endif + + if (dev->externalDisplay->openHdmi() < 0) { + ALOGE("openHdmi fail"); + } + +#if defined(USES_DUAL_DISPLAY) + dev->secondaryDisplay = new ExynosSecondaryDisplayModule(dev); + dev->secondaryDisplay->mAllocDevice = dev->primaryDisplay->mAllocDevice; +#endif +#ifdef USES_VPP + dev->mDisplayResourceManager = new ExynosDisplayResourceManagerModule(dev); +#endif + char devname[MAX_DEV_NAME + 1]; + devname[MAX_DEV_NAME] = '\0'; + + strncpy(devname, VSYNC_DEV_PREFIX, MAX_DEV_NAME); + strlcat(devname, VSYNC_DEV_NAME, MAX_DEV_NAME); + + dev->vsync_fd = open(devname, O_RDONLY); + if (dev->vsync_fd < 0) { + ALOGI("Failed to open vsync attribute at %s", devname); + devname[strlen(VSYNC_DEV_PREFIX)] = '\0'; + strlcat(devname, VSYNC_DEV_MIDDLE, MAX_DEV_NAME); + strlcat(devname, VSYNC_DEV_NAME, MAX_DEV_NAME); + ALOGI("Retrying with %s", devname); + dev->vsync_fd = open(devname, O_RDONLY); + } + +#ifdef TRY_SECOND_VSYNC_DEV + if (dev->vsync_fd < 0) { + strncpy(devname, VSYNC_DEV_PREFIX, MAX_DEV_NAME); + strlcat(devname, VSYNC_DEV_NAME2, MAX_DEV_NAME); + + dev->vsync_fd = open(devname, O_RDONLY); + if (dev->vsync_fd < 0) { + ALOGI("Failed to open vsync attribute at %s", devname); + devname[strlen(VSYNC_DEV_PREFIX)] = '\0'; + strlcat(devname, VSYNC_DEV_MIDDLE2, MAX_DEV_NAME); + strlcat(devname, VSYNC_DEV_NAME2, MAX_DEV_NAME); + ALOGI("Retrying with %s", devname); + dev->vsync_fd = open(devname, O_RDONLY); + } + } + +#endif + + if (dev->vsync_fd < 0) { + ALOGE("failed to open vsync attribute"); + ret = dev->vsync_fd; + goto err_hdmi_open; + } else { + struct stat st; + if (fstat(dev->vsync_fd, &st) < 0) { + ALOGE("Failed to stat vsync node at %s", devname); + goto err_vsync_stat; + } + + if (!S_ISREG(st.st_mode)) { + ALOGE("vsync node at %s should be a regualar file", devname); + goto err_vsync_stat; + } + } + + dev->psrInfoFd = NULL; + + char psrDevname[MAX_DEV_NAME + 1]; + memset(psrDevname, 0, MAX_DEV_NAME + 1); + strncpy(psrDevname, devname, strlen(devname) - 5); + strlcat(psrDevname, "psr_info", MAX_DEV_NAME); + ALOGI("PSR info devname = %s\n", psrDevname); + + dev->psrInfoFd = fopen(psrDevname, "r"); + if (dev->psrInfoFd == NULL) { + ALOGW("HWC needs to know whether LCD driver is using PSR mode or not\n"); + } else { + char val[4]; + if (fread(&val, 1, 1, dev->psrInfoFd) == 1) { + dev->psrMode = (0x03 & atoi(val)); + dev->panelType = ((0x03 << 2) & atoi(val)) >> 2; + } + } + + ALOGI("PSR mode = %d (0: video mode, 1: DP PSR mode, 2: MIPI-DSI command mode)\n", + dev->psrMode); + ALOGI("Panel type = %d (0: Legacy, 1: DSC)\n", + dev->panelType); + +#ifdef USES_VPP + dev->primaryDisplay->mPanelType = dev->panelType; + if (dev->panelType == PANEL_DSC) { + uint32_t sliceNum = 0; + uint32_t sliceSize = 0; + if (fscanf(dev->psrInfoFd, "\n%d\n%d\n", &sliceNum, &sliceSize) < 0) { + ALOGE("Fail to read slice information"); + } else { + dev->primaryDisplay->mDSCHSliceNum = sliceNum; + dev->primaryDisplay->mDSCYSliceSize = sliceSize; + } + ALOGI("DSC H_Slice_Num: %d, Y_Slice_Size: %d", dev->primaryDisplay->mDSCHSliceNum, dev->primaryDisplay->mDSCYSliceSize); + } +#endif + + dev->base.common.tag = HARDWARE_DEVICE_TAG; + dev->base.common.version = HWC_VERSION; + dev->base.common.module = const_cast(module); 
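+
+    /*
+     * Register the hwc_composer_device_1 entry points that SurfaceFlinger
+     * calls through. The HWC 1.4 hooks (getActiveConfig, setActiveConfig,
+     * setCursorPositionAsync, setPowerMode) are installed further below only
+     * when the device reports API version 1.4 or later; otherwise the legacy
+     * blank() callback is registered instead.
+     */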
+ dev->base.common.close = exynos5_close; + + dev->base.prepare = exynos5_prepare; + dev->base.set = exynos5_set; + dev->base.eventControl = exynos5_eventControl; + dev->base.query = exynos5_query; + dev->base.registerProcs = exynos5_registerProcs; + dev->base.dump = exynos5_dump; + dev->base.getDisplayConfigs = exynos5_getDisplayConfigs; + dev->base.getDisplayAttributes = exynos5_getDisplayAttributes; + if (hwcHasApiVersion((hwc_composer_device_1_t*)dev, HWC_DEVICE_API_VERSION_1_4)) { + dev->base.getActiveConfig = exynos_getActiveConfig; + dev->base.setActiveConfig = exynos_setActiveConfig; + dev->base.setCursorPositionAsync = exynos_setCursorPositionAsync; + dev->base.setPowerMode = exynos_setPowerMode; + } else { + dev->base.blank = exynos5_blank; + } + + *device = &dev->base.common; + +#ifdef IP_SERVICE + android::ExynosIPService *mIPService; + mIPService = android::ExynosIPService::getExynosIPService(); + ret = mIPService->createServiceLocked(); + if (ret < 0) + goto err_vsync; +#endif + +#ifdef HWC_SERVICES + android::ExynosHWCService *mHWCService; + mHWCService = android::ExynosHWCService::getExynosHWCService(); + mHWCService->setExynosHWCCtx(dev); + mHWCService->setPSRExitCallback(doPSRExit); +#if !defined(HDMI_INCAPABLE) + mHWCService->setBootFinishedCallback(exynos5_boot_finished); +#endif +#endif + + dev->mHdmiResolutionChanged = false; + dev->mHdmiResolutionHandled = true; + dev->mS3DMode = S3D_MODE_DISABLED; + dev->mHdmiPreset = HDMI_PRESET_DEFAULT; + dev->mHdmiCurrentPreset = HDMI_PRESET_DEFAULT; + dev->mUseSubtitles = false; + dev->notifyPSRExit = false; + +#if defined(USES_CEC) + if (dev->hdmi_hpd) + start_cec(dev); + else + dev->mCecFd = -1; +#endif + + ret = pthread_create(&dev->vsync_thread, NULL, hwc_vsync_thread, dev); + if (ret) { + ALOGE("failed to start vsync thread: %s", strerror(ret)); + ret = -ret; + goto err_vsync; + } + +#ifdef G2D_COMPOSITION + dev->primaryDisplay->num_of_allocated_lay = 0; +#endif + + dev->allowOTF = true; + + dev->hwc_ctrl.max_num_ovly = NUM_HW_WINDOWS; + dev->hwc_ctrl.num_of_video_ovly = 2; + dev->hwc_ctrl.dynamic_recomp_mode = (dev->psrMode == PSR_NONE); + dev->hwc_ctrl.skip_static_layer_mode = true; + dev->hwc_ctrl.dma_bw_balance_mode = true; + + hwcDebug = 0; + + if (dev->hwc_ctrl.dynamic_recomp_mode == true) + exynos5_create_update_stat_thread(dev); + + return 0; + +err_vsync: + if (dev->psrInfoFd != NULL) + fclose(dev->psrInfoFd); +err_vsync_stat: + close(dev->vsync_fd); +err_hdmi_open: + if (dev->externalDisplay->mDisplayFd > 0) + close(dev->externalDisplay->mDisplayFd); +err_ioctl: + close(dev->primaryDisplay->mDisplayFd); +#if defined(USES_DUAL_DISPLAY) + if (dev->secondaryDisplay->mDisplayFd > 0) + close(dev->secondaryDisplay->mDisplayFd); +#endif +err_open_fb: + gralloc_close(dev->primaryDisplay->mAllocDevice); +err_get_module: + free(dev); + return ret; +} + +int exynos5_close(hw_device_t *device) +{ + struct exynos5_hwc_composer_device_1_t *dev = + (struct exynos5_hwc_composer_device_1_t *)device; + pthread_kill(dev->vsync_thread, SIGTERM); + pthread_join(dev->vsync_thread, NULL); + if (pthread_kill(dev->update_stat_thread, 0) != ESRCH) { + pthread_kill(dev->update_stat_thread, SIGTERM); + pthread_join(dev->update_stat_thread, NULL); + } +#ifndef USES_VPP + for (size_t i = 0; i < NUM_GSC_UNITS; i++) + dev->primaryDisplay->mMPPs[i]->cleanupM2M(); +#endif + gralloc_close(dev->primaryDisplay->mAllocDevice); + close(dev->vsync_fd); + +#ifdef USE_FB_PHY_LINEAR +#ifdef G2D_COMPOSITION + for (int i = 0; i < NUM_HW_WIN_FB_PHY; i++) { + 
for (int j = 0; j < NUM_GSC_DST_BUFS; j++) { + if (dev->win_buf_vir_addr[i][j]) { + ion_unmap((void *)dev->win_buf_vir_addr[i][j], dev->win_buf_map_size[i]); + dev->win_buf_vir_addr[i][j] = 0; + } + } + } +#endif + + for (int i = 0; i < NUM_HW_WIN_FB_PHY; i++) { + for (int j = 0; j < NUM_GSC_DST_BUFS; j++) { + if (dev->win_buf[i][j]) { + dev->primaryDisplay->mAllocDevice->free(dev->primaryDisplay->mAllocDevice, dev->win_buf[i][j]); + dev->win_buf[i][j] = NULL; + } + } + } +#endif + +#ifdef USES_VPP + delete dev->mDisplayResourceManager; +#endif + + return 0; +} + +static struct hw_module_methods_t exynos5_hwc_module_methods = { + open: exynos5_open, +}; + +hwc_module_t HAL_MODULE_INFO_SYM = { + common: { + tag: HARDWARE_MODULE_TAG, + module_api_version: HWC_MODULE_API_VERSION_0_1, + hal_api_version: HARDWARE_HAL_API_VERSION, + id: HWC_HARDWARE_MODULE_ID, + name: "Samsung exynos5 hwcomposer module", + author: "Samsung LSI", + methods: &exynos5_hwc_module_methods, + dso: 0, + reserved: {0}, + } +}; diff --git a/libhwc/ExynosHWC.h b/libhwc/ExynosHWC.h new file mode 100644 index 0000000..8f8b735 --- /dev/null +++ b/libhwc/ExynosHWC.h @@ -0,0 +1,452 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_EXYNOS_HWC_H_ +#define ANDROID_EXYNOS_HWC_H_ +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#ifdef USES_VPP +#ifdef USES_VPP_V2 +#include +#else +#include +#endif +#include "ExynosDisplayResourceManagerModule.h" +#else +#include +#endif + +#include + +#define HWC_REMOVE_DEPRECATED_VERSIONS 1 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "gralloc_priv.h" +#ifndef USES_FIMC +#include "exynos_gscaler.h" +#else +#include "exynos_fimc.h" +#endif +#include "exynos_format.h" +#include "exynos_v4l2.h" +#include "s5p_tvout_v4l2.h" +#include "ExynosHWCModule.h" +#include "ExynosRect.h" +#include "videodev2.h" + +#ifdef USE_FB_PHY_LINEAR +const size_t NUM_HW_WIN_FB_PHY = 5; +#undef DUAL_VIDEO_OVERLAY_SUPPORT +#define G2D_COMPOSITION +#ifdef G2D_COMPOSITION +#define USE_FIMG2D_API +#endif +#endif + +#if defined(DUAL_VIDEO_OVERLAY_SUPPORT) +#define MAX_VIDEO_LAYERS 2 +#else +#define MAX_VIDEO_LAYERS 1 +#endif + +#ifndef FIMD_WORD_SIZE_BYTES +#define FIMD_WORD_SIZE_BYTES 8 +#endif + +#ifndef FIMD_BURSTLEN +#define FIMD_BURSTLEN 16 +#endif + +#ifndef DRM_FIMD_BURSTLEN +#define DRM_FIMD_BURSTLEN 8 +#endif + +#ifndef FIMD_ADDED_BURSTLEN_BYTES +#define FIMD_ADDED_BURSTLEN_BYTES 0 +#endif + +#ifndef FIMD_TOTAL_BW_LIMIT +#define FIMD_TOTAL_BW_LIMIT (2560 * 1600 * 5) +#endif + +#ifndef WINUPDATE_X_ALIGNMENT +#define WINUPDATE_X_ALIGNMENT (8) +#endif + +#ifndef WINUPDATE_W_ALIGNMENT +#define WINUPDATE_W_ALIGNMENT (8) +#endif + +#ifndef WINUPDATE_DSC_H_SLICE_NUM +#define WINUPDATE_DSC_H_SLICE_NUM (4) +#endif + +#ifndef WINUPDATE_DSC_Y_SLICE_SIZE +#define WINUPDATE_DSC_Y_SLICE_SIZE (64) +#endif + +#ifndef WINUPDATE_THRESHOLD +#define WINUPDATE_THRESHOLD (75) +#endif + +#ifndef WINUPDATE_MIN_HEIGHT +#define WINUPDATE_MIN_HEIGHT (1) +#endif + +#define MEDIA_PROCESSOR_GSC 0 +#define MEDIA_PROCESSOR_FIMC 1 +#define MEDIA_PROCESSOR_G2D 2 + +#ifdef USES_FIMC +#define DEFAULT_MEDIA_PROCESSOR MEDIA_PROCESSOR_FIMC +#else +#define DEFAULT_MEDIA_PROCESSOR MEDIA_PROCESSOR_GSC +#endif + +#ifdef NUM_AVAILABLE_HW_WINDOWS +/* + * NUM_AVAILABLE_HW_WINDOWS can be optionally provided by + * soc specific header file which is generally present at + * $SoC\libhwcmodule\ExynosHWCModule.h. This is useful when + * same display controller driver is used by SoCs having + * different number of windows. + * S3C_FB_MAX_WIN: max number of hardware windows supported + * by the display controller driver. + * NUM_AVAILABLE_HW_WINDOWS: max windows in the given SoC. 
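+ *
+ * Illustrative example only (the numbers are hypothetical, not taken from
+ * any real SoC): a chip whose display controller exposes 3 usable windows
+ * while the common driver header defines S3C_FB_MAX_WIN as 5 would add
+ *     #define NUM_AVAILABLE_HW_WINDOWS 3
+ * to its libhwcmodule/ExynosHWCModule.h, and NUM_HW_WINDOWS below would
+ * then resolve to the smaller value.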
+ */ +const size_t NUM_HW_WINDOWS = NUM_AVAILABLE_HW_WINDOWS; +#else +const size_t NUM_HW_WINDOWS = S3C_FB_MAX_WIN; +#endif + +#ifndef HWC_VERSION +#define HWC_VERSION HWC_DEVICE_API_VERSION_1_3 +#endif + +#ifndef IDMA_SECURE +#define IDMA_SECURE IDMA_G2 +#endif + +const size_t NO_FB_NEEDED = NUM_HW_WINDOWS + 1; + +#ifndef FIMD_BW_OVERLAP_CHECK +const size_t MAX_NUM_FIMD_DMA_CH = 2; +const int FIMD_DMA_CH_IDX[S3C_FB_MAX_WIN] = {0, 1, 1, 1, 0}; +#endif + +#define MAX_DEV_NAME 128 +#ifndef VSYNC_DEV_PREFIX +#define VSYNC_DEV_PREFIX "" +#endif +#ifndef VSYNC_DEV_MIDDLE +#define VSYNC_DEV_MIDDLE "" +#endif + +#ifdef TRY_SECOND_VSYNC_DEV +#ifndef VSYNC_DEV_NAME2 +#define VSYNC_DEV_NAME2 "" +#endif +#ifndef VSYNC_DEV_MIDDLE2 +#define VSYNC_DEV_MIDDLE2 "" +#endif +#endif + +#ifdef USES_VPP +#ifndef MPP_VPP_G +#define MPP_VPP_G 10 +const uint32_t VPP_ASSIGN_ORDER[] = {MPP_VG, MPP_VGR}; +#else +const uint32_t VPP_ASSIGN_ORDER[] = {MPP_VPP_G, MPP_VG, MPP_VGR}; +#endif +#endif + +const size_t NUM_GSC_UNITS = sizeof(AVAILABLE_GSC_UNITS) / + sizeof(AVAILABLE_GSC_UNITS[0]); + +const size_t BURSTLEN_BYTES = FIMD_BURSTLEN * FIMD_WORD_SIZE_BYTES + FIMD_ADDED_BURSTLEN_BYTES; +const size_t DRM_BURSTLEN_BYTES = DRM_FIMD_BURSTLEN * FIMD_WORD_SIZE_BYTES + FIMD_ADDED_BURSTLEN_BYTES; +#if defined(NOT_USE_TRIPLE_BUFFER) +const size_t NUM_HDMI_BUFFERS = 4; +#else +const size_t NUM_HDMI_BUFFERS = 3; +#endif + +#define NUM_VIRT_OVER 5 + +#define NUM_VIRT_OVER_HDMI 5 + +#define HWC_PAGE_MISS_TH 5 + +#define S3D_ERROR -1 +#ifndef HDMI_INCAPABLE +#define HDMI_PRESET_DEFAULT V4L2_DV_1080P60 +#else +#define HDMI_PRESET_DEFAULT 0 +#endif +#define HDMI_PRESET_ERROR -1 + +#define HWC_FIMD_BW_TH 1 /* valid range 1 to 5 */ +#define HWC_FPS_TH 5 /* valid range 1 to 60 */ +#define VSYNC_INTERVAL (1000000000.0 / 60) +#define NUM_CONFIG_STABLE 10 + +#define OTF_SWITCH_THRESHOLD 2 + +#ifndef HLOG_CODE +#define HLOG_CODE 0 +#endif + +extern int hwcDebug; + +inline bool hwcPrintDebugMessages() +{ + return hwcDebug & (1 << HLOG_CODE); +} + +#if defined(DISABLE_HWC_DEBUG) +#define HLOGD(...) +#define HLOGV(...) +#define HLOGE(...) +#else +#define HLOGD(...) \ + if (hwcPrintDebugMessages()) \ + ALOGD(__VA_ARGS__); +#define HLOGV(...) \ + if (hwcPrintDebugMessages()) \ + ALOGV(__VA_ARGS__); +#define HLOGE(...) 
\ + if (hwcPrintDebugMessages()) \ + ALOGE(__VA_ARGS__); +#endif + +typedef enum _COMPOS_MODE_SWITCH { + NO_MODE_SWITCH, + HWC_2_GLES = 1, + GLES_2_HWC, +} HWC_COMPOS_MODE_SWITCH; + +struct exynos5_hwc_composer_device_1_t; + +typedef struct { + uint32_t x; + uint32_t y; + uint32_t w; + uint32_t h; + uint32_t fw; + uint32_t fh; + uint32_t format; + uint32_t rot; + uint32_t cacheable; + uint32_t drmMode; + uint32_t index; +} video_layer_config; + +struct exynos5_gsc_map_t { + enum { + GSC_NONE = 0, + GSC_M2M, + // TODO: GSC_LOCAL_PATH + GSC_LOCAL, + } mode; + int idx; +}; + +struct exynos5_hwc_post_data_t { + int overlay_map[NUM_HW_WINDOWS]; + exynos5_gsc_map_t gsc_map[NUM_HW_WINDOWS]; + size_t fb_window; +}; + +const size_t NUM_GSC_DST_BUFS = 3; +const size_t NUM_DRM_GSC_DST_BUFS = 2; +struct exynos5_gsc_data_t { + void *gsc; + exynos_mpp_img src_cfg; + exynos_mpp_img mid_cfg; + exynos_mpp_img dst_cfg; + buffer_handle_t dst_buf[NUM_GSC_DST_BUFS]; + buffer_handle_t mid_buf[NUM_GSC_DST_BUFS]; + int dst_buf_fence[NUM_GSC_DST_BUFS]; + int mid_buf_fence[NUM_GSC_DST_BUFS]; + size_t current_buf; + int gsc_mode; + uint32_t last_gsc_lay_hnd; +}; + +struct hdmi_layer_t { + int id; + int fd; + bool enabled; + exynos_mpp_img cfg; + + bool streaming; + size_t current_buf; + size_t queued_buf; +}; + +struct hwc_ctrl_t { + int max_num_ovly; + int num_of_video_ovly; + int dynamic_recomp_mode; + int skip_static_layer_mode; + int dma_bw_balance_mode; +}; + +#if defined(G2D_COMPOSITION) || defined(USE_GRALLOC_FLAG_FOR_HDMI) +#include "FimgApi.h" +#endif + +#ifdef G2D_COMPOSITION +struct exynos5_g2d_data_t { + int ovly_lay_idx[NUM_HW_WIN_FB_PHY]; + int win_used[NUM_HW_WINDOWS]; +}; +#endif + +class ExynosPrimaryDisplay; +class ExynosExternalDisplay; +class ExynosVirtualDisplay; +#if defined(USES_DUAL_DISPLAY) +class ExynosSecondaryDisplayModule; +#endif +#ifdef USES_VPP +class ExynosDisplayResourceManagerModule; +#endif + +struct exynos5_hwc_composer_device_1_t { + hwc_composer_device_1_t base; + + ExynosPrimaryDisplay *primaryDisplay; +#if defined(USES_DUAL_DISPLAY) + ExynosSecondaryDisplayModule *secondaryDisplay; +#endif + ExynosExternalDisplay *externalDisplay; + ExynosVirtualDisplay *virtualDisplay; + struct v4l2_rect mVirtualDisplayRect; +#ifdef USES_VPP + ExynosDisplayResourceManagerModule *mDisplayResourceManager; +#endif + + int vsync_fd; + FILE *psrInfoFd; + int psrMode; + int panelType; + + const hwc_procs_t *procs; + pthread_t vsync_thread; + int force_gpu; + + bool hdmi_hpd; + + int mHdmiPreset; + int mHdmiCurrentPreset; + bool mHdmiResolutionChanged; + bool mHdmiResolutionHandled; + int mS3DMode; + bool mUseSubtitles; + int video_playback_status; + + int VsyncInterruptStatus; + int CompModeSwitch; + uint64_t LastUpdateTimeStamp; + uint64_t LastModeSwitchTimeStamp; + int totPixels; + int incomingPixels; + int updateCallCnt; + pthread_t update_stat_thread; + int update_event_cnt; + volatile bool update_stat_thread_flag; + + struct hwc_ctrl_t hwc_ctrl; + + int mCecFd; + int mCecPaddr; + int mCecLaddr; + + bool force_mirror_mode; + int ext_fbt_transform; /* HAL_TRANSFORM_ROT_XXX */ + bool external_display_pause; + bool local_external_display_pause; + + bool notifyPSRExit; + bool allowOTF; + int setCount; + bool debugMessages; +}; + +void exynos5_create_update_stat_thread(struct exynos5_hwc_composer_device_1_t *dev); + +enum { + OTF_OFF = 0, + OTF_RUNNING, + OTF_TO_M2M, + SEC_M2M, +}; + +enum { + S3D_MODE_DISABLED = 0, + S3D_MODE_READY, + S3D_MODE_RUNNING, + S3D_MODE_STOPPING, +}; + +enum { + S3D_FB = 
0, + S3D_SBS, + S3D_TB, + S3D_NONE, +}; + +enum { + NO_DRM = 0, + NORMAL_DRM, + SECURE_DRM, +}; + +enum { + PSR_NONE = 0, + PSR_DP, + PSR_MIPI, +}; + +enum { + PANEL_LEGACY = 0, + PANEL_DSC, +}; +#endif diff --git a/libhwc/ExynosHWCDebug.c b/libhwc/ExynosHWCDebug.c new file mode 100644 index 0000000..43c26e4 --- /dev/null +++ b/libhwc/ExynosHWCDebug.c @@ -0,0 +1,59 @@ +#include +#include +#include + +int main(int argc, char **argv) +{ + int option = 0; + size_t i; + int ignoreRest = 0; + char buf[128]; + + if (argc != 2) { + printf("Usage: debug h|d|e|v|u|a|x\n"); + printf("h = hwc\n"); + printf("d = display\n"); + printf("e = hdmi (external)\n"); + printf("v = virtual\n"); + printf("u = hwcutils\n"); + printf("a = enable all\n"); + printf("x = disable all\n"); + return 0; + } + + for (i = 0; i < strlen(argv[1]); i++) { + switch (argv[1][i]) { + case 'h': + option |= (1 << 0); + break; + case 'd': + option |= (1 << 1); + break; + case 'e': + option |= (1 << 2); + break; + case 'v': + option |= (1 << 3); + break; + case 'u': + option |= (1 << 4); + break; + case 'a': + option = 31; + ignoreRest = 1; + break; + case 'x': + option = 0; + ignoreRest = 1; + break; + } + if (ignoreRest) + break; + } + + + sprintf(buf, "service call Exynos.HWCService 105 i32 %d", option); + system(buf); + + return 0; +} diff --git a/libhwc/ExynosHWCDebug.h b/libhwc/ExynosHWCDebug.h new file mode 100644 index 0000000..c927b4c --- /dev/null +++ b/libhwc/ExynosHWCDebug.h @@ -0,0 +1,49 @@ +#ifndef HWC_DEBUG_H +#define HWC_DEBUG_H + +enum { + eDebugDefault = 0x00000001, + eDebugWindowUpdate = 0x00000002, + eDebugWinConfig = 0x00000004, + eDebugSkipStaicLayer = 0x00000008, + eDebugOverlaySupported = 0x00000010, + eDebugResourceAssigning = 0x00000020, + eDebugFence = 0x00000040, + eDebugResourceManager = 0x00000080, + eDebugMPP = 0x00000100, +}; + +inline bool hwcCheckDebugMessages(uint32_t type) +{ + return hwcDebug & type; +} + +#if defined(DISABLE_HWC_DEBUG) +#define HDEBUGLOGD(...) +#define HDEBUGLOGV(...) +#define HDEBUGLOGE(...) +#else +#define HDEBUGLOGD(type, ...) \ + if (hwcCheckDebugMessages(type)) \ + ALOGD(__VA_ARGS__); +#define HDEBUGLOGV(type, ...) \ + if (hwcCheckDebugMessages(type)) \ + ALOGV(__VA_ARGS__); +#define HDEBUGLOGE(type, ...) \ + if (hwcCheckDebugMessages(type)) \ + ALOGE(__VA_ARGS__); +#endif + +#if defined(DISABLE_HWC_DEBUG) +#define DISPLAY_LOGD(...) +#else +#define DISPLAY_LOGD(type, msg, ...) \ + if (hwcCheckDebugMessages(type)) \ + ALOGD("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#endif +#define DISPLAY_LOGV(msg, ...) ALOGV("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGI(msg, ...) ALOGI("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGW(msg, ...) ALOGW("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGE(msg, ...) ALOGE("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) + +#endif diff --git a/libhwc/NOTICE b/libhwc/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhwc/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libhwcService/Android.mk b/libhwcService/Android.mk new file mode 100644 index 0000000..e2cbb15 --- /dev/null +++ b/libhwcService/Android.mk @@ -0,0 +1,91 @@ +# Copyright (C) 2012 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
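+# libExynosHWCService is built only when the device's board config
+# opts in, e.g. BOARD_USES_HWC_SERVICES := true (conventionally set in
+# BoardConfig.mk); the VPP/HDMI/virtual-display include paths below are
+# likewise selected by the corresponding BOARD_* flags.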
+ +ifeq ($(BOARD_USES_HWC_SERVICES),true) + +LOCAL_PATH:= $(call my-dir) +# HAL module implemenation, not prelinked and stored in +# hw/..so + +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libcutils libhardware_legacy libutils libbinder \ + libexynosv4l2 libhdmi libhwcutils libsync +LOCAL_CFLAGS += -DLOG_TAG=\"HWCService\" + +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/hardware/samsung_slsi/exynos/libhwcutils \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule + +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule +else +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libdisplay +endif + +ifeq ($(BOARD_HDMI_INCAPABLE), true) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhdmi_dummy +else +ifeq ($(BOARD_USES_VPP), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvpphdmi +else +ifeq ($(BOARD_USES_NEW_HDMI), true) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhdmi +else +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhdmi_legacy +endif +endif +endif + +ifeq ($(BOARD_TV_PRIMARY), true) +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../libhwc_tvprimary +else +LOCAL_C_INCLUDES += \ + $(TOP)/hardware/samsung_slsi/exynos/libhwc +endif + +ifeq ($(BOARD_USES_VIRTUAL_DISPLAY), true) +ifeq ($(BOARD_USES_VPP), true) + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libvppvirtualdisplay +else + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libvirtualdisplay +endif +ifeq ($(BOARD_USES_VIRTUAL_DISPLAY_DECON_EXT_WB), true) + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libvirtualdisplaymodule +endif + LOCAL_SHARED_LIBRARIES += libvirtualdisplay +ifeq ($(BOARD_USES_VDS_BGRA8888), true) +LOCAL_C_INCLUDES += \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp +endif +endif + +LOCAL_SRC_FILES := ExynosHWCService.cpp IExynosHWC.cpp + +LOCAL_MODULE := libExynosHWCService +LOCAL_MODULE_TAGS := optional + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + +endif diff --git a/libhwcService/ExynosHWCService.cpp b/libhwcService/ExynosHWCService.cpp new file mode 100644 index 0000000..257afe9 --- /dev/null +++ b/libhwcService/ExynosHWCService.cpp @@ -0,0 +1,516 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#include "ExynosHWCService.h" +#include "exynos_v4l2.h" +#include "videodev2_exynos_media.h" +#include "ExynosOverlayDisplay.h" +#include "ExynosExternalDisplay.h" +#ifdef USES_VPP +#include "ExynosPrimaryDisplay.h" +#endif +#if defined(USES_DUAL_DISPLAY) +#include "ExynosSecondaryDisplayModule.h" +#endif +#ifdef USES_VIRTUAL_DISPLAY +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB +#include "ExynosVirtualDisplayModule.h" +#else +#include "ExynosVirtualDisplay.h" +#endif +#endif + +#define HWC_SERVICE_DEBUG 0 + +namespace android { + +ANDROID_SINGLETON_STATIC_INSTANCE(ExynosHWCService); + +enum { + HWC_CTL_MAX_OVLY_CNT = 100, + HWC_CTL_VIDEO_OVLY_CNT = 101, + HWC_CTL_DYNAMIC_RECOMP = 102, + HWC_CTL_SKIP_STATIC = 103, + HWC_CTL_DMA_BW_BAL = 104, + HWC_CTL_SECURE_DMA = 105, +}; + +ExynosHWCService::ExynosHWCService() : + mHWCService(NULL), + mHWCCtx(NULL), + bootFinishedCallback(NULL), + doPSRExit(NULL) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "ExynosHWCService Constructor is called"); +} + +ExynosHWCService::~ExynosHWCService() +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "ExynosHWCService Destructor is called"); +} + +int ExynosHWCService::setWFDMode(unsigned int mode) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%d", __func__, mode); +#ifdef USES_VIRTUAL_DISPLAY +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + if (!!mode) { + mHWCCtx->virtualDisplay->requestIONMemory(); + } else { + mHWCCtx->virtualDisplay->freeIONMemory(); + } +#endif + + mHWCCtx->virtualDisplay->mIsWFDState = !!mode; +#endif + return INVALID_OPERATION; +} + +int ExynosHWCService::setWFDOutputResolution(unsigned int width, unsigned int height, + unsigned int __unused disp_w, unsigned int __unused disp_h) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::width=%d, height=%d", __func__, width, height); + +#ifdef USES_VIRTUAL_DISPLAY +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + ExynosVirtualDisplayModule *virDisplay = (ExynosVirtualDisplayModule *)mHWCCtx->virtualDisplay; + virDisplay->setWFDOutputResolution(width, height, disp_w, height); +#else + mHWCCtx->virtualDisplay->mWidth = width; + mHWCCtx->virtualDisplay->mHeight = height; + mHWCCtx->virtualDisplay->mDisplayWidth = disp_w; + mHWCCtx->virtualDisplay->mDisplayHeight = disp_h; +#endif + return NO_ERROR; +#endif + return INVALID_OPERATION; +} + +int ExynosHWCService::setVDSGlesFormat(int format) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::format=%d", __func__, format); + +#ifdef USES_VIRTUAL_DISPLAY +#ifdef USES_VDS_OTHERFORMAT + ExynosVirtualDisplayModule *virDisplay = (ExynosVirtualDisplayModule *)mHWCCtx->virtualDisplay; + virDisplay->setVDSGlesFormat(format); + + return NO_ERROR; +#endif +#endif + return INVALID_OPERATION; +} + +void ExynosHWCService::setWFDSleepCtrl(bool __unused black) +{ +} + +int ExynosHWCService::setExtraFBMode(unsigned int mode) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%d", __func__, mode); + return NO_ERROR; +} + +int ExynosHWCService::setCameraMode(unsigned int mode) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%d", __func__, mode); + return NO_ERROR; +} + +int ExynosHWCService::setForceMirrorMode(unsigned int mode) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%d", __func__, mode); + mHWCCtx->force_mirror_mode = mode; + mHWCCtx->procs->invalidate(mHWCCtx->procs); + return NO_ERROR; +} + +int ExynosHWCService::setVideoPlayStatus(unsigned int status) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::status=%d", __func__, status); + if (mHWCCtx) + mHWCCtx->video_playback_status = status; + + return NO_ERROR; +} + +int ExynosHWCService::setExternalDisplayPause(bool onoff) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, 
"%s::onoff=%d", __func__, onoff); + if (mHWCCtx) + mHWCCtx->external_display_pause = onoff; + + return NO_ERROR; +} + +int ExynosHWCService::setDispOrientation(unsigned int transform) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%x", __func__, transform); +#ifdef USES_VIRTUAL_DISPLAY + mHWCCtx->virtualDisplay->mDeviceOrientation = transform; +#endif + return NO_ERROR; +} + +int ExynosHWCService::setProtectionMode(unsigned int mode) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%d", __func__, mode); + return NO_ERROR; +} + +int ExynosHWCService::setExternalDispLayerNum(unsigned int num) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::mode=%d", __func__, num); + return NO_ERROR; +} + +int ExynosHWCService::setForceGPU(unsigned int on) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::on/off=%d", __func__, on); + mHWCCtx->force_gpu = on; + mHWCCtx->procs->invalidate(mHWCCtx->procs); + return NO_ERROR; +} + +int ExynosHWCService::setExternalUITransform(unsigned int transform) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::transform=%d", __func__, transform); + mHWCCtx->ext_fbt_transform = transform; +#ifdef USES_VIRTUAL_DISPLAY + mHWCCtx->virtualDisplay->mFrameBufferTargetTransform = transform; +#endif + mHWCCtx->procs->invalidate(mHWCCtx->procs); + return NO_ERROR; +} + +int ExynosHWCService::getExternalUITransform(void) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s", __func__); +#ifdef USES_VIRTUAL_DISPLAY + return mHWCCtx->virtualDisplay->mFrameBufferTargetTransform; +#else + return mHWCCtx->ext_fbt_transform; +#endif +} + +int ExynosHWCService::setWFDOutputTransform(unsigned int transform) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::transform=%d", __func__, transform); + return INVALID_OPERATION; +} + +int ExynosHWCService::getWFDOutputTransform(void) +{ + return INVALID_OPERATION; +} + +void ExynosHWCService::setHdmiResolution(int resolution, int s3dMode) +{ +#ifndef HDMI_INCAPABLE + if (resolution == 0) + resolution = mHWCCtx->mHdmiCurrentPreset; + if (s3dMode == S3D_NONE) { + if (mHWCCtx->mHdmiCurrentPreset == resolution) + return; + mHWCCtx->mHdmiPreset = resolution; + mHWCCtx->mHdmiResolutionChanged = true; + mHWCCtx->procs->invalidate(mHWCCtx->procs); + return; + } + + switch (resolution) { + case HDMI_720P_60: + resolution = S3D_720P_60_BASE + s3dMode; + break; + case HDMI_720P_59_94: + resolution = S3D_720P_59_94_BASE + s3dMode; + break; + case HDMI_720P_50: + resolution = S3D_720P_50_BASE + s3dMode; + break; + case HDMI_1080P_24: + resolution = S3D_1080P_24_BASE + s3dMode; + break; + case HDMI_1080P_23_98: + resolution = S3D_1080P_23_98_BASE + s3dMode; + break; + case HDMI_1080P_30: + resolution = S3D_1080P_30_BASE + s3dMode; + break; + case HDMI_1080I_60: + if (s3dMode != S3D_SBS) + return; + resolution = V4L2_DV_1080I60_SB_HALF; + break; + case HDMI_1080I_59_94: + if (s3dMode != S3D_SBS) + return; + resolution = V4L2_DV_1080I59_94_SB_HALF; + break; + case HDMI_1080P_60: + if (s3dMode != S3D_SBS && s3dMode != S3D_TB) + return; + resolution = S3D_1080P_60_BASE + s3dMode; + break; + default: + return; + } + mHWCCtx->mHdmiPreset = resolution; + mHWCCtx->mHdmiResolutionChanged = true; + mHWCCtx->mS3DMode = S3D_MODE_READY; + mHWCCtx->procs->invalidate(mHWCCtx->procs); +#endif +} + +void ExynosHWCService::setHdmiCableStatus(int status) +{ + mHWCCtx->hdmi_hpd = !!status; +} + +void ExynosHWCService::setHdmiHdcp(int status) +{ + mHWCCtx->externalDisplay->setHdcpStatus(status); +} + +void ExynosHWCService::setHdmiAudioChannel(uint32_t channels) +{ + mHWCCtx->externalDisplay->setAudioChannel(channels); +} + +void 
ExynosHWCService::setHdmiSubtitles(bool use) +{ + mHWCCtx->externalDisplay->mUseSubtitles = use; +} + +void ExynosHWCService::setPresentationMode(bool use) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::PresentationMode=%s", __func__, use == false ? "false" : "true"); +#ifdef USES_VIRTUAL_DISPLAY + mHWCCtx->virtualDisplay->mPresentationMode = !!use; +#endif +} + +int ExynosHWCService::getWFDMode() +{ +#ifdef USES_VIRTUAL_DISPLAY + return mHWCCtx->virtualDisplay->mIsWFDState; +#endif + return INVALID_OPERATION; +} + +void ExynosHWCService::getWFDOutputResolution(unsigned int *width, unsigned int *height) +{ +#ifdef USES_VIRTUAL_DISPLAY + *width = mHWCCtx->virtualDisplay->mWidth; + *height = mHWCCtx->virtualDisplay->mHeight; +#else + *width = 0; + *height = 0; +#endif +} + +int ExynosHWCService::getWFDOutputInfo(int __unused *fd1, int __unused *fd2, struct wfd_layer_t __unused *wfd_info) +{ + return INVALID_OPERATION; +} + +int ExynosHWCService::getPresentationMode() +{ +#ifdef USES_VIRTUAL_DISPLAY + return mHWCCtx->virtualDisplay->mPresentationMode; +#else + return INVALID_OPERATION; +#endif +} + +void ExynosHWCService::getHdmiResolution(uint32_t *width, uint32_t *height) +{ +#ifndef HDMI_INCAPABLE + switch (mHWCCtx->mHdmiCurrentPreset) { + case V4L2_DV_480P59_94: + case V4L2_DV_480P60: + *width = 640; + *height = 480; + break; + case V4L2_DV_576P50: + *width = 720; + *height = 576; + break; + case V4L2_DV_720P24: + case V4L2_DV_720P25: + case V4L2_DV_720P30: + case V4L2_DV_720P50: + case V4L2_DV_720P59_94: + case V4L2_DV_720P60: + case V4L2_DV_720P60_FP: + case V4L2_DV_720P60_SB_HALF: + case V4L2_DV_720P60_TB: + case V4L2_DV_720P59_94_FP: + case V4L2_DV_720P59_94_SB_HALF: + case V4L2_DV_720P59_94_TB: + case V4L2_DV_720P50_FP: + case V4L2_DV_720P50_SB_HALF: + case V4L2_DV_720P50_TB: + *width = 1280; + *height = 720; + break; + case V4L2_DV_1080I29_97: + case V4L2_DV_1080I30: + case V4L2_DV_1080I25: + case V4L2_DV_1080I50: + case V4L2_DV_1080I60: + case V4L2_DV_1080P24: + case V4L2_DV_1080P25: + case V4L2_DV_1080P30: + case V4L2_DV_1080P50: + case V4L2_DV_1080P60: + case V4L2_DV_1080I59_94: + case V4L2_DV_1080P59_94: + case V4L2_DV_1080P24_FP: + case V4L2_DV_1080P24_SB_HALF: + case V4L2_DV_1080P24_TB: + case V4L2_DV_1080P23_98_FP: + case V4L2_DV_1080P23_98_SB_HALF: + case V4L2_DV_1080P23_98_TB: + case V4L2_DV_1080I60_SB_HALF: + case V4L2_DV_1080I59_94_SB_HALF: + case V4L2_DV_1080I50_SB_HALF: + case V4L2_DV_1080P60_SB_HALF: + case V4L2_DV_1080P60_TB: + case V4L2_DV_1080P30_FP: + case V4L2_DV_1080P30_SB_HALF: + case V4L2_DV_1080P30_TB: + *width = 1920; + *height = 1080; + break; + } +#endif +} + +uint32_t ExynosHWCService::getHdmiCableStatus() +{ + return !!mHWCCtx->hdmi_hpd; +} + +uint32_t ExynosHWCService::getHdmiAudioChannel() +{ + return mHWCCtx->externalDisplay->getAudioChannel(); +} + +void ExynosHWCService::setHWCCtl(int ctrl, int val) +{ + int err = 0; + switch (ctrl) { + case HWC_CTL_MAX_OVLY_CNT: + mHWCCtx->hwc_ctrl.max_num_ovly = val; + break; + case HWC_CTL_VIDEO_OVLY_CNT: + mHWCCtx->hwc_ctrl.num_of_video_ovly = val; + break; + case HWC_CTL_DYNAMIC_RECOMP: + mHWCCtx->hwc_ctrl.dynamic_recomp_mode = val; + break; + case HWC_CTL_SKIP_STATIC: + mHWCCtx->hwc_ctrl.skip_static_layer_mode = val; + break; + case HWC_CTL_DMA_BW_BAL: + mHWCCtx->hwc_ctrl.dma_bw_balance_mode = val; + break; + case HWC_CTL_SECURE_DMA: +#ifdef USES_VPP +#if defined(USES_DUAL_DISPLAY) + mHWCCtx->secondaryDisplay->mUseSecureDMA = (bool)val; +#else + mHWCCtx->primaryDisplay->mUseSecureDMA = (bool)val; +#endif +#endif + 
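+        /* With USES_VPP, HWC_CTL_SECURE_DMA toggles mUseSecureDMA on the
+         * secondary display when USES_DUAL_DISPLAY is defined and on the
+         * primary display otherwise; without USES_VPP the value is ignored. */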
break; + default: + ALOGE("%s: unsupported HWC_CTL", __func__); + err = -1; + break; + } + + if ((err >= 0) && (mHWCCtx->procs)) { + mHWCCtx->procs->invalidate(mHWCCtx->procs); + } +} + +void ExynosHWCService::notifyPSRExit() +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s", __func__); + if (doPSRExit != NULL) + doPSRExit(mHWCCtx); +} + +void ExynosHWCService::setHWCDebug(int debug) +{ + hwcDebug = debug; +} + +int ExynosHWCService::createServiceLocked() +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::", __func__); + sp sm = defaultServiceManager(); + sm->addService(String16("Exynos.HWCService"), mHWCService); + if (sm->checkService(String16("Exynos.HWCService")) != NULL) { + ALOGD_IF(HWC_SERVICE_DEBUG, "adding Exynos.HWCService succeeded"); + return 0; + } else { + ALOGE_IF(HWC_SERVICE_DEBUG, "adding Exynos.HWCService failed"); + return -1; + } +} + +ExynosHWCService *ExynosHWCService::getExynosHWCService() +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "%s::", __func__); + ExynosHWCService& instance = ExynosHWCService::getInstance(); + Mutex::Autolock _l(instance.mLock); + if (instance.mHWCService == NULL) { + instance.mHWCService = &instance; + + int status = ExynosHWCService::getInstance().createServiceLocked(); + if (status != 0) { + ALOGE_IF(HWC_SERVICE_DEBUG, "getExynosHWCService failed"); + } + } + return instance.mHWCService; +} + +void ExynosHWCService::setExynosHWCCtx(ExynosHWCCtx *HWCCtx) +{ + ALOGD_IF(HWC_SERVICE_DEBUG, "HWCCtx=0x%x", (ptrdiff_t)HWCCtx); + if(HWCCtx) { + mHWCCtx = HWCCtx; + } +} + +void ExynosHWCService::setBootFinishedCallback(void (*callback)(exynos5_hwc_composer_device_1_t *)) +{ + bootFinishedCallback = callback; +} + +void ExynosHWCService::setPSRExitCallback(void (*callback)(exynos5_hwc_composer_device_1_t *)) +{ + doPSRExit = callback; +} + +void ExynosHWCService::setBootFinished() { + if (bootFinishedCallback != NULL) + bootFinishedCallback(mHWCCtx); +} + +} diff --git a/libhwcService/ExynosHWCService.h b/libhwcService/ExynosHWCService.h new file mode 100644 index 0000000..e910da8 --- /dev/null +++ b/libhwcService/ExynosHWCService.h @@ -0,0 +1,129 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_EXYNOS_HWC_SERVICE_H_ +#define ANDROID_EXYNOS_HWC_SERVICE_H_ + +#include +#include +#include +#include +#include +#include +#include "IExynosHWC.h" +#include "ExynosHWC.h" + +typedef struct exynos5_hwc_composer_device_1_t ExynosHWCCtx; + +namespace android { + + enum { + HDMI_RESOLUTION_BASE = 0, + HDMI_480P_59_94, + HDMI_576P_50, + HDMI_720P_24, + HDMI_720P_25, + HDMI_720P_30, + HDMI_720P_50, + HDMI_720P_59_94, + HDMI_720P_60, + HDMI_1080I_29_97, + HDMI_1080I_30, + HDMI_1080I_25, + HDMI_1080I_50, + HDMI_1080I_60, + HDMI_1080P_24, + HDMI_1080P_25, + HDMI_1080P_30, + HDMI_1080P_50, + HDMI_1080P_60, + HDMI_480P_60, + HDMI_1080I_59_94, + HDMI_1080P_59_94, + HDMI_1080P_23_98, + HDMI_2160P_30 = 47, + }; + +#define S3D_720P_60_BASE 22 +#define S3D_720P_59_94_BASE 25 +#define S3D_720P_50_BASE 28 +#define S3D_1080P_24_BASE 31 +#define S3D_1080P_23_98_BASE 34 +#define S3D_1080P_60_BASE 39 +#define S3D_1080P_30_BASE 42 + +class ExynosHWCService + : public BnExynosHWCService, Singleton { + +public: + static ExynosHWCService* getExynosHWCService(); + ~ExynosHWCService(); + + virtual int setWFDMode(unsigned int mode); + virtual int setWFDOutputResolution(unsigned int width, unsigned int height, + unsigned int disp_w, unsigned int disp_h); + virtual int setVDSGlesFormat(int format); + virtual int setExtraFBMode(unsigned int mode); + virtual int setCameraMode(unsigned int mode); + virtual int setForceMirrorMode(unsigned int mode); + virtual int setVideoPlayStatus(unsigned int mode); + virtual int setExternalDisplayPause(bool onoff); + virtual int setDispOrientation(unsigned int transform); + virtual int setProtectionMode(unsigned int mode); + virtual int setExternalDispLayerNum(unsigned int num); + virtual int setForceGPU(unsigned int on); + virtual int setExternalUITransform(unsigned int transform); + virtual int getExternalUITransform(void); + virtual int setWFDOutputTransform(unsigned int transform); + virtual int getWFDOutputTransform(void); + + virtual void setHdmiResolution(int resolution, int s3dMode); + virtual void setHdmiCableStatus(int status); + virtual void setHdmiHdcp(int status); + virtual void setHdmiAudioChannel(uint32_t channels); + virtual void setHdmiSubtitles(bool use); + virtual void setPresentationMode(bool use); + virtual void setWFDSleepCtrl(bool black); + + virtual int getWFDMode(); + virtual void getWFDOutputResolution(unsigned int *width, unsigned int *height); + virtual int getWFDOutputInfo(int *fd1, int *fd2, struct wfd_layer_t *wfd_info); + virtual int getPresentationMode(void); + virtual void getHdmiResolution(uint32_t *width, uint32_t *height); + virtual uint32_t getHdmiCableStatus(); + virtual uint32_t getHdmiAudioChannel(); + virtual void setHWCCtl(int ctrl, int val); + virtual void setHWCDebug(int debug); + virtual void setExynosHWCCtx(ExynosHWCCtx *); + virtual void setBootFinished(void); + void setBootFinishedCallback(void (*callback)(exynos5_hwc_composer_device_1_t *)); + void setPSRExitCallback(void (*callback)(exynos5_hwc_composer_device_1_t *)); + + virtual void notifyPSRExit(); + +private: + friend class Singleton; + ExynosHWCService(); + int createServiceLocked(); + ExynosHWCService *mHWCService; + Mutex mLock; + ExynosHWCCtx *mHWCCtx; + void (*bootFinishedCallback)(exynos5_hwc_composer_device_1_t *); + void (*doPSRExit)(ExynosHWCCtx *ctx); +}; + +} +#endif diff --git a/libhwcService/IExynosHWC.cpp b/libhwcService/IExynosHWC.cpp new file mode 100644 index 0000000..71732a4 --- /dev/null +++ b/libhwcService/IExynosHWC.cpp @@ -0,0 +1,670 @@ 
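ExynosHWCService registers itself with the service manager under the name "Exynos.HWCService" (see createServiceLocked() above), and IExynosHWC.cpp below provides the Binder proxy (BpExynosHWCService) and stub (BnExynosHWCService) for that interface. As a minimal client sketch only — not part of the imported BSP, and assuming the standard AOSP libbinder/libutils headers plus IExynosHWC.h on the include path — a native process could reach the service like this:

// Illustrative client sketch (hypothetical helper, not defined in this patch).
#include <binder/IInterface.h>
#include <binder/IServiceManager.h>
#include <utils/String16.h>
#include <utils/StrongPointer.h>
#include "IExynosHWC.h"

int queryHdmiCable()
{
    using namespace android;

    // The service publishes itself as "Exynos.HWCService" in
    // ExynosHWCService::createServiceLocked().
    sp<IBinder> binder =
        defaultServiceManager()->checkService(String16("Exynos.HWCService"));
    if (binder == NULL)
        return -1;

    // DECLARE_META_INTERFACE(ExynosHWCService) is what makes
    // interface_cast<> available for this interface.
    sp<IExynosHWCService> hwc = interface_cast<IExynosHWCService>(binder);

    // The call goes through BpExynosHWCService and is handled in
    // BnExynosHWCService::onTransact() shown below.
    return (int)hwc->getHdmiCableStatus();
}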
+/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include +#include +#include + +#include +#include + +#include "IExynosHWC.h" + +namespace android { + +enum { + SET_WFD_MODE = 0, + SET_WFD_OUTPUT_RESOLUTION, + SET_VDS_GLES_FORMAT, + SET_EXT_FB_MODE, + SET_CAMERA_MODE, + SET_FORCE_MIRROR_MODE, + SET_VIDEO_PLAY_STATUS, + SET_EXTERNAL_DISPLAY_PAUSE, + SET_DISPLAY_ORIENTATION, + SET_PROTECTION_MODE, + SET_EXTERNAL_DISP_LAY_NUM, + SET_FORCE_GPU, + SET_EXT_UI_TRANSFORM, + GET_EXT_UI_TRANSFORM, + SET_WFD_OUTPUT_TRANSFORM, + GET_WFD_OUTPUT_TRANSFORM, + SET_HDMI_CABLE_STATUS, + SET_HDMI_MODE, + SET_HDMI_RESOLUTION, + SET_HDMI_HDCP, + SET_HDMI_HWC_LAYER, + SET_HDMI_ENABLE, + SET_HDMI_LAYER_ENABLE, + SET_HDMI_LAYER_DISABLE, + SET_HDMI_AUDIO_CHANNEL, + SET_HDMI_SUBTITLES, + SET_HDMI_ROTATE, + SET_HDMI_PATH, + SET_HDMI_DRM, + SET_PRESENTATION_MODE, + GET_WFD_MODE, + GET_WFD_OUTPUT_RESOLUTION, + GET_WFD_OUTPUT_INFO, + GET_PRESENTATION_MODE, + GET_HDMI_CABLE_STATUS, + GET_HDMI_RESOLUTION, + GET_HDMI_AUDIO_CHANNEL, + SET_WFD_SLEEP_CTRL, + SET_BOOT_FINISHED, + NOTIFY_PSR_EXIT, + SET_HWC_CTL_MAX_OVLY_CNT = 100, + SET_HWC_CTL_VIDEO_OVLY_CNT = 101, + SET_HWC_CTL_DYNAMIC_RECOMP = 102, + SET_HWC_CTL_SKIP_STATIC = 103, + SET_HWC_CTL_DMA_BW_BAL = 104, + SET_HWC_CTL_SECURE_DMA = 105, + SET_HWC_DEBUG = 106, +}; + +class BpExynosHWCService : public BpInterface { +public: + BpExynosHWCService(const sp& impl) + : BpInterface(impl) + { + } + + virtual int setWFDMode(unsigned int mode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(mode); + int result = remote()->transact(SET_WFD_MODE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setWFDOutputResolution(unsigned int width, unsigned int height, + unsigned int disp_w, unsigned int disp_h) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(width); + data.writeInt32(height); + data.writeInt32(disp_w); + data.writeInt32(disp_h); + int result = remote()->transact(SET_WFD_OUTPUT_RESOLUTION, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setVDSGlesFormat(int format) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(format); + int result = remote()->transact(SET_VDS_GLES_FORMAT, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual void setWFDSleepCtrl(bool black) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(black); + remote()->transact(SET_WFD_SLEEP_CTRL, data, &reply); + } + + virtual int setExtraFBMode(unsigned int mode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(mode); + int result = remote()->transact(SET_EXT_FB_MODE, data, &reply); + result = 
reply.readInt32(); + return result; + } + + virtual int setCameraMode(unsigned int mode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(mode); + int result = remote()->transact(SET_CAMERA_MODE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setForceMirrorMode(unsigned int mode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(mode); + int result = remote()->transact(SET_FORCE_MIRROR_MODE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setVideoPlayStatus(unsigned int mode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(mode); + int result = remote()->transact(SET_VIDEO_PLAY_STATUS, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setExternalDisplayPause(bool onoff) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(onoff); + int result = remote()->transact(SET_EXTERNAL_DISPLAY_PAUSE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setDispOrientation(unsigned int transform) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(transform); + int result = remote()->transact(SET_DISPLAY_ORIENTATION, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setProtectionMode(unsigned int mode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(mode); + int result = remote()->transact(SET_PROTECTION_MODE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setExternalDispLayerNum(unsigned int num) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(num); + int result = remote()->transact(SET_EXTERNAL_DISP_LAY_NUM, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setForceGPU(unsigned int on) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(on); + int result = remote()->transact(SET_FORCE_GPU, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setExternalUITransform(unsigned int transform) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(transform); + int result = remote()->transact(SET_EXT_UI_TRANSFORM, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int getExternalUITransform(void) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + int result = remote()->transact(GET_EXT_UI_TRANSFORM, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int setWFDOutputTransform(unsigned int transform) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(transform); + int result = remote()->transact(SET_WFD_OUTPUT_TRANSFORM, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual int getWFDOutputTransform(void) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + int result = remote()->transact(GET_WFD_OUTPUT_TRANSFORM, data, &reply); + 
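+        /* transact() is synchronous here: it returns once the service side
+         * (BnExynosHWCService::onTransact, GET_WFD_OUTPUT_TRANSFORM case) has
+         * filled the reply parcel.  The readInt32() below then overwrites the
+         * transport status with the value the service wrote. */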
result = reply.readInt32(); + return result; + } + + virtual void setHdmiResolution(int resolution, int s3dMode) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(resolution); + data.writeInt32(s3dMode); + remote()->transact(SET_HDMI_RESOLUTION, data, &reply); + } + + virtual void setHdmiCableStatus(int status) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(status); + remote()->transact(SET_HDMI_CABLE_STATUS, data, &reply); + } + + virtual void setHdmiHdcp(int status) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(status); + remote()->transact(SET_HDMI_HDCP, data, &reply); + } + + virtual void setHdmiAudioChannel(uint32_t channels) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(channels); + remote()->transact(SET_HDMI_AUDIO_CHANNEL, data, &reply); + } + + virtual void setHdmiSubtitles(bool use) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(use); + remote()->transact(SET_HDMI_SUBTITLES, data, &reply); + } + + virtual void setPresentationMode(bool use) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + data.writeInt32(use); + remote()->transact(SET_PRESENTATION_MODE, data, &reply); + } + + virtual int getWFDMode() + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + int result = remote()->transact(GET_WFD_MODE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual void getWFDOutputResolution(unsigned int *width, unsigned int *height) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(GET_WFD_OUTPUT_RESOLUTION, data, &reply); + *width = reply.readInt32(); + *height = reply.readInt32(); + } + + virtual int getWFDOutputInfo(int *fd1, int *fd2, struct wfd_layer_t *wfd_info) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(GET_WFD_OUTPUT_INFO, data, &reply); + int y_fd = reply.readFileDescriptor(); + int uv_fd = reply.readFileDescriptor(); + reply.read(wfd_info, sizeof(struct wfd_layer_t)); + + if (y_fd >= 0 && uv_fd >= 0) { + *fd1 = dup(y_fd); + *fd2 = dup(uv_fd); + } else { + *fd1 = *fd2 = -1; + } + + return reply.readInt32(); + } + + virtual int getPresentationMode(void) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + int result = remote()->transact(GET_PRESENTATION_MODE, data, &reply); + result = reply.readInt32(); + return result; + } + + virtual void getHdmiResolution(uint32_t *width, uint32_t *height) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(GET_HDMI_RESOLUTION, data, &reply); + *width = (uint32_t)reply.readInt32(); + *height = (uint32_t)reply.readInt32(); + } + + virtual uint32_t getHdmiCableStatus() + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(GET_HDMI_CABLE_STATUS, data, &reply); + return (uint32_t)reply.readInt32(); + } + + virtual uint32_t getHdmiAudioChannel() + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + 
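+        /* The interface token written above is what CHECK_INTERFACE()
+         * validates in BnExynosHWCService::onTransact() before the call
+         * is dispatched to the service implementation. */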
remote()->transact(GET_HDMI_AUDIO_CHANNEL, data, &reply); + return (uint32_t)reply.readInt32(); + } + + virtual void setHWCCtl(int ctrl, int val) { + }; + + virtual void setBootFinished() + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(SET_BOOT_FINISHED, data, &reply); + } + + virtual void notifyPSRExit() + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(NOTIFY_PSR_EXIT, data, &reply); + } + + virtual void setHWCDebug(int debug) + { + Parcel data, reply; + data.writeInterfaceToken(IExynosHWCService::getInterfaceDescriptor()); + remote()->transact(SET_HWC_DEBUG, data, &reply); + } +}; + +IMPLEMENT_META_INTERFACE(ExynosHWCService, "android.hal.ExynosHWCService"); + +status_t BnExynosHWCService::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case SET_WFD_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int mode = data.readInt32(); + int res = setWFDMode(mode); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_WFD_OUTPUT_RESOLUTION: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int width = data.readInt32(); + int height = data.readInt32(); + int disp_w = data.readInt32(); + int disp_h = data.readInt32(); + int res = setWFDOutputResolution(width, height, disp_w, disp_h); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_VDS_GLES_FORMAT: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int format = data.readInt32(); + int res = setVDSGlesFormat(format); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_WFD_SLEEP_CTRL: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + bool mode = data.readInt32(); + setWFDSleepCtrl(mode); + return NO_ERROR; + } break; + case SET_EXT_FB_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int mode = data.readInt32(); + int res = setExtraFBMode(mode); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_CAMERA_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int mode = data.readInt32(); + int res = setCameraMode(mode); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_FORCE_MIRROR_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int mode = data.readInt32(); + int res = setForceMirrorMode(mode); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_VIDEO_PLAY_STATUS: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int mode = data.readInt32(); + int res = setVideoPlayStatus(mode); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_EXTERNAL_DISPLAY_PAUSE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + bool onoff = data.readInt32(); + int res = setExternalDisplayPause(onoff); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_DISPLAY_ORIENTATION: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int transform = data.readInt32(); + int res = setDispOrientation(transform); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_PROTECTION_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int mode = data.readInt32(); + int res = setProtectionMode(mode); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_EXTERNAL_DISP_LAY_NUM: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int num = data.readInt32(); + int res = setExternalDispLayerNum(num); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_FORCE_GPU: { + 
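+        /* Each case mirrors its proxy method: arguments are read from the
+         * parcel in the order BpExynosHWCService wrote them, and the return
+         * code is written back for the proxy's reply.readInt32(). */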
CHECK_INTERFACE(IExynosHWCService, data, reply); + int on = data.readInt32(); + int res = setForceGPU(on); + reply->writeInt32(res); + return NO_ERROR; + } break; + case SET_EXT_UI_TRANSFORM: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int transform = data.readInt32(); + int res = setExternalUITransform(transform); + reply->writeInt32(res); + return NO_ERROR; + } break; + case GET_EXT_UI_TRANSFORM: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int transform = getExternalUITransform(); + reply->writeInt32(transform); + return NO_ERROR; + } break; + case SET_WFD_OUTPUT_TRANSFORM: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int transform = data.readInt32(); + int res = setWFDOutputTransform(transform); + reply->writeInt32(res); + return NO_ERROR; + } break; + case GET_WFD_OUTPUT_TRANSFORM: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int transform = getWFDOutputTransform(); + reply->writeInt32(transform); + return NO_ERROR; + } break; + case SET_HDMI_RESOLUTION: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int resolution = data.readInt32(); + int s3dMode = data.readInt32(); + setHdmiResolution(resolution, s3dMode); + return NO_ERROR; + } break; + case GET_HDMI_RESOLUTION: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + uint32_t width, height; + getHdmiResolution(&width, &height); + reply->writeInt32(width); + reply->writeInt32(height); + return NO_ERROR; + } break; + case SET_HDMI_CABLE_STATUS: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int status = data.readInt32(); + setHdmiCableStatus(status); + return NO_ERROR; + } break; + case GET_HDMI_CABLE_STATUS: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + reply->writeInt32(getHdmiCableStatus()); + return NO_ERROR; + } break; + case SET_HDMI_HDCP: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int status = data.readInt32(); + setHdmiHdcp(status); + return NO_ERROR; + } break; + case GET_HDMI_AUDIO_CHANNEL: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + reply->writeInt32(getHdmiAudioChannel()); + return NO_ERROR; + } break; + case SET_HDMI_AUDIO_CHANNEL: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int channels = data.readInt32(); + setHdmiAudioChannel(channels); + return NO_ERROR; + } break; + case SET_HDMI_SUBTITLES: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int use = data.readInt32(); + setHdmiSubtitles(use); + return NO_ERROR; + } break; + case SET_PRESENTATION_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int use = data.readInt32(); + setPresentationMode(use); + return NO_ERROR; + } break; + case GET_WFD_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int res = getWFDMode(); + reply->writeInt32(res); + return NO_ERROR; + } break; + case GET_WFD_OUTPUT_RESOLUTION: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + uint32_t width, height; + getWFDOutputResolution(&width, &height); + reply->writeInt32(width); + reply->writeInt32(height); + return NO_ERROR; + } break; + case GET_WFD_OUTPUT_INFO: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int fd1, fd2; + struct wfd_layer_t wfd_info; + int res = getWFDOutputInfo(&fd1, &fd2, &wfd_info); + reply->writeFileDescriptor(fd1); + reply->writeFileDescriptor(fd2); + reply->write(&wfd_info, sizeof(struct wfd_layer_t)); + reply->writeInt32(res); + return NO_ERROR; + } break; + case GET_PRESENTATION_MODE: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int res = getPresentationMode(); + reply->writeInt32(res); + return NO_ERROR; + } break; + case 
SET_HWC_CTL_MAX_OVLY_CNT: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int val = data.readInt32(); + setHWCCtl(SET_HWC_CTL_MAX_OVLY_CNT, val); + return NO_ERROR; + } break; + case SET_HWC_CTL_VIDEO_OVLY_CNT: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int val = data.readInt32(); + setHWCCtl(SET_HWC_CTL_VIDEO_OVLY_CNT, val); + return NO_ERROR; + } break; + case SET_HWC_CTL_DYNAMIC_RECOMP: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int val = data.readInt32(); + setHWCCtl(SET_HWC_CTL_DYNAMIC_RECOMP, val); + return NO_ERROR; + } break; + case SET_HWC_CTL_SKIP_STATIC: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int val = data.readInt32(); + setHWCCtl(SET_HWC_CTL_SKIP_STATIC, val); + return NO_ERROR; + } break; + case SET_HWC_CTL_DMA_BW_BAL: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int val = data.readInt32(); + setHWCCtl(SET_HWC_CTL_DMA_BW_BAL, val); + return NO_ERROR; + } break; + case SET_HWC_CTL_SECURE_DMA: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int val = data.readInt32(); + setHWCCtl(SET_HWC_CTL_SECURE_DMA, val); + return NO_ERROR; + } break; + case SET_BOOT_FINISHED: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + setBootFinished(); + return NO_ERROR; + } break; + case NOTIFY_PSR_EXIT: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + notifyPSRExit(); + return NO_ERROR; + } + case SET_HWC_DEBUG: { + CHECK_INTERFACE(IExynosHWCService, data, reply); + int debug = data.readInt32(); + setHWCDebug(debug); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} +} diff --git a/libhwcService/IExynosHWC.h b/libhwcService/IExynosHWC.h new file mode 100644 index 0000000..36fe8d1 --- /dev/null +++ b/libhwcService/IExynosHWC.h @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_EXYNOS_IHWC_H_ +#define ANDROID_EXYNOS_IHWC_H_ + +#include +#include + +#include +#include +#include + +struct wfd_layer_t { + bool isPresentation; + bool isDrm; + struct timeval tv_stamp; +}; + +namespace android { + +enum { + VIDEO_PLAY_NORMAL = 0, + VIDEO_PLAY_PAUSE, + VIDEO_PLAY_SEEK, +}; + +class IExynosHWCService : public IInterface { +public: + + DECLARE_META_INTERFACE(ExynosHWCService); + + /* + * setWFDMode() function sets the WFD operation Mode. + * It enables / disables the WFD. + */ + virtual int setWFDMode(unsigned int mode) = 0; + virtual int setWFDOutputResolution(unsigned int width, unsigned int height, + unsigned int disp_w, unsigned int disp_h) = 0; + virtual int setVDSGlesFormat(int format) = 0; + + /* + * setExtraFBMode() function Enables / disables the extra FB usage. + */ + virtual int setExtraFBMode(unsigned int mode) = 0; + virtual int setCameraMode(unsigned int mode) = 0; + virtual int setForceMirrorMode(unsigned int mode) = 0; + + /* + * setVideoPlayStatus() function sets the Video playback status. 
+ * It is used to inform the HWC about the video playback seek and + * pause status. + */ + virtual int setVideoPlayStatus(unsigned int mode) = 0; + virtual int setExternalDisplayPause(bool onoff) = 0; + virtual int setDispOrientation(unsigned int transform) = 0; + virtual int setProtectionMode(unsigned int mode) = 0; + virtual int setExternalDispLayerNum(unsigned int num) = 0; + virtual int setForceGPU(unsigned int on) = 0; + virtual int setExternalUITransform(unsigned int transform) = 0; + virtual int getExternalUITransform(void) = 0; + virtual int setWFDOutputTransform(unsigned int transform) = 0; + virtual int getWFDOutputTransform(void) = 0; + virtual void setHdmiResolution(int resolution, int s3dMode) = 0; + virtual void setHdmiCableStatus(int status) = 0; + virtual void setHdmiHdcp(int status) = 0; + virtual void setHdmiAudioChannel(uint32_t channels) = 0; + virtual void setHdmiSubtitles(bool use) = 0; + virtual void setPresentationMode(bool use) = 0; + virtual void setWFDSleepCtrl(bool black) = 0; + + virtual int getWFDMode() = 0; + virtual void getWFDOutputResolution(unsigned int *width, unsigned int *height) = 0; + virtual int getWFDOutputInfo(int *fd1, int *fd2, struct wfd_layer_t *wfd_info) = 0; + virtual int getPresentationMode(void) = 0; + virtual void getHdmiResolution(uint32_t *width, uint32_t *height) = 0; + virtual uint32_t getHdmiCableStatus() = 0; + virtual uint32_t getHdmiAudioChannel() = 0; + virtual void setHWCCtl(int ctrl, int val) = 0; + virtual void setBootFinished(void) = 0; + + virtual void notifyPSRExit() = 0; + virtual void setHWCDebug(int debug) = 0; +}; + +/* Native Interface */ +class BnExynosHWCService : public BnInterface { +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); + +}; +} +#endif diff --git a/libhwcService/NOTICE b/libhwcService/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhwcService/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libhwc_tiny/Android.mk b/libhwc_tiny/Android.mk new file mode 100644 index 0000000..b32625e --- /dev/null +++ b/libhwc_tiny/Android.mk @@ -0,0 +1,46 @@ +# Copyright (C) 2012 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
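# libhwc_tiny builds a stripped-down hwcomposer.exynos5 HAL from ExynosHWC.cpp
# alone, wired only to the primary display path (libdisplay plus the
# libvppdisplay_tiny headers); there is no HDMI or virtual-display handling in
# this variant. A device that selects this module would typically do so from
# its product makefiles, e.g. (illustrative only, not part of this patch):
#   PRODUCT_PACKAGES += hwcomposer.exynos5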
+ +LOCAL_PATH:= $(call my-dir) +# HAL module implemenation, not prelinked and stored in +# hw/..so + +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_MODULE_RELATIVE_PATH := hw +LOCAL_SHARED_LIBRARIES := liblog libcutils libEGL libGLESv1_CM libhardware \ + libhardware_legacy libutils libsync \ + libexynosv4l2 libexynosutils libdisplay + +LOCAL_CFLAGS += -DLOG_TAG=\"hwcomposer\" +LOCAL_CFLAGS += -DHLOG_CODE=0 + +LOCAL_C_INCLUDES += \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils + + +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay_tiny +LOCAL_SRC_FILES := ExynosHWC.cpp + +LOCAL_MODULE := hwcomposer.exynos5 +LOCAL_MODULE_TAGS := optional + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libhwc_tiny/ExynosHWC.cpp b/libhwc_tiny/ExynosHWC.cpp new file mode 100644 index 0000000..0197784 --- /dev/null +++ b/libhwc_tiny/ExynosHWC.cpp @@ -0,0 +1,475 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS + +#include +#include +#include +#include + +#include "ExynosHWC.h" +#include "ExynosOverlayDisplay.h" +#include "ExynosPrimaryDisplay.h" + + + + + +int exynos5_prepare(hwc_composer_device_1_t *dev, + size_t numDisplays, hwc_display_contents_1_t** displays) +{ + ATRACE_CALL(); + ALOGD("#### exynos5_prepare"); + if (!numDisplays || !displays) + return 0; + + exynos5_hwc_composer_device_1_t *pdev = + (exynos5_hwc_composer_device_1_t *)dev; + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY]; + + pdev->updateCallCnt++; + pdev->update_event_cnt++; + pdev->LastUpdateTimeStamp = systemTime(SYSTEM_TIME_MONOTONIC); + + if (fimd_contents) { + int err = pdev->primaryDisplay->prepare(fimd_contents); + if (err) + return err; + } + + return 0; +} + +int exynos5_set(struct hwc_composer_device_1 *dev, + size_t numDisplays, hwc_display_contents_1_t** displays) +{ + ATRACE_CALL(); + ALOGD("#### exynos5_set"); + if (!numDisplays || !displays) + return 0; + + exynos5_hwc_composer_device_1_t *pdev = + (exynos5_hwc_composer_device_1_t *)dev; + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY]; + int fimd_err = 0; + + if (fimd_contents) { + fimd_err = pdev->primaryDisplay->set(fimd_contents); + } + + + return fimd_err; + +} + +void exynos5_registerProcs(struct hwc_composer_device_1* dev, + hwc_procs_t const* procs) +{ + struct exynos5_hwc_composer_device_1_t* pdev = + (struct exynos5_hwc_composer_device_1_t*)dev; + pdev->procs = procs; +} + +int exynos5_query(struct hwc_composer_device_1* dev, int what, int *value) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + switch (what) { + case HWC_BACKGROUND_LAYER_SUPPORTED: + // we support the background layer + value[0] = 1; + break; + case HWC_VSYNC_PERIOD: + // vsync period in nanosecond + value[0] = pdev->primaryDisplay->mVsyncPeriod; + break; + default: + // unsupported query + return -EINVAL; + } + return 0; +} + +int exynos5_eventControl(struct hwc_composer_device_1 *dev, int __UNUSED__ dpy, + int event, int enabled) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + switch (event) { + case HWC_EVENT_VSYNC: + __u32 val = !!enabled; + pdev->VsyncInterruptStatus = val; + int err = ioctl(pdev->primaryDisplay->mDisplayFd, S3CFB_SET_VSYNC_INT, &val); + if (err < 0) { + ALOGE("vsync ioctl failed"); + return -errno; + } + return 0; + } + + return -EINVAL; +} + + +void handle_vsync_event(struct exynos5_hwc_composer_device_1_t *pdev) +{ + if (!pdev->procs) + return; + + int err = lseek(pdev->vsync_fd, 0, SEEK_SET); + if (err < 0) { + ALOGE("error seeking to vsync timestamp: %s", strerror(errno)); + return; + } + + char buf[4096]; + err = read(pdev->vsync_fd, buf, sizeof(buf)); + if (err < 0) { + ALOGE("error reading vsync timestamp: %s", strerror(errno)); + return; + } + buf[sizeof(buf) - 1] = '\0'; + + errno = 0; + uint64_t timestamp = strtoull(buf, NULL, 0); + if (!errno) + pdev->procs->vsync(pdev->procs, 0, timestamp); +} + +void *hwc_vsync_thread(void *data) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)data; + char uevent_desc[4096]; + memset(uevent_desc, 0, sizeof(uevent_desc)); + + setpriority(PRIO_PROCESS, 0, HAL_PRIORITY_URGENT_DISPLAY); + + uevent_init(); + + char temp[4096]; + int err = read(pdev->vsync_fd, temp, sizeof(temp)); + if (err < 0) { + ALOGE("error reading vsync timestamp: %s", strerror(errno)); + return 
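        // The single read() above is issued before entering the poll loop;
        // if even this initial read of the vsync node fails, the thread
        // gives up here rather than polling a broken fd.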
NULL; + } + + struct pollfd fds[2]; + fds[0].fd = pdev->vsync_fd; + fds[0].events = POLLPRI; + fds[1].fd = uevent_get_fd(); + fds[1].events = POLLIN; + + while (true) { + + int err = poll(fds, 2, -1); + + if (err > 0) { + if (fds[0].revents & POLLPRI) { + handle_vsync_event(pdev); + } + else if (fds[1].revents & POLLIN) { + int len = uevent_next_event(uevent_desc, + sizeof(uevent_desc) - 2); + + } + } + else if (err == -1) { + if (errno == EINTR) + break; + ALOGE("error in vsync thread: %s", strerror(errno)); + } + } + + return NULL; +} + +int exynos5_blank(struct hwc_composer_device_1 *dev, int disp, int blank) +{ + ATRACE_CALL(); + ALOGD("#### exynos5_blank"); + int fence = 0; + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + ALOGI("%s:: disp(%d), blank(%d)", __func__, disp, blank); + switch (disp) { + case HWC_DISPLAY_PRIMARY: { + int fb_blank = blank ? FB_BLANK_POWERDOWN : FB_BLANK_UNBLANK; + if (fb_blank == FB_BLANK_POWERDOWN) { + int fence = pdev->primaryDisplay->clearDisplay(); + if (fence < 0) { + HLOGE("error clearing primary display"); + } else { + close(fence); + } + } + pdev->primaryDisplay->mBlanked = !!blank; + + int err = ioctl(pdev->primaryDisplay->mDisplayFd, FBIOBLANK, fb_blank); + if (err < 0) { + if (errno == EBUSY) + ALOGI("%sblank ioctl failed (display already %sblanked)", + blank ? "" : "un", blank ? "" : "un"); + else + ALOGE("%sblank ioctl failed: %s", blank ? "" : "un", + strerror(errno)); + return -errno; + } + break; + } + default: + return -EINVAL; + + } + + return 0; +} + +void exynos5_dump(hwc_composer_device_1* dev, char *buff, int buff_len) +{ + return; +} + +int exynos5_getDisplayConfigs(struct hwc_composer_device_1 *dev, + int disp, uint32_t *configs, size_t *numConfigs) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + if (*numConfigs == 0) + return 0; + + if (disp == HWC_DISPLAY_PRIMARY) { + configs[0] = 0; + *numConfigs = 1; + return 0; + } + + return -EINVAL; +} + + +int exynos5_getDisplayAttributes(struct hwc_composer_device_1 *dev, + int disp, uint32_t __UNUSED__ config, const uint32_t *attributes, int32_t *values) +{ + struct exynos5_hwc_composer_device_1_t *pdev = + (struct exynos5_hwc_composer_device_1_t *)dev; + + for (int i = 0; attributes[i] != HWC_DISPLAY_NO_ATTRIBUTE; i++) { + if (disp == HWC_DISPLAY_PRIMARY) + values[i] = pdev->primaryDisplay->getDisplayAttributes(attributes[i]); + else { + ALOGE("unknown display type %u", disp); + return -EINVAL; + } + } + + return 0; +} + +int exynos5_close(hw_device_t* device); + +int exynos5_open(const struct hw_module_t *module, const char *name, + struct hw_device_t **device) +{ + int ret = 0; + int refreshRate; + int sw_fd; + + ALOGD("#### exynos5_open START"); + if (strcmp(name, HWC_HARDWARE_COMPOSER)) { + return -EINVAL; + } + + struct exynos5_hwc_composer_device_1_t *dev; + dev = (struct exynos5_hwc_composer_device_1_t *)malloc(sizeof(*dev)); + memset(dev, 0, sizeof(*dev)); + + dev->primaryDisplay = new ExynosPrimaryDisplay(NUM_GSC_UNITS, dev); + + dev->primaryDisplay->mDisplayFd = open("/dev/graphics/fb0", O_RDWR); + if (dev->primaryDisplay->mDisplayFd < 0) { + ALOGE("failed to open framebuffer"); + ret = dev->primaryDisplay->mDisplayFd; + goto err_open_fb; + } + + struct fb_var_screeninfo info; + if (ioctl(dev->primaryDisplay->mDisplayFd, FBIOGET_VSCREENINFO, &info) == -1) { + ALOGE("FBIOGET_VSCREENINFO ioctl failed: %s", strerror(errno)); + ret = -errno; + goto err_ioctl; + } + + if 
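    // info.reserved[0]/[1] are used as a stash for the physical panel size:
    // if they have not been populated yet, the current xres/yres are written
    // there via FBIOPUT_VSCREENINFO, and the code below always reads the
    // panel dimensions (lcd_xres/lcd_yres) back out of reserved[].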
(info.reserved[0] == 0 && info.reserved[1] == 0) { + /* save physical lcd width, height to reserved[] */ + info.reserved[0] = info.xres; + info.reserved[1] = info.yres; + + if (ioctl(dev->primaryDisplay->mDisplayFd, FBIOPUT_VSCREENINFO, &info) == -1) { + ALOGE("FBIOPUT_VSCREENINFO ioctl failed: %s", strerror(errno)); + ret = -errno; + goto err_ioctl; + } + } + + + /* restore physical lcd width, height from reserved[] */ + int lcd_xres, lcd_yres; + lcd_xres = info.reserved[0]; + lcd_yres = info.reserved[1]; + + refreshRate = 1000000000000LLU / + ( + uint64_t( info.upper_margin + info.lower_margin + lcd_yres ) + * ( info.left_margin + info.right_margin + lcd_xres ) + * info.pixclock + ); + + if (refreshRate == 0) { + ALOGW("invalid refresh rate, assuming 60 Hz"); + refreshRate = 60; + } + + dev->primaryDisplay->mXres = lcd_xres; + dev->primaryDisplay->mYres = lcd_yres; + dev->primaryDisplay->mXdpi = 1000 * (lcd_xres * 25.4f) / info.width; + dev->primaryDisplay->mYdpi = 1000 * (lcd_yres * 25.4f) / info.height; + dev->primaryDisplay->mVsyncPeriod = 1000000000 / refreshRate; + + ALOGD("using\n" + "xres = %d px\n" + "yres = %d px\n" + "width = %d mm (%f dpi)\n" + "height = %d mm (%f dpi)\n" + "refresh rate = %d Hz\n", + dev->primaryDisplay->mXres, dev->primaryDisplay->mYres, info.width, dev->primaryDisplay->mXdpi / 1000.0, + info.height, dev->primaryDisplay->mYdpi / 1000.0, refreshRate); + + char devname[MAX_DEV_NAME + 1]; + devname[MAX_DEV_NAME] = '\0'; + + strncpy(devname, VSYNC_DEV_PREFIX, MAX_DEV_NAME); + strlcat(devname, VSYNC_DEV_NAME, MAX_DEV_NAME); + + dev->vsync_fd = open(devname, O_RDONLY); + if (dev->vsync_fd < 0) { + ALOGI("Failed to open vsync attribute at %s", devname); + devname[strlen(VSYNC_DEV_PREFIX)] = '\0'; + strlcat(devname, VSYNC_DEV_MIDDLE, MAX_DEV_NAME); + strlcat(devname, VSYNC_DEV_NAME, MAX_DEV_NAME); + ALOGI("Retrying with %s", devname); + dev->vsync_fd = open(devname, O_RDONLY); + } + + + if (dev->vsync_fd < 0) { + ALOGE("failed to open vsync attribute"); + ret = dev->vsync_fd; + goto err_hdmi_open; + } else { + struct stat st; + if (fstat(dev->vsync_fd, &st) < 0) { + ALOGE("Failed to stat vsync node at %s", devname); + goto err_vsync_stat; + } + + if (!S_ISREG(st.st_mode)) { + ALOGE("vsync node at %s should be a regualar file", devname); + goto err_vsync_stat; + } + } + + dev->base.common.tag = HARDWARE_DEVICE_TAG; + dev->base.common.version = HWC_DEVICE_API_VERSION_1_3; + dev->base.common.module = const_cast(module); + dev->base.common.close = exynos5_close; + + dev->base.prepare = exynos5_prepare; + dev->base.set = exynos5_set; + dev->base.eventControl = exynos5_eventControl; + dev->base.blank = exynos5_blank; + dev->base.query = exynos5_query; + dev->base.registerProcs = exynos5_registerProcs; + dev->base.dump = exynos5_dump; + dev->base.getDisplayConfigs = exynos5_getDisplayConfigs; + dev->base.getDisplayAttributes = exynos5_getDisplayAttributes; + + *device = &dev->base.common; + +#ifdef IP_SERVICE + android::ExynosIPService *mIPService; + mIPService = android::ExynosIPService::getExynosIPService(); + ret = mIPService->createServiceLocked(); + if (ret < 0) + goto err_vsync; +#endif + + + ret = pthread_create(&dev->vsync_thread, NULL, hwc_vsync_thread, dev); + if (ret) { + ALOGE("failed to start vsync thread: %s", strerror(ret)); + ret = -ret; + goto err_vsync; + } + + ALOGD("#### exynos5_open END with success"); + return 0; + +err_vsync: +err_vsync_stat: + close(dev->vsync_fd); +err_hdmi_open: +err_ioctl: + close(dev->primaryDisplay->mDisplayFd); +err_open_fb: 
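// The error labels unwind in reverse order of acquisition: the vsync fd and
// the framebuffer fd are closed above, and the device struct is freed after
// err_get_module below (a label no goto in this function actually targets).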
+err_get_module: + free(dev); + ALOGD("#### exynos5_open END with error"); + return ret; +} + +int exynos5_close(hw_device_t *device) +{ + struct exynos5_hwc_composer_device_1_t *dev = + (struct exynos5_hwc_composer_device_1_t *)device; + pthread_kill(dev->vsync_thread, SIGTERM); + pthread_join(dev->vsync_thread, NULL); + close(dev->vsync_fd); + + return 0; +} + +static struct hw_module_methods_t exynos5_hwc_module_methods = { + open: exynos5_open, +}; + +hwc_module_t HAL_MODULE_INFO_SYM = { + common: { + tag: HARDWARE_MODULE_TAG, + module_api_version: HWC_MODULE_API_VERSION_0_1, + hal_api_version: HARDWARE_HAL_API_VERSION, + id: HWC_HARDWARE_MODULE_ID, + name: "Samsung exynos5 hwcomposer module", + author: "Samsung LSI", + methods: &exynos5_hwc_module_methods, + dso: 0, + reserved: {0}, + } +}; diff --git a/libhwc_tiny/ExynosHWC.h b/libhwc_tiny/ExynosHWC.h new file mode 100644 index 0000000..947c525 --- /dev/null +++ b/libhwc_tiny/ExynosHWC.h @@ -0,0 +1,201 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_EXYNOS_HWC_H_ +#define ANDROID_EXYNOS_HWC_H_ +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + + +#include + + +//#include + + +#include + +#define HWC_REMOVE_DEPRECATED_VERSIONS 1 + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include + +#include "gralloc_priv.h" +#include "exynos_format.h" +#include "exynos_v4l2.h" +#include "ExynosHWCModule.h" +#include "s5p_tvout_v4l2.h" +#include "ExynosRect.h" +#include "videodev2.h" + + +#ifdef __GNUC__ +#define __UNUSED__ __attribute__((__unused__)) +#else +#define __UNUSED__ +#endif + +# + + +#ifdef NUM_AVAILABLE_HW_WINDOWS +/* + * NUM_AVAILABLE_HW_WINDOWS can be optionally provided by + * soc specific header file which is generally present at + * $SoC\libhwcmodule\ExynosHWCModule.h. This is useful when + * same display controller driver is used by SoCs having + * different number of windows. + * S3C_FB_MAX_WIN: max number of hardware windows supported + * by the display controller driver. + * NUM_AVAILABLE_HW_WINDOWS: max windows in the given SoC. 
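 * NUM_HW_WINDOWS, defined right below from one of these two values, in turn
 * sizes the per-window tables in this header (overlay_map[] in
 * exynos5_hwc_post_data_t and FIMD_DMA_CH_IDX[]).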
+ */ +const size_t NUM_HW_WINDOWS = NUM_AVAILABLE_HW_WINDOWS; +#else +const size_t NUM_HW_WINDOWS = S3C_FB_MAX_WIN; +#endif +const size_t NO_FB_NEEDED = NUM_HW_WINDOWS + 1; +#ifndef FIMD_BW_OVERLAP_CHECK +const size_t MAX_NUM_FIMD_DMA_CH = 2; +const int FIMD_DMA_CH_IDX[NUM_HW_WINDOWS] = {0, 1, 1, 1, 0}; +#endif + +#define MAX_DEV_NAME 128 +#ifndef VSYNC_DEV_PREFIX +#define VSYNC_DEV_PREFIX "" +#endif +#ifndef VSYNC_DEV_MIDDLE +#define VSYNC_DEV_MIDDLE "" +#endif + +#ifdef TRY_SECOND_VSYNC_DEV +#ifndef VSYNC_DEV_NAME2 +#define VSYNC_DEV_NAME2 "" +#endif +#ifndef VSYNC_DEV_MIDDLE2 +#define VSYNC_DEV_MIDDLE2 "" +#endif +#endif + +const size_t NUM_GSC_UNITS = 0; + +#define NUM_VIRT_OVER 5 + +#define NUM_VIRT_OVER_HDMI 5 + +#define HWC_PAGE_MISS_TH 5 + +#define S3D_ERROR -1 +#define HDMI_PRESET_DEFAULT V4L2_DV_1080P60 +#define HDMI_PRESET_ERROR -1 + +#define HWC_FIMD_BW_TH 1 /* valid range 1 to 5 */ +#define HWC_FPS_TH 5 /* valid range 1 to 60 */ +#define VSYNC_INTERVAL (1000000000.0 / 60) +#define NUM_CONFIG_STABLE 10 + +#define OTF_SWITCH_THRESHOLD 2 + +#ifndef HLOG_CODE +#define HLOG_CODE 0 +#endif + +#define HLOGD(...) +#define HLOGV(...) +#define HLOGE(...) + +struct exynos5_hwc_composer_device_1_t; + +typedef struct { + uint32_t x; + uint32_t y; + uint32_t w; + uint32_t h; + uint32_t fw; + uint32_t fh; + uint32_t format; + uint32_t rot; + uint32_t cacheable; + uint32_t drmMode; + uint32_t index; +} video_layer_config; + + +struct exynos5_hwc_post_data_t { + int overlay_map[NUM_HW_WINDOWS]; + size_t fb_window; +}; + +struct hwc_ctrl_t { + int max_num_ovly; + int num_of_video_ovly; + int dynamic_recomp_mode; + int skip_static_layer_mode; + int dma_bw_balance_mode; +}; + + + +class ExynosPrimaryDisplay; + + +struct exynos5_hwc_composer_device_1_t { + hwc_composer_device_1_t base; + + ExynosPrimaryDisplay *primaryDisplay; + + int vsync_fd; + + const hwc_procs_t *procs; + pthread_t vsync_thread; + + + + int VsyncInterruptStatus; + int CompModeSwitch; + uint64_t LastUpdateTimeStamp; + uint64_t LastModeSwitchTimeStamp; + int updateCallCnt; + pthread_t update_stat_thread; + int update_event_cnt; + volatile bool update_stat_thread_flag; + + struct hwc_ctrl_t hwc_ctrl; + + int setCount; +}; + + +#endif diff --git a/libhwc_tiny/NOTICE b/libhwc_tiny/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhwc_tiny/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libhwcutils/Android.mk b/libhwcutils/Android.mk new file mode 100644 index 0000000..e70c360 --- /dev/null +++ b/libhwcutils/Android.mk @@ -0,0 +1,106 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
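# libhwcutils: HWC helper library. The conditionals below choose the scaler
# backend (libexynosfimc when BOARD_USES_FIMC, otherwise libexynosgscaler),
# the display include path (libvppdisplay vs. libdisplay, and the matching
# HDMI flavour), and, for virtual-display builds without VPP, additionally
# pull in the FIMG/G2D wrapper and the secure-path libraries (libMcClient,
# libsecurepath).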
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +#LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ +#libexynosv4l2 libsync libion_exynos libmpp +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libexynosv4l2 libsync libion libmpp + +ifeq ($(BOARD_DISABLE_HWC_DEBUG), true) + LOCAL_CFLAGS += -DDISABLE_HWC_DEBUG +endif + +ifeq ($(BOARD_USES_FIMC), true) + LOCAL_SHARED_LIBRARIES += libexynosfimc +else + LOCAL_SHARED_LIBRARIES += libexynosgscaler +endif + +ifeq ($(BOARD_USES_FB_PHY_LINEAR),true) + LOCAL_SHARED_LIBRARIES += libfimg + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libfimg4x + LOCAL_SRC_FILES += ExynosG2DWrapper.cpp +endif +LOCAL_CFLAGS += -DLOG_TAG=\"hwcutils\" +LOCAL_CFLAGS += -DHLOG_CODE=4 +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp + +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay +else +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libdisplay +endif + +ifeq ($(BOARD_HDMI_INCAPABLE), true) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhdmi_dummy +else +ifeq ($(BOARD_USES_VPP), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvpphdmi +else +ifeq ($(BOARD_USES_NEW_HDMI), true) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhdmi +else +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libhdmi_legacy +endif +endif +endif + +LOCAL_SRC_FILES += \ + ExynosHWCUtils.cpp + +ifeq ($(BOARD_USES_VPP), true) +LOCAL_SRC_FILES += \ + ExynosMPPv2.cpp +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule +else +LOCAL_SRC_FILES += \ + ExynosMPP.cpp +endif + +ifeq ($(BOARD_USES_VIRTUAL_DISPLAY), true) +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../libvppvirtualdisplay +else +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../libvirtualdisplay \ + $(TOP)/hardware/samsung_slsi/exynos/libfimg4x +LOCAL_SHARED_LIBRARIES += libfimg +LOCAL_SHARED_LIBRARIES += libMcClient +LOCAL_STATIC_LIBRARIES := libsecurepath +LOCAL_SRC_FILES += ExynosG2DWrapper.cpp +endif +endif + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libhwcutils + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libhwcutils/ExynosG2DWrapper.cpp b/libhwcutils/ExynosG2DWrapper.cpp new file mode 100644 index 0000000..280679c --- /dev/null +++ b/libhwcutils/ExynosG2DWrapper.cpp @@ -0,0 +1,843 @@ +//#define LOG_NDEBUG 0 +#undef LOG_TAG +#define LOG_TAG "ExynosG2DWrapper" +#include "ExynosG2DWrapper.h" +#include "ExynosHWCUtils.h" +#include "ExynosOverlayDisplay.h" +#ifdef USES_VIRTUAL_DISPLAY +#include "ExynosExternalDisplay.h" +#include "ExynosVirtualDisplay.h" +#else +#include "ExynosExternalDisplay.h" +#endif + +#define FIMG2D_WFD_DEFAULT (G2D_LV1) + +int formatValueHAL2G2D(int hal_format, + color_format *g2d_format, + pixel_order *g2d_order, + uint32_t *g2d_bpp) +{ + *g2d_format = MSK_FORMAT_END; + *g2d_order = ARGB_ORDER_END; + *g2d_bpp = 0; + + switch (hal_format) { + /* 16bpp */ + case 
HAL_PIXEL_FORMAT_RGB_565: + *g2d_format = CF_RGB_565; + *g2d_order = AX_RGB; + *g2d_bpp = 2; + break; + /* 32bpp */ + case HAL_PIXEL_FORMAT_RGBX_8888: + *g2d_format = CF_XRGB_8888; + *g2d_order = AX_BGR; + *g2d_bpp = 4; + break; +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: + *g2d_format = CF_XRGB_8888; + *g2d_order = AX_RGB; + *g2d_bpp = 4; + break; +#endif + case HAL_PIXEL_FORMAT_BGRA_8888: + *g2d_format = CF_ARGB_8888; + *g2d_order = AX_RGB; + *g2d_bpp = 4; + break; + case HAL_PIXEL_FORMAT_RGBA_8888: + *g2d_format = CF_ARGB_8888; + *g2d_order = AX_BGR; + *g2d_bpp = 4; + break; + case HAL_PIXEL_FORMAT_EXYNOS_ARGB_8888: + *g2d_format = CF_ARGB_8888; + *g2d_order = BGR_AX; + *g2d_bpp = 4; + break; + /* 12bpp */ + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + *g2d_format = CF_YCBCR_420; + *g2d_order = P2_CRCB; + *g2d_bpp = 1; + break; + default: + ALOGE("%s: no matching color format(0x%x): failed", + __func__, hal_format); + return -1; + break; + } + return 0; +} + +#ifdef USES_VIRTUAL_DISPLAY +ExynosG2DWrapper::ExynosG2DWrapper(ExynosOverlayDisplay *display, + ExynosExternalDisplay *externalDisplay, + ExynosVirtualDisplay *virtualDisplay) +#else +ExynosG2DWrapper::ExynosG2DWrapper(ExynosOverlayDisplay *display, ExynosExternalDisplay *hdmi) +#endif +{ + mDisplay = display; +#ifdef USES_VIRTUAL_DISPLAY + mExternalDisplay = externalDisplay; + mVirtualDisplay = virtualDisplay; + mAllocSize = 0; +#else + mExternalDisplay = hdmi; +#endif +} + +ExynosG2DWrapper::~ExynosG2DWrapper() +{ +} + +int ExynosG2DWrapper::runCompositor(hwc_layer_1_t &src_layer, private_handle_t *dst_handle, + uint32_t transform, uint32_t global_alpha, unsigned long solid, + blit_op mode, bool force_clear, unsigned long srcAddress, + unsigned long dstAddress, int is_lcd) +{ + int ret = 0; + unsigned long srcYAddress = 0; + unsigned long srcCbCrAddress = 0; + unsigned long dstYAddress = 0; + unsigned long dstCbCrAddress =0; + + ExynosRect srcImgRect, dstImgRect; + + fimg2d_blit BlitParam; + fimg2d_param g2d_param; + rotation g2d_rotation; + + fimg2d_addr srcYAddr; + fimg2d_addr srcCbCrAddr; + fimg2d_image srcImage; + fimg2d_rect srcRect; + + fimg2d_addr dstYAddr; + fimg2d_addr dstCbCrAddr; + fimg2d_image dstImage; + fimg2d_rect dstRect; + + fimg2d_scale Scaling; + fimg2d_repeat Repeat; + fimg2d_bluscr Bluscr; + fimg2d_clip Clipping; + + pixel_order g2d_order; + color_format g2d_format; + addr_space addr_type = ADDR_USER; + + uint32_t srcG2d_bpp, dstG2d_bpp; + uint32_t srcImageSize, dstImageSize; + bool src_ion_mapped = false; + bool dst_ion_mapped = false; + + private_handle_t *src_handle = private_handle_t::dynamicCast(src_layer.handle); + + if (!force_clear) { + srcImgRect.x = src_layer.sourceCropf.left; + srcImgRect.y = src_layer.sourceCropf.top; + srcImgRect.w = WIDTH(src_layer.sourceCropf); + srcImgRect.h = HEIGHT(src_layer.sourceCropf); + srcImgRect.fullW = src_handle->stride; + srcImgRect.fullH = src_handle->vstride; + srcImgRect.colorFormat = src_handle->format; + } + +#ifndef USES_VIRTUAL_DISPLAY + int w, h; + { + w = mExternalDisplay->mXres; + h = mExternalDisplay->mYres; + } +#endif + + if (is_lcd) { + dstImgRect.x = 0; + dstImgRect.y = 0; + dstImgRect.w = WIDTH(src_layer.displayFrame); + dstImgRect.h = HEIGHT(src_layer.displayFrame); + dstImgRect.fullW = dst_handle->stride; + dstImgRect.fullH = dst_handle->vstride; + dstImgRect.colorFormat = 
dst_handle->format; + } else { + if (force_clear) { + dstImgRect.x = 0; + dstImgRect.y = 0; + dstImgRect.w = dst_handle->width; + dstImgRect.h = dst_handle->height; + } else { + dstImgRect.x = src_layer.displayFrame.left; + dstImgRect.y = src_layer.displayFrame.top; + dstImgRect.w = WIDTH(src_layer.displayFrame); + dstImgRect.h = HEIGHT(src_layer.displayFrame); + } +#ifdef USES_VIRTUAL_DISPLAY + dstImgRect.fullW = dst_handle->stride; + dstImgRect.fullH = dst_handle->vstride; +#else + dstImgRect.fullW = w; + dstImgRect.fullH = h; +#endif + dstImgRect.colorFormat = dst_handle->format; + } + + g2d_rotation = rotateValueHAL2G2D(transform); + + ALOGV("%s: \n" + "s_fw %d s_fh %d s_w %d s_h %d s_x %d s_y %d s_f %x address %x \n" + "d_fw %d d_fh %d d_w %d d_h %d d_x %d d_y %d d_f %x address %x \n rot %d ", + __func__, + srcImgRect.fullW, srcImgRect.fullH, srcImgRect.w, srcImgRect.h, + srcImgRect.x, srcImgRect.y, srcImgRect.colorFormat, src_handle->fd, + dstImgRect.fullW, dstImgRect.fullH, dstImgRect.w, dstImgRect.h, + dstImgRect.x, dstImgRect.y, dstImgRect.colorFormat, dst_handle->fd, transform); + + if (!force_clear && src_handle->fd >= 0) { + int rotatedDstW = dstImgRect.w; + int rotatedDstH = dstImgRect.h; + if ((g2d_rotation == ROT_90) || (g2d_rotation == ROT_270)) { + if ((srcImgRect.w != dstImgRect.h) || (srcImgRect.h != dstImgRect.w)) { + rotatedDstW = dstImgRect.h; + rotatedDstH = dstImgRect.w; + } + } else { + if ((srcImgRect.w != dstImgRect.w) || (srcImgRect.h != dstImgRect.h)) { + rotatedDstW = dstImgRect.w; + rotatedDstH = dstImgRect.h; + } + } + + if (formatValueHAL2G2D(srcImgRect.colorFormat, &g2d_format, &g2d_order, &srcG2d_bpp) < 0) { + ALOGE("%s: formatValueHAL2G2D() failed", __func__); + return -1; + } + srcImageSize = srcImgRect.fullW*srcImgRect.fullH; + if (srcAddress) { + srcYAddress = srcAddress; + } else { + srcYAddress = (unsigned long)mmap(NULL, srcImageSize*srcG2d_bpp, PROT_READ | PROT_WRITE, MAP_SHARED, src_handle->fd, 0); + if (srcYAddress == (unsigned long)MAP_FAILED) { + ALOGE("%s: failed to mmap for src-Y address", __func__); + return -ENOMEM; + } + src_ion_mapped = true; + } + + srcYAddr.type = addr_type; + srcYAddr.start = (unsigned long)srcYAddress; + srcCbCrAddr.type = addr_type; + srcCbCrAddr.start = 0; + + srcRect.x1 = srcImgRect.x; + srcRect.y1 = srcImgRect.y; + srcRect.x2 = srcImgRect.x + srcImgRect.w; + srcRect.y2 = srcImgRect.y + srcImgRect.h; + srcImage.width = srcImgRect.fullW; + srcImage.height = srcImgRect.fullH; + srcImage.stride = srcImgRect.fullW*srcG2d_bpp; + srcImage.order = g2d_order; + srcImage.fmt = g2d_format; + srcImage.addr = srcYAddr; + srcImage.plane2 = srcCbCrAddr; + srcImage.rect = srcRect; + srcImage.need_cacheopr = false; + + Scaling.mode = SCALING_BILINEAR; + Scaling.src_w= srcImgRect.w; + Scaling.src_h= srcImgRect.h; + Scaling.dst_w= rotatedDstW; + Scaling.dst_h= rotatedDstH; + } else { + memset(&srcImage, 0, sizeof(srcImage)); + Scaling.mode = NO_SCALING; + Scaling.src_w= 0; + Scaling.src_h= 0; + Scaling.dst_w= 0; + Scaling.dst_h= 0; + } + + if (dst_handle->fd >= 0) { + if (formatValueHAL2G2D(dstImgRect.colorFormat, &g2d_format, &g2d_order, &dstG2d_bpp) < 0) { + ALOGE("%s: formatValueHAL2G2D() failed", __func__); + return -1; + } + dstImageSize = dstImgRect.fullW*dstImgRect.fullH; + if (dstAddress) { + dstYAddress = dstAddress; + } else { +#ifdef USES_VIRTUAL_DISPLAY + if (mVirtualDisplay == NULL) { + dstYAddress = (unsigned long)mmap(NULL, dstImageSize*dstG2d_bpp, PROT_READ | PROT_WRITE, MAP_SHARED, dst_handle->fd, 0); + if 
(dstYAddress == (unsigned long)MAP_FAILED) { + ALOGE("%s: failed to mmap for dst-Y address", __func__); + return -ENOMEM; + } + } else { + dstYAddress = (unsigned long)mmap(NULL, dstImageSize*dstG2d_bpp, PROT_READ | PROT_WRITE, MAP_SHARED, dst_handle->fd, 0); + if (dstYAddress == (unsigned long)MAP_FAILED) { + ALOGE("%s: failed to mmap for dst-Y address", __func__); + return -ENOMEM; + } + + if (dst_handle->fd1 > 0) { + dstCbCrAddress = (unsigned long)mmap(NULL, dstImageSize*dstG2d_bpp / 2, PROT_READ | PROT_WRITE, MAP_SHARED, dst_handle->fd1, 0); + if (dstCbCrAddress == (unsigned long)MAP_FAILED) { + ALOGE("%s: failed to mmap for dst-CbCr address", __func__); + munmap((void *)dstYAddress, dstImageSize*dstG2d_bpp); + return -ENOMEM; + } + } + } +#else + dstYAddress = (unsigned long)mmap(NULL, dstImageSize*dstG2d_bpp, PROT_READ | PROT_WRITE, MAP_SHARED, dst_handle->fd, 0); + if (dstYAddress == (unsigned long)MAP_FAILED) { + ALOGE("%s: failed to mmap for dst-Y address", __func__); + return -ENOMEM; + } +#endif + dst_ion_mapped = true; + } + + dstYAddr.type = addr_type; + dstYAddr.start = (unsigned long)dstYAddress; + dstCbCrAddr.type = addr_type; + dstCbCrAddr.start = (unsigned long)dstCbCrAddress; + + if (force_clear) { + dstRect.x1 = 0; + dstRect.y1 = 0; + dstRect.x2 = dstImgRect.fullW; + dstRect.y2 = dstImgRect.fullH; + } else { + dstRect.x1 = dstImgRect.x; + dstRect.y1 = dstImgRect.y; + dstRect.x2 = dstImgRect.x + dstImgRect.w; + dstRect.y2 = dstImgRect.y + dstImgRect.h; + } + + dstImage.width = dstImgRect.fullW; + dstImage.height = dstImgRect.fullH; + dstImage.stride = dstImgRect.fullW*dstG2d_bpp; + dstImage.order = g2d_order; + dstImage.fmt = g2d_format; + dstImage.addr = dstYAddr; + dstImage.plane2 = dstCbCrAddr; + dstImage.rect = dstRect; + dstImage.need_cacheopr = false; + } else { + memset(&dstImage, 0, sizeof(dstImage)); + } + + Repeat.mode = NO_REPEAT; + Repeat.pad_color = 0; + Bluscr.mode = OPAQUE; + Bluscr.bs_color = 0; + Bluscr.bg_color = 0; + Clipping.enable = false; + Clipping.x1 = 0; + Clipping.y1 = 0; + Clipping.x2 = 0; + Clipping.y2 = 0; + + g2d_param.solid_color = solid; + g2d_param.g_alpha = global_alpha; + g2d_param.dither = false; + g2d_param.rotate = g2d_rotation; + g2d_param.premult = PREMULTIPLIED; + g2d_param.scaling = Scaling; + g2d_param.repeat = Repeat; + g2d_param.bluscr = Bluscr; + g2d_param.clipping = Clipping; + + if (force_clear) { + BlitParam.op = mode; + BlitParam.param = g2d_param; + BlitParam.src = NULL; + BlitParam.msk = NULL; + BlitParam.tmp = NULL; + BlitParam.dst = &dstImage; + BlitParam.sync = BLIT_SYNC; + BlitParam.seq_no = 0; + BlitParam.qos_lv = FIMG2D_WFD_DEFAULT; + } else { + BlitParam.op = mode; + BlitParam.param = g2d_param; + BlitParam.src = &srcImage; + BlitParam.msk = NULL; + BlitParam.tmp = NULL; + BlitParam.dst = &dstImage; + BlitParam.sync = BLIT_SYNC; + BlitParam.seq_no = 0; + BlitParam.qos_lv = FIMG2D_WFD_DEFAULT; + } + + ret = stretchFimgApi(&BlitParam); + + if (src_ion_mapped) + munmap((void *)srcYAddress, srcImageSize*srcG2d_bpp); + + if (dst_ion_mapped) { +#ifdef USES_VIRTUAL_DISPLAY + if (mVirtualDisplay == NULL) + munmap((void *)dstYAddress, dstImageSize*dstG2d_bpp); + else { + munmap((void *)dstYAddress, dstImageSize*dstG2d_bpp); + munmap((void *)dstCbCrAddress, dstImageSize*dstG2d_bpp / 2); + } +#else + munmap((void *)dstYAddress, dstImageSize*dstG2d_bpp); +#endif + } + + if (ret < 0) { + ALOGE("%s: stretch failed", __func__); + return -1; + } + + return 0; +} + +#ifdef USES_VIRTUAL_DISPLAY +int 
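// Secure-path counterpart of runCompositor(): rather than mmap()ing ion fds
// into user addresses, it passes the secure_handle/dst_handle fds to the
// blitter as ADDR_PHYS addresses and issues the blit through G2DDRM_Blit().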
ExynosG2DWrapper::runSecureCompositor(hwc_layer_1_t &src_layer, + private_handle_t *dst_handle, + private_handle_t *secure_handle, + uint32_t global_alpha, unsigned long solid, + blit_op mode, bool force_clear) +{ + int ret = 0; + unsigned long srcYAddress = 0; + + ExynosRect srcImgRect, dstImgRect; + + fimg2d_blit_raw BlitParam; + fimg2d_param g2d_param; + rotation g2d_rotation = ORIGIN; + + fimg2d_addr srcYAddr; + fimg2d_addr srcCbCrAddr; + fimg2d_image srcImage; + fimg2d_rect srcRect; + + fimg2d_addr dstYAddr; + fimg2d_addr dstCbCrAddr; + fimg2d_image dstImage; + fimg2d_rect dstRect; + + fimg2d_scale Scaling; + fimg2d_repeat Repeat; + fimg2d_bluscr Bluscr; + fimg2d_clip Clipping; + + pixel_order g2d_order; + color_format g2d_format; + addr_space addr_type = ADDR_PHYS; + + uint32_t srcG2d_bpp, dstG2d_bpp; + uint32_t srcImageSize, dstImageSize; + bool src_ion_mapped = false; + bool dst_ion_mapped = false; + + private_handle_t *src_handle = private_handle_t::dynamicCast(src_layer.handle); + + srcImgRect.x = src_layer.sourceCropf.left; + srcImgRect.y = src_layer.sourceCropf.top; + srcImgRect.w = WIDTH(src_layer.sourceCropf); + srcImgRect.h = HEIGHT(src_layer.sourceCropf); + srcImgRect.fullW = src_handle->stride; + srcImgRect.fullH = src_handle->vstride; + srcImgRect.colorFormat = src_handle->format; + + if (!secure_handle) { + ALOGE("%s: secure g2d buffer handle is NULL", __func__); + return -1; + } + + if (force_clear) { + dstImgRect.x = 0; + dstImgRect.y = 0; + dstImgRect.w = mVirtualDisplay->mWidth; + dstImgRect.h = mVirtualDisplay->mHeight; + dstImgRect.fullW = mVirtualDisplay->mWidth; + dstImgRect.fullH = mVirtualDisplay->mHeight; + dstImgRect.colorFormat = dst_handle->format; + } else { + dstImgRect.x = src_layer.displayFrame.left; + dstImgRect.y = src_layer.displayFrame.top; + dstImgRect.w = WIDTH(src_layer.displayFrame); + dstImgRect.h = HEIGHT(src_layer.displayFrame); + dstImgRect.fullW = mVirtualDisplay->mWidth; + dstImgRect.fullH = mVirtualDisplay->mHeight; + dstImgRect.colorFormat = dst_handle->format; + } + + ALOGV("%s: \n" + "s_fw %d s_fh %d s_w %d s_h %d s_x %d s_y %d s_f %x address %x \n" + "d_fw %d d_fh %d d_w %d d_h %d d_x %d d_y %d d_f %x address %x \n rot %d ", + __func__, + srcImgRect.fullW, srcImgRect.fullH, srcImgRect.w, srcImgRect.h, + srcImgRect.x, srcImgRect.y, srcImgRect.colorFormat, secure_handle->fd, + dstImgRect.fullW, dstImgRect.fullH, dstImgRect.w, dstImgRect.h, + dstImgRect.x, dstImgRect.y, dstImgRect.colorFormat, dst_handle->fd, g2d_rotation); + + if (secure_handle->fd >= 0) { + int rotatedDstW = dstImgRect.w; + int rotatedDstH = dstImgRect.h; + + if (formatValueHAL2G2D(srcImgRect.colorFormat, &g2d_format, &g2d_order, &srcG2d_bpp) < 0) { + ALOGE("%s: formatValueHAL2G2D() failed", __func__); + return -1; + } + srcImageSize = srcImgRect.fullW*srcImgRect.fullH; + + srcYAddr.type = addr_type; + srcYAddr.start = (unsigned long)secure_handle->fd; + srcCbCrAddr.type = addr_type; + srcCbCrAddr.start = 0; + + srcRect.x1 = srcImgRect.x; + srcRect.y1 = srcImgRect.y; + srcRect.x2 = srcImgRect.x + srcImgRect.w; + srcRect.y2 = srcImgRect.y + srcImgRect.h; + srcImage.width = srcImgRect.fullW; + srcImage.height = srcImgRect.fullH; + srcImage.stride = srcImgRect.fullW*srcG2d_bpp; + srcImage.order = g2d_order; + srcImage.fmt = g2d_format; + srcImage.addr = srcYAddr; + srcImage.plane2 = srcCbCrAddr; + srcImage.rect = srcRect; + srcImage.need_cacheopr = false; + + Scaling.mode = SCALING_BILINEAR; + Scaling.src_w= srcImgRect.w; + Scaling.src_h= srcImgRect.h; + Scaling.dst_w= 
rotatedDstW; + Scaling.dst_h= rotatedDstH; + } else { + memset(&srcImage, 0, sizeof(srcImage)); + Scaling.mode = NO_SCALING; + Scaling.src_w= 0; + Scaling.src_h= 0; + Scaling.dst_w= 0; + Scaling.dst_h= 0; + } + + if (dst_handle->fd >= 0) { + if (formatValueHAL2G2D(dstImgRect.colorFormat, &g2d_format, &g2d_order, &dstG2d_bpp) < 0) { + ALOGE("%s: formatValueHAL2G2D() failed", __func__); + return -1; + } + dstImageSize = dstImgRect.fullW*dstImgRect.fullH; + dstYAddr.type = addr_type; + dstYAddr.start = (unsigned long)dst_handle->fd; + dstCbCrAddr.type = addr_type; + dstCbCrAddr.start = (unsigned long)dst_handle->fd1; + + if (force_clear) { + dstRect.x1 = 0; + dstRect.y1 = 0; + dstRect.x2 = dstImgRect.fullW; + dstRect.y2 = dstImgRect.fullH; + } else { + dstRect.x1 = dstImgRect.x; + dstRect.y1 = dstImgRect.y; + dstRect.x2 = dstImgRect.x + dstImgRect.w; + dstRect.y2 = dstImgRect.y + dstImgRect.h; + } + + dstImage.width = dstImgRect.fullW; + dstImage.height = dstImgRect.fullH; + dstImage.stride = dstImgRect.fullW*dstG2d_bpp; + dstImage.order = g2d_order; + dstImage.fmt = g2d_format; + dstImage.addr = dstYAddr; + dstImage.plane2 = dstCbCrAddr; + dstImage.rect = dstRect; + dstImage.need_cacheopr = false; + } else { + memset(&dstImage, 0, sizeof(dstImage)); + } + + Repeat.mode = NO_REPEAT; + Repeat.pad_color = 0; + Bluscr.mode = OPAQUE; + Bluscr.bs_color = 0; + Bluscr.bg_color = 0; + Clipping.enable = false; + Clipping.x1 = 0; + Clipping.y1 = 0; + Clipping.x2 = 0; + Clipping.y2 = 0; + + g2d_param.solid_color = solid; + g2d_param.g_alpha = global_alpha; + g2d_param.dither = false; + g2d_param.rotate = g2d_rotation; + g2d_param.premult = PREMULTIPLIED; + g2d_param.scaling = Scaling; + g2d_param.repeat = Repeat; + g2d_param.bluscr = Bluscr; + g2d_param.clipping = Clipping; + + memset(&BlitParam, 0, sizeof(BlitParam)); + BlitParam.op = mode; + memcpy(&BlitParam.param, &g2d_param, sizeof(g2d_param)); + memcpy(&BlitParam.src, &srcImage, sizeof(srcImage)); + memcpy(&BlitParam.dst, &dstImage, sizeof(dstImage)); + BlitParam.sync = BLIT_SYNC; + BlitParam.seq_no = 0; + BlitParam.qos_lv = FIMG2D_WFD_DEFAULT; + + ret = G2DDRM_Blit(&BlitParam); + + if (ret != 0) { + ALOGE("%s: G2DDRM_Blit failed(ret=%d)", __func__, ret); + return -1; + } + + return 0; +} + +bool ExynosG2DWrapper::InitSecureG2D() +{ + if (mVirtualDisplay == NULL) + return false; + + int ret = 0; + if (mVirtualDisplay->mPhysicallyLinearBuffer == NULL) { + ALOGI("initialize secure G2D"); + ret = G2DDRM_Initialize(); + if (ret != 0) { + ALOGE("%s: G2DDRM_Initialize failed, ret %d", __func__, ret); + return false; + } + + int dst_stride; + int usage = GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | + GRALLOC_USAGE_HW_COMPOSER | + GRALLOC_USAGE_PROTECTED | + GRALLOC_USAGE_PHYSICALLY_LINEAR | + GRALLOC_USAGE_PRIVATE_NONSECURE; + alloc_device_t* allocDevice = mVirtualDisplay->mAllocDevice; + int ret = allocDevice->alloc(allocDevice, + mVirtualDisplay->mWidth, mVirtualDisplay->mHeight, + mVirtualDisplay->mGLESFormat, usage, &mVirtualDisplay->mPhysicallyLinearBuffer, + &dst_stride); + if (ret < 0) { + ALOGE("failed to allocate secure g2d buffer: %s", strerror(-ret)); + G2DDRM_Terminate(); + return false; + } else { + mAllocSize = mVirtualDisplay->mWidth * mVirtualDisplay->mHeight * 4; + private_handle_t *handle = private_handle_t::dynamicCast( + mVirtualDisplay->mPhysicallyLinearBuffer); + mVirtualDisplay->mPhysicallyLinearBufferAddr = (unsigned long)mmap(NULL, mAllocSize, PROT_READ | PROT_WRITE, MAP_SHARED, handle->fd, 0); + if 
(mVirtualDisplay->mPhysicallyLinearBufferAddr == (unsigned long)MAP_FAILED) { + ALOGE("%s: failed to mmap for virtual display buffer", __func__); + return -ENOMEM; + } + ALOGI("allocated secure g2d input buffer: 0x%x", mVirtualDisplay->mPhysicallyLinearBufferAddr); + } + } + return true; + +} + +bool ExynosG2DWrapper::TerminateSecureG2D() +{ + if (mVirtualDisplay == NULL) + return false; + + int ret = 0; + if (mVirtualDisplay->mPhysicallyLinearBuffer) { + ALOGI("free g2d input buffer: 0x%x", mVirtualDisplay->mPhysicallyLinearBufferAddr); + munmap((void *)mVirtualDisplay->mPhysicallyLinearBufferAddr, mAllocSize); + mVirtualDisplay->mPhysicallyLinearBufferAddr = 0; + + alloc_device_t* allocDevice = mVirtualDisplay->mAllocDevice; + allocDevice->free(allocDevice, mVirtualDisplay->mPhysicallyLinearBuffer); + mVirtualDisplay->mPhysicallyLinearBuffer = NULL; + + ALOGI("terminate secure G2D"); + ret = G2DDRM_Terminate(); + if (ret != 0) { + ALOGE("%s: G2DDRM_Terminate failed, ret %d", __func__, ret); + } + } + return 0; +} +#endif + +void ExynosG2DWrapper::exynos5_cleanup_g2d(int force) +{ +#ifdef G2D_COMPOSITION + exynos5_g2d_data_t &mG2d = mDisplay->mG2d; + + if (!mDisplay->mG2dMemoryAllocated && !force) + return; + + for (int i = 0; i < (int)NUM_HW_WIN_FB_PHY; i++) { + mDisplay->mG2dCurrentBuffer[i] = 0; + mDisplay->mLastG2dLayerHandle[i] = 0; + for (int j = 0; j < NUM_GSC_DST_BUFS; j++) { + if (mDisplay->mWinBufFence[i][j] >= 0) { + sync_wait(mDisplay->mWinBufFence[i][j], 1000); + close(mDisplay->mWinBufFence[i][j]); + mDisplay->mWinBufFence[i][j] = -1; + } + } + } + + memset(&mG2d, 0, sizeof(mG2d)); + mDisplay->mG2dLayers = 0; + mDisplay->mG2dComposition = 0; + + for (int i = 0; i < mDisplay->mAllocatedLayers; i++) { + for (int j = 0; j < (int)NUM_GSC_DST_BUFS; j++) { + if (mDisplay->mWinBufVirtualAddress[i][j]) { + munmap((void *)mDisplay->mWinBufVirtualAddress[i][j], mDisplay->mWinBufMapSize[i]); + mDisplay->mWinBufVirtualAddress[i][j] = 0; + } + } + } + + for (int i = 0; i < mDisplay->mAllocatedLayers; i++) { + for (int j = 0; j < (int)NUM_GSC_DST_BUFS; j++) { + if (mDisplay->mWinBuf[i][j]) { + mDisplay->mAllocDevice->free(mDisplay->mAllocDevice, mDisplay->mWinBuf[i][j]); + mDisplay->mWinBuf[i][j] = NULL; + } + } + } + mDisplay->mAllocatedLayers = 0; + mDisplay->mG2dMemoryAllocated = 0; +#endif +} + +int ExynosG2DWrapper::exynos5_g2d_buf_alloc(hwc_display_contents_1_t* contents) +{ +#ifdef G2D_COMPOSITION + int w, h; + int dst_stride; + int format = HAL_PIXEL_FORMAT_RGBX_8888; + int usage; + + if (mDisplay->mG2dMemoryAllocated) + return 0; + + usage = GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | GRALLOC_USAGE_PHYSICALLY_LINEAR | + GRALLOC_USAGE_HW_COMPOSER; + usage |= GRALLOC_USAGE_PROTECTED; + usage &= ~GRALLOC_USAGE_PRIVATE_NONSECURE; + + for (int i = 0; i < mDisplay->mG2dLayers; i++) { + int lay_idx = mDisplay->mG2d.ovly_lay_idx[i]; + hwc_layer_1_t &layer = contents->hwLayers[lay_idx]; + w = WIDTH(layer.displayFrame); + h = HEIGHT(layer.displayFrame); + + for (int j = 0; j < (int)NUM_GSC_DST_BUFS; j++) { + int ret = mDisplay->mAllocDevice->alloc(mDisplay->mAllocDevice, w, h, + format, usage, &mDisplay->mWinBuf[i][j], + &dst_stride); + if (ret < 0) { + ALOGE("failed to allocate win %d buf %d buffer [w %d h %d f %x]: %s", + i, j, w, h, format, strerror(-ret)); + goto G2D_BUF_ALLOC_FAIL; + } + } + mDisplay->mWinBufMapSize[i] = dst_stride * h * 4 + 0x8000; + } + mDisplay->mAllocatedLayers = mDisplay->mG2dLayers; + + unsigned long vir_addr; + for (int i = 0; i < 
mDisplay->mG2dLayers; i++) { + for (int j = 0; j < (int)NUM_GSC_DST_BUFS; j++) { + mDisplay->mWinBufFence[i][j] = -1; + private_handle_t *buf_handle = private_handle_t::dynamicCast(mDisplay->mWinBuf[i][j]); + vir_addr = (unsigned long) mmap(NULL, mDisplay->mWinBufMapSize[i], PROT_READ | PROT_WRITE, MAP_SHARED, buf_handle->fd, 0); + if (vir_addr != (unsigned long)MAP_FAILED) { + mDisplay->mWinBufVirtualAddress[i][j] = vir_addr; + } else { + ALOGE("Failed to map win %d buf %d buffer", i, j); + goto G2D_BUF_ALLOC_FAIL; + } + } + } + + mDisplay->mG2dMemoryAllocated = 1; + return 0; + +G2D_BUF_ALLOC_FAIL: + mDisplay->mG2dMemoryAllocated = 1; + /* memory released from the caller */ + +#endif + return 1; +} + +int ExynosG2DWrapper::exynos5_config_g2d(hwc_layer_1_t &layer, private_handle_t *dst_handle, s3c_fb_win_config &cfg, int win_idx_2d, int win_idx) +{ +#ifdef G2D_COMPOSITION + int ret = 0; + int cur_buf = mDisplay->mG2dCurrentBuffer[win_idx_2d]; + int prev_buf; + private_handle_t *handle; + int fence = -1; + private_handle_t *src_handle = private_handle_t::dynamicCast(layer.handle); + + if (mDisplay->mLastG2dLayerHandle[win_idx_2d] == (uint32_t)layer.handle) { + prev_buf = (cur_buf + NUM_GSC_DST_BUFS - 1) % NUM_GSC_DST_BUFS; + if (mDisplay->mWinBufFence[win_idx_2d][prev_buf] >= 0) { + close(mDisplay->mWinBufFence[win_idx_2d][prev_buf] ); + mDisplay->mWinBufFence[win_idx_2d][prev_buf] = -1; + } + mDisplay->mG2dCurrentBuffer[win_idx_2d] = prev_buf; + handle = private_handle_t::dynamicCast(mDisplay->mWinBuf[win_idx_2d][prev_buf]); + memcpy(dst_handle, handle, sizeof(*dst_handle)); + dst_handle->format = src_handle->format; + } else { + handle = private_handle_t::dynamicCast(mDisplay->mWinBuf[win_idx_2d][cur_buf]); + memcpy(dst_handle, handle, sizeof(*dst_handle)); + dst_handle->format = src_handle->format; + if (mDisplay->mWinBufFence[win_idx_2d][cur_buf] >= 0) { + sync_wait(mDisplay->mWinBufFence[win_idx_2d][cur_buf], 1000); + close(mDisplay->mWinBufFence[win_idx_2d][cur_buf] ); + mDisplay->mWinBufFence[win_idx_2d][cur_buf] = -1; + } + + if (layer.acquireFenceFd >= 0) { + sync_wait(layer.acquireFenceFd, 1000); + } + + if (dst_handle->format == HAL_PIXEL_FORMAT_RGBX_8888) { + ret = runCompositor(layer, dst_handle, 0, 0xff, 0, BLIT_OP_SRC, false, 0, + mDisplay->mWinBufVirtualAddress[win_idx_2d][cur_buf] + 0x8000, 1); + } else { + ret = runCompositor(layer, dst_handle, 0, 0xff, 0, BLIT_OP_SRC, false, 0, + mDisplay->mWinBufVirtualAddress[win_idx_2d][cur_buf], 1); + } + if (ret < 0) + ALOGE("%s:runCompositor: Failed", __func__); + } + + mDisplay->mLastG2dLayerHandle[win_idx_2d] = (uint32_t)layer.handle; + + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + + return fence; +#else + return -1; +#endif +} + diff --git a/libhwcutils/ExynosG2DWrapper.h b/libhwcutils/ExynosG2DWrapper.h new file mode 100644 index 0000000..7459f3e --- /dev/null +++ b/libhwcutils/ExynosG2DWrapper.h @@ -0,0 +1,65 @@ +#ifndef EXYNOS_G2D_WRAPPER_H +#define EXYNOS_G2D_WRAPPER_H + +#include "ExynosHWC.h" + +#ifdef USES_VIRTUAL_DISPLAY +#include "FimgApi.h" +#include "sec_g2ddrm.h" +#endif + +class ExynosOverlayDisplay; +class ExynosExternalDisplay; +#ifdef USES_VIRTUAL_DISPLAY +class ExynosVirtualDisplay; +#endif + +inline rotation rotateValueHAL2G2D(unsigned char transform) +{ + int rotate_flag = transform & 0x7; + + switch (rotate_flag) { + case HAL_TRANSFORM_ROT_90: return ROT_90; + case HAL_TRANSFORM_ROT_180: return ROT_180; + case HAL_TRANSFORM_ROT_270: return ROT_270; + } + return ORIGIN; +} + +int 
formatValueHAL2G2D(int hal_format, color_format *g2d_format, pixel_order *g2d_order, uint32_t *g2d_bpp); + +class ExynosG2DWrapper { + public: +#ifdef USES_VIRTUAL_DISPLAY + ExynosG2DWrapper(ExynosOverlayDisplay *display, + ExynosExternalDisplay *externalDisplay, + ExynosVirtualDisplay *virtualDisplay = NULL); +#else + ExynosG2DWrapper(ExynosOverlayDisplay *display, ExynosExternalDisplay *hdmi); +#endif + ~ExynosG2DWrapper(); + + int runCompositor(hwc_layer_1_t &src_layer, private_handle_t *dst_handle, + uint32_t transform, uint32_t global_alpha, unsigned long solid, + blit_op mode, bool force_clear, unsigned long srcAddress, + unsigned long dstAddress, int is_lcd); +#ifdef USES_VIRTUAL_DISPLAY + int runSecureCompositor(hwc_layer_1_t &src_layer, private_handle_t *dst_handle, + private_handle_t *secure_handle, uint32_t global_alpha, unsigned long solid, + blit_op mode, bool force_clear); + bool InitSecureG2D(); + bool TerminateSecureG2D(); +#endif + void exynos5_cleanup_g2d(int force); + int exynos5_g2d_buf_alloc(hwc_display_contents_1_t* contents); + int exynos5_config_g2d(hwc_layer_1_t &layer, private_handle_t *dstHandle, s3c_fb_win_config &cfg, int win_idx_2d, int win_idx); + + ExynosOverlayDisplay *mDisplay; + ExynosExternalDisplay *mExternalDisplay; +#ifdef USES_VIRTUAL_DISPLAY + ExynosVirtualDisplay *mVirtualDisplay; + int mAllocSize; +#endif +}; + +#endif diff --git a/libhwcutils/ExynosHWCUtils.cpp b/libhwcutils/ExynosHWCUtils.cpp new file mode 100644 index 0000000..ffbf39f --- /dev/null +++ b/libhwcutils/ExynosHWCUtils.cpp @@ -0,0 +1,492 @@ +#include "ExynosHWCUtils.h" +#include "ExynosHWCDebug.h" + +int hwcDebug; + +uint32_t hwcApiVersion(const hwc_composer_device_1_t* hwc) { + uint32_t hwcVersion = hwc->common.version; + return hwcVersion & HARDWARE_API_VERSION_2_MAJ_MIN_MASK; +} + +uint32_t hwcHeaderVersion(const hwc_composer_device_1_t* hwc) { + uint32_t hwcVersion = hwc->common.version; + return hwcVersion & HARDWARE_API_VERSION_2_HEADER_MASK; +} + +bool hwcHasApiVersion(const hwc_composer_device_1_t* hwc, + uint32_t version) { + return hwcApiVersion(hwc) >= (version & HARDWARE_API_VERSION_2_MAJ_MIN_MASK); +} +void dumpHandle(private_handle_t *h) +{ + ALOGV("\t\tformat = %d, width = %u, height = %u, stride = %u, vstride = %u", + h->format, h->width, h->height, h->stride, h->vstride); +} + +void dumpHandle(uint32_t type, private_handle_t *h) +{ + HDEBUGLOGD(type, "\t\tformat = %d, width = %u, height = %u, stride = %u, vstride = %u", + h->format, h->width, h->height, h->stride, h->vstride); +} + +void dumpLayer(hwc_layer_1_t const *l) +{ + ALOGV("\ttype=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x, " + "{%7.1f,%7.1f,%7.1f,%7.1f}, {%d,%d,%d,%d}", + l->compositionType, l->flags, l->handle, l->transform, + l->blending, + l->sourceCropf.left, + l->sourceCropf.top, + l->sourceCropf.right, + l->sourceCropf.bottom, + l->displayFrame.left, + l->displayFrame.top, + l->displayFrame.right, + l->displayFrame.bottom); + + if(l->handle && !(l->flags & HWC_SKIP_LAYER)) + dumpHandle(private_handle_t::dynamicCast(l->handle)); +} + +void dumpLayer(uint32_t type, hwc_layer_1_t const *l) +{ + HDEBUGLOGD(type, "\ttype=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x, " + "{%7.1f,%7.1f,%7.1f,%7.1f}, {%d,%d,%d,%d}", + l->compositionType, l->flags, l->handle, l->transform, + l->blending, + l->sourceCropf.left, + l->sourceCropf.top, + l->sourceCropf.right, + l->sourceCropf.bottom, + l->displayFrame.left, + l->displayFrame.top, + l->displayFrame.right, + l->displayFrame.bottom); + + if(l->handle && 
!(l->flags & HWC_SKIP_LAYER)) + dumpHandle(type, private_handle_t::dynamicCast(l->handle)); +} + +void dumpMPPImage(exynos_mpp_img &c) +{ + ALOGV("\tx = %u, y = %u, w = %u, h = %u, fw = %u, fh = %u", + c.x, c.y, c.w, c.h, c.fw, c.fh); + ALOGV("\tf = %u", c.format); + ALOGV("\taddr = {%d, %d, %d}, rot = %u, cacheable = %u, drmMode = %u", + c.yaddr, c.uaddr, c.vaddr, c.rot, c.cacheable, c.drmMode); + ALOGV("\tnarrowRgb = %u, acquireFenceFd = %d, releaseFenceFd = %d, mem_type = %u", + c.narrowRgb, c.acquireFenceFd, c.releaseFenceFd, c.mem_type); +} + +void dumpMPPImage(uint32_t type, exynos_mpp_img &c) +{ + HDEBUGLOGD(type, "\tx = %u, y = %u, w = %u, h = %u, fw = %u, fh = %u", + c.x, c.y, c.w, c.h, c.fw, c.fh); + HDEBUGLOGD(type, "\tf = %u", c.format); + HDEBUGLOGD(type, "\taddr = {%d, %d, %d}, rot = %u, cacheable = %u, drmMode = %u", + c.yaddr, c.uaddr, c.vaddr, c.rot, c.cacheable, c.drmMode); + HDEBUGLOGD(type, "\tnarrowRgb = %u, acquireFenceFd = %d, releaseFenceFd = %d, mem_type = %u", + c.narrowRgb, c.acquireFenceFd, c.releaseFenceFd, c.mem_type); +} + +#ifndef USES_FIMC +void dumpBlendMPPImage(struct SrcBlendInfo &c) +{ + ALOGV("\tblop = %d, srcblendfmt = %u, srcblendpremulti = %u\n", + c.blop, c.srcblendfmt, c.srcblendpremulti); + ALOGV("\tx = %u, y = %u, w = %u, h = %u, fw = %u\n", + c.srcblendhpos, c.srcblendvpos, c.srcblendwidth, + c.srcblendheight, c.srcblendstride); + ALOGV("\tglobalalpha = %d, tglobalalpha.val = %u, cscspec = %d, cscspec.space = %u, cscspec.wide = %u\n", + c.globalalpha.enable, c.globalalpha.val, c.cscspec.enable, + c.cscspec.space, c.cscspec.wide); +} +void dumpBlendMPPImage(uint32_t type, struct SrcBlendInfo &c) +{ + HDEBUGLOGD(type, "\tblop = %d, srcblendfmt = %u, srcblendpremulti = %u\n", + c.blop, c.srcblendfmt, c.srcblendpremulti); + HDEBUGLOGD(type, "\tx = %u, y = %u, w = %u, h = %u, fw = %u\n", + c.srcblendhpos, c.srcblendvpos, c.srcblendwidth, + c.srcblendheight, c.srcblendstride); + HDEBUGLOGD(type, "\tglobalalpha = %d, tglobalalpha.val = %u, cscspec = %d, cscspec.space = %u, cscspec.wide = %u\n", + c.globalalpha.enable, c.globalalpha.val, c.cscspec.enable, + c.cscspec.space, c.cscspec.wide); +} +#endif + +bool isDstCropWidthAligned(int dest_w) +{ + int dst_crop_w_alignement; + + /* GSC's dst crop size should be aligned 128Bytes */ + dst_crop_w_alignement = GSC_DST_CROP_W_ALIGNMENT_RGB888; + + return (dest_w % dst_crop_w_alignement) == 0; +} + +bool isTransformed(const hwc_layer_1_t &layer) +{ + return layer.transform != 0; +} + +bool isRotated(const hwc_layer_1_t &layer) +{ + return (layer.transform & HAL_TRANSFORM_ROT_90) || + (layer.transform & HAL_TRANSFORM_ROT_180); +} + +bool isScaled(const hwc_layer_1_t &layer) +{ + return WIDTH(layer.displayFrame) != WIDTH(layer.sourceCropf) || + HEIGHT(layer.displayFrame) != HEIGHT(layer.sourceCropf); +} + +bool isFormatRgb(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_BGRA_8888: +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: +#endif + return true; + + default: + return false; + } +} + +bool isFormatYUV420(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case 
HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B: + return true; + default: + return false; + } +} + +bool isFormatYUV422(int __unused format) +{ + // Might add support later + return false; +} + +bool isFormatYCrCb(int format) +{ + return format == HAL_PIXEL_FORMAT_EXYNOS_YV12_M; +} + +uint8_t formatToBpp(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_BGRA_8888: +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: +#endif + return 32; + case HAL_PIXEL_FORMAT_RGB_565: + return 16; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B: + return 12; + default: + ALOGW("unrecognized pixel format %u", format); + return 0; + } +} + +int getDrmMode(int flags) +{ + if (flags & GRALLOC_USAGE_PROTECTED) { + if (flags & GRALLOC_USAGE_PRIVATE_NONSECURE) + return NORMAL_DRM; + else + return SECURE_DRM; + } else { + return NO_DRM; + } +} + +int halFormatToV4L2Format(int format) +{ +#ifdef EXYNOS_SUPPORT_BGRX_8888 + if (format == HAL_PIXEL_FORMAT_BGRX_8888) + return HAL_PIXEL_FORMAT_2_V4L2_PIX(HAL_PIXEL_FORMAT_RGBX_8888); + else +#endif + return HAL_PIXEL_FORMAT_2_V4L2_PIX(format); +} + +bool isOffscreen(hwc_layer_1_t &layer, int xres, int yres) +{ + return layer.displayFrame.left < 0 || + layer.displayFrame.top < 0 || + layer.displayFrame.right > xres || + layer.displayFrame.bottom > yres; +} + +bool isSrcCropFloat(hwc_frect &frect) +{ + return (frect.left != (int)frect.left) || + (frect.top != (int)frect.top) || + (frect.right != (int)frect.right) || + (frect.bottom != (int)frect.bottom); +} + +bool isFloat(float f) +{ + return (f != floorf(f)); +} + +bool isUHD(const hwc_layer_1_t &layer) +{ + return (WIDTH(layer.sourceCropf) >= UHD_WIDTH && + HEIGHT(layer.sourceCropf) >= UHD_HEIGHT); +} + +bool isFullRangeColor(const hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + return (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL); +} + +bool isCompressed(const hwc_layer_1_t &layer) +{ + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (handle->internal_format & ((uint64_t)(1) << 32)) + return true; + } + return false; +} + +bool compareYuvLayerConfig(int videoLayers, uint32_t index, + hwc_layer_1_t &layer, + video_layer_config *pre_src_data, video_layer_config *pre_dst_data) +{ + private_handle_t *src_handle = private_handle_t::dynamicCast(layer.handle); + buffer_handle_t dst_buf; + private_handle_t *dst_handle; + int ret = 0; + bool reconfigure = 1; + + video_layer_config new_src_cfg, new_dst_cfg; + memset(&new_src_cfg, 0, 
sizeof(new_src_cfg)); + memset(&new_dst_cfg, 0, sizeof(new_dst_cfg)); + + new_src_cfg.x = (int)layer.sourceCropf.left; + new_src_cfg.y = (int)layer.sourceCropf.top; + new_src_cfg.w = WIDTH(layer.sourceCropf); + new_src_cfg.fw = src_handle->stride; + new_src_cfg.h = HEIGHT(layer.sourceCropf); + new_src_cfg.fh = src_handle->vstride; + new_src_cfg.format = src_handle->format; + new_src_cfg.drmMode = !!(getDrmMode(src_handle->flags) == SECURE_DRM); + new_src_cfg.index = index; + + new_dst_cfg.x = layer.displayFrame.left; + new_dst_cfg.y = layer.displayFrame.top; + new_dst_cfg.w = WIDTH(layer.displayFrame); + new_dst_cfg.h = HEIGHT(layer.displayFrame); + new_dst_cfg.rot = layer.transform; + new_dst_cfg.drmMode = new_src_cfg.drmMode; + + /* check to save previous yuv layer configration */ + if (pre_src_data && pre_dst_data) { + reconfigure = yuvConfigChanged(new_src_cfg, pre_src_data[videoLayers]) || + yuvConfigChanged(new_dst_cfg, pre_dst_data[videoLayers]); + } else { + ALOGE("Invalid parameter"); + return reconfigure; + } + + memcpy(&pre_src_data[videoLayers], &new_src_cfg, sizeof(new_src_cfg)); + memcpy(&pre_dst_data[videoLayers], &new_dst_cfg, sizeof(new_dst_cfg)); + + return reconfigure; + +} + +size_t getRequiredPixels(hwc_layer_1_t &layer, int xres, int yres) +{ + uint32_t w = WIDTH(layer.displayFrame); + uint32_t h = HEIGHT(layer.displayFrame); + if (layer.displayFrame.left < 0) { + unsigned int crop = -layer.displayFrame.left; + w -= crop; + } + + if (layer.displayFrame.right > xres) { + unsigned int crop = layer.displayFrame.right - xres; + w -= crop; + } + + if (layer.displayFrame.top < 0) { + unsigned int crop = -layer.displayFrame.top; + h -= crop; + } + + if (layer.displayFrame.bottom > yres) { + int crop = layer.displayFrame.bottom - yres; + h -= crop; + } + return w*h; +} + +/* OFF_Screen to ON_Screen changes */ +void recalculateDisplayFrame(hwc_layer_1_t &layer, int xres, int yres) +{ + uint32_t x, y; + uint32_t w = WIDTH(layer.displayFrame); + uint32_t h = HEIGHT(layer.displayFrame); + + if (layer.displayFrame.left < 0) { + unsigned int crop = -layer.displayFrame.left; + ALOGV("layer off left side of screen; cropping %u pixels from left edge", + crop); + HDEBUGLOGD(eDebugDefault, "layer off left side of screen; cropping %u pixels from left edge", + crop); + x = 0; + w -= crop; + } else { + x = layer.displayFrame.left; + } + + if (layer.displayFrame.right > xres) { + unsigned int crop = layer.displayFrame.right - xres; + ALOGV("layer off right side of screen; cropping %u pixels from right edge", + crop); + HDEBUGLOGD(eDebugDefault, "layer off right side of screen; cropping %u pixels from right edge", + crop); + w -= crop; + } + + if (layer.displayFrame.top < 0) { + unsigned int crop = -layer.displayFrame.top; + ALOGV("layer off top side of screen; cropping %u pixels from top edge", + crop); + HDEBUGLOGD(eDebugDefault, "layer off top side of screen; cropping %u pixels from top edge", + crop); + y = 0; + h -= crop; + } else { + y = layer.displayFrame.top; + } + + if (layer.displayFrame.bottom > yres) { + int crop = layer.displayFrame.bottom - yres; + ALOGV("layer off bottom side of screen; cropping %u pixels from bottom edge", + crop); + HDEBUGLOGD(eDebugDefault, "layer off bottom side of screen; cropping %u pixels from bottom edge", + crop); + h -= crop; + } + + layer.displayFrame.left = x; + layer.displayFrame.top = y; + layer.displayFrame.right = w + x; + layer.displayFrame.bottom = h + y; +} + +int getS3DFormat(int preset) +{ + switch (preset) { +#ifndef HDMI_INCAPABLE + case 
V4L2_DV_720P60_SB_HALF: + case V4L2_DV_720P50_SB_HALF: + case V4L2_DV_1080P60_SB_HALF: + case V4L2_DV_1080P30_SB_HALF: + return S3D_SBS; + case V4L2_DV_720P60_TB: + case V4L2_DV_720P50_TB: + case V4L2_DV_1080P60_TB: + case V4L2_DV_1080P30_TB: + return S3D_TB; +#endif + default: + return S3D_ERROR; + } +} + +void adjustRect(hwc_rect_t &rect, int32_t width, int32_t height) +{ + if (rect.left < 0) + rect.left = 0; + if (rect.left > width) + rect.left = width; + if (rect.top < 0) + rect.top = 0; + if (rect.top > height) + rect.top = height; + if (rect.right < rect.left) + rect.right = rect.left; + if (rect.right > width) + rect.right = width; + if (rect.bottom < rect.top) + rect.bottom = rect.top; + if (rect.bottom > height) + rect.bottom = height; +} + +uint32_t halDataSpaceToV4L2ColorSpace(uint32_t data_space) +{ + switch (data_space) { + case HAL_DATASPACE_BT2020: + case HAL_DATASPACE_BT2020_FULL: + return V4L2_COLORSPACE_BT2020; + case HAL_DATASPACE_DCI_P3: + case HAL_DATASPACE_DCI_P3_FULL: + return V4L2_COLORSPACE_DCI_P3; + default: + return V4L2_COLORSPACE_DEFAULT; + } + return V4L2_COLORSPACE_DEFAULT; +} + +unsigned int isNarrowRgb(int format, uint32_t data_space) +{ + if (format == HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL) + return 0; + else { + if (isFormatRgb(format)) + return 0; + else { + if ((data_space == HAL_DATASPACE_BT2020_FULL) || + (data_space == HAL_DATASPACE_DCI_P3_FULL)) + return 0; + else + return 1; + } + } +} diff --git a/libhwcutils/ExynosHWCUtils.h b/libhwcutils/ExynosHWCUtils.h new file mode 100644 index 0000000..977af92 --- /dev/null +++ b/libhwcutils/ExynosHWCUtils.h @@ -0,0 +1,143 @@ +#ifndef HWC_UTILS_H +#define HWC_UTILS_H + +#include "ExynosHWC.h" + +#define UHD_WIDTH 3840 +#define UHD_HEIGHT 2160 + +inline int WIDTH(const hwc_rect &rect) { return rect.right - rect.left; } +inline int HEIGHT(const hwc_rect &rect) { return rect.bottom - rect.top; } +inline int WIDTH(const hwc_frect_t &rect) { return (int)(rect.right - rect.left); } +inline int HEIGHT(const hwc_frect_t &rect) { return (int)(rect.bottom - rect.top); } + +template<typename T> inline T max(T a, T b) { return (a > b) ? a : b; } +template<typename T> inline T min(T a, T b) { return (a < b) ? 
a : b; } +template<typename T> void alignCropAndCenter(T &w, T &h, + hwc_frect_t *crop, size_t alignment) +{ + double aspect = 1.0 * h / w; + T w_orig = w, h_orig = h; + + w = ALIGN(w, alignment); + h = round(aspect * w); + if (crop) { + crop->left = (w - w_orig) / 2; + crop->top = (h - h_orig) / 2; + crop->right = crop->left + w_orig; + crop->bottom = crop->top + h_orig; + } +} + +inline bool intersect(const hwc_rect &r1, const hwc_rect &r2) +{ + return !(r1.left > r2.right || + r1.right < r2.left || + r1.top > r2.bottom || + r1.bottom < r2.top); +} + +inline hwc_rect intersection(const hwc_rect &r1, const hwc_rect &r2) +{ + hwc_rect i; + i.top = max(r1.top, r2.top); + i.bottom = min(r1.bottom, r2.bottom); + i.left = max(r1.left, r2.left); + i.right = min(r1.right, r2.right); + return i; +} + +inline hwc_rect expand(const hwc_rect &r1, const hwc_rect &r2) +{ + hwc_rect i; + i.top = min(r1.top, r2.top); + i.bottom = max(r1.bottom, r2.bottom); + i.left = min(r1.left, r2.left); + i.right = max(r1.right, r2.right); + return i; +} + +inline bool rectEqual(const hwc_rect &r1, const hwc_rect &r2) +{ + return ((r1.left == r2.left) && + (r1.right == r2.right) && + (r1.top == r2.top) && + (r1.bottom == r2.bottom)); +} + +inline bool yuvConfigChanged(video_layer_config &c1, video_layer_config &c2) +{ + return c1.x != c2.x || + c1.y != c2.y || + c1.w != c2.w || + c1.h != c2.h || + c1.fw != c2.fw || + c1.fh != c2.fh || + c1.format != c2.format || + c1.rot != c2.rot || + c1.cacheable != c2.cacheable || + c1.drmMode != c2.drmMode || + c1.index != c2.index; +} + +inline bool hasAlpha(int format) +{ + return format == HAL_PIXEL_FORMAT_RGBA_8888 || format == HAL_PIXEL_FORMAT_BGRA_8888; +} + +inline bool hasPlaneAlpha(hwc_layer_1_t &layer) +{ + return layer.planeAlpha > 0 && layer.planeAlpha < 255; +} + +#define FR_SHIFT 15 +#define DECIMALBIT 16 +inline int setFloatValue(float value) +{ + return ((int)value & 0x7FFF) | (((int)((value - (int)value) * (1< +#include "ExynosMPP.h" +#include "ExynosHWCUtils.h" +#ifdef USES_VIRTUAL_DISPLAY +#include "ExynosVirtualDisplay.h" +#endif + +size_t visibleWidth(ExynosMPP *processor, hwc_layer_1_t &layer, int format, + int xres) +{ + int bpp; + if (processor->isProcessingRequired(layer, format) && format != HAL_PIXEL_FORMAT_RGB_565) + bpp = 32; + else + bpp = formatToBpp(format); + int left = max(layer.displayFrame.left, 0); + int right = min(layer.displayFrame.right, xres); + + return (right - left) * bpp / 8; +} + +ExynosMPP::ExynosMPP() +{ + ExynosMPP(NULL, 0); +} + +ExynosMPP::ExynosMPP(ExynosDisplay *display, int gscIndex) +{ + ATRACE_CALL(); + this->mDisplay = display; + this->mIndex = gscIndex; + mNeedReqbufs = false; + mWaitVsyncCount = 0; + mCountSameConfig = 0; + mGscHandle = NULL; + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + mDstBuffers[i] = NULL; + mMidBuffers[i] = NULL; + mDstBufFence[i] = -1; + mMidBufFence[i] = -1; + } + mNumAvailableDstBuffers = NUM_GSC_DST_BUFS; + mCurrentBuf = 0; + mGSCMode = 0; + mLastGSCLayerHandle = -1; + mS3DMode = 0; + mppFact = NULL; + libmpp = NULL; + mDoubleOperation = false; + mBufferFreeThread = new BufferFreeThread(this); + mBufferFreeThread->mRunning = true; + mBufferFreeThread->run(); +} + +ExynosMPP::~ExynosMPP() +{ + if (mBufferFreeThread != NULL) { + mBufferFreeThread->mRunning = false; + mBufferFreeThread->requestExitAndWait(); + delete mBufferFreeThread; + } +} + +bool 
ExynosMPP::isM2M() +{ + return mGSCMode == exynos5_gsc_map_t::GSC_M2M; +} + +bool ExynosMPP::isUsingMSC() +{ + return (AVAILABLE_GSC_UNITS[mIndex] >= 4 && AVAILABLE_GSC_UNITS[mIndex] <= 6); +} + +bool ExynosMPP::isOTF() +{ + return mGSCMode == exynos5_gsc_map_t::GSC_LOCAL; +} + +void ExynosMPP::setMode(int mode) +{ + mGSCMode = mode; +} + +void ExynosMPP::free() +{ + if (mNeedReqbufs) { + if (mWaitVsyncCount > 0) { + if (mGscHandle) + freeMPP(mGscHandle); + mNeedReqbufs = false; + mWaitVsyncCount = 0; + mDisplay->mOtfMode = OTF_OFF; + mGscHandle = NULL; + libmpp = NULL; + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(mDstBuffers, 0, sizeof(mDstBuffers)); + memset(mMidBuffers, 0, sizeof(mMidBuffers)); + mCurrentBuf = 0; + mGSCMode = 0; + mLastGSCLayerHandle = 0; + + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + mDstBufFence[i] = -1; + mMidBufFence[i] = -1; + } + } else { + mWaitVsyncCount++; + } + } +} + +bool ExynosMPP::isSrcConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return isDstConfigChanged(c1, c2) || + c1.fw != c2.fw || + c1.fh != c2.fh; +} + +bool ExynosMPP::isFormatSupportedByGsc(int format) +{ + + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + return true; + + default: + return false; + } +} + +bool ExynosMPP::formatRequiresGsc(int format) +{ + return (isFormatSupportedByGsc(format) && + (format != HAL_PIXEL_FORMAT_RGBX_8888) && (format != HAL_PIXEL_FORMAT_RGB_565) && + (format != HAL_PIXEL_FORMAT_RGBA_8888)); +} + +int ExynosMPP::getDownscaleRatio(int *downNumerator, int *downDenominator) +{ + *downNumerator = 0; + *downDenominator = 0; + return -1; +} + +bool ExynosMPP::isFormatSupportedByGscOtf(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + return true; + default: + return false; + } +} + +int ExynosMPP::isProcessingSupported(hwc_layer_1_t &layer, int format, + bool local_path, int downNumerator, int downDenominator) +{ + if (local_path && downNumerator == 0) { + return -eMPPUnsupportedDownScale; + } + + if (isUsingMSC() && local_path) { + return -eMPPUnsupportedHW; + } + + if (!isUsingMSC() && layer.blending != HWC_BLENDING_NONE) + return -eMPPUnsupportedBlending; + + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (isUsingMSC() && handle && + isFormatRgb(handle->format) && !hasAlpha(handle->format) && + (layer.blending == HWC_BLENDING_PREMULT)) + return -eMPPUnsupportedBlending; + + int max_w = maxWidth(layer); + int max_h = maxHeight(layer); + int min_w = minWidth(layer); + int min_h = 
minHeight(layer); + int crop_max_w = 0; + int crop_max_h = 0; + int dest_min_h = 0; + + if (isUsingMSC()) { + crop_max_w = 8192; + crop_max_h = 8192; + } else { + crop_max_w = isRotated(layer) ? 2016 : 4800; + crop_max_h = isRotated(layer) ? 2016 : 3344; + } + int crop_min_w = isRotated(layer) ? 32: 64; + int crop_min_h = isRotated(layer) ? 64: 32; + + if (local_path) + dest_min_h = 64; + else + dest_min_h = 32; + + int srcWidthAlign = sourceWidthAlign(handle->format); + int srcHeightAlign = sourceHeightAlign(handle->format); + int dstAlign; + if (local_path) + dstAlign = destinationAlign(HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M); + else + dstAlign = destinationAlign(HAL_PIXEL_FORMAT_BGRA_8888); + + int maxDstWidth; + int maxDstHeight; + + bool rot90or270 = !!(layer.transform & HAL_TRANSFORM_ROT_90); + // n.b.: HAL_TRANSFORM_ROT_270 = HAL_TRANSFORM_ROT_90 | + // HAL_TRANSFORM_ROT_180 + + int src_w = WIDTH(layer.sourceCropf), src_h = HEIGHT(layer.sourceCropf); + if (!isUsingMSC()) { + src_w = getAdjustedCrop(src_w, WIDTH(layer.displayFrame), handle->format, false, rot90or270); + src_h = getAdjustedCrop(src_h, HEIGHT(layer.displayFrame), handle->format, true, rot90or270); + } + int dest_w, dest_h; + if (rot90or270) { + dest_w = HEIGHT(layer.displayFrame); + dest_h = WIDTH(layer.displayFrame); + } else { + dest_w = WIDTH(layer.displayFrame); + dest_h = HEIGHT(layer.displayFrame); + } + + if (getDrmMode(handle->flags) != NO_DRM) + alignCropAndCenter(dest_w, dest_h, NULL, + GSC_DST_CROP_W_ALIGNMENT_RGB888); + + maxDstWidth = 2560; + maxDstHeight = 2560; + int max_upscale = 8; + + /* check whether GSC can handle with local path */ + if (local_path) { + if (isFormatRgb(format) && rot90or270) + return -eMPPUnsupportedRotation; + + /* GSC OTF can't handle rot90 or rot270 */ + if (!rotationSupported(rot90or270)) + return -eMPPUnsupportedRotation; + /* + * if display co-ordinates are out of the lcd resolution, + * skip that scenario to OpenGL. + * GSC OTF can't handle such scenarios. 
+ */ + if (layer.displayFrame.left < 0 || layer.displayFrame.top < 0 || + layer.displayFrame.right > mDisplay->mXres || layer.displayFrame.bottom > mDisplay->mYres) + return -eMPPUnsupportedCoordinate; + + /* GSC OTF can't handle GRALLOC_USAGE_PROTECTED layer */ + if (getDrmMode(handle->flags) != NO_DRM) + return -eMPPUnsupportedDRMContents; + + if (!isFormatSupportedByGsc(format)) + return -eMPPUnsupportedFormat; + else if (!isFormatSupportedByGscOtf(format)) + return -eMPPUnsupportedFormat; + else if (format == HAL_PIXEL_FORMAT_RGBA_8888 && layer.blending != HWC_BLENDING_NONE) + return -eMPPUnsupportedBlending; + else if (mDisplay->mHwc->mS3DMode != S3D_MODE_DISABLED) + return -eMPPUnsupportedS3DContents; + else if (!paritySupported(dest_w, dest_h)) + return -eMPPNotAlignedDstSize; + else if (handle->stride > max_w) + return -eMPPExceedHStrideMaximum; + else if (src_w * downNumerator > dest_w * downDenominator) + return -eMPPExeedMaxDownScale; + else if (dest_w > maxDstWidth) + return -eMPPExeedMaxDstWidth; + else if (dest_w > src_w * max_upscale) + return -eMPPExeedMaxUpScale; + else if (handle->vstride > max_h) + return -eMPPExceedVStrideMaximum; + else if (src_h * downNumerator > dest_h * downDenominator) + return -eMPPExeedMaxDownScale; + else if (dest_h > maxDstHeight) + return -eMPPExeedMaxDstHeight; + else if (dest_h > src_h * max_upscale) + return -eMPPExeedMaxUpScale; + else if (src_w > crop_max_w) + return -eMPPExeedSrcWCropMax; + else if (src_h > crop_max_h) + return -eMPPExeedSrcHCropMax; + else if (src_w < crop_min_w) + return -eMPPExeedSrcWCropMin; + else if (src_h < crop_min_h) + return -eMPPExeedSrcHCropMin; + else if (dest_h < dest_min_h) + return -eMPPExeedMinDstHeight; + + return 1; + } + + bool need_gsc_op_twice = false; + if (getDrmMode(handle->flags) != NO_DRM) { + need_gsc_op_twice = ((dest_w > src_w * max_upscale) || + (dest_h > src_h * max_upscale)) ? 
true : false; + if (need_gsc_op_twice) + max_upscale = 8 * 8; + } else { + if (!mDisplay->mHasDrmSurface) { + need_gsc_op_twice = false; + max_upscale = 8; + } + } + + if (getDrmMode(handle->flags) != NO_DRM) { + /* make even for gscaler */ + layer.sourceCropf.top = (unsigned int)layer.sourceCropf.top & ~1; + layer.sourceCropf.left = (unsigned int)layer.sourceCropf.left & ~1; + layer.sourceCropf.bottom = (unsigned int)layer.sourceCropf.bottom & ~1; + layer.sourceCropf.right = (unsigned int)layer.sourceCropf.right & ~1; + } + + /* check whether GSC can handle with M2M */ + if (!isFormatSupportedByGsc(format)) + return -eMPPUnsupportedFormat; + else if (src_w < min_w) + return -eMPPExeedMinSrcWidth; + else if (src_h < min_h) + return -eMPPExeedMinSrcHeight; + else if (!isDstCropWidthAligned(dest_w)) + return -eMPPNotAlignedDstSize; + else if (handle->stride > max_w) + return -eMPPExceedHStrideMaximum; + else if (handle->stride % srcWidthAlign != 0) + return -eMPPNotAlignedHStride; + else if (src_w * downNumerator >= dest_w * downDenominator) + return -eMPPExeedMaxDownScale; + else if (dest_w > src_w * max_upscale) + return -eMPPExeedMaxUpScale; + else if (handle->vstride > max_h) + return -eMPPExceedVStrideMaximum; + else if (handle->vstride % srcHeightAlign != 0) + return -eMPPNotAlignedVStride; + else if (src_h * downNumerator >= dest_h * downDenominator) + return -eMPPExeedMaxDownScale; + else if (dest_h > src_h * max_upscale) + return -eMPPExeedMaxUpScale; + else if (src_w > crop_max_w) + return -eMPPExeedSrcWCropMax; + else if (src_h > crop_max_h) + return -eMPPExeedSrcHCropMax; + else if (src_w < crop_min_w) + return -eMPPExeedSrcWCropMin; + else if (src_h < crop_min_h) + return -eMPPExeedSrcHCropMin; + else if (dest_h < dest_min_h) + return -eMPPExeedMinDstHeight; + // per 46.3.1.6 + + return 1; +} + +bool ExynosMPP::isProcessingRequired(hwc_layer_1_t &layer, int format) +{ + return formatRequiresGsc(format) || isScaled(layer) + || isTransformed(layer); +} + +int ExynosMPP::getAdjustedCrop(int rawSrcSize, int dstSize, int format, bool isVertical, bool isRotated) +{ + int ratio; + int adjustedSize; + int align; + + if (dstSize >= rawSrcSize || rawSrcSize <= dstSize * FIRST_PRESCALER_THRESHOLD) + ratio = 1; + else if (rawSrcSize < dstSize * SECOND_PRESCALER_THRESHOLD) + ratio = 2; + else + ratio = 4; + + if (isFormatRgb(format)) { + if (isRotated) + align = ratio << 1; + else + align = ratio; + } else { + align = ratio << 1; + } + + return rawSrcSize - (rawSrcSize % align); +} + +void ExynosMPP::setupSource(exynos_mpp_img &src_img, hwc_layer_1_t &layer) +{ + bool rotation = !!(layer.transform & HAL_TRANSFORM_ROT_90); + private_handle_t *src_handle = private_handle_t::dynamicCast(layer.handle); + src_img.x = ALIGN((unsigned int)layer.sourceCropf.left, srcXOffsetAlign(layer)); + src_img.y = ALIGN((unsigned int)layer.sourceCropf.top, srcYOffsetAlign(layer)); + src_img.w = WIDTH(layer.sourceCropf) - (src_img.x - (uint32_t)layer.sourceCropf.left); + src_img.fw = src_handle->stride; + if (src_img.x + src_img.w > src_img.fw) + src_img.w = src_img.fw - src_img.x; + src_img.h = HEIGHT(layer.sourceCropf) - (src_img.y - (uint32_t)layer.sourceCropf.top); + src_img.fh = src_handle->vstride; + if (src_img.y + src_img.h > src_img.fh) + src_img.h = src_img.fh - src_img.y; + src_img.yaddr = src_handle->fd; + if (!isUsingMSC()) { + src_img.w = getAdjustedCrop(src_img.w, WIDTH(layer.displayFrame), src_handle->format, false, rotation); + src_img.h = getAdjustedCrop(src_img.h, HEIGHT(layer.displayFrame), 
src_handle->format, true, rotation); + } + if (mS3DMode == S3D_SBS) + src_img.w /= 2; + if (mS3DMode == S3D_TB) + src_img.h /= 2; + if (isFormatYCrCb(src_handle->format)) { + src_img.uaddr = src_handle->fd2; + src_img.vaddr = src_handle->fd1; + } else { + src_img.uaddr = src_handle->fd1; + src_img.vaddr = src_handle->fd2; + } + if (src_handle->format != HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL) + src_img.format = src_handle->format; + else + src_img.format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + + if (layer.blending == HWC_BLENDING_COVERAGE) + src_img.pre_multi = false; + else + src_img.pre_multi = true; + + src_img.drmMode = !!(getDrmMode(src_handle->flags) == SECURE_DRM); + src_img.acquireFenceFd = layer.acquireFenceFd; +} + +void ExynosMPP::setupOtfDestination(exynos_mpp_img &src_img, exynos_mpp_img &dst_img, hwc_layer_1_t &layer) +{ + dst_img.x = layer.displayFrame.left; + dst_img.y = layer.displayFrame.top; + dst_img.fw = mDisplay->mXres; + dst_img.fh = mDisplay->mYres; + dst_img.w = WIDTH(layer.displayFrame); + dst_img.h = HEIGHT(layer.displayFrame); + dst_img.w = min(dst_img.w, dst_img.fw - dst_img.x); + dst_img.h = min(dst_img.h, dst_img.fh - dst_img.y); + dst_img.format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M; + dst_img.rot = layer.transform; + dst_img.drmMode = src_img.drmMode; + dst_img.yaddr = (ptrdiff_t)NULL; +} + +int ExynosMPP::sourceWidthAlign(int format) +{ + return 16; +} + +int ExynosMPP::sourceHeightAlign(int format) +{ + return 16; +} + +int ExynosMPP::destinationAlign(int format) +{ + return 16; +} + +int ExynosMPP::reconfigureOtf(exynos_mpp_img *src_img, exynos_mpp_img *dst_img) +{ + int ret = 0; + if (mGscHandle) { + ret = stopMPP(mGscHandle); + if (ret < 0) { + ALOGE("failed to stop gscaler %u", mIndex); + return ret; + } + mNeedReqbufs = true; + mCountSameConfig = 0; + } + + if (!mGscHandle) { + mGscHandle = createMPP(AVAILABLE_GSC_UNITS[mIndex], + GSC_OUTPUT_MODE, GSC_OUT_FIMD, false); + if (!mGscHandle) { + ALOGE("failed to create gscaler handle"); + return -1; + } + } + + ret = configMPP(mGscHandle, src_img, dst_img); + if (ret < 0) { + ALOGE("failed to configure gscaler %u", mIndex); + return ret; + } + + return ret; +} + +int ExynosMPP::processOTF(hwc_layer_1_t &layer) +{ + ATRACE_CALL(); + ALOGV("configuring gscaler %u for memory-to-fimd-localout", mIndex); + + buffer_handle_t dst_buf; + private_handle_t *dst_handle; + int ret = 0; + + int dstAlign; + + exynos_mpp_img src_img, dst_img; + memset(&src_img, 0, sizeof(src_img)); + memset(&dst_img, 0, sizeof(dst_img)); + + setupSource(src_img, layer); + setupOtfDestination(src_img, dst_img, layer); + + dstAlign = destinationAlign(dst_img.format); + + ALOGV("source configuration:"); + dumpMPPImage(src_img); + + if (!mGscHandle || isSrcConfigChanged(src_img, mSrcConfig) || + isDstConfigChanged(dst_img, mDstConfig)) { + if (!isPerFrameSrcChanged(src_img, mSrcConfig) || + !isPerFrameDstChanged(dst_img, mDstConfig)) { + ret = reconfigureOtf(&src_img, &dst_img); + if (ret < 0) + goto err_gsc_local; + } + } + + if (isSourceCropChanged(src_img, mSrcConfig)) { + setInputCrop(mGscHandle, &src_img, &dst_img); + } + + ALOGV("destination configuration:"); + dumpMPPImage(dst_img); + + ret = runMPP(mGscHandle, &src_img, &dst_img); + if (ret < 0) { + ALOGE("failed to run gscaler %u", mIndex); + goto err_gsc_local; + } + + memcpy(&mSrcConfig, &src_img, sizeof(mSrcConfig)); + memcpy(&mDstConfig, &dst_img, sizeof(mDstConfig)); + + return 0; + +err_gsc_local: + if (src_img.acquireFenceFd >= 0) + 
close(src_img.acquireFenceFd); + + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + + return ret; +} + +bool ExynosMPP::setupDoubleOperation(exynos_mpp_img &src_img, exynos_mpp_img &mid_img, hwc_layer_1_t &layer) +{ + /* check if GSC need to operate twice */ + bool need_gsc_op_twice = false; + bool need_unscaled_csc = false; + private_handle_t *src_handle = private_handle_t::dynamicCast(layer.handle); + const int max_upscale = 8; + bool rot90or270 = !!(layer.transform & HAL_TRANSFORM_ROT_90); + int src_w = WIDTH(layer.sourceCropf), src_h = HEIGHT(layer.sourceCropf); + int dest_w, dest_h; + if (rot90or270) { + dest_w = HEIGHT(layer.displayFrame); + dest_h = WIDTH(layer.displayFrame); + } else { + dest_w = WIDTH(layer.displayFrame); + dest_h = HEIGHT(layer.displayFrame); + } + if (getDrmMode(src_handle->flags) != NO_DRM) + need_gsc_op_twice = ((dest_w > src_w * max_upscale) || + (dest_h > src_h * max_upscale)) ? true : false; + + if (isUsingMSC() && src_handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED) { + need_gsc_op_twice = true; + need_unscaled_csc = true; + } + + if (need_gsc_op_twice) { + mid_img.x = 0; + mid_img.y = 0; + + int mid_w = 0, mid_h = 0; + + if (need_unscaled_csc) { + mid_img.w = src_w; + mid_img.h = src_h; + } else { + if (rot90or270) { + mid_w = HEIGHT(layer.displayFrame); + mid_h = WIDTH(layer.displayFrame); + } else { + mid_w = WIDTH(layer.displayFrame); + mid_h = HEIGHT(layer.displayFrame); + } + + if (WIDTH(layer.sourceCropf) * max_upscale < mid_w) + mid_img.w = (((mid_w + 7) / 8) + 1) & ~1; + else + mid_img.w = mid_w; + + if (HEIGHT(layer.sourceCropf) * max_upscale < mid_h) + mid_img.h = (((mid_h + 7) / 8) + 1) & ~1; + else + mid_img.h = mid_h; + } + mid_img.drmMode = src_img.drmMode; + mid_img.format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + + if (layer.blending == HWC_BLENDING_PREMULT || layer.blending == HWC_BLENDING_COVERAGE) + mid_img.pre_multi = false; + else + mid_img.pre_multi = true; + + mid_img.mem_type = GSC_MEM_DMABUF; + mid_img.narrowRgb = !isFormatRgb(src_handle->format); + } + + return need_gsc_op_twice; +} + +void ExynosMPP::setupM2MDestination(exynos_mpp_img &src_img, exynos_mpp_img &dst_img, + int dst_format, hwc_layer_1_t &layer, hwc_frect_t *sourceCrop) +{ + private_handle_t *src_handle = private_handle_t::dynamicCast(layer.handle); + dst_img.x = 0; + dst_img.y = 0; + dst_img.w = WIDTH(layer.displayFrame); + dst_img.h = HEIGHT(layer.displayFrame); + dst_img.rot = layer.transform; + dst_img.drmMode = src_img.drmMode; + dst_img.format = dst_format; + + if (layer.blending == HWC_BLENDING_PREMULT || layer.blending == HWC_BLENDING_COVERAGE) + dst_img.pre_multi = false; + else + dst_img.pre_multi = true; + + dst_img.mem_type = GSC_MEM_DMABUF; + if (src_handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL) + dst_img.narrowRgb = 0; + else + dst_img.narrowRgb = !isFormatRgb(src_handle->format); + if (dst_img.drmMode) + alignCropAndCenter(dst_img.w, dst_img.h, sourceCrop, + GSC_DST_CROP_W_ALIGNMENT_RGB888); +} + +int ExynosMPP::reallocateBuffers(private_handle_t *src_handle, exynos_mpp_img &dst_img, exynos_mpp_img &mid_img, bool need_gsc_op_twice) +{ + ATRACE_CALL(); + alloc_device_t* alloc_device = mDisplay->mAllocDevice; + int ret = 0; + int dst_stride; + int usage = GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | + GRALLOC_USAGE_NOZEROED | +#ifdef USE_FB_PHY_LINEAR + ((mIndex == FIMD_GSC_IDX) ? 
GRALLOC_USAGE_PHYSICALLY_LINEAR : 0) | +#endif + GRALLOC_USAGE_HW_COMPOSER; + +#ifdef USE_FB_PHY_LINEAR + usage |= GRALLOC_USAGE_PROTECTED; + usage &= ~GRALLOC_USAGE_PRIVATE_NONSECURE; +#else + if (getDrmMode(src_handle->flags) == SECURE_DRM) { + usage |= GRALLOC_USAGE_PROTECTED; + usage &= ~GRALLOC_USAGE_PRIVATE_NONSECURE; + } else if (getDrmMode(src_handle->flags) == NORMAL_DRM) { + usage |= GRALLOC_USAGE_PROTECTED; + usage |= GRALLOC_USAGE_PRIVATE_NONSECURE; + } +#endif + /* HACK: for distinguishing FIMD_VIDEO region */ + if (!(src_handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_PN || + src_handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN || + src_handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_TILED)) { + usage |= (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER); + } + + int w, h; + { + int dstAlign = destinationAlign(dst_img.format); + w = ALIGN(mDisplay->mXres, dstAlign); + h = ALIGN(mDisplay->mYres, dstAlign); + } + + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + if (mDstBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mDstBuffers[i]); + mDstBuffers[i] = NULL; + } + + if (mDstBufFence[i] >= 0) { + close(mDstBufFence[i]); + mDstBufFence[i] = -1; + } + + if (mMidBuffers[i] != NULL) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mMidBuffers[i]); + mMidBuffers[i] = NULL; + } + + if (mMidBufFence[i] >= 0) { + close(mMidBufFence[i]); + mMidBufFence[i] = -1; + } + } + + if (getDrmMode(src_handle->flags) != NO_DRM && + mDisplay != NULL && mDisplay->mHwc != NULL && + (ExynosDisplay *)mDisplay->mHwc->externalDisplay == mDisplay) + mNumAvailableDstBuffers = NUM_DRM_GSC_DST_BUFS; + else + mNumAvailableDstBuffers = NUM_GSC_DST_BUFS; + + for (size_t i = 0; i < mNumAvailableDstBuffers; i++) { + int format = dst_img.format; + ret = alloc_device->alloc(alloc_device, w, h, + format, usage, &mDstBuffers[i], + &dst_stride); + if (ret < 0) { + ALOGE("failed to allocate destination buffer(%dx%d): %s", w, h, + strerror(-ret)); + return ret; + } + + if (need_gsc_op_twice) { + ret = alloc_device->alloc(alloc_device, mid_img.w, mid_img.h, + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M, usage, &mMidBuffers[i], + &dst_stride); + if (ret < 0) { + ALOGE("failed to allocate intermediate buffer(%dx%d): %s", mid_img.w, mid_img.h, + strerror(-ret)); + return ret; + } + } + } + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } + return ret; +} + +#ifdef USES_VIRTUAL_DISPLAY +int ExynosMPP::processM2M(hwc_layer_1_t &layer, int dst_format, hwc_frect_t *sourceCrop, bool isNeedBufferAlloc) +#else +int ExynosMPP::processM2M(hwc_layer_1_t &layer, int dst_format, hwc_frect_t *sourceCrop) +#endif +{ + ATRACE_CALL(); + ALOGV("configuring gscaler %u for memory-to-memory", AVAILABLE_GSC_UNITS[mIndex]); + + alloc_device_t* alloc_device = mDisplay->mAllocDevice; + private_handle_t *src_handle = private_handle_t::dynamicCast(layer.handle); + buffer_handle_t dst_buf; + private_handle_t *dst_handle; + buffer_handle_t mid_buf; + private_handle_t *mid_handle; + int ret = 0; + int dstAlign; + + if (isUsingMSC()) { + if (dst_format != HAL_PIXEL_FORMAT_RGB_565) + dst_format = HAL_PIXEL_FORMAT_BGRA_8888; + } + + exynos_mpp_img src_img, dst_img; + memset(&src_img, 0, sizeof(src_img)); + memset(&dst_img, 0, sizeof(dst_img)); + exynos_mpp_img mid_img; + memset(&mid_img, 0, sizeof(mid_img)); + + hwc_frect_t sourceCropTemp; + if (!sourceCrop) + sourceCrop = &sourceCropTemp; + + setupSource(src_img, layer); + src_img.mem_type = 
GSC_MEM_DMABUF; + + if (isUsingMSC()) { + if (src_img.format == HAL_PIXEL_FORMAT_RGBA_8888 || src_img.format == HAL_PIXEL_FORMAT_RGBX_8888) + src_img.format = HAL_PIXEL_FORMAT_BGRA_8888; + } + + bool need_gsc_op_twice = setupDoubleOperation(src_img, mid_img, layer); + + setupM2MDestination(src_img, dst_img, dst_format, layer, sourceCrop); + +#ifdef USES_VIRTUAL_DISPLAY + if (!isNeedBufferAlloc) { + dst_img.x = mDisplay->mHwc->mVirtualDisplayRect.left; + dst_img.y = mDisplay->mHwc->mVirtualDisplayRect.top; + dst_img.w = mDisplay->mHwc->mVirtualDisplayRect.width; + dst_img.h = mDisplay->mHwc->mVirtualDisplayRect.height; + } +#endif + + ALOGV("source configuration:"); + dumpMPPImage(src_img); + + bool reconfigure = isSrcConfigChanged(src_img, mSrcConfig) || + isDstConfigChanged(dst_img, mDstConfig); + +#ifdef USES_VIRTUAL_DISPLAY + if (isNeedBufferAlloc) { +#endif + bool realloc = mDstConfig.fw <= 0 || formatToBpp(mDstConfig.format) != formatToBpp(dst_format); + + /* ext_only andn int_only changes */ + if (!need_gsc_op_twice && getDrmMode(src_handle->flags) == SECURE_DRM) { + if (dst_img.drmMode != mDstConfig.drmMode) + realloc = true; + else + realloc = false; + } + + if (need_gsc_op_twice && isSrcConfigChanged(mid_img, mMidConfig)) + realloc = true; + + if (need_gsc_op_twice && !mDoubleOperation) + realloc = true; + mDoubleOperation = need_gsc_op_twice; + + if (reconfigure && realloc) { + if ((ret = reallocateBuffers(src_handle, dst_img, mid_img, need_gsc_op_twice)) < 0) + goto err_alloc; + + mCurrentBuf = 0; + mLastGSCLayerHandle = 0; + } + + if (!reconfigure && (mLastGSCLayerHandle == (ptrdiff_t)layer.handle)) { + ALOGV("[USE] GSC_SKIP_DUPLICATE_FRAME_PROCESSING\n"); + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + + layer.releaseFenceFd = -1; + layer.acquireFenceFd = -1; + mDstConfig.releaseFenceFd = -1; + + mCurrentBuf = (mCurrentBuf + mNumAvailableDstBuffers- 1) % mNumAvailableDstBuffers; + return 0; + } else { + mLastGSCLayerHandle = (ptrdiff_t)layer.handle; + } +#ifdef USES_VIRTUAL_DISPLAY + } else { + if (reconfigure && need_gsc_op_twice) { + int dst_stride; + if (mMidBuffers[0] != NULL) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mMidBuffers[0]); + mBufferFreeThread->mCondition.signal(); + mMidBuffers[0] = NULL; + } + + if (mMidBufFence[0] >= 0) { + close(mMidBufFence[0]); + mMidBufFence[0] = -1; + } + + ret = alloc_device->alloc(alloc_device, mid_img.w, mid_img.h, + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M, ((ExynosVirtualDisplay*)mDisplay)->mSinkUsage, &mMidBuffers[0], + &dst_stride); + if (ret < 0) { + ALOGE("failed to allocate intermediate buffer(%dx%d): %s", mid_img.w, mid_img.h, + strerror(-ret)); + goto err_alloc; + } + } + } +#endif + + layer.acquireFenceFd = -1; + if (need_gsc_op_twice) { + mid_img.acquireFenceFd = mMidBufFence[mCurrentBuf]; + mMidBufFence[mCurrentBuf] = -1; + mid_buf = mMidBuffers[mCurrentBuf]; + mid_handle = private_handle_t::dynamicCast(mid_buf); + + mid_img.fw = mid_handle->stride; + mid_img.fh = mid_handle->vstride; + mid_img.yaddr = mid_handle->fd; + if (isFormatYCrCb(mid_handle->format)) { + mid_img.uaddr = mid_handle->fd2; + mid_img.vaddr = mid_handle->fd1; + } else { + mid_img.uaddr = mid_handle->fd1; + mid_img.vaddr = mid_handle->fd2; + } + //mid_img.acquireFenceFd = -1; + + ALOGV("mid configuration:"); + dumpMPPImage(mid_img); + } + + dst_buf = mDstBuffers[mCurrentBuf]; + dst_handle = private_handle_t::dynamicCast(dst_buf); + + dst_img.fw = dst_handle->stride; + dst_img.fh = dst_handle->vstride; + 
dst_img.yaddr = dst_handle->fd; + dst_img.uaddr = dst_handle->fd1; + dst_img.vaddr = dst_handle->fd2; + dst_img.acquireFenceFd = mDstBufFence[mCurrentBuf]; + mDstBufFence[mCurrentBuf] = -1; + + ALOGV("destination configuration:"); + dumpMPPImage(dst_img); + + if ((int)dst_img.w != WIDTH(layer.displayFrame)) + ALOGV("padding %u x %u output to %u x %u and cropping to {%7.1f,%7.1f,%7.1f,%7.1f}", + WIDTH(layer.displayFrame), HEIGHT(layer.displayFrame), + dst_img.w, dst_img.h, sourceCrop->left, sourceCrop->top, + sourceCrop->right, sourceCrop->bottom); + + if (mGscHandle) { + ALOGV("reusing open gscaler %u", AVAILABLE_GSC_UNITS[mIndex]); + } else { + ALOGV("opening gscaler %u", AVAILABLE_GSC_UNITS[mIndex]); + mGscHandle = createMPP( + AVAILABLE_GSC_UNITS[mIndex], GSC_M2M_MODE, GSC_DUMMY, (getDrmMode(src_handle->flags) != NO_DRM)); + if (!mGscHandle) { + ALOGE("failed to create gscaler handle"); + ret = -1; + goto err_alloc; + } + } + + if (!need_gsc_op_twice) + memcpy(&mid_img, &dst_img, sizeof(exynos_mpp_img)); + + /* src -> mid or src->dest */ + if (reconfigure || need_gsc_op_twice) { + ret = stopMPP(mGscHandle); + if (ret < 0) { + ALOGE("failed to stop gscaler %u", mIndex); + goto err_gsc_config; + } + + ret = setCSCProperty(mGscHandle, 0, !mid_img.narrowRgb, 1); + ret = configMPP(mGscHandle, &src_img, &mid_img); + if (ret < 0) { + ALOGE("failed to configure gscaler %u", mIndex); + goto err_gsc_config; + } + } + + ret = runMPP(mGscHandle, &src_img, &mid_img); + if (ret < 0) { + ALOGE("failed to run gscaler %u", mIndex); + goto err_gsc_config; + } + + /* mid -> dst */ + if (need_gsc_op_twice) { + ret = stopMPP(mGscHandle); + if (ret < 0) { + ALOGE("failed to stop gscaler %u", mIndex); + goto err_gsc_config; + } + + mid_img.acquireFenceFd = mid_img.releaseFenceFd; + + ret = setCSCProperty(mGscHandle, 0, !dst_img.narrowRgb, 1); + ret = configMPP(mGscHandle, &mid_img, &dst_img); + if (ret < 0) { + ALOGE("failed to configure gscaler %u", mIndex); + goto err_gsc_config; + } + + ret = runMPP(mGscHandle, &mid_img, &dst_img); + if (ret < 0) { + ALOGE("failed to run gscaler %u", mIndex); + goto err_gsc_config; + } + mMidBufFence[mCurrentBuf] = mid_img.releaseFenceFd; + } + + mSrcConfig = src_img; + mMidConfig = mid_img; + + if (need_gsc_op_twice) { + mDstConfig = dst_img; + } else { + mDstConfig = mid_img; + } + + layer.releaseFenceFd = src_img.releaseFenceFd; + + return 0; + +err_gsc_config: + if (mGscHandle) + destroyMPP(mGscHandle); + mGscHandle = NULL; + libmpp = NULL; +err_alloc: + if (src_img.acquireFenceFd >= 0) + close(src_img.acquireFenceFd); +#ifdef USES_VIRTUAL_DISPLAY + if (isNeedBufferAlloc) { +#endif + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + if (mDstBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mDstBuffers[i]); + mDstBuffers[i] = NULL; + } + if (mDstBufFence[i] >= 0) { + close(mDstBufFence[i]); + mDstBufFence[i] = -1; + } + if (mMidBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mMidBuffers[i]); + mMidBuffers[i] = NULL; + } + if (mMidBufFence[i] >= 0) { + close(mMidBufFence[i]); + mMidBufFence[i] = -1; + } + } +#ifdef USES_VIRTUAL_DISPLAY + } else { + if (mMidBuffers[0]) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mMidBuffers[0]); + mMidBuffers[0] = NULL; + } + if (mMidBufFence[0] >= 0) { + close(mMidBufFence[0]); + mMidBufFence[0] = -1; + } + } +#endif + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } + memset(&mSrcConfig, 0, 
sizeof(mSrcConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + return ret; +} + +void ExynosMPP::cleanupM2M() +{ + cleanupM2M(false); +} + +void ExynosMPP::cleanupM2M(bool noFenceWait) +{ + ATRACE_CALL(); + if (!mGscHandle) + return; + + ALOGV("closing gscaler %u", AVAILABLE_GSC_UNITS[mIndex]); + + if (!noFenceWait) { + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { +#ifndef FORCEFB_YUVLAYER + if (mDstBufFence[i] >= 0) + if (sync_wait(mDstBufFence[i], 1000) < 0) + ALOGE("sync_wait error"); +#endif + if (mMidBufFence[i] >= 0) + if (sync_wait(mMidBufFence[i], 1000) < 0) + ALOGE("sync_wait error"); + } + } + + if (mGscHandle) { + stopMPP(mGscHandle); + destroyMPP(mGscHandle); + } + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + if (mDstBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mDstBuffers[i]); + } + if (mDstBufFence[i] >= 0) + close(mDstBufFence[i]); + if (mMidBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + mFreedBuffers.push_back(mMidBuffers[i]); + mMidBuffers[i] = NULL; + } + if (mMidBufFence[i] >= 0) + close(mMidBufFence[i]); + } + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } + + mGscHandle = NULL; + libmpp = NULL; + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(mDstBuffers, 0, sizeof(mDstBuffers)); + memset(mMidBuffers, 0, sizeof(mMidBuffers)); + mCurrentBuf = 0; + mGSCMode = 0; + mLastGSCLayerHandle = 0; + + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + mDstBufFence[i] = -1; + mMidBufFence[i] = -1; + } +} + +void ExynosMPP::cleanupOTF() +{ + if (mGscHandle) { + stopMPP(mGscHandle); + freeMPP(mGscHandle); + } + + mNeedReqbufs = false; + mWaitVsyncCount = 0; + mDisplay->mOtfMode = OTF_OFF; + mGscHandle = NULL; + libmpp = NULL; + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(mDstBuffers, 0, sizeof(mDstBuffers)); + memset(mMidBuffers, 0, sizeof(mMidBuffers)); + mCurrentBuf = 0; + mGSCMode = 0; + mLastGSCLayerHandle = 0; + + for (size_t i = 0; i < NUM_GSC_DST_BUFS; i++) { + mDstBufFence[i] = -1; + mMidBufFence[i] = -1; + } +} + +bool ExynosMPP::rotationSupported(bool rotation) +{ + return !rotation; +} + +bool ExynosMPP::paritySupported(int w, int h) +{ + return (w % 2 == 0) && (h % 2 == 0); +} + +bool ExynosMPP::isDstConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return c1.x != c2.x || + c1.y != c2.y || + c1.w != c2.w || + c1.h != c2.h || + c1.format != c2.format || + c1.rot != c2.rot || + c1.narrowRgb != c2.narrowRgb || + c1.cacheable != c2.cacheable || + c1.pre_multi != c2.pre_multi || + c1.drmMode != c2.drmMode; +} + +bool ExynosMPP::isSourceCropChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return false; +} + +bool ExynosMPP::isPerFrameSrcChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return false; +} + +bool ExynosMPP::isPerFrameDstChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return false; +} + +bool ExynosMPP::isReallocationRequired(int w, int h, exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return ALIGN(w, GSC_W_ALIGNMENT) != ALIGN(c2.fw, GSC_W_ALIGNMENT) || + ALIGN(h, GSC_H_ALIGNMENT) != ALIGN(c2.fh, GSC_H_ALIGNMENT) || + c1.format != c2.format || + c1.drmMode != c2.drmMode; +} + +uint32_t ExynosMPP::halFormatToMPPFormat(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + return 
HAL_PIXEL_FORMAT_BGRA_8888; + case HAL_PIXEL_FORMAT_BGRA_8888: + return HAL_PIXEL_FORMAT_RGBA_8888; + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + return HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + default: + return format; + } +} + +int ExynosMPP::minWidth(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (handle->format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + return isRotated(layer) ? 16 : 32; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + default: + return isRotated(layer) ? 32 : 64; + } +} + +int ExynosMPP::minHeight(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (handle->format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + return isRotated(layer) ? 32 : 16; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + default: + return isRotated(layer) ? 64 : 32; + } +} + +int ExynosMPP::maxWidth(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (isUsingMSC()) + return 8192; + + switch (handle->format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + return 4800; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + default: + return 2047; + } +} + +int ExynosMPP::maxHeight(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (isUsingMSC()) + return 8192; + + switch (handle->format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + return 3344; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + default: + return 2047; + } +} + +int ExynosMPP::srcXOffsetAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (handle->format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + return 1; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: 
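+ /* YUV 4:2:0 sources need an even x offset (chroma is subsampled 2x horizontally), so these cases fall through to the 2-pixel alignment below */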
+ case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + default: + return 2; + } +} + +int ExynosMPP::srcYOffsetAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (handle->format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + return 1; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + default: + return isRotated(layer) ? 2 : 1; + } +} + +void *ExynosMPP::createMPP(int id, int mode, int outputMode, int drm) +{ + ATRACE_CALL(); + mppFact = new MppFactory(); + libmpp = mppFact->CreateMpp(id, mode, outputMode, drm); + + return reinterpret_cast(libmpp); +} + +int ExynosMPP::configMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst) +{ + ATRACE_CALL(); + return libmpp->ConfigMpp(handle, src, dst); +} + +int ExynosMPP::runMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst) +{ + ATRACE_CALL(); + return libmpp->RunMpp(handle, src, dst); +} + +int ExynosMPP::stopMPP(void *handle) +{ + ATRACE_CALL(); + return libmpp->StopMpp(handle); +} + +void ExynosMPP::destroyMPP(void *handle) +{ + ATRACE_CALL(); + libmpp->DestroyMpp(handle); + delete(mppFact); +} + +int ExynosMPP::setCSCProperty(void *handle, unsigned int eqAuto, unsigned int fullRange, unsigned int colorspace) +{ + return libmpp->SetCSCProperty(handle, eqAuto, fullRange, colorspace); +} + +int ExynosMPP::freeMPP(void *handle) +{ + ATRACE_CALL(); + return libmpp->FreeMpp(handle); +} + +int ExynosMPP::setInputCrop(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst) +{ + return libmpp->SetInputCrop(handle, src, dst); +} + +bool ExynosMPP::bufferChanged(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + return mSrcConfig.fw != (uint32_t)handle->stride || + mSrcConfig.fh != (uint32_t)handle->vstride || + mSrcConfig.format != (uint32_t)handle->format || + mDstConfig.rot != (uint32_t)layer.transform; +} + +bool ExynosMPP::needsReqbufs() +{ + return mNeedReqbufs; +} + +bool ExynosMPP::inUse() +{ + return mGscHandle != NULL; +} + +void ExynosMPP::freeBuffers() +{ + alloc_device_t* alloc_device = mDisplay->mAllocDevice; + android::List::iterator it; + android::List::iterator end; + { + android::Mutex::Autolock lock(mMutex); + it = mFreedBuffers.begin(); + end = mFreedBuffers.end(); + } + while (it != end) { + buffer_handle_t buffer = (buffer_handle_t)(*it); + alloc_device->free(alloc_device, buffer); + { + android::Mutex::Autolock lock(mMutex); + it = mFreedBuffers.erase(it); + } + } +} + +bool BufferFreeThread::threadLoop() +{ + while(mRunning) { + { + android::Mutex::Autolock lock(mExynosMPP->mMutex); + while(mExynosMPP->mFreedBuffers.size() == 0) { + mCondition.wait(mExynosMPP->mMutex); + } + } + mExynosMPP->freeBuffers(); + } + return true; +} + +bool ExynosMPP::isSrcCropAligned(hwc_layer_1_t &layer, bool 
rotation) +{ + return !rotation || + ((uint32_t)layer.sourceCropf.left % srcXOffsetAlign(layer) == 0 && + (uint32_t)layer.sourceCropf.top % srcYOffsetAlign(layer) == 0); +} diff --git a/libhwcutils/ExynosMPP.h b/libhwcutils/ExynosMPP.h new file mode 100644 index 0000000..bebdae3 --- /dev/null +++ b/libhwcutils/ExynosMPP.h @@ -0,0 +1,165 @@ +#ifndef EXYNOS_MPP_H +#define EXYNOS_MPP_H + +#include "ExynosDisplay.h" +#include "MppFactory.h" +#include +#include +#include +#include + +#define FIRST_PRESCALER_THRESHOLD 4 +#define SECOND_PRESCALER_THRESHOLD 8 + +class BufferFreeThread; +enum { + eMPPUnsupportedDownScale = 0x00000001, + eMPPUnsupportedHW = 0x00000002, + eMPPUnsupportedRotation = 0x00000004, + eMPPUnsupportedCoordinate = 0x00000008, + eMPPUnsupportedDRMContents = 0x00000010, + eMPPUnsupportedS3DContents = 0x00000020, + eMPPUnsupportedBlending = 0x00000040, + eMPPUnsupportedFormat = 0x00000080, + eMPPNotAlignedDstSize = 0x00000100, + eMPPNotAlignedSrcCropPosition = 0x00000200, + eMPPNotAlignedHStride = 0x00000400, + eMPPNotAlignedVStride = 0x00000800, + eMPPExceedHStrideMaximum = 0x00001000, + eMPPExceedVStrideMaximum = 0x00002000, + eMPPExeedMaxDownScale = 0x00004000, + eMPPExeedMaxDstWidth = 0x00008000, + eMPPExeedMaxDstHeight = 0x00010000, + eMPPExeedMinSrcWidth = 0x00020000, + eMPPExeedMinSrcHeight = 0x00040000, + eMPPExeedMaxUpScale = 0x00080000, + eMPPExeedSrcWCropMax = 0x00100000, + eMPPExeedSrcHCropMax = 0x00200000, + eMPPExeedSrcWCropMin = 0x00400000, + eMPPExeedSrcHCropMin = 0x00800000, + eMPPExeedMinDstHeight = 0x01000000, +}; + +class ExynosMPP { + MppFactory *mppFact; + LibMpp *libmpp; + public: + /* Methods */ + ExynosMPP(); + ExynosMPP(ExynosDisplay *display, int gscIndex); + virtual ~ExynosMPP(); + + static bool isSrcConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + virtual bool isFormatSupportedByGsc(int format); + bool formatRequiresGsc(int format); + static bool isFormatSupportedByGscOtf(int format); + + virtual bool rotationSupported(bool rotation); + virtual bool paritySupported(int w, int h); + + virtual bool isProcessingRequired(hwc_layer_1_t &layer, int format); + virtual int getDownscaleRatio(int *numerator, int *denominator); + virtual int isProcessingSupported(hwc_layer_1_t &layer, int format, bool otf, int downNumerator = 1, int downDenominator = 16); +#ifdef USES_VIRTUAL_DISPLAY + virtual int processM2M(hwc_layer_1_t &layer, int dstFormat, hwc_frect_t *sourceCrop, bool isNeedBufferAlloc = true); +#else + virtual int processM2M(hwc_layer_1_t &layer, int dstFormat, hwc_frect_t *sourceCrop); +#endif + virtual int processOTF(hwc_layer_1_t &layer); + virtual void cleanupM2M(); + virtual void cleanupM2M(bool noFenceWait); + virtual void cleanupOTF(); + + virtual int getAdjustedCrop(int rawSrcSize, int dstSize, int format, bool isVertical, bool isRotated); + + bool isM2M(); + bool isOTF(); + virtual bool isUsingMSC(); + void setMode(int mode); + void free(); + bool bufferChanged(hwc_layer_1_t &layer); + bool needsReqbufs(); + bool inUse(); + void freeBuffers(); + + /* Fields */ + uint32_t mIndex; + void *mGscHandle; + bool mNeedReqbufs; + int mWaitVsyncCount; + int mCountSameConfig; + ExynosDisplay *mDisplay; + exynos_mpp_img mSrcConfig; + exynos_mpp_img mMidConfig; + exynos_mpp_img mDstConfig; + buffer_handle_t mDstBuffers[NUM_GSC_DST_BUFS]; + buffer_handle_t mMidBuffers[NUM_GSC_DST_BUFS]; + int mDstBufFence[NUM_GSC_DST_BUFS]; + int mMidBufFence[NUM_GSC_DST_BUFS]; + size_t mCurrentBuf; + int mGSCMode; + ptrdiff_t mLastGSCLayerHandle; + int mS3DMode; + 
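/* deferred buffer release: handles queued on mFreedBuffers (guarded by mMutex) are freed asynchronously by mBufferFreeThread */ +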
bool mDoubleOperation; + android::List mFreedBuffers; + BufferFreeThread *mBufferFreeThread; + android::Mutex mMutex; + size_t mNumAvailableDstBuffers; + + protected: + /* Methods */ + static bool isDstConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + static bool isReallocationRequired(int w, int h, exynos_mpp_img &c1, exynos_mpp_img &c2); + static int minWidth(hwc_layer_1_t &layer); + static int minHeight(hwc_layer_1_t &layer); + int maxWidth(hwc_layer_1_t &layer); + int maxHeight(hwc_layer_1_t &layer); + static int srcXOffsetAlign(hwc_layer_1_t &layer); + static int srcYOffsetAlign(hwc_layer_1_t &layer); + + virtual void setupSource(exynos_mpp_img &src_img, hwc_layer_1_t &layer); + void setupOtfDestination(exynos_mpp_img &src_img, exynos_mpp_img &dst_img, hwc_layer_1_t &layer); + int reconfigureOtf(exynos_mpp_img *src_img, exynos_mpp_img *dst_img); + + virtual void setupM2MDestination(exynos_mpp_img &src_img, exynos_mpp_img &dst_img, int dst_format, hwc_layer_1_t &layer, hwc_frect_t *sourceCrop); + bool setupDoubleOperation(exynos_mpp_img &src_img, exynos_mpp_img &mid_img, hwc_layer_1_t &layer); + int reallocateBuffers(private_handle_t *src_handle, exynos_mpp_img &dst_img, exynos_mpp_img &mid_img, bool need_gsc_op_twice); + + /* + * Override these virtual functions in chip directory to handle per-chip differences + * as well as GSC/FIMC differences + */ + virtual uint32_t halFormatToMPPFormat(int format); + + virtual int sourceWidthAlign(int format); + virtual int sourceHeightAlign(int format); + virtual int destinationAlign(int format); + virtual bool isSourceCropChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + virtual bool isPerFrameSrcChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + virtual bool isPerFrameDstChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + + virtual void *createMPP(int id, int mode, int outputMode, int drm); + virtual int configMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst); + virtual int runMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst); + virtual int stopMPP(void *handle); + virtual void destroyMPP(void *handle); + virtual int setCSCProperty(void *handle, unsigned int eqAuto, unsigned int fullRange, unsigned int colorspace); + virtual int freeMPP(void *handle); + virtual int setInputCrop(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst); + virtual bool isSrcCropAligned(hwc_layer_1_t &layer, bool rotation); +}; + +size_t visibleWidth(ExynosMPP *processor, hwc_layer_1_t &layer, int format, + int xres); + +class BufferFreeThread: public android::Thread { + public: + BufferFreeThread(ExynosMPP *exynosMPP): mRunning(false) {mExynosMPP = exynosMPP;}; + virtual bool threadLoop(); + bool mRunning; + android::Condition mCondition; + private: + ExynosMPP *mExynosMPP; +}; + +#endif diff --git a/libhwcutils/ExynosMPPv2.cpp b/libhwcutils/ExynosMPPv2.cpp new file mode 100644 index 0000000..872b6a9 --- /dev/null +++ b/libhwcutils/ExynosMPPv2.cpp @@ -0,0 +1,2001 @@ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS + +#include +#include "ExynosMPPv2.h" +#include "ExynosHWCUtils.h" +#ifdef USES_VIRTUAL_DISPLAY +#include "ExynosVirtualDisplay.h" +#endif +#include "ExynosHWCDebug.h" + +int ExynosMPP::mainDisplayWidth = 0; +size_t visibleWidth(ExynosMPP *processor, hwc_layer_1_t &layer, int format, + int xres) +{ + int bpp; + if (processor->isProcessingRequired(layer, format) && format != HAL_PIXEL_FORMAT_RGB_565) + bpp = 32; + else + bpp = formatToBpp(format); + int left = max(layer.displayFrame.left, 0); + int right = min(layer.displayFrame.right, 
xres); + + return (right - left) * bpp / 8; +} + +void ExynosMPP::initMPP() +{ + mMPPHandle = NULL; + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + for (uint32_t i = 0; i < NUM_MPP_DST_BUFS; i++) { + mDstBuffers[i] = NULL; + mMidBuffers[i] = NULL; + mDstBufFence[i] = -1; + mMidBufFence[i] = -1; + } + mCurrentBuf = 0; + mLastMPPLayerHandle= 0; + mS3DMode = 0; + mppFact = NULL; + libmpp = NULL; + mPreAssignedDisplay = NULL; + mDoubleOperation = false; + mCanRotate = (mType != MPP_VG); + mCanBlend = (mType != MPP_VG); + mAllocDevice = NULL; + mNumAvailableDstBuffers = NUM_MPP_DST_BUFS; + mBufferType = MPP_BUFFER_NORMAL; + mSMemFd = -1; + if (mType == MPP_MSC) { + mSMemFd = open(SMEM_PATH, O_RDWR); + if (mSMemFd < 0) + ALOGE("Fail to open smem_fd %s, error(%d)", SMEM_PATH, mSMemFd); + else + ALOGI("Open %s", SMEM_PATH); + } + + mCanBeUsed = true; + + switch (mType) { + case MPP_VG: + mName = "VG"; + break; + case MPP_VGR: + mName = "VGR"; + break; + case MPP_VPP_G: + mName = "G"; + break; + case MPP_MSC: + mName = "MSC"; + break; + default: + mName = "-"; + break; + } + + mAllocatedBufferNum = 0; + mBufferFreeThread = NULL; + mAllocatedMidBufferNum = 0; + mMidRealloc = false; + mDstRealloc = false; +} + +ExynosMPP::ExynosMPP() +{ + mDisplay = NULL; + mType = 0; + mIndex = 0; + initMPP(); +} + +ExynosMPP::ExynosMPP(ExynosDisplay *display, int gscIndex) +{ + mDisplay = display; + mType = AVAILABLE_GSC_UNITS[gscIndex]; + mIndex = gscIndex; + initMPP(); +} + +ExynosMPP::ExynosMPP(ExynosDisplay *display, unsigned int mppType, unsigned int mppIndex) +{ + mDisplay = display; + mType = mppType; + mIndex = mppIndex; + initMPP(); +} + +ExynosMPP::~ExynosMPP() +{ + if (mBufferFreeThread != NULL) { + mBufferFreeThread->mRunning = false; + mBufferFreeThread->requestExitAndWait(); + delete mBufferFreeThread; + } + if (mSMemFd >= 0) + close(mSMemFd); + mSMemFd = -1; +} + +const android::String8& ExynosMPP::getName() const +{ + return mName; +} + +bool ExynosMPP::isSrcConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return isDstConfigChanged(c1, c2) || + c1.fw != c2.fw || + c1.fh != c2.fh; +} + +bool ExynosMPP::isFormatSupportedByMPP(int format) +{ + if (mType == MPP_VPP_G) { + if (!isFormatRgb(format)) + return false; + } + + switch (format) { + case HAL_PIXEL_FORMAT_BGRA_8888: + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_RGB_565: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + return true; + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + if (mType == MPP_MSC) + return true; + else + return false; + + default: + return false; + } +} + +bool ExynosMPP::isCSCSupportedByMPP(int src_format, int dst_format, uint32_t dataSpace) +{ + return true; +} + +bool ExynosMPP::formatRequiresMPP(int format) +{ + return isFormatSupportedByMPP(format) && !isFormatRgb(format); +} + +int ExynosMPP::isProcessingSupported(hwc_layer_1_t &layer, int dst_format) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (isCompressed(layer)) { + if (mType != MPP_VGR) + return -eMPPUnsupportedCompression; + if (layer.transform) + return 
-eMPPUnsupportedRotation; + } + + if ((mType == MPP_VG) || (mType == MPP_VGR)) + dst_format = INTERNAL_MPP_DST_FORMAT; + + int maxWidth = getMaxWidth(layer); + int maxHeight = getMaxHeight(layer); + int minWidth = getMinWidth(layer); + int minHeight = getMinHeight(layer); + int srcWidthAlign = getSrcWidthAlign(layer); + int srcHeightAlign = getSrcHeightAlign(layer); + + int maxCropWidth = getMaxCropWidth(layer); + int maxCropHeight = getMaxCropHeight(layer); + int minCropWidth = getMinCropWidth(layer); + int minCropHeight = getMinCropHeight(layer); + int cropWidthAlign = getCropWidthAlign(layer); + int cropHeightAlign = getCropHeightAlign(layer); + int srcXOffsetAlign = getSrcXOffsetAlign(layer); + int srcYOffsetAlign = getSrcYOffsetAlign(layer); + + int maxDstWidth = getMaxDstWidth(dst_format); + int maxDstHeight = getMaxDstHeight(dst_format); + int minDstWidth = getMinDstWidth(dst_format); + int minDstHeight = getMinDstHeight(dst_format); + int dstWidthAlign = getDstWidthAlign(dst_format); + int dstHeightAlign = getDstHeightAlign(dst_format); + + int maxDownscale = getMaxDownscale(layer); + if (((mType == MPP_VG) || (mType == MPP_VGR)) && + isRotated(layer) && isFormatRgb(handle->format)) + maxDownscale = 1; + + int maxUpscale = getMaxUpscale(); + + bool isPerpendicular = !!(layer.transform & HAL_TRANSFORM_ROT_90); + + int srcW = WIDTH(layer.sourceCropf), srcH = HEIGHT(layer.sourceCropf); + if (isFloat(layer.sourceCropf.right - layer.sourceCropf.left)) + srcW = ceilf(layer.sourceCropf.right - layer.sourceCropf.left); + if (isFloat(layer.sourceCropf.bottom - layer.sourceCropf.top)) + srcH = ceilf(layer.sourceCropf.bottom - layer.sourceCropf.top); + + int xOffset = layer.sourceCropf.left; + int yOffset = layer.sourceCropf.top; + int dstW, dstH; + if (isPerpendicular) { + dstW = HEIGHT(layer.displayFrame); + dstH = WIDTH(layer.displayFrame); + } else { + dstW = WIDTH(layer.displayFrame); + dstH = HEIGHT(layer.displayFrame); + } + + if (isFormatYUV420(handle->format)) { + if ((mType == MPP_VG) || (mType == MPP_VGR)) { + if (xOffset % srcXOffsetAlign != 0) { + xOffset = ALIGN(xOffset, srcXOffsetAlign); + srcW = (int)layer.sourceCropf.right - xOffset; + } + if (yOffset % srcYOffsetAlign != 0) { + yOffset = ALIGN(yOffset, srcYOffsetAlign); + srcH = (int)layer.sourceCropf.bottom - yOffset; + } + } + if (srcW % cropWidthAlign != 0) + srcW = ALIGN_DOWN(srcW, cropWidthAlign); + if (srcH % cropHeightAlign != 0) + srcH = ALIGN_DOWN(srcH, cropHeightAlign); + } + + if (mType == MPP_MSC) { + bool needDoubleOperation = false; + if (getDrmMode(handle->flags) != NO_DRM) { + needDoubleOperation = (dstW > srcW * maxUpscale) || (dstH > srcH * maxUpscale); + if (needDoubleOperation) + maxUpscale = maxUpscale * maxUpscale; + + needDoubleOperation = (srcW > dstW * maxDownscale) || (srcH > dstH * maxDownscale); + if (needDoubleOperation) + maxDownscale = maxDownscale * maxDownscale; + } + } + + if (!isFormatSupportedByMPP(handle->format)) + return -eMPPUnsupportedFormat; + else if (!isFormatSupportedByMPP(dst_format)) + return -eMPPUnsupportedFormat; + else if (!isCSCSupportedByMPP(handle->format, dst_format, layer.dataSpace)) + return -eMPPUnsupportedCSC; + else if (!mCanBlend && + (handle->format == HAL_PIXEL_FORMAT_RGBA_8888 || handle->format == HAL_PIXEL_FORMAT_BGRA_8888) && + layer.blending != HWC_BLENDING_NONE) + return -eMPPUnsupportedBlending; + else if (!mCanRotate && layer.transform) + return -eMPPUnsupportedRotation; + else if (srcW < minWidth) + return -eMPPExeedMinSrcWidth; + else if (srcH < 
minHeight) + return -eMPPExeedMinSrcHeight; + else if (srcW < minCropWidth) + return -eMPPExeedSrcWCropMin; + else if (srcH < minCropHeight) + return -eMPPExeedSrcHCropMin; + else if (dstW > maxDstWidth) + return -eMPPExeedMaxDstWidth; + else if (dstW > maxDstHeight) + return -eMPPExeedMaxDstHeight; + else if (dstW < minDstWidth) + return -eMPPExeedMinDstWidth; + else if (dstW < minDstHeight) + return -eMPPExeedMinDstWidth; + else if ((dstW % dstWidthAlign != 0) || (dstH % dstHeightAlign != 0)) + return -eMPPNotAlignedDstSize; + else if (srcW > dstW * maxDownscale) + return -eMPPExeedMaxDownScale; + else if (dstW > srcW * maxUpscale) + return -eMPPExeedMaxUpScale; + else if (srcH > dstH * maxDownscale) + return -eMPPExeedMaxDownScale; + else if (dstH > srcH * maxUpscale) + return -eMPPExeedMaxUpScale; + + if (getDrmMode(handle->flags) == NO_DRM) { + if (handle->stride > maxWidth) + return -eMPPExceedHStrideMaximum; + else if (handle->vstride > maxHeight) + return -eMPPExceedVStrideMaximum; + else if (handle->stride % srcWidthAlign != 0) + return -eMPPNotAlignedHStride; + else if (handle->vstride % srcHeightAlign != 0) + return -eMPPNotAlignedVStride; + else if (srcW > maxCropWidth) + return -eMPPExeedSrcWCropMax; + else if (srcH > maxCropHeight) + return -eMPPExeedSrcHCropMax; + else if ((srcW % cropWidthAlign != 0) || (srcH % cropHeightAlign != 0)) + return -eMPPNotAlignedCrop; + else if ((xOffset % srcXOffsetAlign != 0) || (yOffset % srcYOffsetAlign != 0)) + return -eMPPNotAlignedOffset; + } + + return 1; +} + +bool ExynosMPP::isProcessingRequired(hwc_layer_1_t &layer, int format) +{ + return formatRequiresMPP(format) || isScaled(layer) || isTransformed(layer) || isCompressed(layer); +} + +void ExynosMPP::adjustSourceImage(hwc_layer_1_t &layer, exynos_mpp_img &srcImg) +{ + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer.handle); + hwc_frect_t tmpFCrop = layer.sourceCropf; + int cropWidthAlign = getCropWidthAlign(layer); + int cropHeightAlign = getCropHeightAlign(layer); + int srcXOffsetAlign = getSrcXOffsetAlign(layer); + int srcYOffsetAlign = getSrcYOffsetAlign(layer); + + float srcW = layer.sourceCropf.right - layer.sourceCropf.left; + float srcH = layer.sourceCropf.bottom - layer.sourceCropf.top; + + srcImg.fw = srcHandle->stride; + srcImg.fh = srcHandle->vstride; + if (srcImg.fw > (unsigned int)getMaxWidth(layer)) + srcImg.fw = (unsigned int)getMaxWidth(layer); + if (srcImg.fh > (unsigned int)getMaxHeight(layer)) + srcImg.fh = (unsigned int)getMaxHeight(layer); + + srcImg.fw = ALIGN((unsigned int)srcImg.fw, getSrcWidthAlign(layer)); + srcImg.fh = ALIGN((unsigned int)srcImg.fh, getSrcHeightAlign(layer)); + + if (isFloat(tmpFCrop.left) && + (((int)tmpFCrop.left % srcXOffsetAlign) == 0)) { + srcImg.x = setFloatValue(tmpFCrop.left); + } else { + if (ALIGN_DOWN((unsigned int)tmpFCrop.left, srcXOffsetAlign) > 0) + tmpFCrop.left = ALIGN_DOWN((unsigned int)tmpFCrop.left, srcXOffsetAlign); + else + tmpFCrop.left = 0; + srcImg.x = tmpFCrop.left; + } + + if (isFloat(tmpFCrop.top) && + (((int)tmpFCrop.top % srcYOffsetAlign) == 0)) { + srcImg.y = setFloatValue(tmpFCrop.top); + } else { + if (ALIGN_DOWN((unsigned int)tmpFCrop.top, srcYOffsetAlign) > 0) + tmpFCrop.top = ALIGN_DOWN((unsigned int)tmpFCrop.top, srcYOffsetAlign); + else + tmpFCrop.top = 0; + srcImg.y = tmpFCrop.top; + } + + srcW = tmpFCrop.right - tmpFCrop.left; + srcH = tmpFCrop.bottom - tmpFCrop.top; + + if (tmpFCrop.left + srcW > srcImg.fw) + srcW = srcImg.fw - tmpFCrop.left; + if (tmpFCrop.top + srcH > srcImg.fh) 
+ srcH = srcImg.fh - tmpFCrop.top; + + if (isFloat(srcW) && ((int)(ceilf(srcW)) % cropWidthAlign == 0)) { + srcImg.w = setFloatValue(srcW); + } else { + if (ALIGN_DOWN((unsigned int)srcW, cropWidthAlign) > 0) + srcImg.w = ALIGN_DOWN((unsigned int)srcW, cropWidthAlign); + else + srcImg.w = 0; + } + + if (isFloat(srcH) && ((int)(ceilf(srcH)) % cropHeightAlign == 0)) { + srcImg.h = setFloatValue(srcH); + } else { + if (ALIGN_DOWN((unsigned int)srcH, cropHeightAlign) > 0) + srcImg.h = ALIGN_DOWN((unsigned int)srcH, cropHeightAlign); + else + srcImg.h = 0; + } +} + +void ExynosMPP::setupSrc(exynos_mpp_img &srcImg, hwc_layer_1_t &layer) +{ + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer.handle); + + srcImg.fw = srcHandle->stride; + srcImg.fh = srcHandle->vstride; + + if (srcHandle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV) { + if (srcHandle->fd2 >= 0) { + void *metaData = NULL; + int interlacedType = -1; + metaData = mmap(0, 64, PROT_READ|PROT_WRITE, MAP_SHARED, srcHandle->fd2, 0); + if (metaData) + interlacedType = *(int *)metaData; + else + interlacedType = -1; + + if (interlacedType == V4L2_FIELD_INTERLACED_TB || + interlacedType == V4L2_FIELD_INTERLACED_BT) { + srcImg.fw = srcHandle->stride * 2; + srcImg.fh = srcHandle->vstride / 2; + } + if (metaData) + munmap(metaData, 64); + } + } + + if (srcImg.fw > (unsigned int)getMaxWidth(layer)) + srcImg.fw = (unsigned int)getMaxWidth(layer); + if (srcImg.fh > (unsigned int)getMaxHeight(layer)) + srcImg.fh = (unsigned int)getMaxHeight(layer); + srcImg.fw = ALIGN((unsigned int)srcImg.fw, getSrcWidthAlign(layer)); + srcImg.fh = ALIGN((unsigned int)srcImg.fh, getSrcHeightAlign(layer)); + + if (isSrcCropFloat(layer.sourceCropf)) + { + adjustSourceImage(layer, srcImg); + } else { + srcImg.x = ALIGN((unsigned int)layer.sourceCropf.left, getSrcXOffsetAlign(layer)); + srcImg.y = ALIGN((unsigned int)layer.sourceCropf.top, getSrcYOffsetAlign(layer)); + srcImg.w = WIDTH(layer.sourceCropf) - (srcImg.x - (uint32_t)layer.sourceCropf.left); + srcImg.h = HEIGHT(layer.sourceCropf) - (srcImg.y - (uint32_t)layer.sourceCropf.top); + if (srcImg.x + srcImg.w > srcImg.fw) + srcImg.w = srcImg.fw - srcImg.x; + if (srcImg.y + srcImg.h > srcImg.fh) + srcImg.h = srcImg.fh - srcImg.y; + srcImg.w = ALIGN_DOWN(srcImg.w, getCropWidthAlign(layer)); + srcImg.h = ALIGN_DOWN(srcImg.h, getCropHeightAlign(layer)); + } + + srcImg.yaddr = srcHandle->fd; + if (mS3DMode == S3D_SBS) + srcImg.w /= 2; + if (mS3DMode == S3D_TB) + srcImg.h /= 2; + if (isFormatYCrCb(srcHandle->format)) { + srcImg.uaddr = srcHandle->fd2; + srcImg.vaddr = srcHandle->fd1; + } else { + srcImg.uaddr = srcHandle->fd1; + srcImg.vaddr = srcHandle->fd2; + } + if (srcHandle->format != HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL) + srcImg.format = srcHandle->format; + else + srcImg.format = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + + if (layer.blending == HWC_BLENDING_COVERAGE) + srcImg.pre_multi = false; + else + srcImg.pre_multi = true; + srcImg.drmMode = !!(getDrmMode(srcHandle->flags) == SECURE_DRM); + srcImg.acquireFenceFd = layer.acquireFenceFd; + srcImg.mem_type = MPP_MEM_DMABUF; +} + +bool ExynosMPP::setupDoubleOperation(exynos_mpp_img &srcImg, exynos_mpp_img &midImg, hwc_layer_1_t &layer) +{ + bool needDoubleOperation = false; + bool needUnscaledCSC = false; + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer.handle); + const int maxUpscale = getMaxUpscale(); + const int maxDownscale = getMaxDownscale(layer); + bool isPerpendicular = !!(layer.transform & 
HAL_TRANSFORM_ROT_90); + int srcW = WIDTH(layer.sourceCropf), srcH = HEIGHT(layer.sourceCropf); + int dstW, dstH; + if (isPerpendicular) { + dstW = HEIGHT(layer.displayFrame); + dstH = WIDTH(layer.displayFrame); + } else { + dstW = WIDTH(layer.displayFrame); + dstH = HEIGHT(layer.displayFrame); + } + if ((mType == MPP_MSC) && (getDrmMode(srcHandle->flags) != NO_DRM)) + needDoubleOperation = (dstW > srcW * maxUpscale) || (dstH > srcH * maxUpscale) + || (srcW > dstW * maxDownscale) || (srcH > dstH * maxDownscale); + + if ((mType == MPP_MSC) && (srcHandle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED)) { + needDoubleOperation = true; + needUnscaledCSC = true; + } + + if (needDoubleOperation) { + midImg.x = 0; + midImg.y = 0; + + int midW = 0, midH = 0; + // Use YUV420 to save bandwidth + // Format is either YUV420 or RGB, no conversion needed if already YUV420 + // Will RGB layers ever need double scaling? If so, is the saved BW worth the extra CSC? + int midFormat = isFormatRgb(srcHandle->format) ? HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M : srcHandle->format; + + if (needUnscaledCSC) { + midImg.w = srcW; + midImg.h = srcH; + } else { + if (isPerpendicular) { + midW = HEIGHT(layer.displayFrame); + midH = WIDTH(layer.displayFrame); + } else { + midW = WIDTH(layer.displayFrame); + midH = HEIGHT(layer.displayFrame); + } + + if (WIDTH(layer.sourceCropf) * maxUpscale < midW) + midImg.w = ALIGN(midW / maxUpscale, getDstWidthAlign(midFormat)); + else if (WIDTH(layer.sourceCropf) > midW * maxDownscale) + midImg.w = ALIGN((int)ceilf((double)WIDTH(layer.sourceCropf) / (double)maxDownscale), getDstWidthAlign(midFormat)); + else + midImg.w = midW; + + if (HEIGHT(layer.sourceCropf) * maxUpscale < midH) + midImg.h = ALIGN(midH / maxUpscale, getDstHeightAlign(midFormat)); + else if (HEIGHT(layer.sourceCropf) > midH * maxDownscale) + midImg.h = ALIGN((int)ceilf((double)HEIGHT(layer.sourceCropf) / (double)maxDownscale), getDstHeightAlign(midFormat)); + else + midImg.h = midH; + } + if (layer.blending == HWC_BLENDING_COVERAGE) + midImg.pre_multi = false; + else + midImg.pre_multi = true; + midImg.drmMode = srcImg.drmMode; + midImg.format = midFormat; + midImg.mem_type = MPP_MEM_DMABUF; + midImg.narrowRgb = isNarrowRgb(midFormat, layer.dataSpace); + } + + return needDoubleOperation; +} + +void ExynosMPP::setupMid(exynos_mpp_img &midImg) +{ + buffer_handle_t &midBuf = mMidBuffers[mCurrentBuf]; + private_handle_t *midHandle = private_handle_t::dynamicCast(midBuf); + + midImg.acquireFenceFd = mMidBufFence[mCurrentBuf]; + mMidBufFence[mCurrentBuf] = -1; + midImg.fw = midHandle->stride; + midImg.fh = midHandle->vstride; + midImg.yaddr = midHandle->fd; + if (isFormatYCrCb(midHandle->format)) { + midImg.uaddr = midHandle->fd2; + midImg.vaddr = midHandle->fd1; + } else { + midImg.uaddr = midHandle->fd1; + midImg.vaddr = midHandle->fd2; + } +} + +void ExynosMPP::setupDst(exynos_mpp_img &srcImg, exynos_mpp_img &dstImg, + int dstFormat, hwc_layer_1_t &layer) +{ + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer.handle); + dstImg.x = 0; + dstImg.y = 0; + dstImg.w = WIDTH(layer.displayFrame); + dstImg.h = HEIGHT(layer.displayFrame); + dstImg.rot = layer.transform; + if (layer.blending == HWC_BLENDING_COVERAGE) + dstImg.pre_multi = false; + else + dstImg.pre_multi = true; + dstImg.drmMode = srcImg.drmMode; + dstImg.format = dstFormat; + if (dstFormat == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV) + dstImg.format = HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M; + dstImg.mem_type = MPP_MEM_DMABUF; + 
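/* narrow- vs full-range RGB for the destination is derived from the source format and the layer's dataspace */ +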
dstImg.narrowRgb = isNarrowRgb(srcHandle->format, layer.dataSpace); +} + +void ExynosMPP::setupBlendCfg(exynos_mpp_img __unused &srcImg, exynos_mpp_img &dstImg, + hwc_layer_1_t __unused &layer1, hwc_layer_1_t &layer2, struct SrcBlendInfo &srcBlendInfo) +{ + private_handle_t *src2Handle = private_handle_t::dynamicCast(layer2.handle); + + srcBlendInfo.blop = SRC_BL_OP_DST_OVER; + srcBlendInfo.srcblendfmt = src2Handle->format; + srcBlendInfo.srcblendhpos = dstImg.x; + srcBlendInfo.srcblendvpos = dstImg.y; + srcBlendInfo.srcblendpremulti = 0; + srcBlendInfo.srcblendstride = src2Handle->stride; + srcBlendInfo.srcblendwidth = dstImg.w; + srcBlendInfo.srcblendheight = dstImg.h; + srcBlendInfo.globalalpha.enable = false; + srcBlendInfo.cscspec.enable = true; + srcBlendInfo.cscspec.space = COLORSPACE_REC709; + srcBlendInfo.cscspec.wide = 0; +} + +size_t ExynosMPP::getBufferType(uint32_t usage) +{ + if ((getDrmMode(usage) == SECURE_DRM) && (usage & GRALLOC_USAGE_VIDEO_EXT)) + return MPP_BUFFER_VIDEO_EXT; + else if (getDrmMode(usage) == SECURE_DRM) + return MPP_BUFFER_SECURE_DRM; + else if (getDrmMode(usage) == NORMAL_DRM) + return MPP_BUFFER_NORMAL_DRM; + else + return MPP_BUFFER_NORMAL; +} + +int ExynosMPP::getBufferUsage(private_handle_t *srcHandle) +{ + int usage = GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | + GRALLOC_USAGE_NOZEROED | + GRALLOC_USAGE_HW_COMPOSER; + + if (getDrmMode(srcHandle->flags) == SECURE_DRM) { + usage |= GRALLOC_USAGE_PROTECTED; + usage &= ~GRALLOC_USAGE_PRIVATE_NONSECURE; + } else if (getDrmMode(srcHandle->flags) == NORMAL_DRM) { + usage |= GRALLOC_USAGE_PROTECTED; + usage |= GRALLOC_USAGE_PRIVATE_NONSECURE; + } + if (srcHandle->flags & GRALLOC_USAGE_VIDEO_EXT) + usage |= GRALLOC_USAGE_VIDEO_EXT; + + /* HACK: for distinguishing FIMD_VIDEO_region */ + usage |= (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER); + + return usage; +} + +int ExynosMPP::reallocateBuffers(private_handle_t *srcHandle, exynos_mpp_img &dstImg, exynos_mpp_img &midImg, bool needDoubleOperation, uint32_t index) +{ + ATRACE_CALL(); + alloc_device_t* allocDevice = mAllocDevice; + int ret = 0; + int dstStride; + if (mDisplay == NULL) { + ALOGE("%s is called without assigning to the display", __func__); + return -1; + } + + int usage = getBufferUsage(srcHandle); + size_t bufferType = getBufferType(usage); + + int w, h; + { + int dstWidthAlign = getDstWidthAlign(dstImg.format); + int dstHeightAlign = getDstHeightAlign(dstImg.format); + w = ALIGN(mDisplay->mXres, dstWidthAlign); + h = ALIGN(mDisplay->mYres, dstHeightAlign); + } + + if ((mBufferType == MPP_BUFFER_VIDEO_EXT) && + (bufferType != MPP_BUFFER_VIDEO_EXT) && (mSMemFd > 0)) { + int ret = 0; + int protection = 0; + ret = ioctl(mSMemFd, SECMEM_IOC_SET_VIDEO_EXT_PROC, &protection); + if (ret < 0) { + ALOGE("%s:: Unprotection failed, ret(%d)", __func__, ret); + return false; + } + ALOGI("%s:: VIDEO_EXT is unprotected", __func__); + } + + if (mAllocatedBufferNum < mNumAvailableDstBuffers) { + if (mDstBuffers[index]) { + android::Mutex::Autolock lock(mMutex); + if (mBufferType != MPP_BUFFER_NORMAL) { + allocDevice->free(allocDevice, mDstBuffers[index]); + } else { + deleteBufferInfo buffInfo; + buffInfo.buffer = mDstBuffers[index]; + buffInfo.bufFence = mDstBufFence[index]; + mFreedBuffers.push_back(buffInfo); + mDstBufFence[index] = -1; + } + mDstBuffers[index] = NULL; + } + if (mDstBufFence[index] >= 0) + close(mDstBufFence[index]); + mDstBufFence[index] = -1; + } + + if (needDoubleOperation && (mAllocatedMidBufferNum < 
mNumAvailableDstBuffers)) { + if (mMidBuffers[index] != NULL) { + android::Mutex::Autolock lock(mMutex); + if (mBufferType != MPP_BUFFER_NORMAL) { + allocDevice->free(allocDevice, mMidBuffers[index]); + } else { + deleteBufferInfo buffInfo; + buffInfo.buffer = mMidBuffers[index]; + buffInfo.bufFence = mMidBufFence[index]; + mFreedBuffers.push_back(buffInfo); + mMidBufFence[index] = -1; + } + mMidBuffers[index] = NULL; + } + if (mMidBufFence[index] >= 0) + close(mMidBufFence[index]); + mMidBufFence[index] = -1; + } + + int format = dstImg.format; + if (mAllocatedBufferNum < mNumAvailableDstBuffers) { + ret = allocDevice->alloc(allocDevice, w, h, + format, usage, &mDstBuffers[index], + &dstStride); + if (ret < 0) { + ALOGE("failed to allocate destination buffer(%dx%d): %s", w, h, + strerror(-ret)); + return ret; + } + HDEBUGLOGD(eDebugMPP, "%d dst buffer is allocated, mAllocatedBufferNum(%d)", mCurrentBuf, mAllocatedBufferNum); + mAllocatedBufferNum++; + } + + if (needDoubleOperation && (mAllocatedMidBufferNum < mNumAvailableDstBuffers)) { + ret = allocDevice->alloc(allocDevice, midImg.w, midImg.h, midImg.format, + usage, &mMidBuffers[index], &dstStride); + if (ret < 0) { + ALOGE("failed to allocate intermediate buffer(%dx%d): %s", + midImg.w, midImg.h, strerror(-ret)); + return ret; + } + HDEBUGLOGD(eDebugMPP, "%d mid buffer is allocated, mAllocatedBufferNum(%d)", mCurrentBuf, mAllocatedMidBufferNum); + mAllocatedMidBufferNum++; + } + + if ((mBufferType != MPP_BUFFER_VIDEO_EXT) && + (bufferType == MPP_BUFFER_VIDEO_EXT) && (mSMemFd > 0)) { + int ret = 0; + int protection = 1; + ret = ioctl(mSMemFd, SECMEM_IOC_SET_VIDEO_EXT_PROC, &protection); + if (ret < 0) { + ALOGE("Protection failed, ret(%d)", ret); + return false; + } + ALOGI("%s:: VIDEO_EXT is protected", __func__); + } + + + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } + + return ret; +} + +void ExynosMPP::reusePreviousFrame(hwc_layer_1_t &layer) +{ + HDEBUGLOGD(eDebugMPP, "[USE] MPP_SKIP_DUPLICATE_FRAME_PROCESSING\n"); + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + + layer.releaseFenceFd = -1; + layer.acquireFenceFd = -1; + mDstConfig.releaseFenceFd = -1; + + mCurrentBuf = (mCurrentBuf + mNumAvailableDstBuffers - 1) % mNumAvailableDstBuffers; +} + +void ExynosMPP::freeBuffersCloseFences() +{ + alloc_device_t* allocDevice = mAllocDevice; + if ((mBufferType == MPP_BUFFER_VIDEO_EXT) && (mSMemFd > 0)) { + int ret = 0; + int protection = 0; + ret = ioctl(mSMemFd, SECMEM_IOC_SET_VIDEO_EXT_PROC, &protection); + if (ret < 0) { + ALOGE("%s:: Deprotection failed, ret(%d)", __func__, ret); + } + ALOGI("%s:: VIDEO_EXT is unprotected", __func__); + } + + for (size_t i = 0; i < NUM_MPP_DST_BUFS; i++) { + if (mDstBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + if (mBufferType != MPP_BUFFER_NORMAL) + allocDevice->free(allocDevice, mDstBuffers[i]); + else { + deleteBufferInfo buffInfo; + buffInfo.buffer = mDstBuffers[i]; + buffInfo.bufFence = mDstBufFence[i]; + mFreedBuffers.push_back(buffInfo); + mDstBufFence[i] = -1; + } + mDstBuffers[i] = NULL; + } + if (mDstBufFence[i] >= 0) + close(mDstBufFence[i]); + mDstBufFence[i] = -1; + + if (mMidBuffers[i]) { + android::Mutex::Autolock lock(mMutex); + if (mBufferType != MPP_BUFFER_NORMAL) + allocDevice->free(allocDevice, mMidBuffers[i]); + else { + deleteBufferInfo buffInfo; + buffInfo.buffer = mMidBuffers[i]; + buffInfo.bufFence = mMidBufFence[i]; + mFreedBuffers.push_back(buffInfo); + mMidBufFence[i] = -1; + } + mMidBuffers[i] = NULL; 
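+ /* when the handle is queued for the free thread, its fence moves into buffInfo and mMidBufFence[i] is cleared, so the close() just below only runs for directly freed buffers */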
+ } + if (mMidBufFence[i] >= 0) + close(mMidBufFence[i]); + mMidBufFence[i] = -1; + } + + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } +} + +int ExynosMPP::processM2M(hwc_layer_1_t &layer, int dstFormat, hwc_frect_t *sourceCrop, bool __unused needBufferAlloc) +{ + ATRACE_CALL(); + HDEBUGLOGD(eDebugMPP, "configuring mType(%u) mIndex(%u) for memory-to-memory", mType, mIndex); + + alloc_device_t* allocDevice = mAllocDevice; + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer.handle); + buffer_handle_t dstBuf; + private_handle_t *dstHandle; + buffer_handle_t midBuf; + private_handle_t *midHandle; + int ret = 0; + int dstAlign; + unsigned int colorspace = halDataSpaceToV4L2ColorSpace(layer.dataSpace); + + exynos_mpp_img srcImg, dstImg; + memset(&srcImg, 0, sizeof(srcImg)); + memset(&dstImg, 0, sizeof(dstImg)); + exynos_mpp_img midImg; + memset(&midImg, 0, sizeof(midImg)); + + setupSrc(srcImg, layer); + HDEBUGLOGD(eDebugMPP, "source configuration:"); + dumpMPPImage(eDebugMPP, srcImg); + + setupDst(srcImg, dstImg, dstFormat, layer); + HDEBUGLOGD(eDebugMPP, "destination configuration:"); + dumpMPPImage(eDebugMPP, dstImg); + + /* mMPPHandle should be created again with proper bufferType */ + if ((mMPPHandle != NULL) && (mBufferType != getBufferType(srcHandle->flags))) { + stopMPP(mMPPHandle); + destroyMPP(mMPPHandle); + mMPPHandle = NULL; + } + + bool needDoubleOperation = setupDoubleOperation(srcImg, midImg, layer); + + bool reconfigure = isSrcConfigChanged(srcImg, mSrcConfig) || + isDstConfigChanged(dstImg, mDstConfig); + + mDstRealloc = mDstConfig.fw <= 0 || + mDstConfig.format != (uint32_t)dstFormat || + dstImg.drmMode != mDstConfig.drmMode || + mBufferType != getBufferType(srcHandle->flags); + + mMidRealloc = needDoubleOperation && + (needDoubleOperation != mDoubleOperation || + dstImg.drmMode != mDstConfig.drmMode || + mBufferType != getBufferType(srcHandle->flags) || + ((dstImg.w != mDstConfig.w) || (dstImg.h != mDstConfig.h))); + +#ifdef USES_VIRTUAL_DISPLAY + if (!needBufferAlloc) { + dstImg.x = mDisplay->mHwc->mVirtualDisplayRect.left; + dstImg.y = mDisplay->mHwc->mVirtualDisplayRect.top; + dstImg.w = mDisplay->mHwc->mVirtualDisplayRect.width; + dstImg.h = mDisplay->mHwc->mVirtualDisplayRect.height; + /* WFD can use MSC for blending or scaling. 
recofigure is required on WFD */ + reconfigure = true; + } +#endif + +#ifdef USES_VIRTUAL_DISPLAY + if (needBufferAlloc) { +#endif + if (reconfigure && (mMidRealloc || mDstRealloc)) { + if (getDrmMode(srcHandle->flags) != NO_DRM && + mDisplay != NULL && mDisplay->mType == EXYNOS_EXTERNAL_DISPLAY) + mNumAvailableDstBuffers = NUM_DRM_MPP_DST_BUFS; + else + mNumAvailableDstBuffers = NUM_MPP_DST_BUFS; + mBufferType = getBufferType(srcHandle->flags); + + if (mDstRealloc) { + mCurrentBuf = 0; + mLastMPPLayerHandle = 0; + mAllocatedBufferNum = 0; + } + if (mMidRealloc) + mAllocatedMidBufferNum = 0; + } + + if (!reconfigure && (mLastMPPLayerHandle == (intptr_t)layer.handle)) { + reusePreviousFrame(layer); + return 0; + } else { + mLastMPPLayerHandle = (intptr_t)layer.handle; + } + + if ((ret = reallocateBuffers(srcHandle, dstImg, midImg, needDoubleOperation, mCurrentBuf)) < 0) + goto err_alloc; + +#ifdef USES_VIRTUAL_DISPLAY + } else { + if (reconfigure && needDoubleOperation) { + int dstStride; + if (mMidBuffers[0] != NULL) { + android::Mutex::Autolock lock(mMutex); + deleteBufferInfo buffInfo; + buffInfo.buffer = mMidBuffers[0]; + buffInfo.bufFence = mMidBufFence[0]; + mFreedBuffers.push_back(buffInfo); + mBufferFreeThread->mCondition.signal(); + mMidBuffers[0] = NULL; + mMidBufFence[0] = -1; + } + if (mMidBufFence[0] >= 0) { + close(mMidBufFence[0]); + } + mMidBufFence[0] = -1; + + ret = allocDevice->alloc(allocDevice, midImg.w, midImg.h, + HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M, ((ExynosVirtualDisplay*)mDisplay)->mSinkUsage, &mMidBuffers[0], + &dstStride); + if (ret < 0) { + ALOGE("failed to allocate intermediate buffer(%dx%d): %s", midImg.w, midImg.h, + strerror(-ret)); + goto err_alloc; + } + } + } +#endif + + layer.acquireFenceFd = -1; + if (needDoubleOperation) { + setupMid(midImg); + + HDEBUGLOGD(eDebugMPP, "mid configuration:"); + dumpMPPImage(eDebugMPP, midImg); + } + + dstBuf = mDstBuffers[mCurrentBuf]; + dstHandle = private_handle_t::dynamicCast(dstBuf); + + dstImg.fw = dstHandle->stride; + dstImg.fh = dstHandle->vstride; + dstImg.yaddr = dstHandle->fd; + dstImg.uaddr = dstHandle->fd1; + dstImg.vaddr = dstHandle->fd2; + dstImg.acquireFenceFd = mDstBufFence[mCurrentBuf]; + dstImg.drmMode = !!(getDrmMode(dstHandle->flags) == SECURE_DRM); + mDstBufFence[mCurrentBuf] = -1; + + if ((int)dstImg.w != WIDTH(layer.displayFrame)) + HDEBUGLOGD(eDebugMPP, "padding %u x %u output to %u x %u and cropping to {%7.1f,%7.1f,%7.1f,%7.1f}", + WIDTH(layer.displayFrame), HEIGHT(layer.displayFrame), + dstImg.w, dstImg.h, sourceCrop->left, sourceCrop->top, + sourceCrop->right, sourceCrop->bottom); + + if (mMPPHandle) { + HDEBUGLOGD(eDebugMPP, "reusing open mType(%u) mIndex(%u)", mType, mIndex); + } else { + HDEBUGLOGD(eDebugMPP, "opening mType(%u) mIndex(%u), drm(%d)", mType, mIndex, (getDrmMode(srcHandle->flags) != NO_DRM)); + mMPPHandle = createMPP( + mType, GSC_M2M_MODE, GSC_DUMMY, (getDrmMode(srcHandle->flags) != NO_DRM) || (getDrmMode(dstHandle->flags) != NO_DRM)); + if (!mMPPHandle) { + ALOGE("failed to create gscaler handle"); + ret = -1; + goto err_alloc; + } + } + + if (!needDoubleOperation) + memcpy(&midImg, &dstImg, sizeof(exynos_mpp_img)); + + /* src -> mid or src->dest */ + if (reconfigure || needDoubleOperation) { + ret = stopMPP(mMPPHandle); + if (ret < 0) { + ALOGE("failed to stop mType(%u) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + ret = setCSCProperty(mMPPHandle, 0, !midImg.narrowRgb, colorspace); + ret = configMPP(mMPPHandle, &srcImg, &midImg); + if (ret < 0) { + 
ALOGE("failed to configure mType(%u) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + } + + ret = runMPP(mMPPHandle, &srcImg, &midImg); + if (ret < 0) { + ALOGE("failed to run mType(%u) mIndex(%d)", mType, mIndex); + goto err_gsc_config; + } + + /* mid -> dst */ + if (needDoubleOperation) { + ret = stopMPP(mMPPHandle); + if (ret < 0) { + ALOGE("failed to stop mType(%d) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + + midImg.acquireFenceFd = midImg.releaseFenceFd; + ret = setCSCProperty(mMPPHandle, 0, !dstImg.narrowRgb, colorspace); + ret = configMPP(mMPPHandle, &midImg, &dstImg); + if (ret < 0) { + ALOGE("failed to configure mType(%u) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + + ret = runMPP(mMPPHandle, &midImg, &dstImg); + if (ret < 0) { + ALOGE("failed to run mType(%u) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + mMidBufFence[mCurrentBuf] = midImg.releaseFenceFd; + } + + mSrcConfig = srcImg; + mMidConfig = midImg; + + if (needDoubleOperation) { + mDstConfig = dstImg; + } else { + mDstConfig = midImg; + } + + mDoubleOperation = needDoubleOperation; + + layer.releaseFenceFd = srcImg.releaseFenceFd; + + return 0; + +err_gsc_config: + destroyMPP(mMPPHandle); + mMPPHandle = NULL; +err_alloc: + if (srcImg.acquireFenceFd >= 0) + close(srcImg.acquireFenceFd); +#ifdef USES_VIRTUAL_DISPLAY + if (needBufferAlloc) { +#endif + freeBuffersCloseFences(); +#ifdef USES_VIRTUAL_DISPLAY + } else { + if (mMidBuffers[0]) { + android::Mutex::Autolock lock(mMutex); + deleteBufferInfo buffInfo; + buffInfo.buffer = mMidBuffers[0]; + buffInfo.bufFence = mMidBufFence[0]; + mFreedBuffers.push_back(buffInfo); + mMidBuffers[0] = NULL; + mMidBufFence[0] = -1; + } + if (mMidBufFence[0] >= 0) { + close(mMidBufFence[0]); + } + mMidBufFence[0] = -1; + } +#endif + + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } + + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + mBufferType = MPP_BUFFER_NORMAL; + mAllocatedBufferNum = 0; + mAllocatedMidBufferNum = 0; + return ret; +} + +int ExynosMPP::processM2MWithB(hwc_layer_1_t &layer1, hwc_layer_1_t &layer2, int dstFormat, + hwc_frect_t __unused *sourceCrop) +{ + HDEBUGLOGD(eDebugMPP, "configuring mType(%u) mIndex(%u) for blending", mType, mIndex); + + alloc_device_t* allocDevice = mAllocDevice; + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer1.handle); + private_handle_t *src2Handle = private_handle_t::dynamicCast(layer2.handle); + buffer_handle_t dstBuf; + private_handle_t *dstHandle; + struct SrcBlendInfo srcblendinfo; + int ret = 0; + int dstAlign; + unsigned int colorspace = V4L2_COLORSPACE_DEFAULT; + + exynos_mpp_img srcImg, dstImg; + memset(&srcImg, 0, sizeof(srcImg)); + memset(&dstImg, 0, sizeof(dstImg)); + memset(&srcblendinfo, 0, sizeof(srcblendinfo)); + + setupSrc(srcImg, layer1); + + exynos_mpp_img midImg; + memset(&midImg, 0, sizeof(midImg)); + + bool needDoubleOperation = setupDoubleOperation(srcImg, midImg, layer1); + HDEBUGLOGD(eDebugMPP, "processM2MWithB(), needDoubleOperation %d", needDoubleOperation); + /* Only DRM use double operation */ + if (needDoubleOperation) { + if (mMPPHandle != NULL){ + stopMPP(mMPPHandle); + destroyMPP(mMPPHandle); + } + mMPPHandle = createMPP( mType, GSC_M2M_MODE, GSC_DUMMY, (getDrmMode(srcHandle->flags) != NO_DRM)); + bool reconfigure = isDstConfigChanged(midImg, mMidConfig); + if (reconfigure) { + int dstStride; + if (mMidBuffers[0] != NULL) 
{ + android::Mutex::Autolock lock(mMutex); + deleteBufferInfo buffInfo; + buffInfo.buffer = mMidBuffers[0]; + buffInfo.bufFence = mMidBufFence[0]; + mFreedBuffers.push_back(buffInfo); + mBufferFreeThread->mCondition.signal(); + mMidBuffers[0] = NULL; + mMidBufFence[0] = -1; + } + if (mMidBufFence[0] >= 0) { + close(mMidBufFence[0]); + } + mMidBufFence[0] = -1; + int usage = getBufferUsage(srcHandle); + ret = allocDevice->alloc(allocDevice, midImg.x + midImg.w, midImg.y + midImg.h, + HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN, usage, &mMidBuffers[0], + &dstStride); + if (ret < 0) { + ALOGE("failed to allocate intermediate buffer(%dx%d): %s", midImg.w, midImg.h, + strerror(-ret)); + goto err_alloc; + } + } + + midImg.acquireFenceFd = -1; + midImg.releaseFenceFd = -1; + private_handle_t *midHandle = private_handle_t::dynamicCast(mMidBuffers[0]); + midImg.fw = midHandle->stride; + midImg.fh = midHandle->vstride; + midImg.yaddr = midHandle->fd; + if (isFormatYCrCb(midHandle->format)) { + midImg.uaddr = midHandle->fd2; + midImg.vaddr = midHandle->fd1; + } else { + midImg.uaddr = midHandle->fd1; + midImg.vaddr = midHandle->fd2; + } + + if (isFormatRgb(srcHandle->format) == false) + colorspace = halDataSpaceToV4L2ColorSpace(layer1.dataSpace); + else + colorspace = halDataSpaceToV4L2ColorSpace(layer2.dataSpace); + ret = setCSCProperty(mMPPHandle, 0, !midImg.narrowRgb, colorspace); + + HDEBUGLOGD(eDebugMPP, "src configuration:\n"); + dumpMPPImage(eDebugMPP, srcImg); + HDEBUGLOGD(eDebugMPP, "mid configuration:\n"); + dumpMPPImage(eDebugMPP, midImg); + + ret = configMPP(mMPPHandle, &srcImg, &midImg); + if (ret < 0) { + ALOGE("failed to configure mType(%u) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + + ret = runMPP(mMPPHandle, &srcImg, &midImg); + if (ret < 0) { + ALOGE("failed to run mType(%u) mIndex(%d)", mType, mIndex); + goto err_gsc_config; + } + + layer1.releaseFenceFd = srcImg.releaseFenceFd; + int releaseFenceFd = midImg.releaseFenceFd; + midImg.releaseFenceFd = -1; + memcpy(&srcImg, &midImg, sizeof(exynos_mpp_img)); + srcImg.acquireFenceFd = releaseFenceFd; + srcImg.releaseFenceFd = -1; + + if (mMPPHandle != NULL){ + stopMPP(mMPPHandle); + destroyMPP(mMPPHandle); + mMPPHandle = NULL; + } + } + + /* if src1 is one plane NV12 format, uaddr should be src2's address. + if src1 is two plane NV12 format, vaddr should be src2's address. 
*/ + if (srcImg.uaddr == -1) + srcImg.uaddr = src2Handle->fd; + else + srcImg.vaddr = src2Handle->fd; + + setupDst(srcImg, dstImg, dstFormat, layer1); + + dstImg.x = mDisplay->mHwc->mVirtualDisplayRect.left; + dstImg.y = mDisplay->mHwc->mVirtualDisplayRect.top; + dstImg.w = mDisplay->mHwc->mVirtualDisplayRect.width; + dstImg.h = mDisplay->mHwc->mVirtualDisplayRect.height; + + layer1.acquireFenceFd = -1; + + dstBuf = mDstBuffers[mCurrentBuf]; + dstHandle = private_handle_t::dynamicCast(dstBuf); + + dstImg.fw = dstHandle->stride; + dstImg.fh = dstHandle->vstride; + dstImg.yaddr = dstHandle->fd; + dstImg.uaddr = dstHandle->fd1; + dstImg.vaddr = dstHandle->fd2; + dstImg.acquireFenceFd = mDstBufFence[mCurrentBuf]; + mDstBufFence[mCurrentBuf] = -1; + + setupBlendCfg(srcImg, dstImg, layer1, layer2, srcblendinfo); + + if (mMPPHandle == NULL){ + mMPPHandle = createBlendMPP( mType, GSC_M2M_MODE, GSC_DUMMY, (getDrmMode(srcHandle->flags) != NO_DRM)); + } + + if (!mMPPHandle) { + ALOGE("BlendMpp is null"); + ret = -1; + goto err_gsc_config; + } + + HDEBUGLOGD(eDebugMPP, "Source1 Image:\n"); + dumpMPPImage(eDebugMPP, srcImg); + HDEBUGLOGD(eDebugMPP, "Destination Image:\n"); + dumpMPPImage(eDebugMPP, dstImg); + HDEBUGLOGD(eDebugMPP, "Blend Information:\n"); + dumpBlendMPPImage(eDebugMPP, srcblendinfo); + + if (isFormatRgb(srcHandle->format) == false) + colorspace = halDataSpaceToV4L2ColorSpace(layer1.dataSpace); + else + colorspace = halDataSpaceToV4L2ColorSpace(layer2.dataSpace); + ret = setCSCProperty(mMPPHandle, 0, !dstImg.narrowRgb, colorspace); + + ret = configBlendMpp(mMPPHandle, &srcImg, &dstImg, &srcblendinfo); + if (ret < 0) { + ALOGE("failed to configure mType(%u) mIndex(%u)", mType, mIndex); + goto err_gsc_config; + } + + ret = runMPP(mMPPHandle, &srcImg, &dstImg); + if (ret < 0) { + ALOGE("failed to run mType(%u) mIndex(%d)", mType, mIndex); + goto err_gsc_config; + } + + mSrcConfig = srcImg; + mDstConfig = dstImg; + mDoubleOperation = false; + + if (needDoubleOperation) { + if (srcImg.releaseFenceFd >= 0) { + close(srcImg.releaseFenceFd); + srcImg.releaseFenceFd = -1; + } + } else { + layer1.releaseFenceFd = srcImg.releaseFenceFd; + } + + return 0; + +err_alloc: + if (srcImg.acquireFenceFd >= 0) { + close(srcImg.acquireFenceFd); + srcImg.acquireFenceFd = -1; + } + if (mMidBuffers[0]) { + android::Mutex::Autolock lock(mMutex); + deleteBufferInfo buffInfo; + buffInfo.buffer = mMidBuffers[0]; + buffInfo.bufFence = mMidBufFence[0]; + mFreedBuffers.push_back(buffInfo); + mMidBuffers[0] = NULL; + mMidBufFence[0] = -1; + } + if (mMidBufFence[0] >= 0) { + close(mMidBufFence[0]); + mMidBufFence[0] = -1; + } + + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } + + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + mBufferType = MPP_BUFFER_NORMAL; + mAllocatedBufferNum = 0; + +err_gsc_config: + if (mMPPHandle != NULL){ + stopMPP(mMPPHandle); + destroyMPP(mMPPHandle); + mMPPHandle = NULL; + } + + if (srcImg.acquireFenceFd >= 0) { + close(srcImg.acquireFenceFd); + srcImg.acquireFenceFd = -1; + } + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + mBufferType = MPP_BUFFER_NORMAL; + + return ret; +} + +int ExynosMPP::setupInternalMPP() +{ + return 0; +} + +void ExynosMPP::cleanupM2M() +{ + cleanupM2M(false); +} + +void ExynosMPP::cleanupM2M(bool noFenceWait) +{ + ATRACE_CALL(); + if (!mMPPHandle) + return; + + if (noFenceWait) { + for 
(size_t i = 0; i < NUM_MPP_DST_BUFS; i++) { + if (mDstBufFence[i] >= 0) { + close(mDstBufFence[i]); + mDstBufFence[i] = -1; + } + if (mMidBufFence[i] >= 0) { + close(mMidBufFence[i]); + mMidBufFence[i] = -1; + } + } + } + + stopMPP(mMPPHandle); + destroyMPP(mMPPHandle); + freeBuffersCloseFences(); + + mMPPHandle = NULL; + memset(&mSrcConfig, 0, sizeof(mSrcConfig)); + memset(&mMidConfig, 0, sizeof(mMidConfig)); + memset(&mDstConfig, 0, sizeof(mDstConfig)); + memset(mDstBuffers, 0, sizeof(mDstBuffers)); + memset(mMidBuffers, 0, sizeof(mMidBuffers)); + mCurrentBuf = 0; + mLastMPPLayerHandle = 0; + + for (size_t i = 0; i < NUM_MPP_DST_BUFS; i++) { + mDstBufFence[i] = -1; + mMidBufFence[i] = -1; + } + mState = MPP_STATE_FREE; + mDisplay = NULL; + mBufferType = MPP_BUFFER_NORMAL; + mAllocatedBufferNum = 0; + mAllocatedMidBufferNum = 0; + mMidRealloc = 0; + mDstRealloc = 0; +} + +void ExynosMPP::cleanupInternalMPP() +{ + if (mType != MPP_VG && mType != MPP_VGR && mType != MPP_VPP_G) + return; + + mDisplay = NULL; + startTransition(mDisplay); +} + +bool ExynosMPP::isDstConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2) +{ + return c1.x != c2.x || + c1.y != c2.y || + c1.w != c2.w || + c1.h != c2.h || + c1.format != c2.format || + c1.rot != c2.rot || + c1.narrowRgb != c2.narrowRgb || + c1.cacheable != c2.cacheable || + c1.pre_multi != c2.pre_multi || + c1.drmMode != c2.drmMode; +} + +int ExynosMPP::getMaxWidth(hwc_layer_1_t __unused &layer) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return 8190; + case MPP_MSC: + default: + return 8192; + } +} + +int ExynosMPP::getMaxHeight(hwc_layer_1_t &layer) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return isRotated(layer) ? 8190 : 4096; + case MPP_MSC: + default: + return 8192; + } +} + +int ExynosMPP::getMinWidth(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) { + if (formatToBpp(handle->format)) + return 32; + else + return isRotated(layer) ? 32 : 64; + } else if (isFormatYUV420(handle->format)) { + return 64; + } else { + return 64; + } + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getMinHeight(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) + return 16; + else if (isFormatYUV420(handle->format)) + return 32; + else return 32; + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getSrcWidthAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) { + if (formatToBpp(handle->format) == 32) + return 1; + else + return isRotated(layer) ? 2 : 1; + } else if (isFormatYUV420(handle->format)) { + return isRotated(layer) ? 4 : 2; + } else { + return 4; + } + case MPP_MSC: + default: + if (isFormatRgb(handle->format)) + return 1; + else if (isFormatYUV420(handle->format)) + return 2; + else + return 2; + } +} + +int ExynosMPP::getSrcHeightAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) { + if (formatToBpp(handle->format) == 32) + return 1; + else + return isRotated(layer) ? 2 : 1; + } else if (isFormatYUV420(handle->format)) { + return isRotated(layer) ? 
4 : 2; + } else { + return 4; + } + case MPP_MSC: + default: + if (isFormatRgb(handle->format)) + return 1; + else if (isFormatYUV420(handle->format)) + return 2; + else + return 2; + } +} + +int ExynosMPP::getMaxCropWidth(hwc_layer_1_t &layer) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + if ((layer.transform == HAL_TRANSFORM_ROT_180) || (layer.transform == HAL_TRANSFORM_FLIP_H) || + (layer.transform == HAL_TRANSFORM_FLIP_V)) + return 2048; + return isRotated(layer) ? 2560 : 4096; + case MPP_MSC: + default: + return 8192; + } +} + +int ExynosMPP::getMaxCropHeight(hwc_layer_1_t &layer) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return isRotated(layer) ? 2048 : 4096; + case MPP_MSC: + default: + return 8192; + } +} + +int ExynosMPP::getMinCropWidth(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) + return isRotated(layer) ? 16 : 32; + else if (isFormatYUV420(handle->format)) + return isRotated(layer) ? 32 : 64; + else + return 64; + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getMinCropHeight(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) + return isRotated(layer) ? 32 : 16; + else if (isFormatYUV420(handle->format)) + return isRotated(layer) ? 64 : 32; + else + return 64; + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getCropWidthAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + case MPP_MSC: + default: + if (isFormatRgb(handle->format)) + return 1; + else if (isFormatYUV420(handle->format)) + return 2; + else + return 2; + } +} + +int ExynosMPP::getCropHeightAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + case MPP_MSC: + default: + if (isFormatRgb(handle->format)) + return 1; + else if (isFormatYUV420(handle->format)) + return 2; + else + return 2; + } +} + +int ExynosMPP::getSrcXOffsetAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) + return isRotated(layer) ? 2 : 1; + else if (isFormatYUV420(handle->format)) + return isRotated(layer) ? 4 : 2; + else + return 4; + case MPP_MSC: + default: + if (isFormatRgb(handle->format)) + return 1; + else if (isFormatYUV420(handle->format)) + return 1; + else + return 2; + } +} + +int ExynosMPP::getSrcYOffsetAlign(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(handle->format)) + return isRotated(layer) ? 2 : 1; + else if (isFormatYUV420(handle->format)) + return isRotated(layer) ? 
4 : 2; + else + return 4; + case MPP_MSC: + default: + if (isFormatRgb(handle->format)) + return 1; + else if (isFormatYUV420(handle->format)) + return 1; + else + return 2; + } +} + +int ExynosMPP::getMaxDstWidth(int __unused format) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return 4096; + case MPP_MSC: + default: + return 8192; + } +} + +int ExynosMPP::getMaxDstHeight(int __unused format) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return 4096; + case MPP_MSC: + default: + return 8192; + } +} + +int ExynosMPP::getMinDstWidth(int format) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(format)) + return 16; + else if (isFormatYUV420(format)) + return 32; + else + return 32; + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getMinDstHeight(int format) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + if (isFormatRgb(format)) + return 8; + else if (isFormatYUV420(format)) + return 16; + else + return 16; + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getDstWidthAlign(int format) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return 1; + case MPP_MSC: + default: + if (isFormatRgb(format)) + return 1; + else if (isFormatYUV420(format)) + return 2; + else + return 2; + } +} + +int ExynosMPP::getDstHeightAlign(int format) +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return 1; + case MPP_MSC: + default: + if (isFormatRgb(format)) + return 1; + else if (isFormatYUV420(format)) + return 2; + else + return 2; + } +} + +int ExynosMPP::getMaxDownscale() +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + return 2; + case MPP_MSC: + default: + return 16; + } +} + +int ExynosMPP::getMaxDownscale(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + bool isPerpendicular = !!(layer.transform & HAL_TRANSFORM_ROT_90); + float scaleRatio_H = (layer.sourceCropf.right - layer.sourceCropf.left)/WIDTH(layer.displayFrame); + float scaleRatio_V = (layer.sourceCropf.bottom - layer.sourceCropf.top)/HEIGHT(layer.displayFrame); + float dstW = (float)WIDTH(layer.displayFrame); + float displayW = (float)ExynosMPP::mainDisplayWidth; + if (isPerpendicular) + dstW = (float)HEIGHT(layer.displayFrame); + + switch (mType) { + case MPP_VG: + case MPP_VGR: + if ((float)VPP_CLOCK < (VPP_VCLK * VPP_MIC_FACTOR * scaleRatio_H * scaleRatio_V)/2 * (dstW/displayW)) + return 1; + return 2; + case MPP_MSC: + if (handle && (getDrmMode(handle->flags) != NO_DRM)) + return 4; + else + return 16; + default: + return 16; + } +} + +int ExynosMPP::getMaxUpscale() +{ + switch (mType) { + case MPP_VG: + case MPP_VGR: + case MPP_MSC: + default: + return 8; + } +} + +bool ExynosMPP::inUse() +{ + return mMPPHandle != NULL; +} + +void ExynosMPP::setDisplay(ExynosDisplay *display) +{ + mDisplay = display; +} + +void ExynosMPP::preAssignDisplay(ExynosDisplay *display) +{ + mPreAssignedDisplay = display; +} + +bool ExynosMPP::isAssignable(ExynosDisplay *display) +{ + return (mPreAssignedDisplay == display || mPreAssignedDisplay == NULL); +} + +void ExynosMPP::setAllocDevice(alloc_device_t* allocDevice) +{ + mAllocDevice = allocDevice; + if (mBufferFreeThread == NULL) { + mBufferFreeThread = new BufferFreeThread(this); + mBufferFreeThread->mRunning = true; + mBufferFreeThread->run("MPPThread"); + } +} + +bool ExynosMPP::wasUsedByDisplay(ExynosDisplay *display) +{ + return mState == MPP_STATE_FREE && mDisplay == display; +} + +void ExynosMPP::startTransition(ExynosDisplay *display) +{ + mState = 
MPP_STATE_TRANSITION; + mDisplay = display; + { + android::Mutex::Autolock lock(mMutex); + mBufferFreeThread->mCondition.signal(); + } +} + +bool ExynosMPP::isOTF() +{ + return false; +} + +void ExynosMPP::cleanupOTF() +{ +} + +bool ExynosMPP::checkNoExtVideoBuffer() +{ + android::Mutex::Autolock lock(mMutex); + if (mBufferType != MPP_BUFFER_VIDEO_EXT) + return true; + else + return false; +} + +void ExynosMPP::freeBuffers() +{ + alloc_device_t* alloc_device = mAllocDevice; + android::List::iterator it; + android::List::iterator end; + { + android::Mutex::Autolock lock(mMutex); + it = mFreedBuffers.begin(); + end = mFreedBuffers.end(); + } + while (it != end) { + deleteBufferInfo bufferInfo = (deleteBufferInfo)(*it); + if (bufferInfo.bufFence >= 0) { + if (sync_wait(bufferInfo.bufFence, 1000) < 0) + ALOGE("sync_wait error"); + close(bufferInfo.bufFence); + } + alloc_device->free(alloc_device, bufferInfo.buffer); + { + android::Mutex::Autolock lock(mMutex); + it = mFreedBuffers.erase(it); + } + } +} + +bool BufferFreeThread::threadLoop() +{ + while(mRunning) { + { + android::Mutex::Autolock lock(mExynosMPP->mMutex); + while(mExynosMPP->mFreedBuffers.size() == 0 && + mExynosMPP->mState != MPP_STATE_TRANSITION) { + mCondition.wait(mExynosMPP->mMutex); + } + } + if (mExynosMPP->mState == MPP_STATE_TRANSITION) { + if ((mExynosMPP->mDstBufFence[0] >= 0) && sync_wait(mExynosMPP->mDstBufFence[0], 1000) < 0) + ALOGE("fence wait error"); + mExynosMPP->mState = MPP_STATE_FREE; + } + if (mExynosMPP->mFreedBuffers.size() != 0) { + mExynosMPP->freeBuffers(); + } + } + return true; +} + +void *ExynosMPP::createMPP(int id, int mode, int outputMode, int drm) +{ + ATRACE_CALL(); + mppFact = new MppFactory(); + libmpp = mppFact->CreateMpp(id, mode, outputMode, drm); + + return reinterpret_cast(libmpp); +} + +void *ExynosMPP::createBlendMPP(int id, int mode, int outputMode, int drm) +{ + mppFact = new MppFactory(); + libmpp = mppFact->CreateBlendMpp(id, mode, outputMode, drm); + + return reinterpret_cast(libmpp); +} + +int ExynosMPP::configMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst) +{ + ATRACE_CALL(); + return libmpp->ConfigMpp(handle, src, dst); +} + +int ExynosMPP::configBlendMpp(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst, struct SrcBlendInfo *srcblendinfo) +{ + return libmpp->ConfigBlendMpp(handle, src, dst, srcblendinfo); +} + +int ExynosMPP::runMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst) +{ + ATRACE_CALL(); + return libmpp->RunMpp(handle, src, dst); +} + +int ExynosMPP::stopMPP(void *handle) +{ + ATRACE_CALL(); + return libmpp->StopMpp(handle); +} + +void ExynosMPP::destroyMPP(void *handle) +{ + ATRACE_CALL(); + libmpp->DestroyMpp(handle); + delete(mppFact); +} + +int ExynosMPP::setCSCProperty(void *handle, unsigned int eqAuto, unsigned int fullRange, unsigned int colorspace) +{ + return libmpp->SetCSCProperty(handle, eqAuto, fullRange, colorspace); +} + +int ExynosMPP::freeMPP(void *handle) +{ + ATRACE_CALL(); + return libmpp->FreeMpp(handle); +} + +bool ExynosMPP::bufferChanged(hwc_layer_1_t &layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + return mSrcConfig.fw != (uint32_t)handle->stride || + mSrcConfig.fh != (uint32_t)handle->vstride || + mSrcConfig.format != (uint32_t)handle->format || + mDstConfig.rot != (uint32_t)layer.transform; +} diff --git a/libhwcutils/ExynosMPPv2.h b/libhwcutils/ExynosMPPv2.h new file mode 100644 index 0000000..bd88e0b --- /dev/null +++ b/libhwcutils/ExynosMPPv2.h @@ -0,0 +1,241 @@ +#ifndef 
EXYNOS_MPP_H +#define EXYNOS_MPP_H + +#include "ExynosDisplay.h" +#include "MppFactory.h" +#include +#include +#include +#include +#include + +class BufferFreeThread; + +const size_t NUM_MPP_DST_BUFS = 3; +const size_t NUM_DRM_MPP_DST_BUFS = 2; +#define INTERNAL_MPP_DST_FORMAT HAL_PIXEL_FORMAT_RGBX_8888 + +#ifndef VPP_CLOCK +#define VPP_CLOCK 400 +#endif +#ifndef VPP_VCLK +#define VPP_VCLK 133 +#endif +#ifndef VPP_MIC_FACTOR +#define VPP_MIC_FACTOR 2 +#endif + +enum { + eMPPUnsupportedDownScale = 0x00000001, + eMPPUnsupportedHW = 0x00000002, + eMPPUnsupportedRotation = 0x00000004, + eMPPUnsupportedCoordinate = 0x00000008, + eMPPUnsupportedDRMContents = 0x00000010, + eMPPUnsupportedS3DContents = 0x00000020, + eMPPUnsupportedBlending = 0x00000040, + eMPPUnsupportedFormat = 0x00000080, + eMPPNotAlignedDstSize = 0x00000100, + eMPPNotAlignedSrcCropPosition = 0x00000200, + eMPPNotAlignedHStride = 0x00000400, + eMPPNotAlignedVStride = 0x00000800, + eMPPExceedHStrideMaximum = 0x00001000, + eMPPExceedVStrideMaximum = 0x00002000, + eMPPExeedMaxDownScale = 0x00004000, + eMPPExeedMaxDstWidth = 0x00008000, + eMPPExeedMaxDstHeight = 0x00010000, + eMPPExeedMinSrcWidth = 0x00020000, + eMPPExeedMinSrcHeight = 0x00040000, + eMPPExeedMaxUpScale = 0x00080000, + eMPPExeedSrcWCropMax = 0x00100000, + eMPPExeedSrcHCropMax = 0x00200000, + eMPPExeedSrcWCropMin = 0x00400000, + eMPPExeedSrcHCropMin = 0x00800000, + eMPPNotAlignedCrop = 0x01000000, + eMPPNotAlignedOffset = 0x02000000, + eMPPExeedMinDstWidth = 0x04000000, + eMPPExeedMinDstHeight = 0x08000000, + eMPPUnsupportedCompression = 0x10000000, + eMPPUnsupportedCSC = 0x20000000, +}; + +enum { + MPP_NONE = 0, + MPP_M2M, + MPP_LOCAL, +}; + +enum { + MPP_MEM_MMAP = 1, + MPP_MEM_USERPTR, + MPP_MEM_OVERLAY, + MPP_MEM_DMABUF, +}; + +enum { + MPP_STATE_FREE = 0, + MPP_STATE_ASSIGNED, + MPP_STATE_TRANSITION, +}; + +enum { + MPP_BUFFER_NORMAL = 0, + MPP_BUFFER_NORMAL_DRM, + MPP_BUFFER_SECURE_DRM, + MPP_BUFFER_VIDEO_EXT, +}; + +class ExynosMPP { + MppFactory *mppFact; + LibMpp *libmpp; + + struct deleteBufferInfo { + buffer_handle_t buffer; + int bufFence; + }; + + public: + /* Methods */ + ExynosMPP(); + ExynosMPP(ExynosDisplay *display, int gscIndex); + ExynosMPP(ExynosDisplay *display, unsigned int mppType, unsigned int mppIndex); + virtual ~ExynosMPP(); + void initMPP(); + const android::String8& getName() const; + + bool isSrcConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + bool formatRequiresMPP(int format); + void setDisplay(ExynosDisplay *display); + void preAssignDisplay(ExynosDisplay *display); + bool isAssignable(ExynosDisplay *display); + bool bufferChanged(hwc_layer_1_t &layer); + bool needsReqbufs(); + bool inUse(); + void freeBuffers(); + void setAllocDevice(alloc_device_t* allocDevice); + bool wasUsedByDisplay(ExynosDisplay *display); + void startTransition(ExynosDisplay *display); + bool checkNoExtVideoBuffer(); + void adjustSourceImage(hwc_layer_1_t &layer, exynos_mpp_img &srcImg); + + /* For compatibility with libhwc */ + bool isOTF(); + void cleanupOTF(); + + /* Override these virtual functions in chip directory to handle per-chip differences */ + virtual bool isFormatSupportedByMPP(int format); + virtual bool isCSCSupportedByMPP(int src_format, int dst_format, uint32_t dataSpace); + virtual bool isProcessingRequired(hwc_layer_1_t &layer, int format); + virtual int isProcessingSupported(hwc_layer_1_t &layer, int dst_format); + virtual int processM2M(hwc_layer_1_t &layer, int dstFormat, hwc_frect_t *sourceCrop, bool needBufferAlloc = true); + 
virtual int processM2MWithB(hwc_layer_1_t &layer1, hwc_layer_1_t &layer2, int dstFormat, hwc_frect_t *sourceCrop); + virtual int setupInternalMPP(); + virtual void cleanupM2M(); + virtual void cleanupM2M(bool noFenceWait); + virtual void cleanupInternalMPP(); + virtual int getMaxDownscale(); + virtual int getMaxUpscale(); + virtual int getMaxDownscale(hwc_layer_1_t &layer); + virtual int getDstWidthAlign(int format); + virtual int getDstHeightAlign(int format); + virtual int getSrcXOffsetAlign(hwc_layer_1_t &layer); + virtual int getSrcYOffsetAlign(hwc_layer_1_t &layer); + virtual int getCropWidthAlign(hwc_layer_1_t &layer); + virtual int getCropHeightAlign(hwc_layer_1_t &layer); + virtual int getMaxWidth(hwc_layer_1_t &layer); + virtual int getMaxHeight(hwc_layer_1_t &layer); + virtual int getMinWidth(hwc_layer_1_t &layer); + virtual int getMinHeight(hwc_layer_1_t &layer); + virtual int getSrcWidthAlign(hwc_layer_1_t &layer); + virtual int getSrcHeightAlign(hwc_layer_1_t &layer); + virtual int getMaxCropWidth(hwc_layer_1_t &layer); + virtual int getMaxCropHeight(hwc_layer_1_t &layer); + virtual int configBlendMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst, + struct SrcBlendInfo *srcblendinfo); + + /* Fields */ + static int mainDisplayWidth; + uint32_t mType; + uint32_t mIndex; + int mState; + void *mMPPHandle; + ExynosDisplay *mDisplay; + ExynosDisplay *mPreAssignedDisplay; + exynos_mpp_img mSrcConfig; + exynos_mpp_img mMidConfig; + exynos_mpp_img mDstConfig; + buffer_handle_t mDstBuffers[NUM_MPP_DST_BUFS]; + buffer_handle_t mMidBuffers[NUM_MPP_DST_BUFS]; + int mDstBufFence[NUM_MPP_DST_BUFS]; + int mMidBufFence[NUM_MPP_DST_BUFS]; + size_t mCurrentBuf; + intptr_t mLastMPPLayerHandle; + int mS3DMode; + bool mDoubleOperation; + bool mCanRotate; + bool mCanBlend; + android::List mFreedBuffers; + BufferFreeThread *mBufferFreeThread; + android::Mutex mMutex; + alloc_device_t* mAllocDevice; + size_t mNumAvailableDstBuffers; + size_t mBufferType; + int mSMemFd; + bool mCanBeUsed; + uint32_t mAllocatedBufferNum; + uint32_t mAllocatedMidBufferNum; + bool mDstRealloc; + bool mMidRealloc; + + protected: + /* Methods */ + bool isDstConfigChanged(exynos_mpp_img &c1, exynos_mpp_img &c2); + bool setupDoubleOperation(exynos_mpp_img &srcImg, exynos_mpp_img &midImg, hwc_layer_1_t &layer); + int reallocateBuffers(private_handle_t *srcHandle, exynos_mpp_img &dstImg, exynos_mpp_img &midImg, bool needDoubleOperation, uint32_t index); + void reusePreviousFrame(hwc_layer_1_t &layer); + void freeBuffersCloseFences(); + + /* Override these virtual functions in chip directory to handle per-chip differences */ + virtual void setupSrc(exynos_mpp_img &srcImg, hwc_layer_1_t &layer); + virtual void setupMid(exynos_mpp_img &srcImg); + virtual void setupDst(exynos_mpp_img &srcImg, exynos_mpp_img &dstImg, int dst_format, hwc_layer_1_t &layer); + virtual void setupBlendCfg(exynos_mpp_img &srcImg, exynos_mpp_img &dstImg, hwc_layer_1_t &layer1, + hwc_layer_1_t &layer2, struct SrcBlendInfo &srcBlendInfo); + + virtual int getMinCropWidth(hwc_layer_1_t &layer); + virtual int getMinCropHeight(hwc_layer_1_t &layer); + + virtual int getMaxDstWidth(int format); + virtual int getMaxDstHeight(int format); + virtual int getMinDstWidth(int format); + virtual int getMinDstHeight(int format); + + virtual void *createMPP(int id, int mode, int outputMode, int drm); + virtual void *createBlendMPP(int id, int mode, int outputMode, int drm); + virtual int configMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst); + virtual 
int runMPP(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst); + virtual int stopMPP(void *handle); + virtual void destroyMPP(void *handle); + virtual int setCSCProperty(void *handle, unsigned int eqAuto, unsigned int fullRange, unsigned int colorspace); + virtual int getBufferUsage(private_handle_t *srcHandle); + virtual int freeMPP(void *handle); + + private: + size_t getBufferType(uint32_t usage); + android::String8 mName; +}; + +class BufferFreeThread: public android::Thread { + public: + BufferFreeThread(ExynosMPP *exynosMPP): mRunning(false) {mExynosMPP = exynosMPP;}; + virtual bool threadLoop(); + bool mRunning; + android::Condition mCondition; + private: + ExynosMPP *mExynosMPP; +}; + +size_t visibleWidth(ExynosMPP *processor, hwc_layer_1_t &layer, int format, + int xres); + +#endif diff --git a/libhwcutils/NOTICE b/libhwcutils/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libhwcutils/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libhwjpeg/Android.mk b/libhwjpeg/Android.mk new file mode 100644 index 0000000..4e5c4ed --- /dev/null +++ b/libhwjpeg/Android.mk @@ -0,0 +1,34 @@ +# Copyright (C) 2015 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_CFLAGS += -DLOG_TAG=\"exynos-libhwjpeg\" + +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libcsc libion + +LOCAL_C_INCLUDES := $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/system/core/libion/include + +LOCAL_SRC_FILES := hwjpeg-base.cpp hwjpeg-v4l2.cpp libhwjpeg-exynos.cpp +ifeq ($(TARGET_USES_UNIVERSAL_LIBHWJPEG), true) +LOCAL_SRC_FILES += ExynosJpegEncoder.cpp libcsc.cpp AppMarkerWriter.cpp ExynosJpegEncoderForCamera.cpp +endif + +LOCAL_MODULE_TAGS := optional +LOCAL_MODULE := libhwjpeg + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libhwjpeg/AppMarkerWriter.cpp b/libhwjpeg/AppMarkerWriter.cpp new file mode 100644 index 0000000..ac55bc3 --- /dev/null +++ b/libhwjpeg/AppMarkerWriter.cpp @@ -0,0 +1,629 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "hwjpeg-internal.h" +#include "AppMarkerWriter.h" +#include "IFDWriter.h" + +static const char ExifAsciiPrefix[] = { 'A', 'S', 'C', 'I', 'I', 0x0, 0x0, 0x0 }; +static const char ExifIdentifierCode[6] = { 'E', 'x', 'i', 'f', 0x00, 0x00 }; +static char TiffHeader[8] = { 'I', 'I', 0x2A, 0x00, 0x08, 0x00, 0x00, 0x00 }; + +CEndianessChecker __LITTLE_ENDIAN__; + +CEndianessChecker::CEndianessChecker() +{ + int num = 1; + __little = (*reinterpret_cast(&num) == 1); + if (__little) { + TiffHeader[0] = 'I'; + TiffHeader[1] = 'I'; + } else { + TiffHeader[0] = 'M'; + TiffHeader[1] = 'M'; + } +} + + +CAppMarkerWriter::CAppMarkerWriter() + : m_pAppBase(NULL), m_pApp1End(NULL), m_pExif(NULL), m_pDebug(NULL) +{ + Init(); +} + +CAppMarkerWriter::CAppMarkerWriter(char *base, exif_attribute_t *exif, debug_attribute_t *debug) +{ + PrepareAppWriter(base, exif, debug); +} + +void CAppMarkerWriter::Init() +{ + m_pApp1End = NULL; + + m_szApp1 = 0; + + m_n0thIFDFields = 0; + m_n1stIFDFields = 0; + m_nExifIFDFields = 0; + m_nGPSIFDFields = 0; + + m_szMake = 0; + m_szSoftware = 0; + m_szModel = 0; + m_szUniqueID = 0; + + m_pThumbBase = NULL; + m_szMaxThumbSize = 0; + m_pThumbSizePlaceholder = NULL; +} + +void CAppMarkerWriter::PrepareAppWriter(char *base, exif_attribute_t *exif, debug_attribute_t *debug) +{ + m_pAppBase = base; + m_pExif = exif; + + Init(); + + size_t applen = 0; + + if (exif) { + // APP1 + applen += JPEG_SEGMENT_LENFIELD_SIZE + + ARRSIZE(ExifIdentifierCode) + ARRSIZE(TiffHeader); + + // 0th IFD: Make, Model, Orientation, Software, + // DateTime, YCbCrPositioning, X/Y Resolutions, Exif and GPS + applen += IFD_FIELDCOUNT_SIZE + IFD_VALOFF_SIZE; + // Orientation, YCbCrPos, XYRes/Unit, DateTime and Exif + m_n0thIFDFields = 7; + applen += IFD_FIELD_SIZE * m_n0thIFDFields; + applen += sizeof(rational_t) * 2; // values of XResolution and YResolution + applen += EXIF_DATETIME_LENGTH; + + m_szMake = strlen(m_pExif->maker); + if (m_szMake > 0) { + m_n0thIFDFields++; + applen += IFD_FIELD_SIZE + m_szMake + 1; + } + + m_szSoftware = strlen(m_pExif->software); + if (m_szSoftware > 0) { + m_n0thIFDFields++; + applen += IFD_FIELD_SIZE + m_szSoftware + 1; + } + + m_szModel = strlen(m_pExif->model); + if (m_szModel > 0) { + m_n0thIFDFields++; + applen += IFD_FIELD_SIZE + m_szModel + 1; + } + + if (m_pExif->enableGps) { + m_n0thIFDFields++; + applen += IFD_FIELD_SIZE; + } + + /* + * Exif SubIFD: 29 fields + * Fields with no data offset: 12 + * - ExposureProgram, PhotographicSensitivity, ExifVersion, MeteringMode, + * - Flash, ColorSpace, PixelXDimension, PixelYDimension, ExposureMode, + * - WhiteBalance, FocalLengthIn35mmFilm, SceneCaptureType + * (S)Rational Fields: 8 + * - ExposureTime, FNumber, ShutterSpeedValue, ApertureValue, + * - BrightnessValue, ExposureBiasValue, MaxApertureValue, FocalLength + * ASCII Fields: 6 + * - DateTimeOriginal, DateTimeDigitized, SubsecTime, SubsecTimeOriginal, + * - SubsecTimeDigitized, ImageUniqueID + * Undefined Long Fields: 2 + * - MakerNote, UserComment + * SubIFD: 1 + * - Interoperability IFD + */ + m_nExifIFDFields = 20; // rational fields 
and fields withouth data offset + applen += IFD_FIELDCOUNT_SIZE + IFD_VALOFF_SIZE; + applen += IFD_FIELD_SIZE * m_nExifIFDFields; + applen += sizeof(rational_t) * 8; // 8 rational values + + // DateTime* + m_nExifIFDFields += 2; + applen += (IFD_FIELD_SIZE + EXIF_DATETIME_LENGTH) * 2; + + // SubSecTime* + m_nExifIFDFields += 3; + applen += (IFD_FIELD_SIZE + EXIF_SUBSECTIME_LENGTH) * 3; + + m_szUniqueID = strlen(m_pExif->unique_id); // len should be 32! + if (m_szUniqueID > 0) { + m_nExifIFDFields++; + applen += IFD_FIELD_SIZE + m_szUniqueID + 1; + } + + if (m_pExif->maker_note_size > 0) { + m_nExifIFDFields++; + applen += IFD_FIELD_SIZE + m_pExif->maker_note_size; + } + + if (m_pExif->user_comment_size > 0) { + m_nExifIFDFields++; + applen += IFD_FIELD_SIZE + m_pExif->user_comment_size; + } + + // Interoperability SubIFD + m_nExifIFDFields++; // Interoperability is sub IFD of Exif sub IFD + applen += IFD_FIELD_SIZE + + IFD_FIELDCOUNT_SIZE + IFD_VALOFF_SIZE + IFD_FIELD_SIZE * 2; + + if (m_pExif->enableGps) { + size_t len; + /* + * GPS SubIFD: 10 fields + * Fields with no data offset: 4 + * - GPSVersionID, GPSLattitudeRef, GPSLongitudeRev, GPSAltitudeRef + * Rational Fields: 4 (total 10 rational values) + * - GPSLatitude(3), GPSLongitude(3), GPSAltitude(1), GPSTImeStamp(3) + * ASCII or Undefined fields: 2 + * - PGSProcessingMethod, GPSDateStamp + */ + m_nGPSIFDFields = 8; + applen += IFD_FIELDCOUNT_SIZE + IFD_VALOFF_SIZE; + applen += IFD_FIELD_SIZE * m_nGPSIFDFields; + applen += sizeof(rational_t) * 10; + + // gps date stamp + m_nGPSIFDFields += 1; + applen += IFD_FIELD_SIZE + EXIF_GPSDATESTAMP_LENGTH; + + len = min(strlen(m_pExif->gps_processing_method), + MAX_GPS_PROCESSINGMETHOD_SIZE - sizeof(ExifAsciiPrefix) - 1); + if (len > 0) { + m_nGPSIFDFields++; + applen += IFD_FIELD_SIZE + len + sizeof(ExifAsciiPrefix) + 1; + } + } + + if (m_pExif->enableThumb) { + /* + * 1st IFD: 6 + * Fields with no data offset: 6 + * - ImageWidth, ImageHeight, Compression, Orientation, + * - JPEGInterchangeFormat, JPEGInterchangeFormatLength + */ + if ((m_pExif->widthThumb < 16) || (m_pExif->heightThumb < 16)) { + ALOGE("Insufficient thumbnail information %dx%d", + m_pExif->widthThumb, m_pExif->heightThumb); + return; + } + + m_n1stIFDFields = 6; + applen += IFD_FIELDCOUNT_SIZE + IFD_VALOFF_SIZE; + applen += IFD_FIELD_SIZE * m_n1stIFDFields; + + m_pThumbBase = m_pAppBase + JPEG_MARKER_SIZE + applen; + m_szMaxThumbSize = JPEG_MAX_SEGMENT_SIZE - applen; + } + + m_szApp1 = applen; + } + + if (debug) { + if (debug->num_of_appmarker > (EXTRA_APPMARKER_LIMIT - EXTRA_APPMARKER_MIN)) { + ALOGE("Too many extra APP markers %d", debug->num_of_appmarker); + return; + } + + for (int idx = 0; idx < debug->num_of_appmarker; idx++) { + int appid; + unsigned int len; + + appid = debug->idx[idx][0]; + if ((appid < EXTRA_APPMARKER_MIN) || (appid >= EXTRA_APPMARKER_LIMIT)) { + ALOGE("Invalid extra APP segment ID %d", appid); + return; + } + + len = debug->debugSize[appid]; + if ((len == 0) || (len > (JPEG_MAX_SEGMENT_SIZE - JPEG_SEGMENT_LENFIELD_SIZE))) { + ALOGE("Invalid APP%d segment size, %zu bytes", appid, len); + return; + } + + ALOGD("APP%d: %u bytes", appid, len + JPEG_SEGMENT_LENFIELD_SIZE); + } + } + + m_pDebug = debug; + + // |<- m_szApp1 ->|<- m_szMaxThumbSize ->|<-m_szAppX->| + // |<----- size of total APP1 and APP4 segments ----->|<-APP11->|<-- main image + // m_pAppBase m_pThumbBase | | return + // | | | | || + // v v | | v| + // --|--------------------------------------------------|---------|----------- + // ^ ^ ^ ^ 
| ^^ + // | | | | | || + // |APP1 SOIofThumb APPX SOIofMain + // | | + // SOI DHTofMain + + ALOGD("APP1: %zu bytes(ThumbMax %zu)", m_szApp1, m_szMaxThumbSize); +} + +#define APPMARKLEN (JPEG_MARKER_SIZE + JPEG_SEGMENT_LENFIELD_SIZE) +char *CAppMarkerWriter::WriteAPP11(char *current, size_t dummy, size_t align) +{ + ALOG_ASSERT((align & ~align) == 0); + + if ((dummy == 0) && (align == 1)) + return current; + + if (!m_pExif && !m_pDebug) + return current; + + uint16_t len = PTR_TO_ULONG(current + APPMARKLEN) & (align - 1); + + if (len) + len = align - len; + + len += dummy + JPEG_SEGMENT_LENFIELD_SIZE; + + *current++ = 0xFF; + *current++ = 0xEB; + WriteDataInBig(current, len); + + return current + len; +} + +char *CAppMarkerWriter::WriteAPPX(char *current, bool just_reserve) +{ + if (!m_pDebug) + return current; + + for (int idx = 0; idx < m_pDebug->num_of_appmarker; idx++) { + int appid = m_pDebug->idx[idx][0]; + uint16_t len = m_pDebug->debugSize[appid] + JPEG_SEGMENT_LENFIELD_SIZE; + + // APPx marker + *current++ = 0xFF; + *current++ = 0xE0 + (appid & 0xF); + // APPx length + current = WriteDataInBig(current, len); + // APPx data + if (!just_reserve) + memcpy(current, m_pDebug->debugData[appid], m_pDebug->debugSize[appid]); + current += m_pDebug->debugSize[appid]; + } + + return current; +} + +char *CAppMarkerWriter::WriteAPP1(char *current, bool reserve_thumbnail_space, bool updating) +{ + if (!m_pExif) + return current; + + // APP1 Marker + *current++ = 0xFF; + *current++ = 0xE1; + + // APP1 length + if (updating) { + current += JPEG_SEGMENT_LENFIELD_SIZE; + } else { + uint16_t len = m_szApp1; + if (reserve_thumbnail_space) + len += m_szMaxThumbSize; + current = WriteDataInBig(current, len); + } + + // Exif Identifier + for (size_t i = 0; i < ARRSIZE(ExifIdentifierCode); i++) + *current++ = ExifIdentifierCode[i]; + + char *tiffheader = current; + for (size_t i = 0; i < ARRSIZE(TiffHeader); i++) + *current++ = TiffHeader[i]; + + CIFDWriter writer(tiffheader, current, m_n0thIFDFields); + + writer.WriteShort(EXIF_TAG_ORIENTATION, 1, &m_pExif->orientation); + writer.WriteShort(EXIF_TAG_YCBCR_POSITIONING, 1, &m_pExif->ycbcr_positioning); + writer.WriteRational(EXIF_TAG_X_RESOLUTION, 1, &m_pExif->x_resolution); + writer.WriteRational(EXIF_TAG_Y_RESOLUTION, 1, &m_pExif->y_resolution); + writer.WriteShort(EXIF_TAG_RESOLUTION_UNIT, 1, &m_pExif->resolution_unit); + if (m_szMake > 0) + writer.WriteASCII(EXIF_TAG_MAKE, m_szMake + 1, m_pExif->maker); + if (m_szModel > 0) + writer.WriteASCII(EXIF_TAG_MODEL, m_szModel + 1, m_pExif->model); + if (m_szSoftware > 0) + writer.WriteASCII(EXIF_TAG_SOFTWARE, m_szSoftware + 1, m_pExif->software); + writer.WriteCString(EXIF_TAG_DATE_TIME, EXIF_DATETIME_LENGTH, m_pExif->date_time); + + char *pSubIFDBase = writer.BeginSubIFD(EXIF_TAG_EXIF_IFD_POINTER); + if (pSubIFDBase) { // This should be always true!! 
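+            // Fill the Exif-private IFD: exposure/aperture rationals, date-time and subsec strings, optional maker note and user comment blobs, pixel dimensions, then the nested Interoperability sub-IFD.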
+ CIFDWriter exifwriter(tiffheader, pSubIFDBase, m_nExifIFDFields); + exifwriter.WriteRational(EXIF_TAG_EXPOSURE_TIME, 1, &m_pExif->exposure_time); + exifwriter.WriteRational(EXIF_TAG_FNUMBER, 1, &m_pExif->fnumber); + exifwriter.WriteShort(EXIF_TAG_EXPOSURE_PROGRAM, 1, &m_pExif->exposure_program); + exifwriter.WriteShort(EXIF_TAG_ISO_SPEED_RATING, 1, &m_pExif->iso_speed_rating); + exifwriter.WriteUndef(EXIF_TAG_EXIF_VERSION, 4, reinterpret_cast(m_pExif->exif_version)); + exifwriter.WriteCString(EXIF_TAG_DATE_TIME_ORG, EXIF_DATETIME_LENGTH, m_pExif->date_time); + exifwriter.WriteCString(EXIF_TAG_DATE_TIME_DIGITIZE, EXIF_DATETIME_LENGTH, m_pExif->date_time); + exifwriter.WriteSRational(EXIF_TAG_SHUTTER_SPEED, 1, &m_pExif->shutter_speed); + exifwriter.WriteRational(EXIF_TAG_APERTURE, 1, &m_pExif->aperture); + exifwriter.WriteSRational(EXIF_TAG_BRIGHTNESS, 1, &m_pExif->brightness); + exifwriter.WriteSRational(EXIF_TAG_EXPOSURE_BIAS, 1, &m_pExif->exposure_bias); + exifwriter.WriteRational(EXIF_TAG_MAX_APERTURE, 1, &m_pExif->max_aperture); + exifwriter.WriteShort(EXIF_TAG_METERING_MODE, 1, &m_pExif->metering_mode); + exifwriter.WriteShort(EXIF_TAG_FLASH, 1, &m_pExif->flash); + exifwriter.WriteRational(EXIF_TAG_FOCAL_LENGTH, 1, &m_pExif->focal_length); + exifwriter.WriteCString(EXIF_TAG_SUBSEC_TIME, EXIF_SUBSECTIME_LENGTH, m_pExif->sec_time); + exifwriter.WriteCString(EXIF_TAG_SUBSEC_TIME_ORIG, EXIF_SUBSECTIME_LENGTH, m_pExif->sec_time); + exifwriter.WriteCString(EXIF_TAG_SUBSEC_TIME_DIG, EXIF_SUBSECTIME_LENGTH, m_pExif->sec_time); + if (m_pExif->maker_note_size > 0) + exifwriter.WriteUndef(EXIF_TAG_MAKER_NOTE, m_pExif->maker_note_size, m_pExif->maker_note); + if (m_pExif->user_comment_size > 0) + exifwriter.WriteUndef(EXIF_TAG_USER_COMMENT, m_pExif->user_comment_size, m_pExif->user_comment); + exifwriter.WriteShort(EXIF_TAG_COLOR_SPACE, 1, &m_pExif->color_space); + exifwriter.WriteLong(EXIF_TAG_PIXEL_X_DIMENSION, 1, &m_pExif->width); + exifwriter.WriteLong(EXIF_TAG_PIXEL_Y_DIMENSION, 1, &m_pExif->height); + exifwriter.WriteShort(EXIF_TAG_EXPOSURE_MODE, 1, &m_pExif->exposure_mode); + exifwriter.WriteShort(EXIF_TAG_WHITE_BALANCE, 1, &m_pExif->white_balance); + exifwriter.WriteShort(EXIF_TAG_FOCA_LENGTH_IN_35MM_FILM, 1, &m_pExif->focal_length_in_35mm_length); + exifwriter.WriteShort(EXIF_TAG_SCENCE_CAPTURE_TYPE, 1, &m_pExif->scene_capture_type); + if (m_szUniqueID > 0) + exifwriter.WriteASCII(EXIF_TAG_IMAGE_UNIQUE_ID, m_szUniqueID + 1, m_pExif->unique_id); + pSubIFDBase = exifwriter.BeginSubIFD(EXIF_TAG_INTEROPERABILITY); + if (pSubIFDBase) { + CIFDWriter interopwriter(tiffheader, pSubIFDBase, 2); + interopwriter.WriteASCII(EXIF_TAG_INTEROPERABILITY_INDEX, 4, + m_pExif->interoperability_index ? "THM" : "R98"); + interopwriter.WriteUndef(EXIF_TAG_INTEROPERABILITY_VERSION, 4, + reinterpret_cast("0100")); + interopwriter.Finish(true); + exifwriter.EndSubIFD(interopwriter.GetNextIFDBase()); + } else { + exifwriter.CancelSubIFD(); + } + exifwriter.Finish(true); + writer.EndSubIFD(exifwriter.GetNextIFDBase()); + } else { + writer.CancelSubIFD(); + } + + if (m_pExif->enableGps) { + pSubIFDBase = writer.BeginSubIFD(EXIF_TAG_GPS_IFD_POINTER); + if (pSubIFDBase) { // This should be always true!! 
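+            // Fill the GPS IFD: version, latitude/longitude/altitude with their reference tags, timestamp and datestamp, and the ASCII-prefixed processing method string.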
+ CIFDWriter gpswriter(tiffheader, pSubIFDBase, m_nGPSIFDFields); + gpswriter.WriteByte(EXIF_TAG_GPS_VERSION_ID, 4, m_pExif->gps_version_id); + gpswriter.WriteASCII(EXIF_TAG_GPS_LATITUDE_REF, 2, m_pExif->gps_latitude_ref); + gpswriter.WriteRational(EXIF_TAG_GPS_LATITUDE, 3, m_pExif->gps_latitude); + gpswriter.WriteASCII(EXIF_TAG_GPS_LONGITUDE_REF, 2, m_pExif->gps_longitude_ref); + gpswriter.WriteRational(EXIF_TAG_GPS_LONGITUDE, 3, m_pExif->gps_longitude); + gpswriter.WriteByte(EXIF_TAG_GPS_ALTITUDE_REF, 1, &m_pExif->gps_altitude_ref); + gpswriter.WriteRational(EXIF_TAG_GPS_ALTITUDE, 1, &m_pExif->gps_altitude); + gpswriter.WriteCString(EXIF_TAG_GPS_DATESTAMP, EXIF_GPSDATESTAMP_LENGTH, + m_pExif->gps_datestamp); + gpswriter.WriteRational(EXIF_TAG_GPS_TIMESTAMP, 3, m_pExif->gps_timestamp); + size_t len = strlen(m_pExif->gps_processing_method); + if (len > 0) { + size_t idx; + len = min(len, static_cast(99UL)); + unsigned char buf[sizeof(ExifAsciiPrefix) + len + 1]; + for (idx = 0; idx < sizeof(ExifAsciiPrefix); idx++) + buf[idx] = ExifAsciiPrefix[idx]; + strncpy(reinterpret_cast(buf) + idx, m_pExif->gps_processing_method, len + 1); + len += idx; + buf[len] = '\0'; + gpswriter.WriteUndef(EXIF_TAG_GPS_PROCESSING_METHOD, len + 1, buf); + } + gpswriter.Finish(true); + writer.EndSubIFD(gpswriter.GetNextIFDBase()); + } else { + writer.CancelSubIFD(); + } + } + + // thumbnail and the next IFD pointer is never updated. + if (updating) + return NULL; + + if (m_pExif->enableThumb) { + writer.Finish(false); + + CIFDWriter thumbwriter(tiffheader, writer.GetNextIFDBase(), m_n1stIFDFields); + thumbwriter.WriteLong(EXIF_TAG_IMAGE_WIDTH, 1, &m_pExif->widthThumb); + thumbwriter.WriteLong(EXIF_TAG_IMAGE_HEIGHT, 1, &m_pExif->heightThumb); + thumbwriter.WriteShort(EXIF_TAG_COMPRESSION_SCHEME, 1, &m_pExif->compression_scheme); + thumbwriter.WriteShort(EXIF_TAG_ORIENTATION, 1, &m_pExif->orientation); + + ALOG_ASSERT(thumbwriter.GetNextIFDBase() != m_pThumbBase); + uint32_t offset = thumbwriter.Offset(m_pThumbBase); + thumbwriter.WriteLong(EXIF_TAG_JPEG_INTERCHANGE_FORMAT, 1, &offset); + offset = 0; // temporarilly 0 byte + thumbwriter.WriteLong(EXIF_TAG_JPEG_INTERCHANGE_FORMAT_LEN, 1, &offset); + m_pThumbSizePlaceholder = thumbwriter.GetNextTagAddress() - 4; + thumbwriter.Finish(true); + + size_t thumbspace = reserve_thumbnail_space ? 
m_szMaxThumbSize : 0; + + return thumbwriter.GetNextIFDBase() + thumbspace; + } + + writer.Finish(true); + + return writer.GetNextIFDBase(); +} + +void CAppMarkerWriter::Finalize(size_t thumbsize) +{ + if (m_pThumbSizePlaceholder) { + uint32_t len = static_cast(thumbsize); + WriteData(m_pThumbSizePlaceholder, len); + m_pThumbSizePlaceholder = NULL; + } +} +void CAppMarkerWriter::UpdateApp1Size(size_t amount) +{ + if (m_pAppBase) { + uint16_t len = m_szApp1 + amount; + WriteDataInBig(m_pAppBase + JPEG_MARKER_SIZE, len); + } +} + +static const char *dbgerrmsg = "Updating debug data failed"; + +static inline size_t GetSegLen(char *p) +{ + size_t len = (*reinterpret_cast(p) & 0xFF) << 8; + return len | (*reinterpret_cast(p + 1) & 0xFF); +} + +static inline size_t GetExtraAPPSize(debug_attribute_t *debug, unsigned int *appid_bits) +{ + int apps = debug->num_of_appmarker; + size_t len = 0; + + for (int idx = 0; idx < debug->num_of_appmarker; idx++) { + int appid = debug->idx[idx][0]; + unsigned int applen = debug->debugSize[appid]; + + if ((appid < EXTRA_APPMARKER_MIN) || (appid >= EXTRA_APPMARKER_LIMIT)) { + ALOGE("%s: Invalid extra APP segment ID %d", dbgerrmsg, appid); + return 0; + } + + if ((applen == 0) || (applen > (JPEG_MAX_SEGMENT_SIZE - JPEG_SEGMENT_LENFIELD_SIZE))) { + ALOGE("%s: Invalid APP%d segment size, %u bytes.", dbgerrmsg, appid, applen); + return 0; + } + + len += applen + JPEG_MARKER_SIZE + JPEG_SEGMENT_LENFIELD_SIZE; + *appid_bits |= 1 << appid; + } + + return len; +} + +bool UpdateDebugData(char *jpeg, size_t jpeglen, debug_attribute_t *debug) // include/ExynosExif.h +{ + if (!debug) { + ALOGI("No data to update in APPx"); + return true; + } + + if (debug->num_of_appmarker > (EXTRA_APPMARKER_LIMIT - EXTRA_APPMARKER_MIN)) { + ALOGE("%s: Too many extra APP markers %d", dbgerrmsg, debug->num_of_appmarker); + return false; + } + + unsigned int validappid_bits = 0; + size_t validlen = GetExtraAPPSize(debug, &validappid_bits); + + if (jpeglen < (validlen + JPEG_MARKER_SIZE)) { + ALOGE("%s: Too small JPEG stream length %zu", dbgerrmsg, jpeglen); + return false; + } + + if ((*jpeg++ != 0xFF) || (*jpeg++ != 0xD8)) { + ALOGE("%s: %p is not a valid JPEG stream", dbgerrmsg, jpeg); + return false; + } + jpeglen -= 2; + + while ((*jpeg++ == 0xFF) && (validlen > 0) && (jpeglen > validlen)) { + size_t seglen; + char marker; + int appid; + + marker = *jpeg++; + jpeglen -= 2; + + if ((marker == 0xDA) || (marker == 0xD9)) { // SOS and EOI + ALOGE("%s: No further space found for APPx metadata", dbgerrmsg); + return false; + } + + appid = marker & 0xF; + if (((marker & 0xF0) == 0xE0) && !!(validappid_bits & (1 << appid))) { + // validappid_bits always has valid index bits + // length check is performed in GetExtraAPPSize() + seglen = GetSegLen(jpeg); + if (seglen < (debug->debugSize[appid] + JPEG_SEGMENT_LENFIELD_SIZE)) { + ALOGE("%s: too small APP%d length %zu to store %u bytes", + dbgerrmsg, appid, seglen, debug->debugSize[appid]); + return false; + } + + memcpy(jpeg + JPEG_SEGMENT_LENFIELD_SIZE, + debug->debugData[appid], debug->debugSize[appid]); + ALOGD("Successfully updated %u bytes to APP%d", debug->debugSize[appid], appid); + + validlen -= debug->debugSize[appid] + JPEG_MARKER_SIZE + JPEG_SEGMENT_LENFIELD_SIZE; + } else { + // just skip all other segments + seglen = GetSegLen(jpeg); + if (seglen == 0) + seglen++; // fixup for invalid segment lengths + if (jpeglen < seglen) + seglen = jpeglen; + + } + + jpeg += seglen; + jpeglen -= seglen; + } + + return true; +} + +static const char 
*exiferrmsg = "Updating exif failed"; + +bool UpdateExif(char *jpeg, size_t jpeglen, exif_attribute_t *exif) +{ + if (!exif) { + ALOGI("No Exif to update"); + return true; + } + + if (jpeglen < (JPEG_MARKER_SIZE * 2 + JPEG_SEGMENT_LENFIELD_SIZE)) { + ALOGE("%s: Too small stream length %zu", exiferrmsg, jpeglen); + return false; + } + + if ((*jpeg++ != 0xFF) || (*jpeg++ != 0xD8)) { + ALOGE("%s: %p is not a valid JPEG stream", exiferrmsg, jpeg); + return false; + } + + if ((*jpeg != 0xFF) || (*(jpeg + 1) != 0xE1)) { + ALOGE("%s: APP1 marker is not found", exiferrmsg); + return false; + } + + if (jpeglen < GetSegLen(jpeg + JPEG_MARKER_SIZE)) { + ALOGE("%s: Too small stream length %zu", exiferrmsg, jpeglen); + return false; + } + + CAppMarkerWriter writer(jpeg, exif, NULL); + writer.Update(); + + ALOGD("Successfully updated Exif"); + + return true; +} diff --git a/libhwjpeg/AppMarkerWriter.h b/libhwjpeg/AppMarkerWriter.h new file mode 100644 index 0000000..f54be4f --- /dev/null +++ b/libhwjpeg/AppMarkerWriter.h @@ -0,0 +1,137 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef __HARDWARE_SAMSUNG_SLSI_EXYNOS_APPMARKER_WRITER_H__ +#define __HARDWARE_SAMSUNG_SLSI_EXYNOS_APPMARKER_WRITER_H__ + +#include + +#define JPEG_MAX_SEGMENT_SIZE ((1 << 16) - 1) +#define JPEG_MARKER_SIZE 2 +#define JPEG_SEGMENT_LENFIELD_SIZE 2 + +#define IFD_FIELDCOUNT_SIZE 2 +#define IFD_NEXTIFDOFFSET_SIZE 4 + +#define IFD_TAG_SIZE 2 +#define IFD_TYPE_SIZE 2 +#define IFD_COUNT_SIZE 4 +#define IFD_VALOFF_SIZE 4 + +#define IFD_FIELD_SIZE \ + (IFD_TAG_SIZE + IFD_TYPE_SIZE + IFD_COUNT_SIZE + IFD_VALOFF_SIZE) + +#define EXTRA_APPMARKER_MIN 4 +#define EXTRA_APPMARKER_LIMIT 10 + +#define MAX_GPS_PROCESSINGMETHOD_SIZE 108 + +#define EXIF_SUBSECTIME_LENGTH 5 +#define EXIF_DATETIME_LENGTH 20 +#define EXIF_GPSDATESTAMP_LENGTH 11 + +class CAppMarkerWriter { + char *m_pAppBase; + char *m_pApp1End; + size_t m_szMaxThumbSize; // Maximum available thumbnail stream size minus JPEG_MARKER_SIZE + uint16_t m_szApp1; // The size of APP1 segment without marker + uint16_t m_szApp11; // The size of APP11 segment without marker + uint16_t m_n0thIFDFields; + uint16_t m_n1stIFDFields; + uint16_t m_nExifIFDFields; + uint16_t m_nGPSIFDFields; + exif_attribute_t *m_pExif; + debug_attribute_t *m_pDebug; + + uint32_t m_szMake; + uint32_t m_szSoftware; + uint32_t m_szModel; + uint32_t m_szDateTime; + uint32_t m_szSubsecTime; + uint32_t m_szUniqueID; + + char *m_pMainBase; + // The address to write compressed stream of the thumbnail image + char *m_pThumbBase; + // points to the "ValueOffset" field of JPEGInterchangeFormatLen tag of 1st IFD + // This should be updated after compression of thumbnail image. + // Note that the address may not be aligned by 32-bit. 
+ char *m_pThumbSizePlaceholder; + + void Init(); + + char *WriteAPP1(char *base, bool reserve_thumbnail_space, bool updating = false); + char *WriteAPPX(char *base, bool just_reserve); + char *WriteAPP11(char *current, size_t dummy, size_t align); +public: + // dummy: number of dummy bytes written by the compressor of the main image + // this dummy size should be added to the APP1 length. Howerver, this dummy area + // for the main image stream should not be written by neither of appwriter nor + // thumbnail compressor. + CAppMarkerWriter(); + CAppMarkerWriter(char *base, exif_attribute_t *exif, debug_attribute_t *debug); + + ~CAppMarkerWriter() { } + + void PrepareAppWriter(char *base, exif_attribute_t *exif, debug_attribute_t *debug); + + char *GetMainStreamBase() { return m_pMainBase; } + char *GetThumbStreamBase() { return m_pThumbBase; } + char *GetThumbStreamSizeAddr() { + char *p = m_pThumbSizePlaceholder; + m_pThumbSizePlaceholder = NULL; + return p; + } + size_t GetMaxThumbnailSize() { return m_szMaxThumbSize; } + // CalculateAPPSize() is valid after Write() is successful. + size_t CalculateAPPSize(size_t thumblen = JPEG_MAX_SEGMENT_SIZE) { + size_t appsize = 0; + if (m_szApp1 > 0) + appsize += m_szApp1 + JPEG_MARKER_SIZE; + if (m_pDebug) { + for (int idx = 0; idx < m_pDebug->num_of_appmarker; idx++) + appsize += m_pDebug->debugSize[m_pDebug->idx[idx][0]] + + JPEG_MARKER_SIZE + JPEG_SEGMENT_LENFIELD_SIZE; + } + if (IsThumbSpaceReserved()) + appsize += m_szMaxThumbSize; + else + appsize += min(m_szMaxThumbSize, thumblen); + + return appsize + m_szApp11; + } + + char *GetApp1End() { return m_pApp1End; } + + void Write(bool reserve_thumbnail_space, size_t dummy, size_t align, bool reserve_debug = false) { + m_pApp1End = WriteAPP1(m_pAppBase, reserve_thumbnail_space); + char *appXend = WriteAPPX(m_pApp1End, reserve_debug); + char *app11end = WriteAPP11(appXend, dummy, align); + m_szApp11 = PTR_DIFF(appXend, app11end); + m_pMainBase = app11end - dummy; + } + + void Update() { WriteAPP1(m_pAppBase, false, true); } + + bool IsThumbSpaceReserved() { + return PTR_DIFF(m_pAppBase, m_pApp1End) == (m_szApp1 + m_szMaxThumbSize + JPEG_MARKER_SIZE); + } + + void Finalize(size_t thumbsize); + + void UpdateApp1Size(size_t amount); +}; +#endif //__HARDWARE_SAMSUNG_SLSI_EXYNOS_APPMARKER_WRITER_H__ diff --git a/libhwjpeg/ExynosJpegEncoder.cpp b/libhwjpeg/ExynosJpegEncoder.cpp new file mode 100644 index 0000000..258ef0b --- /dev/null +++ b/libhwjpeg/ExynosJpegEncoder.cpp @@ -0,0 +1,251 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include + +#include "hwjpeg-internal.h" + +int ExynosJpegEncoder::setJpegConfig(void* pConfig) +{ + ExynosJpegEncoder *that = reinterpret_cast(pConfig); + + if (!setColorFormat(that->m_v4l2Format)) + return -1; + + if (!setJpegFormat(that->m_jpegFormat)) + return -1; + + if (!setSize(that->m_nWidth, that->m_nHeight)) + return -1; + + m_iInBufType = that->m_iInBufType; + m_iOutBufType = that->m_iOutBufType; + + return 0; +} + +int ExynosJpegEncoder::getInBuf(int *piBuf, int *piInputSize, int iSize) +{ + if (iSize < 1) { + ALOGE("Invalid array size %d for getInBuf()", iSize); + return -1; + } + + size_t len_buffers[iSize]; + if (!m_hwjpeg.GetImageBuffers(piBuf, len_buffers, static_cast(iSize))) + return -1; + + for (int i = 0; i < iSize; i++) + piInputSize[i] = static_cast(len_buffers[i]); + + return 0; +} + +int ExynosJpegEncoder::getOutBuf(int *piBuf, int *piOutputSize) +{ + size_t len; + if (!m_hwjpeg.GetJpegBuffer(piBuf, &len)) + return -1; + + *piOutputSize = static_cast(len); + return 0; +} + +int ExynosJpegEncoder::setInBuf(int *piBuf, int *iSize) +{ + size_t buflen[3]; + unsigned int bufnum = 3; + + if (!EnsureFormatIsApplied()) + return -1; + + if (!m_hwjpeg.GetImageBufferSizes(buflen, &bufnum)) + return -1; + + for (unsigned int i = 0; i < bufnum; i++) + buflen[i] = static_cast(iSize[i]); + + if (!m_hwjpeg.SetImageBuffer(piBuf, buflen, bufnum)) + return -1; + + m_iInBufType = JPEG_BUF_TYPE_DMA_BUF; + + return 0; +} + +int ExynosJpegEncoder::setOutBuf(int iBuf, int iSize) +{ + if (!m_hwjpeg.SetJpegBuffer(iBuf, static_cast(iSize))) + return -1; + + m_iOutBufType = JPEG_BUF_TYPE_DMA_BUF; + + return 0; +} + +int ExynosJpegEncoder::getInBuf(char **pcBuf, int *piInputSize, int iSize) +{ + if (iSize < 1) { + ALOGE("Invalid array size %d for getInBuf()", iSize); + return -1; + } + + size_t len_buffers[iSize]; + if (!m_hwjpeg.GetImageBuffers(pcBuf, len_buffers, static_cast(iSize))) + return -1; + + for (int i = 0; i < iSize; i++) + piInputSize[i] = static_cast(len_buffers[i]); + + return 0; +} + +int ExynosJpegEncoder::getOutBuf(char **pcBuf, int *piOutputSize) +{ + size_t len; + if (!m_hwjpeg.GetJpegBuffer(pcBuf, &len)) + return -1; + + *piOutputSize = static_cast(len); + return 0; +} + +int ExynosJpegEncoder::setInBuf(char **pcBuf, int *iSize) +{ + size_t buflen[3]; + unsigned int bufnum = 3; + + if (!EnsureFormatIsApplied()) + return -1; + + if (!m_hwjpeg.GetImageBufferSizes(buflen, &bufnum)) + return -1; + + for (unsigned int i = 0; i < bufnum; i++) + buflen[i] = static_cast(iSize[i]); + + if (!m_hwjpeg.SetImageBuffer(pcBuf, buflen, bufnum)) + return -1; + + m_iInBufType = JPEG_BUF_TYPE_USER_PTR; + return 0; +} + +int ExynosJpegEncoder::setOutBuf(char *pcBuf, int iSize) +{ + if (!m_hwjpeg.SetJpegBuffer(pcBuf, static_cast(iSize))) + return -1; + + m_iOutBufType = JPEG_BUF_TYPE_USER_PTR; + + return 0; +} + +int ExynosJpegEncoder::setJpegFormat(int iV4l2JpegFormat) +{ + if (m_jpegFormat == iV4l2JpegFormat) + return 0; + + unsigned int hfactor, vfactor; + switch (iV4l2JpegFormat) { + case V4L2_PIX_FMT_JPEG_444: + hfactor = 1; + vfactor = 1; + break; + case V4L2_PIX_FMT_JPEG_422: + hfactor = 2; + vfactor = 1; + break; + case V4L2_PIX_FMT_JPEG_420: + hfactor = 2; + vfactor = 2; + break; + case V4L2_PIX_FMT_JPEG_GRAY: + hfactor = 0; + vfactor = 0; + break; + case V4L2_PIX_FMT_JPEG_422V: + hfactor = 1; + vfactor = 2; + break; + case V4L2_PIX_FMT_JPEG_411: + hfactor = 4; + vfactor = 1; + break; + default: + ALOGE("Unknown JPEG format `%08Xh", iV4l2JpegFormat); + return -1; + 
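+    // For reference, (hfactor, vfactor) is the luma-to-chroma sampling ratio per
+    // axis that this switch selects:
+    //     4:4:4 -> (1,1)   4:2:2 -> (2,1)   4:2:0 -> (2,2)
+    //     4:2:2V -> (1,2)  4:1:1 -> (4,1)   grayscale -> (0,0), i.e. no chroma
+    // A hypothetical caller picking 4:2:0 (names assumed for illustration only):
+    //
+    //     if (encoder.setJpegFormat(V4L2_PIX_FMT_JPEG_420) < 0)
+    //         ALOGE("failed to select 4:2:0 chroma subsampling");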
} + + if (!m_hwjpeg.SetChromaSampFactor(hfactor, vfactor)) + return -1; + + m_jpegFormat = iV4l2JpegFormat; + + return 0; +} + +int ExynosJpegEncoder::setColorBufSize(int *piBufSize, int iSize) +{ + size_t len[3]; + unsigned int num = static_cast(iSize); + + if (!m_hwjpeg.GetImageBufferSizes(len, &num)) + return -1; + + for (unsigned int i = 0; i < num; i++) + piBufSize[i] = static_cast(len[i]); + + return 0; +} + +bool ExynosJpegEncoder::__EnsureFormatIsApplied() { + if (TestStateEither(STATE_SIZE_CHANGED | STATE_PIXFMT_CHANGED) && + !m_hwjpeg.SetImageFormat(m_v4l2Format, m_nWidth, m_nHeight)) + return false; + + ClearState(STATE_SIZE_CHANGED | STATE_PIXFMT_CHANGED); + return true; +} + +static unsigned char jpeg_zigzagorder[] = { + 0, 1, 8, 16, 9, 2, 3, 10, + 17, 24, 32, 25, 18, 11, 4, 5, + 12, 19, 26, 33, 40, 48, 41, 34, + 27, 20, 13, 6, 7, 14, 21, 28, + 35, 42, 49, 56, 57, 50, 43, 36, + 29, 22, 15, 23, 30, 37, 44, 51, + 58, 59, 52, 45, 38, 31, 39, 46, + 53, 60, 61, 54, 47, 55, 62, 63 +}; + +int ExynosJpegEncoder::setQuality(const unsigned char q_table[]) { + unsigned char qtbl[128]; + + for (unsigned int i = 0; i < ARRSIZE(jpeg_zigzagorder); i++) + qtbl[i] = q_table[jpeg_zigzagorder[i]]; + + for (unsigned int i = 0; i < ARRSIZE(jpeg_zigzagorder); i++) + qtbl[i + 64] = q_table[jpeg_zigzagorder[i] + 64]; + + if (!m_hwjpeg.SetQuality(qtbl)) + return -1; + + return 0; +} diff --git a/libhwjpeg/ExynosJpegEncoderForCamera.cpp b/libhwjpeg/ExynosJpegEncoderForCamera.cpp new file mode 100644 index 0000000..3310aa7 --- /dev/null +++ b/libhwjpeg/ExynosJpegEncoderForCamera.cpp @@ -0,0 +1,878 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include +#include + +#include +#include + +#include + +#include "hwjpeg-internal.h" +#include "AppMarkerWriter.h" +#include "hwjpeg-libcsc.h" +#include "IFDWriter.h" + +// Data length written by H/W without the scan data. 
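+// This presumably covers the fixed header segments the codec always emits
+// (DQT, SOF, DHT and SOS); the 2 * JPEG_MARKER_SIZE term accounts for the
+// SOI and EOI markers. ProcessExif() uses it below to verify that the stream
+// buffer can hold the APP segments plus at least this much header data.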
+#define NECESSARY_JPEG_LENGTH (0x24B + 2 * JPEG_MARKER_SIZE) + +static size_t GetImageLength(unsigned int width, unsigned int height, int v4l2Format) +{ + size_t size = width * height; + + switch(v4l2Format) { + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_YVYU: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_VYUY: + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_NV61: + case V4L2_PIX_FMT_YUV422P: + return size * 2; + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + case V4L2_PIX_FMT_YUV420: + return size + (size / 4) * 2; + } + + return 0; +} + +ExynosJpegEncoderForCamera::ExynosJpegEncoderForCamera(bool bBTBComp) + : m_phwjpeg4thumb(NULL), m_fdIONClient(-1), m_fdIONThumbImgBuffer(-1), m_pIONThumbImgBuffer(NULL), + m_szIONThumbImgBuffer(0), m_pIONThumbJpegBuffer(NULL), m_szIONThumbJpegBuffer(0), + m_nThumbWidth(0), m_nThumbHeight(0), m_nThumbQuality(0), + m_iHWScalerID(CSC_HW_SC1), m_pStreamBase(NULL), m_fThumbBufferType(0) +{ + m_pAppWriter = new CAppMarkerWriter(); + if (!m_pAppWriter) { + ALOGE("Failed to allocated an instance of CAppMarkerWriter"); + return; + } + + m_phwjpeg4thumb = new CHWJpegV4L2Compressor(); + if (!m_phwjpeg4thumb) { + ALOGE("Failed to create thumbnail compressor!"); + return; + } + + if (!m_phwjpeg4thumb->SetChromaSampFactor(2, 2)) { + ALOGE("Failed to configure chroma subsampling factor to YUV420 for thumbnail compression"); + } + + m_pLibCSC = new CLibCSC; + if (m_pLibCSC) { + if (!m_pLibCSC->init(m_iHWScalerID - CSC_HW_SC0)) { + ALOGE("Failed to create LibCSC instance of HW%d", m_iHWScalerID); + delete m_pLibCSC; + m_pLibCSC = NULL; + } + } else { + ALOGE("Failed to create CLibCSC"); + } + + m_fdIONClient = ion_open(); + if (m_fdIONClient < 0) { + ALOGERR("Failed to create ION client for thumbnail conversion"); + } + + if (!bBTBComp) + SetState(STATE_NO_BTBCOMP); + + // STATE_THUMBSIZE_CHANGED is to know if thumbnail image size need to be + // configured to HWJPEG. If HWJPEG does not support for back-to-back + // compression, it should not be configured. 
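+    // "Back-to-back" (B2B) compression means the main image and the thumbnail are
+    // compressed in a single run of the JPEG H/W block. When the device lacks this
+    // capability, the thumbnail is compressed separately through m_phwjpeg4thumb
+    // (see CompressThumbnailOnly() below).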
+ if (IsBTBCompressionSupported()) + SetState(STATE_THUMBSIZE_CHANGED); + + // Do not cache clean even the cacheable buffers because + // it is guaranteed that the source buffer is not written by CPU + GetCompressor().SetAuxFlags(EXYNOS_HWJPEG_AUXOPT_SRC_NOCACHECLEAN); + + ALOGD("ExynosJpegEncoderForCamera Created: %p, ION %d", this, m_fdIONClient); +} + +ExynosJpegEncoderForCamera::~ExynosJpegEncoderForCamera() +{ + delete m_pAppWriter; + delete m_pLibCSC; + delete m_phwjpeg4thumb; + + if (m_pIONThumbImgBuffer != NULL) + munmap(m_pIONThumbImgBuffer, m_szIONThumbImgBuffer); + + if (m_fdIONThumbImgBuffer >= 0) + close(m_fdIONThumbImgBuffer); + + if (m_pIONThumbJpegBuffer) + munmap(m_pIONThumbJpegBuffer, m_szIONThumbJpegBuffer); + + if (m_fdIONClient >= 0) + ion_close(m_fdIONClient); + + ALOGD("ExynosJpegEncoderForCamera Destroyed: %p, ION %d, ThumIMG %d ThumbJPG %p", + this, m_fdIONClient, m_fdIONThumbImgBuffer, m_pIONThumbJpegBuffer); +} + +int ExynosJpegEncoderForCamera::setThumbnailSize(int w, int h) +{ + if ((m_nThumbWidth == w) && (m_nThumbHeight == h)) + return 0; + + // w == 0 and h == 0 resets thumbnail configuration + if (((w | h) != 0) && ((w < 16) || (h < 16))) { + ALOGE("Too small thumbnail image size %dx%d", w, h); + return -1; + } + + m_nThumbWidth = w; + m_nThumbHeight = h; + + if (IsBTBCompressionSupported()) + SetState(STATE_THUMBSIZE_CHANGED); + + return 0; +} + +int ExynosJpegEncoderForCamera::setThumbnailQuality(int quality) +{ + if (m_nThumbQuality == quality) + return 0; + + if ((quality > 100) || (quality < 1)) { + ALOGE("Invalid quality factor %d for thumbnail image", quality); + return -1; + } + + m_nThumbQuality = quality; + + return GetCompressor().SetQuality(0, m_nThumbQuality) ? 0 : -1; +} + +bool ExynosJpegEncoderForCamera::EnsureFormatIsApplied() { + if (TestStateEither(STATE_PIXFMT_CHANGED | STATE_SIZE_CHANGED | STATE_THUMBSIZE_CHANGED)) { + int thumb_width = m_nThumbWidth; + int thumb_height = m_nThumbHeight; + int width = 0; + int height = 0; + + if (IsThumbGenerationNeeded() || !IsBTBCompressionSupported()) { + thumb_width = 0; + thumb_height = 0; + } + + getSize(&width, &height); + if (!GetCompressor().SetImageFormat( + getColorFormat(), width, height, thumb_width, thumb_height)) + return false; + + ClearState(STATE_PIXFMT_CHANGED | STATE_SIZE_CHANGED | STATE_THUMBSIZE_CHANGED); + } + + return true; +} + +size_t ExynosJpegEncoderForCamera::RemoveTrailingDummies(char *base, size_t len) +{ + ALOG_ASSERT(len > 4); + ALOG_ASSERT((base[0] == 0xFF) && (base[1] == 0xD8)); // SOI marker + + size_t riter = len - 2; + + while (riter > 0) { + if ((base[riter] == 0xFF) && (base[riter + 1] == 0xD9)) { // EOI marker + ALOGI_IF(riter < (len - 2), "Found %zu dummies after EOI", len - riter - 2); + return riter + 2; + } + riter--; + } + + ALOGE("EOI is not found!"); + ALOG_ASSERT(true); + + return 0; +} + +void *ExynosJpegEncoderForCamera::tCompressThumbnail(void *p) +{ + ExynosJpegEncoderForCamera *encoder = reinterpret_cast(p); + + size_t thumblen = encoder->CompressThumbnail(); + return reinterpret_cast(thumblen); +} + +bool ExynosJpegEncoderForCamera::ProcessExif(char *base, size_t limit, + exif_attribute_t *exifInfo, + debug_attribute_t *debuginfo) +{ + // PREREQUISITES: The main and the thumbnail image size should be configured before. 
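+    // The writer lays the stream out roughly as
+    //     SOI | APP1 (Exif [+ embedded thumbnail]) | APP4/APP11 (debug data) | main image data
+    // so the APP segments plus NECESSARY_JPEG_LENGTH of fixed header data must fit
+    // within 'limit'.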
+
+    // Sanity check
+    uint32_t width = 0;
+    uint32_t height = 0;
+
+    getSize(reinterpret_cast<int *>(&width), reinterpret_cast<int *>(&height));
+
+    if (exifInfo) {
+        if ((exifInfo->width != width) || (exifInfo->height != height)) {
+            ALOGE("Inconsistent image dimension: Exif %dx%d, Thumb %dx%d",
+                  exifInfo->width, exifInfo->height, width, height);
+            return false;
+        }
+
+        bool copy_exif = false;
+        if (exifInfo->enableThumb) {
+            if ((exifInfo->widthThumb != static_cast<uint32_t>(m_nThumbWidth)) ||
+                (exifInfo->heightThumb != static_cast<uint32_t>(m_nThumbHeight))) {
+                ALOGE("Inconsistent thumbnail information: Exif %dx%d, Thumb %dx%d",
+                      exifInfo->widthThumb, exifInfo->heightThumb, m_nThumbWidth, m_nThumbHeight);
+                return false;
+            }
+        }
+    }
+
+    // Give the appwriter the address beyond the SOI marker
+    // because the SOI marker is handled by this class
+    size_t align = 16;
+    if (!!(GetDeviceCapabilities() & V4L2_CAP_EXYNOS_JPEG_NO_STREAMBASE_ALIGN))
+        align = 1;
+
+    m_pAppWriter->PrepareAppWriter(base + JPEG_MARKER_SIZE, exifInfo, debuginfo);
+
+    if (limit <= (m_pAppWriter->CalculateAPPSize(0) + NECESSARY_JPEG_LENGTH)) {
+        ALOGE("Too small JPEG stream buffer size, %zu bytes", limit);
+        return false;
+    }
+
+    bool reserve_thumbspace = true;
+
+    // If the length of the given stream buffer is too small and thumbnail
+    // compression is also required, the compressed stream data of the main
+    // image is appended after the end of the fields of IFD1. That place is
+    // actually reserved for the embedded thumbnail, but the main JPEG stream
+    // is written there in this case because it is unknown how large the
+    // compressed data of the thumbnail image will be.
+    // After the main and the thumbnail image compressions are completed,
+    // the compressed data of the main image is shifted by the length of the
+    // compressed data of the thumbnail image. Then the compressed data of
+    // the thumbnail image is copied to the place reserved for it.
+    if (!exifInfo || !exifInfo->enableThumb || (limit < (JPEG_MAX_SEGMENT_SIZE * 10)))
+        reserve_thumbspace = false;
+
+    m_pAppWriter->Write(reserve_thumbspace, JPEG_MARKER_SIZE, align,
+                        TestState(STATE_HWFC_ENABLED));
+
+    ALOGD("Image compression starts from offset %zu (APPx size %zu, HWFC? %d, NBTB? 
%d)", + PTR_DIFF(base, m_pAppWriter->GetMainStreamBase()), m_pAppWriter->CalculateAPPSize(), + TestState(STATE_HWFC_ENABLED),TestState(STATE_NO_BTBCOMP)); + + return true; +} + +bool ExynosJpegEncoderForCamera::PrepareCompression(bool thumbnail) +{ + if (!thumbnail) + return true; + + if (IsThumbGenerationNeeded()) { + if (pthread_create(&m_threadWorker, NULL, + tCompressThumbnail, reinterpret_cast(this)) != 0) { + ALOGERR("Failed to create thumbnail generation thread"); + return false; + } + } else { + // allocate temporary thumbnail stream buffer + // to prevent overflow of the compressed stream + if (!AllocThumbJpegBuffer()) { + return false; + } + } + + if (!TestState(STATE_NO_BTBCOMP) && IsBTBCompressionSupported()) { + if (!GetCompressor().SetJpegBuffer2(m_pIONThumbJpegBuffer, m_szIONThumbJpegBuffer)) { + ALOGE("Failed to configure thumbnail buffer @ %p(size %zu)", + m_pIONThumbJpegBuffer, m_szIONThumbJpegBuffer); + return false; + } + } + + return true; +} + +int ExynosJpegEncoderForCamera::encode(int *size, exif_attribute_t *exifInfo, + char** pcJpegBuffer, debug_attribute_t *debugInfo) +{ + if (!(*pcJpegBuffer)) { + ALOGE("Target stream buffer is not specified"); + return -1; + } + + if (*size <= 0) { + ALOGE("Too small stram buffer length %d bytes", *size); + return -1; + } + + m_pStreamBase = *pcJpegBuffer; + m_nStreamSize = *size; // contains max buffer length until the compression finishes + + char *jpeg_base = m_pStreamBase; + + ALOGI_IF(!exifInfo, "Exif is not specified. Skipping writing APP1 marker"); + ALOGI_IF(!debugInfo, + "Debugging information is not specified. Skipping writing APP4 marker"); + ALOGD("Given stream buffer size: %d bytes", *size); + + CStopWatch stopwatch(true); + + if (!ProcessExif(jpeg_base, m_nStreamSize, exifInfo, debugInfo)) + return -1; + + int buffsize = static_cast(m_nStreamSize - PTR_DIFF(m_pStreamBase, m_pAppWriter->GetMainStreamBase())); + if (setOutBuf(m_pAppWriter->GetMainStreamBase(),buffsize) < 0) { + ALOGE("Failed to configure stream buffer : addr %p, streamSize %d", + m_pAppWriter->GetMainStreamBase(), buffsize); + return -1; + } + + unsigned long compress_delay = 0; + unsigned long extra_delay = 0; + bool block_mode = !TestState(STATE_HWFC_ENABLED); + bool thumbenc = m_pAppWriter->GetThumbStreamBase() != NULL; + size_t thumblen = 0; + + // THUMB REQ? | THUMB IMG GIVEN? | B2B COMP? | HWFC(NONBLOCKING)? 
+ // CASE1: O | X | - | X + // CASE2: O | X | - | O + // CASE3: O | O | X | X + // CASE4: O | O | O | X + // CASE5: O | O | O | O + // CASE6: X | - | - | - + // CASE7: O | O | X | O + // + // CASE1 = thumbenc && IsThumbGenerationNeeded() && block_mode + // CASE2 = thumbenc && IsThumbGenerationNeeded() && !block_mode + // CASE3 = thumbenc && !IsThumbGenerationNeeded() && !IsBTBCompressionSupported() && !block_mode + // CASE4 = thumbenc && !IsThumbGenerationNeeded() && !STATE_NO_BTBCOMP && IsBTBCompressionSupported() && !block_mode + // CASE5 = thumbenc && !IsThumbGenerationNeeded() && !STATE_NO_BTBCOMP && IsBTBCompressionSupported() && block_mode + // CASE6 = !thumbenc + // CASE7 = thumbenc && !IsThumbGenerationNeeded() && STATE_NO_BTBCOMP && block_mode + + if (!thumbenc) { + // Confirm that no thumbnail information is transferred to HWJPEG + setThumbnailSize(0, 0); + } else if (!IsThumbGenerationNeeded() && IsBTBCompressionSupported() && + (m_fThumbBufferType != checkInBufType())) { + ALOGE("Buffer types of thumbnail(%d) and main(%d) images should be the same", + m_fThumbBufferType, checkInBufType()); + return -1; + } else if (!IsThumbGenerationNeeded() && (m_fThumbBufferType == 0)) { + // Thumbnail buffer configuration failed but the client forces to compress with thumbnail + ThumbGenerationNeeded(); + SetState(STATE_THUMBSIZE_CHANGED); + } + + if (!EnsureFormatIsApplied()) { + ALOGE("Failed to confirm format"); + return -1; + } + + if (!PrepareCompression(thumbenc)) { + ALOGE("Failed to prepare compression"); + return -1; + } + + ssize_t mainlen = GetCompressor().Compress(&thumblen, block_mode); + if (mainlen < 0) { + ALOGE("Error occured while JPEG compression: %zd", mainlen); + return -1; + } + + if (mainlen == 0) { /* non-blocking compression */ + ALOGD("Waiting for MCSC run"); + return 0; + } + + *size = static_cast(FinishCompression(mainlen, thumblen)); + if (*size < 0) + return -1; + + ALOGD("....compression delay(usec.): HW %lu, Total %lu)", + GetHWDelay(), stopwatch.GetElapsed()); + + return 0; +} + +ssize_t ExynosJpegEncoderForCamera::FinishCompression(size_t mainlen, size_t thumblen) +{ + bool btb = false; + size_t max_streamsize = m_nStreamSize; + char *mainbase = m_pAppWriter->GetMainStreamBase(); + char *thumbbase = m_pAppWriter->GetThumbStreamBase(); + + m_nStreamSize = 0; + + mainlen = RemoveTrailingDummies(mainbase, mainlen); + + // Clearing SOI of the main image written by H/W + m_pAppWriter->GetMainStreamBase()[0] = 0; + m_pAppWriter->GetMainStreamBase()[1] = 0; + + if (thumbbase) { + if (IsThumbGenerationNeeded()) { + void *len; + int ret = pthread_join(m_threadWorker, &len); + if (ret != 0) { + ALOGERR("Failed to wait thumbnail thread(%d)", ret); + return -1; + } + + if (len == NULL) + ALOGE("Error occurred during thumbnail creation: no thumbnail is embedded"); + + thumblen = reinterpret_cast(len); + } else if (TestState(STATE_NO_BTBCOMP) || !IsBTBCompressionSupported()) { + thumblen = CompressThumbnailOnly(m_pAppWriter->GetMaxThumbnailSize(), m_nThumbQuality, getColorFormat(), checkInBufType()); + } else { + btb = true; + } + + size_t max_thumb = min(m_pAppWriter->GetMaxThumbnailSize(), max_streamsize - m_pAppWriter->CalculateAPPSize(0) - mainlen); + + if (thumblen > max_thumb) { + ALOGI("Too large thumbnail (%dx%d) stream size %zu (max: %zu, quality factor %d)", + m_nThumbWidth, m_nThumbHeight, thumblen, max_thumb, m_nThumbQuality); + ALOGI("Retrying thumbnail compression with quality factor 50"); + thumblen = CompressThumbnailOnly(max_thumb, 50, getColorFormat(), 
checkInBufType()); + if (thumblen == 0) + return -1; + } + + if (!m_pAppWriter->IsThumbSpaceReserved()) { + if (PTR_TO_ULONG(m_pStreamBase + max_streamsize) < + PTR_TO_ULONG(mainbase + mainlen + thumblen - JPEG_MARKER_SIZE)) { + ALOGE("Too small JPEG buffer length %zu (APP %zu, Main %zu, Thumb %zu)", + max_streamsize, m_pAppWriter->CalculateAPPSize(thumblen), mainlen, thumblen); + return -1; + } + + // the SOI of the stream of the main image is stored after the APP4 or APP11 segment if they exist. + memmove(m_pAppWriter->GetApp1End() + thumblen, m_pAppWriter->GetApp1End(), + mainlen + PTR_DIFF(m_pAppWriter->GetApp1End(), m_pAppWriter->GetMainStreamBase())); + m_pAppWriter->UpdateApp1Size(thumblen); + + // m_nAppLength has the value of appwriter.GetExactAPPSize() + // Therefore m_nStreamSize should be initialized with thumbnail stream length; + } + + if (thumblen > 0) { + memcpy(m_pAppWriter->GetThumbStreamBase(), m_pIONThumbJpegBuffer, thumblen); + m_pAppWriter->Finalize(thumblen); + } + + if (m_pAppWriter->IsThumbSpaceReserved()) { + // clear the possible stale data in the dummy area after the thumbnail stream + memset(m_pAppWriter->GetThumbStreamBase() + thumblen, 0, + m_pAppWriter->GetMaxThumbnailSize() - thumblen); + } + } else { + thumblen = 0; + } + + m_nStreamSize += m_pAppWriter->CalculateAPPSize(thumblen) + mainlen; + + /* + * m_nAppLength: The size of APP1 segment and APP4 segment including markers + * getJpegSize(): size of the compressed stream of the main image + * Note that 2 byte(size of SOI marker) is included in APP1 segment size. + * Thus the size of SOI marker in front of the stream is not added. + */ + ALOGD("Completed image compression (%zd(thumb %zu) bytes, HWFC? %d, BTB? %d)", + mainlen, thumblen, TestState(STATE_HWFC_ENABLED), btb); + + m_pStreamBase[0] = 0xFF; + m_pStreamBase[1] = 0xD8; + + return m_nStreamSize; +} + +/* The logic in WaitForHWFC() is the same with encode() */ +ssize_t ExynosJpegEncoderForCamera::WaitForCompression() +{ + if (!TestState(STATE_HWFC_ENABLED)) + return m_nStreamSize; + + size_t thumblen = 0; + ssize_t streamlen = GetCompressor().WaitForCompression(&thumblen); + if (streamlen < 0) + return streamlen; + + return FinishCompression(streamlen, thumblen); +} + +bool ExynosJpegEncoderForCamera::GenerateThumbnailImage() +{ + if (!m_pLibCSC) { + ALOGE("Unable to create thumbnail because of no LibCSC"); + return false; + } + + int main_width, main_height; + if (getSize(&main_width, &main_height) < 0) { + ALOGE("Failed to get main image size"); + return false; + } + + int v4l2Format = getColorFormat(); + + if (!AllocThumbBuffer(v4l2Format)) + return false; + + ALOGD("Generating thumbnail image: %dx%d -> %dx%d", + main_width, main_height, m_nThumbWidth, m_nThumbHeight); + + if (!m_pLibCSC->set_src_format(main_width, main_height, V4L2FMT2HALFMT(v4l2Format))) { + ALOGE("Failed to configure the main image format to LibCSC"); + return false; + } + + // The source format for thumbnail compression is always NV12 + if (!m_pLibCSC->set_dst_format(m_nThumbWidth, m_nThumbHeight, V4L2FMT2HALFMT(v4l2Format))) { + ALOGE("Failed to configure the target image format to LibCSC"); + return false; + } + + int len_srcbufs[3] = {0, 0, 0}; + void *srcbufs[3] = {NULL, NULL, NULL}; + int memtype; + + if (checkInBufType() == JPEG_BUF_TYPE_USER_PTR) { + char *bufs[3]; + if (getInBuf(bufs, len_srcbufs, 3) < 0) { + ALOGE("Failed to retrieve the main image buffers"); + return false; + } + memtype = V4L2_MEMORY_USERPTR; + srcbufs[0] = reinterpret_cast(bufs[0]); + srcbufs[1] = 
reinterpret_cast(bufs[1]); + srcbufs[2] = reinterpret_cast(bufs[2]); + } else { // mainbuftype == JPEG_BUF_TYPE_DMA_BUF + int bufs[3]; + if (getInBuf(bufs, len_srcbufs, 3) < 0) { + ALOGE("Failed to retrieve the main image buffers"); + return false; + } + memtype = V4L2_MEMORY_DMABUF; + srcbufs[0] = reinterpret_cast(bufs[0]); + srcbufs[1] = reinterpret_cast(bufs[1]); + srcbufs[2] = reinterpret_cast(bufs[2]); + } + + if (!m_pLibCSC->set_src_buffer(srcbufs, memtype)) { + ALOGE("Failed to configure the main image buffers to LibCSC"); + return false; + } + + void *dstbuf[3] = {NULL, NULL, NULL}; + dstbuf[0] = reinterpret_cast(m_fdIONThumbImgBuffer); + if (!m_pLibCSC->set_dst_buffer(dstbuf, V4L2_MEMORY_DMABUF)) { + ALOGE("Failed to configure the thumbnail source buffer to LibCSC"); + return false; + } + + if (!m_pLibCSC->convert()) { + ALOGE("Failed to convert the main image to thumbnail with LibCSC"); + return false; + } + + return true; +} + +size_t ExynosJpegEncoderForCamera::CompressThumbnail() +{ + size_t streamsize = 0; + unsigned int v4l2Format = getColorFormat(); + int buftype = checkInBufType(); + + if (IsThumbGenerationNeeded()) { + if (!GenerateThumbnailImage()) + return 0; + + // libcsc output configured by this class is always NV21. + v4l2Format = getColorFormat(); + buftype = JPEG_BUF_TYPE_DMA_BUF; + // reduced setInBuf2() + m_fdThumbnailImageBuffer[0] = m_fdIONThumbImgBuffer; + m_szThumbnailImageLen[0] = m_szIONThumbImgBuffer; + } + + return CompressThumbnailOnly(m_pAppWriter->GetMaxThumbnailSize(), m_nThumbQuality, v4l2Format, buftype); +} + +bool ExynosJpegEncoderForCamera::AllocThumbBuffer(int v4l2Format) +{ + if (m_fdIONClient < 0) { + ALOGE("ION client is not created"); + return false; + } + + size_t thumbbufsize = GetImageLength(m_nThumbWidth, m_nThumbHeight, v4l2Format); + if (thumbbufsize == 0) { + ALOGE("Unsupported V4L2 format %#X for thumbnail", v4l2Format); + return false; + } + + if (m_fdIONThumbImgBuffer >= 0) { + if (m_szIONThumbImgBuffer >= thumbbufsize) + return true; + + if (m_pIONThumbImgBuffer != NULL) + munmap(m_pIONThumbImgBuffer, m_szIONThumbImgBuffer); + + close(m_fdIONThumbImgBuffer); + + m_fdIONThumbImgBuffer = -1; + m_pIONThumbImgBuffer = NULL; + m_szIONThumbImgBuffer = 0; + } + + if (ion_alloc_fd(m_fdIONClient, thumbbufsize, 0, ION_HEAP_SYSTEM_MASK, 0, &m_fdIONThumbImgBuffer) < 0) { + ALOGERR("Failed to allocate %zu bytes for NV12 %ux%u", thumbbufsize, m_nThumbHeight, m_nThumbWidth); + m_fdIONThumbImgBuffer = -1; + return false; + } + + m_szIONThumbImgBuffer = thumbbufsize; + + return AllocThumbJpegBuffer(); +} + +bool ExynosJpegEncoderForCamera::AllocThumbJpegBuffer() +{ + if (m_fdIONClient < 0) { + ALOGE("ION client is not created"); + return false; + } + + size_t thumbbufsize = m_nThumbHeight * m_nThumbWidth * 3; + + if (m_pIONThumbJpegBuffer) { + if (m_szIONThumbJpegBuffer >= thumbbufsize) + return true; + + munmap(m_pIONThumbJpegBuffer, m_szIONThumbJpegBuffer); + + m_szIONThumbJpegBuffer = 0; + m_pIONThumbJpegBuffer = NULL; + } + + int fd = -1; + if (ion_alloc_fd(m_fdIONClient, thumbbufsize, 0, ION_HEAP_SYSTEM_MASK, + ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC, &fd) < 0) { + ALOGERR("Failed to allocate %zu bytes for thumbnail stream buffer of %ux%u", + thumbbufsize, m_nThumbHeight, m_nThumbWidth); + return false; + } + + m_pIONThumbJpegBuffer = reinterpret_cast( + mmap(NULL, thumbbufsize, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0)); + close(fd); + if (m_pIONThumbJpegBuffer == MAP_FAILED) { + ALOGERR("Failed to map thumbnail stream buffer (%zu 
bytes)", thumbbufsize); + + m_pIONThumbJpegBuffer = NULL; + } else { + m_szIONThumbJpegBuffer = thumbbufsize; + } + + return m_pIONThumbJpegBuffer != NULL; +} + +size_t ExynosJpegEncoderForCamera::CompressThumbnailOnly(size_t limit, int quality, + unsigned int v4l2Format, int src_buftype) +{ + if (!m_phwjpeg4thumb->SetImageFormat(v4l2Format, m_nThumbWidth, m_nThumbHeight)) { + ALOGE("Failed to configure thumbnail source image format to %#010x, %ux%u", + v4l2Format, m_nThumbWidth, m_nThumbHeight); + return 0; + } + + unsigned int num_buffers = 1; + switch (v4l2Format) { + case V4L2_PIX_FMT_YUV420M: + case V4L2_PIX_FMT_YVU420M: + num_buffers++; + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV21M: + num_buffers++; + } + + if (src_buftype == JPEG_BUF_TYPE_USER_PTR) { + if (!m_phwjpeg4thumb->SetImageBuffer(m_pThumbnailImageBuffer, + m_szThumbnailImageLen, num_buffers)) { + ALOGE("Failed to configure thumbnail buffers(userptr) for thumbnail"); + return 0; + } + } else { // JPEG_BUF_TYPE_DMA_BUF + if (!m_phwjpeg4thumb->SetImageBuffer(m_fdThumbnailImageBuffer, + m_szThumbnailImageLen, num_buffers)) { + ALOGE("Failed to configure thumbnail buffers(dmabuf) for thumbnail"); + return 0; + } + } + + if (!m_phwjpeg4thumb->SetJpegBuffer(m_pIONThumbJpegBuffer, m_szIONThumbJpegBuffer)) { + ALOGE("Failed to configure thumbnail stream buffer (addr %p, size %zu)", + m_pIONThumbJpegBuffer, m_szIONThumbJpegBuffer); + return 0; + } + + // Since the compressed stream of the thumbnail image is to be embedded in + // APP1 segment, at the end of Exif metadata, the length of the stream should + // not exceed the maximum length of a segment, 64KB minus the length of Exif + // metadata. If the stream length is too large, repeat the compression until + // the length become proper to embed. + while (quality >= 20) { + if (!m_phwjpeg4thumb->SetQuality(quality)) { + ALOGE("Failed to configure thumbnail quality factor %u", quality); + return 0; + } + + ssize_t thumbsize = m_phwjpeg4thumb->Compress(); + if (thumbsize < 0) { + ALOGE("Failed to compress thumbnail"); + return 0; + } + + thumbsize = RemoveTrailingDummies(m_pIONThumbJpegBuffer, thumbsize); + if (static_cast(thumbsize) > limit) { + quality = min(50, quality - 10); + ALOGI_IF(quality >= 20, + "Too large thumbnail stream size %zu. 
Retrying with quality factor %d...", + thumbsize, quality); + } else { + return thumbsize; + } + } + + ALOG_ASSERT(false, "It should never reach here"); + ALOGE("Thumbnail compression finally failed"); + + return 0; +} + +int ExynosJpegEncoderForCamera::setInBuf2(int *piBuf, int *iSize) +{ + NoThumbGenerationNeeded(); + + if (!EnsureFormatIsApplied()) + return -1; + + CHWJpegCompressor &hwjpeg = GetCompressor(); + unsigned int num_buffers = 3; + if (!hwjpeg.GetImageBufferSizes(m_szThumbnailImageLen, &num_buffers)) { + ALOGE("Failed to get image buffer sizes"); + return -1; + } + + for (unsigned int i = 0; i < num_buffers; i++) { + m_szThumbnailImageLen[i] = iSize[i]; + m_fdThumbnailImageBuffer[i] = piBuf[i]; + } + + if (IsBTBCompressionSupported() && + !hwjpeg.SetImageBuffer2(m_fdThumbnailImageBuffer, m_szThumbnailImageLen, num_buffers)) { + ALOGE("Failed to configure thumbnail buffers"); + return -1; + } + + m_fThumbBufferType = JPEG_BUF_TYPE_DMA_BUF; + + return 0; +} + +int ExynosJpegEncoderForCamera::setInBuf2(char **pcBuf, int *iSize) +{ + NoThumbGenerationNeeded(); + + if (!EnsureFormatIsApplied()) + return -1; + + CHWJpegCompressor &hwjpeg = GetCompressor(); + unsigned int num_buffers = 3; + if (!hwjpeg.GetImageBufferSizes(m_szThumbnailImageLen, &num_buffers)) { + ALOGE("Failed to get image buffer sizes"); + return -1; + } + + for (unsigned int i = 0; i < num_buffers; i++) { + m_szThumbnailImageLen[i] = iSize[i]; + m_pThumbnailImageBuffer[i] = pcBuf[i]; + } + + if (IsBTBCompressionSupported() && + !hwjpeg.SetImageBuffer2(m_pThumbnailImageBuffer, m_szThumbnailImageLen, num_buffers)) { + ALOGE("Failed to configure thumbnail buffers"); + return -1; + } + + m_fThumbBufferType = JPEG_BUF_TYPE_USER_PTR; + + return 0; +} + +size_t ExynosJpegEncoderForCamera::GetThumbnailImage(char *buffer, size_t buflen) +{ + if (m_fdIONThumbImgBuffer < 0) { + ALOGE("No internal thumbnail buffer is allocated"); + return 0; + } + + size_t thumbbufsize = GetImageLength(m_nThumbWidth, m_nThumbHeight, getColorFormat()); + if (buflen < thumbbufsize) { + ALOGE("Too small buffer %zu (thumbnail image size %zu)", buflen, thumbbufsize); + return 0; + } + + ALOG_ASSERT(m_szIONThumbImgBuffer >= thumbbufsize, + "m_szIONThumbImgBuffer(%zu) is smaller than the thumbnail (%zu)", + m_szIONThumbImgBuffer, thumbbufsize); + if (m_pIONThumbImgBuffer == NULL) { + m_pIONThumbImgBuffer = reinterpret_cast(mmap( + NULL, m_szIONThumbImgBuffer, PROT_READ, MAP_SHARED, m_fdIONThumbImgBuffer, 0)); + if (m_pIONThumbImgBuffer == MAP_FAILED) { + m_pIONThumbImgBuffer = NULL; + ALOGERR("Failed to map thumbnail image buffer (%zu bytes)", m_szIONThumbImgBuffer); + return 0; + } + } + + memcpy(buffer, m_pIONThumbImgBuffer, thumbbufsize); + + ALOGD("Copied thumbnail image to %p (%zu bytes)", buffer, thumbbufsize); + + return m_szIONThumbImgBuffer; +} + +int ExynosJpegEncoderForCamera::destroy() +{ + GetCompressor().Release(); + return 0; +} diff --git a/libhwjpeg/IFDWriter.h b/libhwjpeg/IFDWriter.h new file mode 100644 index 0000000..a866f74 --- /dev/null +++ b/libhwjpeg/IFDWriter.h @@ -0,0 +1,304 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef __HARDWARE_SAMSUNG_SLSI_EXYNOS_IFDWRITER_H__ +#define __HARDWARE_SAMSUNG_SLSI_EXYNOS_IFDWRITER_H__ + +#include "hwjpeg-internal.h" + +class CEndianessChecker { + bool __little; +public: + CEndianessChecker(); + operator bool() { return __little; } +}; + +extern CEndianessChecker __LITTLE_ENDIAN__; + +template +char *WriteDataInBig(char *p, T val) +{ + if (sizeof(val) == 1) { + *p++ = val; + } else if (__LITTLE_ENDIAN__) { + switch (sizeof(val)) { + case 2: + *p++ = static_cast((val >> 8) & 0xFF); + *p++ = static_cast(val & 0xFF); + break; + case 4: + *p++ = static_cast((val >> 24) & 0xFF); + *p++ = static_cast((val >> 16) & 0xFF); + *p++ = static_cast((val >> 8) & 0xFF); + *p++ = static_cast(val & 0xFF); + break; + } + } else { + switch (sizeof(val)) { + case 2: + *p++ = static_cast(val & 0xFF); + *p++ = static_cast((val >> 8) & 0xFF); + break; + case 4: + *p++ = static_cast(val & 0xFF); + *p++ = static_cast((val >> 8) & 0xFF); + *p++ = static_cast((val >> 16) & 0xFF); + *p++ = static_cast((val >> 24) & 0xFF); + break; + } + } + + return p; +} + +template +char *WriteData(char *p, T val) +{ + const char *pt = reinterpret_cast(&val); + for (size_t i = 0; i < sizeof(val); i++) + *p++ = *pt++; + return p; +} + +class CIFDWriter { + char *m_pBase; + char *m_pIFDBase; + char *m_pValue; + unsigned int m_nTags; + + char *WriteOffset(char *target, char *addr) { + uint32_t val = Offset(addr); + const char *p = reinterpret_cast(&val); + *target++ = *p++; + *target++ = *p++; + *target++ = *p++; + *target++ = *p++; + return target; + } + + void WriteTagTypeCount(uint16_t tag, uint16_t type, uint32_t count) { + const char *p = reinterpret_cast(&tag); + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + + p = reinterpret_cast(&type); + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + + p = reinterpret_cast(&count); + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + + m_nTags--; + } +public: + CIFDWriter(char *offset_base, char *ifdbase, uint16_t tagcount) { + m_nTags = tagcount; + m_pBase = offset_base; + m_pIFDBase = ifdbase; + m_pValue = m_pIFDBase + IFD_FIELDCOUNT_SIZE + + IFD_FIELD_SIZE * tagcount + IFD_NEXTIFDOFFSET_SIZE; + + // COUNT field of IFD + const char *pval = reinterpret_cast(&m_nTags); + *m_pIFDBase++ = *pval++; + *m_pIFDBase++ = *pval++; + } + + uint32_t Offset(char *p) { + return static_cast(PTR_TO_ULONG(p) - PTR_TO_ULONG(m_pBase)); + } + + void WriteByte(uint16_t tag, uint32_t count, const uint8_t value[]) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_BYTE, count); + + if (count > IFD_VALOFF_SIZE) { + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + for (uint32_t i = 0; i < count; i++) { + *m_pValue++ = static_cast(value[i]); + } + } else { + for (uint32_t i = 0; i < count; i++) + *m_pIFDBase++ = static_cast(value[i]); + m_pIFDBase += IFD_VALOFF_SIZE - count; + } + } + + void WriteShort(uint16_t tag, uint32_t count, const uint16_t value[]) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_SHORT, count); + + const char *p = reinterpret_cast(&value[0]); + + if (count > 
(IFD_VALOFF_SIZE / sizeof(value[0]))) { + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + for (uint32_t i = 0; i < count; i++) { + *m_pValue++ = *p++; + *m_pValue++ = *p++; + } + } else { + for (uint32_t i = 0; i < count; i++) { + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + } + m_pIFDBase += IFD_VALOFF_SIZE - count * sizeof(value[0]); + } + } + + void WriteLong(uint16_t tag, uint32_t count, const uint32_t value[]) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_LONG, count); + + const char *p = reinterpret_cast(&value[0]); + if (count > (IFD_VALOFF_SIZE / sizeof(value[0]))) { + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + *m_pValue++ = *p++; + } else { + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + *m_pIFDBase++ = *p++; + } + } + + void WriteASCII(uint16_t tag, uint32_t count, const char *value) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_ASCII, count); + + if (count > IFD_VALOFF_SIZE) { + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + memcpy(m_pValue, value, count); + m_pValue[count - 1] = '\0'; + m_pValue += count; + } else { + for (uint32_t i = 0; i < count; i++) + *m_pIFDBase++ = value[i]; + *(m_pIFDBase - 1) = '\0'; + m_pIFDBase += IFD_VALOFF_SIZE - count; + } + } + + void WriteCString(uint16_t tag, uint32_t count, const char *string) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_ASCII, count); + + if (count > IFD_VALOFF_SIZE) { + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + strncpy(m_pValue, string, count); + m_pValue[count - 1] = '\0'; + m_pValue += count; + } else { + uint32_t i; + + for (i = 0; (i < (count - 1)) && (string[i] != '\0'); i++) + *m_pIFDBase++ = string[i]; + + while (i++ < count) + *m_pIFDBase++ = '\0'; + + m_pIFDBase += IFD_VALOFF_SIZE - count; + } + } + + void WriteRational(uint16_t tag, uint32_t count, const rational_t value[]) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_RATIONAL, count); + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + + for (uint32_t i = 0; i < count; i++) { + const char *pt; + pt = reinterpret_cast(&value[i].num); + *m_pValue++ = *pt++; + *m_pValue++ = *pt++; + *m_pValue++ = *pt++; + *m_pValue++ = *pt++; + pt = reinterpret_cast(&value[i].den); + *m_pValue++ = *pt++; + *m_pValue++ = *pt++; + *m_pValue++ = *pt++; + *m_pValue++ = *pt++; + } + } + + void WriteSRational(uint16_t tag, uint32_t count, const srational_t value[]) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_SRATIONAL, count); + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + + const char *pt = reinterpret_cast(value); + for (uint32_t i = 0; i < sizeof(srational_t) * count; i++) + *m_pValue++ = *pt++; + } + + void WriteUndef(uint16_t tag, uint32_t count, const unsigned char *value) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_UNDEFINED, count); + if (count > IFD_VALOFF_SIZE) { + m_pIFDBase = WriteOffset(m_pIFDBase, m_pValue); + memcpy(m_pValue, value, count); + m_pValue += count; + } else { + for (uint32_t i = 0; i < count; i++) + *m_pIFDBase++ = static_cast(value[i]); + m_pIFDBase += IFD_VALOFF_SIZE - count; + } + } + + char *BeginSubIFD(uint16_t tag) { + ALOG_ASSERT(m_nTags == 0); + + WriteTagTypeCount(tag, EXIF_TYPE_LONG, 1); + + uint32_t offset = Offset(m_pValue); + const char *poff = reinterpret_cast(&offset); + *m_pIFDBase++ = *poff++; + *m_pIFDBase++ = *poff++; + *m_pIFDBase++ = *poff++; + *m_pIFDBase++ = *poff++; + + return m_pValue; + } + + void EndSubIFD(char *end_of_subIFD) { m_pValue = 
end_of_subIFD; } + void CancelSubIFD() { m_pIFDBase -= IFD_FIELD_SIZE; } + + void Finish(bool last) { + ALOG_ASSERT(m_nTags > 0); + + uint32_t offset = last ? 0 : Offset(m_pValue); + const char *pv = reinterpret_cast(&offset); + *m_pIFDBase++ = *pv++; + *m_pIFDBase++ = *pv++; + *m_pIFDBase++ = *pv++; + *m_pIFDBase++ = *pv++; + } + + char *GetNextIFDBase() { return m_pValue; } + char *GetNextTagAddress() { return m_pIFDBase; } +}; + +#endif //__HARDWARE_SAMSUNG_SLSI_EXYNOS_IFDWRITER_H__ diff --git a/libhwjpeg/hwjpeg-base.cpp b/libhwjpeg/hwjpeg-base.cpp new file mode 100644 index 0000000..e593c02 --- /dev/null +++ b/libhwjpeg/hwjpeg-base.cpp @@ -0,0 +1,152 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include + +#include +#include + +#include +#include "hwjpeg-internal.h" + +CHWJpegBase::CHWJpegBase(const char *path) + : m_iFD(-1), m_uiDeviceCaps(0), m_uiAuxFlags(0) +{ + m_iFD = open(path, O_RDWR); + if (m_iFD < 0) + ALOGERR("Failed to open '%s'", path); +} + +CHWJpegBase::~CHWJpegBase() +{ + if (m_iFD >= 0) + close(m_iFD); +} + +void CHWJpegBase::SetAuxFlags(unsigned int auxflags) +{ + ALOGW_IF(!!(m_uiAuxFlags & auxflags), + "Configuration auxiliary flags %#x overrides previous flags %#x", + auxflags , m_uiAuxFlags); + + m_uiAuxFlags |= auxflags; +} + +void CHWJpegBase::ClearAuxFlags(unsigned int auxflags) +{ + + ALOGW_IF(!!(m_uiAuxFlags & auxflags) && ((m_uiAuxFlags & auxflags) != auxflags), + "Clearing auxiliary flags %#x overrides previous flags %#x", + auxflags, m_uiAuxFlags); + + m_uiAuxFlags &= ~auxflags; +} + +bool CStopWatch::Start() +{ + int ret = clock_gettime(CLOCK_MONOTONIC, &m_tBegin); + if (ret) { + ALOGERR("Failed to get current clock"); + memset(&m_tBegin, 0, sizeof(m_tBegin)); + return false; + } + + return true; +} + +unsigned long CStopWatch::GetElapsed() +{ + timespec tp; + int ret = clock_gettime(CLOCK_MONOTONIC, &tp); + if (ret) { + ALOGERR("Failed to get current clock"); + return 0; + } + + unsigned long elapsed = (tp.tv_sec - m_tBegin.tv_sec) * 1000000; + return (m_tBegin.tv_nsec > tp.tv_nsec) + ? elapsed - (m_tBegin.tv_nsec - tp.tv_nsec) / 1000 + : elapsed + (tp.tv_nsec - m_tBegin.tv_nsec) / 1000; +} + +unsigned long CStopWatch::GetElapsedUpdate() +{ + timespec tp; + int ret = clock_gettime(CLOCK_MONOTONIC, &tp); + if (ret) { + ALOGERR("Failed to get current clock"); + return 0; + } + + unsigned long elapsed = (tp.tv_sec - m_tBegin.tv_sec) * 1000000; + elapsed = (m_tBegin.tv_nsec > tp.tv_nsec) + ? 
elapsed - (m_tBegin.tv_nsec - tp.tv_nsec) / 1000 + : elapsed + (tp.tv_nsec - m_tBegin.tv_nsec) / 1000; + + m_tBegin = tp; + return elapsed; +} + +bool WriteToFile(const char *path, const char *data, size_t len) +{ + int fd = open(path, O_RDWR | O_CREAT, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH | S_IWGRP ); + if (fd < 0) { + ALOGERR("Failed to open '%s' for write/create", path); + return false; + } + + ssize_t written = write(fd, data, len); + close(fd); + if (written < 0) { + ALOGERR("Failed to write %zu bytes to '%s'", len, path); + return false; + } + + ALOGI("%zu/%zu bytes from ptr %p are written to '%s'", written, len, data, path); + + return true; +} + +bool WriteToFile(const char *path, int dmabuf, size_t len) +{ + char *p = reinterpret_cast(mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, dmabuf, 0)); + if (p == MAP_FAILED) { + ALOGERR("Filed to map the given dmabuf fd %d", dmabuf); + return false; + } + + int fd = open(path, O_RDWR | O_CREAT, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH | S_IWGRP ); + if (fd < 0) { + ALOGERR("Failed to open '%s' for write/create", path); + munmap(p, len); + return false; + } + + ssize_t written = write(fd, p, len); + if (written < 0) + ALOGERR("Failed to write %zu bytes to '%s'", len, path); + else + ALOGI("%zu/%zu bytes from dmabuf fd %d are written to '%s'", written, len, dmabuf, path); + + munmap(p, len); + close(fd); + + return true; +} diff --git a/libhwjpeg/hwjpeg-internal.h b/libhwjpeg/hwjpeg-internal.h new file mode 100644 index 0000000..69da8a3 --- /dev/null +++ b/libhwjpeg/hwjpeg-internal.h @@ -0,0 +1,86 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __HWJPEG_INTERNAL_H__ +#define __HWJPEG_INTERNAL_H__ + +#ifndef LOG_TAG +#error "LOG_TAG is not defined!" +#endif + +#include +#include + +#include + +#ifdef __GNUC__ +# define __UNUSED__ __attribute__((__unused__)) +#else +# define __UNUSED__ +#endif + +#ifndef ALOGERR +#define ALOGERR(fmt, args...) ((void)ALOG(LOG_ERROR, LOG_TAG, fmt " [%s]", ##args, strerror(errno))) +#endif + +#define V4L2_CID_JPEG_SEC_COMP_QUALITY (V4L2_CID_JPEG_CLASS_BASE + 20) +#define V4L2_CID_JPEG_QTABLES2 (V4L2_CID_JPEG_CLASS_BASE + 22) +#define V4L2_CID_JPEG_HWFC_ENABLE (V4L2_CID_JPEG_CLASS_BASE + 25) + +#define TO_MAIN_SIZE(val) ((val) & 0xFFFF) +#define TO_THUMB_SIZE(val) (((val) & 0xFFFF) << 16) +#define TO_IMAGE_SIZE(main, thumb) (TO_MAIN_SIZE(main) | TO_THUMB_SIZE(thumb)) + +#define PTR_TO_ULONG(ptr) reinterpret_cast(ptr) +#define PTR_DIFF(ptr1, ptr2) (reinterpret_cast(ptr2) - reinterpret_cast(ptr1)) + +#define ARRSIZE(v) (sizeof(v) / sizeof(v[0])) + +#ifndef min +template +static inline T min(T val1, T val2) { + return (val1 > val2) ? val2 : val1; +} +#endif + +#ifndef max +template +static inline T max(T val1, T val2) { + return (val1 < val2) ? 
val2 : val1; +} +#endif + +// H/W requires 16-byte alignment +#define HW_BASE_ALIGN_BITS 4 +#define HW_BASE_ALIGN_SIZE (1 << HW_BASE_ALIGN_BITS) +#define HW_BASE_ALIGN_MASK ~(HW_BASE_ALIGN_SIZE - 1) + +class CStopWatch { + timespec m_tBegin; +public: + CStopWatch(bool start = false) { + if (start) + Start(); + } + bool Start(); + unsigned long GetElapsed(); + unsigned long GetElapsedUpdate(); +}; + +bool WriteToFile(const char *path, const char *data, size_t len); +bool WriteToFile(const char *path, int dmabuf, size_t len); +#endif //__HWJPEG_INTERNAL_H__ diff --git a/libhwjpeg/hwjpeg-libcsc.h b/libhwjpeg/hwjpeg-libcsc.h new file mode 100644 index 0000000..d8a556e --- /dev/null +++ b/libhwjpeg/hwjpeg-libcsc.h @@ -0,0 +1,54 @@ +#ifndef __HARDWARE_EXYNOS_LIBHWJPEG_LIBCSC_H__ +#define __HARDWARE_EXYNOS_LIBHWJPEG_LIBCSC_H__ + +#include + +int V4L2FMT2HALFMT(int fmt); + +class CLibCSC { + void *m_hdlLibCSC; +public: + CLibCSC(): m_hdlLibCSC(NULL) { } + ~CLibCSC() { destroy(); } + void destroy(); + bool init(int devid = 0); + + bool set_src_format( + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format); + + bool set_dst_format( + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format); + + bool set_src_format( + unsigned int width, + unsigned int height, + unsigned int color_format) { + return set_src_format(width, height, 0, 0, width, height, color_format); + } + + bool set_dst_format( + unsigned int width, + unsigned int height, + unsigned int color_format) { + return set_dst_format(width, height, 0, 0, width, height, color_format); + } + + bool set_src_buffer(void *addrs[CSC_MAX_PLANES], int mem_type); + bool set_dst_buffer(void *addrs[CSC_MAX_PLANES], int mem_type); + bool convert(); + bool convert(int rotation, int flip_horizontal, int flip_vertical); +}; + +#endif //__HARDWARE_EXYNOS_LIBHWJPEG_LIBCSC_H__ diff --git a/libhwjpeg/hwjpeg-v4l2.cpp b/libhwjpeg/hwjpeg-v4l2.cpp new file mode 100644 index 0000000..01a8742 --- /dev/null +++ b/libhwjpeg/hwjpeg-v4l2.cpp @@ -0,0 +1,975 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include +#include "hwjpeg-internal.h" + +CHWJpegV4L2Compressor::CHWJpegV4L2Compressor(): CHWJpegCompressor("/dev/video12") +{ + memset(&m_v4l2Format, 0, sizeof(m_v4l2Format)); + memset(&m_v4l2SrcBuffer, 0, sizeof(m_v4l2SrcBuffer)); + memset(&m_v4l2DstBuffer, 0, sizeof(m_v4l2DstBuffer)); + memset(&m_v4l2SrcPlanes, 0, sizeof(m_v4l2SrcPlanes)); + memset(&m_v4l2DstPlanes, 0, sizeof(m_v4l2DstPlanes)); + memset(&m_v4l2Controls, 0, sizeof(m_v4l2Controls)); + + m_v4l2Format.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + // default image format is initialized by 8x8 RGB24 in order for TryFormat() + // to seccess at anytime. + // 8x8 : the smallest image size to compress + // RGB24(or YUV444): The image format with the smallest memory footprint + // without any image size constraints. + m_v4l2Format.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_RGB24; + m_v4l2Format.fmt.pix_mp.width = TO_IMAGE_SIZE(16, 0); + m_v4l2Format.fmt.pix_mp.height = TO_IMAGE_SIZE(16, 0); + m_v4l2SrcBuffer.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + m_v4l2DstBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + + m_v4l2SrcBuffer.m.planes = m_v4l2SrcPlanes; + m_v4l2DstBuffer.m.planes = m_v4l2DstPlanes; + + m_uiControlsToSet = 0; + + m_bEnableHWFC = false; + + v4l2_capability cap; + memset(&cap, 0, sizeof(cap)); + if (ioctl(GetDeviceFD(), VIDIOC_QUERYCAP, &cap) < 0) { + ALOGERR("Failed to query capability of /dev/video12"); + } else if (!!(cap.capabilities & V4L2_CAP_DEVICE_CAPS)) { + SetDeviceCapabilities(cap.device_caps); + } + + // Initialy declare that s_fmt is required. + SetFlag(HWJPEG_FLAG_PIX_FMT); + + ALOGD("CHWJpegV4L2Compressor Created: %p, FD %d", this, GetDeviceFD()); +} + +CHWJpegV4L2Compressor::~CHWJpegV4L2Compressor() +{ + StopStreaming(); + + ALOGD("CHWJpegV4L2Compressor Destroyed: %p, FD %d", this, GetDeviceFD()); +} + +bool CHWJpegV4L2Compressor::SetChromaSampFactor( + unsigned int horizontal, unsigned int vertical) +{ + __s32 value; + switch ((horizontal << 4) | vertical) { + case 0x00: value = V4L2_JPEG_CHROMA_SUBSAMPLING_GRAY; break; + case 0x11: value = V4L2_JPEG_CHROMA_SUBSAMPLING_444; break; + case 0x21: value = V4L2_JPEG_CHROMA_SUBSAMPLING_422; break; + case 0x22: value = V4L2_JPEG_CHROMA_SUBSAMPLING_420; break; + case 0x41: value = V4L2_JPEG_CHROMA_SUBSAMPLING_411; break; + case 0x12: + default: + ALOGE("Unsupported chroma subsampling %ux%u", horizontal, vertical); + return false; + } + + m_v4l2Controls[HWJPEG_CTRL_CHROMFACTOR].id = V4L2_CID_JPEG_CHROMA_SUBSAMPLING; + m_v4l2Controls[HWJPEG_CTRL_CHROMFACTOR].value = value; + m_uiControlsToSet |= 1 << HWJPEG_CTRL_CHROMFACTOR; + + return true; +} + +bool CHWJpegV4L2Compressor::SetQuality( + unsigned int quality_factor, unsigned int quality_factor2) +{ + if (quality_factor > 100) { + ALOGE("Unsupported quality factor %u", quality_factor); + return false; + } + + if (quality_factor2 > 100) { + ALOGE("Unsupported quality factor %u for the secondary image", + quality_factor2); + return false; + } + + if (quality_factor > 0) { + m_v4l2Controls[HWJPEG_CTRL_QFACTOR].id = V4L2_CID_JPEG_COMPRESSION_QUALITY; + m_v4l2Controls[HWJPEG_CTRL_QFACTOR].value = static_cast<__s32>(quality_factor); + m_uiControlsToSet |= 1 << HWJPEG_CTRL_QFACTOR; + } + + if (quality_factor2 > 0) { + m_v4l2Controls[HWJPEG_CTRL_QFACTOR2].id = V4L2_CID_JPEG_SEC_COMP_QUALITY; + m_v4l2Controls[HWJPEG_CTRL_QFACTOR2].value = static_cast<__s32>(quality_factor2); + m_uiControlsToSet |= 1 << HWJPEG_CTRL_QFACTOR2; + } + + return true; +} + +bool 
CHWJpegV4L2Compressor::SetQuality(const unsigned char qtable[]) +{ + v4l2_ext_controls ctrls; + v4l2_ext_control ctrl; + + memset(&ctrls, 0, sizeof(ctrls)); + memset(&ctrl, 0, sizeof(ctrl)); + + ctrls.ctrl_class = V4L2_CTRL_CLASS_JPEG; + ctrls.controls = &ctrl; + ctrls.count = 1; + + ctrl.id = V4L2_CID_JPEG_QTABLES2; + ctrl.size = 128; /* two quantization tables */ + ctrl.p_u8 = const_cast(qtable); + + if (ioctl(GetDeviceFD(), VIDIOC_S_EXT_CTRLS, &ctrls) < 0) { + ALOGERR("Failed to configure %u controls", ctrls.count); + return false; + } + + return true; +} + +bool CHWJpegV4L2Compressor::SetImageFormat(unsigned int v4l2_fmt, + unsigned int width, unsigned int height, + unsigned int width2, unsigned int height2) +{ + if ((m_v4l2Format.fmt.pix_mp.pixelformat == v4l2_fmt) && + (m_v4l2Format.fmt.pix_mp.width == TO_IMAGE_SIZE(width, width2)) && + (m_v4l2Format.fmt.pix_mp.height == TO_IMAGE_SIZE(height, height2))) + return true; + + m_v4l2Format.fmt.pix_mp.pixelformat = v4l2_fmt; + m_v4l2Format.fmt.pix_mp.width = TO_IMAGE_SIZE(width, width2); + m_v4l2Format.fmt.pix_mp.height = TO_IMAGE_SIZE(height, height2); + + SetFlag(HWJPEG_FLAG_PIX_FMT); + + return TryFormat(); +} + +bool CHWJpegV4L2Compressor::GetImageBufferSizes(size_t buf_sizes[], unsigned int *num_buffers) +{ + if (buf_sizes) { + for (unsigned int i = 0; i < m_v4l2Format.fmt.pix_mp.num_planes; i++) + buf_sizes[i] = m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage; + } + + if (num_buffers) { + if (*num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("The size array length %u is smaller than the number of required buffers %u", + *num_buffers, m_v4l2Format.fmt.pix_mp.num_planes); + return false; + } + + *num_buffers = m_v4l2Format.fmt.pix_mp.num_planes; + } + + return true; +} + +bool CHWJpegV4L2Compressor::SetImageBuffer(char *buffers[], size_t len_buffers[], + unsigned int num_buffers) +{ + if (num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("The number of buffers %u is smaller than the required %u", + num_buffers,m_v4l2Format.fmt.pix_mp.num_planes); + return false; + } + + for (unsigned int i = 0; i < m_v4l2Format.fmt.pix_mp.num_planes; i++) { + m_v4l2SrcPlanes[i].m.userptr = reinterpret_cast(buffers[i]); + if (len_buffers[i] < m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage) { + ALOGE("The size of the buffer[%u] %zu is smaller than required %u", + i, len_buffers[i], m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage); + return false; + } + m_v4l2SrcPlanes[i].bytesused = m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage; + m_v4l2SrcPlanes[i].length = len_buffers[i]; + } + + m_v4l2SrcBuffer.memory = V4L2_MEMORY_USERPTR; + + SetFlag(HWJPEG_FLAG_SRC_BUFFER); + + return true; +} + +bool CHWJpegV4L2Compressor::SetImageBuffer(int buffers[], size_t len_buffers[], + unsigned int num_buffers) +{ + if (num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("The number of buffers %u is smaller than the required %u", + num_buffers,m_v4l2Format.fmt.pix_mp.num_planes); + return false; + } + + for (unsigned int i = 0; i < m_v4l2Format.fmt.pix_mp.num_planes; i++) { + m_v4l2SrcPlanes[i].m.fd = buffers[i]; + if (len_buffers[i] < m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage) { + ALOGE("The size of the buffer[%u] %zu is smaller than required %u", + i, len_buffers[i], m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage); + return false; + } + m_v4l2SrcPlanes[i].bytesused = m_v4l2Format.fmt.pix_mp.plane_fmt[i].sizeimage; + m_v4l2SrcPlanes[i].length = len_buffers[i]; + } + + m_v4l2SrcBuffer.memory = V4L2_MEMORY_DMABUF; + + 
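+    // In V4L2 terms, 'bytesused' is the payload size the driver will read from each
+    // plane (the image size reported by TRY_FMT/S_FMT), while 'length' is the total
+    // capacity of the dmabuf backing that plane. The memory type chosen here is also
+    // shared with the secondary (back-to-back) planes configured by SetImageBuffer2(),
+    // which does not set it again.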
SetFlag(HWJPEG_FLAG_SRC_BUFFER); + + return true; +} + + bool CHWJpegV4L2Compressor::SetImageBuffer2(char *buffers[], size_t len_buffers[], + unsigned int num_buffers) +{ + if (!IsDeviceCapability(V4L2_CAP_EXYNOS_JPEG_B2B_COMPRESSION)) { + ALOGE("Back-to-back compression is not suppored by H/W"); + return false; + } + + if (num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("The number of buffers %u is smaller than the required %u (secondary)", + num_buffers,m_v4l2Format.fmt.pix_mp.num_planes); + return false; + } + + unsigned int ibuf = 0; + for (unsigned int i = m_v4l2Format.fmt.pix_mp.num_planes; + i < (m_v4l2Format.fmt.pix_mp.num_planes * 2); i++, ibuf++) { + m_v4l2SrcPlanes[i].m.userptr = reinterpret_cast(buffers[ibuf]); + // size check is ignored for the secondary image buffers + m_v4l2SrcPlanes[i].bytesused = len_buffers[ibuf]; + m_v4l2SrcPlanes[i].length = len_buffers[ibuf]; + } + + // memory type is only configured by the primary image configuration + SetFlag(HWJPEG_FLAG_SRC_BUFFER2); + + return true; +} + + bool CHWJpegV4L2Compressor::SetImageBuffer2(int buffers[], size_t len_buffers[], + unsigned int num_buffers) +{ + if (!IsDeviceCapability(V4L2_CAP_EXYNOS_JPEG_B2B_COMPRESSION)) { + ALOGE("Back-to-back compression is not suppored by H/W"); + return false; + } + + if (num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("The number of buffers %u is smaller than the required %u (secondary)", + num_buffers,m_v4l2Format.fmt.pix_mp.num_planes); + return false; + } + + unsigned int ibuf = 0; + for (unsigned int i = m_v4l2Format.fmt.pix_mp.num_planes; + i < (m_v4l2Format.fmt.pix_mp.num_planes * 2); i++, ibuf++) { + m_v4l2SrcPlanes[i].m.fd = buffers[ibuf]; + // size check is ignored for the secondary image buffers + m_v4l2SrcPlanes[i].bytesused = len_buffers[ibuf]; + m_v4l2SrcPlanes[i].length = len_buffers[ibuf]; + } + + // memory type is only configured by the primary image configuration + + SetFlag(HWJPEG_FLAG_SRC_BUFFER2); + + return true; +} + +bool CHWJpegV4L2Compressor::SetJpegBuffer(char *buffer, size_t len_buffer) +{ + m_v4l2DstPlanes[0].m.userptr = reinterpret_cast(buffer); + m_v4l2DstPlanes[0].length = len_buffer; + m_v4l2DstBuffer.memory = V4L2_MEMORY_USERPTR; + SetFlag(HWJPEG_FLAG_DST_BUFFER); + return true; +} + +bool CHWJpegV4L2Compressor::SetJpegBuffer(int buffer, size_t len_buffer) +{ + m_v4l2DstPlanes[0].m.fd = buffer; + m_v4l2DstPlanes[0].length = len_buffer; + m_v4l2DstBuffer.memory = V4L2_MEMORY_DMABUF; + SetFlag(HWJPEG_FLAG_DST_BUFFER); + return true; +} + +bool CHWJpegV4L2Compressor::SetJpegBuffer2(char *buffer, size_t len_buffer) +{ + if (!IsDeviceCapability(V4L2_CAP_EXYNOS_JPEG_B2B_COMPRESSION)) { + ALOGE("Back-to-back compression is not suppored by H/W"); + return false; + } + + m_v4l2DstPlanes[1].m.userptr = reinterpret_cast(buffer); + m_v4l2DstPlanes[1].length = len_buffer; + SetFlag(HWJPEG_FLAG_DST_BUFFER2); + return true; +} + +bool CHWJpegV4L2Compressor::SetJpegBuffer2(int buffer, size_t len_buffer) +{ + if (!IsDeviceCapability(V4L2_CAP_EXYNOS_JPEG_B2B_COMPRESSION)) { + ALOGE("Back-to-back compression is not suppored by H/W"); + return false; + } + + m_v4l2DstPlanes[1].m.fd = buffer; + m_v4l2DstPlanes[1].length = len_buffer; + SetFlag(HWJPEG_FLAG_DST_BUFFER2); + return true; +} + +bool CHWJpegV4L2Compressor::StopStreaming() +{ + if (TestFlag(HWJPEG_FLAG_STREAMING)) { + if (!StreamOff()) + return false; + ClearFlag(HWJPEG_FLAG_STREAMING); + } + + // Stream off dequeues all queued buffers + ClearFlag(HWJPEG_FLAG_QBUF_OUT | 
HWJPEG_FLAG_QBUF_CAP); + + // It is OK to skip DQBUF because STREAMOFF dequeues all queued buffers + if (TestFlag(HWJPEG_FLAG_REQBUFS)) { + if (!ReqBufs(0)) + return false; + ClearFlag(HWJPEG_FLAG_REQBUFS); + } + + return true; +} + +ssize_t CHWJpegV4L2Compressor::Compress(size_t *secondary_stream_size, bool block_mode) +{ + if (TestFlag(HWJPEG_FLAG_PIX_FMT)) { + if (!StopStreaming() || !SetFormat()) + return -1; + } + + if (!TestFlag(HWJPEG_FLAG_SRC_BUFFER)) { + ALOGE("Source image buffer is not specified"); + return -1; + } + + if (!TestFlag(HWJPEG_FLAG_DST_BUFFER)) { + ALOGE("Output JPEG stream buffer is not specified"); + return -1; + } + + m_v4l2SrcBuffer.length = m_v4l2Format.fmt.pix_mp.num_planes; + m_v4l2DstBuffer.length = 1; + if (IsB2BCompression()) { + if (!TestFlag(HWJPEG_FLAG_SRC_BUFFER2 | HWJPEG_FLAG_DST_BUFFER2)) { + ALOGE("Either of source or destination buffer of secondary image is not specified (%#x)", + GetFlags()); + return -1; + } + // The SMFC Driver expects the number of buffers to be doubled + // if back-to-back compression is enabled + m_v4l2SrcBuffer.length *= 2; + m_v4l2DstBuffer.length = 2; + } + + if (!!(GetAuxFlags() & EXYNOS_HWJPEG_AUXOPT_SRC_NOCACHECLEAN)) + m_v4l2SrcBuffer.flags |= V4L2_BUF_FLAG_NO_CACHE_CLEAN; + if (!!(GetAuxFlags() & EXYNOS_HWJPEG_AUXOPT_DST_NOCACHECLEAN)) + m_v4l2DstBuffer.flags |= V4L2_BUF_FLAG_NO_CACHE_CLEAN; + + if (!ReqBufs() || !StreamOn() || !UpdateControls() || !QBuf()) + return -1; + + return block_mode ? DQBuf(secondary_stream_size) : 0; +} + +bool CHWJpegV4L2Compressor::TryFormat() +{ + if (ioctl(GetDeviceFD(), VIDIOC_TRY_FMT, &m_v4l2Format) < 0) { + ALOGERR("Failed to TRY_FMT for compression"); + return false; + } + + return true; +} + +bool CHWJpegV4L2Compressor::SetFormat() +{ + if (ioctl(GetDeviceFD(), VIDIOC_S_FMT, &m_v4l2Format) < 0) { + ALOGERR("Failed to S_FMT for image to compress"); + return false; + } + + v4l2_format v4l2JpegFormat; + memset(&v4l2JpegFormat, 0, sizeof(v4l2JpegFormat)); + + v4l2JpegFormat.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + v4l2JpegFormat.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_JPEG; + v4l2JpegFormat.fmt.pix_mp.width = m_v4l2Format.fmt.pix_mp.width; + v4l2JpegFormat.fmt.pix_mp.height = m_v4l2Format.fmt.pix_mp.height; + + if (ioctl(GetDeviceFD(), VIDIOC_S_FMT, &v4l2JpegFormat) < 0) { + ALOGERR("Failed to S_FMT for JPEG stream to capture"); + return false; + } + + ClearFlag(HWJPEG_FLAG_PIX_FMT); + + return true; +} + +bool CHWJpegV4L2Compressor::UpdateControls() +{ + bool enable_hwfc = !!(GetAuxFlags() & EXYNOS_HWJPEG_AUXOPT_ENABLE_HWFC); + + if ((m_uiControlsToSet == 0) && (enable_hwfc == m_bEnableHWFC)) + return true; + + v4l2_ext_controls ctrls; + v4l2_ext_control ctrl[HWJPEG_CTRL_NUM]; + + memset(&ctrls, 0, sizeof(ctrls)); + memset(&ctrl, 0, sizeof(ctrl)); + + ctrls.ctrl_class = V4L2_CTRL_CLASS_JPEG; + ctrls.controls = ctrl; + unsigned int idx_ctrl = 0; + while (m_uiControlsToSet != 0) { + if (m_uiControlsToSet & (1 << idx_ctrl)) { + ctrl[ctrls.count].id = m_v4l2Controls[idx_ctrl].id; + ctrl[ctrls.count].value = m_v4l2Controls[idx_ctrl].value; + m_uiControlsToSet &= ~(1 << idx_ctrl); + ctrls.count++; + } + idx_ctrl++; + } + + if (m_bEnableHWFC != enable_hwfc) { + m_bEnableHWFC = enable_hwfc; + ctrl[ctrls.count].id = V4L2_CID_JPEG_HWFC_ENABLE; + ctrl[ctrls.count].value = m_bEnableHWFC ? 
1 : 0; + ctrls.count++; + } + + if (ioctl(GetDeviceFD(), VIDIOC_S_EXT_CTRLS, &ctrls) < 0) { + ALOGERR("Failed to configure %u controls", ctrls.count); + return false; + } + + return true; +} + +bool CHWJpegV4L2Compressor::ReqBufs(unsigned int count) +{ + // - count > 0 && REQBUFS is set: Just return true + // - count > 0 && REQBUFS is unset: REQBUFS(count) is required + // - count == 0 && REQBUFS is set: REQBUFS(0) is required + // - count == 0 && REQBUFS is unset: Just return true; + if ((count > 0) == TestFlag(HWJPEG_FLAG_REQBUFS)) + return true; + + v4l2_requestbuffers reqbufs; + + memset(&reqbufs, 0, sizeof(reqbufs)); + reqbufs.count = count; + reqbufs.memory = m_v4l2SrcBuffer.memory; + reqbufs.type = m_v4l2SrcBuffer.type; + if (ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &reqbufs) < 0) { + ALOGERR("Failed to REQBUFS(%u) of the source image", count); + return false; + } + + memset(&reqbufs, 0, sizeof(reqbufs)); + reqbufs.count = count; + reqbufs.memory = m_v4l2DstBuffer.memory; + reqbufs.type = m_v4l2DstBuffer.type; + if (ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &reqbufs) < 0) { + ALOGERR("Failed to REQBUFS(%u) of the JPEG stream", count); + // rolling back the reqbufs for the source image + reqbufs.memory = m_v4l2SrcBuffer.memory; + reqbufs.type = m_v4l2SrcBuffer.type; + reqbufs.count = 0; + ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &reqbufs); // don't care if it fails + return false; + } + + if (count > 0) + SetFlag(HWJPEG_FLAG_REQBUFS); + else + ClearFlag(HWJPEG_FLAG_REQBUFS); + + return true; +} + +bool CHWJpegV4L2Compressor::StreamOn() +{ + if (TestFlag(HWJPEG_FLAG_STREAMING)) + return true; + + if (!TestFlag(HWJPEG_FLAG_REQBUFS)) { + ALOGE("Trying to STREAMON before REQBUFS"); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_STREAMON, &m_v4l2SrcBuffer.type) < 0) { + ALOGERR("Failed to STREAMON for the source image"); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_STREAMON, &m_v4l2DstBuffer.type) < 0) { + ALOGERR("Failed to STREAMON for the JPEG stream"); + ioctl(GetDeviceFD(), VIDIOC_STREAMOFF, &m_v4l2SrcBuffer.type); + return false; + } + + SetFlag(HWJPEG_FLAG_STREAMING); + + return true; +} + +bool CHWJpegV4L2Compressor::StreamOff() +{ + if (!TestFlag(HWJPEG_FLAG_STREAMING)) + return true; + + // error during stream off do not need further handling because of nothing to do + if (ioctl(GetDeviceFD(), VIDIOC_STREAMOFF, &m_v4l2SrcBuffer.type) < 0) + ALOGERR("Failed to STREAMOFF for the source image"); + + if (ioctl(GetDeviceFD(), VIDIOC_STREAMOFF, &m_v4l2DstBuffer.type) < 0) + ALOGERR("Failed to STREAMOFF for the JPEG stream"); + + ClearFlag(HWJPEG_FLAG_STREAMING); + + return true; +} + +bool CHWJpegV4L2Compressor::QBuf() +{ + if (!TestFlag(HWJPEG_FLAG_REQBUFS)) { + ALOGE("QBuf is not permitted until REQBUFS is performed"); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_QBUF, &m_v4l2SrcBuffer) < 0) { + ALOGERR("QBuf of the source buffers is failed (B2B %s)", + IsB2BCompression() ? "enabled" : "disabled"); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_QBUF, &m_v4l2DstBuffer) < 0) { + ALOGERR("QBuf of the JPEG buffers is failed (B2B %s)", + IsB2BCompression() ? 
"enabled" : "disabled"); + // Reqbufs(0) is the only way to cancel the previous queued buffer + StopStreaming(); + return false; + } + + SetFlag(HWJPEG_FLAG_QBUF_OUT | HWJPEG_FLAG_QBUF_CAP); + + return true; +} + +ssize_t CHWJpegV4L2Compressor::DQBuf(size_t *secondary_stream_size) +{ + bool failed = false; + v4l2_buffer buffer_src, buffer_dst; + v4l2_plane planes_src[6], planes_dst[2]; + + ALOG_ASSERT(TestFlag(HWJPEG_FLAG_QBUF_OUT) == TestFlag(HWJPEG_FLAG_QBUF_CAP)); + + memset(&buffer_src, 0, sizeof(buffer_src)); + memset(&buffer_dst, 0, sizeof(buffer_dst)); + memset(&planes_src, 0, sizeof(planes_src)); + memset(&planes_dst, 0, sizeof(planes_dst)); + + buffer_src.type = m_v4l2SrcBuffer.type; + buffer_src.memory = m_v4l2SrcBuffer.memory; + buffer_src.length = m_v4l2SrcBuffer.length; + buffer_src.m.planes = planes_src; + + buffer_dst.type = m_v4l2DstBuffer.type; + buffer_dst.memory = m_v4l2DstBuffer.memory; + buffer_dst.length = m_v4l2DstBuffer.length; + buffer_dst.m.planes = planes_dst; + + if (TestFlag(HWJPEG_FLAG_QBUF_OUT) && (ioctl(GetDeviceFD(), VIDIOC_DQBUF, &buffer_src) < 0)) { + ALOGERR("Failed to DQBUF of the image buffer"); + failed = true; + } + + if (TestFlag(HWJPEG_FLAG_QBUF_CAP) && (ioctl(GetDeviceFD(), VIDIOC_DQBUF, &buffer_dst) < 0)) { + ALOGERR("Failed to DQBUF of the JPEG stream buffer"); + failed = true; + } + + ClearFlag(HWJPEG_FLAG_QBUF_OUT | HWJPEG_FLAG_QBUF_CAP); + + if (failed) + return -1; + + if (!!((buffer_src.flags | buffer_dst.flags) & V4L2_BUF_FLAG_ERROR)) { + ALOGE("Error occurred during compression"); + return -1; + } + + // We don't need to check the length of secondary stream + // because it will be zero if the secondary image is not processed. + SetStreamSize(buffer_dst.m.planes[0].bytesused, buffer_dst.m.planes[1].bytesused); + + // The driver stores the delay in usec. of JPEG compression by H/W + // to v4l2_buffer.reserved2. 
+ m_uiHWDelay = buffer_dst.reserved2; + + return GetStreamSize(secondary_stream_size); +} + +ssize_t CHWJpegV4L2Compressor::WaitForCompression(size_t *secondary_stream_size) +{ + return DQBuf(secondary_stream_size); +} + +bool CHWJpegV4L2Compressor::GetImageBuffers(int buffers[], size_t len_buffers[], + unsigned int num_buffers) +{ + if (m_v4l2SrcBuffer.memory != V4L2_MEMORY_DMABUF) { + ALOGE("Current image buffer type is not dma-buf but attempted to retrieve dma-buf buffers"); + return false; + } + + if (num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("Number of planes are %u but attemts to retrieve %u buffers", + m_v4l2Format.fmt.pix_mp.num_planes, num_buffers); + return false; + } + + for (unsigned int i = 0; i < m_v4l2Format.fmt.pix_mp.num_planes; i++) { + buffers[i] = m_v4l2SrcBuffer.m.planes[i].m.fd; + len_buffers[i] = m_v4l2SrcBuffer.m.planes[i].length; + } + + return true; +} + +bool CHWJpegV4L2Compressor::GetImageBuffers(char *buffers[], size_t len_buffers[], + unsigned int num_buffers) +{ + if (m_v4l2SrcBuffer.memory != V4L2_MEMORY_USERPTR) { + ALOGE("Current image buffer type is not userptr but attempted to retrieve userptr buffers"); + return false; + } + + if (num_buffers < m_v4l2Format.fmt.pix_mp.num_planes) { + ALOGE("Number of planes are %u but attemts to retrieve %u buffers", + m_v4l2Format.fmt.pix_mp.num_planes, num_buffers); + return false; + } + + for (unsigned int i = 0; i < m_v4l2Format.fmt.pix_mp.num_planes; i++) { + buffers[i] = reinterpret_cast(m_v4l2SrcBuffer.m.planes[i].m.userptr); + len_buffers[i] = m_v4l2SrcBuffer.m.planes[i].length; + } + + return true; +} + +bool CHWJpegV4L2Compressor::GetJpegBuffer(int *buffer, size_t *len_buffer) +{ + if (m_v4l2DstBuffer.memory != V4L2_MEMORY_DMABUF) { + ALOGE("Current jpeg buffer type is not dma-buf but attempted to retrieve dma-buf buffer"); + return false; + } + + *buffer = m_v4l2DstBuffer.m.planes[0].m.fd; + *len_buffer = m_v4l2DstBuffer.m.planes[0].length; + + return true; +} + +bool CHWJpegV4L2Compressor::GetJpegBuffer(char **buffer, size_t *len_buffer) +{ + if (m_v4l2DstBuffer.memory != V4L2_MEMORY_USERPTR) { + ALOGE("Current jpeg buffer type is not userptr but attempted to retrieve userptr buffer"); + return false; + } + + *buffer = reinterpret_cast(m_v4l2DstBuffer.m.planes[0].m.userptr); + *len_buffer = m_v4l2DstBuffer.m.planes[0].length; + + return true; +} + +void CHWJpegV4L2Compressor::Release() +{ + StopStreaming(); +} + +/******************************************************************************/ +/********* D E C O M P R E S S I O N S U P P O R T **************************/ +/******************************************************************************/ + +CHWJpegV4L2Decompressor::CHWJpegV4L2Decompressor() : CHWJpegDecompressor("/dev/video12") +{ + m_v4l2Format.type = 0; // inidication of uninitialized state + + memset(&m_v4l2DstBuffer, 0, sizeof(m_v4l2DstBuffer)); + m_v4l2DstBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (Okay()) { + v4l2_capability cap; + memset(&cap, 0, sizeof(cap)); + if (ioctl(GetDeviceFD(), VIDIOC_QUERYCAP, &cap) < 0) { + ALOGERR("Failed to query capability of /dev/video12"); + } else if (!!(cap.capabilities & V4L2_CAP_DEVICE_CAPS)) { + SetDeviceCapabilities(cap.device_caps); + } + } +} + +CHWJpegV4L2Decompressor::~CHWJpegV4L2Decompressor() +{ + CancelCapture(); +} + +bool CHWJpegV4L2Decompressor::PrepareCapture() +{ + if (m_v4l2DstBuffer.length < m_v4l2Format.fmt.pix.sizeimage) { + ALOGE("The size of the buffer %zu is smaller than required %u", + 
m_v4l2DstBuffer.length, m_v4l2Format.fmt.pix.sizeimage); + return false; + } + + if (TestFlag(HWJPEG_FLAG_CAPTURE_READY)) + return true; + + v4l2_requestbuffers reqbufs; + + memset(&reqbufs, 0, sizeof(reqbufs)); + reqbufs.count = 1; + reqbufs.memory = m_v4l2DstBuffer.memory; + reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &reqbufs) < 0) { + ALOGERR("Failed to REQBUFS for the decompressed image"); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_STREAMON, &reqbufs.type) < 0) { + ALOGERR("Failed to STREAMON for the decompressed image"); + reqbufs.count = 0; + ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &reqbufs); + return false; + } + + SetFlag(HWJPEG_FLAG_CAPTURE_READY); + + return true; +} + +void CHWJpegV4L2Decompressor::CancelCapture() +{ + if (!TestFlag(HWJPEG_FLAG_CAPTURE_READY)) + return; + + v4l2_requestbuffers reqbufs; + + memset(&reqbufs, 0, sizeof(reqbufs)); + reqbufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + reqbufs.memory = m_v4l2DstBuffer.memory; + + ioctl(GetDeviceFD(), VIDIOC_STREAMOFF, &reqbufs.type); + ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &reqbufs); + + ClearFlag(HWJPEG_FLAG_CAPTURE_READY); +} + +bool CHWJpegV4L2Decompressor::SetImageFormat(unsigned int v4l2_fmt, + unsigned int width, unsigned int height) +{ + // Test if new format is the same as the current configured format + if (m_v4l2Format.type != 0) { + v4l2_pix_format *p = &m_v4l2Format.fmt.pix; + if ((p->pixelformat == v4l2_fmt) && + (p->width == width) && (p->height == height)) + return true; + } + + CancelCapture(); + + memset(&m_v4l2Format, 0, sizeof(m_v4l2Format)); + + m_v4l2Format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + m_v4l2Format.fmt.pix.pixelformat = v4l2_fmt; + m_v4l2Format.fmt.pix.width = width; + m_v4l2Format.fmt.pix.height = height; + + if (ioctl(GetDeviceFD(), VIDIOC_S_FMT, &m_v4l2Format) < 0) { + ALOGERR("Failed to S_FMT for decompressed image (%08X,%ux%u)", + v4l2_fmt, width, height); + return false; + } + + return true; +} + +bool CHWJpegV4L2Decompressor::SetImageBuffer(char *buffer, size_t len_buffer) +{ + m_v4l2DstBuffer.m.userptr = reinterpret_cast(buffer); + m_v4l2DstBuffer.bytesused = m_v4l2Format.fmt.pix.sizeimage; + m_v4l2DstBuffer.length = len_buffer; + m_v4l2DstBuffer.memory = V4L2_MEMORY_USERPTR; + + return true; +} + +bool CHWJpegV4L2Decompressor::SetImageBuffer(int buffer, size_t len_buffer) +{ + m_v4l2DstBuffer.m.fd = buffer; + m_v4l2DstBuffer.bytesused = m_v4l2Format.fmt.pix.sizeimage; + m_v4l2DstBuffer.length = len_buffer; + m_v4l2DstBuffer.memory = V4L2_MEMORY_DMABUF; + + return true; +} + +bool CHWJpegV4L2Decompressor::PrepareStream() +{ + if (TestFlag(HWJPEG_FLAG_OUTPUT_READY)) + return true; + + /* + * S_FMT for output stream is unneccessary because the driver assumes that + * the current mode is decompression if the capture stream is uncompressed + * format + */ + + v4l2_requestbuffers rb; + memset(&rb, 0, sizeof(rb)); + + rb.count = 1; + rb.memory = V4L2_MEMORY_USERPTR; + rb.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; + + // REQBUFS fails if no S_FMT is not performed + if (ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &rb) < 0) { + ALOGERR("Failed to REQBUFS for the JPEG stream."); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_STREAMON, &rb.type) < 0) { + ALOGERR("Failed to STREAMON for the JPEG stream."); + + rb.count = 0; + // don't care if reqbufs(0) fails. 
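+        // (releases the single output buffer requested above, undoing the
+        // half-finished stream setup)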
+ ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &rb); + + return false; + } + + SetFlag(HWJPEG_FLAG_OUTPUT_READY); + + return true; +} + +void CHWJpegV4L2Decompressor::CancelStream() +{ + if (!TestFlag(HWJPEG_FLAG_OUTPUT_READY)) + return; + + v4l2_requestbuffers rb; + memset(&rb, 0, sizeof(rb)); + rb.count = 0; + rb.memory = V4L2_MEMORY_USERPTR; + rb.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; + + // ignore error during canceling + ioctl(GetDeviceFD(), VIDIOC_STREAMOFF, &rb.type); + ioctl(GetDeviceFD(), VIDIOC_REQBUFS, &rb); + + ClearFlag(HWJPEG_FLAG_OUTPUT_READY); +} + +bool CHWJpegV4L2Decompressor::QBufAndWait(const char *buffer, size_t len) +{ + v4l2_buffer buf; + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT; + buf.memory = V4L2_MEMORY_USERPTR; + buf.bytesused = len; + buf.m.userptr = reinterpret_cast(buffer); + buf.length = len; + + if (ioctl(GetDeviceFD(), VIDIOC_QBUF, &buf) < 0) { + ALOGERR("Failed to QBUF for the JPEG stream"); + return false; + } + + if (ioctl(GetDeviceFD(), VIDIOC_QBUF, &m_v4l2DstBuffer) < 0) { + CancelStream(); + ALOGERR("Failed to QBUF for the decompressed image"); + return false; + } + + bool ret = true; + + if (ioctl(GetDeviceFD(), VIDIOC_DQBUF, &buf) < 0) { + ALOGERR("Failed to DQBUF of the stream buffer"); + ret = false; + } + + buf.type = m_v4l2DstBuffer.type; + buf.memory = m_v4l2DstBuffer.memory; + + if (ioctl(GetDeviceFD(), VIDIOC_DQBUF, &buf) < 0) { + ALOGERR("Failed to DQBUF of the image buffer"); + ret = false; + } + + m_uiHWDelay = buf.reserved2; + + return ret; +} + +bool CHWJpegV4L2Decompressor::Decompress(const char *buffer, size_t len) +{ + if (m_v4l2Format.type == 0) { + ALOGE("Decompressed image format is not specified"); + return false; + } + + if (m_v4l2DstBuffer.length == 0) { + ALOGE("Decompressed image buffer is not specified"); + return false; + } + + // Do not change the order of PrepareCapture() and PrepareStream(). + // Otherwise, decompression will fail. 
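+    // PrepareCapture() performs REQBUFS/STREAMON on the capture (decoded image)
+    // queue and PrepareStream() does the same for the output (JPEG stream) queue.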
+ if (!PrepareCapture() || !PrepareStream()) + return false; + + if (!QBufAndWait(buffer, len)) + return false; + + return true; +} diff --git a/libhwjpeg/libcsc.cpp b/libhwjpeg/libcsc.cpp new file mode 100644 index 0000000..26c43ee --- /dev/null +++ b/libhwjpeg/libcsc.cpp @@ -0,0 +1,174 @@ +#include + +#include + +#include + +#include "hwjpeg-internal.h" +#include "hwjpeg-libcsc.h" + +int V4L2FMT2HALFMT(int fmt) +{ + switch (fmt) { + case V4L2_PIX_FMT_RGB32: + return HAL_PIXEL_FORMAT_RGBA_8888; + case V4L2_PIX_FMT_RGB24: + return HAL_PIXEL_FORMAT_RGB_888; + case V4L2_PIX_FMT_RGB565: + return HAL_PIXEL_FORMAT_RGB_565; + case V4L2_PIX_FMT_BGR32: + return HAL_PIXEL_FORMAT_BGRA_8888; + case V4L2_PIX_FMT_YVU420M: + return HAL_PIXEL_FORMAT_EXYNOS_YV12_M; + case V4L2_PIX_FMT_YUV420M: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M; + case V4L2_PIX_FMT_YVU420: + return HAL_PIXEL_FORMAT_YV12; + case V4L2_PIX_FMT_YUV420: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P; + case V4L2_PIX_FMT_NV16: + return HAL_PIXEL_FORMAT_YCbCr_422_SP; + case V4L2_PIX_FMT_NV12: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP; + case V4L2_PIX_FMT_NV12M: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M; + case V4L2_PIX_FMT_YUYV: + return HAL_PIXEL_FORMAT_YCbCr_422_I; + case V4L2_PIX_FMT_UYVY: + return HAL_PIXEL_FORMAT_EXYNOS_CbYCrY_422_I; + case V4L2_PIX_FMT_NV61: + return HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_SP; + case V4L2_PIX_FMT_NV21M: + return HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + case V4L2_PIX_FMT_NV21: + return HAL_PIXEL_FORMAT_YCrCb_420_SP; + case V4L2_PIX_FMT_NV12MT_16X16: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED; + case V4L2_PIX_FMT_YVYU: + return HAL_PIXEL_FORMAT_EXYNOS_YCrCb_422_I; + case V4L2_PIX_FMT_VYUY: + return HAL_PIXEL_FORMAT_EXYNOS_CrYCbY_422_I; + default: + ALOGE("Unkown HAL format %d", fmt); + } + + return -1; +} + +#define CSC_ERR(ret) (ret != CSC_ErrorNone) + +void CLibCSC::destroy() +{ + if (m_hdlLibCSC) { + CSC_ERRORCODE ret = csc_deinit(m_hdlLibCSC); + if (CSC_ERR(ret)) + ALOGE("Failed to deinit LibCSC: %d", ret); + m_hdlLibCSC = NULL; + } +} + +bool CLibCSC::init(int devid) +{ + destroy(); + m_hdlLibCSC = csc_init(CSC_METHOD_HW); + if (!m_hdlLibCSC) { + ALOGE("Failed to create CSC handle"); + return false; + } + + CSC_ERRORCODE ret; + + ret = csc_set_method(m_hdlLibCSC, CSC_METHOD_HW); + if (CSC_ERR(ret)) { + ALOGE("Failed to set CSC method to H/W"); + return false; + } + + ret = csc_set_hw_property(m_hdlLibCSC, CSC_HW_PROPERTY_FIXED_NODE, CSC_HW_SC0 + devid); + if (CSC_ERR(ret)) { + ALOGE("Failed to set CSC H/W selection to Scaler%d", devid); + return false; + } + + return true; +} + +bool CLibCSC::set_src_format( + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format) +{ + CSC_ERRORCODE ret; + ret = csc_set_src_format( + m_hdlLibCSC, width, height, + crop_left, crop_top, crop_width, crop_height, color_format, + false); + if (CSC_ERR(ret)) { + ALOGE("Failed to set source format: %d", ret); + return false; + } + + return true; +} + +bool CLibCSC::set_dst_format( + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int color_format) +{ + CSC_ERRORCODE ret; + ret = csc_set_dst_format( + m_hdlLibCSC, width, height, + crop_left, crop_top, crop_width, crop_height, color_format, + false); + if (CSC_ERR(ret)) { + ALOGE("Failed to set source format: %d", ret); + 
return false; + } + + return true; +} + +bool CLibCSC::set_src_buffer(void *addrs[CSC_MAX_PLANES], int mem_type) +{ + CSC_ERRORCODE ret; + ret = csc_set_src_buffer(m_hdlLibCSC, addrs, mem_type); + if (CSC_ERR(ret)) { + ALOGE("Failed to set source buffer: %d", ret); + return false; + } + + return true; +} + +bool CLibCSC::set_dst_buffer(void *addrs[CSC_MAX_PLANES], int mem_type) +{ + CSC_ERRORCODE ret; + ret = csc_set_dst_buffer(m_hdlLibCSC, addrs, mem_type); + if (CSC_ERR(ret)) { + ALOGE("Failed to set source buffer: %d", ret); + return false; + } + + return true; +} + +bool CLibCSC::convert() +{ + CSC_ERRORCODE ret; + ret = csc_convert(m_hdlLibCSC); + if (CSC_ERR(ret)) { + ALOGE("Failed to convert(): %d", ret); + return false; + } + + return true; +} diff --git a/libhwjpeg/libhwjpeg-exynos.cpp b/libhwjpeg/libhwjpeg-exynos.cpp new file mode 100644 index 0000000..968a458 --- /dev/null +++ b/libhwjpeg/libhwjpeg-exynos.cpp @@ -0,0 +1,522 @@ +/* + * Copyright Samsung Electronics Co.,LTD. + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +#include "hwjpeg-internal.h" + +#define ALOGERR(fmt, args...) 
((void)ALOG(LOG_ERROR, LOG_TAG, fmt " [%s]", ##args, strerror(errno))) + +#define ROUND_DOWN(val, denom) ((val) & ~((denom) - 1)) +#define ROUND_UP(val, denom) ROUND_DOWN((val) + (denom) - 1, denom) +#define TO_MASK(val) ((val) - 1) + +class CJpegStreamParser { +private: + unsigned char *m_pStreamBase; + size_t m_nStreamSize; + + unsigned char m_nComponents; + unsigned short m_nWidth; + unsigned short m_nHeight; + + void Initialize(); + size_t GetLength(unsigned char *addr); + bool ParseFrame(unsigned char *addr); + + off_t GetOffset(unsigned char *addr) { + unsigned long beg = reinterpret_cast(m_pStreamBase); + unsigned long cur = reinterpret_cast(addr); + return static_cast(cur - beg); + } + +public: + unsigned char m_iHorizontalFactor; + unsigned char m_iVerticalFactor; + + CJpegStreamParser() : m_pStreamBase(NULL), m_nStreamSize(0) { } + ~CJpegStreamParser() { } + + bool Parse(unsigned char *streambase, size_t length); + + int GetImageFormat(); + unsigned int GetWidth() { return m_nWidth; } + unsigned int GetHeight() { return m_nHeight; } + unsigned int GetNumComponents() { return m_nComponents; } +}; + +void CJpegStreamParser::Initialize() +{ + m_nComponents = 0; + m_nWidth = 0; + m_nHeight = 0; + m_iHorizontalFactor = 1; + m_iVerticalFactor = 1; +} + +size_t CJpegStreamParser::GetLength(unsigned char *addr) +{ + size_t len = *addr++ * 0x100; + return len + *addr; +} + +bool CJpegStreamParser::Parse(unsigned char *streambase, size_t length) +{ + Initialize(); + + m_pStreamBase = streambase; + m_nStreamSize = length; + + unsigned char *addr = m_pStreamBase; + size_t filelen = m_nStreamSize; + + // Finding SOI (xFFD8) + if ((filelen < 2) || (addr[0] != 0xFF) || (addr[1] != 0xD8)) { + ALOGE("Not a valid JPEG stream (len %zu, marker %02x%02x", filelen, addr[0], addr[1]); + return false; + } + addr += 2; + filelen -= 2; + + while (true) { // DHT, DQT, SOF, SOS + if (filelen < 2) { + ALOGE("Incomplete JPEG Stream"); + return false; + } + + if (*addr++ != 0xFF) { + ALOGE("Corrupted JPEG stream"); + return false; + } + + unsigned char marker = *addr++; + + if ((marker != 0xC4) && ((marker & 0xF0) == 0xC0)) { // SOFn + if (marker != 0xC0) { + ALOGE("SOF%d is not supported (offset %zu)", marker & 0xF, m_nStreamSize - filelen); + return false; + } + + if (filelen < GetLength(addr)) { + ALOGE("Too small SOF0 segment"); + return false; + } + + if (!ParseFrame(addr)) + return false; + + return true; // this is the successful exit point + } else if (marker == 0xD9) { // EOI + // This will not meet. 
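+            // (EOI must not appear before SOF0 in a well-formed stream, so this
+            // branch only triggers on corrupted input.)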
+ ALOGE("Unexpected EOI found at %lu\n", GetOffset(addr - 2)); + return false; + } else { + if ((marker == 0xCC) || (marker == 0xDC)) { // DAC and DNL + ALOGE("Unsupported JPEG stream: found marker 0xFF%02X", marker); + return false; + } + + if (filelen < GetLength(addr)) { + ALOGE("Corrupted JPEG stream"); + return false; + } + } + + if (GetLength(addr) == 0) { + ALOGE("Invalid length 0 is read at offset %lu", GetOffset(addr)); + return false; + } + + filelen -= GetLength(addr); + addr += GetLength(addr); + } + + // NEVER REACH HERE + + ALOGE("Unable to find the frame header"); + + return false; +} + +bool CJpegStreamParser::ParseFrame(unsigned char *addr) +{ // 2 bytes of length + // 1 byte of bits per sample + // 2 bytes of height + // 2 bytes of width + // 1 byte of number of components + // n * 3 byte component specifications + if (GetLength(addr) < 17) { + ALOGE("SOF0 should include all three components"); + return false; + } + addr += 2; // skip length + + if (*addr != 8) { // bits per sample + ALOGE("Bits Per Sample should be 8 but it is %d", *addr); + return false; + } + addr++; + + m_nHeight = static_cast(GetLength(addr)); + if ((m_nHeight < 8) || (m_nHeight > 16383)) { + ALOGE("Height %d is not supported", m_nHeight); + return false; + } + addr += 2; + + m_nWidth = static_cast(GetLength(addr)); + if ((m_nWidth < 8) || (m_nWidth > 16383)) { + ALOGE("Width %d is not supported", m_nWidth); + return false; + } + addr += 2; + + m_nComponents = *addr; + if (m_nComponents != 3) { + ALOGE("Number of components should be 3 but it is %d", m_nComponents); + return false; + } + addr++; + + // Only the first component is needed to find chroma subsampling factor + addr++; // skip component identifier + if ((*addr != 0x11) && (*addr != 0x21) && (*addr != 0x12) && (*addr != 0x22)) { + ALOGE("Invalid Luma sampling factor %#02x", *addr); + return false; + } + m_iHorizontalFactor = *addr >> 4; + m_iVerticalFactor = *addr & 0xF; + + return true; +} + +class CLibhwjpegDecompressor: public hwjpeg_decompressor_struct { + enum { + HWJPG_FLAG_NEED_MUNMAP = 1, + }; + + unsigned int m_flags; + bool m_bPrepared; + CHWJpegDecompressor *m_hwjpeg; + + unsigned char *m_pStreamBuffer; + size_t m_nStreamLength; + size_t m_nDummyBytes; + + CJpegStreamParser m_jpegStreamParser; +public: + CLibhwjpegDecompressor() { + // members of hwjpeg_decompressor_struct + image_width = 0; + image_height = 0; + num_components = 3; + chroma_h_samp_factor = 1; + chroma_v_samp_factor = 1; + scale_factor = 1; + output_width = 0; + output_height = 0; + m_bPrepared = false; + + output_format = V4L2_PIX_FMT_RGB32; + + // members of this + m_nStreamLength = 0; + m_nDummyBytes = 0; + + m_hwjpeg = new CHWJpegV4L2Decompressor; + if (!m_hwjpeg || !*m_hwjpeg) { + ALOGE("Failed to create HWJPEG decompressor"); + delete m_hwjpeg; + } + } + + ~CLibhwjpegDecompressor() { + delete m_hwjpeg; + + if (!!(m_flags & HWJPG_FLAG_NEED_MUNMAP)) + munmap(m_pStreamBuffer, m_nStreamLength + m_nDummyBytes); + } + + bool SetStreamPath(const char *path) { + if ((m_pStreamBuffer != NULL) && !!(m_flags & HWJPG_FLAG_NEED_MUNMAP)) { + munmap(m_pStreamBuffer, m_nStreamLength + m_nDummyBytes); + m_flags &= ~HWJPG_FLAG_NEED_MUNMAP; + m_pStreamBuffer = NULL; + m_nStreamLength = 0; + } + + int fd = open(path, O_RDONLY); + if (fd < 0) { + ALOGERR("Failed to open '%s' for decompression", path); + return false; + } + + struct stat st; + if (fstat(fd, &st) < 0) { + ALOGERR("Failed to read size of '%s'", path); + close(fd); + return false; + } + + m_nStreamLength = 
st.st_size; + m_nDummyBytes = 0; + + m_pStreamBuffer = reinterpret_cast( + mmap(NULL, m_nStreamLength, + PROT_READ | PROT_WRITE, MAP_PRIVATE, fd, 0)); + if (m_pStreamBuffer == MAP_FAILED) { + m_pStreamBuffer = NULL; + close(fd); + ALOGERR("Failed to mmap %zu bytes of '%s'", m_nStreamLength, path); + return false; + } + + m_bPrepared = false; + + m_flags |= HWJPG_FLAG_NEED_MUNMAP; + + close(fd); + return true; + } + + bool SetStreamBuffer(unsigned char *buffer, size_t len, size_t dummybytes) { + if ((m_pStreamBuffer != NULL) && !!(m_flags & HWJPG_FLAG_NEED_MUNMAP)) { + munmap(m_pStreamBuffer, m_nStreamLength + m_nDummyBytes); + m_flags &= ~HWJPG_FLAG_NEED_MUNMAP; + } + + m_pStreamBuffer = buffer; + m_nStreamLength = len; + m_nDummyBytes = dummybytes; + + m_bPrepared = false; + + return true; + } + + bool SetStreamBuffer(int buffer, size_t len, size_t dummybytes) { + if ((m_pStreamBuffer != NULL) && !!(m_flags & HWJPG_FLAG_NEED_MUNMAP)) { + munmap(m_pStreamBuffer, m_nStreamLength + m_nDummyBytes); + m_flags &= ~HWJPG_FLAG_NEED_MUNMAP; + } + + m_nStreamLength = len; + m_nDummyBytes = dummybytes; + + m_pStreamBuffer = reinterpret_cast( + mmap(NULL, m_nStreamLength + m_nDummyBytes, + PROT_READ | PROT_WRITE, MAP_SHARED, buffer, 0)); + if (m_pStreamBuffer == MAP_FAILED) { + m_pStreamBuffer = NULL; + ALOGERR("Failed to mmap %zu bytes of dmabuf fd %d", m_nStreamLength, buffer); + return false; + } + + m_flags |= HWJPG_FLAG_NEED_MUNMAP; + + m_bPrepared = false; + + return true; + } + + bool SetImageBuffer(unsigned char *buffer[3], size_t len[3], unsigned int num_bufs) { + if (num_bufs != 1) { + ALOGE("multi-planar image is not supported(%u planes)", num_bufs); + return false; + } + + return m_hwjpeg->SetImageBuffer(reinterpret_cast(buffer[0]), len[0]); + } + + bool SetImageBuffer(int buffer[3], size_t len[3], unsigned int num_bufs) { + if (num_bufs != 1) { + ALOGE("multi-planar image is not supported(%u planes)", num_bufs); + return false; + } + + return m_hwjpeg->SetImageBuffer(buffer[0], len[0]); + } + + void SetDownscaleFactor(unsigned int factor) { scale_factor = factor; } + + bool PrepareDecompression(); + bool Decompress(); + + bool IsEnoughStreamBuffer() { return true; } +}; + +bool CLibhwjpegDecompressor::PrepareDecompression() +{ + if (!m_hwjpeg) { + ALOGE("device node is not opened!"); + return false; + } + + if ((scale_factor != 1) && (scale_factor != 2) && + (scale_factor != 4) && (scale_factor != 8)) { + ALOGE("Invalid downscaling factor %d", scale_factor); + return false; + } + + if (m_pStreamBuffer == NULL) { + ALOGE("No stream buffer is configured"); + return false; + } + + if (!m_jpegStreamParser.Parse(m_pStreamBuffer, m_nStreamLength)) + return false; + + image_width = m_jpegStreamParser.GetWidth(); + image_height = m_jpegStreamParser.GetHeight(); + num_components = m_jpegStreamParser.GetNumComponents(); + chroma_h_samp_factor = m_jpegStreamParser.m_iHorizontalFactor; + chroma_v_samp_factor = m_jpegStreamParser.m_iVerticalFactor; + + if (((image_width % (chroma_h_samp_factor * scale_factor)) != 0) || + ((image_height % (chroma_v_samp_factor * scale_factor)) != 0)) { + ALOGE("Downscaling by factor %d of compressed image size %dx%d(chroma %d:%d) is not supported", + scale_factor, image_width, image_height, chroma_h_samp_factor, chroma_v_samp_factor); + return false; + } + + output_width = image_width / scale_factor; + output_height = image_height / scale_factor; + + if (!m_hwjpeg->SetStreamPixelSize(image_width, image_height)) { + ALOGE("Failed to configure stream pixel size 
(%ux%u)", image_width, image_height);
+        return false;
+    }
+
+    if (!m_hwjpeg->SetImageFormat(output_format, output_width, output_height)) {
+        ALOGE("Failed to configure image format (%ux%u/%08X)", output_width, output_height, output_format);
+        return false;
+    }
+
+    m_bPrepared = true;
+
+    return true;
+}
+
+bool CLibhwjpegDecompressor::Decompress()
+{
+    if (!m_bPrepared) {
+        ALOGE("JPEG header is not parsed");
+        return false;
+    }
+
+    if (!IsEnoughStreamBuffer()) {
+        ALOGE("Not enough buffer length for HWJPEG");
+        return false;
+    }
+
+    m_bPrepared = false;
+
+    if (!m_hwjpeg->Decompress(reinterpret_cast<char *>(m_pStreamBuffer), m_nStreamLength)) {
+        ALOGE("Failed to decompress");
+        return false;
+    }
+
+    return true;
+}
+
+hwjpeg_decompress_ptr hwjpeg_create_decompress()
+{
+    hwjpeg_decompress_ptr p = new CLibhwjpegDecompressor();
+    if (!p)
+        ALOGE("Failed to create decompress struct");
+    return p;
+}
+
+bool hwjpeg_file_src(hwjpeg_decompress_ptr cinfo, const char *path)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->SetStreamPath(path);
+}
+
+void hwjpeg_config_image_format(hwjpeg_decompress_ptr cinfo, __u32 v4l2_pix_fmt)
+{
+    cinfo->output_format = v4l2_pix_fmt;
+}
+
+bool hwjpeg_dmabuf_src(hwjpeg_decompress_ptr cinfo, int infd, size_t insize, size_t dummybytes)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->SetStreamBuffer(infd, insize, dummybytes);
+}
+
+bool hwjpeg_mem_src(hwjpeg_decompress_ptr cinfo,
+                    unsigned char *inbuffer, size_t insize, size_t dummybytes)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->SetStreamBuffer(inbuffer, insize, dummybytes);
+}
+
+bool hwjpeg_mem_dst(hwjpeg_decompress_ptr cinfo,
+                    unsigned char *outbuffer[], size_t outsize[], unsigned int num_buffers)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->SetImageBuffer(outbuffer, outsize, num_buffers);
+}
+
+bool hwjpeg_dmabuf_dst(hwjpeg_decompress_ptr cinfo,
+                       int outfd[], size_t outsize[], unsigned int num_buffers)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->SetImageBuffer(outfd, outsize, num_buffers);
+}
+
+void hwjpeg_set_downscale_factor(hwjpeg_decompress_ptr cinfo, unsigned int factor)
+{
+    cinfo->scale_factor = factor;
+}
+
+bool hwjpeg_read_header(hwjpeg_decompress_ptr cinfo)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->PrepareDecompression();
+}
+
+bool hwjpeg_start_decompress(hwjpeg_decompress_ptr cinfo)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->Decompress();
+}
+
+void hwjpeg_destroy_decompress(hwjpeg_decompress_ptr cinfo)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    delete decomp;
+}
+
+bool hwjpeg_has_enough_stream_buffer(hwjpeg_decompress_ptr cinfo)
+{
+    CLibhwjpegDecompressor *decomp = reinterpret_cast<CLibhwjpegDecompressor *>(cinfo);
+    return decomp->IsEnoughStreamBuffer();
+}
diff --git a/libion_exynos/Android.mk b/libion_exynos/Android.mk
new file mode 100644
index 0000000..777f37b
--- /dev/null
+++ b/libion_exynos/Android.mk
@@ -0,0 +1,28 @@
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../include + +LOCAL_SRC_FILES:= \ + libion.cpp + +LOCAL_MODULE := libion_exynos + +LOCAL_MODULE_TAGS := optional + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libion_exynos/NOTICE b/libion_exynos/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libion_exynos/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libion_exynos/libion.cpp b/libion_exynos/libion.cpp new file mode 100644 index 0000000..d7e8132 --- /dev/null +++ b/libion_exynos/libion.cpp @@ -0,0 +1,210 @@ +/* + * Copyright (C) 2012 Samsung Electronics Co., Ltd. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +struct ion_allocation_data { + size_t len; + size_t align; + unsigned int heap_mask; + unsigned int flags; + ion_handle handle; +}; + +struct ion_fd_data { + ion_handle handle; + int fd; +}; + +struct ion_handle_data { + ion_handle handle; +}; + +struct ion_custom_data { + unsigned int cmd; + unsigned long arg; +}; + +struct ion_preload_data { + unsigned int heap_id_mask; + unsigned int flags; + unsigned int count; + struct ion_preload_object *obj; /* ion.h */ +}; + +#define ION_IOC_MAGIC 'I' +#define ION_IOC_ALLOC _IOWR(ION_IOC_MAGIC, 0, struct ion_allocation_data) +#define ION_IOC_FREE _IOWR(ION_IOC_MAGIC, 1, struct ion_handle_data) +#define ION_IOC_MAP _IOWR(ION_IOC_MAGIC, 2, struct ion_fd_data) +#define ION_IOC_SHARE _IOWR(ION_IOC_MAGIC, 4, struct ion_fd_data) +#define ION_IOC_IMPORT _IOWR(ION_IOC_MAGIC, 5, struct ion_fd_data) +#define ION_IOC_CUSTOM _IOWR(ION_IOC_MAGIC, 6, struct ion_custom_data) +#define ION_IOC_SYNC _IOWR(ION_IOC_MAGIC, 7, struct ion_fd_data) +#define ION_IOC_PRELOAD _IOW (ION_IOC_MAGIC, 8, struct ion_preload_data) + +struct ion_msync_data { + long flags; + ion_buffer buf; + size_t size; + off_t offset; +}; + +enum ION_EXYNOS_CUSTOM_CMD { + ION_EXYNOS_CUSTOM_MSYNC +}; + +ion_client ion_client_create(void) +{ + return open("/dev/ion", O_RDWR); +} + +void ion_client_destroy(ion_client client) +{ + close(client); +} + +ion_buffer ion_alloc(ion_client client, size_t len, size_t align, + unsigned int heap_mask, unsigned int flags) +{ + int ret; + struct ion_handle_data arg_free; + struct ion_fd_data arg_share; + struct ion_allocation_data arg_alloc; + + arg_alloc.len = len; + arg_alloc.align = align; + arg_alloc.heap_mask = heap_mask; + arg_alloc.flags = flags; + + ret = ioctl(client, ION_IOC_ALLOC, &arg_alloc); + if (ret < 0) + return ret; + + arg_share.handle = arg_alloc.handle; + ret = ioctl(client, ION_IOC_SHARE, &arg_share); + + if ((ret >= 0) && (!arg_share.fd)) { + ret = ioctl(client, ION_IOC_SHARE, &arg_share); + if (ret >= 0) + close(0); + else + ret = 0; + } + + arg_free.handle = arg_alloc.handle; + ioctl(client, ION_IOC_FREE, &arg_free); + + if (ret < 0) + return ret; + + return arg_share.fd; +} + +void ion_free(ion_buffer buffer) +{ + close(buffer); +} + +void *ion_map(ion_buffer buffer, size_t len, off_t offset) +{ + return mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, + buffer, offset); +} + +int ion_unmap(void *addr, size_t len) +{ + return munmap(addr, len); +} + +int ion_sync(ion_client client, ion_buffer buffer) +{ + struct ion_fd_data data; + + data.fd = buffer; + + return ioctl(client, ION_IOC_SYNC, &data); +} + +int ion_preload(ion_client client, unsigned int heap_mask, unsigned int flags, + int count, struct ion_preload_object objs[]) +{ + struct ion_preload_data data; + + data.heap_id_mask = heap_mask; + data.flags = flags; + data.count = count; + data.obj = objs; + + return ioctl(client, ION_IOC_PRELOAD, &data); +} + +#define ION_EXYNOS_SYNC_BY_HANDLE 0x01 /* otherwise, by fd */ +#define ION_EXYNOS_SYNC_INV 0x10 /* otherwise, clean */ + +struct ion_exynos_sync_data { + unsigned int flags; + union { + int dmabuf_fd; + ion_handle handle; + }; + void *addr; + size_t size; +}; +#define ION_IOC_EXYNOS_MAGIC 'E' + +#define ION_IOC_EXYNOS_SYNC _IOW(ION_IOC_EXYNOS_MAGIC, 0, struct ion_exynos_sync_data) + +int ion_sync_range(ion_client client, int dmabuf_fd, void *addr, size_t size) +{ + ion_exynos_sync_data data; + ion_custom_data custom; + + data.flags = 0; + data.dmabuf_fd = dmabuf_fd; + 
data.addr = addr; + data.size = size; + + custom.cmd = ION_IOC_EXYNOS_SYNC; + custom.arg = reinterpret_cast(&data); + + return ioctl(client, ION_IOC_CUSTOM, &custom); +} + +int ion_incRef(int fd, int share_fd, ion_handle *handle) +{ + struct ion_fd_data data; + + data.fd = share_fd; + + int ret = ioctl(fd, ION_IOC_IMPORT, &data); + if (ret < 0) + return ret; + + *handle = data.handle; + return ret; +} + +int ion_decRef(int fd, ion_handle handle) +{ + struct ion_handle_data data; + data.handle = handle; + + return ioctl(fd, ION_IOC_FREE, &data); +} diff --git a/libmemtrack/Android.mk b/libmemtrack/Android.mk new file mode 100644 index 0000000..47aa0a6 --- /dev/null +++ b/libmemtrack/Android.mk @@ -0,0 +1,26 @@ +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH := $(call my-dir) + +# HAL module implemenation stored in +# hw/..so +include $(CLEAR_VARS) + +LOCAL_MODULE_RELATIVE_PATH := hw +LOCAL_C_INCLUDES += hardware/libhardware/include bionic/libc/include +LOCAL_SHARED_LIBRARIES := liblog +LOCAL_SRC_FILES := memtrack_exynos.c mali.c ion.c +LOCAL_MODULE := memtrack.$(TARGET_BOARD_PLATFORM) +include $(BUILD_SHARED_LIBRARY) diff --git a/libmemtrack/ion.c b/libmemtrack/ion.c new file mode 100644 index 0000000..1d4a99f --- /dev/null +++ b/libmemtrack/ion.c @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#include + +#include "memtrack_exynos.h" + +#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0])) +#define min(x, y) ((x) < (y) ? 
(x) : (y)) + +#define ION_DEBUG_PATH "/sys/kernel/debug/ion/" +#define ION_DEBUG_CLIENT_PATH "/sys/kernel/debug/ion/clients" + +static struct memtrack_record ion_record_templates[] = { + { + .flags = MEMTRACK_FLAG_SMAPS_UNACCOUNTED | + MEMTRACK_FLAG_PRIVATE | + MEMTRACK_FLAG_SYSTEM | + MEMTRACK_FLAG_NONSECURE, + }, +}; + +int ion_memtrack_get_memory(pid_t pid, enum memtrack_type __unused type, + struct memtrack_record *records, + size_t *num_records) +{ + size_t allocated_records = min(*num_records, ARRAY_SIZE(ion_record_templates)); + int i; + FILE *fp; + struct stat s; + char line[1024]; + char path[64]; + + *num_records = ARRAY_SIZE(ion_record_templates); + + /* fastpath to return the necessary number of records */ + if (allocated_records == 0) { + return 0; + } + + if (lstat(ION_DEBUG_CLIENT_PATH, &s) == 0) + snprintf(path, sizeof(path), "%s/%d-0", ION_DEBUG_CLIENT_PATH, pid); + else + snprintf(path, sizeof(path), "%s/%d", ION_DEBUG_PATH, pid); + + fp = fopen(path, "r"); + if (fp == NULL) { + return -errno; + } + + memcpy(records, ion_record_templates, + sizeof(struct memtrack_record) * allocated_records); + + while (1) { + if (fgets(line, sizeof(line), fp) == NULL) { + break; + } + + /* Format: + * heap_name: size_in_bytes + * ion_noncontig_he: 30134272 + */ + if (!strncmp(line, "ion_noncontig_he", 16)) { + unsigned int size_in_bytes; + + int ret = sscanf(line, "%*s %u\n", &size_in_bytes); + if (ret == 1) { + records[0].size_in_bytes = size_in_bytes; + break; + } + } + } + + fclose(fp); + + return 0; +} diff --git a/libmemtrack/mali.c b/libmemtrack/mali.c new file mode 100644 index 0000000..d817aed --- /dev/null +++ b/libmemtrack/mali.c @@ -0,0 +1,227 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include + +#include + +#include "memtrack_exynos.h" + +/* Following includes added for directory parsing. */ +#include +#include + +/* Some general defines. */ +#define MALI_DEBUG_FS_PATH "/d/mali/mem/" +#define MALI_DEBUG_MEM_FILE "/mem_profile" + +#define MAX_FILES_PER_PID 8 +#define MAX_FILES_PER_NAME 32 +static int libmemtrack_gbl_input_filename_counter = 0; +static char libmemtrack_gbl_input_filename[MAX_FILES_PER_PID][MAX_FILES_PER_NAME]; + +#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0])) +#define min(x, y) ((x) < (y) ? (x) : (y)) + +struct memtrack_record record_templates[] = { + { + .flags = MEMTRACK_FLAG_SMAPS_ACCOUNTED | + MEMTRACK_FLAG_PRIVATE | + MEMTRACK_FLAG_NONSECURE, + }, + { + .flags = MEMTRACK_FLAG_SMAPS_UNACCOUNTED | + MEMTRACK_FLAG_PRIVATE | + MEMTRACK_FLAG_NONSECURE, + }, +}; + +static void scan_directory_for_filenames(pid_t pid) +{ + /* As per ARM, there can be multiple files */ + DIR *directory; + struct dirent *entries; + char pid_string[8] = {0}; + int pid_index = 0; + + /* Open directory. 
*/ + directory = opendir(MALI_DEBUG_FS_PATH); + + sprintf(pid_string, "%d", pid); + for (pid_index = 0; pid_index < 8; pid_index++) { + if (pid_string[pid_index] == 0) { + pid_string[pid_index] = '_'; + break; + } + } + + libmemtrack_gbl_input_filename_counter = 0; + + if (directory != NULL) { + /* Keep reading the directory. */ + while ((entries = readdir(directory))) { + /* Check if the PID is present in the direcotry entry. + * If it is present, then keep the filename for reading + * contents. Also concatenate the directory path, so that + * file can be opened. + * */ + if (!strncmp(entries->d_name, pid_string, strlen(pid_string))) { + snprintf(&libmemtrack_gbl_input_filename[libmemtrack_gbl_input_filename_counter][0], MAX_FILES_PER_NAME, "%s%s%s", MALI_DEBUG_FS_PATH, entries->d_name, MALI_DEBUG_MEM_FILE); + libmemtrack_gbl_input_filename_counter++; + } + } + /* Close directory before leaving. */ + (void) closedir(directory); + } else { + ALOGE("libmemtrack-hw -- Couldn't open the directory - %s \r\n", MALI_DEBUG_FS_PATH); + } + + return; +} + +int mali_memtrack_get_memory(pid_t pid, enum memtrack_type __unused type, + struct memtrack_record *records, + size_t *num_records) +{ + size_t allocated_records = min(*num_records, ARRAY_SIZE(record_templates)); + FILE *fp; + int local_count; + unsigned int temp_val = 0, total_memory_size = 0, native_buf_mem_size = 0; + bool native_buffer_read = false; + char line[1024] = {0}; + + *num_records = ARRAY_SIZE(record_templates); + + /* fastpath to return the necessary number of records */ + if (allocated_records == 0) { + return 0; + } + + memcpy(records, record_templates, + sizeof(struct memtrack_record) * allocated_records); + + /* First, scan the directoy. */ + scan_directory_for_filenames(pid); + + local_count = 0; + total_memory_size = 0; + native_buf_mem_size = 0; + + while (local_count < libmemtrack_gbl_input_filename_counter) { + fp = fopen(&libmemtrack_gbl_input_filename[local_count][0], "r"); + + if (fp == NULL) { + /* Unable to open the file. Either move to next file, or + * return to caller. */ + local_count++; + continue; + } + + while (1) { + char memory_type[16] = {0}; + char memory_type_2[16] = {0}; + int ret = 0; + + if (native_buffer_read == false) { + if (fgets(line, sizeof(line), fp) == NULL) { + if (native_buffer_read == false) { + /* Unable to read Native buffer. + * Probably DDK doesn't support Native Buffer. + * Reset to start of file and look for Total + * Memory. */ + fseek(fp, 0, SEEK_SET); + + /* Also, set Native buffer flag and memory sizes. */ + native_buffer_read = true; + continue; + } + } + + /* Search for Total memory. */ + /* Format: + * + * Channel: Native Buffer (Total memory: 44285952) + * + */ + ret = sscanf(line, "%*s %15s %15s %*s %*s %d \n", memory_type, memory_type_2, &temp_val); + + if (ret != 3) + continue; + + if ((strcmp(memory_type, "Native") == 0) && + (strcmp(memory_type_2, "Buffer") == 0)) { + /* Set native buffer memory read flag to true. */ + native_buffer_read = true; + native_buf_mem_size += temp_val; + } else { + /* Ignore case. Nothing to do here. */ + /* Continue reading file until NativeBuffer is found. */ + } + } else { + if (fgets(line, sizeof(line), fp) == NULL) { + /* Unable to find, so break the loop here.*/ + break; + } + + /* Search for Total memory. */ + /* Format: + * + * Total allocated memory: 36146960 + * + */ + ret = sscanf(line, "%15s %*s %*s %d \n", memory_type, &temp_val); + + if (ret != 2) + continue; + + if (strcmp(memory_type, "Total") == 0) { + /* Store total memory. 
*/ + total_memory_size += temp_val; + } else { + /* Ignore case. Nothing to do here. */ + } + } /* end if (native_buffer_read == false) */ + + } /* End while(1) */ + + if (fp != NULL) { + /* Close the opened file. */ + fclose(fp); + + /* Reset some of the local variables here. */ + fp = NULL; + native_buffer_read = false; + } + + /* Manage local variables and counters. */ + local_count++; + + } /* while (local_count <= libmemtrack_gbl_input_filename_counter) */ + + /* Arrange and return memory size details. */ + if (allocated_records > 0) + records[0].size_in_bytes = 0; + + if (allocated_records > 1) + records[1].size_in_bytes = total_memory_size - native_buf_mem_size; + + return 0; +} diff --git a/libmemtrack/memtrack_exynos.c b/libmemtrack/memtrack_exynos.c new file mode 100644 index 0000000..ef6358b --- /dev/null +++ b/libmemtrack/memtrack_exynos.c @@ -0,0 +1,61 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include + +#include "memtrack_exynos.h" + +int exynos_memtrack_init(const struct memtrack_module __unused *module) +{ + return 0; +} + +int exynos_memtrack_get_memory(const struct memtrack_module __unused *module, + pid_t pid, + int type, + struct memtrack_record *records, + size_t *num_records) +{ + if (type == MEMTRACK_TYPE_GL) { + return mali_memtrack_get_memory(pid, type, records, num_records); + } else if (type == MEMTRACK_TYPE_GRAPHICS) { + return ion_memtrack_get_memory(pid, type, records, num_records); + } + + return -EINVAL; +} + +static struct hw_module_methods_t memtrack_module_methods = { + .open = NULL, +}; + +struct memtrack_module HAL_MODULE_INFO_SYM = { + common: { + tag: HARDWARE_MODULE_TAG, + module_api_version: MEMTRACK_MODULE_API_VERSION_0_1, + hal_api_version: HARDWARE_HAL_API_VERSION, + id: MEMTRACK_HARDWARE_MODULE_ID, + name: "Exynos Memory Tracker HAL", + author: "The Android Open Source Project", + methods: &memtrack_module_methods, + }, + + init: exynos_memtrack_init, + getMemory: exynos_memtrack_get_memory, +}; + diff --git a/libmemtrack/memtrack_exynos.h b/libmemtrack/memtrack_exynos.h new file mode 100644 index 0000000..ccebb79 --- /dev/null +++ b/libmemtrack/memtrack_exynos.h @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2013 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MEMTRACK_EXYNOS5_H_ +#define _MEMTRACK_EXYNOS5_H_ + +int mali_memtrack_get_memory(pid_t pid, enum memtrack_type type, + struct memtrack_record *records, + size_t *num_records); +int ion_memtrack_get_memory(pid_t pid, enum memtrack_type type, + struct memtrack_record *records, + size_t *num_records); + +#endif diff --git a/libmpp/Android.mk b/libmpp/Android.mk new file mode 100644 index 0000000..d1058f0 --- /dev/null +++ b/libmpp/Android.mk @@ -0,0 +1,40 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/exynos3/include \ + $(TOP)/hardware/samsung_slsi/exynos4/include \ + $(TOP)/hardware/samsung_slsi/exynos5/include + +LOCAL_SRC_FILES := MppFactory.cpp +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libmpp + +ifeq ($(BOARD_USES_FIMC), true) +LOCAL_SHARED_LIBRARIES += libexynosfimc +else +LOCAL_SHARED_LIBRARIES += libexynosgscaler +endif + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libmpp/LibMpp.h b/libmpp/LibMpp.h new file mode 100644 index 0000000..ec537c3 --- /dev/null +++ b/libmpp/LibMpp.h @@ -0,0 +1,49 @@ +#ifndef LIBMPP_H_ +#define LIBMPP_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "exynos_format.h" +#include "exynos_v4l2.h" +#ifdef USES_GSCALER +#include "exynos_gscaler.h" +#endif +#ifdef USES_FIMC +#include "exynos_fimc.h" +#endif + +class LibMpp { +public: + LibMpp() { + ALOGD("%s\n", __func__); + } + virtual ~LibMpp() { + ALOGD("%s\n", __func__); + } + virtual int ConfigMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst) = 0; + virtual int RunMpp(void *handle, exynos_mpp_img *src, + exynos_mpp_img *dst) = 0; + virtual int StopMpp(void *handle) = 0; + virtual void DestroyMpp(void *handle) = 0; + virtual int SetCSCProperty(void *handle, unsigned int eqAuto, + unsigned int fullRange, unsigned int colorspace) = 0; + virtual int FreeMpp(void *handle) = 0; + virtual int SetInputCrop(void *handle, exynos_mpp_img *src, exynos_mpp_img *dst) = 0; + virtual int ConfigBlendMpp(void __unused *handle, exynos_mpp_img __unused *src, + exynos_mpp_img __unused *dst, + struct SrcBlendInfo __unused *srcblendinfo) {return 0;}; +}; + +#endif diff --git a/libmpp/MppFactory.cpp b/libmpp/MppFactory.cpp new file mode 100644 index 0000000..cb70dad --- /dev/null +++ b/libmpp/MppFactory.cpp @@ -0,0 +1,25 @@ +#include "MppFactory.h" +#include "LibMpp.h" + +LibMpp *MppFactory::CreateMpp(int id, int mode, int outputMode, int drm) +{ + ALOGD("%s dev(%d) mode(%d) drm(%d), \n", __func__, id, mode, drm); +#ifdef USES_GSCALER + return 
reinterpret_cast(exynos_gsc_create_exclusive(id, + mode, outputMode, drm)); +#else + return reinterpret_cast(exynos_fimc_create_exclusive(id, + mode, outputMode, drm)); +#endif +} + +LibMpp *MppFactory::CreateBlendMpp(int id, int mode, int outputMode, int drm) +{ + ALOGD("%s(%d)\n", __func__, __LINE__); + +#ifdef USES_GSCALER + return reinterpret_cast(exynos_gsc_create_blend_exclusive(id, + mode, outputMode, drm)); +#endif + return NULL; +} diff --git a/libmpp/MppFactory.h b/libmpp/MppFactory.h new file mode 100644 index 0000000..7ed56ce --- /dev/null +++ b/libmpp/MppFactory.h @@ -0,0 +1,17 @@ +#ifndef MPP_FACTORY_H_ +#define MPP_FACTORY_H_ + +#include "LibMpp.h" + +class MppFactory { +public: + MppFactory() { + ALOGD("%s\n", __func__); + } + virtual ~MppFactory() { + ALOGD("%s\n", __func__); + } + virtual LibMpp *CreateMpp(int id, int mode, int outputMode, int drm); + virtual LibMpp *CreateBlendMpp(int id, int mode, int outputMode, int drm); +}; +#endif diff --git a/libmpp/NOTICE b/libmpp/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libmpp/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libscaler/Android.mk b/libscaler/Android.mk new file mode 100644 index 0000000..7edae57 --- /dev/null +++ b/libscaler/Android.mk @@ -0,0 +1,41 @@ +# Copyright (C) 2013 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils + +LOCAL_SRC_FILES := libscaler.cpp libscaler-v4l2.cpp libscalerblend-v4l2.cpp libscaler-m2m1shot.cpp libscaler-swscaler.cpp +ifeq ($(BOARD_USES_SCALER_M2M1SHOT), true) +LOCAL_CFLAGS += -DSCALER_USE_M2M1SHOT +endif + +# since 3.18 kernel +ifneq ($(filter 3.18, $(TARGET_LINUX_KERNEL_VERSION)),) +LOCAL_CFLAGS += -DSCALER_USE_LOCAL_CID +LOCAL_CFLAGS += -DSCALER_USE_PREMUL_FMT +endif + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libexynosscaler + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libscaler/NOTICE b/libscaler/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libscaler/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libscaler/libscaler-common.h b/libscaler/libscaler-common.h new file mode 100644 index 0000000..665436e --- /dev/null +++ b/libscaler/libscaler-common.h @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * Copyright@ Samsung Electronics Co. 
LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file libscaler-common.h + * \brief source file for Scaler HAL + * \author Cho KyongHo + * \date 2014/05/08 + * + * Revision History: + * - 2014.05.08 : Cho KyongHo (pullip.cho@samsung.com) \n + * Create + */ +#ifndef _LIBSCALER_COMMON_H_ +#define _LIBSCALER_COMMON_H_ + +#define LOG_TAG "libexynosscaler" +#include +#include +#include + +//#define LOG_NDEBUG 0 + +#ifdef __GNUC__ +# define __UNUSED__ __attribute__((__unused__)) +#else +# define __UNUSED__ +#endif + +#define SC_LOGERR(fmt, args...) ((void)ALOG(LOG_ERROR, LOG_TAG, "%s: " fmt " [%s]", __func__, ##args, strerror(errno))) +#define SC_LOGE(fmt, args...) ((void)ALOG(LOG_ERROR, LOG_TAG, "%s: " fmt, __func__, ##args)) +#define SC_LOGI(fmt, args...) ((void)ALOG(LOG_INFO, LOG_TAG, "%s: " fmt, __func__, ##args)) +#define SC_LOGI_IF(cond, fmt, args...) do { \ + if (cond) \ + SC_LOGI(fmt, ##args); \ + } while (0) +#define SC_LOGE_IF(cond, fmt, args...) do { \ + if (cond) \ + SC_LOGE(fmt, ##args); \ + } while (0) +#define SC_LOG_ASSERT(cont, fmt, args...) ((void)ALOG_ASSERT(cond, "%s: " fmt, __func__, ##args)) + +#ifdef SC_DEBUG +#define SC_LOGD(args...) ((void)ALOG(LOG_INFO, LOG_TAG, ##args)) +#define SC_LOGD_IF(cond, fmt, args...) do { \ + if (cond) \ + SC_LOGD(fmt, ##args); \ + } while (0) +#else +#define SC_LOGD(args...) do { } while (0) +#define SC_LOGD_IF(cond, fmt, args...) do { } while (0) +#endif + +#define ARRSIZE(arr) (sizeof(arr)/sizeof(arr[0])) + + + +namespace LibScaler { +template +static inline T min (T a, T b) { + return (a > b) ? b : a; +} + +template +static inline void swap(T &a, T &b) { + T t = a; + a = b; + b = a; +} + +static inline bool UnderOne16thScaling(unsigned int srcw, unsigned int srch, + unsigned int dstw, unsigned int dsth, unsigned int rot) { + if ((rot == 90) || (rot == 270)) + swap(srcw, srch); + + return ((srcw > (dstw * 16)) || (srch > (dsth * 16))); +} + +}; +// marker for output parameters +#define __out + +#endif //_LIBSCALER_COMMON_H_ diff --git a/libscaler/libscaler-m2m1shot.cpp b/libscaler/libscaler-m2m1shot.cpp new file mode 100644 index 0000000..79e61c5 --- /dev/null +++ b/libscaler/libscaler-m2m1shot.cpp @@ -0,0 +1,339 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
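+ * Scaler backend built on the m2m1shot ioctl interface: each job is
+ * handed to the driver with a single M2M1SHOT_IOC_PROCESS call instead
+ * of the V4L2 streaming sequence, and requests that downscale by more
+ * than 16:1 in either direction fall back to the in-process software
+ * scaler (CScalerSW_YUYV / CScalerSW_NV12).
+ *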
+ * \file libscaler-m2m1shot.cpp + * \brief source file for Scaler HAL + * \author Cho KyongHo + * \date 2014/05/08 + * + * Revision History: + * - 2014.05.08 : Cho KyongHo (pullip.cho@samsung.com) \n + * Create + */ +#include +#include +#include +#include +#include + +#include + +#include "libscaler-common.h" +#include "libscaler-m2m1shot.h" +#include "libscaler-swscaler.h" + +using namespace std; + +const char *dev_base_name[] = { + "/dev/m2m1shot_scaler0", + "/dev/m2m1shot_scaler1", + "/dev/m2m1shot_scaler2", + "/dev/m2m1shot_scaler3", +}; + +struct PixFormat { + unsigned int pixfmt; + char planes; + char bit_pp[3]; +}; + +const static PixFormat g_pixfmt_table[] = { + {V4L2_PIX_FMT_RGB32, 1, {32, 0, 0}, }, + {V4L2_PIX_FMT_BGR32, 1, {32, 0, 0}, }, + {V4L2_PIX_FMT_RGB565, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_RGB555X, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_RGB444, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_YUYV, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_YVYU, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_UYVY, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_NV16, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_NV61, 1, {16, 0, 0}, }, + {V4L2_PIX_FMT_YUV420, 1, {12, 0, 0}, }, + {V4L2_PIX_FMT_YVU420, 1, {12, 0, 0}, }, + {V4L2_PIX_FMT_NV12M, 2, {8, 4, 0}, }, + {V4L2_PIX_FMT_NV21M, 2, {8, 4, 0}, }, + {v4l2_fourcc('V', 'M', '1', '2'), 2, {8, 4, 0}, }, + {V4L2_PIX_FMT_NV12, 1, {12, 0, 0}, }, + {V4L2_PIX_FMT_NV21, 1, {12, 0, 0}, }, + {v4l2_fourcc('N', 'M', '2', '1'), 2, {8, 4, 0}, }, + {V4L2_PIX_FMT_YUV420M, 3, {8, 2, 2}, }, + {V4L2_PIX_FMT_YVU420M, 3, {8, 2, 2}, }, + {V4L2_PIX_FMT_NV24, 1, {24, 0, 0}, }, + {V4L2_PIX_FMT_NV42, 1, {24, 0, 0}, }, +}; + + +CScalerM2M1SHOT::CScalerM2M1SHOT(int devid, int __UNUSED__ drm) : m_iFD(-1) +{ + memset(&m_task, 0, sizeof(m_task)); + + if ((devid < 0) || (devid > 3)) { // instance number must be between 0 ~ 3 + SC_LOGE("Invalid device instance ID %d", devid); + return; + } + + m_iFD = open(dev_base_name[devid], O_RDWR); + if (m_iFD < 0) { + SC_LOGERR("Failed to open '%s'", dev_base_name[devid]); + } else { + // default 3 planes not to miss any buffer address + m_task.buf_out.num_planes = 3; + m_task.buf_cap.num_planes = 3; + } +} + +CScalerM2M1SHOT::~CScalerM2M1SHOT() +{ + if (m_iFD >= 0) + close(m_iFD); +} + +bool CScalerM2M1SHOT::Run() +{ + int ret; + + if (LibScaler::UnderOne16thScaling( + m_task.fmt_out.crop.width, m_task.fmt_out.crop.height, + m_task.fmt_cap.crop.width, m_task.fmt_cap.crop.height, + m_task.op.rotate)) + return RunSWScaling(); + + ret = ioctl(m_iFD, M2M1SHOT_IOC_PROCESS, &m_task); + if (ret < 0) { + SC_LOGERR("Failed to process the given M2M1SHOT task"); + return false; + } + + return true; +} + +bool CScalerM2M1SHOT::SetFormat(m2m1shot_pix_format &fmt, m2m1shot_buffer &buf, + unsigned int width, unsigned int height, unsigned int v4l2_fmt) { + const PixFormat *pixfmt = NULL; + + fmt.width = width; + fmt.height = height; + fmt.fmt = v4l2_fmt; + + for (size_t i = 0; i < ARRSIZE(g_pixfmt_table); i++) { + if (g_pixfmt_table[i].pixfmt == v4l2_fmt) { + pixfmt = &g_pixfmt_table[i]; + break; + } + } + + if (!pixfmt) { + SC_LOGE("Format %#x is not supported", v4l2_fmt); + return false; + } + + for (int i = 0; i < pixfmt->planes; i++) { + if (((pixfmt->bit_pp[i] * width) % 8) != 0) { + SC_LOGE("Plane %d of format %#x must have even width", i, v4l2_fmt); + return false; + } + buf.plane[i].len = (pixfmt->bit_pp[i] * width * height) / 8; + } + + if (pixfmt->pixfmt == V4L2_PIX_FMT_YVU420) { + unsigned int y_size = width * height; + unsigned int c_span = ALIGN(width / 2, 16); + buf.plane[0].len = y_size + (c_span * 
height / 2) * 2; + } + + buf.num_planes = pixfmt->planes; + + return true; +} + +bool CScalerM2M1SHOT::SetCrop(m2m1shot_pix_format &fmt, + unsigned int l, unsigned int t, unsigned int w, unsigned int h) { + if (fmt.width <= l) { + SC_LOGE("crop left %d is larger than image width %d", l, fmt.width); + return false; + } + if (fmt.height <= t) { + SC_LOGE("crop top %d is larger than image height %d", t, fmt.height); + return false; + } + if (fmt.width < (l + w)) { + SC_LOGE("crop width %d@%d exceeds image width %d", w, l, fmt.width); + return false; + } + if (fmt.height < (t + h)) { + SC_LOGE("crop height %d@%d exceeds image height %d", h, t, fmt.height); + return false; + } + + fmt.crop.left = l; + fmt.crop.top = t; + fmt.crop.width = w; + fmt.crop.height = h; + + return true; +} + +bool CScalerM2M1SHOT::SetAddr( + m2m1shot_buffer &buf, void *addr[SC_NUM_OF_PLANES], int mem_type) { + if (mem_type == V4L2_MEMORY_DMABUF) { + buf.type = M2M1SHOT_BUFFER_DMABUF; + for (int i = 0; i < buf.num_planes; i++) + buf.plane[i].fd = static_cast<__s32>(reinterpret_cast(addr[i])); + } else if (mem_type == V4L2_MEMORY_USERPTR) { + buf.type = M2M1SHOT_BUFFER_USERPTR; + for (int i = 0; i < buf.num_planes; i++) + buf.plane[i].userptr = reinterpret_cast(addr[i]); + } else { + SC_LOGE("Unknown buffer type %d", mem_type); + return false; + } + + return true; +} + +bool CScalerM2M1SHOT::SetRotate(int rot, int hflip, int vflip) { + if ((rot % 90) != 0) { + SC_LOGE("Rotation degree %d must be multiple of 90", rot); + return false; + } + + rot = rot % 360; + if (rot < 0) + rot = 360 + rot; + + m_task.op.rotate = rot; + m_task.op.op &= ~(M2M1SHOT_OP_FLIP_HORI | M2M1SHOT_OP_FLIP_VIRT); + if (hflip) + m_task.op.op |= M2M1SHOT_OP_FLIP_HORI; + if (vflip) + m_task.op.op |= M2M1SHOT_OP_FLIP_VIRT; + + return true; +} + +static bool GetBuffer(m2m1shot_buffer &buf, char *addr[]) +{ + for (int i = 0; i < buf.num_planes; i++) { + if (buf.type == M2M1SHOT_BUFFER_DMABUF) { + addr[i] = reinterpret_cast(mmap(NULL, buf.plane[i].len, + PROT_READ | PROT_WRITE, MAP_SHARED, + buf.plane[i].fd, 0)); + if (addr[i] == MAP_FAILED) { + SC_LOGE("Failed to map FD %d", buf.plane[i].fd); + while (i-- > 0) + munmap(addr[i], buf.plane[i].len); + return false; + } + } else { + addr[i] = reinterpret_cast(buf.plane[i].userptr); + } + } + + return true; +} + +static void PutBuffer(m2m1shot_buffer &buf, char *addr[]) +{ + for (int i = 0; i < buf.num_planes; i++) { + if (buf.type == M2M1SHOT_BUFFER_DMABUF) + munmap(addr[i], buf.plane[i].len); + } +} + +bool CScalerM2M1SHOT::RunSWScaling() +{ + if (m_task.fmt_cap.fmt != m_task.fmt_out.fmt) { + SC_LOGE("Source and target image format must be the same"); + return false; + } + + if (m_task.op.rotate != 0) { + SC_LOGE("Rotation is not allowed for S/W Scaling"); + return false; + } + + SC_LOGI("Running S/W Scaler: %dx%d -> %dx%d", + m_task.fmt_out.crop.width, m_task.fmt_out.crop.height, + m_task.fmt_cap.crop.width, m_task.fmt_cap.crop.height); + + CScalerSW *swsc; + char *src[3], *dst[3]; + + switch (m_task.fmt_cap.fmt) { + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_YVYU: + if (!GetBuffer(m_task.buf_out, src)) + return false; + + if (!GetBuffer(m_task.buf_cap, dst)) { + PutBuffer(m_task.buf_out, src); + return false; + } + + swsc = new CScalerSW_YUYV(src[0], dst[0]); + break; + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV21M: + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + if (!GetBuffer(m_task.buf_out, src)) + return false; + + if (!GetBuffer(m_task.buf_cap, dst)) { + PutBuffer(m_task.buf_out, src); + 
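+            /* Mapping the capture (destination) buffer failed; the source
+             * mapping has already been released above, so only the error
+             * is reported here. */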
return false; + } + + if (m_task.buf_out.num_planes == 1) + src[1] = src[0] + m_task.fmt_out.width * m_task.fmt_out.height; + + if (m_task.buf_cap.num_planes == 1) + dst[1] = dst[0] + m_task.fmt_cap.width * m_task.fmt_cap.height; + + swsc = new CScalerSW_NV12(src[0], src[1], dst[0], dst[1]); + break; + case V4L2_PIX_FMT_UYVY: // TODO: UYVY is not implemented yet. + default: + SC_LOGE("Format %x is not supported", m_task.fmt_out.fmt); + return false; + } + + if (swsc == NULL) { + SC_LOGE("Failed to allocate SW Scaler"); + PutBuffer(m_task.buf_out, src); + PutBuffer(m_task.buf_cap, dst); + return false; + } + + swsc->SetSrcRect(m_task.fmt_out.crop.left, m_task.fmt_out.crop.top, + m_task.fmt_out.crop.width, m_task.fmt_out.crop.height, + m_task.fmt_out.width); + + swsc->SetDstRect(m_task.fmt_cap.crop.left, m_task.fmt_cap.crop.top, + m_task.fmt_cap.crop.width, m_task.fmt_cap.crop.height, + m_task.fmt_cap.width); + + bool ret = swsc->Scale(); + + delete swsc; + + PutBuffer(m_task.buf_out, src); + PutBuffer(m_task.buf_cap, dst); + + return ret; +} diff --git a/libscaler/libscaler-m2m1shot.h b/libscaler/libscaler-m2m1shot.h new file mode 100644 index 0000000..58366cf --- /dev/null +++ b/libscaler/libscaler-m2m1shot.h @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
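+ * Illustrative usage sketch (buffer names and sizes are examples only;
+ * src_bufs/dst_bufs are void *[SC_NUM_OF_PLANES] arrays carrying dma-buf
+ * fds cast to pointers when V4L2_MEMORY_DMABUF is used):
+ *
+ *   CScalerM2M1SHOT sc(0);                  // opens /dev/m2m1shot_scaler0
+ *   if (sc.Valid() &&
+ *       sc.SetSrcFormat(1920, 1080, V4L2_PIX_FMT_NV12M) &&
+ *       sc.SetDstFormat(1280,  720, V4L2_PIX_FMT_NV12M) &&
+ *       sc.SetSrcCrop(0, 0, 1920, 1080) &&
+ *       sc.SetDstCrop(0, 0, 1280,  720) &&
+ *       sc.SetSrcAddr(src_bufs, V4L2_MEMORY_DMABUF) &&
+ *       sc.SetDstAddr(dst_bufs, V4L2_MEMORY_DMABUF) &&
+ *       sc.SetRotate(0, 0, 0))
+ *       sc.Run();
+ *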
+ * \file libscaler-m2m1shot.h + * \brief source file for Scaler HAL + * \author Cho KyongHo + * \date 2014/05/08 + * + * Revision History: + * - 2014.05.08 : Cho KyongHo (pullip.cho@samsung.com) \n + * Create + */ +#ifndef _LIBSCALER_M2M1SHOT_H_ +#define _LIBSCALER_M2M1SHOT_H_ + +#include + +class CScalerM2M1SHOT { + int m_iFD; + m2m1shot m_task; + + bool SetFormat(m2m1shot_pix_format &fmt, m2m1shot_buffer &buf, + unsigned int width, unsigned int height, unsigned int v4l2_fmt); + bool SetCrop(m2m1shot_pix_format &fmt, + unsigned int l, unsigned int t, unsigned int w, unsigned int h); + bool SetAddr(m2m1shot_buffer &buf, void *addr[SC_NUM_OF_PLANES], int mem_type); + + bool RunSWScaling(); +public: + CScalerM2M1SHOT(int devid, int allow_drm = 0); + ~CScalerM2M1SHOT(); + + bool Run(); + + inline bool Valid() { return m_iFD >= 0; } + + inline bool SetSrcFormat(unsigned int width, unsigned int height, unsigned int v4l2_fmt) { + return SetFormat(m_task.fmt_out, m_task.buf_out, width, height, v4l2_fmt); + } + + inline bool SetDstFormat(unsigned int width, unsigned int height, unsigned int v4l2_fmt) { + return SetFormat(m_task.fmt_cap, m_task.buf_cap, width, height, v4l2_fmt); + } + + inline bool SetSrcCrop(unsigned int l, unsigned int t, unsigned int w, unsigned int h) { + return SetCrop(m_task.fmt_out, l, t, w, h); + } + + inline bool SetDstCrop(unsigned int l, unsigned int t, unsigned int w, unsigned int h) { + return SetCrop(m_task.fmt_cap, l, t, w, h); + } + + inline bool SetSrcAddr(void *addr[SC_NUM_OF_PLANES], int mem_type) { + return SetAddr(m_task.buf_out, addr, mem_type); + } + + inline bool SetDstAddr(void *addr[SC_NUM_OF_PLANES], int mem_type) { + return SetAddr(m_task.buf_cap, addr, mem_type); + } + + bool SetRotate(int rot, int hflip, int vflip); + + inline void SetCSCWide(bool wide) { + m_task.op.op &= ~(M2M1SHOT_OP_CSC_WIDE | M2M1SHOT_OP_CSC_NARROW); + m_task.op.op |= wide ? 
M2M1SHOT_OP_CSC_WIDE : M2M1SHOT_OP_CSC_NARROW; + } + + inline void SetCSCEq(unsigned int colorspace) { + /* TODO: need to add M2M1SHOT_OP_CSC_2020 */ + m_task.op.op &= ~(M2M1SHOT_OP_CSC_601 | M2M1SHOT_OP_CSC_709); + if (colorspace == V4L2_COLORSPACE_REC709) + m_task.op.op |= M2M1SHOT_OP_CSC_709; + else + m_task.op.op |= M2M1SHOT_OP_CSC_601; + } + + inline void SetFilter(unsigned int filter) { + m_task.op.op &= ~LIBSC_M2M1SHOT_OP_FILTER_MASK; + m_task.op.op |= filter << LIBSC_M2M1SHOT_OP_FILTER_SHIFT; + } + + /* No effect in M2M1SHOT */ + inline void SetDRM(bool __UNUSED__ drm) { } + inline void SetSrcPremultiplied(bool __UNUSED__ premultiplied) { } + inline void SetDstPremultiplied(bool __UNUSED__ premultiplied) { } + inline void SetSrcCacheable(bool __UNUSED__ cacheable) { } + inline void SetDstCacheable(bool __UNUSED__ cacheable) { } + inline bool Stop() { return true; } + inline bool DevSetCtrl() { return false; } + inline bool DevSetFormat() { return false; } + inline bool ReqBufs() { return false; } + inline bool StreamOn() { return false; } + inline bool DQBuf() { return false; } + inline bool QBuf(int __UNUSED__ *pfdSrcReleaseFence = NULL, int __UNUSED__ *pfdDstReleaseFence = NULL) { + return false; + } + +}; + +#endif //_LIBSCALER_M2M1SHOT_H_ diff --git a/libscaler/libscaler-swscaler.cpp b/libscaler/libscaler-swscaler.cpp new file mode 100644 index 0000000..515b11e --- /dev/null +++ b/libscaler/libscaler-swscaler.cpp @@ -0,0 +1,106 @@ +#include "libscaler-swscaler.h" + +void CScalerSW::Clear() { + m_pSrc[0] = NULL; + m_pSrc[1] = NULL; + m_pSrc[2] = NULL; + m_pDst[0] = NULL; + m_pDst[1] = NULL; + m_pDst[2] = NULL; + + m_nSrcLeft = 0; + m_nSrcTop = 0; + m_nSrcWidth = 0; + m_nSrcHeight = 0; + m_nSrcStride = 0; + m_nDstLeft = 0; + m_nDstTop = 0; + m_nDstWidth = 0; + m_nDstHeight = 0; + m_nDstStride = 0; +} + +bool CScalerSW_YUYV::Scale() { + if (((m_nSrcLeft | m_nSrcWidth | m_nDstWidth | m_nSrcStride) % 2) != 0) { + SC_LOGE("Width of YUV422 should be even"); + return false; + } + + unsigned int h_ratio = (m_nSrcWidth << 16) / m_nDstWidth; + unsigned int v_ratio = (m_nSrcHeight << 16) / m_nDstHeight; + + unsigned int src_x; + unsigned int src_y = m_nSrcTop << 16; + + // Luminance + Chrominance at once + for (unsigned int y = m_nDstTop; y < (m_nDstTop + m_nDstHeight); y++) { + src_x = m_nSrcLeft << 16; + for (unsigned int x = m_nDstLeft; x < (m_nDstLeft + m_nDstWidth); x++) { + m_pDst[0][y * (m_nDstStride * 2) + x * 2] = + m_pSrc[0][(src_y >> 16) * (m_nSrcStride * 2) + (src_x >> 16) * 2]; + + if (!(x & 1)) { + unsigned int cx = (src_x >> 16) & ~1; + + m_pDst[0][y * (m_nDstStride * 2) + x * 2 + 1] = + m_pSrc[0][(src_y >> 16) * (m_nSrcStride * 2) + cx * 2 + 1]; + m_pDst[0][y * (m_nDstStride * 2) + x * 2 + 3] = + m_pSrc[0][(src_y >> 16) * (m_nSrcStride * 2) + cx * 2 + 3]; + + } + + src_x = LibScaler::min(src_x + h_ratio, (m_nSrcLeft + m_nSrcWidth) << 16); + } + + src_y = LibScaler::min(src_y + v_ratio, (m_nSrcTop + m_nSrcHeight) << 16); + } + + return true; +} + +bool CScalerSW_NV12::Scale() { + if (((m_nSrcLeft | m_nSrcTop | m_nSrcWidth | m_nSrcHeight | m_nSrcStride | + m_nDstLeft | m_nDstTop | m_nDstWidth | m_nDstHeight | m_nDstStride) % 2) != 0) { + SC_LOGE("Both of width and height of YUV420 should be even"); + return false; + } + + unsigned int h_ratio = (m_nSrcWidth << 16) / m_nDstWidth; + unsigned int v_ratio = (m_nSrcHeight << 16) / m_nDstHeight; + + unsigned int src_x; + unsigned int src_y = m_nSrcTop << 16; + + // Luminance + for (unsigned int y = m_nDstTop; y < (m_nDstTop + 
m_nDstHeight); y++) { + src_x = m_nSrcLeft << 16; + for (unsigned int x = m_nDstLeft; x < (m_nDstLeft + m_nDstWidth); x++) { + m_pDst[0][y * m_nDstStride + x] = m_pSrc[0][(src_y >> 16) * m_nSrcStride + (src_x >> 16)]; + + src_x = LibScaler::min(src_x + h_ratio, (m_nSrcLeft + m_nSrcWidth) << 16); + } + + src_y = LibScaler::min(src_y + v_ratio, (m_nSrcTop + m_nSrcHeight) << 16); + } + + // Chrominance + + // change pointers to 1-byte to pointers to 2-byte storage. + unsigned short *src = reinterpret_cast(m_pSrc[1]); + unsigned short *dst = reinterpret_cast(m_pDst[1]); + + src_y = (m_nSrcTop / 2) << 16; + for (unsigned int y = m_nDstTop / 2; y < ((m_nDstTop + m_nDstHeight) / 2); y++) { + // Move 2 pixels at once (CbCr) + src_x = (m_nSrcLeft / 2) << 16; + for (unsigned int x = m_nDstLeft / 2; x < ((m_nDstLeft + m_nDstWidth) / 2); x++) { + dst[y * (m_nDstStride / 2) + x] = src[(src_y >> 16) * (m_nSrcStride / 2) + (src_x >> 16)]; + + src_x = LibScaler::min(src_x + h_ratio, ((m_nSrcLeft + m_nSrcWidth) / 2) << 16); + } + + src_y = LibScaler::min(src_y + v_ratio, ((m_nSrcTop + m_nSrcHeight) / 2) << 16); + } + + return true; +} diff --git a/libscaler/libscaler-swscaler.h b/libscaler/libscaler-swscaler.h new file mode 100644 index 0000000..085d744 --- /dev/null +++ b/libscaler/libscaler-swscaler.h @@ -0,0 +1,61 @@ +#ifndef __LIBSCALER_SWSCALER_H__ +#define __LIBSCALER_SWSCALER_H__ + +#include "libscaler-common.h" + +class CScalerSW { + protected: + char *m_pSrc[3]; + char *m_pDst[3]; + unsigned int m_nSrcLeft, m_nSrcTop; + unsigned int m_nSrcWidth, m_nSrcHeight; + unsigned int m_nSrcStride; + unsigned int m_nDstLeft, m_nDstTop; + unsigned int m_nDstWidth, m_nDstHeight; + unsigned int m_nDstStride; + public: + CScalerSW() { Clear(); } + virtual ~CScalerSW() { }; + void Clear(); + virtual bool Scale() = 0; + + void SetSrcRect(unsigned int left, unsigned int top, unsigned int width, unsigned int height, unsigned int stride) { + m_nSrcLeft = left; + m_nSrcTop = top; + m_nSrcWidth = width; + m_nSrcHeight = height; + m_nSrcStride = stride; + } + + void SetDstRect(unsigned int left, unsigned int top, unsigned int width, unsigned int height, unsigned int stride) { + m_nDstLeft = left; + m_nDstTop = top; + m_nDstWidth = width; + m_nDstHeight = height; + m_nDstStride = stride; + } +}; + +class CScalerSW_YUYV: public CScalerSW { + public: + CScalerSW_YUYV(char *src, char *dst) { + m_pSrc[0] = src; + m_pDst[0] = dst; + } + + virtual bool Scale(); +}; + +class CScalerSW_NV12: public CScalerSW { + public: + CScalerSW_NV12(char *src0, char *src1, char *dst0, char *dst1) { + m_pSrc[0] = src0; + m_pDst[0] = dst0; + m_pSrc[1] = src1; + m_pDst[1] = dst1; + } + + virtual bool Scale(); +}; + +#endif //__LIBSCALER_SWSCALER_H__ diff --git a/libscaler/libscaler-v4l2.cpp b/libscaler/libscaler-v4l2.cpp new file mode 100644 index 0000000..c46b41b --- /dev/null +++ b/libscaler/libscaler-v4l2.cpp @@ -0,0 +1,625 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file libscaler-v4l2.cpp + * \brief source file for Scaler HAL + * \author Cho KyongHo + * \date 2014/05/12 + * + * Revision History: + * - 2014.05.12 : Cho KyongHo (pullip.cho@samsung.com) \n + * Create + */ + +#include +#include +#include +#include +#include +#include + +#include "libscaler-v4l2.h" +#include "libscaler-swscaler.h" + +void CScalerV4L2::Initialize(int instance) +{ + snprintf(m_cszNode, SC_MAX_NODENAME, SC_DEV_NODE "%d", SC_NODE(instance)); + + m_fdScaler = exynos_v4l2_open(m_cszNode, O_RDWR); + if (m_fdScaler < 0) { + SC_LOGERR("Failed to open '%s'", m_cszNode); + return; + } + + unsigned int cap = V4L2_CAP_STREAMING | V4L2_CAP_VIDEO_OUTPUT_MPLANE | + V4L2_CAP_VIDEO_CAPTURE_MPLANE; + if (!exynos_v4l2_querycap(m_fdScaler, cap)) { + SC_LOGERR("Failed to query capture on '%s'", m_cszNode); + close(m_fdScaler); + m_fdScaler = -1; + } else { + m_fdValidate = -m_fdScaler; + } +} + +CScalerV4L2::CScalerV4L2(int instance, int allow_drm) +{ + m_fdScaler = -1; + m_iInstance = instance; + m_nRotDegree = 0; + m_fStatus = 0; + m_filter = 0; + + memset(&m_frmSrc, 0, sizeof(m_frmSrc)); + memset(&m_frmDst, 0, sizeof(m_frmDst)); + + m_frmSrc.fdAcquireFence = -1; + m_frmDst.fdAcquireFence = -1; + + m_frmSrc.name = "output"; + m_frmDst.name = "capture"; + + m_frmSrc.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + m_frmDst.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + + Initialize(instance); + + if(Valid()) { + if (allow_drm) + SetFlag(m_fStatus, SCF_ALLOW_DRM); + SC_LOGD("Successfully opened '%s'; returned fd %d; drmmode %s", + m_cszNode, m_fdScaler, allow_drm ? "enabled" : "disabled"); + } +} + +CScalerV4L2::~CScalerV4L2() +{ + if (m_fdScaler >= 0) + close(m_fdScaler); + + m_fdScaler = -1; +} + +bool CScalerV4L2::Stop() +{ + if (!ResetDevice(m_frmSrc)) { + SC_LOGE("Failed to stop Scaler for the output frame"); + return false; + } + + if (!ResetDevice(m_frmDst)) { + SC_LOGE("Failed to stop Scaler for the cature frame"); + return false; + } + + return true; +} + +bool CScalerV4L2::Run() +{ + if (LibScaler::UnderOne16thScaling( + m_frmSrc.crop.width, m_frmSrc.crop.height, + m_frmDst.crop.width, m_frmDst.crop.height, + m_nRotDegree)) + return RunSWScaling(); + + if (!DevSetCtrl()) + return false; + + if (!DevSetFormat()) + return false; + + if (!ReqBufs()) + return false; + + if (!StreamOn()) + return false; + + if (!QBuf()) { + Stop(); + return false; + } + + return DQBuf(); +} + +bool CScalerV4L2::SetCtrl() +{ + if (TestFlag(m_fStatus, SCF_DRM_FRESH)) { + if (!Stop()) + return false; + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_CONTENT_PROTECTION, TestFlag(m_fStatus, SCF_DRM)) < 0) { + SC_LOGERR("Failed configure V4L2_CID_CONTENT_PROTECTION to %d", TestFlag(m_fStatus, SCF_DRM)); + return false; + } + + ClearFlag(m_fStatus, SCF_DRM_FRESH); + } else { + SC_LOGD("Skipping DRM configuration"); + } + + if (TestFlag(m_fStatus, SCF_ROTATION_FRESH)) { + if (!Stop()) + return false; + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_ROTATE, m_nRotDegree) < 0) { + SC_LOGERR("Failed V4L2_CID_ROTATE with degree %d", m_nRotDegree); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_VFLIP, TestFlag(m_fStatus, SCF_HFLIP)) < 0) { + SC_LOGERR("Failed V4L2_CID_VFLIP - %d", TestFlag(m_fStatus, SCF_VFLIP)); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_HFLIP, TestFlag(m_fStatus, SCF_VFLIP)) < 0) { + SC_LOGERR("Failed V4L2_CID_HFLIP - %d", TestFlag(m_fStatus, 
SCF_HFLIP)); + return false; + } + + SC_LOGD("Successfully set CID_ROTATE(%d), CID_VFLIP(%d) and CID_HFLIP(%d)", + m_nRotDegree, TestFlag(m_fStatus, SCF_VFLIP), TestFlag(m_fStatus, SCF_HFLIP)); + ClearFlag(m_fStatus, SCF_ROTATION_FRESH); + } else { + SC_LOGD("Skipping rotation and flip setting due to no change"); + } + + if (m_filter > 0) { + if (!Stop()) + return false; + + if (exynos_v4l2_s_ctrl(m_fdScaler, LIBSC_V4L2_CID_DNOISE_FT, + m_filter) < 0) { + SC_LOGERR("Failed LIBSC_V4L2_CID_DNOISE_FT to %d", m_filter); + return false; + } + } + + if (TestFlag(m_fStatus, SCF_CSC_FRESH)) { + if (!Stop()) + return false; + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_CSC_RANGE, + TestFlag(m_fStatus, SCF_CSC_WIDE) ? 1 : 0) < 0) { + SC_LOGERR("Failed V4L2_CID_CSC_RANGE to %d", TestFlag(m_fStatus, SCF_CSC_WIDE)); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_CSC_EQ, + m_colorspace) < 0) { + SC_LOGERR("Failed V4L2_CID_CSC_EQ to %d", m_colorspace); + } + ClearFlag(m_fStatus, SCF_CSC_FRESH); + } + + return true; +} + +bool CScalerV4L2::DevSetCtrl() +{ + return SetCtrl(); +} + +bool CScalerV4L2::ResetDevice(FrameInfo &frm) +{ + DQBuf(frm); + + if (TestFlag(frm.flags, SCFF_STREAMING)) { + if (exynos_v4l2_streamoff(m_fdScaler, frm.type) < 0 ) { + SC_LOGERR("Failed STREAMOFF for the %s", frm.name); + } + ClearFlag(frm.flags, SCFF_STREAMING); + } + + SC_LOGD("VIDIC_STREAMOFF is successful for the %s", frm.name); + + if (TestFlag(frm.flags, SCFF_REQBUFS)) { + v4l2_requestbuffers reqbufs; + memset(&reqbufs, 0, sizeof(reqbufs)); + reqbufs.type = frm.type; + reqbufs.memory = frm.memory; + if (exynos_v4l2_reqbufs(m_fdScaler, &reqbufs) < 0 ) { + SC_LOGERR("Failed to REQBUFS(0) for the %s", frm.name); + } + + ClearFlag(frm.flags, SCFF_REQBUFS); + } + + SC_LOGD("VIDIC_REQBUFS(0) is successful for the %s", frm.name); + + return true; +} + +bool CScalerV4L2::DevSetFormat(FrameInfo &frm) +{ + + if (!TestFlag(frm.flags, SCFF_BUF_FRESH)) { + SC_LOGD("Skipping S_FMT for the %s since it is already done", frm.name); + return true; + } + + if (!ResetDevice(frm)) { + SC_LOGE("Failed to VIDIOC_S_FMT for the %s", frm.name); + return false; + } + + v4l2_format fmt; + memset(&fmt, 0, sizeof(fmt)); + fmt.type = frm.type; + fmt.fmt.pix_mp.pixelformat = frm.color_format; + fmt.fmt.pix_mp.width = frm.width; + fmt.fmt.pix_mp.height = frm.height; + + if (TestFlag(frm.flags, SCFF_PREMULTIPLIED)) { +#ifdef SCALER_USE_PREMUL_FMT + fmt.fmt.pix_mp.flags = V4L2_PIX_FMT_FLAG_PREMUL_ALPHA; +#else + fmt.fmt.pix_mp.reserved[SC_V4L2_FMT_PREMULTI_FLAG] = 1; +#endif + } + + if (exynos_v4l2_s_fmt(m_fdScaler, &fmt) < 0) { + SC_LOGERR("Failed S_FMT(fmt: %d, w:%d, h:%d) for the %s", + fmt.fmt.pix_mp.pixelformat, fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height, + frm.name); + return false; + } + + // returned fmt.fmt.pix_mp.num_planes and fmt.fmt.pix_mp.plane_fmt[i].sizeimage + frm.out_num_planes = fmt.fmt.pix_mp.num_planes; + + for (int i = 0; i < frm.out_num_planes; i++) + frm.out_plane_size[i] = fmt.fmt.pix_mp.plane_fmt[i].sizeimage; + + v4l2_crop crop; + crop.type = frm.type; + crop.c = frm.crop; + + if (exynos_v4l2_s_crop(m_fdScaler, &crop) < 0) { + SC_LOGERR("Failed S_CROP(fmt: %d, l:%d, t:%d, w:%d, h:%d) for the %s", + crop.type, crop.c.left, crop.c.top, crop.c.width, crop.c.height, + frm.name); + return false; + } + + if (frm.out_num_planes > SC_MAX_PLANES) { + SC_LOGE("Number of planes exceeds %d of %s", frm.out_num_planes, frm.name); + return false; + } + + ClearFlag(frm.flags, SCFF_BUF_FRESH); + + SC_LOGD("Successfully 
S_FMT and S_CROP for the %s", frm.name); + + return true; +} + +bool CScalerV4L2::DevSetFormat() +{ + if (!DevSetFormat(m_frmSrc)) + return false; + + return DevSetFormat(m_frmDst); +} + +bool CScalerV4L2::QBuf(FrameInfo &frm, int *pfdReleaseFence) +{ + v4l2_buffer buffer; + v4l2_plane planes[SC_MAX_PLANES]; + + if (!TestFlag(frm.flags, SCFF_REQBUFS)) { + SC_LOGE("Trying to QBUF without REQBUFS for %s is not allowed", + frm.name); + return false; + } + + if (!DQBuf(frm)) + return false; + + memset(&buffer, 0, sizeof(buffer)); + memset(&planes, 0, sizeof(planes)); + + buffer.type = frm.type; + buffer.memory = frm.memory; + buffer.index = 0; + buffer.length = frm.out_num_planes; + + if (pfdReleaseFence) { + buffer.flags = V4L2_BUF_FLAG_USE_SYNC; + buffer.reserved = frm.fdAcquireFence; + } + + buffer.m.planes = planes; + for (unsigned long i = 0; i < buffer.length; i++) { + planes[i].length = frm.out_plane_size[i]; + if (V4L2_TYPE_IS_OUTPUT(buffer.type)) + planes[i].bytesused = planes[i].length; + if (buffer.memory == V4L2_MEMORY_DMABUF) + planes[i].m.fd = static_cast<__s32>(reinterpret_cast(frm.addr[i])); + else + planes[i].m.userptr = reinterpret_cast(frm.addr[i]); + } + + + if (exynos_v4l2_qbuf(m_fdScaler, &buffer) < 0) { + SC_LOGERR("Failed to QBUF for the %s", frm.name); + return false; + } + + SetFlag(frm.flags, SCFF_QBUF); + + if (pfdReleaseFence) { + if (frm.fdAcquireFence >= 0) + close(frm.fdAcquireFence); + frm.fdAcquireFence = -1; + + *pfdReleaseFence = static_cast(buffer.reserved); + } + + SC_LOGD("Successfully QBUF for the %s", frm.name); + + return true; +} + +bool CScalerV4L2::ReqBufs(FrameInfo &frm) +{ + v4l2_requestbuffers reqbufs; + + if (TestFlag(frm.flags, SCFF_REQBUFS)) { + SC_LOGD("Skipping REQBUFS for the %s since it is already done", frm.name); + return true; + } + + memset(&reqbufs, 0, sizeof(reqbufs)); + + reqbufs.type = frm.type; + reqbufs.memory = frm.memory; + reqbufs.count = 1; + + if (exynos_v4l2_reqbufs(m_fdScaler, &reqbufs) < 0) { + SC_LOGERR("Failed to REQBUFS for the %s", frm.name); + return false; + } + + SetFlag(frm.flags, SCFF_REQBUFS); + + SC_LOGD("Successfully REQBUFS for the %s", frm.name); + + return true; +} + +bool CScalerV4L2::SetRotate(int rot, int flip_h, int flip_v) +{ + if ((rot % 90) != 0) { + SC_LOGE("Rotation of %d degree is not supported", rot); + return false; + } + + SetRotDegree(rot); + + if (flip_h) + SetFlag(m_fStatus, SCF_VFLIP); + else + ClearFlag(m_fStatus, SCF_VFLIP); + + if (flip_v) + SetFlag(m_fStatus, SCF_HFLIP); + else + ClearFlag(m_fStatus, SCF_HFLIP); + + SetFlag(m_fStatus, SCF_ROTATION_FRESH); + + return true; +} + +bool CScalerV4L2::StreamOn(FrameInfo &frm) +{ + if (!TestFlag(frm.flags, SCFF_REQBUFS)) { + SC_LOGE("Trying to STREAMON without REQBUFS for %s is not allowed", + frm.name); + return false; + } + + if (!TestFlag(frm.flags, SCFF_STREAMING)) { + if (exynos_v4l2_streamon(m_fdScaler, frm.type) < 0 ) { + SC_LOGERR("Failed StreamOn for the %s", frm.name); + return false; + } + + SetFlag(frm.flags, SCFF_STREAMING); + + SC_LOGD("Successfully VIDIOC_STREAMON for the %s", frm.name); + } + + return true; +} + +bool CScalerV4L2::DQBuf(FrameInfo &frm) +{ + if (!TestFlag(frm.flags, SCFF_QBUF)) + return true; + + v4l2_buffer buffer; + v4l2_plane plane[SC_NUM_OF_PLANES]; + + memset(&buffer, 0, sizeof(buffer)); + + buffer.type = frm.type; + buffer.memory = frm.memory; + + if (V4L2_TYPE_IS_MULTIPLANAR(buffer.type)) { + memset(plane, 0, sizeof(plane)); + + buffer.length = frm.out_num_planes; + buffer.m.planes = plane; + } + + 
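+    /*
+     * The SCFF_QBUF flag below is cleared before the actual dequeue so a
+     * failed DQBUF does not leave the frame marked as still queued.
+     * For multi-planar buffer types V4L2 expects the caller to hand in the
+     * plane array before VIDIOC_DQBUF so the driver can report per-plane
+     * byte counts.  exynos_v4l2_dqbuf() is assumed to be a thin wrapper
+     * around the raw ioctl; a minimal equivalent sketch (fd and num_planes
+     * are hypothetical):
+     *
+     *   struct v4l2_buffer buf = {};
+     *   struct v4l2_plane  planes[SC_MAX_PLANES] = {};
+     *   buf.type     = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+     *   buf.memory   = V4L2_MEMORY_DMABUF;
+     *   buf.length   = num_planes;
+     *   buf.m.planes = planes;
+     *   if (ioctl(fd, VIDIOC_DQBUF, &buf) < 0)
+     *       SC_LOGERR("Failed to DQBUF");
+     */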
ClearFlag(frm.flags, SCFF_QBUF); + + if (exynos_v4l2_dqbuf(m_fdScaler, &buffer) < 0 ) { + SC_LOGERR("Failed to DQBuf the %s", frm.name); + return false; + } + + if (buffer.flags & V4L2_BUF_FLAG_ERROR) { + SC_LOGE("Error occurred while processing streaming data"); + return false; + } + + SC_LOGD("Successfully VIDIOC_DQBUF for the %s", frm.name); + + return true; +} + +static bool GetBuffer(CScalerV4L2::FrameInfo &frm, char *addr[]) +{ + for (int i = 0; i < frm.out_num_planes; i++) { + if (frm.memory == V4L2_MEMORY_DMABUF) { + addr[i] = reinterpret_cast(mmap(NULL, frm.out_plane_size[i], + PROT_READ | PROT_WRITE, MAP_SHARED, + static_cast(reinterpret_cast(frm.addr[i])), 0)); + if (addr[i] == MAP_FAILED) { + SC_LOGE("Failed to map FD %ld", reinterpret_cast(frm.addr[i])); + while (i-- > 0) + munmap(addr[i], frm.out_plane_size[i]); + return false; + } + } else { + addr[i] = reinterpret_cast(frm.addr[i]); + } + } + + return true; +} + +static void PutBuffer(CScalerV4L2::FrameInfo &frm, char *addr[]) +{ + for (int i = 0; i < frm.out_num_planes; i++) { + if (frm.memory == V4L2_MEMORY_DMABUF) { + munmap(addr[i], frm.out_plane_size[i]); + } + } +} + +bool CScalerV4L2::RunSWScaling() +{ + if (m_frmSrc.color_format != m_frmDst.color_format) { + SC_LOGE("Source and target image format must be the same"); + return false; + } + + if (m_nRotDegree != 0) { + SC_LOGE("Rotation is not allowed for S/W Scaling"); + return false; + } + + SC_LOGI("Running S/W Scaler: %dx%d -> %dx%d", + m_frmSrc.crop.width, m_frmSrc.crop.height, + m_frmDst.crop.width, m_frmDst.crop.height); + + CScalerSW *swsc; + char *src[3], *dst[3]; + + switch (m_frmSrc.color_format) { + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_YVYU: + m_frmSrc.out_num_planes = 1; + m_frmSrc.out_plane_size[0] = m_frmSrc.width * m_frmSrc.height * 2; + m_frmDst.out_num_planes = 1; + m_frmDst.out_plane_size[0] = m_frmDst.width * m_frmDst.height * 2; + + if (!GetBuffer(m_frmSrc, src)) + return false; + + if (!GetBuffer(m_frmDst, dst)) { + PutBuffer(m_frmSrc, src); + return false; + } + + swsc = new CScalerSW_YUYV(src[0], dst[0]); + break; + case V4L2_PIX_FMT_NV12M: + case V4L2_PIX_FMT_NV21M: + m_frmSrc.out_num_planes = 2; + m_frmDst.out_num_planes = 2; + m_frmSrc.out_plane_size[0] = m_frmSrc.width * m_frmSrc.height; + m_frmDst.out_plane_size[0] = m_frmDst.width * m_frmDst.height; + m_frmSrc.out_plane_size[1] = m_frmSrc.out_plane_size[0] / 2; + m_frmDst.out_plane_size[1] = m_frmDst.out_plane_size[0] / 2; + + if (!GetBuffer(m_frmSrc, src)) + return false; + + if (!GetBuffer(m_frmDst, dst)) { + PutBuffer(m_frmSrc, src); + return false; + } + + swsc = new CScalerSW_NV12(src[0], src[1], dst[0], dst[1]); + break; + case V4L2_PIX_FMT_NV12: + case V4L2_PIX_FMT_NV21: + m_frmSrc.out_num_planes = 1; + m_frmDst.out_num_planes = 1; + m_frmSrc.out_plane_size[0] = m_frmSrc.width * m_frmSrc.height; + m_frmDst.out_plane_size[0] = m_frmDst.width * m_frmDst.height; + m_frmSrc.out_plane_size[0] += m_frmSrc.out_plane_size[0] / 2; + m_frmDst.out_plane_size[0] += m_frmDst.out_plane_size[0] / 2; + + if (!GetBuffer(m_frmSrc, src)) + return false; + + if (!GetBuffer(m_frmDst, dst)) { + PutBuffer(m_frmSrc, src); + return false; + } + + src[1] = src[0] + m_frmSrc.width * m_frmSrc.height; + dst[1] = dst[0] + m_frmDst.width * m_frmDst.height; + + swsc = new CScalerSW_NV12(src[0], src[1], dst[0], dst[1]); + break; + case V4L2_PIX_FMT_UYVY: // TODO: UYVY is not implemented yet. 
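+    /*
+     * V4L2_PIX_FMT_UYVY intentionally falls through to the unsupported
+     * default below until a software UYVY scaler is implemented.
+     * The plane sizes computed above follow the usual 4:2:2 / 4:2:0
+     * arithmetic; for a hypothetical 1920x1080 frame:
+     *
+     *   YUYV  : 1 plane,  1920 * 1080 * 2   = 4147200 bytes
+     *   NV12M : 2 planes, 1920 * 1080       = 2073600 bytes (Y)
+     *                     2073600 / 2       = 1036800 bytes (CbCr)
+     *   NV12  : 1 plane,  2073600 + 1036800 = 3110400 bytes
+     */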
+ default: + SC_LOGE("Format %x is not supported", m_frmSrc.color_format); + return false; + } + + if (swsc == NULL) { + SC_LOGE("Failed to allocate SW Scaler"); + PutBuffer(m_frmSrc, src); + PutBuffer(m_frmDst, dst); + return false; + } + + swsc->SetSrcRect(m_frmSrc.crop.left, m_frmSrc.crop.top, + m_frmSrc.crop.width, m_frmSrc.crop.height, m_frmSrc.width); + + swsc->SetDstRect(m_frmDst.crop.left, m_frmDst.crop.top, + m_frmDst.crop.width, m_frmDst.crop.height, m_frmDst.width); + + bool ret = swsc->Scale(); + + delete swsc; + + PutBuffer(m_frmSrc, src); + PutBuffer(m_frmDst, dst); + + return ret; +} diff --git a/libscaler/libscaler-v4l2.h b/libscaler/libscaler-v4l2.h new file mode 100644 index 0000000..2b70b2e --- /dev/null +++ b/libscaler/libscaler-v4l2.h @@ -0,0 +1,305 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file libscaler-v4l2.h + * \brief source file for Scaler HAL + * \author Cho KyongHo + * \date 2014/05/12 + * + * Revision History: + * - 2014.05.12 : Cho KyongHo (pullip.cho@samsung.com) \n + * Create + */ +#ifndef _LIBSCALER_V4L2_H_ +#define _LIBSCALER_V4L2_H_ + +#include + +#include + +#include "libscaler-common.h" + +class CScalerV4L2 { +public: + enum { SC_MAX_PLANES = SC_NUM_OF_PLANES }; + enum { SC_MAX_NODENAME = 14 }; + enum { SC_V4L2_FMT_PREMULTI_FLAG = 10 }; + + enum SC_FRAME_FLAG { + // frame status + SCFF_BUF_FRESH = 0, + // h/w setting + SCFF_CACHEABLE, + SCFF_PREMULTIPLIED, + // v4l2 status + SCFF_REQBUFS, + SCFF_QBUF, + SCFF_STREAMING, + }; + + enum SC_FLAG { + SCF_RESERVED = 0, + // session status + SCF_ROTATION_FRESH, + SCF_CSC_FRESH, + SCF_DRM_FRESH, + // h/w setting setting + SCF_HFLIP, + SCF_VFLIP, + SCF_DRM, + SCF_ALLOW_DRM, + SCF_CSC_WIDE, + SCF_SRC_BLEND, + }; + + struct FrameInfo { + const char *name; + v4l2_buf_type type; + unsigned int width, height; + v4l2_rect crop; + unsigned int color_format; + void *addr[SC_MAX_PLANES]; + int fdAcquireFence; + enum v4l2_memory memory; + int out_num_planes; + unsigned long out_plane_size[SC_MAX_PLANES]; + unsigned long flags; // enum SC_FRAME_FLAG + }; + +private: + FrameInfo m_frmSrc; + FrameInfo m_frmDst; + + unsigned int m_nRotDegree; + char m_cszNode[SC_MAX_NODENAME]; // /dev/videoXX + int m_iInstance; + + int m_fdValidate; + + unsigned int m_filter; + unsigned int m_colorspace; + + void Initialize(int instance); + bool ResetDevice(FrameInfo &frm); + + inline void SetRotDegree(int rot) { + rot = rot % 360; + if (rot < 0) + rot = 360 + rot; + + m_nRotDegree = rot; + SetFlag(m_fStatus, SCF_ROTATION_FRESH); + } + + bool DevSetFormat(FrameInfo &frm); + bool ReqBufs(FrameInfo &frm); + bool QBuf(FrameInfo &frm, int *pfdReleaseFence); + bool StreamOn(FrameInfo &frm); + bool DQBuf(FrameInfo &frm); + + inline bool SetFormat(FrameInfo &frm, unsigned int width, unsigned int height, + unsigned int v4l2_colorformat) { + frm.color_format = v4l2_colorformat; + frm.width = width; + frm.height = 
height; + SetFlag(frm.flags, SCFF_BUF_FRESH); + return true; + } + + inline bool SetCrop(FrameInfo &frm, unsigned int left, unsigned int top, + unsigned int width, unsigned int height) { + frm.crop.left = left; + frm.crop.top = top; + frm.crop.width = width; + frm.crop.height = height; + SetFlag(frm.flags, SCFF_BUF_FRESH); + return true; + } + + inline void SetPremultiplied(FrameInfo &frm, unsigned int premultiplied) { + if (premultiplied) + SetFlag(frm.flags, SCFF_PREMULTIPLIED); + else + ClearFlag(frm.flags, SCFF_PREMULTIPLIED); + } + + inline void SetCacheable(FrameInfo &frm, bool __UNUSED__ cacheable) { + SetFlag(frm.flags, SCFF_CACHEABLE); + } + + inline void SetAddr(FrameInfo &frm, void *addr[SC_NUM_OF_PLANES], int mem_type, int fence) + { + for (int i = 0; i < SC_MAX_PLANES; i++) + frm.addr[i] = addr[i]; + + frm.memory = static_cast(mem_type); + frm.fdAcquireFence = fence; + } + + bool RunSWScaling(); + +protected: + unsigned long m_fStatus; // enum SC_FLAG + + int m_fdScaler; + + inline void SetFlag(unsigned long &flags, unsigned long flag) { + flags |= (1 << flag); + } + + inline void ClearFlag(unsigned long &flags, unsigned long flag) { + flags &= ~(1 << flag); + } + + inline bool TestFlag(unsigned long &flags, unsigned long flag) { + return (flags & (1 << flag)) != 0; + } + + +public: + inline bool Valid() { return (m_fdScaler >= 0) && (m_fdScaler == -m_fdValidate); } + + CScalerV4L2(int instance, int allow_drm = 0); + virtual ~CScalerV4L2(); + + bool SetCtrl(); + + inline bool IsDRMAllowed() { return TestFlag(m_fStatus, SCF_ALLOW_DRM); } + inline int GetScalerID() { return m_iInstance; } + + bool Stop(); + bool Run(); // Blocking mode + + // H/W Control + virtual bool DevSetCtrl(); + bool DevSetFormat(); + + inline bool ReqBufs() { + if (!ReqBufs(m_frmSrc)) + return false; + + return ReqBufs(m_frmDst); + } + + inline bool QBuf(int *pfdSrcReleaseFence = NULL, int *pfdDstReleaseFence = NULL) { + if (!QBuf(m_frmSrc, pfdSrcReleaseFence)) + return false; + + if (!QBuf(m_frmDst, pfdDstReleaseFence)) { + ClearFlag(m_frmSrc.flags, SCFF_QBUF); + return false; + } + return true; + } + + inline bool StreamOn() { + if (!StreamOn(m_frmSrc)) + return false; + + return StreamOn(m_frmDst); + } + + inline bool DQBuf() { + if (!DQBuf(m_frmSrc)) + return false; + + return DQBuf(m_frmDst); + } + + inline bool SetSrcFormat(unsigned int width, unsigned int height, + unsigned int v4l2_colorformat) { + return SetFormat(m_frmSrc, width, height, v4l2_colorformat); + } + + inline bool SetDstFormat(unsigned int width, unsigned int height, + unsigned int v4l2_colorformat) { + return SetFormat(m_frmDst, width, height, v4l2_colorformat); + } + + inline bool SetSrcCrop(unsigned int left, unsigned int top, + unsigned int width, unsigned int height) { + return SetCrop(m_frmSrc, left, top, width, height); + } + + inline bool SetDstCrop(unsigned int left, unsigned int top, + unsigned int width, unsigned int height) { + return SetCrop(m_frmDst, left, top, width, height); + } + + inline void SetDRM(bool drm) { + if (drm != TestFlag(m_fStatus, SCF_DRM)) { + if (drm) + SetFlag(m_fStatus, SCF_DRM); + else + ClearFlag(m_fStatus, SCF_DRM); + SetFlag(m_fStatus, SCF_DRM_FRESH); + } + } + + inline void SetCSCWide(bool wide) { + if (wide) + SetFlag(m_fStatus, SCF_CSC_WIDE); + else + ClearFlag(m_fStatus, SCF_CSC_WIDE); + + SetFlag(m_fStatus, SCF_CSC_FRESH); + } + + inline void SetCSCEq(unsigned int v4l2_colorspace) { + if (v4l2_colorspace == V4L2_COLORSPACE_SMPTE170M) + m_colorspace = V4L2_COLORSPACE_DEFAULT; + else + 
m_colorspace = v4l2_colorspace; + SetFlag(m_fStatus, SCF_CSC_FRESH); + } + + inline void SetFilter(unsigned int filter) { + m_filter = filter; + } + + inline void SetSrcCacheable(bool cacheable) { + return SetCacheable(m_frmSrc, cacheable); + } + + inline void SetDstCacheable(bool cacheable) { + return SetCacheable(m_frmDst, cacheable); + } + + inline void SetSrcPremultiplied(bool premultiplied) { + return SetPremultiplied(m_frmSrc, premultiplied); + } + + inline void SetDstPremultiplied(bool premultiplied) { + return SetPremultiplied(m_frmDst, premultiplied); + } + + // Parameter Extraction + bool SetRotate(int rot, int flip_h, int flip_v); + + inline bool SetSrcAddr(void *addr[SC_NUM_OF_PLANES], int mem_type, int fence = -1) { + SetAddr(m_frmSrc, addr, mem_type, fence); + return true; + } + + inline bool SetDstAddr(void *addr[SC_NUM_OF_PLANES], int mem_type, int fence = -1) { + SetAddr(m_frmDst, addr, mem_type, fence); + return true; + } +}; + +#endif //_LIBSCALER_V4L2_H_ diff --git a/libscaler/libscaler.cpp b/libscaler/libscaler.cpp new file mode 100644 index 0000000..9ae68fc --- /dev/null +++ b/libscaler/libscaler.cpp @@ -0,0 +1,636 @@ + /* + * Copyright (C) 2013 The Android Open Source Project + * Copyright@ Samsung Electronics Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file libscaler.cpp + * \brief source file for Scaler HAL + * \author Sunyoung Kang (sy0816.kang@samsung.com) + * \date 2013/02/01 + * + * Revision History: + * - 2013.02.01 : Sunyoung Kang (sy0816.kang@samsung.com) \n + * Create + * - 2013.04.10 : Cho KyongHo (pullip.cho@samsung.com) \n + * Refactoring + * + */ + +#include +#include +#include +#include +#include + +#include "exynos_scaler.h" + +#include "libscaler-common.h" +#include "libscalerblend-v4l2.h" +#include "libscaler-v4l2.h" +#include "libscaler-m2m1shot.h" + +static bool find_pixel(unsigned int sc_pxfmt, unsigned int __out *v4l2_pxfmt) +{ + const static unsigned int sc_fmt_tbl[][2] = { + {EXYNOS_SC_FMT_RGB32, V4L2_PIX_FMT_RGB32}, + {EXYNOS_SC_FMT_BGR32, V4L2_PIX_FMT_BGR32}, + {EXYNOS_SC_FMT_RGB565, V4L2_PIX_FMT_RGB565}, + {EXYNOS_SC_FMT_RGB555X, V4L2_PIX_FMT_RGB555X}, + {EXYNOS_SC_FMT_RGB444, V4L2_PIX_FMT_RGB444}, + }; + + for (size_t i = 0; i < ARRSIZE(sc_fmt_tbl); i++) { + if (sc_pxfmt == sc_fmt_tbl[i][0]) { + *v4l2_pxfmt = sc_fmt_tbl[i][1]; + return true; + } + } + + SC_LOGE("Unknown format value %d", sc_pxfmt); + + return false; +} + +bool exynos_sc_copy_pixels(exynos_sc_pxinfo *pxinfo, int dev_num) +{ + unsigned int srcfmt; + unsigned int dstfmt; + + CScalerM2M1SHOT sc(dev_num); + + if (!sc.Valid()) + return false; + + if (!find_pixel(pxinfo->src.pxfmt, &srcfmt)) + return false; + + if (!find_pixel(pxinfo->dst.pxfmt, &dstfmt)) + return false; + + if (!sc.SetSrcFormat(pxinfo->src.width, pxinfo->src.height, srcfmt)) + return false; + + if (!sc.SetDstFormat(pxinfo->dst.width, pxinfo->dst.height, dstfmt)) + return false; + + if (!sc.SetSrcCrop(pxinfo->src.crop_left, pxinfo->src.crop_top, + pxinfo->src.crop_width, pxinfo->src.crop_height)) + return false; + + if (!sc.SetDstCrop(pxinfo->dst.crop_left, pxinfo->dst.crop_top, + pxinfo->dst.crop_width, pxinfo->dst.crop_height)) + return false; + + if (!sc.SetRotate(pxinfo->rotate, pxinfo->hflip, pxinfo->vflip)) + return false; + + // the first argument ot CScalerM2M1SHOT.SetXXXAddr() must be void *[3] + // it is safe to pass void *[1] which is not an array actually + // because CScalerM2M1SHOT.SetAddr() just accesses the array elements + // that are used for the specified format and this function just specifies + // RGB formats with one planar. 
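+    /*
+     * Caller-side sketch (struct and field names are taken from the
+     * accesses in this function; the concrete values and the src_buf /
+     * dst_buf pointers are only illustrative):
+     *
+     *   exynos_sc_pxinfo info = {};
+     *   info.src.pxfmt = EXYNOS_SC_FMT_RGB32;
+     *   info.src.width = 1280;      info.src.height = 720;
+     *   info.src.crop_left = 0;     info.src.crop_top = 0;
+     *   info.src.crop_width = 1280; info.src.crop_height = 720;
+     *   info.src.addr = src_buf;
+     *   // info.dst is filled the same way for the target image
+     *   info.dst.addr = dst_buf;
+     *   info.rotate = 0; info.hflip = 0; info.vflip = 0;
+     *   if (!exynos_sc_copy_pixels(&info, 0))
+     *       SC_LOGE("pixel copy failed");
+     */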
+ void *addr[SC_NUM_OF_PLANES]; + for (size_t i = 1; i < SC_NUM_OF_PLANES; i++) + addr[i] = NULL; + + addr[0] = pxinfo->src.addr; + if (!sc.SetSrcAddr(addr, V4L2_MEMORY_USERPTR)) + return false; + + addr[0] = pxinfo->dst.addr; + if (!sc.SetDstAddr(addr, V4L2_MEMORY_USERPTR)) + return false; + + return sc.Run(); +} + +#ifdef SCALER_USE_M2M1SHOT +typedef CScalerM2M1SHOT CScalerNonStream; +#else +typedef CScalerV4L2 CScalerNonStream; +#endif + +static CScalerNonStream *GetNonStreamScaler(void *handle) +{ + if (handle == NULL) { + SC_LOGE("NULL Scaler handle"); + return NULL; + } + + CScalerNonStream *sc = reinterpret_cast(handle); + if (!sc->Valid()) { + SC_LOGE("Invalid Scaler handle %p", handle); + return NULL; + } + + return sc; +} + +void *exynos_sc_create(int dev_num) +{ + CScalerNonStream *sc = new CScalerNonStream(dev_num); + + if (!sc) { + SC_LOGE("Failed to allocate a Scaler handle for instance %d", dev_num); + return NULL; + } + + if (!sc->Valid()) { + SC_LOGE("Failed to create a Scaler handle for instance %d", dev_num); + delete sc; + return NULL; + } + + return reinterpret_cast(sc); +} + +int exynos_sc_destroy(void *handle) +{ + int ret = 0; + + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + if (!sc->Stop()) { + SC_LOGE("Failed to stop Scaler (handle %p)", handle); + ret = -1; + } + + delete sc; + + return ret; +} + +int exynos_sc_set_csc_property( + void *handle, + unsigned int csc_range, + unsigned int v4l2_colorspace, + unsigned int filter) + +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + sc->SetCSCWide(csc_range); + sc->SetCSCEq(v4l2_colorspace); + sc->SetFilter(filter); + + return 0; +} + +int exynos_sc_set_src_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm, + unsigned int premultiplied) +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + sc->SetSrcPremultiplied(premultiplied != 0); + sc->SetSrcCacheable(cacheable != 0); + sc->SetDRM(mode_drm != 0); + + if (!sc->SetSrcFormat(width, height, v4l2_colorformat)) + return -1; + + return sc->SetSrcCrop(crop_left, crop_top, crop_width, crop_height) ? 0 : -1; +} + +int exynos_sc_set_dst_format( + void *handle, + unsigned int width, + unsigned int height, + unsigned int crop_left, + unsigned int crop_top, + unsigned int crop_width, + unsigned int crop_height, + unsigned int v4l2_colorformat, + unsigned int cacheable, + unsigned int mode_drm, + unsigned int premultiplied) +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + sc->SetDstPremultiplied(premultiplied != 0); + sc->SetDstCacheable(cacheable != 0); + sc->SetDRM(mode_drm != 0); + + if (!sc->SetDstFormat(width, height, v4l2_colorformat)) + return -1; + + if (!sc->SetDstCrop(crop_left, crop_top, crop_width, crop_height)) + return -1; + + return 0; +} + +int exynos_sc_set_rotation( + void *handle, + int rot, + int flip_h, + int flip_v) +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + return sc->SetRotate(rot, flip_h, flip_v) ? 
0 : -1; +} + +int exynos_sc_set_src_addr( + void *handle, + void *addr[SC_NUM_OF_PLANES], + int mem_type, + int __UNUSED__ acquireFenceFd) +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + // acquireFenceFd is ignored by blocking mode + return sc->SetSrcAddr(addr, mem_type) ? 0 : -1; +} + +int exynos_sc_set_dst_addr( + void *handle, + void *addr[SC_NUM_OF_PLANES], + int mem_type, + int __UNUSED__ acquireFenceFd) +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + // acquireFenceFd is ignored by blocking mode + return sc->SetDstAddr(addr, mem_type) ? 0 : -1; +} + +int exynos_sc_convert(void *handle) +{ + CScalerNonStream *sc = GetNonStreamScaler(handle); + if (!sc) + return -1; + + return sc->Run() ? 0 : -1; +} + +static CScalerBlendV4L2 *GetScalerBlend(void *handle) +{ + if (handle == NULL) { + SC_LOGE("NULL Scaler handle"); + return NULL; + } + + CScalerBlendV4L2 *sc = reinterpret_cast(handle); + if (!sc->Valid()) { + SC_LOGE("Invalid Scaler handle %p", handle); + return NULL; + } + + return sc; +} + +static CScalerV4L2 *GetScaler(void *handle) +{ + if (handle == NULL) { + SC_LOGE("NULL Scaler handle"); + return NULL; + } + + CScalerV4L2 *sc = reinterpret_cast(handle); + if (!sc->Valid()) { + SC_LOGE("Invalid Scaler handle %p", handle); + return NULL; + } + + return sc; +} + +void *exynos_sc_create_exclusive( + int dev_num, + int allow_drm + ) +{ + CScalerV4L2 *sc; + + sc = new CScalerV4L2(dev_num, allow_drm); + if (!sc) { + SC_LOGE("Failed to allocate a Scaler handle for instance %d", dev_num); + return NULL; + } + + if (!sc->Valid()) { + SC_LOGE("Failed to create a Scaler handle for instance %d", dev_num); + delete sc; + return NULL; + } + + SC_LOGD("Scaler %d is successfully created", dev_num); + return reinterpret_cast(sc); +} + +int exynos_sc_free_and_close(void *handle) +{ + CScalerV4L2 *sc = GetScaler(handle); + if (!sc) + return -1; + + int ret = 0; + if (!sc->Stop()) { + SC_LOGE("Failed to stop Scaler (handle %p)", handle); + ret = -1; + } + + delete sc; + + return ret; +} + +int exynos_sc_stop_exclusive(void *handle) +{ + CScalerV4L2 *sc = GetScaler(handle); + if (!sc) + return -1; + + if (!sc->Stop()) { + SC_LOGE("Failed to stop Scaler (handle %p)", handle); + return -1; + } + + return 0; +} + +int exynos_sc_csc_exclusive(void *handle, + unsigned int range_full, + unsigned int v4l2_colorspace) +{ + CScalerV4L2 *sc = GetScaler(handle); + if (!sc) + return -1; + + sc->SetCSCWide(range_full); + sc->SetCSCEq(v4l2_colorspace); + + return 0; +} + +int exynos_sc_config_exclusive( + void *handle, + exynos_sc_img *src_img, + exynos_sc_img *dst_img) +{ + CScalerV4L2 *sc = GetScaler(handle); + if (!sc) + return -1; + + if (src_img->drmMode && !sc->IsDRMAllowed()) { + SC_LOGE("Invalid DRM state request for Scaler%d (s=%d d=%d)", + sc->GetScalerID(), src_img->drmMode, dst_img->drmMode); + return -1; + } + + unsigned int rot = 0; + unsigned int flip_h = 0; + unsigned int flip_v = 0; + + if (dst_img->rot == HAL_TRANSFORM_ROT_270) { + rot = 270; + } else { + if (dst_img->rot & HAL_TRANSFORM_FLIP_H) + flip_h = 1; + if (dst_img->rot & HAL_TRANSFORM_FLIP_V) + flip_v = 1; + if (dst_img->rot & HAL_TRANSFORM_ROT_90) + rot = 90; + } + + if (!sc->SetRotate(rot, flip_h, flip_v)) { + SC_LOGE("Failed to set rotation degree %d, hflip %d, vflip %d", + rot, flip_h, flip_v); + return -1; + } + + int32_t src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(src_img->format); + int32_t dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(dst_img->format); + + 
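+    /*
+     * The rotation handling above special-cases HAL_TRANSFORM_ROT_270
+     * because, in Android's transform encoding (assumed here: FLIP_H =
+     * 0x01, FLIP_V = 0x02, ROT_90 = 0x04), ROT_270 is the combination
+     * 0x07; checking it first sends the driver a clean 270-degree
+     * rotation instead of ROT_90 plus both flips.  The SetCSCWide()
+     * call below maps the gralloc narrowRgb hint onto the driver's
+     * wide/narrow CSC range control, hence the inversion.
+     */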
if (!sc->SetSrcFormat(src_img->fw, src_img->fh, src_color_space)) + return -1; + + if (!sc->SetSrcCrop(src_img->x, src_img->y, src_img->w, src_img->h)) + return -1; + + if (!sc->SetDstFormat(dst_img->fw, dst_img->fh, dst_color_space)) + return -1; + + if (!sc->SetDstCrop(dst_img->x, dst_img->y, dst_img->w, dst_img->h)) + return -1; + + sc->SetCSCWide(!dst_img->narrowRgb); + + sc->SetSrcCacheable(src_img->cacheable != 0); + sc->SetDstCacheable(dst_img->cacheable != 0); + + sc->SetSrcPremultiplied(src_img->pre_multi); + sc->SetDstPremultiplied(dst_img->pre_multi); + + sc->SetDRM(src_img->drmMode != 0 || dst_img->drmMode != 0); + + return 0; +} + +int exynos_sc_run_exclusive( + void *handle, + exynos_sc_img *src_img, + exynos_sc_img *dst_img) +{ + CScalerV4L2 *sc = GetScaler(handle); + if (!sc) + return -1; + + void *addr[SC_NUM_OF_PLANES]; + + addr[0] = (void *)src_img->yaddr; + addr[1] = (void *)src_img->uaddr; + addr[2] = (void *)src_img->vaddr; + sc->SetSrcAddr(addr, src_img->mem_type, src_img->acquireFenceFd); + + addr[0] = (void *)dst_img->yaddr; + addr[1] = (void *)dst_img->uaddr; + addr[2] = (void *)dst_img->vaddr; + sc->SetDstAddr(addr, dst_img->mem_type, dst_img->acquireFenceFd); + + if (!sc->DevSetCtrl()) + return -1; + + if (!sc->DevSetFormat()) + return -1; + + if (!sc->ReqBufs()) + return -1; + + int fdSrcReleaseFence, fdDstReleaseFence; + + if (!sc->QBuf(&fdSrcReleaseFence, &fdDstReleaseFence)) + return -1; + + if (!sc->StreamOn()) { + close(fdSrcReleaseFence); + close(fdDstReleaseFence); + return -1; + } + + src_img->releaseFenceFd = fdSrcReleaseFence; + dst_img->releaseFenceFd = fdDstReleaseFence; + + return 0; +} + +void *exynos_sc_create_blend_exclusive( + int dev_num, + int allow_drm + ) +{ + CScalerBlendV4L2 *sc; + + sc = new CScalerBlendV4L2(dev_num, allow_drm); + if (!sc) { + SC_LOGE("Failed to allocate a Scaler blend handle for instance %d", dev_num); + return NULL; + } + + if (!sc->Valid()) { + SC_LOGE("Failed to create a Scaler blend handle for instance %d", dev_num); + delete sc; + return NULL; + } + + SC_LOGD("Scaler blend %d is successfully created", dev_num); + return reinterpret_cast(sc); +} + +int exynos_sc_config_blend_exclusive( + void *handle, + exynos_sc_img *src_img, + exynos_sc_img *dst_img, + struct SrcBlendInfo *srcblendinfo) +{ + + CScalerBlendV4L2 *sc = GetScalerBlend(handle); + if (!sc) + return -1; + + if (src_img->drmMode && !sc->IsDRMAllowed()) { + SC_LOGE("Invalid DRM state request for Scaler%d (s=%d d=%d)", + sc->GetScalerID(), src_img->drmMode, dst_img->drmMode); + return -1; + } + unsigned int rot = 0; + unsigned int flip_h = 0; + unsigned int flip_v = 0; + + if (dst_img->rot == HAL_TRANSFORM_ROT_270) { + rot = 270; + } else { + if (dst_img->rot & HAL_TRANSFORM_FLIP_H) + flip_h = 1; + if (dst_img->rot & HAL_TRANSFORM_FLIP_V) + flip_v = 1; + if (dst_img->rot & HAL_TRANSFORM_ROT_90) + rot = 90; + } + + if (!sc->SetRotate(rot, flip_h, flip_v)) { + SC_LOGE("Failed to set rotation degree %d, hflip %d, vflip %d", + rot, flip_h, flip_v); + return -1; + } + + int32_t src_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(src_img->format); + int32_t dst_color_space = HAL_PIXEL_FORMAT_2_V4L2_PIX(dst_img->format); + int32_t src_blend_color_space = + HAL_PIXEL_FORMAT_2_V4L2_PIX(srcblendinfo->srcblendfmt); + + sc->GetCustomAlphaBlendFmt(src_color_space, src_blend_color_space); + if (src_color_space < 0) { + SC_LOGE("src blending format not supported"); + return -1; + } + + sc->SetSrcBlendOp(srcblendinfo->blop); + + 
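+    /*
+     * Blend-path usage sketch (a plausible call order; function and field
+     * names as declared in this file and libscalerblend-v4l2.h; src/dst
+     * are exynos_sc_img descriptors filled by the caller and all values
+     * are only illustrative):
+     *
+     *   SrcBlendInfo blend = {};
+     *   blend.blop = ...;                  // one of enum SRC_BL_OP
+     *   blend.globalalpha.enable = true;
+     *   blend.globalalpha.val = 0x80;
+     *   blend.srcblendfmt = HAL_PIXEL_FORMAT_RGBA_8888;
+     *   blend.srcblendstride = 1280;
+     *   blend.srcblendwidth = 1280;  blend.srcblendheight = 720;
+     *   blend.srcblendhpos = 0;      blend.srcblendvpos = 0;
+     *
+     *   void *h = exynos_sc_create_blend_exclusive(0, 0);
+     *   exynos_sc_config_blend_exclusive(h, &src, &dst, &blend);
+     *   exynos_sc_run_exclusive(h, &src, &dst);
+     *   exynos_sc_wait_frame_done_exclusive(h);
+     *   exynos_sc_stop_exclusive(h);
+     *   exynos_sc_free_and_close(h);
+     */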
sc->SetSrcGlobalAlpha(srcblendinfo->globalalpha.val, + srcblendinfo->globalalpha.enable); + + sc->SetSrcBlendVPos(srcblendinfo->srcblendvpos); + + sc->SetSrcBlendHPos(srcblendinfo->srcblendhpos); + + sc->SetSrcBlendPremulti(srcblendinfo->srcblendpremulti); + + sc->SetSrcBlendFmt(src_blend_color_space); + + sc->SetSrcBlendStride(srcblendinfo->srcblendstride); + + sc->SetSrcBlendWidth(srcblendinfo->srcblendwidth); + + sc->SetSrcBlendHeight(srcblendinfo->srcblendheight); + + sc->SetSrcBlendCSCSpace(srcblendinfo->cscspec.enable, + srcblendinfo->cscspec.space, + srcblendinfo->cscspec.wide); + + if (!sc->SetSrcFormat(src_img->fw, src_img->fh, src_color_space)) + return -1; + + if (!sc->SetSrcCrop(src_img->x, src_img->y, src_img->w, src_img->h)) + return -1; + + if (!sc->SetDstFormat(dst_img->fw, dst_img->fh, dst_color_space)) + return -1; + + if (!sc->SetDstCrop(dst_img->x, dst_img->y, dst_img->w, dst_img->h)) + return -1; + + sc->SetSrcCacheable(src_img->cacheable != 0); + sc->SetDstCacheable(dst_img->cacheable != 0); + + sc->SetDRM(src_img->drmMode != 0 || dst_img->drmMode != 0); + + return 0; +} + +int exynos_sc_wait_frame_done_exclusive( + void *handle) +{ + CScalerV4L2 *sc = GetScaler(handle); + if (!sc) + return -1; + + return sc->DQBuf() ? 0 : -1; +} diff --git a/libscaler/libscalerblend-v4l2.cpp b/libscaler/libscalerblend-v4l2.cpp new file mode 100644 index 0000000..4747e72 --- /dev/null +++ b/libscaler/libscalerblend-v4l2.cpp @@ -0,0 +1,147 @@ +#include +#include + +#include "libscaler-v4l2.h" +#include "libscalerblend-v4l2.h" + +bool CScalerBlendV4L2::DevSetCtrl() +{ + + if (!SetCtrl()) + return false; + + /* Blending related ctls */ + if (!TestFlag(m_fStatus, SCF_SRC_BLEND)) + return false; + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_BLEND_OP, + m_SrcBlndCfg.blop) < 0) { + SC_LOGERR("Failed S_CTRL V4L2_CID_2D_BLEND_OP"); + return false; + } + + if (m_SrcBlndCfg.globalalpha.enable) { + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_GLOBAL_ALPHA, m_SrcBlndCfg.globalalpha.val) < 0) { + SC_LOGERR("Failed S_CTRL V4L2_CID_GLOBAL_ALPHA"); + return false; + } + } else { + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_GLOBAL_ALPHA, 0xff) < 0) { + SC_LOGERR("Failed S_CTRL V4L2_CID_GLOBAL_ALPHA 0xff"); + return false; + } + } + + if (m_SrcBlndCfg.cscspec.enable) { + bool is_bt709 = (m_SrcBlndCfg.cscspec.space == COLORSPACE_REC709)? 
true : false; + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_CSC_EQ, is_bt709) < 0) { + SC_LOGERR("Failed S_CTRL V4L2_CID_CSC_EQ - %d", + m_SrcBlndCfg.cscspec.space); + return false; + } + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_CSC_RANGE, m_SrcBlndCfg.cscspec.wide) < 0) { + SC_LOGERR("Failed S_CTRL V4L2_CID_CSC_RANGE - %d", + m_SrcBlndCfg.cscspec.wide); + return false; + } + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_SET_FMT, + m_SrcBlndCfg.srcblendfmt) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_SRC_BLEND_SET_FMT - %d", + m_SrcBlndCfg.srcblendfmt); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_FMT_PREMULTI, + m_SrcBlndCfg.srcblendpremulti) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_BLEND_FMT_PREMULTI - %d", + m_SrcBlndCfg.srcblendpremulti); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_SET_STRIDE, + m_SrcBlndCfg.srcblendstride) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_SRC_BLEND_SET_STRIDE - %d", + m_SrcBlndCfg.srcblendstride); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_SET_H_POS, + m_SrcBlndCfg.srcblendhpos) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_SRC_BLEND_SET_H_POS with degree %d", + m_SrcBlndCfg.srcblendhpos); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_SET_V_POS, + m_SrcBlndCfg.srcblendvpos) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_SRC_BLEND_SET_V_POS - %d", + m_SrcBlndCfg.srcblendvpos); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_SET_WIDTH, + m_SrcBlndCfg.srcblendwidth) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_SRC_BLEND_SET_WIDTH with degree %d", + m_SrcBlndCfg.srcblendwidth); + return false; + } + + if (exynos_v4l2_s_ctrl(m_fdScaler, V4L2_CID_2D_SRC_BLEND_SET_HEIGHT, + m_SrcBlndCfg.srcblendheight) < 0) { + SC_LOGERR("Failed V4L2_CID_2D_SRC_BLEND_SET_HEIGHT - %d", + m_SrcBlndCfg.srcblendheight); + return false; + } + + ClearFlag(m_fStatus, SCF_SRC_BLEND); + return true; +} + +void CScalerBlendV4L2::GetCustomAlphaBlendFmt(int32_t &src_color_space, + unsigned int srcblendfmt) { + + if (src_color_space == V4L2_PIX_FMT_NV12M) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + src_color_space = V4L2_PIX_FMT_NV12M_RGB32; + else if ((srcblendfmt == V4L2_PIX_FMT_BGR32)) + src_color_space = V4L2_PIX_FMT_NV12M_BGR32; + else if ((srcblendfmt == V4L2_PIX_FMT_RGB565)) + src_color_space = V4L2_PIX_FMT_NV12M_RGB565; + else if ((srcblendfmt == V4L2_PIX_FMT_RGB444)) + src_color_space = V4L2_PIX_FMT_NV12M_RGB444; + else if ((srcblendfmt == V4L2_PIX_FMT_RGB555X)) + src_color_space = V4L2_PIX_FMT_NV12M_RGB555X; + } else if (src_color_space == V4L2_PIX_FMT_NV12) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + src_color_space = V4L2_PIX_FMT_NV12_RGB32; + } else if (src_color_space == V4L2_PIX_FMT_NV12N) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + src_color_space = V4L2_PIX_FMT_NV12N_RGB32; + } else if (src_color_space == V4L2_PIX_FMT_NV12MT_16X16) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + src_color_space = V4L2_PIX_FMT_NV12MT_16X16_RGB32; + } else if (src_color_space == V4L2_PIX_FMT_NV21M) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + src_color_space = V4L2_PIX_FMT_NV21M_RGB32; + else if ((srcblendfmt == V4L2_PIX_FMT_BGR32)) + src_color_space = V4L2_PIX_FMT_NV21M_BGR32; + } else if (src_color_space == V4L2_PIX_FMT_NV21) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + src_color_space = V4L2_PIX_FMT_NV21_RGB32; + } else if (src_color_space == V4L2_PIX_FMT_YVU420) { + if ((srcblendfmt == V4L2_PIX_FMT_RGB32)) + 
src_color_space = V4L2_PIX_FMT_YVU420_RGB32; + } else { + src_color_space = -1; + } +} + +CScalerBlendV4L2::CScalerBlendV4L2(int dev_num, int allow_drm) : CScalerV4L2(dev_num, allow_drm){ + +} + +CScalerBlendV4L2::~CScalerBlendV4L2(){ + +} diff --git a/libscaler/libscalerblend-v4l2.h b/libscaler/libscalerblend-v4l2.h new file mode 100644 index 0000000..a7d7741 --- /dev/null +++ b/libscaler/libscalerblend-v4l2.h @@ -0,0 +1,64 @@ +#ifndef _EXYNOS_SCALERBLEND_H_ +#define _EXYNOS_SCALERBLEND_H_ + +#include "libscaler-v4l2.h" + +class CScalerBlendV4L2 : public CScalerV4L2 { + +public: + CScalerBlendV4L2(int instance, int allow_drm); + ~CScalerBlendV4L2(); + struct SrcBlendInfo m_SrcBlndCfg; + + // H/W Control + virtual bool DevSetCtrl(); + + void GetCustomAlphaBlendFmt(int32_t &src_color_space, + unsigned int srcblendfmt); + + inline void SetSrcBlendOp(SRC_BL_OP op) { + m_SrcBlndCfg.blop = op; + SetFlag(m_fStatus, SCF_SRC_BLEND); + } + + inline void SetSrcBlendHPos(int srcblendhpos) { + m_SrcBlndCfg.srcblendhpos = srcblendhpos; + } + + inline void SetSrcBlendVPos(int srcblendvpos) { + m_SrcBlndCfg.srcblendvpos = srcblendvpos; + } + + inline void SetSrcBlendPremulti(int srcblendpremulti) { + m_SrcBlndCfg.srcblendpremulti = srcblendpremulti; + } + + inline void SetSrcBlendFmt(int srcblendfmt) { + m_SrcBlndCfg.srcblendfmt = srcblendfmt; + } + + inline void SetSrcBlendStride(int srcblendstride) { + m_SrcBlndCfg.srcblendstride = srcblendstride; + } + + inline void SetSrcBlendWidth(int srcblendwidth) { + m_SrcBlndCfg.srcblendwidth= srcblendwidth; + } + + inline void SetSrcBlendHeight(int srcblendheight) { + m_SrcBlndCfg.srcblendheight = srcblendheight; + } + + inline void SetSrcGlobalAlpha(int globalalpha, bool enable) { + m_SrcBlndCfg.globalalpha.enable = enable; + m_SrcBlndCfg.globalalpha.val = globalalpha; + } + + inline void SetSrcBlendCSCSpace(bool enable, + enum colorspace space, bool wide) { + m_SrcBlndCfg.cscspec.enable = enable; + m_SrcBlndCfg.cscspec.space = space; + m_SrcBlndCfg.cscspec.wide = wide; + } +}; +#endif diff --git a/libseiren/Android.mk b/libseiren/Android.mk new file mode 100644 index 0000000..9f42a88 --- /dev/null +++ b/libseiren/Android.mk @@ -0,0 +1,25 @@ +LOCAL_PATH := $(call my-dir) +EXYNOS_AUDIO_MODE := $(LOCAL_PATH) + +include $(CLEAR_VARS) + +ifeq ($(BOARD_USE_SEIREN_AUDIO), true) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := dec/seiren_hw.c + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include + +LOCAL_MODULE := libseirenhw + +LOCAL_MODULE_TAGS := optional + +LOCAL_ARM_MODE := arm + +LOCAL_STATIC_LIBRARIES := + +LOCAL_SHARED_LIBRARIES := + +include $(BUILD_STATIC_LIBRARY) +endif diff --git a/libseiren/NOTICE b/libseiren/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libseiren/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libseiren/dec/seiren_hw.c b/libseiren/dec/seiren_hw.c new file mode 100644 index 0000000..50db1bb --- /dev/null +++ b/libseiren/dec/seiren_hw.c @@ -0,0 +1,309 @@ +/* +* +* Copyright 2012 Samsung Electronics S.LSI Co. LTD +* +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+*/ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + + +#include "seiren_hw.h" + +#define LOG_NDEBUG 1 +#define LOG_TAG "libseirenhw" +#include + +#define MAX_INSTANCE 10 + +struct instance_info { + unsigned int handle; + void *ibuf_addr; + void *obuf_addr; +}; +static struct instance_info inst_info[MAX_INSTANCE]; +static struct audio_mem_info_t ibuf_info; +static struct audio_mem_info_t obuf_info; +static int created = 0; +static int seiren_dev = -1; + +int ADec_getIdx(unsigned int handle) +{ + int i; + for (i = 0; i= 0) { + ret = ioctl(seiren_dev, SEIREN_IOCTL_CH_CREATE, ipType); + if (ret != 0) { + ALOGE("%s: ch_create ret: %d", __func__, ret); + goto EXIT_ERROR; + } + inst_info[index].handle = seiren_dev; + ALOGD("%s: successed to open AudH, index:%d", __func__,index); + } + else { + ALOGE("%s: failed to open AudH", __func__); + } + + goto EXIT; +EXIT_ERROR: + close(seiren_dev); + seiren_dev = -1; +EXIT: + return seiren_dev; +} + +int ADec_Destroy(u32 ulHandle) +{ + int index = ADec_getIdx(ulHandle); + int ret; + + if (index == -1) { + ALOGE("Can't find index."); + return -1; + } + + ALOGD("%s: index:%d,ibuf_addr:%p,obuf_addr:%p", __func__, + index, inst_info[index].ibuf_addr, inst_info[index].obuf_addr); + inst_info[index].handle = -1; + free(inst_info[index].ibuf_addr); + free(inst_info[index].obuf_addr); + + ret = ioctl(ulHandle, SEIREN_IOCTL_CH_DESTROY, ulHandle); + if (ret != 0) + ALOGE("%s: ch_destroy ret: %d", __func__, ret); + + ALOGD("%s: called, handle:%d", __func__, ulHandle); + + if (close(ulHandle) != 0) { + ALOGE("%s: failed to close", __func__); + return -1; + } + else { + ALOGD("%s: successed to close", __func__); + } + + return 0; +} + +int ADec_SendStream(u32 ulHandle, audio_mem_info_t* pInputInfo, int* consumedSize) +{ + int index = ADec_getIdx(ulHandle); + + ALOGV("%s: handle:%d, buf_addr[%p], buf_size[%d]", __func__, + ulHandle, pInputInfo->virt_addr, pInputInfo->data_size); + *consumedSize = write(ulHandle, pInputInfo->virt_addr, pInputInfo->data_size); + + ALOGV("%s: consumedSize: %d", __func__, *consumedSize); + + return (*consumedSize < 0 ? *consumedSize : 0); +} + +int ADec_DoEQ(u32 ulHandle, audio_mem_info_t* pMemInfo) +{ + int index = ADec_getIdx(ulHandle); + + ALOGD("%s: handle:%d, buf_addr[%p], buf_size[%d]", __func__, + ulHandle, pMemInfo->virt_addr, pMemInfo->mem_size); + ioctl(ulHandle, SEIREN_IOCTL_CH_EXE, pMemInfo); + + return 0; +} + +int ADec_RecvPCM(u32 ulHandle, audio_mem_info_t* pOutputInfo) +{ + int index = ADec_getIdx(ulHandle); + int pcm_size; + + ALOGV("%s: handle:%d, buf_addr[%p], buf_size[%d]", __func__, + ulHandle, pOutputInfo->virt_addr, pOutputInfo->mem_size); + pcm_size = read(ulHandle, pOutputInfo->virt_addr, pOutputInfo->mem_size); + + pOutputInfo->data_size = pcm_size; + + ALOGV("%s: pcm_size : %d", __func__, pOutputInfo->data_size); + + return 0; +} + +int ADec_SetParams(u32 ulHandle, SEIREN_PARAMCMD paramCmd, unsigned long pulValues) +{ + u32 cmd = paramCmd << 16; + cmd |= SEIREN_IOCTL_CH_SET_PARAMS; + + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, cmd, pulValues); + + return 0; +} + +int ADec_GetParams(u32 ulHandle, SEIREN_PARAMCMD paramCmd, unsigned long *pulValues) +{ + u32 cmd = paramCmd << 16; + + cmd |= SEIREN_IOCTL_CH_GET_PARAMS; + ioctl(ulHandle, cmd, pulValues); + ALOGD("%s: val:%lu. 
handle:%d", __func__, *pulValues, ulHandle); + + return 0; +} + +int ADec_SendEOS(u32 ulHandle) +{ + u32 cmd = ADEC_PARAM_SET_EOS << 16; + cmd |= SEIREN_IOCTL_CH_SET_PARAMS; + + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, cmd); + + return 0; +} + +int ADec_Flush(u32 ulHandle, SEIREN_PORTTYPE portType) +{ + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, SEIREN_IOCTL_CH_FLUSH, portType); + + return 0; +} + +int ADec_ConfigSignal(u32 ulHandle) +{ + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, SEIREN_IOCTL_CH_CONFIG); + + return 0; +} + +int ADec_GetPCMParams(u32 ulHandle, u32* pulValues) +{ + u32 cmd = PCM_CONFIG_INFO << 16; + cmd |= SEIREN_IOCTL_CH_GET_PARAMS; + + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, cmd, &pulValues); + + return 0; +} + +int ADec_GetIMemPoolInfo(u32 ulHandle, audio_mem_info_t* pIMemPoolInfo) +{ + u32 cmd = GET_IBUF_POOL_INFO << 16; + cmd |= SEIREN_IOCTL_CH_GET_PARAMS; + int index = ADec_getIdx(ulHandle); + + if (index == -1) { + ALOGE("Can't find index."); + return -1; + } + + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, cmd, &ibuf_info); + + ibuf_info.virt_addr = malloc(ibuf_info.mem_size); + inst_info[index].ibuf_addr = ibuf_info.virt_addr; + + pIMemPoolInfo->virt_addr = ibuf_info.virt_addr; + pIMemPoolInfo->mem_size = ibuf_info.mem_size; + pIMemPoolInfo->block_count = ibuf_info.block_count; + + ALOGD("%s: I_vaddr[%p], I_paddr[%p], I_size[%d], I_cnt[%d]", + __func__, + ibuf_info.virt_addr, + ibuf_info.phy_addr, + ibuf_info.mem_size, + ibuf_info.block_count); + + return 0; +} + +int ADec_GetOMemPoolInfo(u32 ulHandle, audio_mem_info_t* pOMemPoolInfo) +{ + u32 cmd = GET_OBUF_POOL_INFO << 16; + cmd |= SEIREN_IOCTL_CH_GET_PARAMS; + int index = ADec_getIdx(ulHandle); + + if (index == -1) { + ALOGE("Can't find index."); + return -1; + } + + ALOGD("%s: called. handle:%d", __func__, ulHandle); + ioctl(ulHandle, cmd, &obuf_info); + + obuf_info.virt_addr = malloc(obuf_info.mem_size); + inst_info[index].obuf_addr = obuf_info.virt_addr; + + pOMemPoolInfo->virt_addr = obuf_info.virt_addr; + pOMemPoolInfo->mem_size = obuf_info.mem_size; + pOMemPoolInfo->block_count = obuf_info.block_count; + + ALOGD("%s: O_vaddr[%p], O_paddr[%p], O_size[%d], O_cnt[%d]", + __func__, + obuf_info.virt_addr, + obuf_info.phy_addr, + obuf_info.mem_size, + obuf_info.block_count); + + return 0; +} diff --git a/libseiren/include/seiren_error.h b/libseiren/include/seiren_error.h new file mode 100644 index 0000000..8f5680b --- /dev/null +++ b/libseiren/include/seiren_error.h @@ -0,0 +1,39 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _SEIREN_ERROR_H_ +#define _SEIREN_ERROR_H_ + +typedef enum { + SEIREN_RETURN_OK = 0, + + SEIREN_ERROR_OPEN_FAIL = -1000, + SEIREN_ERROR_ALREADY_OPEN = -1001, + SEIREN_ERROR_NOT_READY = -1002, + + SEIREN_ERROR_IBUF_OVERFLOW = -2000, + SEIREN_ERROR_IBUF_INFO = -2001, + + SEIREN_ERROR_OBUF_READ = -3000, + SEIREN_ERROR_OBUF_INFO = -3001, + SEIREN_ERROR_OBUF_MMAP = -3002, + + SEIREN_ERROR_INVALID_SETTING = -4000, + SEIREN_ERROR_GETINFO_FAIL = -4001 +} SEIREN_ERRORTYPE; + +#endif /* _SEIREN_ERROR_H_ */ diff --git a/libseiren/include/seiren_hw.h b/libseiren/include/seiren_hw.h new file mode 100644 index 0000000..a215184 --- /dev/null +++ b/libseiren/include/seiren_hw.h @@ -0,0 +1,141 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __SEIREN_HW_H__ +#define __SEIREN_HW_H__ + +#include "seiren_ioctl.h" +#include "seiren_error.h" + +#define ADEC_DEV_NAME "/dev/seiren" + +typedef unsigned int u32; + +typedef enum { + ADEC_MP3 = 0x0, + ADEC_AAC, + ADEC_FLAC, + SOUND_EQ = 0x9, + SOUND_BASS, + AENC_AMR, + AENC_AAC, +} SEIREN_IPTYPE; + +typedef enum { + PORT_IN = 0x1, + PORT_OUT, +} SEIREN_PORTTYPE; + +typedef enum { + /* PCM parameters */ + PCM_PARAM_MAX_SAMPLE_RATE = 0x0, + PCM_PARAM_MAX_NUM_OF_CH, + PCM_PARAM_MAX_BIT_PER_SAMPLE, + + PCM_PARAM_SAMPLE_RATE, + PCM_PARAM_NUM_OF_CH, + PCM_PARAM_BIT_PER_SAMPLE, + + PCM_MAX_CONFIG_INFO, + PCM_CONFIG_INFO, + + /* EQ parameters */ + SEIREN_EQ_PARAM_NUM_OF_PRESETS = 0x10, + SEIREN_EQ_PARAM_MAX_NUM_OF_BANDS , + SEIREN_EQ_PARAM_RANGE_OF_BANDLEVEL, + SEIREN_EQ_PARAM_RANGE_OF_FREQ, + + SEIREN_EQ_PARAM_PRESET_ID, + SEIREN_EQ_PARAM_NUM_OF_BANDS, + SEIREN_EQ_PARAM_CENTER_FREQ, + SEIREN_EQ_PARAM_BANDLEVEL, + SEIREN_EQ_PARAM_BANDWIDTH, + + SEIREN_EQ_MAX_CONFIG_INFO, + SEIREN_EQ_CONFIG_INFO, + SEIREN_EQ_BAND_INFO, + + /* BASS parameters */ + + /* Codec Dec parameters */ + ADEC_PARAM_SET_EOS = 0x30, + ADEC_PARAM_GET_OUTPUT_STATUS, + + /* MP3 Dec parameters */ + + /* AAC Dec parameters */ + + /* FLAC Dec parameters */ + + /* Codec Enc parameters */ + + /* AMR Enc parameters */ + + /* AAC Enc parameters */ + + /* Buffer info */ + GET_IBUF_POOL_INFO = 0xA0, + GET_OBUF_POOL_INFO, + SET_IBUF_POOL_INFO, + SET_OBUF_POOL_INFO, +} SEIREN_PARAMCMD; + +typedef struct audio_mem_info_t { + void *virt_addr; + void *phy_addr; + u32 mem_size; + u32 data_size; + u32 block_count; +} audio_mem_info_t; + +typedef struct audio_mem_pool_info_t { + u32 virt_addr; + u32 phy_addr; + u32 block_size; + u32 block_count; +} audio_mem_pool_info_t; + +typedef struct audio_pcm_config_info_t { + u32 nDirection; // 0: input, 1:output + u32 nSamplingRate; + u32 nBitPerSample; + u32 nNumOfChannel; +} audio_pcm_config_info_t; + +#ifdef __cplusplus +extern "C" { +#endif + +int ADec_Create(u32 ulPlayerID, SEIREN_IPTYPE ipType, u32* pulHandle); +int ADec_Destroy(u32 ulHandle); +int ADec_SendStream(u32 ulHandle, audio_mem_info_t* pInputInfo, int* pulConsumedSize); +int ADec_DoEQ(u32 ulHandle, audio_mem_info_t* 
pMemInfo); +int ADec_RecvPCM(u32 ulHandle, audio_mem_info_t* pOutputInfo); +int ADec_SetParams(u32 ulHandle, SEIREN_PARAMCMD paramCmd, unsigned long pulValues); +int ADec_GetParams(u32 ulHandle, SEIREN_PARAMCMD paramCmd, unsigned long *pulValues); +int ADec_SendEOS(u32 ulHandle); +int ADec_Flush(u32 ulHandle, SEIREN_PORTTYPE portType); +int ADec_ConfigSignal(u32 ulHandle); +int ADec_GetPCMParams(u32 ulHandle, u32 *pulValues); +int ADec_GetIMemPoolInfo(u32 ulHandle, audio_mem_info_t* pIMemPoolInfo); +int ADec_GetOMemPoolInfo(u32 ulHandle, audio_mem_info_t* pOMemPoolInfo); + +#ifdef __cplusplus +} +#endif + +#endif /*__SEIREN_HW_H__ */ diff --git a/libseiren/include/seiren_ioctl.h b/libseiren/include/seiren_ioctl.h new file mode 100644 index 0000000..802cf04 --- /dev/null +++ b/libseiren/include/seiren_ioctl.h @@ -0,0 +1,30 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __SEIREN_IOCTL_H__ +#define __SEIREN_IOCTL_H__ + +#define SEIREN_IOCTL_CH_CREATE (0x1001) +#define SEIREN_IOCTL_CH_DESTROY (0x1002) +#define SEIREN_IOCTL_CH_EXE (0x1003) +#define SEIREN_IOCTL_CH_SET_PARAMS (0x2001) +#define SEIREN_IOCTL_CH_GET_PARAMS (0x2002) +#define SEIREN_IOCTL_CH_RESET (0x2003) +#define SEIREN_IOCTL_CH_FLUSH (0x2004) +#define SEIREN_IOCTL_CH_CONFIG (0x2005) + +#endif /* __SEIREN_IOCTL_H__ */ diff --git a/libsrp/Android.mk b/libsrp/Android.mk new file mode 100644 index 0000000..1cfc927 --- /dev/null +++ b/libsrp/Android.mk @@ -0,0 +1,7 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +ifeq ($(BOARD_USE_ALP_AUDIO), true) +include $(LOCAL_PATH)/alp/Android.mk +endif diff --git a/libsrp/NOTICE b/libsrp/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libsrp/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
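
The ADec_* declarations in seiren_hw.h above (ADec_Create through ADec_GetOMemPoolInfo) suggest a create / send-stream / receive-PCM / teardown flow, but the header documents neither the call ordering nor how audio_mem_info_t is expected to be filled. The sketch below is not part of this patch; the player ID, the buffer handling, and the ordering are assumptions made purely for illustration.

    /* Hypothetical call sequence against the ADec_* declarations above.
     * The audio_mem_info_t handling here is a placeholder, not the
     * driver's real buffer contract. */
    #include "seiren_hw.h"

    int decode_one_mp3_chunk(void *bitstream, u32 bitstream_len)
    {
        u32 handle = 0;
        int consumed = 0;
        audio_mem_info_t in  = { 0 };
        audio_mem_info_t out = { 0 };
        int ret;

        ret = ADec_Create(0 /* hypothetical player id */, ADEC_MP3, &handle);
        if (ret != SEIREN_RETURN_OK)
            return ret;

        in.virt_addr = bitstream;      /* assumed: caller-provided stream data */
        in.data_size = bitstream_len;

        ret = ADec_SendStream(handle, &in, &consumed);
        if (ret == SEIREN_RETURN_OK)
            ret = ADec_RecvPCM(handle, &out);  /* decoded PCM described by 'out' */

        ADec_SendEOS(handle);
        ADec_Destroy(handle);
        return ret;
    }
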
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libsrp/alp/Android.mk b/libsrp/alp/Android.mk new file mode 100644 index 0000000..c6a3aef --- /dev/null +++ b/libsrp/alp/Android.mk @@ -0,0 +1,20 @@ +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + dec/srp_api.c + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include + +LOCAL_MODULE := libsrpapi + +LOCAL_MODULE_TAGS := optional + +LOCAL_ARM_MODE := arm + +LOCAL_STATIC_LIBRARIES := + +LOCAL_SHARED_LIBRARIES := + +include $(BUILD_STATIC_LIBRARY) diff --git a/libsrp/alp/NOTICE b/libsrp/alp/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libsrp/alp/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libsrp/alp/dec/srp_api.c b/libsrp/alp/dec/srp_api.c new file mode 100644 index 0000000..43cae21 --- /dev/null +++ b/libsrp/alp/dec/srp_api.c @@ -0,0 +1,293 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * @file srp_api.c + * @brief + * @author Yunji Kim (yunji.kim@samsung.com) + * @version 1.1.0 + * @history + * 2012.02.28 : Create + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "srp_api.h" + +#define LOG_NDEBUG 1 +#define LOG_TAG "libsrpapi" +#include + +static struct srp_buf_info ibuf_info; +static struct srp_buf_info obuf_info; +static struct srp_buf_info pcm_info; + +static int srp_dev = -1; +static int srp_block_mode = SRP_INIT_BLOCK_MODE; + +int SRP_Create(int block_mode) +{ + if (srp_dev == -1) { + srp_block_mode = block_mode; + srp_dev = open(SRP_DEV_NAME, O_RDWR | + ((block_mode == SRP_INIT_NONBLOCK_MODE) ? O_NDELAY : 0)); + if (srp_dev > 0) + return srp_dev; + else + return SRP_ERROR_OPEN_FAIL; + } + + ALOGE("%s: Device is already opened", __func__); + return SRP_ERROR_ALREADY_OPEN; +} + +int SRP_Init() +{ + int ret = SRP_RETURN_OK; + unsigned int mmapped_size = 0; + + if (srp_dev != -1) { + ret = ioctl(srp_dev, SRP_INIT); + if (ret < 0) + return ret; + + /* mmap for OBUF */ + ret = ioctl(srp_dev, SRP_GET_MMAP_SIZE, &mmapped_size); + if (ret < 0) { + ALOGE("%s: SRP_GET_MMAP_SIZE is failed", __func__); + return SRP_ERROR_OBUF_MMAP; + } + obuf_info.mmapped_addr = mmap(0, mmapped_size, + PROT_READ | PROT_WRITE, MAP_SHARED, srp_dev, 0); + if (!obuf_info.mmapped_addr) { + ALOGE("%s: mmap is failed", __func__); + return SRP_ERROR_OBUF_MMAP; + } + obuf_info.mmapped_size = mmapped_size; + + ret = SRP_RETURN_OK; + } else { + ALOGE("%s: Device is not ready", __func__); + ret = SRP_ERROR_NOT_READY; /* device is not created */ + } + + return ret; +} + +int SRP_Decode(void *buff, int size_byte) +{ + int ret = SRP_RETURN_OK; + + if (srp_dev != -1) { + if (size_byte > 0) { + ALOGV("%s: Send data to RP (%d bytes)", __func__, size_byte); + + ret = write(srp_dev, buff, size_byte); /* Write Buffer to RP Driver */ + if (ret < 0) { + ret = -errno; + if (ret != SRP_ERROR_IBUF_OVERFLOW) + ALOGE("SRP_Decode returned error code: %d", ret); + } + return ret; /* Write Success */ + } else { + return ret; + } + } + + ALOGE("%s: Device is not ready", __func__); + return SRP_ERROR_NOT_READY; +} + +int SRP_Send_EOS(void) +{ + if (srp_dev != -1) + return ioctl(srp_dev, SRP_SEND_EOS); + + return SRP_ERROR_NOT_READY; +} + +int SRP_SetParams(int id, unsigned long val) +{ + if (srp_dev != -1) + return 0; /* not yet */ + + return SRP_ERROR_NOT_READY; +} + +int SRP_GetParams(int id, unsigned long *pval) +{ + if (srp_dev != -1) + return ioctl(srp_dev, id, pval); + + return SRP_ERROR_NOT_READY; +} + +int SRP_Flush(void) +{ + if (srp_dev != -1) + return ioctl(srp_dev, SRP_FLUSH); + + return SRP_ERROR_NOT_READY; +} + +int SRP_Get_PCM(void **addr, unsigned int *size) +{ + int ret = SRP_RETURN_OK; + + if (srp_dev != -1) { + ret = read(srp_dev, &pcm_info, 0); + if (ret == -1) { + *size = 0; + ALOGE("%s: PCM read fail", __func__); + return SRP_ERROR_OBUF_READ; + } + + *addr = pcm_info.addr; + *size = pcm_info.size; + } else { + return SRP_ERROR_NOT_READY; + } + + return ret; /* Read Success */ +} + +int SRP_Get_Dec_Info(struct srp_dec_info *dec_info) +{ + int ret; + + if (srp_dev != -1) { + ret = ioctl(srp_dev, SRP_GET_DEC_INFO, dec_info); + if (ret < 0) { + ret = -errno; + ALOGE("%s: Failed to get dec info", __func__); + return ret; + } + + ALOGV("numChannels(%d), samplingRate(%d)", dec_info->channels, dec_info->sample_rate); + + ret = SRP_RETURN_OK; + } else { + ret = SRP_ERROR_NOT_READY; + } + + return ret; +} + +int 
SRP_Get_Ibuf_Info(void **addr, unsigned int *size, unsigned int *num) +{ + int ret = SRP_RETURN_OK; + + if (srp_dev != -1) { + ret = ioctl(srp_dev, SRP_GET_IBUF_INFO, &ibuf_info); + if (ret == -1) { + ALOGE("%s: Failed to get Ibuf info", __func__); + return SRP_ERROR_IBUF_INFO; + } + + *addr = ibuf_info.addr; + *size = ibuf_info.size; + *num = ibuf_info.num; + + if (*num == 0) { + ALOGE("%s: IBUF num is 0", __func__); + return SRP_ERROR_INVALID_SETTING; + } + + ret = SRP_RETURN_OK; + } else { + ret = SRP_ERROR_NOT_READY; + } + + return ret; +} + +int SRP_Get_Obuf_Info(void **addr, unsigned int *size, unsigned int *num) +{ + int ret = SRP_RETURN_OK; + + if (srp_dev != -1) { + if (obuf_info.addr == NULL) { + ret = ioctl(srp_dev, SRP_GET_OBUF_INFO, &obuf_info); + if (ret < 0) { + ALOGE("%s: SRP_GET_OBUF_INFO is failed", __func__); + return SRP_ERROR_OBUF_INFO; + } + } + + *addr = obuf_info.addr; + *size = obuf_info.size; + *num = obuf_info.num; + + if (*num == 0) { + ALOGE("%s: OBUF num is 0", __func__); + return SRP_ERROR_INVALID_SETTING; + } + + ret = SRP_RETURN_OK; + } else { + ret = SRP_ERROR_NOT_READY; + } + + return ret; +} + +int SRP_Deinit(void) +{ + if (srp_dev != -1) { + munmap(obuf_info.mmapped_addr, obuf_info.mmapped_size); + return ioctl(srp_dev, SRP_DEINIT); + } + + return SRP_ERROR_NOT_READY; +} + +int SRP_Terminate(void) +{ + int ret; + + if (srp_dev != -1) { + ret = close(srp_dev); + + if (ret == 0) { + srp_dev = -1; /* device closed */ + return SRP_RETURN_OK; + } + } + + return SRP_ERROR_NOT_READY; +} + +int SRP_IsOpen(void) +{ + if (srp_dev == -1) { + ALOGV("%s: Device is not opened", __func__); + return 0; + } + + ALOGV("%s: Device is opened", __func__); + return 1; +} diff --git a/libsrp/alp/include/srp_api.h b/libsrp/alp/include/srp_api.h new file mode 100644 index 0000000..6eba4e1 --- /dev/null +++ b/libsrp/alp/include/srp_api.h @@ -0,0 +1,78 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * @file srp_api.h + * @brief + * @author Yunji Kim (yunji.kim@samsung.com) + * @version 1.1.0 + * @history + * 2012.02.28 : Create + */ + +#ifndef __SRP_API_H__ +#define __SRP_API_H__ + +#include "srp_ioctl.h" +#include "srp_error.h" + +#define SRP_DEV_NAME "dev/srp" + +#define SRP_INIT_BLOCK_MODE 0 +#define SRP_INIT_NONBLOCK_MODE 1 + +#define SRP_PENDING_STATE_RUNNING 0 +#define SRP_PENDING_STATE_PENDING 1 + +struct srp_buf_info { + void *mmapped_addr; + void *addr; + unsigned int mmapped_size; + unsigned int size; + int num; +}; + +struct srp_dec_info { + unsigned int sample_rate; + unsigned int channels; +}; + +#ifdef __cplusplus +extern "C" { +#endif + +int SRP_Create(int block_mode); +int SRP_Init(); +int SRP_Decode(void *buff, int size_byte); +int SRP_Send_EOS(void); +int SRP_SetParams(int id, unsigned long val); +int SRP_GetParams(int id, unsigned long *pval); +int SRP_Deinit(void); +int SRP_Terminate(void); +int SRP_IsOpen(void); + +int SRP_Get_Ibuf_Info(void **addr, unsigned int *size, unsigned int *num); +int SRP_Get_Obuf_Info(void **addr, unsigned int *size, unsigned int *num); +int SRP_Get_Dec_Info(struct srp_dec_info *dec_info); +int SRP_Get_PCM(void **addr, unsigned int *size); +int SRP_Flush(void); + +#ifdef __cplusplus +} +#endif + +#endif /*__SRP_API_H__ */ diff --git a/libsrp/alp/include/srp_error.h b/libsrp/alp/include/srp_error.h new file mode 100644 index 0000000..8d7ffde --- /dev/null +++ b/libsrp/alp/include/srp_error.h @@ -0,0 +1,48 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file srp_error.h + * @brief + * @author Yunji Kim (yunji.kim@samsung.com) + * @version 1.1.0 + * @history + * 2012.02.28 : Create + */ + +#ifndef _SRP_ERROR_H_ +#define _SRP_ERROR_H_ + +typedef enum { + SRP_RETURN_OK = 0, + + SRP_ERROR_OPEN_FAIL = -1000, + SRP_ERROR_ALREADY_OPEN = -1001, + SRP_ERROR_NOT_READY = -1002, + + SRP_ERROR_IBUF_OVERFLOW = -2000, + SRP_ERROR_IBUF_INFO = -2001, + + SRP_ERROR_OBUF_READ = -3000, + SRP_ERROR_OBUF_INFO = -3001, + SRP_ERROR_OBUF_MMAP = -3002, + + SRP_ERROR_INVALID_SETTING = -4000, + SRP_ERROR_GETINFO_FAIL = -4001 +} SRP_ERRORTYPE; + +#endif /* _SRP_ERROR_H_ */ diff --git a/libsrp/alp/include/srp_ioctl.h b/libsrp/alp/include/srp_ioctl.h new file mode 100644 index 0000000..7eb2757 --- /dev/null +++ b/libsrp/alp/include/srp_ioctl.h @@ -0,0 +1,49 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
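
srp_api.c and srp_api.h above wrap the /dev/srp device in an open / init / decode / read-PCM sequence, with errors reported through the SRP_ERROR_* codes from srp_error.h. The following is a minimal sketch of that sequence only; it glosses over non-blocking mode, SRP_ERROR_IBUF_OVERFLOW handling, and the IBUF/OBUF info queries that a real client would also need.

    /* Minimal decode path sketched from the SRP_* declarations above. */
    #include "srp_api.h"

    int play_one_srp_frame(const void *frame, int frame_len)
    {
        void *pcm = NULL;
        unsigned int pcm_len = 0;
        int ret;

        if (SRP_Create(SRP_INIT_BLOCK_MODE) < 0)
            return SRP_ERROR_OPEN_FAIL;

        ret = SRP_Init();                 /* also mmaps the output buffer */
        if (ret == SRP_RETURN_OK) {
            /* SRP_Decode returns the byte count from write() or a negative error */
            ret = SRP_Decode((void *)frame, frame_len);
            if (ret >= 0)
                ret = SRP_Get_PCM(&pcm, &pcm_len);  /* pcm points into the mmapped OBUF */

            SRP_Send_EOS();
            SRP_Deinit();
        }
        SRP_Terminate();
        return ret;
    }
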
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file srp_ioctl.h + * @brief + * @author Yunji Kim (yunji.kim@samsung.com) + * @version 1.1.0 + * @history + * 2012.02.28 : Create + */ + +#ifndef __SRP_IOCTL_H__ +#define __SRP_IOCTL_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +#define SRP_INIT (0x10000) +#define SRP_DEINIT (0x10001) +#define SRP_GET_MMAP_SIZE (0x10002) +#define SRP_FLUSH (0x20002) +#define SRP_SEND_EOS (0x20005) +#define SRP_GET_IBUF_INFO (0x20007) +#define SRP_GET_OBUF_INFO (0x20008) +#define SRP_STOP_EOS_STATE (0x30007) +#define SRP_GET_DEC_INFO (0x30008) + +#ifdef __cplusplus +} +#endif + +#endif /* __SRP_IOCTL_H__ */ + diff --git a/libstagefrighthw/Android.mk b/libstagefrighthw/Android.mk new file mode 100644 index 0000000..a8af63c --- /dev/null +++ b/libstagefrighthw/Android.mk @@ -0,0 +1,39 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + Exynos_OMX_Plugin.cpp + +LOCAL_CFLAGS += $(PV_CFLAGS_MINUS_VISIBILITY) + +LOCAL_C_INCLUDES:= \ + frameworks/native/include/media/hardware \ + frameworks/native/include/media/openmax \ + frameworks/native/include + +LOCAL_SHARED_LIBRARIES := \ + libbinder \ + libutils \ + libcutils \ + libui \ + libdl \ + libstagefright_foundation + +LOCAL_MODULE := libstagefrighthw + +LOCAL_MODULE_TAGS := optional +include $(BUILD_SHARED_LIBRARY) diff --git a/libstagefrighthw/Exynos_OMX_Plugin.cpp b/libstagefrighthw/Exynos_OMX_Plugin.cpp new file mode 100644 index 0000000..7a9849e --- /dev/null +++ b/libstagefrighthw/Exynos_OMX_Plugin.cpp @@ -0,0 +1,147 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "Exynos_OMX_Plugin.h" + +#include + +#include +#include + +namespace android { + +OMXPluginBase *createOMXPlugin() { + return new ExynosOMXPlugin; +} + +ExynosOMXPlugin::ExynosOMXPlugin() + : mLibHandle(dlopen("libExynosOMX_Core.so", RTLD_NOW)), + mInit(NULL), + mDeinit(NULL), + mComponentNameEnum(NULL), + mGetHandle(NULL), + mFreeHandle(NULL), + mGetRolesOfComponentHandle(NULL) { + if (mLibHandle != NULL) { + mInit = (InitFunc)dlsym(mLibHandle, "Exynos_OMX_Init"); + mDeinit = (DeinitFunc)dlsym(mLibHandle, "Exynos_OMX_Deinit"); + + mComponentNameEnum = + (ComponentNameEnumFunc)dlsym(mLibHandle, "Exynos_OMX_ComponentNameEnum"); + + mGetHandle = (GetHandleFunc)dlsym(mLibHandle, "Exynos_OMX_GetHandle"); + mFreeHandle = (FreeHandleFunc)dlsym(mLibHandle, "Exynos_OMX_FreeHandle"); + + mGetRolesOfComponentHandle = + (GetRolesOfComponentFunc)dlsym( + mLibHandle, "Exynos_OMX_GetRolesOfComponent"); + + (*mInit)(); + + } +} + +ExynosOMXPlugin::~ExynosOMXPlugin() { + if (mLibHandle != NULL) { + (*mDeinit)(); + + dlclose(mLibHandle); + mLibHandle = NULL; + } +} + +OMX_ERRORTYPE ExynosOMXPlugin::makeComponentInstance( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) { + if (mLibHandle == NULL) { + return OMX_ErrorUndefined; + } + + return (*mGetHandle)( + reinterpret_cast(component), + const_cast(name), + appData, const_cast(callbacks)); +} + +OMX_ERRORTYPE ExynosOMXPlugin::destroyComponentInstance( + OMX_COMPONENTTYPE *component) { + if (mLibHandle == NULL) { + return OMX_ErrorUndefined; + } + + return (*mFreeHandle)(reinterpret_cast(component)); +} + +OMX_ERRORTYPE ExynosOMXPlugin::enumerateComponents( + OMX_STRING name, + size_t size, + OMX_U32 index) { + if (mLibHandle == NULL) { + return OMX_ErrorUndefined; + } + + return (*mComponentNameEnum)(name, size, index); +} + +OMX_ERRORTYPE ExynosOMXPlugin::getRolesOfComponent( + const char *name, + Vector *roles) { + roles->clear(); + + if (mLibHandle == NULL) { + return OMX_ErrorUndefined; + } + + OMX_U32 numRoles; + OMX_ERRORTYPE err = (*mGetRolesOfComponentHandle)( + const_cast(name), &numRoles, NULL); + + if (err != OMX_ErrorNone) { + return err; + } + + if (numRoles > 0) { + OMX_U8 **array = new OMX_U8 *[numRoles]; + for (OMX_U32 i = 0; i < numRoles; ++i) { + array[i] = new OMX_U8[OMX_MAX_STRINGNAME_SIZE]; + } + + OMX_U32 numRoles2; + err = (*mGetRolesOfComponentHandle)( + const_cast(name), &numRoles2, array); + + CHECK_EQ(err, OMX_ErrorNone); + CHECK_EQ(numRoles, numRoles2); + + for (OMX_U32 i = 0; i < numRoles; ++i) { + String8 s((const char *)array[i]); + roles->push(s); + + delete[] array[i]; + array[i] = NULL; + } + + delete[] array; + array = NULL; + } + + return OMX_ErrorNone; +} + +} // namespace android + diff --git a/libstagefrighthw/Exynos_OMX_Plugin.h b/libstagefrighthw/Exynos_OMX_Plugin.h new file mode 100644 index 0000000..3b4d8f0 --- /dev/null +++ b/libstagefrighthw/Exynos_OMX_Plugin.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
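
Exynos_OMX_Plugin.cpp above resolves the OMX core entry points by dlopen()ing libExynosOMX_Core.so and dlsym()ing each symbol, then invokes (*mInit)() as soon as the library handle is non-NULL, without checking that the individual lookups succeeded. Below is a plain-C sketch of the same lookup pattern with that check added; the omx_init_fn typedef is a stand-in for the real OMX_ERRORTYPE signature and is not taken from this patch.

    #include <dlfcn.h>
    #include <stdio.h>

    typedef int (*omx_init_fn)(void);   /* stand-in for OMX_ERRORTYPE (*)(void) */

    static void *load_omx_core(void)
    {
        void *lib = dlopen("libExynosOMX_Core.so", RTLD_NOW);
        if (lib == NULL) {
            fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return NULL;
        }

        omx_init_fn init = (omx_init_fn)dlsym(lib, "Exynos_OMX_Init");
        if (init == NULL || init() != 0) {   /* 0 == OMX_ErrorNone */
            dlclose(lib);
            return NULL;
        }
        return lib;                          /* caller dlcloses after core deinit */
    }
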
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef Exynos_OMX_PLUGIN + +#define Exynos_OMX_PLUGIN + +#include + +namespace android { + +struct ExynosOMXPlugin : public OMXPluginBase { + ExynosOMXPlugin(); + virtual ~ExynosOMXPlugin(); + + virtual OMX_ERRORTYPE makeComponentInstance( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + + virtual OMX_ERRORTYPE destroyComponentInstance( + OMX_COMPONENTTYPE *component); + + virtual OMX_ERRORTYPE enumerateComponents( + OMX_STRING name, + size_t size, + OMX_U32 index); + + virtual OMX_ERRORTYPE getRolesOfComponent( + const char *name, + Vector *roles); + +private: + void *mLibHandle; + + typedef OMX_ERRORTYPE (*InitFunc)(); + typedef OMX_ERRORTYPE (*DeinitFunc)(); + typedef OMX_ERRORTYPE (*ComponentNameEnumFunc)( + OMX_STRING, OMX_U32, OMX_U32); + + typedef OMX_ERRORTYPE (*GetHandleFunc)( + OMX_HANDLETYPE *, OMX_STRING, OMX_PTR, OMX_CALLBACKTYPE *); + + typedef OMX_ERRORTYPE (*FreeHandleFunc)(OMX_HANDLETYPE *); + + typedef OMX_ERRORTYPE (*GetRolesOfComponentFunc)( + OMX_STRING, OMX_U32 *, OMX_U8 **); + + InitFunc mInit; + DeinitFunc mDeinit; + ComponentNameEnumFunc mComponentNameEnum; + GetHandleFunc mGetHandle; + FreeHandleFunc mFreeHandle; + GetRolesOfComponentFunc mGetRolesOfComponentHandle; + + ExynosOMXPlugin(const ExynosOMXPlugin &); + ExynosOMXPlugin &operator=(const ExynosOMXPlugin &); +}; + +} // namespace android + +#endif // Exynos_OMX_PLUGIN diff --git a/libstagefrighthw/NOTICE b/libstagefrighthw/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libstagefrighthw/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libswconverter/Android.mk b/libswconverter/Android.mk new file mode 100644 index 0000000..edb7799 --- /dev/null +++ b/libswconverter/Android.mk @@ -0,0 +1,49 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) +include frameworks/av/media/libstagefright/codecs/common/Config.mk + +LOCAL_MODULE_TAGS := optional + +LOCAL_SRC_FILES := \ + swconvertor.c + +ifeq ($(VOTT), v7) +LOCAL_CFLAGS += -DNEON_SUPPORT +LOCAL_SRC_FILES += \ + csc_interleave_memcpy_neon.s \ + csc_BGRA8888_to_YUV420SP_NEON.s \ + csc_RGBA8888_to_YUV420SP_NEON.s \ + csc_BGRA8888_to_RGBA8888_NEON.s + +ifeq ($(BOARD_USE_NV12T_128X64), true) +LOCAL_SRC_FILES += \ + csc_linear_to_tiled_crop_neon.s \ + csc_linear_to_tiled_interleave_crop_neon.s \ + csc_tiled_to_linear_crop_neon.s \ + csc_tiled_to_linear_deinterleave_crop_neon.s +LOCAL_CFLAGS += -DUSE_NV12T_128X64 +else +LOCAL_SRC_FILES += \ + csc_tiled_to_linear_y_neon.s \ + csc_tiled_to_linear_uv_neon.s \ + csc_tiled_to_linear_uv_deinterleave_neon.s +endif + +LOCAL_CFLAGS += -Werror +endif + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/include + +LOCAL_MODULE := libswconverter + +LOCAL_PRELINK_MODULE := false + +LOCAL_ARM_MODE := arm + +LOCAL_STATIC_LIBRARIES := +LOCAL_SHARED_LIBRARIES := liblog + +include $(BUILD_STATIC_LIBRARY) diff --git a/libswconverter/NOTICE b/libswconverter/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libswconverter/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libswconverter/csc_BGRA8888_to_RGBA8888_NEON.s b/libswconverter/csc_BGRA8888_to_RGBA8888_NEON.s new file mode 100644 index 0000000..0f4b45b --- /dev/null +++ b/libswconverter/csc_BGRA8888_to_RGBA8888_NEON.s @@ -0,0 +1,112 @@ +/* + * + * Copyright 2013 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_BGRA8888_to_RGBA8888.s + * @brief color format converter + * @author Hyungdeok Lee (hd0408.lee@samsung.com) + * @version 1.0 + * @history + * 2013.02.28 : Create + */ + +/* + * Source BGRA8888 copy to Dest RGBA8888. + * Use neon interleaved load instruction, easly swap R ch to B ch. 
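+ *
+ * Roughly equivalent scalar C (an illustrative sketch only, not part of the
+ * build; npixels is the number of pixels to convert, src/dest are byte
+ * pointers to BGRA and RGBA data):
+ *
+ *     for (i = 0; i < npixels; i++) {
+ *         dest[4*i + 0] = src[4*i + 2];    // R (byte 2 of BGRA -> byte 0 of RGBA)
+ *         dest[4*i + 1] = src[4*i + 1];    // G stays in place
+ *         dest[4*i + 2] = src[4*i + 0];    // B (byte 0 of BGRA -> byte 2 of RGBA)
+ *         dest[4*i + 3] = src[4*i + 3];    // A stays in place
+ *     }
+ *
+ * Each vld4.8/vswp/vst4.8 group below does this 8 pixels at a time.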
+ * + * @param dest + * dst address[out] + * + * @param src + * src address[in] + * + * @param width + * line width [in] + * + * @param bpp + * bpp only concerned about 4 + */ + + .arch armv7-a + .text + .global csc_BGRA8888_RGBA8888_NEON + .type csc_BGRA8888_RGBA8888_NEON, %function +csc_BGRA8888_RGBA8888_NEON: + .fnstart + + @r0 dest + @r1 src + @r2 width + @r3 bpp + @r4 + @r5 + @r6 + @r7 + @r8 temp1 + @r9 temp2 + @r10 dest_addr + @r11 src_addr + @r12 temp_width + @r14 i + + stmfd sp!, {r4-r12,r14} @ backup registers + + mov r10, r0 + mov r11, r1 + mov r9, r2, lsr #5 @ r9 = r2 >> 5 (32) + and r14, r9, #3 @ r14 = r9 & 3 + mov r12, r2, lsr #7 @ r12 = r2 >> 7 (128) + + cmp r12, #0 + beq LESS_THAN_128 + +@ Process d0 to d3 at once. 4 times same operation. := 8 byte * 4 * 4 = 128 byte loop. +LOOP_128: + @pld [r11] @ cache line fill. use this for r11 region set by cachable. + vld4.8 {d0, d1, d2, d3}, [r11]! + vswp d0, d2 + vst4.8 {d0, d1, d2, d3}, [r10]! + + vld4.8 {d0, d1, d2, d3}, [r11]! + vswp d0, d2 + vst4.8 {d0, d1, d2, d3}, [r10]! + + vld4.8 {d0, d1, d2, d3}, [r11]! + vswp d0, d2 + vst4.8 {d0, d1, d2, d3}, [r10]! + + vld4.8 {d0, d1, d2, d3}, [r11]! + vswp d0, d2 + vst4.8 {d0, d1, d2, d3}, [r10]! + + subs r12, #1 + bne LOOP_128 + +LESS_THAN_128: + cmp r14, #0 + beq END + +LOOP_32: + vld4.8 {d0, d1, d2, d3}, [r11]! + vswp d0, d2 + vst4.8 {d0, d1, d2, d3}, [r10]! + subs r14, #1 + bne LOOP_32 + +END: + ldmfd sp!, {r4-r12,r15} @ restore registers + .fnend diff --git a/libswconverter/csc_BGRA8888_to_YUV420SP_NEON.s b/libswconverter/csc_BGRA8888_to_YUV420SP_NEON.s new file mode 100644 index 0000000..956f553 --- /dev/null +++ b/libswconverter/csc_BGRA8888_to_YUV420SP_NEON.s @@ -0,0 +1,365 @@ + + .arch armv7-a + .text + .global csc_BGRA8888_to_YUV420SP_NEON + .type csc_BGRA8888_to_YUV420SP_NEON, %function +csc_BGRA8888_to_YUV420SP_NEON: + .fnstart + + @r0 pDstY + @r1 pDstUV + @r2 pSrcRGB + @r3 nWidth + @r4 pDstY2 = pDstY + nWidth + @r5 pSrcRGB2 = pSrcRGB + nWidthx2 + @r6 temp7, nWidth16m + @r7 temp6, accumilator + @r8 temp5, nWidthTemp + @r9 temp4, Raw RGB565 + @r10 temp3, r,g,b + @r11 temp2, immediate operand + @r12 temp1, nHeight + @r14 temp0, debugging pointer + + .equ CACHE_LINE_SIZE, 32 + .equ PRE_LOAD_OFFSET, 6 + + stmfd sp!, {r4-r12,r14} @ backup registers + ldr r12, [sp, #40] @ load nHeight + @ldr r14, [sp, #44] @ load pTest + add r4, r0, r3 @r4: pDstY2 = pDstY + nWidth + add r5, r2, r3, lsl #2 @r5: pSrcRGB2 = tmpSrcRGB + nWidthx4 + sub r8, r3, #16 @r8: nWidthTmp = nWidth -16 + + @q0: temp1, R + @q1: temp2, GB + @q2: R + @q3: G + @q4: B + @q5: temp3, output + + + vmov.u16 q6, #66 @coefficient assignment + vmov.u16 q7, #129 + vmov.u16 q8, #25 + vmov.u16 q9, #0x8080 @ 128<<8 + 128 + + vmov.u16 q10, #0x1000 @ 16<<8 + 128 + vorr.u16 q10, #0x0080 + + vmov.u16 q11, #38 @#-38 + vmov.u16 q12, #74 @#-74 + vmov.u16 q13, #112 + vmov.u16 q14, #94 @#-94 + vmov.u16 q15, #18 @#-18 + + + + +LOOP_NHEIGHT2: + stmfd sp!, {r12} @ backup registers + +LOOP_NWIDTH16: + pld [r2, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + @-------------------------------------------YUV ------------------------------------------ + vmov.u16 q14, #94 @#94 + vmov.u16 q15, #18 @#18 + vld4.8 {d0,d1,d2,d3}, [r2]! @loadRGB interleavely + vld4.8 {d4,d5,d6,d7}, [r2]! 
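+ @ At this point 16 BGRA pixels are split into per-channel d registers:
+ @ d0/d4 = B, d1/d5 = G, d2/d6 = R, d3/d7 = A. The vmov/vand.u16 pairs below
+ @ view them as 16-bit lanes so the low byte of each lane is an even pixel
+ @ and the high byte the odd one: the masked (even) pixels feed the 4:2:0
+ @ subsampled U/V calculation, while Y is computed for both halves and the
+ @ two results are re-interleaved with vtrn.8 before the store.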
@loadRGB interleavely + + + vmov.u16 d8,d2 + vmov.u16 d9,d6 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d0 + vmov.u16 d13,d4 + + vand.u16 q4,#0x00FF @R + vand.u16 q5,#0x00FF @G + vand.u16 q6,#0x00FF @B + + vmov.u16 q8,q9 @ CalcU() + vmla.u16 q8,q6,q13 @112 * B[k] + vmls.u16 q8,q4,q11 @q0:U -(38 * R[k]) @128<<6+ 32 + u>>2 + vmls.u16 q8,q5,q12 @-(74 * G[k]) + vshr.u16 q8,q8, #8 @(128<<8+ 128 + u)>>8 + + vmov.u16 q7,q9 @CalcV() + vmla.u16 q7,q4,q13 @112 * R[k] + vmls.u16 q7,q5,q14 @q0:U -(94 * G[k]) @128<<6+ 32 + v>>2 + vmls.u16 q7,q6,q15 @-(18 * B[k]) + vshr.u16 q7,q7, #8 @(128<<8+ 128 + v)>>8 + + + vtrn.8 q8,q7 + vst1.8 {q8}, [r1]! @write UV component to yuv420_buffer+linear_ylanesiez + + @-------------------------------------------Y ------------------------------------------ + + vmov.u16 q14, #66 @#66 + vmov.u16 q15, #129 @#129 + vmov.u16 q8, #25 @#25 + + @CalcY_Y() + + vmul.u16 q7,q4,q14 @q0 = 66 *R[k] + vmla.u16 q7,q5,q15 @q0 += 129 *G[k] + vmla.u16 q7,q6,q8 @q0 += 25 *B[k] + + vadd.u16 q7,q7,q10 + vshr.u16 q7,q7, #8 + + vmov.u16 d8,d2 + vmov.u16 d9,d6 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d0 + vmov.u16 d13,d4 + + vshr.u16 q4,q4,#8 @R + vshr.u16 q5,q5,#8 @G + vshr.u16 q6,q6,#8 @B + + vmul.u16 q0,q4,q14 @q0 = 66 *R[k] + vmla.u16 q0,q5,q15 @q0 += 129 *G[k] + vmla.u16 q0,q6,q8 @q0 += 25 *B[k] + vadd.u16 q0,q0,q10 + vshr.u16 q0,q0, #8 + + vtrn.8 q7,q0 + vst1.8 {q7}, [r0]!@write to Y to yuv420_buffer + + + + @-------------------------------------------Y ------------------------------------------ + + @---------------------------------------------Y1------------------------------------------- + + pld [r5, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld4.8 {d0,d1,d2,d3}, [r5]! @loadRGB interleavely + vld4.8 {d4,d5,d6,d7}, [r5]! @loadRGB interleavely + + vmov.u16 d8,d2 + vmov.u16 d9,d6 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d0 + vmov.u16 d13,d4 + + + vand.u16 q4,#0x00FF @R + vand.u16 q5,#0x00FF @G + vand.u16 q6,#0x00FF @B + + + + vmul.u16 q7,q4,q14 @q0 = 66 *R[k] + vmla.u16 q7,q5,q15 @q0 += 129 *G[k] + vmla.u16 q7,q6,q8 @q0 += 25 *B[k] + vadd.u16 q7,q7,q10 + vshr.u16 q7,q7, #8 + + vmov.u16 d8,d2 + vmov.u16 d9,d6 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d0 + vmov.u16 d13,d4 + + vshr.u16 q4,q4,#8 @R + vshr.u16 q5,q5,#8 @G + vshr.u16 q6,q6,#8 @B + + vmul.u16 q0,q4,q14 @q0 = 66 *R[k] + vmla.u16 q0,q5,q15 @q0 += 129 *G[k] + vmla.u16 q0,q6,q8 @q0 += 25 *B[k] + vadd.u16 q0,q0,q10 + vshr.u16 q0,q0, #8 + + vtrn.8 q7,q0 + vst1.8 {q7}, [r4]!@write to Y to yuv420_buffer + + subs r8,r8,#16 @nWidth16-- + BPL LOOP_NWIDTH16 @if nWidth16>0 + @-----------------------------------unaligned --------------------------------------- + + adds r8,r8,#16 @ + 16 - 2 + BEQ NO_UNALIGNED @in case that nWidht is multiple of 16 +LOOP_NWIDTH2: + @----------------------------------pDstRGB1--Y------------------------------------------ + @stmfd sp!, {r14} @backup r14 + + + ldr r9, [r2], #4 @loadRGB int + ldr r12, [r2], #4 @loadRGB int + + mov r10, r9,lsr #16 @copy to r10 + mov r14, r12 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @R: (rgbIn[k] & 0xF800) >> 10; + ldr r6, =0x00FF0000 + and r14, r14, r6 @R: (rgbIn[k] & 0xF800) >> 10; + add r10,r10,r14 + + mov r11, #66 @accumilator += R*66 + mul r7, r10, r11 + + mov r10, r9,lsr #8 @copy to r10 + mov r14, r12,lsl #8 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @G: + ldr r6, =0x00FF0000 + and r14, r14, r6 @G: + add r10,r10,r14 + + mov r11, #129 @accumilator += G *129 + mla r7, r10, r11, r7 + + mov r10, r9 @copy to r10 + mov r14, 
r12,lsl #16 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @B + ldr r6, =0x00FF0000 + and r14, r14, r6 @B + add r10,r10,r14 + + mov r11, #25 @accumilator 1 -= B *25 + mla r7, r10, r11, r7 + + ldr r6, =0x10801080 + add r7, r6 + + lsr r7, #8 + strb r7, [r0],#1 + lsr r7,#16 + strb r7, [r0],#1 + @ldmfd sp!, {r14} @load r14 + + + @----------------------------------pDstRGB2--UV------------------------------------------ + + mov r10, r9 @copy to r10 + ldr r7,=0x00008080 + mov r12,r7 + + ldr r6, =0x000000FF + and r10, r10, r6 @B: + + mov r11, #112 @accumilator += B*112 + mla r7, r10, r11, r7 + + + mov r11, #18 @accumilator -= B*18 + mul r11, r10, r11 + sub r12, r12, r11 + + + + + mov r10, r9, lsr #16 @copy to r10 + ldr r6, =0x000000FF + and r10, r10, r6 @R: (rgbIn[k] & 0xF800) >> 10; + + mov r11, #38 @accumilator -= R *38 + mul r11, r10, r11 + sub r7, r7, r11 + + mov r11, #112 @accumilator = R *112 + mla r12, r10, r11, r12 + + mov r10, r9,lsr #8 @copy to r10 + ldr r6, =0x000000FF + and r10, r10, r6 @G: (rgbIn[k] & 0x07E0) >> 5; + + mov r11, #74 @accumilator -= G*74 + mul r11, r10, r11 + sub r7, r7, r11 + + mov r11, #94 @accumilator -= G*94 + mul r11, r10, r11 + sub r12, r12, r11 + + lsr r7, #8 @ >>8 + strb r7, [r1],#1 + lsr r12, #8 @ >>8 + strb r12, [r1],#1 + + @----------------------------------pDstRGB2--Y------------------------------------------ + @stmfd sp!, {r14} @backup r14 + + + ldr r9, [r5], #4 @loadRGB int + ldr r12, [r5], #4 @loadRGB int + + mov r10, r9,lsr #16 @copy to r10 + mov r14, r12 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @R: (rgbIn[k] & 0xF800) >> 10; + ldr r6, =0x00FF0000 + and r14, r14, r6 @R: (rgbIn[k] & 0xF800) >> 10; + add r10,r10,r14 + + mov r11, #66 @accumilator += R*66 + mul r7, r10, r11 + + mov r10, r9,lsr #8 @copy to r10 + mov r14, r12,lsl #8 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @G: + ldr r6, =0x00FF0000 + and r14, r14, r6 @G: + add r10,r10,r14 + + mov r11, #129 @accumilator += G *129 + mla r7, r10, r11, r7 + + mov r10, r9 @copy to r10 + mov r14, r12,lsl #16 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @B + ldr r6, =0x00FF0000 + and r14, r14, r6 @B + add r10,r10,r14 + + + + + mov r11, #25 @accumilator 1 -= B *25 + mla r7, r10, r11, r7 + + ldr r6, =0x10801080 + add r7, r6 + lsr r7, #8 + + strb r7, [r4],#1 + lsr r7,#16 + strb r7, [r4],#1 + @ldmfd sp!, {r14} @load r14 + + + subs r8,r8,#2 @ nWidth2 -= 2 + BGT LOOP_NWIDTH2 @ if nWidth2>0 + + +NO_UNALIGNED: @in case that nWidht is multiple of 16 + + @----------------------------------------------------------------------------- + sub r8, r3, #16 @r8: nWidthTmp = nWidth -16 + add r0, r0, r3 @pDstY + nwidth + add r2, r2, r3, lsl #2 @pSrcRGB + nwidthx4 + add r4, r4, r3 @pDstY2 + nwidth + add r5, r5, r3, lsl #2 @pSrcRGB2 + nwidthx4 + + ldmfd sp!, {r12} + subs r12,r12,#2 @nHeight -=2 + BGT LOOP_NHEIGHT2 @if nHeight2>0 + + ldmfd sp!, {r4-r12,pc} @ backup registers + .fnend diff --git a/libswconverter/csc_RGBA8888_to_YUV420SP_NEON.s b/libswconverter/csc_RGBA8888_to_YUV420SP_NEON.s new file mode 100644 index 0000000..92c2d58 --- /dev/null +++ b/libswconverter/csc_RGBA8888_to_YUV420SP_NEON.s @@ -0,0 +1,388 @@ + + .arch armv7-a + .text + .global csc_RGBA8888_to_YUV420SP_NEON + .type csc_RGBA8888_to_YUV420SP_NEON, %function +csc_RGBA8888_to_YUV420SP_NEON: + .fnstart + + @r0 pDstY + @r1 pDstUV + @r2 pSrcRGB + @r3 nWidth + @r4 pDstY2 = pDstY + nWidth + @r5 pSrcRGB2 = pSrcRGB + nWidthx2 + @r6 temp7, nWidth16m + @r7 temp6, accumilator + @r8 temp5, nWidthTemp + @r9 temp4, Raw RGB565 + @r10 
temp3, r,g,b + @r11 temp2, immediate operand + @r12 temp1, nHeight + @r14 temp0, debugging pointer + + .equ CACHE_LINE_SIZE, 32 + .equ PRE_LOAD_OFFSET, 6 + + stmfd sp!, {r4-r12,r14} @ backup registers + ldr r12, [sp, #40] @ load nHeight + @ldr r14, [sp, #44] @ load pTest + add r4, r0, r3 @r4: pDstY2 = pDstY + nWidth + add r5, r2, r3, lsl #2 @r5: pSrcRGB2 = tmpSrcRGB + nWidthx4 + sub r8, r3, #16 @r8: nWidthTmp = nWidth -16 + + @q0: temp1, R + @q1: temp2, GB + @q2: R + @q3: G + @q4: B + @q5: temp3, output + + + vmov.u16 q6, #66 @coefficient assignment + vmov.u16 q7, #129 + vmov.u16 q8, #25 + vmov.u16 q9, #0x8080 @ 128<<8 + 128 + + vmov.u16 q10, #0x1000 @ 16<<8 + 128 + vorr.u16 q10, #0x0080 + + vmov.u16 q11, #38 @#-38 + vmov.u16 q12, #74 @#-74 + vmov.u16 q13, #112 + vmov.u16 q14, #94 @#-94 + vmov.u16 q15, #18 @#-18 + + + + +LOOP_NHEIGHT2: + stmfd sp!, {r12} @ backup registers + +LOOP_NWIDTH16: + pld [r2, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + @-------------------------------------------YUV ------------------------------------------ + vmov.u16 q14, #94 @#94 + vmov.u16 q15, #18 @#18 + vld4.8 {d0,d1,d2,d3}, [r2]! @loadRGB interleavely + vld4.8 {d4,d5,d6,d7}, [r2]! @loadRGB interleavely + + + @vmov.u16 d8,d2 + @vmov.u16 d9,d6 + @vmov.u16 d10,d1 + @vmov.u16 d11,d5 + @vmov.u16 d12,d0 + @vmov.u16 d13,d4 + vmov.u16 d8,d0 + vmov.u16 d9,d4 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d2 + vmov.u16 d13,d6 + + vand.u16 q4,#0x00FF @R + vand.u16 q5,#0x00FF @G + vand.u16 q6,#0x00FF @B + + vmov.u16 q8,q9 @ CalcU() + vmla.u16 q8,q6,q13 @112 * B[k] + vmls.u16 q8,q4,q11 @q0:U -(38 * R[k]) @128<<6+ 32 + u>>2 + vmls.u16 q8,q5,q12 @-(74 * G[k]) + vshr.u16 q8,q8, #8 @(128<<8+ 128 + u)>>8 + + vmov.u16 q7,q9 @CalcV() + vmla.u16 q7,q4,q13 @112 * R[k] + vmls.u16 q7,q5,q14 @q0:U -(94 * G[k]) @128<<6+ 32 + v>>2 + vmls.u16 q7,q6,q15 @-(18 * B[k]) + vshr.u16 q7,q7, #8 @(128<<8+ 128 + v)>>8 + + + vtrn.8 q8,q7 + vst1.8 {q8}, [r1]! @write UV component to yuv420_buffer+linear_ylanesiez + + @-------------------------------------------Y ------------------------------------------ + + vmov.u16 q14, #66 @#66 + vmov.u16 q15, #129 @#129 + vmov.u16 q8, #25 @#25 + + @CalcY_Y() + + vmul.u16 q7,q4,q14 @q0 = 66 *R[k] + vmla.u16 q7,q5,q15 @q0 += 129 *G[k] + vmla.u16 q7,q6,q8 @q0 += 25 *B[k] + + vadd.u16 q7,q7,q10 + vshr.u16 q7,q7, #8 + + @vmov.u16 d8,d2 + @vmov.u16 d9,d6 + @vmov.u16 d10,d1 + @vmov.u16 d11,d5 + @vmov.u16 d12,d0 + @vmov.u16 d13,d4 + vmov.u16 d8,d0 + vmov.u16 d9,d4 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d2 + vmov.u16 d13,d6 + + vshr.u16 q4,q4,#8 @R + vshr.u16 q5,q5,#8 @G + vshr.u16 q6,q6,#8 @B + + vmul.u16 q0,q4,q14 @q0 = 66 *R[k] + vmla.u16 q0,q5,q15 @q0 += 129 *G[k] + vmla.u16 q0,q6,q8 @q0 += 25 *B[k] + vadd.u16 q0,q0,q10 + vshr.u16 q0,q0, #8 + + vtrn.8 q7,q0 + vst1.8 {q7}, [r0]!@write to Y to yuv420_buffer + + + + @-------------------------------------------Y ------------------------------------------ + + @---------------------------------------------Y1------------------------------------------- + + pld [r5, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld4.8 {d0,d1,d2,d3}, [r5]! @loadRGB interleavely + vld4.8 {d4,d5,d6,d7}, [r5]! 
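+ @ RGBA input: vld4.8 leaves d0/d4 = R, d1/d5 = G, d2/d6 = B, d3/d7 = A, so
+ @ this variant maps q4 from d0/d4 (R) and q6 from d2/d6 (B), the reverse of
+ @ the BGRA routine. The commented-out vmovs kept below are that BGRA-order
+ @ mapping, left in for reference.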
@loadRGB interleavely + + @vmov.u16 d8,d2 + @vmov.u16 d9,d6 + @vmov.u16 d10,d1 + @vmov.u16 d11,d5 + @vmov.u16 d12,d0 + @vmov.u16 d13,d4 + vmov.u16 d8,d0 + vmov.u16 d9,d4 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d2 + vmov.u16 d13,d6 + + vand.u16 q4,#0x00FF @R + vand.u16 q5,#0x00FF @G + vand.u16 q6,#0x00FF @B + + + + vmul.u16 q7,q4,q14 @q0 = 66 *R[k] + vmla.u16 q7,q5,q15 @q0 += 129 *G[k] + vmla.u16 q7,q6,q8 @q0 += 25 *B[k] + vadd.u16 q7,q7,q10 + vshr.u16 q7,q7, #8 + + @vmov.u16 d8,d2 + @vmov.u16 d9,d6 + @vmov.u16 d10,d1 + @vmov.u16 d11,d5 + @vmov.u16 d12,d0 + @vmov.u16 d13,d4 + vmov.u16 d8,d0 + vmov.u16 d9,d4 + vmov.u16 d10,d1 + vmov.u16 d11,d5 + vmov.u16 d12,d2 + vmov.u16 d13,d6 + + vshr.u16 q4,q4,#8 @R + vshr.u16 q5,q5,#8 @G + vshr.u16 q6,q6,#8 @B + + vmul.u16 q0,q4,q14 @q0 = 66 *R[k] + vmla.u16 q0,q5,q15 @q0 += 129 *G[k] + vmla.u16 q0,q6,q8 @q0 += 25 *B[k] + vadd.u16 q0,q0,q10 + vshr.u16 q0,q0, #8 + + vtrn.8 q7,q0 + vst1.8 {q7}, [r4]!@write to Y to yuv420_buffer + + subs r8,r8,#16 @nWidth16-- + BPL LOOP_NWIDTH16 @if nWidth16>0 + @-----------------------------------unaligned --------------------------------------- + + adds r8,r8,#16 @ + 16 - 2 + BEQ NO_UNALIGNED @in case that nWidht is multiple of 16 +LOOP_NWIDTH2: + @----------------------------------pDstRGB1--Y------------------------------------------ + @stmfd sp!, {r14} @backup r14 + + + ldr r9, [r2], #4 @loadRGB int + ldr r12, [r2], #4 @loadRGB int + + mov r10, r9,lsr #16 @copy to r10 + mov r14, r12 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @R: (rgbIn[k] & 0xF800) >> 10; + ldr r6, =0x00FF0000 + and r14, r14, r6 @R: (rgbIn[k] & 0xF800) >> 10; + add r10,r10,r14 + + mov r11, #66 @accumilator += R*66 + mul r7, r10, r11 + + mov r10, r9,lsr #8 @copy to r10 + mov r14, r12,lsl #8 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @G: + ldr r6, =0x00FF0000 + and r14, r14, r6 @G: + add r10,r10,r14 + + mov r11, #129 @accumilator += G *129 + mla r7, r10, r11, r7 + + mov r10, r9 @copy to r10 + mov r14, r12,lsl #16 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @B + ldr r6, =0x00FF0000 + and r14, r14, r6 @B + add r10,r10,r14 + + mov r11, #25 @accumilator 1 -= B *25 + mla r7, r10, r11, r7 + + ldr r6, =0x10801080 + add r7, r6 + + lsr r7, #8 + strb r7, [r0],#1 + lsr r7,#16 + strb r7, [r0],#1 + @ldmfd sp!, {r14} @load r14 + + + @----------------------------------pDstRGB2--UV------------------------------------------ + + mov r10, r9 @copy to r10 + ldr r7,=0x00008080 + mov r12,r7 + + ldr r6, =0x000000FF + and r10, r10, r6 @B: + + mov r11, #112 @accumilator += B*112 + mla r7, r10, r11, r7 + + + mov r11, #18 @accumilator -= B*18 + mul r11, r10, r11 + sub r12, r12, r11 + + + + + mov r10, r9, lsr #16 @copy to r10 + ldr r6, =0x000000FF + and r10, r10, r6 @R: (rgbIn[k] & 0xF800) >> 10; + + mov r11, #38 @accumilator -= R *38 + mul r11, r10, r11 + sub r7, r7, r11 + + mov r11, #112 @accumilator = R *112 + mla r12, r10, r11, r12 + + mov r10, r9,lsr #8 @copy to r10 + ldr r6, =0x000000FF + and r10, r10, r6 @G: (rgbIn[k] & 0x07E0) >> 5; + + mov r11, #74 @accumilator -= G*74 + mul r11, r10, r11 + sub r7, r7, r11 + + mov r11, #94 @accumilator -= G*94 + mul r11, r10, r11 + sub r12, r12, r11 + + lsr r7, #8 @ >>8 + strb r7, [r1],#1 + lsr r12, #8 @ >>8 + strb r12, [r1],#1 + + @----------------------------------pDstRGB2--Y------------------------------------------ + @stmfd sp!, {r14} @backup r14 + + + ldr r9, [r5], #4 @loadRGB int + ldr r12, [r5], #4 @loadRGB int + + mov r10, r9,lsr #16 @copy to r10 + mov r14, r12 @copy to r10 + + ldr 
r6, =0x000000FF + and r10, r10, r6 @R: (rgbIn[k] & 0xF800) >> 10; + ldr r6, =0x00FF0000 + and r14, r14, r6 @R: (rgbIn[k] & 0xF800) >> 10; + add r10,r10,r14 + + mov r11, #66 @accumilator += R*66 + mul r7, r10, r11 + + mov r10, r9,lsr #8 @copy to r10 + mov r14, r12,lsl #8 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @G: + ldr r6, =0x00FF0000 + and r14, r14, r6 @G: + add r10,r10,r14 + + mov r11, #129 @accumilator += G *129 + mla r7, r10, r11, r7 + + mov r10, r9 @copy to r10 + mov r14, r12,lsl #16 @copy to r10 + + ldr r6, =0x000000FF + and r10, r10, r6 @B + ldr r6, =0x00FF0000 + and r14, r14, r6 @B + add r10,r10,r14 + + + + + mov r11, #25 @accumilator 1 -= B *25 + mla r7, r10, r11, r7 + + ldr r6, =0x10801080 + add r7, r6 + lsr r7, #8 + + strb r7, [r4],#1 + lsr r7,#16 + strb r7, [r4],#1 + @ldmfd sp!, {r14} @load r14 + + + subs r8,r8,#2 @ nWidth2 -= 2 + BGT LOOP_NWIDTH2 @ if nWidth2>0 + + +NO_UNALIGNED: @in case that nWidht is multiple of 16 + + @----------------------------------------------------------------------------- + sub r8, r3, #16 @r8: nWidthTmp = nWidth -16 + add r0, r0, r3 @pDstY + nwidth + add r2, r2, r3, lsl #2 @pSrcRGB + nwidthx4 + add r4, r4, r3 @pDstY2 + nwidth + add r5, r5, r3, lsl #2 @pSrcRGB2 + nwidthx4 + + ldmfd sp!, {r12} + subs r12,r12,#2 @nHeight -=2 + BGT LOOP_NHEIGHT2 @if nHeight2>0 + + ldmfd sp!, {r4-r12,pc} @ backup registers + .fnend diff --git a/libswconverter/csc_interleave_memcpy_neon.s b/libswconverter/csc_interleave_memcpy_neon.s new file mode 100644 index 0000000..1ab25b6 --- /dev/null +++ b/libswconverter/csc_interleave_memcpy_neon.s @@ -0,0 +1,120 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_linear_to_tiled_crop_neon.s + * @brief SEC_OMX specific define + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +/* + * Interleave src1, src2 to dst + * + * @param dest + * dst address[out] + * + * @param src1 + * src1 address[in] + * + * @param src2 + * src2 address[in] + * + * @param src_size + * src_size or src1 + */ + + .arch armv7-a + .text + .global csc_interleave_memcpy_neon + .type csc_interleave_memcpy_neon, %function +csc_interleave_memcpy_neon: + .fnstart + + @r0 dest + @r1 src1 + @r2 src2 + @r3 src_size + @r4 + @r5 + @r6 + @r7 + @r8 temp1 + @r9 temp2 + @r10 dest_addr + @r11 src1_addr + @r12 src2_addr + @r14 i + + stmfd sp!, {r8-r12,r14} @ backup registers + + mov r10, r0 + mov r11, r1 + mov r12, r2 + mov r14, r3 + + cmp r14, #128 + blt LESS_THAN_128 + +LOOP_128: + vld1.8 {q0}, [r11]! + vld1.8 {q2}, [r11]! + vld1.8 {q4}, [r11]! + vld1.8 {q6}, [r11]! + vld1.8 {q8}, [r11]! + vld1.8 {q10}, [r11]! + vld1.8 {q12}, [r11]! + vld1.8 {q14}, [r11]! + vld1.8 {q1}, [r12]! + vld1.8 {q3}, [r12]! + vld1.8 {q5}, [r12]! + vld1.8 {q7}, [r12]! + vld1.8 {q9}, [r12]! + vld1.8 {q11}, [r12]! + vld1.8 {q13}, [r12]! + vld1.8 {q15}, [r12]! + + vst2.8 {q0, q1}, [r10]! + vst2.8 {q2, q3}, [r10]! + vst2.8 {q4, q5}, [r10]! 
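+ @ vst2.8 stores each {q(even), q(odd)} pair byte-interleaved, i.e. the output
+ @ becomes src1[0], src2[0], src1[1], src2[1], and so on; one LOOP_128 pass
+ @ turns 128 bytes of src1 plus 128 bytes of src2 into 256 interleaved bytes.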
+ vst2.8 {q6, q7}, [r10]! + vst2.8 {q8, q9}, [r10]! + vst2.8 {q10, q11}, [r10]! + vst2.8 {q12, q13}, [r10]! + vst2.8 {q14, q15}, [r10]! + + sub r14, #128 + cmp r14, #128 + bgt LOOP_128 + +LESS_THAN_128: + cmp r14, #0 + beq RESTORE_REG + +LOOP_1: + ldrb r8, [r11], #1 + ldrb r9, [r12], #1 + strb r8, [r10], #1 + strb r9, [r10], #1 + subs r14, #1 + bne LOOP_1 + +RESTORE_REG: + ldmfd sp!, {r8-r12,r15} @ restore registers + .fnend diff --git a/libswconverter/csc_linear_to_tiled_crop_neon.s b/libswconverter/csc_linear_to_tiled_crop_neon.s new file mode 100644 index 0000000..8f59826 --- /dev/null +++ b/libswconverter/csc_linear_to_tiled_crop_neon.s @@ -0,0 +1,492 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_linear_to_tiled_crop_neon.s + * @brief SEC_OMX specific define + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +/* + * Converts linear data to tiled + * Crops left, top, right, buttom + * 1. Y of YUV420P to Y of NV12T + * 2. Y of YUV420S to Y of NV12T + * 3. UV of YUV420S to UV of NV12T + * + * @param nv12t_dest + * Y or UV plane address of NV12T[out] + * + * @param yuv420_src + * Y or UV plane address of YUV420P(S)[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left. It should be even. + * + * @param top + * Crop size of top. It should be even. + * + * @param right + * Crop size of right. It should be even. + * + * @param buttom + * Crop size of buttom. It should be even. + */ + + .arch armv7-a + .text + .global csc_linear_to_tiled_crop_neon + .type csc_linear_to_tiled_crop_neon, %function +csc_linear_to_tiled_crop_neon: + .fnstart + + @r0 tiled_dest + @r1 linear_src + @r2 yuv420_width + @r3 yuv420_height + @r4 j + @r5 i + @r6 nn(tiled_addr) + @r7 mm(linear_addr) + @r8 aligned_x_size + @r9 aligned_y_size + @r10 temp1 + @r11 temp2 + @r12 temp3 + @r14 temp4 + + stmfd sp!, {r4-r12,r14} @ backup registers + + ldr r11, [sp, #44] @ top + ldr r14, [sp, #52] @ buttom + ldr r10, [sp, #40] @ left + ldr r12, [sp, #48] @ right + + sub r9, r3, r11 @ aligned_y_size = ((yuv420_height-top-buttom)>>5)<<5 + sub r9, r9, r14 + bic r9, r9, #0x1F + + sub r8, r2, r10 @ aligned_x_size = ((yuv420_width-left-right)>>6)<<6 + sub r8, r8, r12 + bic r8, r8, #0x3F + + mov r5, #0 @ i = 0 +LOOP_ALIGNED_Y_SIZE: + + mov r4, #0 @ j = 0 +LOOP_ALIGNED_X_SIZE: + + bl GET_TILED_OFFSET + + ldr r10, [sp, #44] @ r10 = top + ldr r14, [sp, #40] @ r14 = left + add r10, r5, r10 @ temp1 = linear_x_size*(i+top) + mul r10, r2, r10 + add r7, r1, r4 @ linear_addr = linear_src+j + add r7, r7, r10 @ linear_addr = linear_addr+temp1 + add r7, r7, r14 @ linear_addr = linear_addr+left + sub r10, r2, #32 + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! 
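+ @ Tile copy: each source row of the 64x32 tile is fetched as two 32-byte
+ @ loads (the second post-increments by r10 = yuv420_width-32 to reach the
+ @ next row), and the 32 rows are then stored back-to-back as one contiguous
+ @ 2048-byte tile at tiled_addr.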
@ load {linear_src+linear_x_size*1, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*2, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*3, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + add r6, r0, r6 @ tiled_addr = tiled_dest+tiled_addr + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*1} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*2} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*3} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*4, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*5, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*6, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*7, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*4} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*5} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*6} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*7} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*8, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*9, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*10, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*11, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*8} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*9} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*10} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*11} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*12, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*13, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*14, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*15, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*12} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*13} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*14} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*15} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*16, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*17, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! 
@ load {linear_src+linear_x_size*18, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*19, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*16} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*17} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*18} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*19} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*20, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*21, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*22, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*23, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*20} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*21} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*22} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*23} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*24, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*25, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*26, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + pld [r7, r2] + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*27, 64} + pld [r7, r2] + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*24} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*25} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*26} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*27} + vst1.8 {q14, q15}, [r6]! + + pld [r7, r2] + vld1.8 {q0, q1}, [r7]! @ load {linear_src+linear_x_size*28, 64} + pld [r7, r2] + vld1.8 {q2, q3}, [r7], r10 + pld [r7, r2] + vld1.8 {q4, q5}, [r7]! @ load {linear_src+linear_x_size*29, 64} + pld [r7, r2] + vld1.8 {q6, q7}, [r7], r10 + pld [r7, r2] + vld1.8 {q8, q9}, [r7]! @ load {linear_src+linear_x_size*30, 64} + pld [r7, r2] + vld1.8 {q10, q11}, [r7], r10 + vld1.8 {q12, q13}, [r7]! @ load {linear_src+linear_x_size*31, 64} + vld1.8 {q14, q15}, [r7], r10 + vst1.8 {q0, q1}, [r6]! @ store {tiled_addr+64*28} + vst1.8 {q2, q3}, [r6]! + vst1.8 {q4, q5}, [r6]! @ store {tiled_addr+64*29} + vst1.8 {q6, q7}, [r6]! + vst1.8 {q8, q9}, [r6]! @ store {tiled_addr+64*30} + vst1.8 {q10, q11}, [r6]! + vst1.8 {q12, q13}, [r6]! @ store {tiled_addr+64*31} + vst1.8 {q14, q15}, [r6]! 
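+ @ All 32 rows (64 bytes each) of this 2048-byte tile have been stored;
+ @ move on to the next 64-byte tile column.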
+ + add r4, r4, #64 @ j = j+64 + cmp r4, r8 @ j>5 + mov r10, r4, asr #6 @ temp1 = j>>6 + + and r12, r11, #0x1 @ if (temp2 & 0x1) + cmp r12, #0x1 + bne GET_TILED_OFFSET_EVEN_FORMULA_1 + +GET_TILED_OFFSET_ODD_FORMULA: + sub r6, r11, #1 @ tiled_addr = temp2-1 + + ldr r7, [sp, #40] @ left + add r12, r2, #127 @ temp3 = linear_x_size+127 + sub r12, r12, r7 + ldr r7, [sp, #48] @ right + sub r12, r12, r7 + bic r12, r12, #0x7F @ temp3 = (temp3 >>7)<<7 + mov r12, r12, asr #6 @ temp3 = temp3>>6 + mul r6, r6, r12 @ tiled_addr = tiled_addr*temp3 + add r6, r6, r10 @ tiled_addr = tiled_addr+temp1 + add r6, r6, #2 @ tiled_addr = tiled_addr+2 + bic r12, r10, #0x3 @ temp3 = (temp1>>2)<<2 + add r6, r6, r12 @ tiled_addr = tiled_addr+temp3 + mov r6, r6, lsl #11 @ tiled_addr = tiled_addr<<11 + b GET_TILED_OFFSET_RETURN + +GET_TILED_OFFSET_EVEN_FORMULA_1: + ldr r7, [sp, #44] @ top + add r12, r3, #31 @ temp3 = linear_y_size+31 + sub r12, r12, r7 + ldr r7, [sp, #52] @ buttom + sub r12, r12, r7 + bic r12, r12, #0x1F @ temp3 = (temp3>>5)<<5 + sub r12, r12, #32 @ temp3 = temp3 - 32 + cmp r5, r12 @ if (i<(temp3-32)) { + bge GET_TILED_OFFSET_EVEN_FORMULA_2 + add r12, r10, #2 @ temp3 = temp1+2 + bic r12, r12, #3 @ temp3 = (temp3>>2)<<2 + add r6, r10, r12 @ tiled_addr = temp1+temp3 + ldr r7, [sp, #40] @ left + add r12, r2, #127 @ temp3 = linear_x_size+127 + sub r12, r12, r7 + ldr r7, [sp, #48] @ right + sub r12, r12, r7 + bic r12, r12, #0x7F @ temp3 = (temp3>>7)<<7 + mov r12, r12, asr #6 @ temp3 = temp3>>6 + mul r11, r11, r12 @ tiled_y_index = tiled_y_index*temp3 + add r6, r6, r11 @ tiled_addr = tiled_addr+tiled_y_index + mov r6, r6, lsl #11 @ + b GET_TILED_OFFSET_RETURN + +GET_TILED_OFFSET_EVEN_FORMULA_2: + ldr r7, [sp, #40] @ left + add r12, r2, #127 @ temp3 = linear_x_size+127 + sub r12, r12, r7 + ldr r7, [sp, #48] @ right + sub r12, r12, r7 + bic r12, r12, #0x7F @ temp3 = (temp3>>7)<<7 + mov r12, r12, asr #6 @ temp3 = temp3>>6 + mul r6, r11, r12 @ tiled_addr = temp2*temp3 + add r6, r6, r10 @ tiled_addr = tiled_addr+temp3 + mov r6, r6, lsl #11 @ tiled_addr = tiled_addr<<11@ + +GET_TILED_OFFSET_RETURN: + mov pc, lr + + .fnend diff --git a/libswconverter/csc_linear_to_tiled_interleave_crop_neon.s b/libswconverter/csc_linear_to_tiled_interleave_crop_neon.s new file mode 100644 index 0000000..33a31da --- /dev/null +++ b/libswconverter/csc_linear_to_tiled_interleave_crop_neon.s @@ -0,0 +1,563 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_linear_to_tiled_interleave_crop_neon.s + * @brief SEC_OMX specific define + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +/* + * Converts tiled data to linear + * Crops left, top, right, buttom + * 1. Y of NV12T to Y of YUV420P + * 2. Y of NV12T to Y of YUV420S + * 3. 
UV of NV12T to UV of YUV420S + * + * @param yuv420_dest + * Y or UV plane address of YUV420[out] + * + * @param nv12t_src + * Y or UV plane address of NV12T[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left. It should be even. + * + * @param top + * Crop size of top. It should be even. + * + * @param right + * Crop size of right. It should be even. + * + * @param buttom + * Crop size of buttom. It should be even. + */ + + .arch armv7-a + .text + .global csc_linear_to_tiled_interleave_crop_neon + .type csc_linear_to_tiled_interleave_crop_neon, %function +csc_linear_to_tiled_interleave_crop_neon: + .fnstart + + @r0 tiled_dest + @r1 linear_src_u + @r2 linear_src_v + @r3 yuv420_width + @r4 yuv420_height + @r5 j + @r6 i + @r7 tiled_addr + @r8 linear_addr + @r9 aligned_x_size + @r10 aligned_y_size + @r11 temp1 + @r12 temp2 + @r14 temp3 + + stmfd sp!, {r4-r12,r14} @ backup registers + + ldr r4, [sp, #40] @ load linear_y_size to r4 + + ldr r10, [sp, #48] @ r10 = top + ldr r14, [sp, #56] @ r14 = buttom + ldr r11, [sp, #44] @ r11 = left + ldr r12, [sp, #52] @ r12 = right + + sub r10, r4, r10 @ aligned_y_size = ((yuv420_height-top-buttom)>>5)<<5 + sub r10, r10, r14 + bic r10, r10, #0x1F + sub r11, r3, r11 @ aligned_x_size = ((yuv420_width-left-right)>>6)<<6 + sub r11, r11, r12 + bic r9, r11, #0x3F + + mov r6, #0 @ i = 0 +LOOP_ALIGNED_Y_SIZE: + + mov r5, #0 @ j = 0 +LOOP_ALIGNED_X_SIZE: + + bl GET_TILED_OFFSET + + ldr r12, [sp, #48] @ r12 = top + ldr r8, [sp, #44] @ r8 = left + + mov r11, r3, asr #1 @ temp1 = (yuv420_width/2)*(i+top) + add r12, r6, r12 + mul r11, r11, r12 + add r11, r11, r5, asr #1 @ temp1 = temp1+j/2 + add r11, r11, r8, asr #1 @ temp1 = temp1+left/2 + + mov r12, r3, asr #1 @ temp2 = yuv420_width/2 + sub r12, r12, #16 @ temp2 = yuv420_width-16 + + add r8, r1, r11 @ linear_addr = linear_src_u+temp1 + add r11, r2, r11 @ temp1 = linear_src_v+temp1 + add r7, r0, r7 @ tiled_addr = tiled_dest+tiled_addr + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*1, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*2, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*3, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*1, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*2, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*3, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*1} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*2} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*3} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u+(linear_x_size/2)*4, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*5, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! 
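+ @ Each chroma tile row is built from 32 U bytes (streamed through r8) and
+ @ 32 V bytes (streamed through r11); the vst2.8 stores further down
+ @ interleave them into the 64-byte U0 V0 U1 V1 ... rows of the NV12T
+ @ chroma tile.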
@ load {linear_src_u+(linear_x_size/2)*6, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*7, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*4, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*5, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*6, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*7, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*4} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*5} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*6} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*7} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u+(linear_x_size/2)*8, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*9, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*10, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*11, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*8, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*9, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*10, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*11, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*8} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*9} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*10} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*11} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u+(linear_x_size/2)*12, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*13, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*14, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*15, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*12, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*13, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*14, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*15, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*12} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*13} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*14} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*15} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u+(linear_x_size/2)*16, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! 
@ load {linear_src_u+(linear_x_size/2)*17, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*18, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*19, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*16, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*17, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*18, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*19, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*16} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*17} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*18} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*19} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u+(linear_x_size/2)*20, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*21, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*22, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*23, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*20, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*21, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*22, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*23, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*20} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*21} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*22} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*23} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! @ load {linear_src_u+(linear_x_size/2)*24, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*25, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*26, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*27, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*24, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*25, 32} + vld1.8 {q7}, [r11], r12 + pld [r8] + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*26, 32} + vld1.8 {q11}, [r11], r12 + pld [r8, r3, asr #1] + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*27, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*24} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*25} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*26} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*27} + vst2.8 {q14, q15}, [r7]! + + pld [r8, r3] + vld1.8 {q0}, [r8]! 
@ load {linear_src_u+(linear_x_size/2)*28, 32} + vld1.8 {q2}, [r8], r12 + pld [r8, r3] + vld1.8 {q4}, [r8]! @ load {linear_src_u+(linear_x_size/2)*29, 32} + vld1.8 {q6}, [r8], r12 + pld [r11] + vld1.8 {q8}, [r8]! @ load {linear_src_u+(linear_x_size/2)*30, 32} + vld1.8 {q10}, [r8], r12 + pld [r11, r3, asr #1] + vld1.8 {q12}, [r8]! @ load {linear_src_u+(linear_x_size/2)*31, 32} + vld1.8 {q14}, [r8], r12 + pld [r11, r3] + vld1.8 {q1}, [r11]! @ load {linear_src_v+(linear_x_size/2)*28, 32} + vld1.8 {q3}, [r11], r12 + pld [r11, r3] + vld1.8 {q5}, [r11]! @ load {linear_src_v+(linear_x_size/2)*29, 32} + vld1.8 {q7}, [r11], r12 + vld1.8 {q9}, [r11]! @ load {linear_src_v+(linear_x_size/2)*30, 32} + vld1.8 {q11}, [r11], r12 + vld1.8 {q13}, [r11]! @ load {linear_src_v+(linear_x_size/2)*31, 32} + vld1.8 {q15}, [r11], r12 + vst2.8 {q0, q1}, [r7]! @ store {tiled_addr+64*28} + vst2.8 {q2, q3}, [r7]! + vst2.8 {q4, q5}, [r7]! @ store {tiled_addr+64*29} + vst2.8 {q6, q7}, [r7]! + vst2.8 {q8, q9}, [r7]! @ store {tiled_addr+64*30} + vst2.8 {q10, q11}, [r7]! + vst2.8 {q12, q13}, [r7]! @ store {tiled_addr+64*31} + vst2.8 {q14, q15}, [r7]! + + add r5, r5, #64 @ j = j+64 + cmp r5, r9 @ j>5 + mov r11, r5, asr #6 @ temp1 = j>>6 + + and r14, r12, #0x1 @ if (temp2 & 0x1) + cmp r14, #0x1 + bne GET_TILED_OFFSET_EVEN_FORMULA_1 + +GET_TILED_OFFSET_ODD_FORMULA: + + ldr r7, [sp, #48] @ r7 = left , (r14 was pushed to stack) + ldr r8, [sp, #56] @ r8 = right , (r14 was pushed to stack) + sub r14, r3, r7 + sub r14, r14, r8 + add r14, r14, #127 @ temp3 = (((yuv420_width-left-right)+127)>>7)<<7 + bic r14, r14, #0x7F @ temp3 = (temp3 >>7)<<7 + mov r14, r14, asr #6 @ temp3 = temp3>>6 + sub r7, r12, #1 @ tiled_addr = temp2-1 + mul r7, r7, r14 @ tiled_addr = tiled_addr*temp3 + add r7, r7, r11 @ tiled_addr = tiled_addr+temp1 + add r7, r7, #2 @ tiled_addr = tiled_addr+2 + bic r14, r11, #0x3 @ temp3 = (temp1>>2)<<2 + add r7, r7, r14 @ tiled_addr = tiled_addr+temp3 + mov r7, r7, lsl #11 @ tiled_addr = tiled_addr<<11 + b GET_TILED_OFFSET_RETURN + +GET_TILED_OFFSET_EVEN_FORMULA_1: + ldr r7, [sp, #52] @ r7 = top, (r14 was pushed to stack) + ldr r8, [sp, #60] @ r8 = buttom, (r14 was pushed to stack) + sub r14, r4, r7 + sub r14, r14, r8 + add r14, r14, #31 @ temp3 = (((yuv420_height-top-buttom)+31)>>5)<<5 + bic r14, r14, #0x1F @ temp3 = (temp3>>5)<<5 + sub r14, r14, #32 @ temp3 = temp3 - 32 + cmp r6, r14 @ if (i<(temp3-32)) { + bge GET_TILED_OFFSET_EVEN_FORMULA_2 + add r14, r11, #2 @ temp3 = temp1+2 + bic r14, r14, #3 @ temp3 = (temp3>>2)<<2 + add r7, r11, r14 @ tiled_addr = temp1+temp3 + ldr r8, [sp, #48] @ r8 = left, (r14 was pushed to stack) + sub r14, r3, r8 + ldr r8, [sp, #56] @ r8 = right, (r14 was pushed to stack) + sub r14, r14, r8 + add r14, r14, #127 @ temp3 = (((yuv420_width-left-right)+127)>>7)<<7 + bic r14, r14, #0x7F @ temp3 = (temp3>>7)<<7 + mov r14, r14, asr #6 @ temp3 = temp3>>6 + mul r12, r12, r14 @ tiled_y_index = tiled_y_index*temp3 + add r7, r7, r12 @ tiled_addr = tiled_addr+tiled_y_index + mov r7, r7, lsl #11 @ + b GET_TILED_OFFSET_RETURN + +GET_TILED_OFFSET_EVEN_FORMULA_2: + ldr r8, [sp, #48] @ r8 = left, (r14 was pushed to stack) + sub r14, r3, r8 + ldr r8, [sp, #56] @ r8 = right, (r14 was pushed to stack) + sub r14, r14, r8 + add r14, r14, #127 @ temp3 = (((yuv420_width-left-right)+127)>>7)<<7 + bic r14, r14, #0x7F @ temp3 = (temp3>>7)<<7 + mov r14, r14, asr #6 @ temp3 = temp3>>6 + mul r7, r12, r14 @ tiled_addr = temp2*temp3 + add r7, r7, r11 @ tiled_addr = tiled_addr+temp3 + mov r7, r7, lsl #11 @ tiled_addr = 
tiled_addr<<11@ + +GET_TILED_OFFSET_RETURN: + ldmfd sp!, {r15} @ restore registers + + .fnend + diff --git a/libswconverter/csc_tiled_to_linear_crop_neon.s b/libswconverter/csc_tiled_to_linear_crop_neon.s new file mode 100644 index 0000000..9cb81b5 --- /dev/null +++ b/libswconverter/csc_tiled_to_linear_crop_neon.s @@ -0,0 +1,701 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_tiled_to_linear_crop_neon.s + * @brief SEC_OMX specific define + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +/* + * Converts tiled data to linear + * Crops left, top, right, buttom + * 1. Y of NV12T to Y of YUV420P + * 2. Y of NV12T to Y of YUV420S + * 3. UV of NV12T to UV of YUV420S + * + * @param yuv420_dest + * Y or UV plane address of YUV420[out] + * + * @param nv12t_src + * Y or UV plane address of NV12T[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left. It should be even. + * + * @param top + * Crop size of top. It should be even. + * + * @param right + * Crop size of right. It should be even. + * + * @param buttom + * Crop size of buttom. It should be even. 
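+ *
+ * Conceptually this routine performs the following per-byte copy (scalar
+ * sketch for illustration only; tile_base() is a stand-in for the
+ * GET_TILED_OFFSET computation below, which returns the offset of the
+ * 2048-byte, 64x32 tile holding column j and row i of the NV12T plane):
+ *
+ *     for (i = top; i < yuv420_height - buttom; i++)
+ *         for (j = left; j < yuv420_width - right; j++)
+ *             yuv420_dest[(i - top) * (yuv420_width - left - right) + (j - left)] =
+ *                 nv12t_src[tile_base(j, i) + (i & 31) * 64 + (j & 63)];
+ *
+ * The code below does the same thing in 64 to 256 byte chunks, choosing a
+ * fast path based on how wide the cropped region is.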
+ */ + + .arch armv7-a + .text + .global csc_tiled_to_linear_crop_neon + .type csc_tiled_to_linear_crop_neon, %function +csc_tiled_to_linear_crop_neon: + .fnstart + + @r0 yuv420_dest + @r1 nv12t_src + @r2 yuv420_width + @r3 yuv420_height + @r4 + @r5 i + @r6 j + @r7 tiled_offset + @r8 tiled_offset1 + @r9 linear_offset + @r10 temp1 + @r11 temp2 + @r12 temp3 + @r14 temp4 + + stmfd sp!, {r4-r12,r14} @ backup registers + + ldr r12, [sp, #48] @ r12 = right + ldr r10, [sp, #40] @ r10 = left + sub r12, r2, r12 @ temp3 = yuv420_width-right@ + sub r10, r12, r10 @ temp1 = temp3-left@ + cmp r10, #256 @ if (temp1 >= 256) + blt LOOP_HEIGHT_64_START + + ldr r5, [sp, #44] @ i = top +LOOP_HEIGHT_256: + ldr r6, [sp, #40] @ j = left + mov r14, r5, asr #5 @ temp4 = i>>5 + bic r12, r6, #0xFF @ temp3 = (j>>8)<<8 + mov r12, r12, asr #6 @ temp3 = temp3>>6 + and r11, r14, #0x1 @ if (temp4 & 0x1) + cmp r11, #0x1 + bne LOOP_HEIGHT_256_GET_TILED_EVEN +LOOP_HEIGHT_256_GET_TILED_ODD: + sub r7, r14, #1 @ tiled_offset = temp4-1 + add r10, r2, #127 @ temp1 = ((yuv420_width+127)>>7)<<7 + bic r10, r10, #0x7F + mov r10, r10, asr #6 @ tiled_offset = tiled_offset*(temp1>>6) + mul r7, r7, r10 + add r7, r7, r12 @ tiled_offset = tiled_offset+temp3 + add r7, r7, #2 @ tiled_offset = tiled_offset+2 + bic r10, r12, #0x3 @ temp1 = (temp3>>2)<<2 + add r7, r7, r10 @ tiled_offset = tiled_offset+temp1 + mov r7, r7, lsl #11 @ tiled_offset = tiled_offset<<11 + add r8, r7, #4096 @ tiled_offset1 = tiled_offset+2048*2 + mov r14, #8 + b LOOP_HEIGHT_256_GET_TILED_END + +LOOP_HEIGHT_256_GET_TILED_EVEN: + add r11, r3, #31 @ temp2 = ((yuv420_height+31)>>5)<<5 + bic r11, r11, #0x1F + add r10, r5, #32 @ if ((i+32)>2)<<2 + add r7, r12, r10 @ tiled_offset = temp3+temp1@ + add r10, r2, #127 @ temp1 = ((yuv420_width+127)>>7)<<7 + bic r10, r10, #0x7F + mov r10, r10, asr #6 @ tiled_offset = tiled_offset+temp4*(temp1>>6) + mla r7, r14, r10, r7 + mov r7, r7, lsl #11 @ tiled_offset = tiled_offset<<11 + add r8, r7, #12288 @ tiled_offset1 = tiled_offset+2048*6 + mov r14, #8 + b LOOP_HEIGHT_256_GET_TILED_END + +LOOP_HEIGHT_256_GET_TILED_EVEN1: + add r10, r2, #127 @ temp1 = ((yuv420_width+127)>>7)<<7 + bic r10, r10, #0x7F + mov r10, r10, asr #6 @ tiled_offset = temp4*(temp1>>6) + mul r7, r14, r10 + add r7, r7, r12 @ tiled_offset = tiled_offset+temp3 + mov r7, r7, lsl #11 @ tiled_offset = tiled_offset<<11 + add r8, r7, #4096 @ tiled_offset1 = tiled_offset+2048*2 + mov r14, #4 + +LOOP_HEIGHT_256_GET_TILED_END: + + ldr r12, [sp, #48] @ right + ldr r9, [sp, #44] @ top + and r10, r5, #0x1F @ temp1 = i&0x1F + add r7, r7, r10, lsl #6 @ tiled_offset = tiled_offset+64*(temp1) + add r8, r8, r10, lsl #6 @ tiled_offset1 = tiled_offset1+64*(temp1) + sub r11, r2, r6 @ temp2 = yuv420_width-left(==j)-right + sub r11, r11, r12 + sub r9, r5, r9 @ linear_offset = temp2*(i-top)@ + mul r9, r11, r9 + add r12, r6, #256 @ temp3 = ((j+256)>>8)<<8@ + bic r12, r12, #0xFF + sub r12, r12, r6 @ temp3 = temp3-j@ + and r10, r6, #0x3F @ temp1 = left(==j)&0x3F + + cmp r12, #192 @ if (temp3 > 192) + ble LOOP_HEIGHT_256_LEFT_192 + add r11, r1, r7 @ r11 = nv12t_src+tiled_offset+temp1 + add r11, r11, r10 + pld [r11] + add r12, r1, r7 @ r12 = nv12t_src+tiled_offset+2048 + pld [r11, #32] + add r12, r12, #2048 + pld [r12] + cmp r10, #0 + pld [r12, #32] + stmnefd sp!, {r9-r12, r14} @ backup registers + rsbne r10, r10, #64 + blne MEMCOPY_UNDER_64 + ldmnefd sp!, {r9-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_256_64 + vld1.8 {q0, q1}, [r11]! 
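+ @ Wide fast path (cropped width >= 256): if left is not 64-byte aligned the
+ @ first 64-(left&0x3F) bytes of the row go through MEMCOPY_UNDER_64, then the
+ @ remaining bytes up to the next 256-byte boundary are copied 64 bytes per
+ @ load/store pair from the (up to four) neighbouring 2048-byte tiles at
+ @ tiled_offset, tiled_offset+2048, tiled_offset1 and tiled_offset1+2048.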
@ load {nv12t_src+tiled_offset+temp1, 64} + vld1.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r11]! @ store {yuv420_dest+linear_offset, 64} + vst1.8 {q2, q3}, [r11]! +LOOP_HEIGHT_256_LEFT_256_64: + add r11, r1, r8 @ r11 = nv12t_src+tiled_offset1 + pld [r11] + vld1.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r11, #32] + vld1.8 {q6, q7}, [r12] + add r12, r11, #2048 @ r12 = nv12t_src+tiled_offset1+2048 + pld [r12] + vld1.8 {q8, q9}, [r11]! @ load {nv12t_src+tiled_offset1, 64} + pld [r12, #32] + vld1.8 {q10, q11}, [r11] + vld1.8 {q12, q13}, [r12]! @ load {nv12t_src+tiled_offset1+2048, 64} + vld1.8 {q14, q15}, [r12] + + sub r11, r0, r10 @ r11 = yuv420_dest+linear_offset+64-temp1 + add r12, r9, #64 + add r11, r11, r12 + + vst1.8 {q4, q5}, [r11]! @ store {yuv420_dest+linear_offset+64-temp1, 64} + vst1.8 {q6, q7}, [r11]! + vst1.8 {q8, q9}, [r11]! @ store {yuv420_dest+linear_offset+128-temp1, 64} + vst1.8 {q10, q11}, [r11]! + vst1.8 {q12, q13}, [r11]! @ store {yuv420_dest+linear_offset+192-temp1, 64} + vst1.8 {q14, q15}, [r11]! + + add r9, r9, #256 + sub r9, r9, r10 + b LOOP_HEIGHT_256_LEFT_END + +LOOP_HEIGHT_256_LEFT_192: + cmp r12, #128 @ if (temp3 > 128) + ble LOOP_HEIGHT_256_LEFT_128 + add r11, r1, r7 @ r11 = nv12t_src+tiled_offset+2048+temp1 + add r11, r11, r10 + add r11, r11, #2048 + pld [r11] + add r12, r1, r8 @ r12 = nv12t_src+tiled_offset1 + pld [r11, #32] + cmp r10, #0 + pld [r12] + stmnefd sp!, {r9-r12, r14} @ backup registers + pld [r12, #32] + rsbne r10, r10, #64 + blne MEMCOPY_UNDER_64 + ldmnefd sp!, {r9-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_192_64 + vld1.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset+2048+temp1, 64} + vld1.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r11]! @ store {yuv420_dest+linear_offset, 64} + vst1.8 {q2, q3}, [r11]! +LOOP_HEIGHT_256_LEFT_192_64: + add r11, r1, r8 @ r11 = nv12t_src+tiled_offset1+2048 + add r11, r11, #2048 + pld [r11] + vld1.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset1, 64} + pld [r11, #32] + vld1.8 {q6, q7}, [r12] + vld1.8 {q8, q9}, [r11]! @ load {nv12t_src+tiled_offset1+2048, 64} + vld1.8 {q10, q11}, [r11] + + sub r11, r0, r10 @ r11 = yuv420_dest+linear_offset+64-temp1 + add r12, r9, #64 + add r11, r11, r12 + + vst1.8 {q4, q5}, [r11]! @ store {yuv420_dest+linear_offset+64-temp1, 64} + vst1.8 {q6, q7}, [r11]! + vst1.8 {q8, q9}, [r11]! @ store {yuv420_dest+linear_offset+128-temp1, 64} + vst1.8 {q10, q11}, [r11]! + + add r9, r9, #192 + sub r9, r9, r10 + b LOOP_HEIGHT_256_LEFT_END + +LOOP_HEIGHT_256_LEFT_128: + cmp r12, #64 @ if (temp3 > 64) + ble LOOP_HEIGHT_256_LEFT_64 + add r11, r1, r8 @ r11 = nv12t_src+tiled_offset1+temp1 + add r11, r11, r10 + pld [r11] + add r12, r1, r8 @ r12 = nv12t_src+tiled_offset1 + add r12, r12, #2048 + pld [r11, #32] + cmp r10, #0 + pld [r12] + stmnefd sp!, {r9-r12, r14} @ backup registers + pld [r12, #32] + rsbne r10, r10, #64 + blne MEMCOPY_UNDER_64 + ldmnefd sp!, {r9-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_128_64 + vld1.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset1+temp1, 64} + vld1.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r11]! @ store {yuv420_dest+linear_offset, 64} + vst1.8 {q2, q3}, [r11]! +LOOP_HEIGHT_256_LEFT_128_64: + vld1.8 {q4, q5}, [r12]! 
@ load {nv12t_src+tiled_offset1, 64} + vld1.8 {q6, q7}, [r12] + + sub r11, r0, r10 @ r11 = yuv420_dest+linear_offset+64-temp1 + add r12, r9, #64 + add r11, r11, r12 + + vst1.8 {q4, q5}, [r11]! @ store {yuv420_dest+linear_offset+64-temp1, 64} + vst1.8 {q6, q7}, [r11]! + + add r9, r9, #128 + sub r9, r9, r10 + b LOOP_HEIGHT_256_LEFT_END + +LOOP_HEIGHT_256_LEFT_64: + add r11, r1, r8 @ r11 = nv12t_src+tiled_offset1+2048+temp1 + add r11, r11, #2048 + add r11, r11, r10 + cmp r10, #0 + pld [r11] + stmnefd sp!, {r9-r12, r14} @ backup registers + pld [r11, #32] + rsbne r10, r10, #64 + blne MEMCOPY_UNDER_64 + ldmnefd sp!, {r9-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_64_64 + vld1.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset1+temp1, 64} + vld1.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r11]! @ store {yuv420_dest+linear_offset, 64} + vst1.8 {q2, q3}, [r11]! +LOOP_HEIGHT_256_LEFT_64_64: + add r9, r9, #64 + sub r9, r9, r10 + +LOOP_HEIGHT_256_LEFT_END: + + ldr r12, [sp, #48] @ right + add r7, r7, r14, lsl #11 @ tiled_offset = tiled_offset+temp4*2048 + add r10, r1, r7 @ r10 = nv12t_src+tiled_offset + pld [r10] + bic r6, r6, #0xFF @ j = (left>>8)<<8 + pld [r10, #32] + add r6, r6, #256 @ j = j + 256 + sub r11, r2, r12 @ temp2 = yuv420_width-right-256 + sub r11, r11, #256 + cmp r6, r11 + bgt LOOP_HEIGHT_256_WIDTH_END + +LOOP_HEIGHT_256_WIDTH: + add r12, r10, #2048 @ r12 = nv12t_src+tiled_offset+2048 + pld [r12] + vld1.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset, 64} + pld [r12, #32] + vld1.8 {q2, q3}, [r10] + + add r8, r8, r14, lsl #11 @ tiled_offset1 = tiled_offset1+temp4*2048 + add r10, r1, r8 @ r10 = nv12t_src+tiled_offset1 + pld [r10] + vld1.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r10, #32] + vld1.8 {q6, q7}, [r12] + + add r12, r10, #2048 @ r12 = nv12t_src+tiled_offset+2048 + pld [r12] + vld1.8 {q8, q9}, [r10]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r12, #32] + vld1.8 {q10, q11}, [r10] + + add r7, r7, r14, lsl #11 @ tiled_offset = tiled_offset+temp4*2048 + add r10, r1, r7 + pld [r10] + vld1.8 {q12, q13}, [r12]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r10, #32] + vld1.8 {q14, q15}, [r12] + + add r12, r0, r9 @ r12 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r12]! + vst1.8 {q2, q3}, [r12]! + vst1.8 {q4, q5}, [r12]! + vst1.8 {q6, q7}, [r12]! + vst1.8 {q8, q9}, [r12]! + vst1.8 {q10, q11}, [r12]! + vst1.8 {q12, q13}, [r12]! + vst1.8 {q14, q15}, [r12]! + add r9, r9, #256 @ linear_offset = linear_offset+256 + + add r12, r10, #2048 @ r12 = nv12t_src+tiled_offset+2048 + + add r6, r6, #256 @ j=j+256 + cmp r6, r11 @ j<=temp2 + ble LOOP_HEIGHT_256_WIDTH + +LOOP_HEIGHT_256_WIDTH_END: + + add r8, r8, r14, lsl #11 @ tiled_offset1 = tiled_offset1+temp4*2048 + ldr r14, [sp, #48] @ right + sub r11, r2, r6 @ temp2 = yuv420_width-right-j + sub r11, r11, r14 + cmp r11, #0 + beq LOOP_HEIGHT_256_RIGHT_END + cmp r11, #192 + ble LOOP_HEIGHT_256_RIGHT_192 + add r12, r10, #2048 + pld [r12] + vld1.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset} + pld [r12, #32] + vld1.8 {q2, q3}, [r10] + + add r10, r1, r8 @ r10 = nv12t_src+tiled_offset1 + pld [r10] + vld1.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048} + pld [r10, #32] + vld1.8 {q6, q7}, [r12] + + add r14, r10, #2048 @ r10 = nv12t_src+tiled_offset1+2048 + pld [r14] + vld1.8 {q8, q9}, [r10]! @ load {nv12t_src+tiled_offset1} + pld [r14, #32] + vld1.8 {q10, q11}, [r10] + + add r12, r0, r9 @ r12 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r12]! 
+ vst1.8 {q2, q3}, [r12]! + vst1.8 {q4, q5}, [r12]! + vst1.8 {q6, q7}, [r12]! + vst1.8 {q8, q9}, [r12]! + vst1.8 {q10, q11}, [r12]! + add r9, r9, #192 @ linear_offset = linear_offset+192 + + stmfd sp!, {r9-r12, r14} @ backup registers + sub r10, r11, #192 + mov r11, r14 + bl MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12, r14} @ restore registers + b LOOP_HEIGHT_256_RIGHT_END + +LOOP_HEIGHT_256_RIGHT_192: + cmp r11, #128 + ble LOOP_HEIGHT_256_RIGHT_128 + add r12, r10, #2048 + pld [r12] + vld1.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset} + pld [r12, #32] + vld1.8 {q2, q3}, [r10] + + add r14, r1, r8 @ r10 = nv12t_src+tiled_offset1 + pld [r14] + vld1.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048} + pld [r14, #32] + vld1.8 {q6, q7}, [r12] + + add r12, r0, r9 @ r12 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r12]! + vst1.8 {q2, q3}, [r12]! + vst1.8 {q4, q5}, [r12]! + vst1.8 {q6, q7}, [r12]! + add r9, r9, #128 @ linear_offset = linear_offset+128 + + stmfd sp!, {r9-r12, r14} @ backup registers + sub r10, r11, #128 + mov r11, r14 + bl MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12, r14} @ restore registers + b LOOP_HEIGHT_256_RIGHT_END + +LOOP_HEIGHT_256_RIGHT_128: + cmp r11, #64 + ble LOOP_HEIGHT_256_RIGHT_64 + add r14, r10, #2048 + pld [r14] + vld1.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset} + pld [r14, #32] + vld1.8 {q2, q3}, [r10] + + add r12, r0, r9 @ r12 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r12]! + vst1.8 {q2, q3}, [r12]! + add r9, r9, #64 @ linear_offset = linear_offset+64 + + stmfd sp!, {r9-r12, r14} @ backup registers + sub r10, r11, #64 + mov r11, r14 + bl MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12, r14} @ restore registers + b LOOP_HEIGHT_256_RIGHT_END + +LOOP_HEIGHT_256_RIGHT_64: + stmfd sp!, {r9-r12, r14} @ backup registers + mov r14, r11 + mov r11, r10 + mov r10, r14 + bl MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12, r14} @ restore registers + +LOOP_HEIGHT_256_RIGHT_END: + + ldr r14, [sp, #52] @ buttom + add r5, r5, #1 @ i=i+1 + sub r14, r3, r14 @ i= 64) + blt LOOP_HEIGHT_2_START + + ldr r5, [sp, #44] @ i = top +LOOP_HEIGHT_64: + ldr r6, [sp, #40] @ j = left + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r2 + mov r1, r3 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + ldr r9, [sp, #44] @ linear_offset = top + add r11, r6, #64 @ temp2 = ((j+64)>>6)<<6 + bic r11, r11, #0x3F + sub r11, r11, r6 @ temp2 = temp2-j + sub r9, r5, r9 @ linear_offset = temp1*(i-top) + mul r9, r9, r10 + and r14, r6, #0x3 @ temp4 = j&0x3 + add r7, r7, r14 @ tiled_offset = tiled_offset+temp4 + stmfd sp!, {r9-r12} @ backup parameters + mov r10, r11 + add r11, r1, r7 + bl MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12} @ restore parameters + add r9, r9, r11 @ linear_offset = linear_offset+temp2 + add r6, r6, r11 @ j = j+temp2@ + + add r14, r6, #64 + cmp r14, r12 + bgt LOOP_HEIGHT_64_1 + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r2 + mov r1, r3 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + add r7, r1, r7 + vld1.8 {q0, q1}, [r7]! + vld1.8 {q2, q3}, [r7] + add r7, r0, r9 + vst1.8 {q0, q1}, [r7]! + vst1.8 {q2, q3}, [r7] + add r9, r9, #64 + add r6, r6, #64 + +LOOP_HEIGHT_64_1: + add r14, r6, #64 + cmp r14, r12 + bgt LOOP_HEIGHT_64_2 + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r2 + mov r1, r3 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + add r7, r1, r7 + vld1.8 {q0, q1}, [r7]! 
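[Editor's note] MEMCOPY_UNDER_64 is defined elsewhere in this file (not visible in this hunk). From the call sites above, its contract appears to be "copy r10 (< 64) bytes from the tiled source in r11 to yuv420_dest + linear_offset (r0 + r9)". A scalar sketch of that assumed behaviour:

/* Assumed contract only, not taken verbatim from the .s file: copies the
 * sub-64-byte head or tail that the 64-byte NEON loads cannot cover. */
static void memcopy_under_64(unsigned char *dst, const unsigned char *src,
                             unsigned int count /* < 64 */)
{
    unsigned int k;
    for (k = 0; k < count; k++)
        dst[k] = src[k];
}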
+ vld1.8 {q2, q3}, [r7] + add r7, r0, r9 + vst1.8 {q0, q1}, [r7]! + vst1.8 {q2, q3}, [r7] + add r9, r9, #64 + add r6, r6, #64 + +LOOP_HEIGHT_64_2: + cmp r6, r12 + bge LOOP_HEIGHT_64_3 + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r2 + mov r1, r3 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + sub r11, r12, r6 + stmfd sp!, {r9-r12} @ backup parameters + mov r10, r11 + add r11, r1, r7 + bl MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12} @ restore parameters + +LOOP_HEIGHT_64_3: + + ldr r14, [sp, #52] @ buttom + add r5, r5, #1 @ i=i+1 + sub r14, r3, r14 @ i>6)<<6 + bic r11, r11, #0x3F + sub r11, r11, r6 @ temp2 = temp2-j + sub r9, r5, r9 @ linear_offset = temp1*(i-top) + mul r9, r10, r9 + add r9, r0, r9 @ linear_offset = linear_dst+linear_offset +LOOP_HEIGHT_2_WIDTH: + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r2 + mov r1, r3 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + + and r14, r6, #0x3 @ temp4 = j&0x3@ + add r7, r7, r14 @ tiled_offset = tiled_offset+temp4@ + add r7, r1, r7 + + ldrh r14, [r7] + strh r14, [r9], #2 + + ldr r14, [sp, #48] @ right + add r6, r6, #2 @ j=j+2 + sub r14, r2, r14 @ j= 256) + blt LOOP_HEIGHT_64_START + + ldr r5, [sp, #48] @ top +LOOP_HEIGHT_256: + ldr r6, [sp, #44] @ j = left + mov r14, r5, asr #5 @ temp4 = i>>5 + bic r12, r6, #0xFF @ temp3 = (j>>8)<<8 + mov r12, r12, asr #6 @ temp3 = temp3>>6 + and r11, r14, #0x1 @ if (temp4 & 0x1) + cmp r11, #0x1 + bne LOOP_HEIGHT_256_GET_TILED_EVEN +LOOP_HEIGHT_256_GET_TILED_ODD: + sub r7, r14, #1 @ tiled_offset = temp4-1 + add r10, r3, #127 @ temp1 = ((yuv420_width+127)>>7)<<7 + bic r10, r10, #0x7F + mov r10, r10, asr #6 @ tiled_offset = tiled_offset*(temp1>>6) + mul r7, r7, r10 + add r7, r7, r12 @ tiled_offset = tiled_offset+temp3 + add r7, r7, #2 @ tiled_offset = tiled_offset+2 + bic r10, r12, #0x3 @ temp1 = (temp3>>2)<<2 + add r7, r7, r10 @ tiled_offset = tiled_offset+temp1 + mov r7, r7, lsl #11 @ tiled_offset = tiled_offset<<11 + add r8, r7, #4096 @ tiled_offset1 = tiled_offset+2048*2 + mov r14, #8 + b LOOP_HEIGHT_256_GET_TILED_END + +LOOP_HEIGHT_256_GET_TILED_EVEN: + add r11, r4, #31 @ temp2 = ((yuv420_height+31)>>5)<<5 + bic r11, r11, #0x1F + add r10, r5, #32 @ if ((i+32)>2)<<2 + add r7, r12, r10 @ tiled_offset = temp3+temp1@ + add r10, r3, #127 @ temp1 = ((yuv420_width+127)>>7)<<7 + bic r10, r10, #0x7F + mov r10, r10, asr #6 @ tiled_offset = tiled_offset+temp4*(temp1>>6) + mla r7, r14, r10, r7 + mov r7, r7, lsl #11 @ tiled_offset = tiled_offset<<11 + add r8, r7, #12288 @ tiled_offset1 = tiled_offset+2048*6 + mov r14, #8 + b LOOP_HEIGHT_256_GET_TILED_END + +LOOP_HEIGHT_256_GET_TILED_EVEN1: + add r10, r3, #127 @ temp1 = ((yuv420_width+127)>>7)<<7 + bic r10, r10, #0x7F + mov r10, r10, asr #6 @ tiled_offset = temp4*(temp1>>6) + mul r7, r14, r10 + add r7, r7, r12 @ tiled_offset = tiled_offset+temp3 + mov r7, r7, lsl #11 @ tiled_offset = tiled_offset<<11 + add r8, r7, #4096 @ tiled_offset1 = tiled_offset+2048*2 + mov r14, #4 + +LOOP_HEIGHT_256_GET_TILED_END: + + ldr r12, [sp, #52] @ right + ldr r9, [sp, #48] @ top + and r10, r5, #0x1F @ temp1 = i&0x1F + add r7, r7, r10, lsl #6 @ tiled_offset = tiled_offset+64*(temp1) + add r8, r8, r10, lsl #6 @ tiled_offset1 = tiled_offset1+64*(temp1) + sub r11, r3, r6 @ temp2 = yuv420_width-left(==j)-right + sub r11, r11, r12 + sub r9, r5, r9 @ linear_offset = temp2*(i-top)/2@ + mul r9, r11, r9 + mov r9, r9, asr #1 + add r12, r6, #256 @ temp3 = 
((j+256)>>8)<<8@ + bic r12, r12, #0xFF + sub r12, r12, r6 @ temp3 = temp3-j@ + and r10, r6, #0x3F @ temp1 = left(==j)&0x3F + + cmp r12, #192 @ if (temp3 > 192) + ble LOOP_HEIGHT_256_LEFT_192 + add r11, r2, r7 @ r11 = nv12t_src+tiled_offset+temp1 + add r11, r11, r10 + pld [r11] + add r12, r2, r7 @ r12 = nv12t_src+tiled_offset+2048 + pld [r11, #32] + add r12, r12, #2048 + pld [r12] + cmp r10, #0 + pld [r12, #32] + stmnefd sp!, {r8-r12, r14} @ backup registers + rsbne r10, r10, #64 + blne INTERLEAVED_MEMCOPY_UNDER_64 + ldmnefd sp!, {r8-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_256_64 + vld2.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset+temp1, 64} + vld2.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r11]! + vst1.8 {q2}, [r11]! + add r11, r1, r9 @ r11 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r11]! + vst1.8 {q3}, [r11]! +LOOP_HEIGHT_256_LEFT_256_64: + add r11, r2, r8 @ r11 = nv12t_src+tiled_offset1 + pld [r11] + vld2.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r11, #32] + vld2.8 {q6, q7}, [r12] + add r12, r11, #2048 @ r12 = nv12t_src+tiled_offset1+2048 + pld [r12] + vld2.8 {q8, q9}, [r11]! @ load {nv12t_src+tiled_offset1, 64} + pld [r12, #32] + vld2.8 {q10, q11}, [r11] + vld2.8 {q12, q13}, [r12]! @ load {nv12t_src+tiled_offset1+2048, 64} + vld2.8 {q14, q15}, [r12] + + add r11, r0, r9 @ r11 = yuv420_u_dest+linear_offset+32-temp1/2 + add r11, r11, #32 + sub r11, r11, r10, asr #1 + vst1.8 {q4}, [r11]! + vst1.8 {q6}, [r11]! + vst1.8 {q8}, [r11]! + vst1.8 {q10}, [r11]! + vst1.8 {q12}, [r11]! + vst1.8 {q14}, [r11]! + + add r11, r1, r9 @ r11 = yuv420_v_dest+linear_offset+32-temp1/2 + add r11, r11, #32 + sub r11, r11, r10, asr #1 + vst1.8 {q5}, [r11]! + vst1.8 {q7}, [r11]! + vst1.8 {q9}, [r11]! + vst1.8 {q11}, [r11]! + vst1.8 {q13}, [r11]! + vst1.8 {q15}, [r11]! + + add r9, r9, #128 + sub r9, r9, r10, asr #1 + b LOOP_HEIGHT_256_LEFT_END + +LOOP_HEIGHT_256_LEFT_192: + cmp r12, #128 @ if (temp3 > 128) + ble LOOP_HEIGHT_256_LEFT_128 + add r11, r2, r7 @ r11 = nv12t_src+tiled_offset+2048+temp1 + add r11, r11, r10 + add r11, r11, #2048 + pld [r11] + add r12, r2, r8 @ r12 = nv12t_src+tiled_offset1 + pld [r11, #32] + cmp r10, #0 + pld [r12] + stmnefd sp!, {r8-r12, r14} @ backup registers + pld [r12, #32] + rsbne r10, r10, #64 + blne INTERLEAVED_MEMCOPY_UNDER_64 + ldmnefd sp!, {r8-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_192_64 + vld2.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset+2048+temp1, 64} + vld2.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r11]! + vst1.8 {q2}, [r11]! + add r11, r1, r9 @ r11 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r11]! + vst1.8 {q3}, [r11]! +LOOP_HEIGHT_256_LEFT_192_64: + add r11, r2, r8 @ r11 = nv12t_src+tiled_offset1+2048 + add r11, r11, #2048 + pld [r11] + vld2.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset1, 64} + pld [r11, #32] + vld2.8 {q6, q7}, [r12] + vld2.8 {q8, q9}, [r11]! @ load {nv12t_src+tiled_offset1+2048, 64} + vld2.8 {q10, q11}, [r11] + + add r11, r0, r9 @ r11 = yuv420_u_dest+linear_offset+32-temp1/2 + add r11, r11, #32 + sub r11, r11, r10, asr #1 + vst1.8 {q4}, [r11]! + vst1.8 {q6}, [r11]! + vst1.8 {q8}, [r11]! + vst1.8 {q10}, [r11]! + + add r11, r1, r9 @ r11 = yuv420_v_dest+linear_offset+32-temp1/2 + add r11, r11, #32 + sub r11, r11, r10, asr #1 + vst1.8 {q5}, [r11]! + vst1.8 {q7}, [r11]! + vst1.8 {q9}, [r11]! + vst1.8 {q11}, [r11]! 
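[Editor's note] The vld2.8/vst1.8 pairs above split the interleaved NV12T chroma into separate planes: vld2.8 places the even bytes (U) in the even-numbered q registers, which are stored to yuv420_u_dest, and the odd bytes (V) in the odd-numbered ones, stored to yuv420_v_dest. INTERLEAVED_MEMCOPY_UNDER_64 is assumed to do the same for the sub-64-byte edges. A scalar equivalent of this split (the exact prototype of the csc_deinterleave_memcpy() helper used by the plain-C path later in this patch is an assumption):

/* U is the even byte and V the odd byte of each NV12 chroma pair. */
static void deinterleave_uv(unsigned char *u_dst, unsigned char *v_dst,
                            const unsigned char *uv_src, unsigned int uv_bytes)
{
    unsigned int k;
    for (k = 0; k + 1 < uv_bytes; k += 2) {
        u_dst[k >> 1] = uv_src[k];
        v_dst[k >> 1] = uv_src[k + 1];
    }
}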
+ + add r9, r9, #96 + sub r9, r9, r10, asr #1 + b LOOP_HEIGHT_256_LEFT_END + +LOOP_HEIGHT_256_LEFT_128: + cmp r12, #64 @ if (temp3 > 64) + ble LOOP_HEIGHT_256_LEFT_64 + add r11, r2, r8 @ r11 = nv12t_src+tiled_offset1+temp1 + add r11, r11, r10 + pld [r11] + add r12, r2, r8 @ r12 = nv12t_src+tiled_offset1 + add r12, r12, #2048 + pld [r11, #32] + cmp r10, #0 + pld [r12] + stmnefd sp!, {r8-r12, r14} @ backup registers + pld [r12, #32] + rsbne r10, r10, #64 + blne INTERLEAVED_MEMCOPY_UNDER_64 + ldmnefd sp!, {r8-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_128_64 + vld2.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset1+temp1, 64} + vld2.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r11]! + vst1.8 {q2}, [r11]! + add r11, r1, r9 @ r11 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r11]! + vst1.8 {q3}, [r11]! +LOOP_HEIGHT_256_LEFT_128_64: + vld2.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset1, 64} + vld2.8 {q6, q7}, [r12] + + add r11, r0, r9 @ r11 = yuv420_u_dest+linear_offset+32-temp1/2 + add r11, r11, #32 + sub r11, r11, r10, asr #1 + vst1.8 {q4}, [r11]! + vst1.8 {q6}, [r11]! + + add r11, r1, r9 @ r11 = yuv420_v_dest+linear_offset+32-temp1/2 + add r11, r11, #32 + sub r11, r11, r10, asr #1 + vst1.8 {q5}, [r11]! + vst1.8 {q7}, [r11]! + + add r9, r9, #64 + sub r9, r9, r10, asr #1 + b LOOP_HEIGHT_256_LEFT_END + +LOOP_HEIGHT_256_LEFT_64: + add r11, r2, r8 @ r11 = nv12t_src+tiled_offset1+2048+temp1 + add r11, r11, #2048 + add r11, r11, r10 + cmp r10, #0 + pld [r11] + stmnefd sp!, {r8-r12, r14} @ backup registers + pld [r11, #32] + rsbne r10, r10, #64 + blne INTERLEAVED_MEMCOPY_UNDER_64 + ldmnefd sp!, {r8-r12, r14} @ restore registers + bne LOOP_HEIGHT_256_LEFT_64_64 + vld2.8 {q0, q1}, [r11]! @ load {nv12t_src+tiled_offset1+temp1, 64} + vld2.8 {q2, q3}, [r11] + add r11, r0, r9 @ r11 = yuv420_dest+linear_offset + vst1.8 {q0, q1}, [r11]! @ store {yuv420_dest+linear_offset, 64} + vst1.8 {q2, q3}, [r11]! +LOOP_HEIGHT_256_LEFT_64_64: + add r9, r9, #32 + sub r9, r9, r10, asr #1 + +LOOP_HEIGHT_256_LEFT_END: + + ldr r12, [sp, #52] @ right + add r7, r7, r14, lsl #11 @ tiled_offset = tiled_offset+temp4*2048 + add r10, r2, r7 @ r10 = nv12t_src+tiled_offset + pld [r10] + bic r6, r6, #0xFF @ j = (left>>8)<<8 + pld [r10, #32] + add r6, r6, #256 @ j = j + 256 + sub r11, r3, r12 @ temp2 = yuv420_width-right-256 + sub r11, r11, #256 + cmp r6, r11 + bgt LOOP_HEIGHT_256_WIDTH_END + +LOOP_HEIGHT_256_WIDTH: + add r12, r10, #2048 @ r12 = nv12t_src+tiled_offset+2048 + pld [r12] + vld2.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset, 64} + pld [r12, #32] + vld2.8 {q2, q3}, [r10] + + add r8, r8, r14, lsl #11 @ tiled_offset1 = tiled_offset1+temp4*2048 + add r10, r2, r8 @ r10 = nv12t_src+tiled_offset1 + pld [r10] + vld2.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r10, #32] + vld2.8 {q6, q7}, [r12] + + add r12, r10, #2048 @ r12 = nv12t_src+tiled_offset+2048 + pld [r12] + vld2.8 {q8, q9}, [r10]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r12, #32] + vld2.8 {q10, q11}, [r10] + + add r7, r7, r14, lsl #11 @ tiled_offset = tiled_offset+temp4*2048 + add r10, r2, r7 + pld [r10] + vld2.8 {q12, q13}, [r12]! @ load {nv12t_src+tiled_offset+2048, 64} + pld [r10, #32] + vld2.8 {q14, q15}, [r12] + + add r12, r0, r9 @ r12 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r12]! + vst1.8 {q2}, [r12]! + vst1.8 {q4}, [r12]! + vst1.8 {q6}, [r12]! + vst1.8 {q8}, [r12]! + vst1.8 {q10}, [r12]! + vst1.8 {q12}, [r12]! + vst1.8 {q14}, [r12]! 
+ add r12, r1, r9 @ r12 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r12]! + vst1.8 {q3}, [r12]! + vst1.8 {q5}, [r12]! + vst1.8 {q7}, [r12]! + vst1.8 {q9}, [r12]! + vst1.8 {q11}, [r12]! + vst1.8 {q13}, [r12]! + vst1.8 {q15}, [r12]! + add r9, r9, #128 @ linear_offset = linear_offset+128 + + add r12, r10, #2048 @ r12 = nv12t_src+tiled_offset+2048 + + add r6, r6, #256 @ j=j+256 + cmp r6, r11 @ j<=temp2 + ble LOOP_HEIGHT_256_WIDTH + +LOOP_HEIGHT_256_WIDTH_END: + + add r8, r8, r14, lsl #11 @ tiled_offset1 = tiled_offset1+temp4*2048 + ldr r14, [sp, #52] @ right + sub r11, r3, r6 @ temp2 = yuv420_width-right-j + sub r11, r11, r14 + cmp r11, #0 + beq LOOP_HEIGHT_256_RIGHT_END + cmp r11, #192 + ble LOOP_HEIGHT_256_RIGHT_192 + add r12, r10, #2048 + pld [r12] + vld2.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset} + pld [r12, #32] + vld2.8 {q2, q3}, [r10] + + add r10, r2, r8 @ r10 = nv12t_src+tiled_offset1 + pld [r10] + vld2.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048} + pld [r10, #32] + vld2.8 {q6, q7}, [r12] + + add r14, r10, #2048 @ r10 = nv12t_src+tiled_offset1+2048 + pld [r14] + vld2.8 {q8, q9}, [r10]! @ load {nv12t_src+tiled_offset1} + pld [r14, #32] + vld2.8 {q10, q11}, [r10] + + add r12, r0, r9 @ r12 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r12]! + vst1.8 {q2}, [r12]! + vst1.8 {q4}, [r12]! + vst1.8 {q6}, [r12]! + vst1.8 {q8}, [r12]! + vst1.8 {q10}, [r12]! + add r12, r1, r9 @ r12 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r12]! + vst1.8 {q3}, [r12]! + vst1.8 {q5}, [r12]! + vst1.8 {q7}, [r12]! + vst1.8 {q9}, [r12]! + vst1.8 {q11}, [r12]! + add r9, r9, #96 @ linear_offset = linear_offset+96 + + stmfd sp!, {r8-r12, r14} @ backup registers + sub r10, r11, #192 + mov r11, r14 + bl INTERLEAVED_MEMCOPY_UNDER_64 + ldmfd sp!, {r8-r12, r14} @ restore registers + b LOOP_HEIGHT_256_RIGHT_END + +LOOP_HEIGHT_256_RIGHT_192: + cmp r11, #128 + ble LOOP_HEIGHT_256_RIGHT_128 + add r12, r10, #2048 + pld [r12] + vld2.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset} + pld [r12, #32] + vld2.8 {q2, q3}, [r10] + + add r14, r2, r8 @ r10 = nv12t_src+tiled_offset1 + pld [r14] + vld2.8 {q4, q5}, [r12]! @ load {nv12t_src+tiled_offset+2048} + pld [r14, #32] + vld2.8 {q6, q7}, [r12] + + add r12, r0, r9 @ r12 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r12]! + vst1.8 {q2}, [r12]! + vst1.8 {q4}, [r12]! + vst1.8 {q6}, [r12]! + add r12, r1, r9 @ r12 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r12]! + vst1.8 {q3}, [r12]! + vst1.8 {q5}, [r12]! + vst1.8 {q7}, [r12]! + add r9, r9, #64 @ linear_offset = linear_offset+64 + + stmfd sp!, {r8-r12, r14} @ backup registers + sub r10, r11, #128 + mov r11, r14 + bl INTERLEAVED_MEMCOPY_UNDER_64 + ldmfd sp!, {r8-r12, r14} @ restore registers + b LOOP_HEIGHT_256_RIGHT_END + +LOOP_HEIGHT_256_RIGHT_128: + cmp r11, #64 + ble LOOP_HEIGHT_256_RIGHT_64 + add r14, r10, #2048 + pld [r14] + vld2.8 {q0, q1}, [r10]! @ load {nv12t_src+tiled_offset} + pld [r14, #32] + vld2.8 {q2, q3}, [r10] + + add r12, r0, r9 @ r12 = yuv420_u_dest+linear_offset + vst1.8 {q0}, [r12]! + vst1.8 {q2}, [r12]! + add r12, r1, r9 @ r12 = yuv420_v_dest+linear_offset + vst1.8 {q1}, [r12]! + vst1.8 {q3}, [r12]! 
+ add r9, r9, #32 @ linear_offset = linear_offset+32 + + stmfd sp!, {r8-r12, r14} @ backup registers + sub r10, r11, #64 + mov r11, r14 + bl INTERLEAVED_MEMCOPY_UNDER_64 + ldmfd sp!, {r8-r12, r14} @ restore registers + b LOOP_HEIGHT_256_RIGHT_END + +LOOP_HEIGHT_256_RIGHT_64: + stmfd sp!, {r8-r12, r14} @ backup registers + mov r14, r11 + mov r11, r10 + mov r10, r14 + bl INTERLEAVED_MEMCOPY_UNDER_64 + ldmfd sp!, {r8-r12, r14} @ restore registers + +LOOP_HEIGHT_256_RIGHT_END: + + ldr r14, [sp, #56] @ buttom + add r5, r5, #1 @ i=i+1 + sub r14, r4, r14 @ i= 64) + blt LOOP_HEIGHT_2_START + + ldr r5, [sp, #48] @ i = top +LOOP_HEIGHT_64: + ldr r6, [sp, #44] @ j = left + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r3 + mov r1, r4 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + ldr r9, [sp, #48] @ linear_offset = top + ldr r12, [sp, #52] @ r12 = right + add r11, r6, #64 @ temp2 = ((j+64)>>6)<<6 + bic r11, r11, #0x3F + sub r11, r11, r6 @ temp2 = temp2-j + sub r12, r3, r12 @ temp3 = yuv420_width-right + sub r14, r12, r6 @ temp4 = temp3-left + sub r9, r5, r9 @ linear_offset = temp4*(i-top)/2 + mul r9, r9, r14 + mov r9, r9, asr #1 + and r14, r6, #0x3 @ temp4 = j&0x3 + add r7, r7, r14 @ tiled_offset = tiled_offset+temp4 + stmfd sp!, {r9-r12} @ backup parameters + mov r10, r11 + add r11, r2, r7 + bl INTERLEAVED_MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12} @ restore parameters + add r9, r9, r11, asr #1 @ linear_offset = linear_offset+temp2/2 + add r6, r6, r11 @ j = j+temp2@ + + add r14, r6, #64 + cmp r14, r12 + bgt LOOP_HEIGHT_64_1 + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r3 + mov r1, r4 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + add r7, r2, r7 + vld2.8 {q0, q1}, [r7]! + vld2.8 {q2, q3}, [r7] + add r7, r0, r9 + vst1.8 {q0}, [r7]! + vst1.8 {q2}, [r7] + add r7, r1, r9 + vst1.8 {q1}, [r7]! + vst1.8 {q3}, [r7] + add r9, r9, #32 + add r6, r6, #64 + +LOOP_HEIGHT_64_1: + add r14, r6, #64 + cmp r14, r12 + bgt LOOP_HEIGHT_64_2 + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r3 + mov r1, r4 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + add r7, r2, r7 + vld2.8 {q0, q1}, [r7]! + vld2.8 {q2, q3}, [r7] + add r7, r0, r9 + vst1.8 {q0}, [r7]! + vst1.8 {q2}, [r7] + add r7, r1, r9 + vst1.8 {q1}, [r7]! + vst1.8 {q3}, [r7] + add r9, r9, #32 + add r6, r6, #64 + +LOOP_HEIGHT_64_2: + cmp r6, r12 + bge LOOP_HEIGHT_64_3 + stmfd sp!, {r0-r3, r12} @ backup parameters + mov r0, r3 + mov r1, r4 + mov r2, r6 + mov r3, r5 + bl tile_4x2_read_asm + mov r7, r0 + ldmfd sp!, {r0-r3, r12} @ restore parameters + sub r11, r12, r6 + stmfd sp!, {r9-r12} @ backup parameters + mov r10, r11 + add r11, r2, r7 + bl INTERLEAVED_MEMCOPY_UNDER_64 + ldmfd sp!, {r9-r12} @ restore parameters + +LOOP_HEIGHT_64_3: + + ldr r14, [sp, #56] @ buttom + add r5, r5, #1 @ i=i+1 + sub r14, r4, r14 @ i> 4) << 4 + mov r11, r11, asr #4 + mov r11, r11, lsl #4 + + mov r5, #0 +LOOP_MAIN_ALIGNED_HEIGHT: + mul r8, r11, r5 @ src_offset = tiled_width * i + mov r6, #0 + add r8, r2, r8 @ src_offset = uv_src + src_offset +LOOP_MAIN_ALIGNED_WIDTH: + mov r12, r3, asr #1 @ temp1 = (width >> 1) * i + (j >> 1) + mul r12, r12, r5 + + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld2.8 {q0, q1}, [r8]! + add r12, r12, r6, asr #1 + vld2.8 {q2, q3}, [r8]! 
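[Editor's note] The code that follows (its file header was lost in this hunk) is the MFC 6.x deinterleaving UV routine: judging from the register comments, the interleaved chroma plane is stored as 16-byte-wide, 8-line-high blocks laid out row-major with tiled_width = ((width + 15) >> 4) << 4, and each 16-byte source row is split into 8-byte U and V rows at destination offset (width >> 1)*i + (j >> 1). A small sketch of the assumed source-address mapping, where width is the interleaved UV width in bytes (inferred from the width >> 1 destination stride):

/* Sketch of the assumed MFC 6.x UV source layout: byte at column j, line i. */
static unsigned int mfc6_uv_src_offset(unsigned int width,
                                       unsigned int i, unsigned int j)
{
    unsigned int tiled_width = ((width + 15) >> 4) << 4;
    unsigned int band  = i & ~0x7u;   /* start line of the 8-line band  */
    unsigned int line  = i & 0x7u;    /* line inside the band           */
    unsigned int block = j >> 4;      /* 16-byte-wide block index       */

    return tiled_width * band + block * 128 + line * 16 + (j & 0xF);
}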
+ add r7, r0, r12 @ dst_offset = u_dst + temp1 + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld2.8 {q4, q5}, [r8]! + mov r14, r3, asr #1 @ temp2 = width / 2 + vld2.8 {q6, q7}, [r8]! + + vst1.8 {d0}, [r7], r14 + vst1.8 {d1}, [r7], r14 + vst1.8 {d4}, [r7], r14 + vst1.8 {d5}, [r7], r14 + vst1.8 {d8}, [r7], r14 + vst1.8 {d9}, [r7], r14 + vst1.8 {d12}, [r7], r14 + vst1.8 {d13}, [r7], r14 + + add r7, r1, r12 @ dst_offset = v_dst + temp1 + + vst1.8 {d2}, [r7], r14 + vst1.8 {d3}, [r7], r14 + vst1.8 {d6}, [r7], r14 + vst1.8 {d7}, [r7], r14 + vst1.8 {d10}, [r7], r14 + vst1.8 {d11}, [r7], r14 + add r6, r6, #16 + vst1.8 {d14}, [r7], r14 + cmp r6, r10 + vst1.8 {d15}, [r7], r14 + blt LOOP_MAIN_ALIGNED_WIDTH + +MAIN_REMAIN_WIDTH_START: + cmp r10, r3 @ if (aligned_width != width) { + beq MAIN_REMAIN_WIDTH_END + stmfd sp!, {r0-r2,r4} @ backup registers + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 3) + add r8, r8, r6, lsl #3 + add r8, r2, r8 @ r8 = uv_src + src_offset + mov r12, r3, asr #1 @ temp1 = (width >> 1) * i + (j >> 1) + mul r12, r12, r5 + add r12, r12, r6, asr #1 + add r7, r0, r12 @ r7 = u_dst + temp1 + add r12, r1, r12 @ r12 = v_dst + temp1 + sub r14, r3, r6 @ r14 = (width - j) / 2 + mov r14, r14, asr #1 + + mov r4, #0 +LOOP_MAIN_REMAIN_HEIGHT: + mov r0, #0 @ r0 is index in de-interleave +LOOP_MAIN_REMAIN_WIDTH: + ldrb r1, [r8], #1 + ldrb r2, [r8], #1 + strb r1, [r7], #1 + strb r2, [r12], #1 + add r0, #1 + cmp r0, r14 + blt LOOP_MAIN_REMAIN_WIDTH + + sub r8, r8, r14, lsl #1 + sub r7, r7, r14 + sub r12, r12, r14 + add r8, r8, #16 + add r7, r7, r3, asr #1 + add r12, r12, r3, asr #1 + + add r4, #1 + cmp r4, #8 + blt LOOP_MAIN_REMAIN_HEIGHT + ldmfd sp!, {r0-r2,r4} @ restore registers +MAIN_REMAIN_WIDTH_END: + + add r5, r5, #8 + cmp r5, r9 + blt LOOP_MAIN_ALIGNED_HEIGHT + +REMAIN_HEIGHT_START: + cmp r9, r4 @ if (aligned_height != height) { + beq REMAIN_HEIGHT_END + + mov r6, #0 +LOOP_REMAIN_HEIGHT_WIDTH16: + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 3) + add r8, r8, r6, lsl #3 + add r8, r2, r8 @ src_offset = uv_src + src_offset + + mov r12, r3, asr #1 @ temp1 = (width >> 1) * i + (j >> 1) + mul r12, r12, r5 + add r12, r12, r6, asr #1 + add r7, r0, r12 @ r7 = u_dst + temp1 + add r12, r1, r12 @ r12 = v_dst + temp1 + mov r14, r3, asr #1 @ temp2 = width / 2 + + stmfd sp!, {r0-r1} @ backup registers + mov r0, #0 + sub r1, r4, r9 +LOOP_REMAIN_HEIGHT_WIDTH16_HEIGHT1: + vld2.8 {d0, d1}, [r8]! 
+ vst1.8 {d0}, [r7], r14 + vst1.8 {d1}, [r12], r14 + + add r0, r0, #1 + cmp r0, r1 + blt LOOP_REMAIN_HEIGHT_WIDTH16_HEIGHT1 + ldmfd sp!, {r0-r1} @ restore registers + + add r6, r6, #16 + cmp r6, r10 + blt LOOP_REMAIN_HEIGHT_WIDTH16 + +REMAIN_HEIGHT_REMAIN_WIDTH_START: + cmp r10, r3 + beq REMAIN_HEIGHT_REMAIN_WIDTH_END + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 3) + add r8, r8, r6, lsl #3 + add r8, r2, r8 @ src_offset = uv_src + src_offset + + mov r12, r3, asr #1 @ temp1 = (width >> 1) * i + (j >> 1) + mul r12, r12, r5 + add r12, r12, r6, asr #1 + add r7, r0, r12 @ r7 = u_dst + temp1 + add r12, r1, r12 @ r12 = v_dst + temp1 + sub r14, r3, r6 @ r14 = (width - j) /2 + mov r14, r14, asr #1 + + stmfd sp!, {r0-r2,r4-r5} @ backup registers + mov r0, #0 + sub r1, r4, r9 +LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1: + + mov r4, #0 +LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1_WIDTHx: + ldrb r2, [r8], #1 + ldrb r5, [r8], #1 + strb r2, [r7], #1 + strb r5, [r12], #1 + add r4, #1 + cmp r4, r14 + blt LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1_WIDTHx + + sub r8, r8, r14, lsl #1 + sub r7, r7, r14 + sub r12, r12, r14 + add r8, r8, #16 + add r7, r7, r3, asr #1 + add r12, r12, r3, asr #1 + + add r0, r0, #1 + cmp r0, r1 + blt LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1 + ldmfd sp!, {r0-r2,r4-r5} @ restore registers + +REMAIN_HEIGHT_REMAIN_WIDTH_END: + +REMAIN_HEIGHT_END: + +RESTORE_REG: + ldmfd sp!, {r4-r12,r15} @ restore registers + + .fnend diff --git a/libswconverter/csc_tiled_to_linear_uv_neon.s b/libswconverter/csc_tiled_to_linear_uv_neon.s new file mode 100644 index 0000000..c90d1f3 --- /dev/null +++ b/libswconverter/csc_tiled_to_linear_uv_neon.s @@ -0,0 +1,217 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_tiled_to_linear_uv.s + * @brief SEC_OMX specific define. It support MFC 6.x tiled. + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +/* + * Converts tiled data to linear for mfc 6.x tiled + * 1. 
uv of nv12t to y of yuv420s + * + * @param dst + * uv address of yuv420s[out] + * + * @param src + * uv address of nv12t[in] + * + * @param yuv420_width + * real width of yuv420s[in] + * + * @param yuv420_height + * real height of yuv420s[in] + * + */ + .arch armv7-a + .text + .global csc_tiled_to_linear_uv_neon + .type csc_tiled_to_linear_uv_neon, %function +csc_tiled_to_linear_uv_neon: + .fnstart + + .equ CACHE_LINE_SIZE, 64 + .equ PRE_LOAD_OFFSET, 6 + + @r0 y_dst + @r1 y_src + @r2 width + @r3 height + @r4 temp3 + @r5 i + @r6 j + @r7 dst_offset + @r8 src_offset + @r9 aligned_height + @r10 aligned_width + @r11 tiled_width + @r12 temp1 + @r14 temp2 + + stmfd sp!, {r4-r12,r14} @ backup registers + ldr r4, [sp, #40] @ r4 = height + + bic r9, r3, #0x7 @ aligned_height = height & (~0xF) + bic r10, r2, #0xF @ aligned_width = width & (~0xF) + add r11, r2, #15 @ tiled_width = ((width + 15) >> 4) << 4 + mov r11, r11, asr #4 + mov r11, r11, lsl #4 + + mov r5, #0 +LOOP_MAIN_ALIGNED_HEIGHT: + mul r8, r11, r5 @ src_offset = tiled_width * i + mov r6, #0 + add r8, r1, r8 @ src_offset = y_src + src_offset +LOOP_MAIN_ALIGNED_WIDTH: + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld1.8 {q0, q1}, [r8]! + mul r12, r2, r5 @ temp1 = width * i + j; + vld1.8 {q2, q3}, [r8]! + add r12, r12, r6 + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld1.8 {q4, q5}, [r8]! + add r7, r0, r12 @ dst_offset = y_dst + temp1 + vld1.8 {q6, q7}, [r8]! + + vst1.8 {q0}, [r7], r2 + vst1.8 {q1}, [r7], r2 + vst1.8 {q2}, [r7], r2 + vst1.8 {q3}, [r7], r2 + vst1.8 {q4}, [r7], r2 + vst1.8 {q5}, [r7], r2 + vst1.8 {q6}, [r7], r2 + vst1.8 {q7}, [r7], r2 + add r6, r6, #16 + cmp r6, r10 + blt LOOP_MAIN_ALIGNED_WIDTH + +MAIN_REMAIN_WIDTH_START: + cmp r10, r2 @ if (aligned_width != width) { + beq MAIN_REMAIN_WIDTH_END + + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 3); + add r8, r8, r6, lsl #3 + add r8, r1, r8 @ r8 = y_src + src_offset + + mul r12, r2, r5 @ temp1 = width * i + j; + add r12, r12, r6 + add r7, r0, r12 @ r7 = y_dst + temp1 + sub r14, r2, r6 @ r14 = width - j + + stmfd sp!, {r0-r1} @ backup registers + mov r1, #0 +LOOP_MAIN_REMAIN_HEIGHT: + mov r0, #0 @ r0 is index in memcpy +LOOP_MAIN_REMAIN_WIDTH: + ldrh r4, [r8], #2 + strh r4, [r7], #2 + add r0, #2 + cmp r0, r14 + blt LOOP_MAIN_REMAIN_WIDTH + + sub r8, r8, r14 + sub r7, r7, r14 + add r8, r8, #16 + add r7, r7, r2 + + add r1, #1 + cmp r1, #8 + blt LOOP_MAIN_REMAIN_HEIGHT + ldmfd sp!, {r0-r1} @ restore registers +MAIN_REMAIN_WIDTH_END: + + add r5, r5, #8 + cmp r5, r9 + blt LOOP_MAIN_ALIGNED_HEIGHT + +REMAIN_HEIGHT_START: + cmp r9, r3 @ if (aligned_height != height) { + beq REMAIN_HEIGHT_END + + mov r6, #0 +LOOP_REMAIN_HEIGHT_WIDTH16: + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 3) + add r8, r8, r6, lsl #3 + add r8, r1, r8 @ src_offset = y_src + src_offset + + mul r12, r2, r5 @ temp1 = width * i + j; + add r12, r12, r6 + add r7, r0, r12 @ r7 = y_dst + temp1 + + sub r12, r3, r9 + mov r14, #0 +LOOP_REMAIN_HEIGHT_WIDTH16_HEIGHT1: + vld1.8 {q0}, [r8]! + vld1.8 {q1}, [r8]! 
+ vst1.8 {q0}, [r7], r2 + vst1.8 {q1}, [r7], r2 + + add r14, r14, #2 + cmp r14, r12 + blt LOOP_REMAIN_HEIGHT_WIDTH16_HEIGHT1 + + add r6, r6, #16 + cmp r6, r10 + blt LOOP_REMAIN_HEIGHT_WIDTH16 + +REMAIN_HEIGHT_REMAIN_WIDTH_START: + cmp r10, r2 + beq REMAIN_HEIGHT_REMAIN_WIDTH_END + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 3) + add r8, r8, r6, lsl #3 + add r8, r1, r8 @ src_offset = y_src + src_offset + + mul r12, r2, r5 @ temp1 = width * i + j; + add r12, r12, r6 + add r7, r0, r12 @ r7 = y_dst + temp1 + + stmfd sp!, {r0-r1,r3} @ backup registers + mov r0, #0 + sub r1, r3, r9 +LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1: + + sub r14, r2, r6 + mov r4, #0 +LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1_WIDTHx: + ldrh r3, [r8], #2 + strh r3, [r7], #2 + add r4, #2 + cmp r4, r14 + blt LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1_WIDTHx + + sub r8, r8, r14 + sub r7, r7, r14 + add r8, r8, #16 + add r7, r7, r2 + + add r0, r0, #1 + cmp r0, r1 + blt LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1 + ldmfd sp!, {r0-r1,r3} @ restore registers + +REMAIN_HEIGHT_REMAIN_WIDTH_END: + +REMAIN_HEIGHT_END: + +RESTORE_REG: + ldmfd sp!, {r4-r12,r15} @ restore registers + + .fnend diff --git a/libswconverter/csc_tiled_to_linear_y_neon.s b/libswconverter/csc_tiled_to_linear_y_neon.s new file mode 100644 index 0000000..3cdf092 --- /dev/null +++ b/libswconverter/csc_tiled_to_linear_y_neon.s @@ -0,0 +1,232 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License") + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file csc_tiled_to_linear_y.s + * @brief SEC_OMX specific define. It support MFC 6.x tiled. + * @author ShinWon Lee (shinwon.lee@samsung.com) + * @version 1.0 + * @history + * 2012.02.01 : Create + */ + +/* + * Converts tiled data to linear for mfc 6.x + * 1. Y of NV12T to Y of YUV420P + * 2. Y of NV12T to Y of YUV420S + * + * @param dst + * Y address of YUV420[out] + * + * @param src + * Y address of NV12T[in] + * + * @param yuv420_width + * real width of YUV420[in]. It should be even. + * + * @param yuv420_height + * real height of YUV420[in] It should be even. 
+ * + */ + .arch armv7-a + .text + .global csc_tiled_to_linear_y_neon + .type csc_tiled_to_linear_y_neon, %function +csc_tiled_to_linear_y_neon: + .fnstart + + .equ CACHE_LINE_SIZE, 64 + .equ PRE_LOAD_OFFSET, 6 + + @r0 y_dst + @r1 y_src + @r2 width + @r3 height + @r4 temp3 + @r5 i + @r6 j + @r7 dst_offset + @r8 src_offset + @r9 aligned_height + @r10 aligned_width + @r11 tiled_width + @r12 temp1 + @r14 temp2 + + stmfd sp!, {r4-r12,r14} @ backup registers + ldr r4, [sp, #40] @ r4 = height + + bic r9, r3, #0xF @ aligned_height = height & (~0xF) + bic r10, r2, #0xF @ aligned_width = width & (~0xF) + add r11, r2, #15 @ tiled_width = ((width + 15) >> 4) << 4 + mov r11, r11, asr #4 + mov r11, r11, lsl #4 + + mov r5, #0 +LOOP_MAIN_ALIGNED_HEIGHT: + mul r8, r11, r5 @ src_offset = tiled_width * i + mov r6, #0 + add r8, r1, r8 @ src_offset = y_src + src_offset +LOOP_MAIN_ALIGNED_WIDTH: + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld1.8 {q0, q1}, [r8]! + vld1.8 {q2, q3}, [r8]! + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld1.8 {q4, q5}, [r8]! + vld1.8 {q6, q7}, [r8]! + mul r12, r2, r5 @ temp1 = width * i + j; + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld1.8 {q8, q9}, [r8]! + add r12, r12, r6 + vld1.8 {q10, q11}, [r8]! + add r7, r0, r12 @ dst_offset = y_dst + temp1 + pld [r8, #(CACHE_LINE_SIZE*PRE_LOAD_OFFSET)] + vld1.8 {q12, q13}, [r8]! + vld1.8 {q14, q15}, [r8]! + + vst1.8 {q0}, [r7], r2 + vst1.8 {q1}, [r7], r2 + vst1.8 {q2}, [r7], r2 + vst1.8 {q3}, [r7], r2 + vst1.8 {q4}, [r7], r2 + vst1.8 {q5}, [r7], r2 + vst1.8 {q6}, [r7], r2 + vst1.8 {q7}, [r7], r2 + vst1.8 {q8}, [r7], r2 + vst1.8 {q9}, [r7], r2 + vst1.8 {q10}, [r7], r2 + vst1.8 {q11}, [r7], r2 + vst1.8 {q12}, [r7], r2 + vst1.8 {q13}, [r7], r2 + add r6, r6, #16 + vst1.8 {q14}, [r7], r2 + cmp r6, r10 + vst1.8 {q15}, [r7], r2 + blt LOOP_MAIN_ALIGNED_WIDTH + +MAIN_REMAIN_WIDTH_START: + cmp r10, r2 @ if (aligned_width != width) { + beq MAIN_REMAIN_WIDTH_END + + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 4); + add r8, r8, r6, lsl #4 + add r8, r1, r8 @ r8 = y_src + src_offset + + mul r12, r2, r5 @ temp1 = width * i + j; + add r12, r12, r6 + add r7, r0, r12 @ r7 = y_dst + temp1 + sub r14, r2, r6 @ r14 = width - j + + stmfd sp!, {r0-r1} @ backup registers + mov r1, #0 +LOOP_MAIN_REMAIN_HEIGHT: + mov r0, #0 @ r0 is index in memcpy +LOOP_MAIN_REMAIN_WIDTH: + ldrh r4, [r8], #2 + strh r4, [r7], #2 + add r0, #2 + cmp r0, r14 + blt LOOP_MAIN_REMAIN_WIDTH + + sub r8, r8, r14 + sub r7, r7, r14 + add r8, r8, #16 + add r7, r7, r2 + + add r1, #1 + cmp r1, #16 + blt LOOP_MAIN_REMAIN_HEIGHT + ldmfd sp!, {r0-r1} @ restore registers +MAIN_REMAIN_WIDTH_END: + + add r5, r5, #16 + cmp r5, r9 + blt LOOP_MAIN_ALIGNED_HEIGHT + +REMAIN_HEIGHT_START: + cmp r9, r3 @ if (aligned_height != height) { + beq REMAIN_HEIGHT_END + + mov r6, #0 +LOOP_REMAIN_HEIGHT_WIDTH16: + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 4) + add r8, r8, r6, lsl #4 + add r8, r1, r8 @ src_offset = y_src + src_offset + + mul r12, r2, r5 @ temp1 = width * i + j; + add r12, r12, r6 + add r7, r0, r12 @ r7 = y_dst + temp1 + + sub r12, r3, r9 + mov r14, #0 +LOOP_REMAIN_HEIGHT_WIDTH16_HEIGHT1: + vld1.8 {q0}, [r8]! + vld1.8 {q1}, [r8]! 
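[Editor's note] csc_tiled_to_linear_y_neon below and csc_tiled_to_linear_uv_neon above walk the same MFC 6.x layout and differ only in block height (16 lines for Y, 8 for the interleaved UV plane). A plain-C sketch of that walk, with the unaligned right/bottom edges handled by clamping instead of the separate remainder loops used in the assembly:

#include <string.h>

/* Sketch only: blocks are 16 bytes wide, block_h lines high, stored
 * contiguously and row-major, with tiled_width = ((width + 15) >> 4) << 4. */
static void mfc6_tiled_to_linear(unsigned char *dst, const unsigned char *src,
                                 unsigned int width, unsigned int height,
                                 unsigned int block_h)
{
    unsigned int tiled_width = ((width + 15) >> 4) << 4;
    unsigned int i, j, row;

    for (i = 0; i < height; i += block_h) {
        for (j = 0; j < width; j += 16) {
            const unsigned char *s = src + tiled_width * i + j * block_h;
            unsigned char *d = dst + width * i + j;
            unsigned int lines = (height - i < block_h) ? height - i : block_h;
            unsigned int bytes = (width - j < 16) ? width - j : 16;

            for (row = 0; row < lines; row++)
                memcpy(d + row * width, s + row * 16, bytes);
        }
    }
}

A caller would pass block_h = 16 for the Y plane and block_h = 8 (with the chroma height) for the interleaved UV plane.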
+ vst1.8 {q0}, [r7], r2 + vst1.8 {q1}, [r7], r2 + + add r14, r14, #2 + cmp r14, r12 + blt LOOP_REMAIN_HEIGHT_WIDTH16_HEIGHT1 + + add r6, r6, #16 + cmp r6, r10 + blt LOOP_REMAIN_HEIGHT_WIDTH16 + +REMAIN_HEIGHT_REMAIN_WIDTH_START: + cmp r10, r2 + beq REMAIN_HEIGHT_REMAIN_WIDTH_END + mul r8, r11, r5 @ src_offset = (tiled_width * i) + (j << 4) + add r8, r8, r6, lsl #4 + add r8, r1, r8 @ src_offset = y_src + src_offset + + mul r12, r2, r5 @ temp1 = width * i + j; + add r12, r12, r6 + add r7, r0, r12 @ r7 = y_dst + temp1 + + stmfd sp!, {r0-r1,r3} @ backup registers + mov r0, #0 + sub r1, r3, r9 +LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1: + + sub r14, r2, r6 + mov r4, #0 +LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1_WIDTHx: + ldrh r3, [r8], #2 + strh r3, [r7], #2 + add r4, #2 + cmp r4, r14 + blt LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1_WIDTHx + + sub r8, r8, r14 + sub r7, r7, r14 + add r8, r8, #16 + add r7, r7, r2 + + add r0, r0, #1 + cmp r0, r1 + blt LOOP_REMAIN_HEIGHT_REMAIN_WIDTH_HEIGHT1 + ldmfd sp!, {r0-r1,r3} @ restore registers + +REMAIN_HEIGHT_REMAIN_WIDTH_END: + +REMAIN_HEIGHT_END: + +RESTORE_REG: + ldmfd sp!, {r4-r12,r15} @ restore registers + + .fnend diff --git a/libswconverter/swconverter_dummy.c b/libswconverter/swconverter_dummy.c new file mode 100644 index 0000000..0487ba9 --- /dev/null +++ b/libswconverter/swconverter_dummy.c @@ -0,0 +1,81 @@ +/* + * + * Copyright 2014 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file swconverter_dummy.c + * + * @brief dummu file of machine dependent implemention of CSC functions + * + * @author Cho KyongHo (pullip.cho@samsung.com) + * + * @version 1.0 + * + * @history + * 2014.08.27 : Create + */ + +void csc_interleave_memcpy_neon( + unsigned char *dest, + unsigned char *src1, + unsigned char *src2, + unsigned int src_size) +{ +} + + +void csc_tiled_to_linear_y_neon( + unsigned char *y_dst, + unsigned char *y_src, + unsigned int width, + unsigned int height) +{ +} + +void csc_tiled_to_linear_uv_neon( + unsigned char *uv_dst, + unsigned char *uv_src, + unsigned int width, + unsigned int height) +{ +} + +void csc_tiled_to_linear_uv_deinterleave_neon( + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *uv_src, + unsigned int width, + unsigned int height) +{ +} + +void csc_ARGB8888_to_YUV420SP_NEON( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height) +{ +} + +void csc_RGBA8888_to_YUV420SP_NEON( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height) +{ +} diff --git a/libswconverter/swconvertor.c b/libswconverter/swconvertor.c new file mode 100644 index 0000000..b3330fe --- /dev/null +++ b/libswconverter/swconvertor.c @@ -0,0 +1,2214 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
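[Editor's note] The dummy file above only stubs out the RGB-to-YUV420SP converters, and this patch does not show their NEON bodies or coefficients. As a rough illustration of what such a conversion involves, here is a scalar sketch using a common BT.601 video-range integer approximation; the byte order (R,G,B,A in memory), even width/height, and taking chroma from the top-left pixel of each 2x2 block are all assumptions, not properties of the real csc_RGBA8888_to_YUV420SP_NEON.

/* Illustrative sketch only; not the NEON implementation from this patch. */
static void rgba8888_to_nv12_sketch(unsigned char *y_dst, unsigned char *uv_dst,
                                    const unsigned char *rgb_src,
                                    unsigned int width, unsigned int height)
{
    unsigned int x, y;

    for (y = 0; y < height; y++) {
        for (x = 0; x < width; x++) {
            const unsigned char *p = rgb_src + 4 * (y * width + x);
            int r = p[0], g = p[1], b = p[2];

            y_dst[y * width + x] =
                (unsigned char)(((66 * r + 129 * g + 25 * b + 128) >> 8) + 16);

            if (((x | y) & 1) == 0) {   /* one U/V pair per 2x2 block */
                unsigned char *uv = uv_dst + (y / 2) * width + (x & ~1u);
                uv[0] = (unsigned char)(((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128);
                uv[1] = (unsigned char)(((112 * r - 94 * g - 18 * b + 128) >> 8) + 128);
            }
        }
    }
}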
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file swconvertor.c + * + * @brief Exynos_OMX specific define + * + * @author ShinWon Lee (shinwon.lee@samsung.com) + * + * @version 1.0 + * + * @history + * 2012.02.01 : Create + */ + +#include +#include +#include +#include "swconverter.h" + +#ifdef NEON_SUPPORT +#ifdef USE_NV12T_128X64 +/* MFC 5.X */ +/* + * Converts tiled data to linear + * Crops left, top, right, buttom + * 1. Y of NV12T to Y of YUV420P + * 2. Y of NV12T to Y of YUV420S + * 3. UV of NV12T to UV of YUV420S + * + * @param yuv420_dest + * Y or UV plane address of YUV420[out] + * + * @param nv12t_src + * Y or UV plane address of NV12T[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ + void csc_tiled_to_linear_crop_neon( + unsigned char *yuv420_dest, + unsigned char *nv12t_src, + unsigned int yuv420_width, + unsigned int yuv420_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom); + +/* + * Converts and Deinterleaves tiled data to linear + * Crops left, top, right, buttom + * 1. UV of NV12T to UV of YUV420P + * + * @param yuv420_u_dest + * U plane address of YUV420P[out] + * + * @param yuv420_v_dest + * V plane address of YUV420P[out] + * + * @param nv12t_src + * UV plane address of NV12T[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_uv_height + * Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +void csc_tiled_to_linear_deinterleave_crop_neon( + unsigned char *yuv420_u_dest, + unsigned char *yuv420_v_dest, + unsigned char *nv12t_uv_src, + unsigned int yuv420_width, + unsigned int yuv420_uv_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom); + +/* + * Converts linear data to tiled + * Crops left, top, right, buttom + * 1. Y of YUV420P to Y of NV12T + * 2. Y of YUV420S to Y of NV12T + * 3. UV of YUV420S to UV of NV12T + * + * @param nv12t_dest + * Y or UV plane address of NV12T[out] + * + * @param yuv420_src + * Y or UV plane address of YUV420P(S)[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +void csc_linear_to_tiled_crop_neon( + unsigned char *nv12t_dest, + unsigned char *yuv420_src, + unsigned int yuv420_width, + unsigned int yuv420_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom); + +/* + * Converts and Interleaves linear to tiled + * Crops left, top, right, buttom + * 1. 
UV of YUV420P to UV of NV12T + * + * @param nv12t_uv_dest + * UV plane address of NV12T[out] + * + * @param yuv420p_u_src + * U plane address of YUV420P[in] + * + * @param yuv420p_v_src + * V plane address of YUV420P[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_uv_height + * Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +void csc_linear_to_tiled_interleave_crop_neon( + unsigned char *nv12t_uv_dest, + unsigned char *yuv420_u_src, + unsigned char *yuv420_v_src, + unsigned int yuv420_width, + unsigned int yuv420_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom); +#else +/* others */ +void csc_tiled_to_linear_y_neon( + unsigned char *y_dst, + unsigned char *y_src, + unsigned int width, + unsigned int height); + +void csc_tiled_to_linear_uv_neon( + unsigned char *uv_dst, + unsigned char *uv_src, + unsigned int width, + unsigned int height); + +void csc_tiled_to_linear_uv_deinterleave_neon( + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *uv_src, + unsigned int width, + unsigned int height); +#endif /* USE_NV12T_128X64 */ +/* common */ +void csc_interleave_memcpy_neon( + unsigned char *dest, + unsigned char *src1, + unsigned char *src2, + unsigned int src_size); + +void csc_BGRA8888_to_YUV420SP_NEON( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height); + +void csc_RGBA8888_to_YUV420SP_NEON( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height); +#endif /* NEON_SUPPORT */ + + +#ifdef USE_NV12T_128X64 +/* + * It support MFC 5.x tiled. + * Get tiled address of position(x,y) + * + * @param x_size + * width of tiled[in] + * + * @param y_size + * height of tiled[in] + * + * @param x_pos + * x position of tield[in] + * + * @param src_size + * y position of tield[in] + * + * @return + * address of tiled data + */ +static int tile_4x2_read(int x_size, int y_size, int x_pos, int y_pos) +{ + int pixel_x_m1, pixel_y_m1; + int roundup_x, roundup_y; + int linear_addr0, linear_addr1, bank_addr ; + int x_addr; + int trans_addr; + + pixel_x_m1 = x_size -1; + pixel_y_m1 = y_size -1; + + roundup_x = ((pixel_x_m1 >> 7) + 1); + roundup_y = ((pixel_x_m1 >> 6) + 1); + + x_addr = x_pos >> 2; + + if ((y_size <= y_pos+32) && ( y_pos < y_size) && + (((pixel_y_m1 >> 5) & 0x1) == 0) && (((y_pos >> 5) & 0x1) == 0)) { + linear_addr0 = (((y_pos & 0x1f) <<4) | (x_addr & 0xf)); + linear_addr1 = (((y_pos >> 6) & 0xff) * roundup_x + ((x_addr >> 6) & 0x3f)); + + if (((x_addr >> 5) & 0x1) == ((y_pos >> 5) & 0x1)) + bank_addr = ((x_addr >> 4) & 0x1); + else + bank_addr = 0x2 | ((x_addr >> 4) & 0x1); + } else { + linear_addr0 = (((y_pos & 0x1f) << 4) | (x_addr & 0xf)); + linear_addr1 = (((y_pos >> 6) & 0xff) * roundup_x + ((x_addr >> 5) & 0x7f)); + + if (((x_addr >> 5) & 0x1) == ((y_pos >> 5) & 0x1)) + bank_addr = ((x_addr >> 4) & 0x1); + else + bank_addr = 0x2 | ((x_addr >> 4) & 0x1); + } + + linear_addr0 = linear_addr0 << 2; + trans_addr = (linear_addr1 <<13) | (bank_addr << 11) | linear_addr0; + + return trans_addr; +} + +/* + * It support MFC 5.x tiled. + * Converts tiled data to linear + * Crops left, top, right, buttom + * 1. Y of NV12T to Y of YUV420P + * 2. Y of NV12T to Y of YUV420S + * 3. 
UV of NV12T to UV of YUV420S + * + * @param yuv420_dest + * Y or UV plane address of YUV420[out] + * + * @param nv12t_src + * Y or UV plane address of NV12T[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +static void csc_tiled_to_linear_crop( + unsigned char *yuv420_dest, + unsigned char *nv12t_src, + unsigned int yuv420_width, + unsigned int yuv420_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom) +{ + unsigned int i, j; + unsigned int tiled_offset = 0, tiled_offset1 = 0; + unsigned int linear_offset = 0; + unsigned int temp1 = 0, temp2 = 0, temp3 = 0, temp4 = 0; + + temp3 = yuv420_width-right; + temp1 = temp3-left; + /* real width is greater than or equal 256 */ + if (temp1 >= 256) { + for (i=top; i>8)<<8; + temp3 = temp3>>6; + temp4 = i>>5; + if (temp4 & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = temp4-1; + temp1 = ((yuv420_width+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+temp3; + tiled_offset = tiled_offset+2; + temp1 = (temp3>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + tiled_offset1 = tiled_offset+2048*2; + temp4 = 8; + } else { + temp2 = ((yuv420_height+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = temp3+2; + temp1 = (temp1>>2)<<2; + tiled_offset = temp3+temp1; + temp1 = ((yuv420_width+127)>>7)<<7; + tiled_offset = tiled_offset+temp4*(temp1>>6); + tiled_offset = tiled_offset<<11; + tiled_offset1 = tiled_offset+2048*6; + temp4 = 8; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = ((yuv420_width+127)>>7)<<7; + tiled_offset = temp4*(temp1>>6); + tiled_offset = tiled_offset+temp3; + tiled_offset = tiled_offset<<11; + tiled_offset1 = tiled_offset+2048*2; + temp4 = 4; + } + } + + temp1 = i&0x1F; + tiled_offset = tiled_offset+64*(temp1); + tiled_offset1 = tiled_offset1+64*(temp1); + temp2 = yuv420_width-left-right; + linear_offset = temp2*(i-top); + temp3 = ((j+256)>>8)<<8; + temp3 = temp3-j; + temp1 = left&0x3F; + if (temp3 > 192) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset+temp1, 64-temp1); + temp2 = ((left+63)>>6)<<6; + temp3 = ((yuv420_width-right)>>6)<<6; + if (temp2 == temp3) { + temp2 = yuv420_width-right-(64-temp1); + } + memcpy(yuv420_dest+linear_offset+64-temp1, nv12t_src+tiled_offset+2048, 64); + memcpy(yuv420_dest+linear_offset+128-temp1, nv12t_src+tiled_offset1, 64); + memcpy(yuv420_dest+linear_offset+192-temp1, nv12t_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+256-temp1; + } else if (temp3 > 128) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset+2048+temp1, 64-temp1); + memcpy(yuv420_dest+linear_offset+64-temp1, nv12t_src+tiled_offset1, 64); + memcpy(yuv420_dest+linear_offset+128-temp1, nv12t_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+192-temp1; + } else if (temp3 > 64) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset1+temp1, 64-temp1); + memcpy(yuv420_dest+linear_offset+64-temp1, nv12t_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+128-temp1; + } else if (temp3 > 0) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset1+2048+temp1, 64-temp1); + linear_offset = linear_offset+64-temp1; + } + + tiled_offset = tiled_offset+temp4*2048; + j = 
(left>>8)<<8; + j = j + 256; + temp2 = yuv420_width-right-256; + for (; j<=temp2; j=j+256) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 64); + tiled_offset1 = tiled_offset1+temp4*2048; + memcpy(yuv420_dest+linear_offset+64, nv12t_src+tiled_offset+2048, 64); + memcpy(yuv420_dest+linear_offset+128, nv12t_src+tiled_offset1, 64); + tiled_offset = tiled_offset+temp4*2048; + memcpy(yuv420_dest+linear_offset+192, nv12t_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+256; + } + + tiled_offset1 = tiled_offset1+temp4*2048; + temp2 = yuv420_width-right-j; + if (temp2 > 192) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 64); + memcpy(yuv420_dest+linear_offset+64, nv12t_src+tiled_offset+2048, 64); + memcpy(yuv420_dest+linear_offset+128, nv12t_src+tiled_offset1, 64); + memcpy(yuv420_dest+linear_offset+192, nv12t_src+tiled_offset1+2048, temp2-192); + } else if (temp2 > 128) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 64); + memcpy(yuv420_dest+linear_offset+64, nv12t_src+tiled_offset+2048, 64); + memcpy(yuv420_dest+linear_offset+128, nv12t_src+tiled_offset1, temp2-128); + } else if (temp2 > 64) { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 64); + memcpy(yuv420_dest+linear_offset+64, nv12t_src+tiled_offset+2048, temp2-64); + } else { + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, temp2); + } + } + } else if (temp1 >= 64) { + for (i=top; i<(yuv420_height-buttom); i=i+1) { + j = left; + tiled_offset = tile_4x2_read(yuv420_width, yuv420_height, j, i); + temp2 = ((j+64)>>6)<<6; + temp2 = temp2-j; + linear_offset = temp1*(i-top); + temp4 = j&0x3; + tiled_offset = tiled_offset+temp4; + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, temp2); + linear_offset = linear_offset+temp2; + j = j+temp2; + if ((j+64) <= temp3) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_height, j, i); + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 64); + linear_offset = linear_offset+64; + j = j+64; + } + if ((j+64) <= temp3) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_height, j, i); + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 64); + linear_offset = linear_offset+64; + j = j+64; + } + if (j < temp3) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_height, j, i); + temp2 = temp3-j; + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, temp2); + } + } + } else { + for (i=top; i<(yuv420_height-buttom); i=i+1) { + linear_offset = temp1*(i-top); + for (j=left; j<(yuv420_width-right); j=j+2) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_height, j, i); + temp4 = j&0x3; + tiled_offset = tiled_offset+temp4; + memcpy(yuv420_dest+linear_offset, nv12t_src+tiled_offset, 2); + linear_offset = linear_offset+2; + } + } + } +} + +/* + * Converts and Deinterleaves tiled data to linear + * Crops left, top, right, buttom + * 1. 
UV of NV12T to UV of YUV420P + * + * @param yuv420_u_dest + * U plane address of YUV420P[out] + * + * @param yuv420_v_dest + * V plane address of YUV420P[out] + * + * @param nv12t_src + * UV plane address of NV12T[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_uv_height + * Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +static void csc_tiled_to_linear_deinterleave_crop( + unsigned char *yuv420_u_dest, + unsigned char *yuv420_v_dest, + unsigned char *nv12t_uv_src, + unsigned int yuv420_width, + unsigned int yuv420_uv_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom) +{ + unsigned int i, j; + unsigned int tiled_offset = 0, tiled_offset1 = 0; + unsigned int linear_offset = 0; + unsigned int temp1 = 0, temp2 = 0, temp3 = 0, temp4 = 0; + + temp3 = yuv420_width-right; + temp1 = temp3-left; + /* real width is greater than or equal 256 */ + if (temp1 >= 256) { + for (i=top; i>8)<<8; + temp3 = temp3>>6; + temp4 = i>>5; + if (temp4 & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = temp4-1; + temp1 = ((yuv420_width+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+temp3; + tiled_offset = tiled_offset+2; + temp1 = (temp3>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + tiled_offset1 = tiled_offset+2048*2; + temp4 = 8; + } else { + temp2 = ((yuv420_uv_height+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = temp3+2; + temp1 = (temp1>>2)<<2; + tiled_offset = temp3+temp1; + temp1 = ((yuv420_width+127)>>7)<<7; + tiled_offset = tiled_offset+temp4*(temp1>>6); + tiled_offset = tiled_offset<<11; + tiled_offset1 = tiled_offset+2048*6; + temp4 = 8; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = ((yuv420_width+127)>>7)<<7; + tiled_offset = temp4*(temp1>>6); + tiled_offset = tiled_offset+temp3; + tiled_offset = tiled_offset<<11; + tiled_offset1 = tiled_offset+2048*2; + temp4 = 4; + } + } + + temp1 = i&0x1F; + tiled_offset = tiled_offset+64*(temp1); + tiled_offset1 = tiled_offset1+64*(temp1); + temp2 = yuv420_width-left-right; + linear_offset = temp2*(i-top)/2; + temp3 = ((j+256)>>8)<<8; + temp3 = temp3-j; + temp1 = left&0x3F; + if (temp3 > 192) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, yuv420_v_dest+linear_offset, nv12t_uv_src+tiled_offset+temp1, 64-temp1); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+(32-temp1/2), + yuv420_v_dest+linear_offset+(32-temp1/2), + nv12t_uv_src+tiled_offset+2048, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+(64-temp1/2), + yuv420_v_dest+linear_offset+(64-temp1/2), + nv12t_uv_src+tiled_offset1, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+(96-temp1/2), + yuv420_v_dest+linear_offset+(96-temp1/2), + nv12t_uv_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+128-temp1/2; + } else if (temp3 > 128) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset+2048+temp1, 64-temp1); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+(32-temp1/2), + yuv420_v_dest+linear_offset+(32-temp1/2), + nv12t_uv_src+tiled_offset1, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+(64-temp1/2), + yuv420_v_dest+linear_offset+(64-temp1/2), + nv12t_uv_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+96-temp1/2; + } else if (temp3 
> 64) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset1+temp1, 64-temp1); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+(32-temp1/2), + yuv420_v_dest+linear_offset+(32-temp1/2), + nv12t_uv_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+64-temp1/2; + } else if (temp3 > 0) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset1+2048+temp1, 64-temp1); + linear_offset = linear_offset+32-temp1/2; + } + + tiled_offset = tiled_offset+temp4*2048; + j = (left>>8)<<8; + j = j + 256; + temp2 = yuv420_width-right-256; + for (; j<=temp2; j=j+256) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 64); + tiled_offset1 = tiled_offset1+temp4*2048; + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+32, + yuv420_v_dest+linear_offset+32, + nv12t_uv_src+tiled_offset+2048, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+64, + yuv420_v_dest+linear_offset+64, + nv12t_uv_src+tiled_offset1, 64); + tiled_offset = tiled_offset+temp4*2048; + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+96, + yuv420_v_dest+linear_offset+96, + nv12t_uv_src+tiled_offset1+2048, 64); + linear_offset = linear_offset+128; + } + + tiled_offset1 = tiled_offset1+temp4*2048; + temp2 = yuv420_width-right-j; + if (temp2 > 192) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+32, + yuv420_v_dest+linear_offset+32, + nv12t_uv_src+tiled_offset+2048, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+64, + yuv420_v_dest+linear_offset+64, + nv12t_uv_src+tiled_offset1, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+96, + yuv420_v_dest+linear_offset+96, + nv12t_uv_src+tiled_offset1+2048, temp2-192); + } else if (temp2 > 128) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+32, + yuv420_v_dest+linear_offset+32, + nv12t_uv_src+tiled_offset+2048, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+64, + yuv420_v_dest+linear_offset+64, + nv12t_uv_src+tiled_offset1, temp2-128); + } else if (temp2 > 64) { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 64); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset+32, + yuv420_v_dest+linear_offset+32, + nv12t_uv_src+tiled_offset+2048, temp2-64); + } else { + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, temp2); + } + } + } else if (temp1 >= 64) { + for (i=top; i<(yuv420_uv_height-buttom); i=i+1) { + j = left; + tiled_offset = tile_4x2_read(yuv420_width, yuv420_uv_height, j, i); + temp2 = ((j+64)>>6)<<6; + temp2 = temp2-j; + temp3 = yuv420_width-right; + temp4 = temp3-left; + linear_offset = temp4*(i-top)/2; + temp4 = j&0x3; + tiled_offset = tiled_offset+temp4; + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, temp2); + linear_offset = linear_offset+temp2/2; + j = j+temp2; + if ((j+64) <= temp3) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_uv_height, j, i); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 64); + linear_offset = 
linear_offset+32; + j = j+64; + } + if ((j+64) <= temp3) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_uv_height, j, i); + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 64); + linear_offset = linear_offset+32; + j = j+64; + } + if (j < temp3) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_uv_height, j, i); + temp1 = temp3-j; + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, temp1); + } + } + } else { + for (i=top; i<(yuv420_uv_height-buttom); i=i+1) { + temp3 = yuv420_width-right; + temp4 = temp3-left; + linear_offset = temp4*(i-top)/2; + for (j=left; j<(yuv420_width-right); j=j+2) { + tiled_offset = tile_4x2_read(yuv420_width, yuv420_uv_height, j, i); + temp3 = j&0x3; + tiled_offset = tiled_offset+temp3; + csc_deinterleave_memcpy(yuv420_u_dest+linear_offset, + yuv420_v_dest+linear_offset, + nv12t_uv_src+tiled_offset, 2); + linear_offset = linear_offset+1; + } + } + } +} + +/* + * Converts linear data to tiled + * Crops left, top, right, buttom + * 1. Y of YUV420P to Y of NV12T + * 2. Y of YUV420S to Y of NV12T + * 3. UV of YUV420S to UV of NV12T + * + * @param nv12t_dest + * Y or UV plane address of NV12T[out] + * + * @param yuv420_src + * Y or UV plane address of YUV420P(S)[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_height + * Y: Height of YUV420, UV: Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +static void csc_linear_to_tiled_crop( + unsigned char *nv12t_dest, + unsigned char *yuv420_src, + unsigned int yuv420_width, + unsigned int yuv420_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom) +{ + unsigned int i, j; + unsigned int tiled_x_index = 0, tiled_y_index = 0; + unsigned int aligned_x_size = 0, aligned_y_size = 0; + unsigned int tiled_offset = 0; + unsigned int temp1 = 0, temp2 = 0; + + aligned_y_size = ((yuv420_height-top-buttom)>>5)<<5; + aligned_x_size = ((yuv420_width-left-right)>>6)<<6; + + for (i=0; i>6; + tiled_y_index = i>>5; + if (tiled_y_index & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = tiled_y_index-1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset+2; + temp1 = (tiled_x_index>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + } else { + temp2 = (((yuv420_height-top-buttom)+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = tiled_x_index+2; + temp1 = (temp1>>2)<<2; + tiled_offset = tiled_x_index+temp1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset+tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset<<11; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset<<11; + } + } + + memcpy(nv12t_dest+tiled_offset, yuv420_src+left+j+yuv420_width*(i+top), 64); + memcpy(nv12t_dest+tiled_offset+64*1, yuv420_src+left+j+yuv420_width*(i+top+1), 64); + memcpy(nv12t_dest+tiled_offset+64*2, yuv420_src+left+j+yuv420_width*(i+top+2), 64); + memcpy(nv12t_dest+tiled_offset+64*3, yuv420_src+left+j+yuv420_width*(i+top+3), 64); + 
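
The tiled-to-linear deinterleave path above and the linear-to-tiled interleave path further below funnel every CbCr split and merge through two small helpers, csc_deinterleave_memcpy() and csc_interleave_memcpy(). As a minimal sketch of what those helpers do, inferred from their doc comments and call sites (the sketch_* names are placeholders, not symbols from this patch):

    /* Split src_size interleaved bytes (CbCrCbCr...) into two planar runs of
     * src_size/2 bytes each. */
    static void sketch_deinterleave_memcpy(unsigned char *dest1, unsigned char *dest2,
                                           unsigned char *src, unsigned int src_size)
    {
        unsigned int i;

        for (i = 0; i < src_size / 2; i++) {
            dest1[i] = src[2 * i];        /* even bytes -> first plane (Cb) */
            dest2[i] = src[2 * i + 1];    /* odd bytes  -> second plane (Cr) */
        }
    }

    /* Merge src_size bytes from each planar run into 2*src_size interleaved
     * bytes; e.g. the interleave-crop loops below pass 32 to fill one
     * 64-byte tile row. */
    static void sketch_interleave_memcpy(unsigned char *dest, unsigned char *src1,
                                         unsigned char *src2, unsigned int src_size)
    {
        unsigned int i;

        for (i = 0; i < src_size; i++) {
            dest[2 * i]     = src1[i];
            dest[2 * i + 1] = src2[i];
        }
    }
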
memcpy(nv12t_dest+tiled_offset+64*4, yuv420_src+left+j+yuv420_width*(i+top+4), 64); + memcpy(nv12t_dest+tiled_offset+64*5, yuv420_src+left+j+yuv420_width*(i+top+5), 64); + memcpy(nv12t_dest+tiled_offset+64*6, yuv420_src+left+j+yuv420_width*(i+top+6), 64); + memcpy(nv12t_dest+tiled_offset+64*7, yuv420_src+left+j+yuv420_width*(i+top+7), 64); + memcpy(nv12t_dest+tiled_offset+64*8, yuv420_src+left+j+yuv420_width*(i+top+8), 64); + memcpy(nv12t_dest+tiled_offset+64*9, yuv420_src+left+j+yuv420_width*(i+top+9), 64); + memcpy(nv12t_dest+tiled_offset+64*10, yuv420_src+left+j+yuv420_width*(i+top+10), 64); + memcpy(nv12t_dest+tiled_offset+64*11, yuv420_src+left+j+yuv420_width*(i+top+11), 64); + memcpy(nv12t_dest+tiled_offset+64*12, yuv420_src+left+j+yuv420_width*(i+top+12), 64); + memcpy(nv12t_dest+tiled_offset+64*13, yuv420_src+left+j+yuv420_width*(i+top+13), 64); + memcpy(nv12t_dest+tiled_offset+64*14, yuv420_src+left+j+yuv420_width*(i+top+14), 64); + memcpy(nv12t_dest+tiled_offset+64*15, yuv420_src+left+j+yuv420_width*(i+top+15), 64); + memcpy(nv12t_dest+tiled_offset+64*16, yuv420_src+left+j+yuv420_width*(i+top+16), 64); + memcpy(nv12t_dest+tiled_offset+64*17, yuv420_src+left+j+yuv420_width*(i+top+17), 64); + memcpy(nv12t_dest+tiled_offset+64*18, yuv420_src+left+j+yuv420_width*(i+top+18), 64); + memcpy(nv12t_dest+tiled_offset+64*19, yuv420_src+left+j+yuv420_width*(i+top+19), 64); + memcpy(nv12t_dest+tiled_offset+64*20, yuv420_src+left+j+yuv420_width*(i+top+20), 64); + memcpy(nv12t_dest+tiled_offset+64*21, yuv420_src+left+j+yuv420_width*(i+top+21), 64); + memcpy(nv12t_dest+tiled_offset+64*22, yuv420_src+left+j+yuv420_width*(i+top+22), 64); + memcpy(nv12t_dest+tiled_offset+64*23, yuv420_src+left+j+yuv420_width*(i+top+23), 64); + memcpy(nv12t_dest+tiled_offset+64*24, yuv420_src+left+j+yuv420_width*(i+top+24), 64); + memcpy(nv12t_dest+tiled_offset+64*25, yuv420_src+left+j+yuv420_width*(i+top+25), 64); + memcpy(nv12t_dest+tiled_offset+64*26, yuv420_src+left+j+yuv420_width*(i+top+26), 64); + memcpy(nv12t_dest+tiled_offset+64*27, yuv420_src+left+j+yuv420_width*(i+top+27), 64); + memcpy(nv12t_dest+tiled_offset+64*28, yuv420_src+left+j+yuv420_width*(i+top+28), 64); + memcpy(nv12t_dest+tiled_offset+64*29, yuv420_src+left+j+yuv420_width*(i+top+29), 64); + memcpy(nv12t_dest+tiled_offset+64*30, yuv420_src+left+j+yuv420_width*(i+top+30), 64); + memcpy(nv12t_dest+tiled_offset+64*31, yuv420_src+left+j+yuv420_width*(i+top+31), 64); + } + } + + for (i=aligned_y_size; i<(yuv420_height-top-buttom); i=i+2) { + for (j=0; j>6; + tiled_y_index = i>>5; + if (tiled_y_index & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = tiled_y_index-1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset+2; + temp1 = (tiled_x_index>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + } else { + temp2 = (((yuv420_height-top-buttom)+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = tiled_x_index+2; + temp1 = (temp1>>2)<<2; + tiled_offset = tiled_x_index+temp1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset+tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset<<11; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset<<11; + } + } + + temp1 = i&0x1F; + 
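
All of the tiling loops in this hunk share one address computation for the 64x32-byte NV12T tiles: the odd/even branches pick the tile's index within the plane, the <<11 scales that index by the 2048-byte tile size, and 64*(i & 0x1F) + (j & 0x3F) selects the byte inside the tile. A compact restatement of that arithmetic, with hypothetical names, where tiles_per_row is the 128-aligned plane width divided by 64, and bottom_row_unpaired is assumed to stand for the comparison of i+32 against the 32-aligned height that separates the even1 and even2 cases:

    /* Byte offset of (x, y) in a 64x32-tiled NV12T plane - illustrative only. */
    static unsigned int nv12t_byte_offset(unsigned int x, unsigned int y,
                                          unsigned int tiles_per_row,
                                          int bottom_row_unpaired)
    {
        unsigned int tx = x >> 6;        /* tile column */
        unsigned int ty = y >> 5;        /* tile row    */
        unsigned int tile_index;

        if (ty & 0x1) {
            /* odd tile row: 2 + x + ((x>>2)<<2) + tiles_per_row*(y-1) */
            tile_index = (ty - 1) * tiles_per_row + tx + ((tx >> 2) << 2) + 2;
        } else if (!bottom_row_unpaired) {
            /* even tile row with a partner row below it */
            tile_index = ty * tiles_per_row + tx + (((tx + 2) >> 2) << 2);
        } else {
            /* unpaired bottom tile row: plain raster order */
            tile_index = ty * tiles_per_row + tx;
        }

        return (tile_index << 11)        /* 2048 bytes per tile          */
             + 64 * (y & 0x1F)           /* 64-byte rows inside the tile */
             + (x & 0x3F);               /* byte within the row          */
    }
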
memcpy(nv12t_dest+tiled_offset+64*(temp1), yuv420_src+left+j+yuv420_width*(i+top), 64); + memcpy(nv12t_dest+tiled_offset+64*(temp1+1), yuv420_src+left+j+yuv420_width*(i+top+1), 64); + } + } + + for (i=0; i<(yuv420_height-top-buttom); i=i+2) { + for (j=aligned_x_size; j<(yuv420_width-left-right); j=j+2) { + tiled_offset = 0; + tiled_x_index = j>>6; + tiled_y_index = i>>5; + if (tiled_y_index & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = tiled_y_index-1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset+2; + temp1 = (tiled_x_index>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + } else { + temp2 = (((yuv420_height-top-buttom)+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = tiled_x_index+2; + temp1 = (temp1>>2)<<2; + tiled_offset = tiled_x_index+temp1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset+tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset<<11; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset<<11; + } + } + + temp1 = i&0x1F; + temp2 = j&0x3F; + memcpy(nv12t_dest+tiled_offset+temp2+64*(temp1), yuv420_src+left+j+yuv420_width*(i+top), 2); + memcpy(nv12t_dest+tiled_offset+temp2+64*(temp1+1), yuv420_src+left+j+yuv420_width*(i+top+1), 2); + } + } +} + +/* + * Converts and Interleaves linear to tiled + * Crops left, top, right, buttom + * 1. UV of YUV420P to UV of NV12T + * + * @param nv12t_uv_dest + * UV plane address of NV12T[out] + * + * @param yuv420p_u_src + * U plane address of YUV420P[in] + * + * @param yuv420p_v_src + * V plane address of YUV420P[in] + * + * @param yuv420_width + * Width of YUV420[in] + * + * @param yuv420_uv_height + * Height/2 of YUV420[in] + * + * @param left + * Crop size of left + * + * @param top + * Crop size of top + * + * @param right + * Crop size of right + * + * @param buttom + * Crop size of buttom + */ +static void csc_linear_to_tiled_interleave_crop( + unsigned char *nv12t_uv_dest, + unsigned char *yuv420_u_src, + unsigned char *yuv420_v_src, + unsigned int yuv420_width, + unsigned int yuv420_height, + unsigned int left, + unsigned int top, + unsigned int right, + unsigned int buttom) +{ + unsigned int i, j; + unsigned int tiled_x_index = 0, tiled_y_index = 0; + unsigned int aligned_x_size = 0, aligned_y_size = 0; + unsigned int tiled_offset = 0; + unsigned int temp1 = 0, temp2 = 0; + + aligned_y_size = ((yuv420_height-top-buttom)>>5)<<5; + aligned_x_size = ((yuv420_width-left-right)>>6)<<6; + + for (i=0; i>6; + tiled_y_index = i>>5; + if (tiled_y_index & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = tiled_y_index-1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset+2; + temp1 = (tiled_x_index>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + } else { + temp2 = (((yuv420_height-top-buttom)+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = tiled_x_index+2; + temp1 = (temp1>>2)<<2; + tiled_offset = tiled_x_index+temp1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset+tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset<<11; + } else { + /* 
even2 fomula: x+x_block_num*y */ + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset<<11; + } + } + + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*1, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+1), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+1), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*2, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+2), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+2), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*3, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+3), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+3), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*4, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+4), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+4), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*5, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+5), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+5), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*6, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+6), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+6), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*7, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+7), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+7), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*8, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+8), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+8), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*9, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+9), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+9), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*10, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+10), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+10), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*11, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+11), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+11), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*12, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+12), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+12), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*13, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+13), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+13), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*14, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+14), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+14), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*15, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+15), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+15), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*16, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+16), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+16), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*17, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+17), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+17), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*18, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+18), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+18), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*19, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+19), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+19), 
32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*20, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+20), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+20), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*21, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+21), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+21), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*22, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+22), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+22), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*23, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+23), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+23), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*24, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+24), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+24), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*25, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+25), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+25), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*26, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+26), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+26), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*27, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+27), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+27), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*28, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+28), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+28), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*29, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+29), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+29), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*30, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+30), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+30), 32); + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*31, + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top+31), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top+31), 32); + + } + } + + for (i=aligned_y_size; i<(yuv420_height-top-buttom); i=i+1) { + for (j=0; j>6; + tiled_y_index = i>>5; + if (tiled_y_index & 0x1) { + /* odd fomula: 2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = tiled_y_index-1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset+2; + temp1 = (tiled_x_index>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + } else { + temp2 = (((yuv420_height-top-buttom)+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = tiled_x_index+2; + temp1 = (temp1>>2)<<2; + tiled_offset = tiled_x_index+temp1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset+tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset<<11; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset<<11; + } + } + temp1 = i&0x1F; + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+64*(temp1), + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top), 32); + } + } + + for (i=0; i<(yuv420_height-top-buttom); i=i+1) { + for (j=aligned_x_size; j<(yuv420_width-left-right); j=j+2) { + tiled_offset = 0; + tiled_x_index = j>>6; + tiled_y_index = i>>5; + if (tiled_y_index & 0x1) { + /* odd fomula: 
2+x+(x>>2)<<2+x_block_num*(y-1) */ + tiled_offset = tiled_y_index-1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset+2; + temp1 = (tiled_x_index>>2)<<2; + tiled_offset = tiled_offset+temp1; + tiled_offset = tiled_offset<<11; + } else { + temp2 = (((yuv420_height-top-buttom)+31)>>5)<<5; + if ((i+32)>2)<<2+x_block_num*y */ + temp1 = tiled_x_index+2; + temp1 = (temp1>>2)<<2; + tiled_offset = tiled_x_index+temp1; + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_offset+tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset<<11; + } else { + /* even2 fomula: x+x_block_num*y */ + temp1 = (((yuv420_width-left-right)+127)>>7)<<7; + tiled_offset = tiled_y_index*(temp1>>6); + tiled_offset = tiled_offset+tiled_x_index; + tiled_offset = tiled_offset<<11; + } + } + temp1 = i&0x1F; + temp2 = j&0x3F; + csc_interleave_memcpy(nv12t_uv_dest+tiled_offset+temp2+64*(temp1), + yuv420_u_src+left/2+j/2+yuv420_width/2*(i+top), + yuv420_v_src+left/2+j/2+yuv420_width/2*(i+top), 1); + } + } +} +#else +/* 2D Configurable tiled memory access (TM) + * Return the linear address from tiled position (x, y) */ +static unsigned int Tile2D_To_Linear( + unsigned int width, + unsigned int height, + unsigned int xpos, + unsigned int ypos, + int crFlag) +{ + int tileNumX; + int tileX, tileY; + int tileAddr; + int offset; + int addr; + + width = ((width + 15) / 16) * 16; + height = ((height + 15) / 16) * 16; + tileNumX = width / 16; + + /* crFlag - 0: Y plane, 1: CbCr plane */ + if (crFlag == 0) { + tileX = xpos / 16; + tileY = ypos / 16; + tileAddr = tileY * tileNumX + tileX; + offset = (ypos & 15) * 16 + (xpos & 15); + addr = (tileAddr << 8) | offset; + } else { + tileX = xpos / 16; + tileY = ypos / 8; + tileAddr = tileY * tileNumX + tileX; + offset = (ypos & 7) * 16 + (xpos & 15); + addr = (tileAddr << 7) | offset; + } + + return addr; +} + +static void Tile2D_To_YUV420(unsigned char *Y_plane, unsigned char *Cb_plane, unsigned char *Cr_plane, + unsigned int y_addr, unsigned int c_addr, unsigned int width, unsigned int height) +{ + unsigned int x, y, j, k, l; + unsigned int out_of_width, actual_width, data; + unsigned long base_addr; + + // y: 0, 16, 32, ... + for (y = 0; y < height; y += 16) { + // x: 0, 16, 32, ... + for (x = 0; x < width; x += 16) { + out_of_width = (x + 16) > width ? 1 : 0; + base_addr = y_addr + Tile2D_To_Linear(width, height, x, y, 0); + + for (k = 0; (k < 16) && ((y + k) < height); k++) { + actual_width = out_of_width ? ((width%4)?((width%16) / 4 + 1) : ((width%16) / 4)) : 4; + for (l = 0; l < actual_width; l++) { + data = *((unsigned int *)(base_addr + 16*k + l*4)); + for (j = 0; (j < 4) && (x + l*4 + j) < width; j++) { + Y_plane[(y+k)*width + x + l*4 +j] = (data>>(8*j))&0xff; + } + } + } + } + } + + for (y = 0; y < height/2; y += 8) { + for (x = 0; x < width; x += 16) { + out_of_width = (x + 16) > width ? 1 : 0; + base_addr = c_addr + Tile2D_To_Linear(width, height/2, x, y, 1); + for (k = 0; (k < 8) && ((y+k) < height/2); k++) { + actual_width = out_of_width ? ((width%4) ? 
((width%16) / 4 + 1) : ((width%16) / 4)) : 4; + for (l = 0; l < actual_width; l++) { + data = *((unsigned int *)(base_addr + 16*k + l*4)); + for (j = 0; (j < 2) && (x/2 + l*2 +j) < width/2; j++) { + Cb_plane[(y+k)*width/2 + x/2 + l*2 +j] = (data>> (8*2*j))&0xff; + Cr_plane[(y+k)*width/2 + x/2 + l*2 +j] = (data>>(8*2*j+8))&0xff; + } + } + } + } + } +} +#endif /* USE_NV12T_128X64 */ + +/* + * De-interleaves src to dest1, dest2 + * + * @param dest1 + * Address of de-interleaved data[out] + * + * @param dest2 + * Address of de-interleaved data[out] + * + * @param src + * Address of interleaved data[in] + * + * @param src_size + * Size of interleaved data[in] + */ +void csc_deinterleave_memcpy( + unsigned char *dest1, + unsigned char *dest2, + unsigned char *src, + unsigned int src_size) +{ + unsigned int i = 0; + for(i=0; i> 4) << 4; + + for (i = 0; i < aligned_height; i = i + 16) { + for (j = 0; j> 4) << 4; + + for (i = 0; i < aligned_height; i = i + 8) { + for (j = 0; j> 4) << 4; + + for (i = 0; i < aligned_height; i = i + 8) { + for (j = 0; j> 1) * i + (j >> 1); + for (k = 0; k < 4; k++) { + csc_deinterleave_memcpy(u_dst + dst_offset, v_dst + dst_offset, + uv_src + src_offset, 16); + src_offset += 16; + dst_offset += width >> 1; + csc_deinterleave_memcpy(u_dst + dst_offset, v_dst + dst_offset, + uv_src + src_offset, 16); + src_offset += 16; + dst_offset += width >> 1; + } + } + if (aligned_width != width) { + src_offset = (tiled_width * i) + (j << 3); + dst_offset = (width >> 1) * i + (j >> 1); + for (k = 0; k < 4; k++) { + csc_deinterleave_memcpy(u_dst + dst_offset, v_dst + dst_offset, + uv_src + src_offset, width - j); + src_offset += 16; + dst_offset += width >> 1; + csc_deinterleave_memcpy(u_dst + dst_offset, v_dst + dst_offset, + uv_src + src_offset, width - j); + src_offset += 16; + dst_offset += width >> 1; + } + } + } + if (aligned_height != height) { + for (j = 0; j> 1) * i + (j >> 1); + for (k = 0; k < height - aligned_height; k = k + 1) { + csc_deinterleave_memcpy(u_dst + dst_offset, v_dst + dst_offset, + uv_src + src_offset, 16); + src_offset += 16; + dst_offset += width >> 1; + } + } + if (aligned_width != width) { + src_offset = (tiled_width * i) + (j << 3); + dst_offset = (width >> 1) * i + (j >> 1); + for (k = 0; k < height - aligned_height; k = k + 1) { + csc_deinterleave_memcpy(u_dst + dst_offset, v_dst + dst_offset, + uv_src + src_offset, width - j); + src_offset += 16; + dst_offset += width >> 1; + } + } + } +#endif /* USE_NV12T_128X64 */ +#endif /* NEON_SUPPORT */ +} + +/* + * Converts linear data to tiled + * 1. y of yuv420 to y of nv12t + * + * @param dst + * y address of nv12t[out] + * + * @param src + * y address of yuv420[in] + * + * @param yuv420_width + * real width of yuv420[in] + * it should be even + * + * @param yuv420_height + * real height of yuv420[in] + * it should be even. + * + */ +void csc_linear_to_tiled_y( + unsigned char *y_dst, + unsigned char *y_src, + unsigned int width, + unsigned int height) +{ +#ifdef USE_NV12T_128X64 +#ifdef NEON_SUPPORT + csc_linear_to_tiled_crop_neon(y_dst, y_src, width, height, 0, 0, 0, 0); +#else + csc_linear_to_tiled_crop(y_dst, y_src, width, height, 0, 0, 0, 0); +#endif /* NEON_SUPPORT */ +#else + unsigned char *dst = y_dst; + unsigned char *src = y_src; + unsigned int w = width; + unsigned int h = height; +#endif /* USE_NV12T_128X64 */ +} + +/* + * Converts and interleaves linear data to tiled + * 1. 
uv of nv12t to uv of yuv420 + * + * @param dst + * uv address of nv12t[out] + * + * @param src + * u address of yuv420[in] + * + * @param src + * v address of yuv420[in] + * + * @param yuv420_width + * real width of yuv420[in] + * + * @param yuv420_height + * real height of yuv420[in] + * + */ +void csc_linear_to_tiled_uv( + unsigned char *uv_dst, + unsigned char *u_src, + unsigned char *v_src, + unsigned int width, + unsigned int height) +{ +#ifdef USE_NV12T_128X64 +#ifdef NEON_SUPPORT + csc_linear_to_tiled_interleave_crop_neon(uv_dst, u_src, v_src, width, height, 0, 0, 0, 0); +#else + csc_linear_to_tiled_interleave_crop(uv_dst, u_src, v_src, width, height, 0, 0, 0, 0); +#endif /* NEON_SUPPORT */ +#else + unsigned char *uv = uv_dst; + unsigned char *u = u_src; + unsigned char *v = v_src; + unsigned int w = width; + unsigned int h = height; +#endif /* USE_NV12T_128X64 */ +} + +/* + * Converts RGB565 to YUV420P + * + * @param y_dst + * Y plane address of YUV420P[out] + * + * @param u_dst + * U plane address of YUV420P[out] + * + * @param v_dst + * V plane address of YUV420P[out] + * + * @param rgb_src + * Address of RGB565[in] + * + * @param width + * Width of RGB565[in] + * + * @param height + * Height of RGB565[in] + */ +void csc_RGB565_to_YUV420P( + unsigned char *y_dst, + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *rgb_src, + int width, + int height) +{ + int i, j; + unsigned int tmp; + + unsigned int R, G, B; + unsigned int Y, U, V; + + unsigned int offset1 = width * height; + unsigned int offset2 = width/2 * height/2; + + unsigned short int *pSrc = (unsigned short int *)rgb_src; + + unsigned char *pDstY = (unsigned char *)y_dst; + unsigned char *pDstU = (unsigned char *)u_dst; + unsigned char *pDstV = (unsigned char *)v_dst; + + unsigned int yIndex = 0; + unsigned int uIndex = 0; + unsigned int vIndex = 0; + + for (j = 0; j < height; j++) { + for (i = 0; i < width; i++) { + tmp = pSrc[j * width + i]; + + R = (tmp & 0x0000F800) >> 8; + G = (tmp & 0x000007E0) >> 3; + B = (tmp & 0x0000001F); + B = B << 3; + + Y = ((66 * R) + (129 * G) + (25 * B) + 128); + Y = Y >> 8; + Y += 16; + + pDstY[yIndex++] = (unsigned char)Y; + + if ((j % 2) == 0 && (i % 2) == 0) { + U = ((-38 * R) - (74 * G) + (112 * B) + 128); + U = U >> 8; + U += 128; + V = ((112 * R) - (94 * G) - (18 * B) + 128); + V = V >> 8; + V += 128; + + pDstU[uIndex++] = (unsigned char)U; + pDstV[vIndex++] = (unsigned char)V; + } + } + } +} + +/* + * Converts RGB565 to YUV420SP + * + * @param y_dst + * Y plane address of YUV420SP[out] + * + * @param uv_dst + * UV plane address of YUV420SP[out] + * + * @param rgb_src + * Address of RGB565[in] + * + * @param width + * Width of RGB565[in] + * + * @param height + * Height of RGB565[in] + */ +void csc_RGB565_to_YUV420SP( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + int width, + int height) +{ + int i, j; + unsigned int tmp; + + unsigned int R, G, B; + unsigned int Y, U, V; + + unsigned int offset = width * height; + + unsigned short int *pSrc = (unsigned short int *)rgb_src; + + unsigned char *pDstY = (unsigned char *)y_dst; + unsigned char *pDstUV = (unsigned char *)uv_dst; + + unsigned int yIndex = 0; + unsigned int uvIndex = 0; + + for (j = 0; j < height; j++) { + for (i = 0; i < width; i++) { + tmp = pSrc[j * width + i]; + + R = (tmp & 0x0000F800) >> 11; + R = R * 8; + G = (tmp & 0x000007E0) >> 5; + G = G * 4; + B = (tmp & 0x0000001F); + B = B * 8; + + Y = ((66 * R) + (129 * G) + (25 * B) + 128); + Y = Y >> 8; + Y += 16; + + 
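
Every csc_RGB*_to_YUV* routine in this file uses the same fixed-point BT.601 studio-range conversion: Y = ((66R + 129G + 25B + 128) >> 8) + 16 and, for one chroma sample per 2x2 block, U = ((-38R - 74G + 112B + 128) >> 8) + 128 and V = ((112R - 94G - 18B + 128) >> 8) + 128. As a spot check, white (R = G = B = 255) gives Y = ((220*255 + 128) >> 8) + 16 = 235 and U = V = 128. A per-pixel helper restating that math (the name is illustrative, not a symbol from this patch):

    /* Fixed-point BT.601 RGB -> YCbCr step shared by the csc_*_to_YUV* routines
     * (illustrative). The chroma sums can wrap in unsigned arithmetic; the final
     * truncation to 8 bits yields the intended value, which always lies in
     * [16, 240]. */
    static void rgb888_to_ycbcr601(unsigned int R, unsigned int G, unsigned int B,
                                   unsigned char *Yp, unsigned char *Up, unsigned char *Vp)
    {
        unsigned int Y, U, V;

        Y = ((66 * R) + (129 * G) + (25 * B) + 128) >> 8;
        Y += 16;

        U = ((-38 * R) - (74 * G) + (112 * B) + 128) >> 8;
        U += 128;

        V = ((112 * R) - (94 * G) - (18 * B) + 128) >> 8;
        V += 128;

        *Yp = (unsigned char)Y;
        *Up = (unsigned char)U;
        *Vp = (unsigned char)V;
    }
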
pDstY[yIndex++] = (unsigned char)Y; + + if ((j % 2) == 0 && (i % 2) == 0) { + U = ((-38 * R) - (74 * G) + (112 * B) + 128); + U = U >> 8; + U += 128; + V = ((112 * R) - (94 * G) - (18 * B) + 128); + V = V >> 8; + V += 128; + + pDstUV[uvIndex++] = (unsigned char)U; + pDstUV[uvIndex++] = (unsigned char)V; + } + } + } +} + +/* + * Converts BGRA8888 to YUV420P + * + * @param y_dst + * Y plane address of YUV420P[out] + * + * @param u_dst + * U plane address of YUV420P[out] + * + * @param v_dst + * V plane address of YUV420P[out] + * + * @param rgb_src + * Address of BGRA8888[in] + * + * @param width + * Width of BGRA8888[in] + * + * @param height + * Height of BGRA8888[in] + */ +void csc_BGRA8888_to_YUV420P( + unsigned char *y_dst, + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height) +{ + unsigned int i, j; + unsigned int tmp; + + unsigned int R, G, B; + unsigned int Y, U, V; + + unsigned int offset1 = width * height; + unsigned int offset2 = width/2 * height/2; + + unsigned int *pSrc = (unsigned int *)rgb_src; + + unsigned char *pDstY = (unsigned char *)y_dst; + unsigned char *pDstU = (unsigned char *)u_dst; + unsigned char *pDstV = (unsigned char *)v_dst; + + unsigned int yIndex = 0; + unsigned int uIndex = 0; + unsigned int vIndex = 0; + + for (j = 0; j < height; j++) { + for (i = 0; i < width; i++) { + tmp = pSrc[j * width + i]; + + R = (tmp & 0x00FF0000) >> 16; + G = (tmp & 0x0000FF00) >> 8; + B = (tmp & 0x000000FF); + + Y = ((66 * R) + (129 * G) + (25 * B) + 128); + Y = Y >> 8; + Y += 16; + + pDstY[yIndex++] = (unsigned char)Y; + + if ((j % 2) == 0 && (i % 2) == 0) { + U = ((-38 * R) - (74 * G) + (112 * B) + 128); + U = U >> 8; + U += 128; + V = ((112 * R) - (94 * G) - (18 * B) + 128); + V = V >> 8; + V += 128; + + pDstU[uIndex++] = (unsigned char)U; + pDstV[vIndex++] = (unsigned char)V; + } + } + } +} + +/* + * Converts RGBA8888 to YUV420P + * + * @param y_dst + * Y plane address of YUV420P[out] + * + * @param u_dst + * U plane address of YUV420P[out] + * + * @param v_dst + * V plane address of YUV420P[out] + * + * @param rgb_src + * Address of RGBA8888[in] + * + * @param width + * Width of RGBA8888[in] + * + * @param height + * Height of RGBA8888[in] + */ +void csc_RGBA8888_to_YUV420P( + unsigned char *y_dst, + unsigned char *u_dst, + unsigned char *v_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height) +{ + unsigned int i, j; + unsigned int tmp; + + unsigned int R, G, B; + unsigned int Y, U, V; + + unsigned int offset1 = width * height; + unsigned int offset2 = width/2 * height/2; + + unsigned int *pSrc = (unsigned int *)rgb_src; + + unsigned char *pDstY = (unsigned char *)y_dst; + unsigned char *pDstU = (unsigned char *)u_dst; + unsigned char *pDstV = (unsigned char *)v_dst; + + unsigned int yIndex = 0; + unsigned int uIndex = 0; + unsigned int vIndex = 0; + + for (j = 0; j < height; j++) { + for (i = 0; i < width; i++) { + tmp = pSrc[j * width + i]; + + B = (tmp & 0x00FF0000) >> 16; + G = (tmp & 0x0000FF00) >> 8; + R = (tmp & 0x000000FF); + + Y = ((66 * R) + (129 * G) + (25 * B) + 128); + Y = Y >> 8; + Y += 16; + + pDstY[yIndex++] = (unsigned char)Y; + + if ((j % 2) == 0 && (i % 2) == 0) { + U = ((-38 * R) - (74 * G) + (112 * B) + 128); + U = U >> 8; + U += 128; + V = ((112 * R) - (94 * G) - (18 * B) + 128); + V = V >> 8; + V += 128; + + pDstU[uIndex++] = (unsigned char)U; + pDstV[vIndex++] = (unsigned char)V; + } + } + } +} + +/* + * Converts BGRA8888 to YUV420SP + * + * @param y_dst + * Y plane 
address of YUV420SP[out] + * + * @param uv_dst + * UV plane address of YUV420SP[out] + * + * @param rgb_src + * Address of BGRA8888[in] + * + * @param width + * Width of BGRA8888[in] + * + * @param height + * Height of BGRA8888[in] + */ +void csc_BGRA8888_to_YUV420SP( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height) +{ +#ifdef NEON_SUPPORT + csc_BGRA8888_to_YUV420SP_NEON(y_dst, uv_dst, rgb_src, width, height); +#else + unsigned int i, j; + unsigned int tmp; + + unsigned int R, G, B; + unsigned int Y, U, V; + + unsigned int offset = width * height; + + unsigned int *pSrc = (unsigned int *)rgb_src; + + unsigned char *pDstY = (unsigned char *)y_dst; + unsigned char *pDstUV = (unsigned char *)uv_dst; + + unsigned int yIndex = 0; + unsigned int uvIndex = 0; + + for (j = 0; j < height; j++) { + for (i = 0; i < width; i++) { + tmp = pSrc[j * width + i]; + + R = (tmp & 0x00FF0000) >> 16; + G = (tmp & 0x0000FF00) >> 8; + B = (tmp & 0x000000FF); + + Y = ((66 * R) + (129 * G) + (25 * B) + 128); + Y = Y >> 8; + Y += 16; + + pDstY[yIndex++] = (unsigned char)Y; + + if ((j % 2) == 0 && (i % 2) == 0) { + U = ((-38 * R) - (74 * G) + (112 * B) + 128); + U = U >> 8; + U += 128; + V = ((112 * R) - (94 * G) - (18 * B) + 128); + V = V >> 8; + V += 128; + + pDstUV[uvIndex++] = (unsigned char)U; + pDstUV[uvIndex++] = (unsigned char)V; + } + } + } +#endif /* NEON_SUPPORT */ +} + +/* + * Converts RGBA8888 to YUV420SP + * + * @param y_dst + * Y plane address of YUV420SP[out] + * + * @param uv_dst + * UV plane address of YUV420SP[out] + * + * @param rgb_src + * Address of RGBA8888[in] + * + * @param width + * Width of RGBA8888[in] + * + * @param height + * Height of RGBA8888[in] + */ +void csc_RGBA8888_to_YUV420SP( + unsigned char *y_dst, + unsigned char *uv_dst, + unsigned char *rgb_src, + unsigned int width, + unsigned int height) +{ +#ifdef NEON_SUPPORT + csc_RGBA8888_to_YUV420SP_NEON(y_dst, uv_dst, rgb_src, width, height); +#else + unsigned int i, j; + unsigned int tmp; + + unsigned int R, G, B; + unsigned int Y, U, V; + + unsigned int offset = width * height; + + unsigned int *pSrc = (unsigned int *)rgb_src; + + unsigned char *pDstY = (unsigned char *)y_dst; + unsigned char *pDstUV = (unsigned char *)uv_dst; + + unsigned int yIndex = 0; + unsigned int uvIndex = 0; + + for (j = 0; j < height; j++) { + for (i = 0; i < width; i++) { + tmp = pSrc[j * width + i]; + + B = (tmp & 0x00FF0000) >> 16; + G = (tmp & 0x0000FF00) >> 8; + R = (tmp & 0x000000FF); + + Y = ((66 * R) + (129 * G) + (25 * B) + 128); + Y = Y >> 8; + Y += 16; + + pDstY[yIndex++] = (unsigned char)Y; + + if ((j % 2) == 0 && (i % 2) == 0) { + U = ((-38 * R) - (74 * G) + (112 * B) + 128); + U = U >> 8; + U += 128; + V = ((112 * R) - (94 * G) - (18 * B) + 128); + V = V >> 8; + V += 128; + + pDstUV[uvIndex++] = (unsigned char)U; + pDstUV[uvIndex++] = (unsigned char)V; + } + } + } +#endif /* NEON_SUPPORT */ +} diff --git a/libv4l2/Android.mk b/libv4l2/Android.mk new file mode 100644 index 0000000..8809fa2 --- /dev/null +++ b/libv4l2/Android.mk @@ -0,0 +1,38 @@ +# Copyright (C) 2011 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + exynos_v4l2.c \ + exynos_subdev.c \ + exynos_mc.c + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils + +LOCAL_SHARED_LIBRARIES := \ + liblog \ + libutils \ + libexynosutils + +LOCAL_MODULE := libexynosv4l2 +LOCAL_PRELINK_MODULE := false +LOCAL_MODULE_TAGS := eng + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libv4l2/NOTICE b/libv4l2/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libv4l2/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libv4l2/exynos_mc.c b/libv4l2/exynos_mc.c new file mode 100644 index 0000000..0f9cf95 --- /dev/null +++ b/libv4l2/exynos_mc.c @@ -0,0 +1,776 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file exynos_mc.c + * \brief source file for libexynosv4l2 + * \author Jinsung Yang (jsgood.yang@samsung.com) + * \author Sangwoo Park (sw5771.park@samsung.com) + * \date 2012/01/17 + * + * Revision History: + * - 2012/01/17: Jinsung Yang (jsgood.yang@samsung.com) \n + * Initial version + * + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "exynos_v4l2.h" + +//#define LOG_NDEBUG 0 +#define LOG_TAG "libexynosv4l2-mc" +#include + +static inline unsigned int __media_entity_type(struct media_entity *entity) +{ + return entity->info.type & MEDIA_ENT_TYPE_MASK; +} + +static void __media_debug_default(void *ptr, ...) 
+{ + va_list argptr; + va_start(argptr, ptr); + vprintf((const char*)ptr, argptr); + va_end(argptr); +} + +static void __media_debug_set_handler( + struct media_device *media, + void (*debug_handler)(void *, ...), + void *debug_priv) +{ + if (debug_handler) { + media->debug_handler = debug_handler; + media->debug_priv = debug_priv; + } else { + media->debug_handler = __media_debug_default; + media->debug_priv = NULL; + } +} + +static struct media_link *__media_entity_add_link(struct media_entity *entity) +{ + if (entity->num_links >= entity->max_links) { + struct media_link *links = entity->links; + unsigned int max_links = entity->max_links * 2; + unsigned int i; + + links = (struct media_link*)realloc(links, max_links * sizeof *links); + if (links == NULL) + return NULL; + + for (i = 0; i < entity->num_links; ++i) + links[i].twin->twin = &links[i]; + + entity->max_links = max_links; + entity->links = links; + } + + return &entity->links[entity->num_links++]; +} + + +static int __media_enum_links(struct media_device *media) +{ + ALOGD("%s: start", __func__); + __u32 id; + int ret = 0; + + for (id = 1; id <= media->entities_count; id++) { + struct media_entity *entity = &media->entities[id - 1]; + struct media_links_enum links; + unsigned int i; + + links.entity = entity->info.id; + links.pads = (struct media_pad_desc*)malloc(entity->info.pads * sizeof(struct media_pad_desc)); + links.links = (struct media_link_desc*)malloc(entity->info.links * sizeof(struct media_link_desc)); + + if (ioctl(media->fd, MEDIA_IOC_ENUM_LINKS, &links) < 0) { + ALOGE("Unable to enumerate pads and links (%s)", strerror(errno)); + free(links.pads); + free(links.links); + return -errno; + } + + for (i = 0; i < entity->info.pads; ++i) { + entity->pads[i].entity = entity; + entity->pads[i].index = links.pads[i].index; + entity->pads[i].flags = links.pads[i].flags; + } + + for (i = 0; i < entity->info.links; ++i) { + struct media_link_desc *link = &links.links[i]; + struct media_link *fwdlink; + struct media_link *backlink; + struct media_entity *source; + struct media_entity *sink; + + source = exynos_media_get_entity_by_id(media, link->source.entity); + sink = exynos_media_get_entity_by_id(media, link->sink.entity); + if (source == NULL || sink == NULL) { + ALOGE("WARNING entity %u link %u from %u/%u to %u/%u is invalid!", + id, i, link->source.entity, + link->source.index, + link->sink.entity, + link->sink.index); + ret = -EINVAL; + } else { + fwdlink = __media_entity_add_link(source); + fwdlink->source = &source->pads[link->source.index]; + fwdlink->sink = &sink->pads[link->sink.index]; + fwdlink->flags = link->flags; + + backlink = __media_entity_add_link(sink); + backlink->source = &source->pads[link->source.index]; + backlink->sink = &sink->pads[link->sink.index]; + backlink->flags = link->flags; + + fwdlink->twin = backlink; + backlink->twin = fwdlink; + } + } + + free(links.pads); + free(links.links); + } + return ret; +} + +static int __media_get_devname_sysfs(struct media_entity *entity) +{ + //struct stat devstat; + char devname[32]; + char sysname[32]; + char target[1024]; + char *p; + int ret; + + snprintf(sysname, sizeof(sysname), "/sys/dev/char/%u:%u", entity->info.v4l.major, + entity->info.v4l.minor); + + ret = readlink(sysname, target, sizeof(target)); + if (ret < 0 || ret >= (int)sizeof(target)) + return -errno; + + target[ret] = '\0'; + p = strrchr(target, '/'); + if (p == NULL) + return -EINVAL; + + snprintf(devname, sizeof(devname), "/tmp/%s", p + 1); + + ret = mknod(devname, 0666 | S_IFCHR, 
MKDEV(81, entity->info.v4l.minor)); + strncpy(entity->devname, devname, sizeof(devname) - 1); + + return 0; +} + +static int __media_get_media_fd(const char *filename, struct media_device *media) +{ + ssize_t num; + int media_node; + char *ptr; + + ALOGD("%s: %s", __func__, filename); + + media->fd = open(filename, O_RDWR, 0); + if (media->fd < 0) { + ALOGE("Open sysfs media device failed, media->fd: %d", media->fd); + return -1; + } + + ALOGD("%s: media->fd: %d", __func__, media->fd); + + return media->fd; + +} + +static int __media_enum_entities(struct media_device *media) +{ + struct media_entity *entity, *temp_entity; + unsigned int size; + __u32 id; + int ret; + + temp_entity = entity = (struct media_entity*)calloc(1, sizeof(struct media_entity)); + for (id = 0, ret = 0; ; id = entity->info.id) { + size = (media->entities_count + 1) * sizeof(*media->entities); + media->entities = (struct media_entity*)realloc(media->entities, size); + + entity = &media->entities[media->entities_count]; + memset(entity, 0, sizeof(*entity)); + entity->fd = -1; + entity->info.id = id | MEDIA_ENT_ID_FLAG_NEXT; + entity->media = media; + + ret = ioctl(media->fd, MEDIA_IOC_ENUM_ENTITIES, &entity->info); + + if (ret < 0) { + ret = errno != EINVAL ? -errno : 0; + break; + } + + /* Number of links (for outbound links) plus number of pads (for + * inbound links) is a good safe initial estimate of the total + * number of links. + */ + entity->max_links = entity->info.pads + entity->info.links; + + entity->pads = (struct media_pad*)malloc(entity->info.pads * sizeof(*entity->pads)); + entity->links = (struct media_link*)malloc(entity->max_links * sizeof(*entity->links)); + if (entity->pads == NULL || entity->links == NULL) { + ret = -ENOMEM; + break; + } + + media->entities_count++; + + /* Find the corresponding device name. */ + if (__media_entity_type(entity) != MEDIA_ENT_T_DEVNODE && + __media_entity_type(entity) != MEDIA_ENT_T_V4L2_SUBDEV) + continue; + + /* Fall back to get the device name via sysfs */ + __media_get_devname_sysfs(entity); + if (ret < 0) + ALOGE("media_get_devname failed"); + } + free(temp_entity); + + return ret; +} + +static struct media_device *__media_open_debug( + const char *filename, + void (*debug_handler)(void *, ...), + void *debug_priv) +{ + struct media_device *media; + int ret; + + media = (struct media_device *)calloc(1, sizeof(struct media_device)); + if (media == NULL) { + ALOGE("media: %p", media); + return NULL; + } + + __media_debug_set_handler(media, debug_handler, debug_priv); + + ALOGD("%s: Opening media device %s", __func__, filename); + ALOGD("%s: media: %p", __func__, media); + + media->fd = __media_get_media_fd(filename, media); + if (media->fd < 0) { + exynos_media_close(media); + ALOGE("failed __media_get_media_fd %s", filename); + return NULL; + } + + ALOGD("%s: media->fd: %d", __func__, media->fd); + ret = __media_enum_entities(media); + + if (ret < 0) { + ALOGE("Unable to enumerate entities for device %s (%s)", filename, strerror(-ret)); + exynos_media_close(media); + return NULL; + } + + ALOGD("%s: Found %u entities", __func__, media->entities_count); + ALOGD("%s: Enumerating pads and links", __func__); + + ret = __media_enum_links(media); + if (ret < 0) { + ALOGE("Unable to enumerate pads and links for device %s", filename); + exynos_media_close(media); + return NULL; + } + + return media; +} + +/** + * @brief Open a media device. + * @param filename - name (including path) of the device node. 
+ * + * Open the media device referenced by @a filename and enumerate entities, pads and + * links. + * + * @return A pointer to a newly allocated media_device structure instance on + * success and NULL on failure. The returned pointer must be freed with + * exynos_media_close when the device isn't needed anymore. + */ +struct media_device *exynos_media_open(const char *filename) +{ + return __media_open_debug(filename, (void (*)(void *, ...))fprintf, stdout); +} + +/** + * @brief Close a media device. + * @param media - device instance. + * + * Close the @a media device instance and free allocated resources. Access to the + * device instance is forbidden after this function returns. + */ +void exynos_media_close(struct media_device *media) +{ + unsigned int i; + + if (media->fd != -1) + close(media->fd); + + for (i = 0; i < media->entities_count; ++i) { + struct media_entity *entity = &media->entities[i]; + + free(entity->pads); + free(entity->links); + if (entity->fd != -1) + close(entity->fd); + } + + free(media->entities); + free(media); +} + +/** + * @brief Locate the pad at the other end of a link. + * @param pad - sink pad at one end of the link. + * + * Locate the source pad connected to @a pad through an enabled link. As only one + * link connected to a sink pad can be enabled at a time, the connected source + * pad is guaranteed to be unique. + * + * @return A pointer to the connected source pad, or NULL if all links connected + * to @a pad are disabled. Return NULL also if @a pad is not a sink pad. + */ +struct media_pad *exynos_media_entity_remote_source(struct media_pad *pad) +{ + unsigned int i; + + if (!(pad->flags & MEDIA_PAD_FL_SINK)) + return NULL; + + for (i = 0; i < pad->entity->num_links; ++i) { + struct media_link *link = &pad->entity->links[i]; + + if (!(link->flags & MEDIA_LNK_FL_ENABLED)) + continue; + + if (link->sink == pad) + return link->source; + } + + return NULL; +} + +/** + * @brief Find an entity by its name. + * @param media - media device. + * @param name - entity name. + * @param length - size of @a name. + * + * Search for an entity with a name equal to @a name. + * + * @return A pointer to the entity if found, or NULL otherwise. + */ +struct media_entity *exynos_media_get_entity_by_name(struct media_device *media, + const char *name, size_t length) +{ + unsigned int i; + struct media_entity *entity; + + for (i = 0; i < media->entities_count; ++i) { + entity = &media->entities[i]; + + if (strncmp(entity->info.name, name, length) == 0) + return entity; + } + + return NULL; +} + +/** + * @brief Find an entity by its ID. + * @param media - media device. + * @param id - entity ID. + * + * Search for an entity with an ID equal to @a id. + * + * @return A pointer to the entity if found, or NULL otherwise. + */ +struct media_entity *exynos_media_get_entity_by_id(struct media_device *media, + __u32 id) +{ + unsigned int i; + + for (i = 0; i < media->entities_count; ++i) { + struct media_entity *entity = &media->entities[i]; + + if (entity->info.id == id) + return entity; + } + + return NULL; +} + +/** + * @brief Configure a link. + * @param media - media device. + * @param source - source pad at the link origin. + * @param sink - sink pad at the link target. + * @param flags - configuration flags. + * + * Locate the link between @a source and @a sink, and configure it by applying + * the new @a flags. + * + * Only the MEDIA_LINK_FLAG_ENABLED flag is writable. 
+ * + * @return 0 on success, -1 on failure: + * -ENOENT: link not found + * - other error codes returned by MEDIA_IOC_SETUP_LINK + */ +int exynos_media_setup_link(struct media_device *media, + struct media_pad *source, + struct media_pad *sink, + __u32 flags) +{ + struct media_link *link; + struct media_link_desc ulink; + unsigned int i; + int ret; + + for (i = 0; i < source->entity->num_links; i++) { + link = &source->entity->links[i]; + + if (link->source->entity == source->entity && + link->source->index == source->index && + link->sink->entity == sink->entity && + link->sink->index == sink->index) + break; + } + + if (i == source->entity->num_links) { + ALOGE("Link not found"); + return -ENOENT; + } + + /* source pad */ + ulink.source.entity = source->entity->info.id; + ulink.source.index = source->index; + ulink.source.flags = MEDIA_PAD_FL_SOURCE; + + /* sink pad */ + ulink.sink.entity = sink->entity->info.id; + ulink.sink.index = sink->index; + ulink.sink.flags = MEDIA_PAD_FL_SINK; + + ulink.flags = flags | (link->flags & MEDIA_LNK_FL_IMMUTABLE); + + ret = ioctl(media->fd, MEDIA_IOC_SETUP_LINK, &ulink); + if (ret == -1) { + ALOGE("Unable to setup link (%s)", strerror(errno)); + return -errno; + } + + link->flags = ulink.flags; + link->twin->flags = ulink.flags; + return 0; +} + +/** + * @brief Reset all links to the disabled state. + * @param media - media device. + * + * Disable all links in the media device. This function is usually used after + * opening a media device to reset all links to a known state. + * + * @return 0 on success, or a negative error code on failure. + */ +int exynos_media_reset_links(struct media_device *media) +{ + unsigned int i, j; + int ret; + + for (i = 0; i < media->entities_count; ++i) { + struct media_entity *entity = &media->entities[i]; + + for (j = 0; j < entity->num_links; j++) { + struct media_link *link = &entity->links[j]; + + if (link->flags & MEDIA_LNK_FL_IMMUTABLE || + link->source->entity != entity) + continue; + + ret = exynos_media_setup_link(media, link->source, link->sink, + link->flags & ~MEDIA_LNK_FL_ENABLED); + if (ret < 0) + return ret; + } + } + + return 0; +} + +#ifdef HAVE_LIBUDEV + +#include + +static inline int __media_udev_open(struct udev **udev) +{ + *udev = udev_new(); + if (*udev == NULL) + return -ENOMEM; + return 0; +} + +static inline void __media_udev_close(struct udev *udev) +{ + if (udev != NULL) + udev_unref(udev); +} + +static int __media_get_devname_udev(struct udev *udev, + struct media_entity *entity) +{ + struct udev_device *device; + dev_t devnum; + const char *p; + int ret = -ENODEV; + + if (udev == NULL) + return -EINVAL; + + devnum = makedev(entity->info.v4l.major, entity->info.v4l.minor); + ALOGE("looking up device: %u:%u", + major(devnum), minor(devnum)); + device = udev_device_new_from_devnum(udev, 'c', devnum); + if (device) { + p = udev_device_get_devnode(device); + if (p) { + strncpy(entity->devname, p, sizeof(entity->devname)); + entity->devname[sizeof(entity->devname) - 1] = '\0'; + } + ret = 0; + } + + udev_device_unref(device); + + return ret; +} + +#else /* HAVE_LIBUDEV */ + +struct udev; + +static inline int __media_udev_open(struct udev **udev) { return 0; } + +static inline void __media_udev_close(struct udev *udev) { } + +static inline int __media_get_devname_udev(struct udev *udev, + struct media_entity *entity) +{ + return -ENOTSUP; +} + +#endif /* HAVE_LIBUDEV */ + +/** + * @brief Parse string to a pad on the media device. + * @param media - media device. 
+ * @param p - input string + * @param endp - pointer to string where parsing ended + * + * Parse NULL terminated string describing a pad and return its struct + * media_pad instance. + * + * @return Pointer to struct media_pad on success, NULL on failure. + */ +struct media_pad *exynos_media_parse_pad(struct media_device *media, + const char *p, char **endp) +{ + unsigned int entity_id, pad; + struct media_entity *entity; + char *end; + + for (; isspace(*p); ++p); + + if (*p == '"') { + for (end = (char *)p + 1; *end && *end != '"'; ++end); + if (*end != '"') + return NULL; + + entity = exynos_media_get_entity_by_name(media, p + 1, end - p - 1); + if (entity == NULL) + return NULL; + + ++end; + } else { + entity_id = strtoul(p, &end, 10); + entity = exynos_media_get_entity_by_id(media, entity_id); + if (entity == NULL) + return NULL; + } + for (; isspace(*end); ++end); + + if (*end != ':') + return NULL; + for (p = end + 1; isspace(*p); ++p); + + pad = strtoul(p, &end, 10); + for (p = end; isspace(*p); ++p); + + if (pad >= entity->info.pads) + return NULL; + + for (p = end; isspace(*p); ++p); + if (endp) + *endp = (char *)p; + + return &entity->pads[pad]; +} + +/** + * @brief Parse string to a link on the media device. + * @param media - media device. + * @param p - input string + * @param endp - pointer to p where parsing ended + * + * Parse NULL terminated string p describing a link and return its struct + * media_link instance. + * + * @return Pointer to struct media_link on success, NULL on failure. + */ +struct media_link *exynos_media_parse_link( + struct media_device *media, + const char *p, + char **endp) +{ + struct media_link *link; + struct media_pad *source; + struct media_pad *sink; + unsigned int i; + char *end; + + source = exynos_media_parse_pad(media, p, &end); + if (source == NULL) + return NULL; + + if (end[0] != '-' || end[1] != '>') + return NULL; + p = end + 2; + + sink = exynos_media_parse_pad(media, p, &end); + if (sink == NULL) + return NULL; + + *endp = end; + + for (i = 0; i < source->entity->num_links; i++) { + link = &source->entity->links[i]; + + if (link->source == source && link->sink == sink) + return link; + } + + return NULL; +} + +/** + * @brief Parse string to a link on the media device and set it up. + * @param media - media device. + * @param p - input string + * + * Parse NULL terminated string p describing a link and its configuration + * and configure the link. + * + * @return 0 on success, or a negative error code on failure. + */ +int exynos_media_parse_setup_link( + struct media_device *media, + const char *p, + char **endp) +{ + struct media_link *link; + __u32 flags; + char *end; + + link = exynos_media_parse_link(media, p, &end); + if (link == NULL) { + ALOGE("Unable to parse link"); + return -EINVAL; + } + + p = end; + if (*p++ != '[') { + ALOGE("Unable to parse link flags"); + return -EINVAL; + } + + flags = strtoul(p, &end, 10); + for (p = end; isspace(*p); p++); + if (*p++ != ']') { + ALOGE("Unable to parse link flags"); + return -EINVAL; + } + + for (; isspace(*p); p++); + *endp = (char *)p; + + ALOGD("%s: Setting up link %u:%u -> %u:%u [%u]", __func__, + link->source->entity->info.id, link->source->index, + link->sink->entity->info.id, link->sink->index, + flags); + + return exynos_media_setup_link(media, link->source, link->sink, flags); +} + +/** + * @brief Parse string to link(s) on the media device and set it up. + * @param media - media device. 
+ * @param p - input string + * + * Parse NULL terminated string p describing link(s) separated by + * commas (,) and configure the link(s). + * + * @return 0 on success, or a negative error code on failure. + */ +int exynos_media_parse_setup_links(struct media_device *media, const char *p) +{ + char *end; + int ret; + + do { + ret = exynos_media_parse_setup_link(media, p, &end); + if (ret < 0) + return ret; + + p = end + 1; + } while (*end == ','); + + return *end ? -EINVAL : 0; +} diff --git a/libv4l2/exynos_subdev.c b/libv4l2/exynos_subdev.c new file mode 100644 index 0000000..916537f --- /dev/null +++ b/libv4l2/exynos_subdev.c @@ -0,0 +1,454 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! + * \file exynos_subdev.c + * \brief source file for libv4l2 + * \author Jinsung Yang (jsgood.yang@samsung.com) + * \author Sangwoo Park (sw5771.park@samsung.com) + * \date 2012/01/17 + * + * Revision History: + * - 2012/01/17: Jinsung Yang (jsgood.yang@samsung.com) \n + * Initial version + * + */ + +#include +#include +#include +#include +#include +#include + +#include "exynos_v4l2.h" + +//#define LOG_NDEBUG 0 +#define LOG_TAG "libexynosv4l2-subdev" +#include +#include + +#define SUBDEV_MAX 191 + +static int __subdev_open(const char *filename, int oflag, va_list ap) +{ + mode_t mode = 0; + int fd; + + if (oflag & O_CREAT) + mode = va_arg(ap, int); + + fd = open(filename, oflag, mode); + + return fd; +} + +int exynos_subdev_open(const char *filename, int oflag, ...) +{ + va_list ap; + int fd; + + va_start(ap, oflag); + fd = __subdev_open(filename, oflag, ap); + va_end(ap); + + return fd; +} + +int exynos_subdev_get_node_num(const char *devname, int oflag, ...) 
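+/*
+ * Scan /dev/v4l-subdev0 .. /dev/v4l-subdev(SUBDEV_MAX - 128) for a V4L2
+ * sub-device character node (major 81) whose sysfs name entry matches
+ * the given devname, and return that node number, or -1 if nothing matches.
+ * The oflag argument is accepted for symmetry with the open helpers but is
+ * not used here.
+ *
+ * A call sketch (the sub-device name is an illustrative assumption):
+ *
+ *   int num = exynos_subdev_get_node_num("exynos-fimc-is-sensor", O_RDWR);
+ */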
+{ + bool found = false; + int ret = -1; + struct stat s; + va_list ap; + FILE *stream_fd; + char filename[64], name[64]; + int i = 0; + + do { + if (i > (SUBDEV_MAX - 128)) + break; + + /* video device node */ + snprintf(filename, sizeof(filename), "/dev/v4l-subdev%d", i); + + /* if the node is video device */ + if ((lstat(filename, &s) == 0) && S_ISCHR(s.st_mode) && + ((int)((unsigned short)(s.st_rdev) >> 8) == 81)) { + ALOGD("try node: %s", filename); + /* open sysfs entry */ + snprintf(filename, sizeof(filename), "/sys/class/video4linux/v4l-subdev%d/name", i); + if (S_ISLNK(s.st_mode)) { + ALOGE("symbolic link detected"); + return -1; + } + stream_fd = fopen(filename, "r"); + if (stream_fd == NULL) { + ALOGE("failed to open sysfs entry for subdev"); + continue; /* try next */ + } + + /* read sysfs entry for device name */ + char *p = fgets(name, sizeof(name), stream_fd); + fclose(stream_fd); + + /* check read size */ + if (p == NULL) { + ALOGE("failed to read sysfs entry for subdev"); + } else { + /* matched */ + if (strncmp(name, devname, strlen(devname)) == 0) { + ALOGI("node found for device %s: /dev/v4l-subdev%d", devname, i); + found = true; + break; + } + } + } + i++; + } while (found == false); + + if (found) + ret = i; + else + ALOGE("no subdev device found"); + + return ret; +} + +int exynos_subdev_open_devname(const char *devname, int oflag, ...) +{ + bool found = false; + int fd = -1; + struct stat s; + va_list ap; + FILE *stream_fd; + char filename[64], name[64]; + long size; + int i = 0; + + do { + if (i > (SUBDEV_MAX - 128)) + break; + + /* video device node */ + snprintf(filename, sizeof(filename), "/dev/v4l-subdev%d", i); + + /* if the node is video device */ + if ((lstat(filename, &s) == 0) && S_ISCHR(s.st_mode) && + ((int)((unsigned short)(s.st_rdev) >> 8) == 81)) { + ALOGD("try node: %s", filename); + /* open sysfs entry */ + snprintf(filename, sizeof(filename), "/sys/class/video4linux/v4l-subdev%d/name", i); + if (S_ISLNK(s.st_mode)) { + ALOGE("symbolic link detected"); + return -1; + } + stream_fd = fopen(filename, "r"); + if (stream_fd == NULL) { + ALOGE("failed to open sysfs entry for subdev"); + continue; /* try next */ + } + + /* read sysfs entry for device name */ + char *p = fgets(name, sizeof(name), stream_fd); + fclose(stream_fd); + + /* check read size */ + if (p == NULL) { + ALOGE("failed to read sysfs entry for subdev"); + } else { + /* matched */ + if (strncmp(name, devname, strlen(devname)) == 0) { + ALOGI("node found for device %s: /dev/v4l-subdev%d", devname, i); + found = true; + break; + } + } + } + i++; + } while (found == false); + + if (found) { + snprintf(filename, sizeof(filename), "/dev/v4l-subdev%d", i); + va_start(ap, oflag); + fd = __subdev_open(filename, oflag, ap); + va_end(ap); + + if (fd > 0) + ALOGI("open subdev device %s", filename); + else + ALOGE("failed to open subdev device %s", filename); + } else { + ALOGE("no subdev device found"); + } + + return fd; +} + +int exynos_subdev_close(int fd) +{ + int ret = -1; + + if (fd < 0) + ALOGE("%s: invalid fd: %d", __func__, fd); + else + ret = close(fd); + + return ret; +} + +/** + * @brief enum frame size on a pad. + * @return 0 on success, or a negative error code on failure. 
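+ *
+ * A minimal enumeration sketch (the pad index and media bus code are
+ * illustrative assumptions):
+ *
+ *   struct v4l2_subdev_frame_size_enum fse;
+ *   memset(&fse, 0, sizeof(fse));
+ *   fse.pad = 0;
+ *   fse.code = V4L2_MBUS_FMT_SGRBG10_1X10;
+ *   while (exynos_subdev_enum_frame_size(fd, &fse) == 0) {
+ *       ALOGD("size range %ux%u .. %ux%u",
+ *             fse.min_width, fse.min_height, fse.max_width, fse.max_height);
+ *       fse.index++;
+ *   }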
+ */ +int exynos_subdev_enum_frame_size(int fd, struct v4l2_subdev_frame_size_enum *frame_size_enum) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!frame_size_enum) { + ALOGE("%s: frame_size_enum is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_ENUM_FRAME_SIZE, frame_size_enum); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_ENUM_FRAME_SIZE"); + return ret; + } + + return ret; +} + +/** + * @brief Retrieve the format on a pad. + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_g_fmt(int fd, struct v4l2_subdev_format *fmt) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!fmt) { + ALOGE("%s: fmt is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_G_FMT, fmt); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_G_FMT"); + return ret; + } + + return ret; +} + +/** + * @brief Set the format on a pad. + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_s_fmt(int fd, struct v4l2_subdev_format *fmt) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!fmt) { + ALOGE("%s: fmt is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_S_FMT, fmt); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_S_FMT"); + return ret; + } + + return ret; +} + +/** + * @brief Retrieve the crop rectangle on a pad. + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_g_crop(int fd, struct v4l2_subdev_crop *crop) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!crop) { + ALOGE("%s: crop is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_G_CROP, crop); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_G_CROP"); + return ret; + } + + return ret; +} + +/** + * @brief Set the crop rectangle on a pad. + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_s_crop(int fd, struct v4l2_subdev_crop *crop) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!crop) { + ALOGE("%s: crop is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_S_CROP, crop); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_S_CROP"); + return ret; + } + + return ret; +} + +/** + * @brief Retrieve the frame interval on a sub-device. + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_enum_frame_interval(int fd, struct v4l2_subdev_frame_interval_enum *frame_internval_enum) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!frame_internval_enum) { + ALOGE("%s: frame_internval_enum is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL, frame_internval_enum); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_ENUM_FRAME_INTERVAL"); + return ret; + } + + return ret; +} + +/** + * @brief Retrieve the frame interval on a sub-device. + * @return 0 on success, or a negative error code on failure. 
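+ *
+ * A minimal call sketch (the pad index is an illustrative assumption):
+ *
+ *   struct v4l2_subdev_frame_interval fi;
+ *   memset(&fi, 0, sizeof(fi));
+ *   fi.pad = 0;
+ *   if (exynos_subdev_g_frame_interval(fd, &fi) == 0)
+ *       ALOGD("frame interval %u/%u",
+ *             fi.interval.numerator, fi.interval.denominator);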
+ */ +int exynos_subdev_g_frame_interval(int fd, struct v4l2_subdev_frame_interval *frame_internval) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!frame_internval) { + ALOGE("%s: frame_internval is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_G_FRAME_INTERVAL, frame_internval); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_G_FRAME_INTERVAL"); + return ret; + } + + return ret; +} + +/** + * @brief Set the frame interval on a sub-device. + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_s_frame_interval(int fd, struct v4l2_subdev_frame_interval *frame_internval) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!frame_internval) { + ALOGE("%s: frame_internval is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_S_FRAME_INTERVAL, frame_internval); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_S_FRAME_INTERVAL"); + return ret; + } + + return ret; +} + +/** + * @brief enum mbus code + * @return 0 on success, or a negative error code on failure. + */ +int exynos_subdev_enum_mbus_code(int fd, struct v4l2_subdev_mbus_code_enum *mbus_code_enum) +{ + int ret = -1; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!mbus_code_enum) { + ALOGE("%s: mbus_code_enum is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_SUBDEV_ENUM_MBUS_CODE, mbus_code_enum); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_SUBDEV_ENUM_MBUS_CODE"); + return ret; + } + + return ret; +} diff --git a/libv4l2/exynos_v4l2.c b/libv4l2/exynos_v4l2.c new file mode 100644 index 0000000..d11fe70 --- /dev/null +++ b/libv4l2/exynos_v4l2.c @@ -0,0 +1,889 @@ +/* + * Copyright (C) 2011 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/*! 
+ * \file exynos_v4l2.c + * \brief source file for libv4l2 + * \author Jinsung Yang (jsgood.yang@samsung.com) + * \author Sangwoo Park (sw5771.park@samsung.com) + * \date 2012/01/17 + * + * Revision History: + * - 2012/01/17: Jinsung Yang (jsgood.yang@samsung.com) \n + * Initial version + * + */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "exynos_v4l2.h" + +//#define LOG_NDEBUG 0 +#define LOG_TAG "libexynosv4l2" +#include +#include "Exynos_log.h" + +#define VIDEODEV_MAX 255 + +//#define EXYNOS_V4L2_TRACE 0 +#ifdef EXYNOS_V4L2_TRACE +#define Exynos_v4l2_In() Exynos_Log(EXYNOS_DEV_LOG_DEBUG, LOG_TAG, "%s In , Line: %d", __FUNCTION__, __LINE__) +#define Exynos_v4l2_Out() Exynos_Log(EXYNOS_DEV_LOG_DEBUG, LOG_TAG, "%s Out , Line: %d", __FUNCTION__, __LINE__) +#else +#define Exynos_v4l2_In() ((void *)0) +#define Exynos_v4l2_Out() ((void *)0) +#endif + +static bool __v4l2_check_buf_type(enum v4l2_buf_type type) +{ + bool supported; + + switch (type) { + case V4L2_BUF_TYPE_VIDEO_CAPTURE: + case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE: + case V4L2_BUF_TYPE_VIDEO_OUTPUT: + case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE: + case V4L2_BUF_TYPE_VIDEO_OVERLAY: + supported = true; + break; + + default: + supported = (type >= V4L2_BUF_TYPE_PRIVATE) ? true : false; + break; + } + + return supported; +} + +static int __v4l2_open(const char *filename, int oflag, va_list ap) +{ + mode_t mode = 0; + int fd; + + if (oflag & O_CREAT) + mode = va_arg(ap, int); + + fd = open(filename, oflag, mode); + + return fd; +} + +int exynos_v4l2_open(const char *filename, int oflag, ...) +{ + va_list ap; + int fd; + + Exynos_v4l2_In(); + + va_start(ap, oflag); + fd = __v4l2_open(filename, oflag, ap); + va_end(ap); + + Exynos_v4l2_Out(); + + return fd; +} + +int exynos_v4l2_open_devname(const char *devname, int oflag, ...) 
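+/*
+ * Open a video device by driver name rather than by node path: the helper
+ * scans /dev/video0 .. /dev/video(VIDEODEV_MAX), matches the sysfs
+ * /sys/class/video4linux/videoN/name entry against devname, opens the
+ * matching node and returns its file descriptor, or -1 on failure.
+ *
+ * A minimal sketch of how the wrappers below are typically chained; format
+ * negotiation via exynos_v4l2_s_fmt() is omitted, and the driver name and
+ * buffer count are illustrative assumptions:
+ *
+ *   int fd = exynos_v4l2_open_devname("exynos-gsc", O_RDWR);
+ *   if (fd >= 0 &&
+ *       exynos_v4l2_querycap(fd, V4L2_CAP_VIDEO_OUTPUT_MPLANE)) {
+ *       struct v4l2_requestbuffers req;
+ *       memset(&req, 0, sizeof(req));
+ *       req.type   = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ *       req.memory = V4L2_MEMORY_DMABUF;
+ *       req.count  = 4;
+ *       if (exynos_v4l2_reqbufs(fd, &req) == 0)
+ *           exynos_v4l2_streamon(fd, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
+ *   }
+ */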
+{ + bool found = false; + int fd = -1; + struct stat s; + va_list ap; + FILE *stream_fd; + char filename[64], name[64]; + int i = 0; + + Exynos_v4l2_In(); + + do { + if (i > VIDEODEV_MAX) + break; + + /* video device node */ + snprintf(filename, sizeof(filename), "/dev/video%d", i); + + /* if the node is video device */ + if ((lstat(filename, &s) == 0) && S_ISCHR(s.st_mode) && + ((int)((unsigned short)(s.st_rdev) >> 8) == 81)) { + ALOGD("try node: %s", filename); + /* open sysfs entry */ + snprintf(filename, sizeof(filename), "/sys/class/video4linux/video%d/name", i); + if (S_ISLNK(s.st_mode)) { + ALOGE("symbolic link detected"); + return -1; + } + stream_fd = fopen(filename, "r"); + if (stream_fd == NULL) { + ALOGE("failed to open sysfs entry for videodev"); + continue; /* try next */ + } + + /* read sysfs entry for device name */ + char *p = fgets(name, sizeof(name), stream_fd); + fclose(stream_fd); + + /* check read size */ + if (p == NULL) { + ALOGE("failed to read sysfs entry for videodev"); + } else { + /* matched */ + if (strncmp(name, devname, strlen(devname)) == 0) { + ALOGI("node found for device %s: /dev/video%d", devname, i); + found = true; + break; + } + } + } + i++; + } while (found == false); + + if (found) { + snprintf(filename, sizeof(filename), "/dev/video%d", i); + va_start(ap, oflag); + fd = __v4l2_open(filename, oflag, ap); + va_end(ap); + + if (fd > 0) + ALOGI("open video device %s", filename); + else + ALOGE("failed to open video device %s", filename); + } else { + ALOGE("no video device found"); + } + + Exynos_v4l2_Out(); + + return fd; +} + +int exynos_v4l2_close(int fd) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) + ALOGE("%s: invalid fd: %d", __func__, fd); + else + ret = close(fd); + + Exynos_v4l2_Out(); + + return ret; +} + +bool exynos_v4l2_enuminput(int fd, int index, char *input_name_buf) +{ + int ret = -1; + struct v4l2_input input; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return NULL; + } + + input.index = index; + ret = ioctl(fd, VIDIOC_ENUMINPUT, &input, 32); + if (ret) { + ALOGE("%s: no matching index founds", __func__); + return false; + } + + ALOGI("Name of input channel[%d] is %s", input.index, input.name); + + strncpy(input_name_buf, (const char *)input.name, 32); + + Exynos_v4l2_Out(); + + return true; +} + +int exynos_v4l2_s_input(int fd, int index) +{ + int ret = -1; + struct v4l2_input input; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + input.index = index; + + ret = ioctl(fd, VIDIOC_S_INPUT, &input); + if (ret){ + ALOGE("failed to ioctl: VIDIOC_S_INPUT (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +bool exynos_v4l2_querycap(int fd, unsigned int need_caps) +{ + struct v4l2_capability cap; + int ret; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return false; + } + + if (!(need_caps & V4L2_CAP_VIDEO_CAPTURE) && + !(need_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE) && + !(need_caps & V4L2_CAP_VIDEO_OUTPUT) && + !(need_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE) && + !(need_caps & V4L2_CAP_VIDEO_OVERLAY)) { + ALOGE("%s: unsupported capabilities", __func__); + return false; + } + + memset(&cap, 0, sizeof(cap)); + + ret = ioctl(fd, VIDIOC_QUERYCAP, &cap); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_QUERYCAP (%d - %s)", errno, strerror(errno)); + return false; + } + + if ((need_caps & cap.capabilities) != need_caps) { + ALOGE("%s: unsupported 
capabilities", __func__); + return false; + } + + Exynos_v4l2_Out(); + + return true; +} + +bool exynos_v4l2_enum_fmt(int fd, enum v4l2_buf_type type, unsigned int fmt) +{ + struct v4l2_fmtdesc fmtdesc; + int found = 0; + + Exynos_v4l2_In(); + + fmtdesc.type = type; + fmtdesc.index = 0; + + while (ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0) { + if (fmtdesc.pixelformat == fmt) { + ALOGE("Passed fmt = %#x found pixel format[%d]: %s", fmt, fmtdesc.index, fmtdesc.description); + found = 1; + break; + } + + fmtdesc.index++; + } + + if (!found) { + ALOGE("%s: unsupported pixel format", __func__); + return false; + } + + Exynos_v4l2_Out(); + + return true; +} + +int exynos_v4l2_g_fmt(int fd, struct v4l2_format *fmt) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!fmt) { + ALOGE("%s: fmt is NULL", __func__); + return ret; + } + + if (__v4l2_check_buf_type(fmt->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_G_FMT, fmt); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_G_FMT (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +static int __v4l2_s_fmt(int fd, unsigned int request, struct v4l2_format *fmt) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!fmt) { + ALOGE("%s: fmt is NULL", __func__); + return ret; + } + + if (__v4l2_check_buf_type(fmt->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } else { + ret = ioctl(fd, request, fmt); + if (ret) { + if (request == VIDIOC_TRY_FMT) + ALOGE("failed to ioctl: VIDIOC_TRY_FMT (%d - %s)", errno, strerror(errno)); + else + ALOGE("failed to ioctl: VIDIOC_S_FMT (%d - %s)", errno, strerror(errno)); + + return ret; + } + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_try_fmt(int fd, struct v4l2_format *fmt) +{ + return __v4l2_s_fmt(fd, VIDIOC_TRY_FMT, fmt); +} + +int exynos_v4l2_s_fmt(int fd, struct v4l2_format *fmt) +{ + return __v4l2_s_fmt(fd, VIDIOC_S_FMT, fmt); +} + +int exynos_v4l2_reqbufs(int fd, struct v4l2_requestbuffers *req) +{ + int ret = -1; + unsigned int count; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!req) { + ALOGE("%s: req is NULL", __func__); + return ret; + } + + if ((req->memory != V4L2_MEMORY_MMAP) && + (req->memory != V4L2_MEMORY_USERPTR) && + (req->memory != V4L2_MEMORY_DMABUF)) { + ALOGE("%s: unsupported memory type", __func__); + return ret; + } + + if (__v4l2_check_buf_type(req->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + count = req->count; + + ret = ioctl(fd, VIDIOC_REQBUFS, req); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_REQBUFS (%d - %s)", ret, strerror(errno)); + return ret; + } + + if (count != req->count) { + ALOGW("number of buffers had been changed: %d => %d", count, req->count); + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_querybuf(int fd, struct v4l2_buffer *buf) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!buf) { + ALOGE("%s: buf is NULL", __func__); + return ret; + } + + if ((buf->memory != V4L2_MEMORY_MMAP) && + (buf->memory != V4L2_MEMORY_DMABUF)) { + ALOGE("%s: unsupported memory type", __func__); + return ret; + } + + if (__v4l2_check_buf_type(buf->type) == false) { + 
ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_QUERYBUF, buf); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_QUERYBUF (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_qbuf(int fd, struct v4l2_buffer *buf) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!buf) { + ALOGE("%s: buf is NULL", __func__); + return ret; + } + + if ((buf->memory != V4L2_MEMORY_MMAP) && + (buf->memory != V4L2_MEMORY_USERPTR) && + (buf->memory != V4L2_MEMORY_DMABUF)) { + ALOGE("%s: unsupported memory type", __func__); + return ret; + } + + if (__v4l2_check_buf_type(buf->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_QBUF, buf); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_QBUF (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_dqbuf(int fd, struct v4l2_buffer *buf) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!buf) { + ALOGE("%s: buf is NULL", __func__); + return ret; + } + + if ((buf->memory != V4L2_MEMORY_MMAP) && + (buf->memory != V4L2_MEMORY_USERPTR) && + (buf->memory != V4L2_MEMORY_DMABUF)) { + ALOGE("%s: unsupported memory type", __func__); + return ret; + } + + if (__v4l2_check_buf_type(buf->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_DQBUF, buf); + if (ret) { + if (errno == EAGAIN) + return -errno; + + ALOGW("failed to ioctl: VIDIOC_DQBUF (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_streamon(int fd, enum v4l2_buf_type type) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (__v4l2_check_buf_type(type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_STREAMON, &type); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_STREAMON (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_streamoff(int fd, enum v4l2_buf_type type) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (__v4l2_check_buf_type(type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_STREAMOFF, &type); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_STREAMOFF (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_cropcap(int fd, struct v4l2_cropcap *crop) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!crop) { + ALOGE("%s: crop is NULL", __func__); + return ret; + } + + if (__v4l2_check_buf_type(crop->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_CROPCAP, crop); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_CROPCAP (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_g_crop(int fd, struct v4l2_crop *crop) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + 
return ret; + } + + if (!crop) { + ALOGE("%s: crop is NULL", __func__); + return ret; + } + + if (__v4l2_check_buf_type(crop->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_G_CROP, crop); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_G_CROP (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_s_crop(int fd, struct v4l2_crop *crop) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (!crop) { + ALOGE("%s: crop is NULL", __func__); + return ret; + } + + if (__v4l2_check_buf_type(crop->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_S_CROP, crop); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_S_CROP (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_g_ctrl(int fd, unsigned int id, int *value) +{ + int ret = -1; + struct v4l2_control ctrl; + + Exynos_v4l2_In(); + + ctrl.id = id; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + ret = ioctl(fd, VIDIOC_G_CTRL, &ctrl); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_G_CTRL (%d - %s)", errno, strerror(errno)); + return ret; + } + + *value = ctrl.value; + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_s_ctrl(int fd, unsigned int id, int value) +{ + int ret = -1; + struct v4l2_control ctrl; + + Exynos_v4l2_In(); + + ctrl.id = id; + ctrl.value = value; + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + ret = ioctl(fd, VIDIOC_S_CTRL, &ctrl); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_S_CTRL (%d)", errno); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_prepare(int fd, struct v4l2_buffer *arg) +{ + int ret = -1; + struct v4l2_control ctrl; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + ret = ioctl(fd, VIDIOC_PREPARE_BUF, arg); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_PREPARE_BUF (%d)", errno); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_g_parm(int fd, struct v4l2_streamparm *streamparm) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (__v4l2_check_buf_type(streamparm->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_G_PARM, streamparm); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_G_PARM (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_s_parm(int fd, struct v4l2_streamparm *streamparm) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (__v4l2_check_buf_type(streamparm->type) == false) { + ALOGE("%s: unsupported buffer type", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_S_PARM, streamparm); + if (ret) { + ALOGE("failed to ioctl: VIDIOC_S_PARM (%d - %s)", errno, strerror(errno)); + return ret; + } + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_g_ext_ctrl(int fd, struct v4l2_ext_controls *ctrl) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (ctrl == NULL) { + ALOGE("%s: ctrl is NULL", __func__); + return ret; + } + + ret = 
ioctl(fd, VIDIOC_G_EXT_CTRLS, ctrl); + if (ret) + ALOGE("failed to ioctl: VIDIOC_G_EXT_CTRLS (%d - %s)", errno, strerror(errno)); + + Exynos_v4l2_Out(); + + return ret; +} + +int exynos_v4l2_s_ext_ctrl(int fd, struct v4l2_ext_controls *ctrl) +{ + int ret = -1; + + Exynos_v4l2_In(); + + if (fd < 0) { + ALOGE("%s: invalid fd: %d", __func__, fd); + return ret; + } + + if (ctrl == NULL) { + ALOGE("%s: ctrl is NULL", __func__); + return ret; + } + + ret = ioctl(fd, VIDIOC_S_EXT_CTRLS, ctrl); + if (ret) + ALOGE("failed to ioctl: VIDIOC_S_EXT_CTRLS (%d - %s)", errno, strerror(errno)); + + Exynos_v4l2_Out(); + + return ret; +} diff --git a/libvideocodec/Android.mk b/libvideocodec/Android.mk new file mode 100644 index 0000000..97131bf --- /dev/null +++ b/libvideocodec/Android.mk @@ -0,0 +1,76 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + ExynosVideoInterface.c \ + dec/ExynosVideoDecoder.c \ + enc/ExynosVideoEncoder.c + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/include \ + $(TOP)/hardware/samsung_slsi/exynos/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(TOP)/system/core/libion/include + +ifeq ($(BOARD_USE_KHRONOS_OMX_HEADER), true) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/openmax/include/khronos +else +LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax +endif + +# only 3.4 kernel +ifneq ($(findstring 3.1, $(TARGET_LINUX_KERNEL_VERSION)), 3.1) +LOCAL_CFLAGS += -DUSE_EXYNOS_MEDIA_EXT +endif + +# since 3.10 kernel +ifneq ($(filter-out 3.4, $(TARGET_LINUX_KERNEL_VERSION)),) +LOCAL_CFLAGS += -DCID_SUPPORT +LOCAL_CFLAGS += -DUSE_DEFINE_H264_SEI_TYPE +endif + +# since 3.18 kernel +ifneq ($(filter 3.18, $(TARGET_LINUX_KERNEL_VERSION)),) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/mfc_headers +LOCAL_CFLAGS += -DUSE_MFC_MEDIA +LOCAL_CFLAGS += -DUSE_ORIGINAL_HEADER +ifeq ($(BOARD_USE_SINGLE_PLANE_IN_DRM), true) +LOCAL_CFLAGS += -DUSE_SINGLE_PALNE_SUPPORT +endif +endif + +ifeq ($(BOARD_USE_HEVCDEC_SUPPORT), true) +LOCAL_CFLAGS += -DUSE_HEVCDEC_SUPPORT +endif + +ifeq ($(BOARD_USE_HEVCENC_SUPPORT), true) +LOCAL_CFLAGS += -DUSE_HEVCENC_SUPPORT +endif + +ifeq ($(BOARD_USE_HEVC_HWIP), true) +LOCAL_CFLAGS += -DUSE_HEVC_HWIP +endif + +ifeq ($(BOARD_USE_VP9DEC_SUPPORT), true) +LOCAL_CFLAGS += -DUSE_VP9DEC_SUPPORT +endif + +ifeq ($(BOARD_USE_VP9ENC_SUPPORT), true) +LOCAL_CFLAGS += -DUSE_VP9ENC_SUPPORT +endif + +ifeq ($(BOARD_USE_FORCEFULLY_DISABLE_DUALDPB), true) +LOCAL_CFLAGS += -DUSE_FORCEFULLY_DISABLE_DUALDPB +endif + +ifeq ($(BOARD_USE_DEINTERLACING_SUPPORT), true) +LOCAL_CFLAGS += -DUSE_DEINTERLACING_SUPPORT +endif + +LOCAL_MODULE := libExynosVideoApi +LOCAL_MODULE_TAGS := optional +LOCAL_PRELINK_MODULE := false +LOCAL_ARM_MODE := arm + +include $(BUILD_STATIC_LIBRARY) diff --git a/libvideocodec/ExynosVideoInterface.c b/libvideocodec/ExynosVideoInterface.c new file mode 100644 index 0000000..078e5eb --- /dev/null +++ b/libvideocodec/ExynosVideoInterface.c @@ -0,0 +1,156 @@ +/* + * Copyright 2013 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* + * @file ExynosVideoInterface.c + * @brief + * @author Satish Kumar Reddy (palli.satish@samsung.com) + * @version 1.0 + * @history + * 2013.08.14 : Initial versaion + */ + +#include +#include +#include +#include + +#include "ExynosVideoApi.h" +#include "ExynosVideoDec.h" +#include "ExynosVideoEnc.h" + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosVideoInterface" +#include + +#define EXYNOS_MULIPCODEC_SHAREDLIB_NAME "/system/lib/libMulIPExynosVideoApi.so" +static void *glibMulIPHandle = NULL; + +ExynosVideoErrorType Exynos_Video_GetInstInfo( + ExynosVideoInstInfo *pVideoInstInfo, + ExynosVideoBoolType bIsDec) +{ + if (bIsDec == VIDEO_TRUE) + return MFC_Exynos_Video_GetInstInfo_Decoder(pVideoInstInfo); + else + return MFC_Exynos_Video_GetInstInfo_Encoder(pVideoInstInfo); +} + +int Exynos_Video_Register_Decoder( + ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps) +{ + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int (*MulIPRegisterDecoder)( ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps); + + if ((pDecOps == NULL) || (pInbufOps == NULL) || (pOutbufOps == NULL)) { + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + /* Check for Multiple IP codec library */ + glibMulIPHandle = dlopen(EXYNOS_MULIPCODEC_SHAREDLIB_NAME, RTLD_NOW); + if (glibMulIPHandle == NULL) { + ret = MFC_Exynos_Video_Register_Decoder(pDecOps, pInbufOps, pOutbufOps); + } else { + MulIPRegisterDecoder = dlsym(glibMulIPHandle, "MulIP_Exynos_Video_Register_Decoder"); + if (MulIPRegisterDecoder == NULL) { + ALOGE("%s: dlsym Failed to get MulIP_Exynos_Video_Register_Decoder symbol", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + ret = MulIPRegisterDecoder(pDecOps, pInbufOps, pOutbufOps); + } + +EXIT: + return ret; +} + +int Exynos_Video_Register_Encoder( + ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps) +{ + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int (*MulIPRegisterEncoder)( ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps); + + if ((pEncOps == NULL) || (pInbufOps == NULL) || (pOutbufOps == NULL)) { + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + /* Check for Multiple IP codec library */ + glibMulIPHandle = dlopen(EXYNOS_MULIPCODEC_SHAREDLIB_NAME, RTLD_NOW); + if (glibMulIPHandle == NULL) { + ret = MFC_Exynos_Video_Register_Encoder(pEncOps, pInbufOps, pOutbufOps); + } else { + MulIPRegisterEncoder = dlsym(glibMulIPHandle, "MulIP_Exynos_Video_Register_Encoder"); + if (MulIPRegisterEncoder == NULL) { + ALOGE("%s: dlsym Failed to get MulIP_Exynos_Video_Register_Encoder symbol", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + ret = MulIPRegisterEncoder(pEncOps, pInbufOps, pOutbufOps); + } + +EXIT: + return ret; +} + +void Exynos_Video_Unregister_Decoder( + ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps) +{ + if ((pDecOps == NULL) || (pInbufOps == NULL) || (pOutbufOps == NULL)) { + goto EXIT; + } + + memset(pDecOps, 0, sizeof(ExynosVideoDecOps)); + memset(pInbufOps, 0, sizeof(ExynosVideoDecBufferOps)); + memset(pOutbufOps, 0, sizeof(ExynosVideoDecBufferOps)); + +EXIT: + if (glibMulIPHandle != NULL) { + dlclose(glibMulIPHandle); + glibMulIPHandle = NULL; + } +} + +void Exynos_Video_Unregister_Encoder( + 
ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps) +{ + if ((pEncOps == NULL) || (pInbufOps == NULL) || (pOutbufOps == NULL)) { + goto EXIT; + } + + memset(pEncOps, 0, sizeof(ExynosVideoEncOps)); + memset(pInbufOps, 0, sizeof(ExynosVideoEncBufferOps)); + memset(pOutbufOps, 0, sizeof(ExynosVideoEncBufferOps)); + +EXIT: + if (glibMulIPHandle != NULL) { + dlclose(glibMulIPHandle); + glibMulIPHandle = NULL; + } +} diff --git a/libvideocodec/NOTICE b/libvideocodec/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libvideocodec/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libvideocodec/dec/ExynosVideoDecoder.c b/libvideocodec/dec/ExynosVideoDecoder.c new file mode 100644 index 0000000..4a089de --- /dev/null +++ b/libvideocodec/dec/ExynosVideoDecoder.c @@ -0,0 +1,3107 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* + * @file ExynosVideoDecoder.c + * @brief + * @author Jinsung Yang (jsgood.yang@samsung.com) + * @version 1.0.0 + * @history + * 2012.01.15: Initial Version + */ + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include "videodev2_exynos_media.h" +#ifdef USE_EXYNOS_MEDIA_EXT +#include "videodev2_exynos_media_ext.h" +#endif +#ifdef USE_MFC_MEDIA +#include "exynos_mfc_media.h" +#endif + +#include +#include "exynos_ion.h" + + +#include "ExynosVideoApi.h" +#include "ExynosVideoDec.h" +#include "OMX_Core.h" + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosVideoDecoder" +#include + +#define MAX_INPUTBUFFER_COUNT 32 +#define MAX_OUTPUTBUFFER_COUNT 32 + +/* + * [Common] __CodingType_To_V4L2PixelFormat + */ +static unsigned int __CodingType_To_V4L2PixelFormat(ExynosVideoCodingType codingType) +{ + unsigned int pixelformat = V4L2_PIX_FMT_H264; + + switch (codingType) { + case VIDEO_CODING_AVC: + pixelformat = V4L2_PIX_FMT_H264; + break; + case VIDEO_CODING_MPEG4: + pixelformat = V4L2_PIX_FMT_MPEG4; + break; + case VIDEO_CODING_VP8: + pixelformat = V4L2_PIX_FMT_VP8; + break; + case VIDEO_CODING_H263: + pixelformat = V4L2_PIX_FMT_H263; + break; + case VIDEO_CODING_VC1: + pixelformat = V4L2_PIX_FMT_VC1_ANNEX_G; + break; + case VIDEO_CODING_VC1_RCV: + pixelformat = V4L2_PIX_FMT_VC1_ANNEX_L; + break; + case VIDEO_CODING_MPEG2: + pixelformat = V4L2_PIX_FMT_MPEG2; + break; +#ifdef USE_HEVCDEC_SUPPORT + case VIDEO_CODING_HEVC: + pixelformat = V4L2_PIX_FMT_HEVC; + break; +#endif +#ifdef USE_VP9DEC_SUPPORT + case VIDEO_CODING_VP9: + pixelformat = V4L2_PIX_FMT_VP9; + break; +#endif + default: + pixelformat = V4L2_PIX_FMT_H264; + break; + } + + return pixelformat; +} + +/* + * [Common] __ColorFormatType_To_V4L2PixelFormat + */ +static unsigned int __ColorFormatType_To_V4L2PixelFormat( + ExynosVideoColorFormatType colorFormatType, + int nHwVersion) +{ + unsigned int pixelformat = V4L2_PIX_FMT_NV12M; + + switch (colorFormatType) { + case VIDEO_COLORFORMAT_NV12M: + pixelformat = V4L2_PIX_FMT_NV12M; + break; + case VIDEO_COLORFORMAT_NV21M: + pixelformat = V4L2_PIX_FMT_NV21M; + break; + case VIDEO_COLORFORMAT_NV12M_TILED: + if (nHwVersion == (int)MFC_51) + pixelformat = V4L2_PIX_FMT_NV12MT; + else + pixelformat = V4L2_PIX_FMT_NV12MT_16X16; + break; + case VIDEO_COLORFORMAT_I420M: + pixelformat = V4L2_PIX_FMT_YUV420M; + break; + case VIDEO_COLORFORMAT_YV12M: + pixelformat = V4L2_PIX_FMT_YVU420M; + break; +#ifdef USE_SINGLE_PALNE_SUPPORT + case VIDEO_COLORFORMAT_NV12: + pixelformat = V4L2_PIX_FMT_NV12N; + break; + case VIDEO_COLORFORMAT_I420: + pixelformat = V4L2_PIX_FMT_YUV420N; + break; + case VIDEO_COLORFORMAT_NV12_TILED: + pixelformat = V4L2_PIX_FMT_NV12NT; + break; +#endif + default: + pixelformat = V4L2_PIX_FMT_NV12M; + break; + } + + return pixelformat; +} + +/* + * [Common] __Set_SupportFormat + */ +static void __Set_SupportFormat(ExynosVideoInstInfo *pVideoInstInfo) +{ + int nLastIndex = 0; + + if (pVideoInstInfo == NULL) { + ALOGE("%s: ExynosVideoInstInfo must be supplied", __func__); + return ; + } + + memset(pVideoInstInfo->supportFormat, (int)VIDEO_COLORFORMAT_UNKNOWN, sizeof(pVideoInstInfo->supportFormat)); + +#ifdef USE_HEVC_HWIP + if (pVideoInstInfo->eCodecType == VIDEO_CODING_HEVC) { + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV21M; + 
pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_YV12M; + goto EXIT; + } +#endif + + switch (pVideoInstInfo->HwVersion) { + case MFC_101: /* NV12, NV21, I420, YV12 */ + case MFC_100: + case MFC_1010: + case MFC_1011: + case MFC_90: + case MFC_80: + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV21M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_YV12M; + break; + case MFC_92: /* NV12, NV21 */ + case MFC_78D: + case MFC_1020: + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV21M; + break; + case MFC_723: /* NV12T, [NV12, NV21, I420, YV12] */ + case MFC_72: + case MFC_77: + if (pVideoInstInfo->specificInfo.dec.bDualDPBSupport == VIDEO_TRUE) { + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV21M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_YV12M; + } + case MFC_78: /* NV12T */ + case MFC_65: + case MFC_61: + case MFC_51: + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12_TILED; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M_TILED; + break; + default: + break; + } + +EXIT: + return ; +} + +/* + * [Common] __V4L2PixelFormat_To_ColorFormatType + */ +static ExynosVideoColorFormatType __V4L2PixelFormat_To_ColorFormatType(unsigned int pixelformat) +{ + ExynosVideoColorFormatType colorFormatType = VIDEO_COLORFORMAT_NV12_TILED; + + switch (pixelformat) { + case V4L2_PIX_FMT_NV12M: + colorFormatType = VIDEO_COLORFORMAT_NV12M; + break; + case V4L2_PIX_FMT_NV21M: + colorFormatType = VIDEO_COLORFORMAT_NV21M; + break; + case V4L2_PIX_FMT_YUV420M: + colorFormatType = VIDEO_COLORFORMAT_I420M; + break; + case V4L2_PIX_FMT_YVU420M: + colorFormatType = VIDEO_COLORFORMAT_YV12M; + break; +#ifdef USE_SINGLE_PALNE_SUPPORT + case V4L2_PIX_FMT_NV12N: + case V4L2_PIX_FMT_NV12N_10B: + colorFormatType = VIDEO_COLORFORMAT_NV12; + break; + case V4L2_PIX_FMT_YUV420N: + colorFormatType = VIDEO_COLORFORMAT_I420; + break; + case V4L2_PIX_FMT_NV12NT: + colorFormatType = VIDEO_COLORFORMAT_NV12_TILED; + break; +#endif + case V4L2_PIX_FMT_NV12MT: + case V4L2_PIX_FMT_NV12MT_16X16: + default: + colorFormatType = VIDEO_COLORFORMAT_NV12M_TILED; + break; + } + + return colorFormatType; +} + +/* + * [Decoder OPS] Init + */ +static void *MFC_Decoder_Init(ExynosVideoInstInfo *pVideoInfo) +{ + ExynosVideoDecContext *pCtx = NULL; + pthread_mutex_t *pMutex = NULL; + int needCaps = (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING); + + int hIonClient = -1; + + if (pVideoInfo == NULL) { + ALOGE("%s: bad parameter", __func__); + goto EXIT_ALLOC_FAIL; + } + + pCtx = (ExynosVideoDecContext *)malloc(sizeof(*pCtx)); + 
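+    /* decoder context: holds the V4L2 decoder fd, the input/output buffer tables, the in/out mutexes and the ion buffer used for private-data sharing with the driver; all of it is released again in MFC_Decoder_Finalize() */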
if (pCtx == NULL) { + ALOGE("%s: Failed to allocate decoder context buffer", __func__); + goto EXIT_ALLOC_FAIL; + } + + memset(pCtx, 0, sizeof(*pCtx)); + +#ifdef USE_HEVC_HWIP + if (pVideoInfo->eCodecType == VIDEO_CODING_HEVC) { + if (pVideoInfo->eSecurityType == VIDEO_SECURE) { + pCtx->hDec = exynos_v4l2_open_devname(VIDEO_HEVC_SECURE_DECODER_NAME, O_RDWR, 0); + } else { + pCtx->hDec = exynos_v4l2_open_devname(VIDEO_HEVC_DECODER_NAME, O_RDWR, 0); + } + } else +#endif + { + if (pVideoInfo->eSecurityType == VIDEO_SECURE) { + pCtx->hDec = exynos_v4l2_open_devname(VIDEO_MFC_SECURE_DECODER_NAME, O_RDWR, 0); + } else { + pCtx->hDec = exynos_v4l2_open_devname(VIDEO_MFC_DECODER_NAME, O_RDWR, 0); + } + } + + if (pCtx->hDec < 0) { + ALOGE("%s: Failed to open decoder device", __func__); + goto EXIT_OPEN_FAIL; + } + + memcpy(&pCtx->videoInstInfo, pVideoInfo, sizeof(pCtx->videoInstInfo)); + + ALOGV("%s: HW version is %x", __func__, pCtx->videoInstInfo.HwVersion); + + if (!exynos_v4l2_querycap(pCtx->hDec, needCaps)) { + ALOGE("%s: Failed to querycap", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + pCtx->bStreamonInbuf = VIDEO_FALSE; + pCtx->bStreamonOutbuf = VIDEO_FALSE; + + pMutex = (pthread_mutex_t *)malloc(sizeof(pthread_mutex_t)); + if (pMutex == NULL) { + ALOGE("%s: Failed to allocate mutex about input buffer", __func__); + goto EXIT_QUERYCAP_FAIL; + } + if (pthread_mutex_init(pMutex, NULL) != 0) { + free(pMutex); + goto EXIT_QUERYCAP_FAIL; + } + pCtx->pInMutex = (void*)pMutex; + + pMutex = (pthread_mutex_t *)malloc(sizeof(pthread_mutex_t)); + if (pMutex == NULL) { + ALOGE("%s: Failed to allocate mutex about output buffer", __func__); + goto EXIT_QUERYCAP_FAIL; + } + if (pthread_mutex_init(pMutex, NULL) != 0) { + free(pMutex); + goto EXIT_QUERYCAP_FAIL; + } + pCtx->pOutMutex = (void*)pMutex; + + hIonClient = ion_open(); + if (hIonClient < 0) { + ALOGE("%s: Failed to create ion_client", __func__); + goto EXIT_QUERYCAP_FAIL; + } + pCtx->hIONHandle = hIonClient; + + if (ion_alloc_fd(pCtx->hIONHandle, sizeof(PrivateDataShareBuffer) * VIDEO_BUFFER_MAX_NUM, + 0, ION_HEAP_SYSTEM_MASK, ION_FLAG_CACHED, &(pCtx->nPrivateDataShareFD)) < 0) { + ALOGE("%s: Failed to ion_alloc_fd for nPrivateDataShareFD", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + pCtx->pPrivateDataShareAddress = mmap(NULL, (sizeof(PrivateDataShareBuffer) * VIDEO_BUFFER_MAX_NUM), + PROT_READ | PROT_WRITE, MAP_SHARED, pCtx->nPrivateDataShareFD, 0); + if (pCtx->pPrivateDataShareAddress == MAP_FAILED) { + ALOGE("%s: Failed to mmap for nPrivateDataShareFD", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + memset(pCtx->pPrivateDataShareAddress, -1, sizeof(PrivateDataShareBuffer) * VIDEO_BUFFER_MAX_NUM); + + return (void *)pCtx; + +EXIT_QUERYCAP_FAIL: + if (pCtx->pInMutex != NULL) { + pthread_mutex_destroy(pCtx->pInMutex); + free(pCtx->pInMutex); + } + + if (pCtx->pOutMutex != NULL) { + pthread_mutex_destroy(pCtx->pOutMutex); + free(pCtx->pOutMutex); + } + + /* free a ion_buffer */ + if (pCtx->nPrivateDataShareFD > 0) { + close(pCtx->nPrivateDataShareFD); + pCtx->nPrivateDataShareFD = -1; + } + + /* free a ion_client */ + if (pCtx->hIONHandle > 0) { + ion_close(pCtx->hIONHandle); + pCtx->hIONHandle = -1; + } + + exynos_v4l2_close(pCtx->hDec); + +EXIT_OPEN_FAIL: + free(pCtx); + +EXIT_ALLOC_FAIL: + return NULL; +} + +/* + * [Decoder OPS] Finalize + */ +static ExynosVideoErrorType MFC_Decoder_Finalize(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoPlane *pVideoPlane = NULL; + pthread_mutex_t *pMutex = 
NULL; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i, j; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->pPrivateDataShareAddress != NULL) { + munmap(pCtx->pPrivateDataShareAddress, sizeof(PrivateDataShareBuffer) * VIDEO_BUFFER_MAX_NUM); + pCtx->pPrivateDataShareAddress = NULL; + } + + /* free a ion_buffer */ + if (pCtx->nPrivateDataShareFD > 0) { + close(pCtx->nPrivateDataShareFD); + pCtx->nPrivateDataShareFD = -1; + } + + /* free a ion_client */ + if (pCtx->hIONHandle > 0) { + ion_close(pCtx->hIONHandle); + pCtx->hIONHandle = -1; + } + + if (pCtx->pOutMutex != NULL) { + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_destroy(pMutex); + free(pMutex); + pCtx->pOutMutex = NULL; + } + + if (pCtx->pInMutex != NULL) { + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_destroy(pMutex); + free(pMutex); + pCtx->pInMutex = NULL; + } + + if (pCtx->bShareInbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nInbufs; i++) { + for (j = 0; j < pCtx->nInbufPlanes; j++) { + pVideoPlane = &pCtx->pInbuf[i].planes[j]; + if (pVideoPlane->addr != NULL) { + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + pVideoPlane->addr = NULL; + pVideoPlane->allocSize = 0; + pVideoPlane->dataSize = 0; + } + + pCtx->pInbuf[i].pGeometry = NULL; + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + pCtx->pInbuf[i].bRegistered = VIDEO_FALSE; + } + } + } + + if (pCtx->bShareOutbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nOutbufs; i++) { + for (j = 0; j < pCtx->nOutbufPlanes; j++) { + pVideoPlane = &pCtx->pOutbuf[i].planes[j]; + if (pVideoPlane->addr != NULL) { + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + pVideoPlane->addr = NULL; + pVideoPlane->allocSize = 0; + pVideoPlane->dataSize = 0; + } + + pCtx->pOutbuf[i].pGeometry = NULL; + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + pCtx->pOutbuf[i].bRegistered = VIDEO_FALSE; + } + } + } + + if (pCtx->pInbuf != NULL) + free(pCtx->pInbuf); + + if (pCtx->pOutbuf != NULL) + free(pCtx->pOutbuf); + + if (pCtx->hDec >= 0) + exynos_v4l2_close(pCtx->hDec); + + free(pCtx); + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Set Frame Tag + */ +static ExynosVideoErrorType MFC_Decoder_Set_FrameTag( + void *pHandle, + int frameTag) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG, frameTag) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Get Frame Tag + */ +static int MFC_Decoder_Get_FrameTag(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int frameTag = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG, &frameTag); + +EXIT: + return frameTag; +} + +/* + * [Decoder OPS] Get Buffer Count + */ +static int MFC_Decoder_Get_ActualBufferCount(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int bufferCount = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MIN_BUFFERS_FOR_CAPTURE, &bufferCount); + +EXIT: + return bufferCount; +} + +/* + * [Decoder OPS] Set 
Display Delay + */ +static ExynosVideoErrorType MFC_Decoder_Set_DisplayDelay( + void *pHandle, + int delay) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_DECODER_H264_DISPLAY_DELAY, delay) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Set Immediate Display + */ +static ExynosVideoErrorType MFC_Decoder_Set_ImmediateDisplay( void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_DECODER_IMMEDIATE_DISPLAY, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Set I-Frame Decoding + */ +static ExynosVideoErrorType MFC_Decoder_Set_IFrameDecoding( + void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + +#ifdef USE_HEVC_HWIP + if ((pCtx->videoInstInfo.eCodecType != VIDEO_CODING_HEVC) && + (pCtx->videoInstInfo.HwVersion == (int)MFC_51)) +#else + if (pCtx->videoInstInfo.HwVersion == (int)MFC_51) +#endif + return MFC_Decoder_Set_DisplayDelay(pHandle, 0); + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_I_FRAME_DECODING, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable Packed PB + */ +static ExynosVideoErrorType MFC_Decoder_Enable_PackedPB(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_PACKED_PB, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable Loop Filter + */ +static ExynosVideoErrorType MFC_Decoder_Enable_LoopFilter(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_DECODER_MPEG4_DEBLOCK_FILTER, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable Slice Mode + */ +static ExynosVideoErrorType MFC_Decoder_Enable_SliceMode(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_DECODER_SLICE_INTERFACE, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable SEI Parsing + */ +static 
ExynosVideoErrorType MFC_Decoder_Enable_SEIParsing(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Get Frame Packing information + */ +static ExynosVideoErrorType MFC_Decoder_Get_FramePackingInfo( + void *pHandle, + ExynosVideoFramePacking *pFramePacking) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_ext_control ext_ctrl[FRAME_PACK_SEI_INFO_NUM]; + struct v4l2_ext_controls ext_ctrls; + + int seiAvailable, seiInfo, seiGridPos, i; + unsigned int seiArgmtId; + + + if ((pCtx == NULL) || (pFramePacking == NULL)) { + ALOGE("%s: Video context info or FramePacking pointer must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(pFramePacking, 0, sizeof(*pFramePacking)); + memset(ext_ctrl, 0, (sizeof(struct v4l2_ext_control) * FRAME_PACK_SEI_INFO_NUM)); + + ext_ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG; + ext_ctrls.count = FRAME_PACK_SEI_INFO_NUM; + ext_ctrls.controls = ext_ctrl; + ext_ctrl[0].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FP_AVAIL; + ext_ctrl[1].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRGMENT_ID; + ext_ctrl[2].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FP_INFO; + ext_ctrl[3].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FP_GRID_POS; + + if (exynos_v4l2_g_ext_ctrl(pCtx->hDec, &ext_ctrls) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + seiAvailable = ext_ctrl[0].value; + seiArgmtId = ext_ctrl[1].value; + seiInfo = ext_ctrl[2].value; + seiGridPos = ext_ctrl[3].value; + + pFramePacking->available = seiAvailable; + pFramePacking->arrangement_id = seiArgmtId; + + pFramePacking->arrangement_cancel_flag = OPERATE_BIT(seiInfo, 0x1, 0); + pFramePacking->arrangement_type = OPERATE_BIT(seiInfo, 0x3f, 1); + pFramePacking->quincunx_sampling_flag = OPERATE_BIT(seiInfo, 0x1, 8); + pFramePacking->content_interpretation_type = OPERATE_BIT(seiInfo, 0x3f, 9); + pFramePacking->spatial_flipping_flag = OPERATE_BIT(seiInfo, 0x1, 15); + pFramePacking->frame0_flipped_flag = OPERATE_BIT(seiInfo, 0x1, 16); + pFramePacking->field_views_flag = OPERATE_BIT(seiInfo, 0x1, 17); + pFramePacking->current_frame_is_frame0_flag = OPERATE_BIT(seiInfo, 0x1, 18); + + pFramePacking->frame0_grid_pos_x = OPERATE_BIT(seiGridPos, 0xf, 0); + pFramePacking->frame0_grid_pos_y = OPERATE_BIT(seiGridPos, 0xf, 4); + pFramePacking->frame1_grid_pos_x = OPERATE_BIT(seiGridPos, 0xf, 8); + pFramePacking->frame1_grid_pos_y = OPERATE_BIT(seiGridPos, 0xf, 12); + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable Decoding Timestamp Mode + */ +static ExynosVideoErrorType MFC_Decoder_Enable_DTSMode(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_DECODER_DECODING_TIMESTAMP_MODE, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Set Qos Ratio + */ +static ExynosVideoErrorType MFC_Decoder_Set_QosRatio( + void *pHandle, + int ratio) 
+{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_QOS_RATIO, ratio) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable Dual DPB Mode + */ +static ExynosVideoErrorType MFC_Decoder_Enable_DualDPBMode(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC_SET_DUAL_DPB_MODE, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Enable Dynamic DPB + */ +static ExynosVideoErrorType MFC_Decoder_Enable_DynamicDPB(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC_SET_DYNAMIC_DPB_MODE, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC_SET_USER_SHARED_HANDLE, pCtx->nPrivateDataShareFD) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder OPS] Set Buffer Process Type + */ +static ExynosVideoErrorType MFC_Decoder_Set_BufferProcessType( + void *pHandle, + int bufferProcessType) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC_SET_BUF_PROCESS_TYPE, bufferProcessType) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Enable Cacheable (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Enable_Cacheable_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_CACHEABLE, 2) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Enable Cacheable (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Enable_Cacheable_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_CACHEABLE, 1) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Set Shareable Buffer (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Set_Shareable_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video 
context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pCtx->bShareInbuf = VIDEO_TRUE; + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Set Shareable Buffer (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Set_Shareable_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pCtx->bShareOutbuf = VIDEO_TRUE; + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Get Buffer (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Get_Buffer_Inbuf( + void *pHandle, + int nIndex, + ExynosVideoBuffer **pBuffer) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + *pBuffer = NULL; + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nInbufs <= nIndex) { + *pBuffer = NULL; + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + *pBuffer = (ExynosVideoBuffer *)&pCtx->pInbuf[nIndex]; + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Get Buffer (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Get_Buffer_Outbuf( + void *pHandle, + int nIndex, + ExynosVideoBuffer **pBuffer) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + *pBuffer = NULL; + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nOutbufs <= nIndex) { + *pBuffer = NULL; + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + *pBuffer = (ExynosVideoBuffer *)&pCtx->pOutbuf[nIndex]; + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Set Geometry (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Set_Geometry_Inbuf( + void *pHandle, + ExynosVideoGeometry *bufferConf) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_format fmt; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (bufferConf == NULL) { + ALOGE("%s: Buffer geometry must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&fmt, 0, sizeof(fmt)); + + fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + fmt.fmt.pix_mp.pixelformat = __CodingType_To_V4L2PixelFormat(bufferConf->eCompressionFormat); + fmt.fmt.pix_mp.plane_fmt[0].sizeimage = bufferConf->nSizeImage; + + if (exynos_v4l2_s_fmt(pCtx->hDec, &fmt) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + memcpy(&pCtx->inbufGeometry, bufferConf, sizeof(pCtx->inbufGeometry)); + pCtx->nInbufPlanes = bufferConf->nPlaneCnt; + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Set Geometry (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Set_Geometry_Outbuf( + void *pHandle, + ExynosVideoGeometry *bufferConf) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_format fmt; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (bufferConf == NULL) { + ALOGE("%s: Buffer geometry must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&fmt, 0, 
sizeof(fmt)); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + fmt.fmt.pix_mp.pixelformat = __ColorFormatType_To_V4L2PixelFormat(bufferConf->eColorFormat, pCtx->videoInstInfo.HwVersion); + + if (exynos_v4l2_s_fmt(pCtx->hDec, &fmt) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + memcpy(&pCtx->outbufGeometry, bufferConf, sizeof(pCtx->outbufGeometry)); + pCtx->nOutbufPlanes = bufferConf->nPlaneCnt; + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Get Geometry (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Get_Geometry_Outbuf( + void *pHandle, + ExynosVideoGeometry *bufferConf) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_format fmt; + struct v4l2_crop crop; + int i, value; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (bufferConf == NULL) { + ALOGE("%s: Buffer geometry must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&fmt, 0, sizeof(fmt)); + memset(&crop, 0, sizeof(crop)); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + if (exynos_v4l2_g_fmt(pCtx->hDec, &fmt) != 0) { + if (errno == EAGAIN) + ret = VIDEO_ERROR_HEADERINFO; + else + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + if (exynos_v4l2_g_crop(pCtx->hDec, &crop) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + bufferConf->nFrameWidth = fmt.fmt.pix_mp.width; + bufferConf->nFrameHeight = fmt.fmt.pix_mp.height; + bufferConf->eColorFormat = __V4L2PixelFormat_To_ColorFormatType(fmt.fmt.pix_mp.pixelformat); + bufferConf->nStride = fmt.fmt.pix_mp.plane_fmt[0].bytesperline; + +#ifdef USE_DEINTERLACING_SUPPORT + if ((fmt.fmt.pix_mp.field == V4L2_FIELD_INTERLACED) || + (fmt.fmt.pix_mp.field == V4L2_FIELD_INTERLACED_TB) || + (fmt.fmt.pix_mp.field == V4L2_FIELD_INTERLACED_BT)) + bufferConf->bInterlaced = VIDEO_TRUE; + else +#endif + bufferConf->bInterlaced = VIDEO_FALSE; + + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC_GET_10BIT_INFO, &value); + if (value == 1) { + bufferConf->eFilledDataType = DATA_8BIT_WITH_2BIT; + if (pCtx->outbufGeometry.eColorFormat != bufferConf->eColorFormat) { + /* format is internally changed, because requested format is not supported by MFC H/W + * In this case, NV12 will be used. + */ + pCtx->nOutbufPlanes = 2; + +#ifdef USE_SINGLE_PALNE_SUPPORT + if (pCtx->videoInstInfo.eSecurityType == VIDEO_SECURE) + pCtx->nOutbufPlanes = 1; +#endif + } + } + + /* Get planes aligned buffer size */ + for (i = 0; i < pCtx->nOutbufPlanes; i++) + bufferConf->nAlignPlaneSize[i] = fmt.fmt.pix_mp.plane_fmt[i].sizeimage; + + bufferConf->cropRect.nTop = crop.c.top; + bufferConf->cropRect.nLeft = crop.c.left; + bufferConf->cropRect.nWidth = crop.c.width; + bufferConf->cropRect.nHeight = crop.c.height; + + ALOGV("%s: %s contents", __FUNCTION__, (bufferConf->eFilledDataType & DATA_10BIT)? 
"10bit":"8bit"); + + memcpy(&pCtx->outbufGeometry, bufferConf, sizeof(pCtx->outbufGeometry)); + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Setup (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Setup_Inbuf( + void *pHandle, + unsigned int nBufferCount) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoPlane *pVideoPlane = NULL; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_requestbuffers req; + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + int i; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (nBufferCount == 0) { + nBufferCount = MAX_INPUTBUFFER_COUNT; + ALOGV("%s: Change buffer count %d", __func__, nBufferCount); + } + + ALOGV("%s: setting up inbufs (%d) shared=%s\n", __func__, nBufferCount, + pCtx->bShareInbuf ? "true" : "false"); + + memset(&req, 0, sizeof(req)); + + req.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + req.count = nBufferCount; + + if (pCtx->bShareInbuf == VIDEO_TRUE) + req.memory = pCtx->videoInstInfo.nMemoryType; + else + req.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_reqbufs(pCtx->hDec, &req) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + if (req.count != nBufferCount) { + ALOGE("%s: asked for %d, got %d\n", __func__, nBufferCount, req.count); + ret = VIDEO_ERROR_NOMEM; + goto EXIT; + } + + pCtx->nInbufs = (int)req.count; + + pCtx->pInbuf = malloc(sizeof(*pCtx->pInbuf) * pCtx->nInbufs); + if (pCtx->pInbuf == NULL) { + ALOGE("Failed to allocate input buffer context"); + ret = VIDEO_ERROR_NOMEM; + goto EXIT; + } + memset(pCtx->pInbuf, 0, sizeof(*pCtx->pInbuf) * pCtx->nInbufs); + + memset(&buf, 0, sizeof(buf)); + + if (pCtx->bShareInbuf == VIDEO_FALSE) { + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.memory = V4L2_MEMORY_MMAP; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + for (i = 0; i < pCtx->nInbufs; i++) { + buf.index = i; + if (exynos_v4l2_querybuf(pCtx->hDec, &buf) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pVideoPlane = &pCtx->pInbuf[i].planes[0]; + + pVideoPlane->addr = mmap(NULL, + buf.m.planes[0].length, PROT_READ | PROT_WRITE, + MAP_SHARED, pCtx->hDec, buf.m.planes[0].m.mem_offset); + + if (pVideoPlane->addr == MAP_FAILED) { + ret = VIDEO_ERROR_MAPFAIL; + goto EXIT; + } + + pVideoPlane->allocSize = buf.m.planes[0].length; + pVideoPlane->dataSize = 0; + + pCtx->pInbuf[i].pGeometry = &pCtx->inbufGeometry; + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + pCtx->pInbuf[i].bRegistered = VIDEO_TRUE; + } + } + + return ret; + +EXIT: + if ((pCtx != NULL) && (pCtx->pInbuf != NULL)) { + if (pCtx->bShareInbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nInbufs; i++) { + pVideoPlane = &pCtx->pInbuf[i].planes[0]; + if (pVideoPlane->addr == MAP_FAILED) { + pVideoPlane->addr = NULL; + break; + } + + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + } + } + + free(pCtx->pInbuf); + pCtx->pInbuf = NULL; + } + + return ret; +} + +/* + * [Decoder Buffer OPS] Setup (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Setup_Outbuf( + void *pHandle, + unsigned int nBufferCount) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoPlane *pVideoPlane = NULL; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_requestbuffers req; + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + int i, j; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = 
VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (nBufferCount == 0) { + nBufferCount = MAX_OUTPUTBUFFER_COUNT; + ALOGV("%s: Change buffer count %d", __func__, nBufferCount); + } + + ALOGV("%s: setting up outbufs (%d) shared=%s\n", __func__, nBufferCount, + pCtx->bShareOutbuf ? "true" : "false"); + + memset(&req, 0, sizeof(req)); + + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + req.count = nBufferCount; + + if (pCtx->bShareOutbuf == VIDEO_TRUE) + req.memory = pCtx->videoInstInfo.nMemoryType; + else + req.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_reqbufs(pCtx->hDec, &req) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + if (req.count != nBufferCount) { + ALOGE("%s: asked for %d, got %d\n", __func__, nBufferCount, req.count); + ret = VIDEO_ERROR_NOMEM; + goto EXIT; + } + + pCtx->nOutbufs = req.count; + + pCtx->pOutbuf = malloc(sizeof(*pCtx->pOutbuf) * pCtx->nOutbufs); + if (pCtx->pOutbuf == NULL) { + ALOGE("Failed to allocate output buffer context"); + ret = VIDEO_ERROR_NOMEM; + goto EXIT; + } + memset(pCtx->pOutbuf, 0, sizeof(*pCtx->pOutbuf) * pCtx->nOutbufs); + + memset(&buf, 0, sizeof(buf)); + + if (pCtx->bShareOutbuf == VIDEO_FALSE) { + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.memory = V4L2_MEMORY_MMAP; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + for (i = 0; i < pCtx->nOutbufs; i++) { + buf.index = i; + if (exynos_v4l2_querybuf(pCtx->hDec, &buf) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + for (j = 0; j < pCtx->nOutbufPlanes; j++) { + pVideoPlane = &pCtx->pOutbuf[i].planes[j]; + pVideoPlane->addr = mmap(NULL, + buf.m.planes[j].length, PROT_READ | PROT_WRITE, + MAP_SHARED, pCtx->hDec, buf.m.planes[j].m.mem_offset); + + if (pVideoPlane->addr == MAP_FAILED) { + ret = VIDEO_ERROR_MAPFAIL; + goto EXIT; + } + + pVideoPlane->allocSize = buf.m.planes[j].length; + pVideoPlane->dataSize = 0; + } + + pCtx->pOutbuf[i].pGeometry = &pCtx->outbufGeometry; + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + pCtx->pOutbuf[i].bSlotUsed = VIDEO_FALSE; + pCtx->pOutbuf[i].nIndexUseCnt = 0; + pCtx->pOutbuf[i].bRegistered = VIDEO_TRUE; + } + } + + return ret; + +EXIT: + if ((pCtx != NULL) && (pCtx->pOutbuf != NULL)) { + if (pCtx->bShareOutbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nOutbufs; i++) { + for (j = 0; j < pCtx->nOutbufPlanes; j++) { + pVideoPlane = &pCtx->pOutbuf[i].planes[j]; + if (pVideoPlane->addr == MAP_FAILED) { + pVideoPlane->addr = NULL; + break; + } + + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + } + } + } + + free(pCtx->pOutbuf); + pCtx->pOutbuf = NULL; + } + + return ret; +} + +/* + * [Decoder Buffer OPS] Run (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Run_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) { + if (exynos_v4l2_streamon(pCtx->hDec, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) != 0) { + ALOGE("%s: Failed to streamon for input buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonInbuf = VIDEO_TRUE; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Run (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Run_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be 
supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + if (exynos_v4l2_streamon(pCtx->hDec, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) != 0) { + ALOGE("%s: Failed to streamon for output buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonOutbuf = VIDEO_TRUE; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Stop (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Stop_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_TRUE) { + if (exynos_v4l2_streamoff(pCtx->hDec, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) != 0) { + ALOGE("%s: Failed to streamoff for input buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonInbuf = VIDEO_FALSE; + } + + for (i = 0; i < pCtx->nInbufs; i++) { + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Stop (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Stop_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_TRUE) { + if (exynos_v4l2_streamoff(pCtx->hDec, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) != 0) { + ALOGE("%s: Failed to streamoff for output buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonOutbuf = VIDEO_FALSE; + } + + for (i = 0; i < pCtx->nOutbufs; i++) { + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + pCtx->pOutbuf[i].bSlotUsed = VIDEO_FALSE; + pCtx->pOutbuf[i].nIndexUseCnt = 0; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Wait (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Wait_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct pollfd poll_events; + int poll_state; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + poll_events.fd = pCtx->hDec; + poll_events.events = POLLOUT | POLLERR; + poll_events.revents = 0; + + do { + poll_state = poll((struct pollfd*)&poll_events, 1, VIDEO_DECODER_POLL_TIMEOUT); + if (poll_state > 0) { + if (poll_events.revents & POLLOUT) { + break; + } else { + ALOGE("%s: Poll return error", __func__); + ret = VIDEO_ERROR_POLL; + break; + } + } else if (poll_state < 0) { + ALOGE("%s: Poll state error", __func__); + ret = VIDEO_ERROR_POLL; + break; + } + } while (poll_state == 0); + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Decoder_Register_Inbuf( + void *pHandle, + ExynosVideoPlane *planes, + int nPlanes) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex, plane; + + if ((pCtx == NULL) || (planes == NULL) || (nPlanes != pCtx->nInbufPlanes)) { + ALOGE("%s: params must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + if (pCtx->pInbuf[nIndex].bRegistered == VIDEO_FALSE) { + for (plane = 0; plane < nPlanes; plane++) { + 
pCtx->pInbuf[nIndex].planes[plane].addr = planes[plane].addr; + pCtx->pInbuf[nIndex].planes[plane].allocSize = planes[plane].allocSize; + pCtx->pInbuf[nIndex].planes[plane].fd = planes[plane].fd; + ALOGV("%s: registered buf %d (addr=%p alloc_sz=%u fd=%d)\n", __func__, nIndex, + planes[plane].addr, planes[plane].allocSize, planes[plane].fd); + } + pCtx->pInbuf[nIndex].bRegistered = VIDEO_TRUE; + break; + } + } + + if (nIndex == pCtx->nInbufs) { + ALOGE("%s: can not find non-registered input buffer", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Decoder_Register_Outbuf( + void *pHandle, + ExynosVideoPlane *planes, + int nPlanes) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex, plane; + + if ((pCtx == NULL) || (planes == NULL) || (nPlanes != pCtx->nOutbufPlanes)) { + ALOGE("%s: params must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + if (pCtx->pOutbuf[nIndex].bRegistered == VIDEO_FALSE) { + for (plane = 0; plane < nPlanes; plane++) { + pCtx->pOutbuf[nIndex].planes[plane].addr = planes[plane].addr; + pCtx->pOutbuf[nIndex].planes[plane].allocSize = planes[plane].allocSize; + pCtx->pOutbuf[nIndex].planes[plane].fd = planes[plane].fd; + ALOGV("%s: registered buf %d[%d]:(addr=%p alloc_sz=%d fd=%d)\n", + __func__, nIndex, plane, planes[plane].addr, planes[plane].allocSize, planes[plane].fd); + } + + /* this is for saving interlaced type */ + if (pCtx->outbufGeometry.bInterlaced == VIDEO_TRUE) { + pCtx->pOutbuf[nIndex].planes[2].addr = planes[2].addr; + pCtx->pOutbuf[nIndex].planes[2].fd = planes[2].fd; + } + + pCtx->pOutbuf[nIndex].bRegistered = VIDEO_TRUE; + + break; + } + } + + if (nIndex == pCtx->nOutbufs) { + ALOGE("%s: can not find non-registered output buffer", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Decoder_Clear_RegisteredBuffer_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex, plane; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + for (plane = 0; plane < pCtx->nInbufPlanes; plane++) { + pCtx->pInbuf[nIndex].planes[plane].addr = NULL; + pCtx->pInbuf[nIndex].planes[plane].fd = -1; + } + + pCtx->pInbuf[nIndex].bRegistered = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Decoder_Clear_RegisteredBuffer_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex, plane; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + for (plane = 0; plane < pCtx->nOutbufPlanes; plane++) { + pCtx->pOutbuf[nIndex].planes[plane].addr = NULL; + pCtx->pOutbuf[nIndex].planes[plane].fd = -1; + } + + /* this is for saving interlaced type */ + if (pCtx->outbufGeometry.bInterlaced == VIDEO_TRUE) { + pCtx->pOutbuf[nIndex].planes[2].addr = NULL; + pCtx->pOutbuf[nIndex].planes[2].fd = -1; + } + pCtx->pOutbuf[nIndex].bRegistered = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Find (Input) + */ +static int 
MFC_Decoder_Find_Inbuf( + void *pHandle, + unsigned char *pBuffer) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + if (pCtx->pInbuf[nIndex].bQueued == VIDEO_FALSE) { + if ((pBuffer == NULL) || + (pCtx->pInbuf[nIndex].planes[0].addr == pBuffer)) + break; + } + } + + if (nIndex == pCtx->nInbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Decoder Buffer OPS] Find (Output) + */ +static int MFC_Decoder_Find_Outbuf( + void *pHandle, + unsigned char *pBuffer) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + if (pCtx->pOutbuf[nIndex].bQueued == VIDEO_FALSE) { + if ((pBuffer == NULL) || + (pCtx->pOutbuf[nIndex].planes[0].addr == pBuffer)) + break; + } + } + + if (nIndex == pCtx->nOutbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Decoder Buffer OPS] Enqueue (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Enqueue_Inbuf( + void *pHandle, + void *pBuffer[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int index, i, flags = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nInbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nInbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + index = MFC_Decoder_Find_Inbuf(pCtx, pBuffer[0]); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + buf.index = index; + if (pCtx->bShareInbuf == VIDEO_TRUE) { + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pCtx->pInbuf[index].planes[i].fd; + + buf.m.planes[i].length = pCtx->pInbuf[index].planes[i].allocSize; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + ALOGV("%s: shared inbuf(%d) plane(%d) addr=%lx len=%d used=%d\n", __func__, + index, i, + buf.m.planes[i].m.userptr, + buf.m.planes[i].length, + buf.m.planes[i].bytesused); + } + } else { + buf.memory = V4L2_MEMORY_MMAP; + for (i = 0; i < nPlanes; i++) { + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + } + } + + if (dataSize[0] <= 0) { + flags = EMPTY_DATA | LAST_FRAME; + ALOGD("%s: EMPTY DATA", __FUNCTION__); + } else { + if ((((OMX_BUFFERHEADERTYPE *)pPrivate)->nFlags & OMX_BUFFERFLAG_EOS) == OMX_BUFFERFLAG_EOS) + flags = LAST_FRAME; + + if ((((OMX_BUFFERHEADERTYPE *)pPrivate)->nFlags & OMX_BUFFERFLAG_CODECCONFIG) == OMX_BUFFERFLAG_CODECCONFIG) + flags |= CSD_FRAME; + + if (flags & (CSD_FRAME | 
LAST_FRAME)) + ALOGD("%s: DATA with flags(0x%x)", __FUNCTION__, flags); + } +#ifdef USE_ORIGINAL_HEADER + buf.reserved2 = flags; +#else + buf.input = flags; +#endif + + signed long long sec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp / 1E6); + signed long long usec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp) - (sec * 1E6); + buf.timestamp.tv_sec = (long)sec; + buf.timestamp.tv_usec = (long)usec; + + pCtx->pInbuf[buf.index].pPrivate = pPrivate; + pCtx->pInbuf[buf.index].bQueued = VIDEO_TRUE; + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hDec, &buf) != 0) { + ALOGE("%s: Failed to enqueue input buffer", __func__); + pthread_mutex_lock(pMutex); + pCtx->pInbuf[buf.index].pPrivate = NULL; + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Enqueue (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Enqueue_Outbuf( + void *pHandle, + void *pBuffer[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int i, index, state = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nOutbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nOutbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + index = MFC_Decoder_Find_Outbuf(pCtx, pBuffer[0]); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + buf.index = index; + + + if (pCtx->bShareOutbuf == VIDEO_TRUE) { + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pCtx->pOutbuf[index].planes[i].fd; + + buf.m.planes[i].length = pCtx->pOutbuf[index].planes[i].allocSize; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + ALOGV("%s: shared outbuf(%d) plane=%d addr=0x%lx len=%d used=%d\n", __func__, + index, i, + buf.m.planes[i].m.userptr, + buf.m.planes[i].length, + buf.m.planes[i].bytesused); + } + } else { + ALOGV("%s: non-shared outbuf(%d)\n", __func__, index); + buf.memory = V4L2_MEMORY_MMAP; + } + + pCtx->pOutbuf[buf.index].pPrivate = pPrivate; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_TRUE; + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hDec, &buf) != 0) { + pthread_mutex_lock(pMutex); + pCtx->pOutbuf[buf.index].pPrivate = NULL; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_FALSE; + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE, &state); + if (state == 1) { + /* The case of Resolution is changed */ + ret = VIDEO_ERROR_WRONGBUFFERSIZE; + } else { + ALOGE("%s: Failed to enqueue output buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + } + pthread_mutex_unlock(pMutex); + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Dequeue (Input) + */ +static 
ExynosVideoBuffer *MFC_Decoder_Dequeue_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoBuffer *pInbuf = NULL; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) { + pInbuf = NULL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + if (pCtx->bShareInbuf == VIDEO_TRUE) + buf.memory = pCtx->videoInstInfo.nMemoryType; + else + buf.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_dqbuf(pCtx->hDec, &buf) != 0) { + pInbuf = NULL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + + pInbuf = &pCtx->pInbuf[buf.index]; + if (pInbuf->bQueued == VIDEO_FALSE) { + pInbuf = NULL; + pthread_mutex_unlock(pMutex); + goto EXIT; + } + + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) + pInbuf = NULL; + + pthread_mutex_unlock(pMutex); + +EXIT: + return pInbuf; +} + +/* + * [Decoder Buffer OPS] Dequeue (Output) + */ +static ExynosVideoBuffer *MFC_Decoder_Dequeue_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoBuffer *pOutbuf = NULL; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + + int value = 0, state = 0; + int ret = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + pOutbuf = NULL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + if (pCtx->bShareOutbuf == VIDEO_TRUE) + buf.memory = pCtx->videoInstInfo.nMemoryType; + else + buf.memory = V4L2_MEMORY_MMAP; + + /* HACK: pOutbuf return -1 means DECODING_ONLY for almost cases */ + ret = exynos_v4l2_dqbuf(pCtx->hDec, &buf); + if (ret != 0) { + if (errno == EIO) + pOutbuf = (ExynosVideoBuffer *)VIDEO_ERROR_DQBUF_EIO; + else + pOutbuf = NULL; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + pOutbuf = NULL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + + pOutbuf = &pCtx->pOutbuf[buf.index]; + if (pOutbuf->bQueued == VIDEO_FALSE) { + pOutbuf = NULL; + ret = VIDEO_ERROR_NOBUFFERS; + pthread_mutex_unlock(pMutex); + goto EXIT; + } + + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_DISPLAY_STATUS, &value); + + switch (value) { + case 0: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DECODING_ONLY; +#ifdef USE_HEVC_HWIP + if ((pCtx->videoInstInfo.eCodecType == VIDEO_CODING_HEVC) || + (pCtx->videoInstInfo.HwVersion != (int)MFC_51)) { +#else + if (pCtx->videoInstInfo.HwVersion != (int)MFC_51) { +#endif + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE, &state); + if (state == 4) /* DPB realloc for S3D SEI */ + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_ENABLED_S3D; + } + break; + case 1: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DISPLAY_DECODING; + break; + case 2: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DISPLAY_ONLY; + break; + case 3: + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE, &state); + if (state == 1) /* Resolution is changed */ + 
pOutbuf->displayStatus = VIDEO_FRAME_STATUS_CHANGE_RESOL; + else /* Decoding is finished */ + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DECODING_FINISHED; + break; + case 4: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_LAST_FRAME; + break; + default: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_UNKNOWN; + break; + } + + switch (buf.flags & (0x7 << 3)) { + case V4L2_BUF_FLAG_KEYFRAME: + pOutbuf->frameType = VIDEO_FRAME_I; + break; + case V4L2_BUF_FLAG_PFRAME: + pOutbuf->frameType = VIDEO_FRAME_P; + break; + case V4L2_BUF_FLAG_BFRAME: + pOutbuf->frameType = VIDEO_FRAME_B; + break; + default: + pOutbuf->frameType = VIDEO_FRAME_OTHERS; + break; + }; + + if (buf.flags & V4L2_BUF_FLAG_ERROR) + pOutbuf->frameType |= VIDEO_FRAME_CORRUPT; + + if (pCtx->outbufGeometry.bInterlaced == VIDEO_TRUE) { + if ((buf.field == V4L2_FIELD_INTERLACED_TB) || + (buf.field == V4L2_FIELD_INTERLACED_BT)) { + pOutbuf->interlacedType = buf.field; + } else { + ALOGV("%s: buf.field's value is invald(%d)", __FUNCTION__, buf.field); + pOutbuf->interlacedType = V4L2_FIELD_NONE; + } + } + + pOutbuf->bQueued = VIDEO_FALSE; + + pthread_mutex_unlock(pMutex); + +EXIT: + return pOutbuf; +} + +static ExynosVideoErrorType MFC_Decoder_Clear_Queued_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (i = 0; i < pCtx->nInbufs; i++) { + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Decoder_Clear_Queued_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (i = 0; i < pCtx->nOutbufs; i++) { + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Cleanup Buffer (Input) + */ +static ExynosVideoErrorType MFC_Decoder_Cleanup_Buffer_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_requestbuffers req; + int nBufferCount = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + nBufferCount = 0; /* for clean-up */ + + memset(&req, 0, sizeof(req)); + + req.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + req.count = nBufferCount; + + if (pCtx->bShareInbuf == VIDEO_TRUE) + req.memory = pCtx->videoInstInfo.nMemoryType; + else + req.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_reqbufs(pCtx->hDec, &req) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pCtx->nInbufs = (int)req.count; + + if (pCtx->pInbuf != NULL) { + free(pCtx->pInbuf); + pCtx->pInbuf = NULL; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Cleanup Buffer (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Cleanup_Buffer_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_requestbuffers req; + int nBufferCount = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + nBufferCount = 0; /* for clean-up */ + + memset(&req, 0, 
sizeof(req)); + + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + req.count = nBufferCount; + + if (pCtx->bShareOutbuf == VIDEO_TRUE) + req.memory = pCtx->videoInstInfo.nMemoryType; + else + req.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_reqbufs(pCtx->hDec, &req) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pCtx->nOutbufs = (int)req.count; + + if (pCtx->pOutbuf != NULL) { + free(pCtx->pOutbuf); + pCtx->pOutbuf = NULL; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] Apply Registered Buffer (Output) + */ +static ExynosVideoErrorType MFC_Decoder_Apply_RegisteredBuffer_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hDec, V4L2_CID_MPEG_VIDEO_DECODER_WAIT_DECODING_START, 1) != 0) { + ALOGW("%s: The requested function is not implemented", __func__); + //ret = VIDEO_ERROR_APIFAIL; + //goto EXIT; /* For Backward compatibility */ + } + + ret = MFC_Decoder_Run_Outbuf(pHandle); + if (VIDEO_ERROR_NONE != ret) + goto EXIT; + + ret = MFC_Decoder_Stop_Outbuf(pHandle); + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] FindIndex (Input) + */ +static int MFC_Decoder_FindEmpty_Inbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + if (pCtx->pInbuf[nIndex].bQueued == VIDEO_FALSE) + break; + } + + if (nIndex == pCtx->nInbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Decoder Buffer OPS] ExtensionEnqueue (Input) + */ +static ExynosVideoErrorType MFC_Decoder_ExtensionEnqueue_Inbuf( + void *pHandle, + void *pBuffer[], + int pFd[], + unsigned int allocLen[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int index, i, flags = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nInbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nInbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + index = MFC_Decoder_FindEmpty_Inbuf(pCtx); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + buf.index = index; + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pFd[i]; + + buf.m.planes[i].length = allocLen[i]; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + + /* Temporary storage for Dequeue */ + pCtx->pInbuf[buf.index].planes[i].addr = pBuffer[i]; + pCtx->pInbuf[buf.index].planes[i].fd = pFd[i]; + 
pCtx->pInbuf[buf.index].planes[i].allocSize = allocLen[i]; + } + + if (dataSize[0] <= 0) { + flags = EMPTY_DATA | LAST_FRAME; + ALOGD("%s: EMPTY DATA", __FUNCTION__); + } else { + if ((((OMX_BUFFERHEADERTYPE *)pPrivate)->nFlags & OMX_BUFFERFLAG_EOS) == OMX_BUFFERFLAG_EOS) + flags = LAST_FRAME; + + if ((((OMX_BUFFERHEADERTYPE *)pPrivate)->nFlags & OMX_BUFFERFLAG_CODECCONFIG) == OMX_BUFFERFLAG_CODECCONFIG) + flags |= CSD_FRAME; + + if (flags & (CSD_FRAME | LAST_FRAME)) + ALOGD("%s: DATA with flags(0x%x)", __FUNCTION__, flags); + } +#ifdef USE_ORIGINAL_HEADER + buf.reserved2 = flags; +#else + buf.input = flags; +#endif + + signed long long sec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp / 1E6); + signed long long usec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp) - (sec * 1E6); + buf.timestamp.tv_sec = (long)sec; + buf.timestamp.tv_usec = (long)usec; + + pCtx->pInbuf[buf.index].pPrivate = pPrivate; + pCtx->pInbuf[buf.index].bQueued = VIDEO_TRUE; + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hDec, &buf) != 0) { + ALOGE("%s: Failed to enqueue input buffer", __func__); + pthread_mutex_lock(pMutex); + pCtx->pInbuf[buf.index].pPrivate = NULL; + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] ExtensionDequeue (Input) + */ +static ExynosVideoErrorType MFC_Decoder_ExtensionDequeue_Inbuf( + void *pHandle, + ExynosVideoBuffer *pVideoBuffer) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + buf.memory = pCtx->videoInstInfo.nMemoryType; + if (exynos_v4l2_dqbuf(pCtx->hDec, &buf) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + + if (pCtx->pInbuf[buf.index].bQueued == VIDEO_TRUE) + memcpy(pVideoBuffer, &pCtx->pInbuf[buf.index], sizeof(ExynosVideoBuffer)); + else + ret = VIDEO_ERROR_NOBUFFERS; + memset(&pCtx->pInbuf[buf.index], 0, sizeof(ExynosVideoBuffer)); + + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + +EXIT: + return ret; +} + + +/* + * [Decoder Buffer OPS] FindIndex (Output) + */ +static int MFC_Decoder_FindEmpty_Outbuf(void *pHandle) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + if ((pCtx->pOutbuf[nIndex].bQueued == VIDEO_FALSE) && + (pCtx->pOutbuf[nIndex].bSlotUsed == VIDEO_FALSE)) + break; + } + + if (nIndex == pCtx->nOutbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Decoder Buffer OPS] BufferIndexFree (Output) + */ +void MFC_Decoder_BufferIndexFree_Outbuf( + void *pHandle, + PrivateDataShareBuffer *pPDSB, + int index) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + int i, j; + + ALOGV("De-queue buf.index:%d, fd:%d", index, 
pCtx->pOutbuf[index].planes[0].fd); + + if (pCtx->pOutbuf[index].nIndexUseCnt == 0) + pCtx->pOutbuf[index].bSlotUsed = VIDEO_FALSE; + + for (i = 0; i < VIDEO_BUFFER_MAX_NUM; i++) { + if (pPDSB->dpbFD[i].fd < 0) + break; + + ALOGV("pPDSB->dpbFD[%d].fd:%d", i, pPDSB->dpbFD[i].fd); + for (j = 0; j < pCtx->nOutbufs; j++) { + if (pPDSB->dpbFD[i].fd == pCtx->pOutbuf[j].planes[0].fd) { + if (pCtx->pOutbuf[j].bQueued == VIDEO_FALSE) { + if (pCtx->pOutbuf[j].nIndexUseCnt > 0) + pCtx->pOutbuf[j].nIndexUseCnt--; + } else if(pCtx->pOutbuf[j].bQueued == VIDEO_TRUE) { + if (pCtx->pOutbuf[j].nIndexUseCnt > 1) { + /* The buffer being used as the reference buffer came again. */ + pCtx->pOutbuf[j].nIndexUseCnt--; + } else { + /* Reference DPB buffer is internally reused. */ + } + } + ALOGV("dec Cnt : FD:%d, pCtx->pOutbuf[%d].nIndexUseCnt:%d", pPDSB->dpbFD[i].fd, j, pCtx->pOutbuf[j].nIndexUseCnt); + if ((pCtx->pOutbuf[j].nIndexUseCnt == 0) && + (pCtx->pOutbuf[j].bQueued == VIDEO_FALSE)) + pCtx->pOutbuf[j].bSlotUsed = VIDEO_FALSE; + } + } + } + memset((char *)pPDSB, -1, sizeof(PrivateDataShareBuffer)); + + return; +} + +/* + * [Decoder Buffer OPS] ExtensionEnqueue (Output) + */ +static ExynosVideoErrorType MFC_Decoder_ExtensionEnqueue_Outbuf( + void *pHandle, + void *pBuffer[], + int pFd[], + unsigned int allocLen[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int i, index, state = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nOutbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nOutbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + + index = MFC_Decoder_Find_Outbuf(pCtx, pBuffer[0]); + if (index == -1) { + ALOGV("%s: Failed to find index", __func__); + index = MFC_Decoder_FindEmpty_Outbuf(pCtx); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + } + + buf.index = index; + ALOGV("En-queue index:%d pCtx->pOutbuf[buf.index].bQueued:%d, pFd[0]:%d", + index, pCtx->pOutbuf[buf.index].bQueued, pFd[0]); + + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pFd[i]; + + buf.m.planes[i].length = allocLen[i]; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + + /* Temporary storage for Dequeue */ + pCtx->pOutbuf[buf.index].planes[i].addr = pBuffer[i]; + pCtx->pOutbuf[buf.index].planes[i].fd = pFd[i]; + pCtx->pOutbuf[buf.index].planes[i].allocSize = allocLen[i]; + + ALOGV("%s: shared outbuf(%d) plane=%d addr=0x%p fd=%d len=%d used=%d\n", + __func__, index, i, + (void*)buf.m.planes[i].m.userptr, buf.m.planes[i].m.fd, + buf.m.planes[i].length, buf.m.planes[i].bytesused); + } + + /* this is for saving interlaced type */ + if (pCtx->outbufGeometry.bInterlaced == VIDEO_TRUE) { + 
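+ /* plane index 2 is only cached here and is not queued to the driver above; presumably this lets ExtensionDequeue_Outbuf() hand the interlaced field information back to the caller together with the buffer */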
pCtx->pOutbuf[buf.index].planes[2].addr = pBuffer[2]; + pCtx->pOutbuf[buf.index].planes[2].fd = pFd[2]; + } + + pCtx->pOutbuf[buf.index].pPrivate = pPrivate; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_TRUE; + pCtx->pOutbuf[buf.index].bSlotUsed = VIDEO_TRUE; + pCtx->pOutbuf[buf.index].nIndexUseCnt++; + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hDec, &buf) != 0) { + pthread_mutex_lock(pMutex); + pCtx->pOutbuf[buf.index].nIndexUseCnt--; + pCtx->pOutbuf[buf.index].pPrivate = NULL; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_FALSE; + if (pCtx->pOutbuf[buf.index].nIndexUseCnt == 0) + pCtx->pOutbuf[buf.index].bSlotUsed = VIDEO_FALSE; + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE, &state); + if (state == 1) { + /* The case of Resolution is changed */ + ret = VIDEO_ERROR_WRONGBUFFERSIZE; + } else { + ALOGE("%s: Failed to enqueue output buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + } + pthread_mutex_unlock(pMutex); + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Decoder Buffer OPS] ExtensionDequeue (Output) + */ +static ExynosVideoErrorType MFC_Decoder_ExtensionDequeue_Outbuf( + void *pHandle, + ExynosVideoBuffer *pVideoBuffer) +{ + ExynosVideoDecContext *pCtx = (ExynosVideoDecContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + ExynosVideoBuffer *pOutbuf = NULL; + PrivateDataShareBuffer *pPDSB = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int value = 0, state = 0; + int i, j; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + pOutbuf = NULL; + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + buf.memory = pCtx->videoInstInfo.nMemoryType; + + /* HACK: pOutbuf return -1 means DECODING_ONLY for almost cases */ + if (exynos_v4l2_dqbuf(pCtx->hDec, &buf) != 0) { + if (errno == EIO) + ret = VIDEO_ERROR_DQBUF_EIO; + else + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + + pOutbuf = &pCtx->pOutbuf[buf.index]; + if (pOutbuf->bQueued == VIDEO_FALSE) { + pOutbuf = NULL; + ret = VIDEO_ERROR_NOBUFFERS; + pthread_mutex_unlock(pMutex); + goto EXIT; + } + + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_DISPLAY_STATUS, &value); + + switch (value) { + case 0: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DECODING_ONLY; +#ifdef USE_HEVC_HWIP + if ((pCtx->videoInstInfo.eCodecType == VIDEO_CODING_HEVC) || + (pCtx->videoInstInfo.HwVersion != (int)MFC_51)) { +#else + if (pCtx->videoInstInfo.HwVersion != (int)MFC_51) { +#endif + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE, &state); + if (state == 4) /* DPB realloc for S3D SEI */ + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_ENABLED_S3D; + } + break; + case 1: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DISPLAY_DECODING; + break; + case 2: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DISPLAY_ONLY; + break; + case 3: + exynos_v4l2_g_ctrl(pCtx->hDec, V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE, &state); + if (state == 1) /* Resolution is changed */ + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_CHANGE_RESOL; + else /* Decoding is finished */ + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_DECODING_FINISHED; + break; + case 4: + pOutbuf->displayStatus = 
VIDEO_FRAME_STATUS_LAST_FRAME; + break; + default: + pOutbuf->displayStatus = VIDEO_FRAME_STATUS_UNKNOWN; + break; + } + + switch (buf.flags & (0x7 << 3)) { + case V4L2_BUF_FLAG_KEYFRAME: + pOutbuf->frameType = VIDEO_FRAME_I; + break; + case V4L2_BUF_FLAG_PFRAME: + pOutbuf->frameType = VIDEO_FRAME_P; + break; + case V4L2_BUF_FLAG_BFRAME: + pOutbuf->frameType = VIDEO_FRAME_B; + break; + default: + pOutbuf->frameType = VIDEO_FRAME_OTHERS; + break; + }; + + if (buf.flags & V4L2_BUF_FLAG_ERROR) + pOutbuf->frameType |= VIDEO_FRAME_CORRUPT; + + if (pCtx->outbufGeometry.bInterlaced == VIDEO_TRUE) { + if ((buf.field == V4L2_FIELD_INTERLACED_TB) || + (buf.field == V4L2_FIELD_INTERLACED_BT)) { + pOutbuf->interlacedType = buf.field; + } else { + ALOGV("%s: buf.field's value is invald(%d)", __FUNCTION__, buf.field); + pOutbuf->interlacedType = V4L2_FIELD_NONE; + } + } + + pPDSB = ((PrivateDataShareBuffer *)pCtx->pPrivateDataShareAddress) + buf.index; + if (pCtx->pOutbuf[buf.index].bQueued == VIDEO_TRUE) { + memcpy(pVideoBuffer, pOutbuf, sizeof(ExynosVideoBuffer)); + memcpy((char *)(&(pVideoBuffer->PDSB)), (char *)pPDSB, sizeof(PrivateDataShareBuffer)); + } else { + ret = VIDEO_ERROR_NOBUFFERS; + ALOGV("%s :: %d", __FUNCTION__, __LINE__); + } + + MFC_Decoder_BufferIndexFree_Outbuf(pHandle, pPDSB, buf.index); + pCtx->pOutbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + +EXIT: + return ret; +} + + +/* + * [Decoder OPS] Common + */ +static ExynosVideoDecOps defDecOps = { + .nSize = 0, + .Init = MFC_Decoder_Init, + .Finalize = MFC_Decoder_Finalize, + .Set_DisplayDelay = MFC_Decoder_Set_DisplayDelay, + .Set_IFrameDecoding = MFC_Decoder_Set_IFrameDecoding, + .Enable_PackedPB = MFC_Decoder_Enable_PackedPB, + .Enable_LoopFilter = MFC_Decoder_Enable_LoopFilter, + .Enable_SliceMode = MFC_Decoder_Enable_SliceMode, + .Get_ActualBufferCount = MFC_Decoder_Get_ActualBufferCount, + .Set_FrameTag = MFC_Decoder_Set_FrameTag, + .Get_FrameTag = MFC_Decoder_Get_FrameTag, + .Enable_SEIParsing = MFC_Decoder_Enable_SEIParsing, + .Get_FramePackingInfo = MFC_Decoder_Get_FramePackingInfo, + .Set_ImmediateDisplay = MFC_Decoder_Set_ImmediateDisplay, + .Enable_DTSMode = MFC_Decoder_Enable_DTSMode, + .Set_QosRatio = MFC_Decoder_Set_QosRatio, + .Enable_DualDPBMode = MFC_Decoder_Enable_DualDPBMode, + .Enable_DynamicDPB = MFC_Decoder_Enable_DynamicDPB, + .Set_BufferProcessType = MFC_Decoder_Set_BufferProcessType, +}; + +/* + * [Decoder Buffer OPS] Input + */ +static ExynosVideoDecBufferOps defInbufOps = { + .nSize = 0, + .Enable_Cacheable = MFC_Decoder_Enable_Cacheable_Inbuf, + .Set_Shareable = MFC_Decoder_Set_Shareable_Inbuf, + .Get_Buffer = NULL, + .Set_Geometry = MFC_Decoder_Set_Geometry_Inbuf, + .Get_Geometry = NULL, + .Setup = MFC_Decoder_Setup_Inbuf, + .Run = MFC_Decoder_Run_Inbuf, + .Stop = MFC_Decoder_Stop_Inbuf, + .Enqueue = MFC_Decoder_Enqueue_Inbuf, + .Enqueue_All = NULL, + .Dequeue = MFC_Decoder_Dequeue_Inbuf, + .Register = MFC_Decoder_Register_Inbuf, + .Clear_RegisteredBuffer = MFC_Decoder_Clear_RegisteredBuffer_Inbuf, + .Clear_Queue = MFC_Decoder_Clear_Queued_Inbuf, + .Cleanup_Buffer = MFC_Decoder_Cleanup_Buffer_Inbuf, + .Apply_RegisteredBuffer = NULL, + .ExtensionEnqueue = MFC_Decoder_ExtensionEnqueue_Inbuf, + .ExtensionDequeue = MFC_Decoder_ExtensionDequeue_Inbuf, +}; + +/* + * [Decoder Buffer OPS] Output + */ +static ExynosVideoDecBufferOps defOutbufOps = { + .nSize = 0, + .Enable_Cacheable = MFC_Decoder_Enable_Cacheable_Outbuf, + .Set_Shareable = MFC_Decoder_Set_Shareable_Outbuf, + .Get_Buffer = 
MFC_Decoder_Get_Buffer_Outbuf, + .Set_Geometry = MFC_Decoder_Set_Geometry_Outbuf, + .Get_Geometry = MFC_Decoder_Get_Geometry_Outbuf, + .Setup = MFC_Decoder_Setup_Outbuf, + .Run = MFC_Decoder_Run_Outbuf, + .Stop = MFC_Decoder_Stop_Outbuf, + .Enqueue = MFC_Decoder_Enqueue_Outbuf, + .Enqueue_All = NULL, + .Dequeue = MFC_Decoder_Dequeue_Outbuf, + .Register = MFC_Decoder_Register_Outbuf, + .Clear_RegisteredBuffer = MFC_Decoder_Clear_RegisteredBuffer_Outbuf, + .Clear_Queue = MFC_Decoder_Clear_Queued_Outbuf, + .Cleanup_Buffer = MFC_Decoder_Cleanup_Buffer_Outbuf, + .Apply_RegisteredBuffer = MFC_Decoder_Apply_RegisteredBuffer_Outbuf, + .ExtensionEnqueue = MFC_Decoder_ExtensionEnqueue_Outbuf, + .ExtensionDequeue = MFC_Decoder_ExtensionDequeue_Outbuf, +}; + +ExynosVideoErrorType MFC_Exynos_Video_GetInstInfo_Decoder( + ExynosVideoInstInfo *pVideoInstInfo) +{ + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int hDec = -1; + int mode = 0, version = 0; + + if (pVideoInstInfo == NULL) { + ALOGE("%s: bad parameter", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + +#ifdef USE_HEVC_HWIP + if (pVideoInstInfo->eCodecType == VIDEO_CODING_HEVC) + hDec = exynos_v4l2_open_devname(VIDEO_HEVC_DECODER_NAME, O_RDWR, 0); + else +#endif + hDec = exynos_v4l2_open_devname(VIDEO_MFC_DECODER_NAME, O_RDWR, 0); + + if (hDec < 0) { + ALOGE("%s: Failed to open decoder device", __func__); + ret = VIDEO_ERROR_OPENFAIL; + goto EXIT; + } + + if (exynos_v4l2_g_ctrl(hDec, V4L2_CID_MPEG_MFC_GET_VERSION_INFO, &version) != 0) { + ALOGW("%s: HW version information is not available", __func__); +#ifdef USE_HEVC_HWIP + if (pVideoInstInfo->eCodecType == VIDEO_CODING_HEVC) + pVideoInstInfo->HwVersion = (int)HEVC_10; + else +#endif + pVideoInstInfo->HwVersion = (int)MFC_65; + } else { + pVideoInstInfo->HwVersion = version; + } + + if (exynos_v4l2_g_ctrl(hDec, V4L2_CID_MPEG_MFC_GET_EXT_INFO, &mode) != 0) { + pVideoInstInfo->specificInfo.dec.bDualDPBSupport = VIDEO_FALSE; + pVideoInstInfo->specificInfo.dec.bDynamicDPBSupport = VIDEO_FALSE; + pVideoInstInfo->specificInfo.dec.bLastFrameSupport = VIDEO_FALSE; + goto EXIT; + } + + pVideoInstInfo->specificInfo.dec.bSkypeSupport = (mode & (0x1 << 3))? VIDEO_TRUE:VIDEO_FALSE; + pVideoInstInfo->specificInfo.dec.bLastFrameSupport = (mode & (0x1 << 2))? VIDEO_TRUE:VIDEO_FALSE; + pVideoInstInfo->specificInfo.dec.bDynamicDPBSupport = (mode & (0x1 << 1))? VIDEO_TRUE:VIDEO_FALSE; +#ifndef USE_FORCEFULLY_DISABLE_DUALDPB + pVideoInstInfo->specificInfo.dec.bDualDPBSupport = (mode & (0x1 << 0))? 
VIDEO_TRUE:VIDEO_FALSE; +#endif + + pVideoInstInfo->SwVersion = 0; +#ifdef CID_SUPPORT + if (pVideoInstInfo->specificInfo.dec.bSkypeSupport == VIDEO_TRUE) { + int swVersion = 0; + + if (exynos_v4l2_g_ctrl(hDec, V4L2_CID_MPEG_MFC_GET_DRIVER_INFO, &swVersion) != 0) { + ALOGE("%s: g_ctrl failed(V4L2_CID_MPEG_MFC_GET_DRIVER_INFO)", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pVideoInstInfo->SwVersion = (unsigned long long)swVersion; + } +#endif + + __Set_SupportFormat(pVideoInstInfo); + +EXIT: + if (hDec >= 0) + exynos_v4l2_close(hDec); + + return ret; +} + +int MFC_Exynos_Video_Register_Decoder( + ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps) +{ + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if ((pDecOps == NULL) || (pInbufOps == NULL) || (pOutbufOps == NULL)) { + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + defDecOps.nSize = sizeof(defDecOps); + defInbufOps.nSize = sizeof(defInbufOps); + defOutbufOps.nSize = sizeof(defOutbufOps); + + memcpy((char *)pDecOps + sizeof(pDecOps->nSize), (char *)&defDecOps + sizeof(defDecOps.nSize), + pDecOps->nSize - sizeof(pDecOps->nSize)); + + memcpy((char *)pInbufOps + sizeof(pInbufOps->nSize), (char *)&defInbufOps + sizeof(defInbufOps.nSize), + pInbufOps->nSize - sizeof(pInbufOps->nSize)); + + memcpy((char *)pOutbufOps + sizeof(pOutbufOps->nSize), (char *)&defOutbufOps + sizeof(defOutbufOps.nSize), + pOutbufOps->nSize - sizeof(pOutbufOps->nSize)); + +EXIT: + return ret; +} diff --git a/libvideocodec/enc/ExynosVideoEncoder.c b/libvideocodec/enc/ExynosVideoEncoder.c new file mode 100644 index 0000000..c505a4f --- /dev/null +++ b/libvideocodec/enc/ExynosVideoEncoder.c @@ -0,0 +1,4054 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +/* + * @file ExynosVideoEncoder.c + * @brief + * @author Hyeyeon Chung (hyeon.chung@samsung.com) + * @author Jinsung Yang (jsgood.yang@samsung.com) + * @author Yunji Kim (yunji.kim@samsung.com) + * @version 1.0.0 + * @history + * 2012.02.09: Initial Version + */ + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include + +#include "videodev2_exynos_media.h" +#ifdef USE_EXYNOS_MEDIA_EXT +#include "videodev2_exynos_media_ext.h" +#endif +#ifdef USE_MFC_MEDIA +#include "exynos_mfc_media.h" +#endif + +#include +#include "exynos_ion.h" + +#include "ExynosVideoApi.h" +#include "ExynosVideoEnc.h" +#include "OMX_Core.h" + +/* #define LOG_NDEBUG 0 */ +#define LOG_TAG "ExynosVideoEncoder" +#include + +#define MAX_CTRL_NUM 107 +#define H264_CTRL_NUM 91 /* +7(svc), +4(skype), +1(roi), +4(qp range) */ +#define MPEG4_CTRL_NUM 26 /* +4(qp range) */ +#define H263_CTRL_NUM 18 /* +2(qp range) */ +#define VP8_CTRL_NUM 30 /* +3(svc), +2(qp range) */ +#define HEVC_CTRL_NUM 53 /* +7(svc), +1(roi), +4(qp range)*/ +#define VP9_CTRL_NUM 29 /* +3(svc), +4(qp range) */ + +#define MAX_INPUTBUFFER_COUNT 32 +#define MAX_OUTPUTBUFFER_COUNT 32 + +#ifdef USE_DEFINE_H264_SEI_TYPE +enum v4l2_mpeg_video_h264_sei_fp_type { + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_CHEKERBOARD = 0, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_COLUMN = 1, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_ROW = 2, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_SIDE_BY_SIDE = 3, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_TOP_BOTTOM = 4, + V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_TEMPORAL = 5, +}; +#endif + +static const int vp8_qp_trans[] = +{ + 0, 1, 2, 3, 4, 5, 7, 8, + 9, 10, 12, 13, 15, 17, 18, 19, + 20, 21, 23, 24, 25, 26, 27, 28, + 29, 30, 31, 33, 35, 37, 39, 41, + 43, 45, 47, 49, 51, 53, 55, 57, + 59, 61, 64, 67, 70, 73, 76, 79, + 82, 85, 88, 91, 94, 97, 100, 103, + 106, 109, 112, 115, 118, 121, 124, 127, +}; + +const int vp9_qp_trans[] = { + 0, 4, 8, 12, 16, 20, 24, 28, + 32, 36, 40, 44, 48, 52, 56, 60, + 64, 68, 72, 76, 80, 84, 88, 92, + 96, 100, 104, 108, 112, 116, 120, 124, + 128, 132, 136, 140, 144, 148, 152, 156, + 160, 164, 168, 172, 176, 180, 184, 188, + 192, 196, 200, 204, 208, 212, 216, 220, + 224, 228, 232, 236, 240, 244, 249, 255, +}; + +#define GET_VALUE(value, min, max) ((value < min)? min:(value > max)? 
max:value) +#define GET_H264_QP_VALUE(value) GET_VALUE(value, 0, 51) +#define GET_MPEG4_QP_VALUE(value) GET_VALUE(value, 1, 31) +#define GET_H263_QP_VALUE(value) GET_VALUE(value, 1, 31) +#define GET_VP8_QP_VALUE(value) (vp8_qp_trans[GET_VALUE(value, 0, ((int)(sizeof(vp8_qp_trans)/sizeof(int)) - 1))]) +#define GET_HEVC_QP_VALUE(value) GET_VALUE(value, 0, 51) +#define GET_VP9_QP_VALUE(value) (vp9_qp_trans[GET_VALUE(value, 1, ((int)(sizeof(vp9_qp_trans)/sizeof(int)) - 1))]) + +/* + * [Common] __CodingType_To_V4L2PixelFormat + */ +static unsigned int __CodingType_To_V4L2PixelFormat(ExynosVideoCodingType codingType) +{ + unsigned int pixelformat = V4L2_PIX_FMT_H264; + + switch (codingType) { + case VIDEO_CODING_AVC: + pixelformat = V4L2_PIX_FMT_H264; + break; + case VIDEO_CODING_MPEG4: + pixelformat = V4L2_PIX_FMT_MPEG4; + break; + case VIDEO_CODING_VP8: + pixelformat = V4L2_PIX_FMT_VP8; + break; + case VIDEO_CODING_H263: + pixelformat = V4L2_PIX_FMT_H263; + break; + case VIDEO_CODING_VC1: + pixelformat = V4L2_PIX_FMT_VC1_ANNEX_G; + break; + case VIDEO_CODING_VC1_RCV: + pixelformat = V4L2_PIX_FMT_VC1_ANNEX_L; + break; + case VIDEO_CODING_MPEG2: + pixelformat = V4L2_PIX_FMT_MPEG2; + break; +#ifdef USE_HEVCENC_SUPPORT + case VIDEO_CODING_HEVC: + pixelformat = V4L2_PIX_FMT_HEVC; + break; +#endif +#ifdef USE_VP9ENC_SUPPORT + case VIDEO_CODING_VP9: + pixelformat = V4L2_PIX_FMT_VP9; + break; +#endif + default: + pixelformat = V4L2_PIX_FMT_H264; + break; + } + + return pixelformat; +} + +/* + * [Common] __ColorFormatType_To_V4L2PixelFormat + */ +static unsigned int __ColorFormatType_To_V4L2PixelFormat( + ExynosVideoColorFormatType colorFormatType, + int nHwVersion) +{ + unsigned int pixelformat = V4L2_PIX_FMT_NV12M; + + switch (colorFormatType) { + case VIDEO_COLORFORMAT_NV12M: + pixelformat = V4L2_PIX_FMT_NV12M; + break; + case VIDEO_COLORFORMAT_NV21M: + pixelformat = V4L2_PIX_FMT_NV21M; + break; + case VIDEO_COLORFORMAT_NV12M_TILED: + if (nHwVersion == (int)MFC_51) + pixelformat = V4L2_PIX_FMT_NV12MT; + else + pixelformat = V4L2_PIX_FMT_NV12MT_16X16; + break; + case VIDEO_COLORFORMAT_I420M: + pixelformat = V4L2_PIX_FMT_YUV420M; + break; + case VIDEO_COLORFORMAT_YV12M: + pixelformat = V4L2_PIX_FMT_YVU420M; + break; +#ifdef USE_SINGLE_PALNE_SUPPORT + case VIDEO_COLORFORMAT_NV12: + pixelformat = V4L2_PIX_FMT_NV12N; + break; + case VIDEO_COLORFORMAT_I420: + pixelformat = V4L2_PIX_FMT_YUV420N; + break; + case VIDEO_COLORFORMAT_NV12_TILED: + pixelformat = V4L2_PIX_FMT_NV12NT; + break; +#endif + case VIDEO_COLORFORMAT_ARGB8888: + pixelformat = V4L2_PIX_FMT_ARGB32; + break; + case VIDEO_COLORFORMAT_BGRA8888: + pixelformat = V4L2_PIX_FMT_BGR32; + break; + case VIDEO_COLORFORMAT_RGBA8888: + pixelformat = V4L2_PIX_FMT_RGB32X; + break; + default: + pixelformat = V4L2_PIX_FMT_NV12M; + break; + } + + return pixelformat; +} + +/* + * [Common] __Set_SupportFormat + */ +static void __Set_SupportFormat(ExynosVideoInstInfo *pVideoInstInfo) +{ + int nLastIndex = 0; + + if (pVideoInstInfo == NULL) { + ALOGE("%s: ExynosVideoInstInfo must be supplied", __func__); + return ; + } + + memset(pVideoInstInfo->supportFormat, (int)VIDEO_COLORFORMAT_UNKNOWN, sizeof(pVideoInstInfo->supportFormat)); + + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV21M; + + switch ((int)pVideoInstInfo->HwVersion) { + case MFC_101: /* NV12, NV21, I420, YV12 */ + case MFC_100: + case 
MFC_1010: + case MFC_1011: + case MFC_1020: + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_YV12M; + break; + case MFC_90: /* NV12, NV21, BGRA, RGBA, I420, YV12, ARGB */ + case MFC_80: +#ifdef USE_HEVC_HWIP + case HEVC_10: +#endif + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_BGRA8888; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_RGBA8888; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_YV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_ARGB8888; + break; + case MFC_723: /* NV12, NV21, BGRA, RGBA, I420, YV12, ARGB, NV12T */ + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_BGRA8888; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_RGBA8888; + case MFC_72: /* NV12, NV21, I420, YV12, ARGB, NV12T */ + case MFC_77: + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_I420M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_YV12M; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_ARGB8888; + case MFC_78: /* NV12, NV21, NV12T */ + case MFC_65: + case MFC_61: + case MFC_51: + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12_TILED; + pVideoInstInfo->supportFormat[nLastIndex++] = VIDEO_COLORFORMAT_NV12M_TILED; + break; + default: + break; + } + +EXIT: + return ; +} + +/* + * [Encoder OPS] Init + */ +static void *MFC_Encoder_Init(ExynosVideoInstInfo *pVideoInfo) +{ + ExynosVideoEncContext *pCtx = NULL; + pthread_mutex_t *pMutex = NULL; + int needCaps = (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_STREAMING); + + int hIonClient = -1; + + if (pVideoInfo == NULL) { + ALOGE("%s: bad parameter", __func__); + goto EXIT_ALLOC_FAIL; + } + + pCtx = (ExynosVideoEncContext *)malloc(sizeof(*pCtx)); + if (pCtx == NULL) { + ALOGE("%s: Failed to allocate encoder context buffer", __func__); + goto EXIT_ALLOC_FAIL; + } + + memset(pCtx, 0, sizeof(*pCtx)); + +#ifdef USE_HEVC_HWIP + if (pVideoInfo->eCodecType == VIDEO_CODING_HEVC) { + if (pVideoInfo->eSecurityType == VIDEO_SECURE) { + pCtx->hEnc = exynos_v4l2_open_devname(VIDEO_HEVC_SECURE_ENCODER_NAME, O_RDWR, 0); + } else { + pCtx->hEnc = exynos_v4l2_open_devname(VIDEO_HEVC_ENCODER_NAME, O_RDWR, 0); + } + } else +#endif + { + if (pVideoInfo->eSecurityType == VIDEO_SECURE) { + pCtx->hEnc = exynos_v4l2_open_devname(VIDEO_SECURE_ENCODER_NAME, O_RDWR, 0); + } else { + pCtx->hEnc = exynos_v4l2_open_devname(VIDEO_ENCODER_NAME, O_RDWR, 0); + } + } + + if (pCtx->hEnc < 0) { + ALOGE("%s: Failed to open encoder device", __func__); + goto EXIT_OPEN_FAIL; + } + + memcpy(&pCtx->videoInstInfo, pVideoInfo, sizeof(pCtx->videoInstInfo)); + + ALOGV("%s: MFC version is %x", __func__, pCtx->videoInstInfo.HwVersion); + + if (!exynos_v4l2_querycap(pCtx->hEnc, needCaps)) { + ALOGE("%s: Failed to querycap", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + pCtx->bStreamonInbuf = VIDEO_FALSE; + pCtx->bStreamonOutbuf = VIDEO_FALSE; + + pMutex = (pthread_mutex_t *)malloc(sizeof(pthread_mutex_t)); + if (pMutex == NULL) { + ALOGE("%s: Failed to allocate mutex about input buffer", __func__); + goto EXIT_QUERYCAP_FAIL; + } + if 
(pthread_mutex_init(pMutex, NULL) != 0) { + free(pMutex); + goto EXIT_QUERYCAP_FAIL; + } + pCtx->pInMutex = (void*)pMutex; + + pMutex = (pthread_mutex_t *)malloc(sizeof(pthread_mutex_t)); + if (pMutex == NULL) { + ALOGE("%s: Failed to allocate mutex about output buffer", __func__); + goto EXIT_QUERYCAP_FAIL; + } + if (pthread_mutex_init(pMutex, NULL) != 0) { + free(pMutex); + goto EXIT_QUERYCAP_FAIL; + } + pCtx->pOutMutex = (void*)pMutex; + + hIonClient = ion_open(); + if (hIonClient < 0) { + ALOGE("%s: Failed to create ion_client", __func__); + goto EXIT_QUERYCAP_FAIL; + } + pCtx->hIONHandle = hIonClient; + + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + if (ion_alloc_fd(pCtx->hIONHandle, sizeof(TemporalLayerShareBuffer), + 0, ION_HEAP_SYSTEM_MASK, ION_FLAG_CACHED, &(pCtx->nTemporalLayerShareBufferFD)) < 0 ) { + ALOGE("%s: Failed to ion_alloc_fd for nTemporalLayerShareBufferFD", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + pCtx->pTemporalLayerShareBufferAddr = mmap(NULL, sizeof(TemporalLayerShareBuffer), + PROT_READ | PROT_WRITE, MAP_SHARED, pCtx->nTemporalLayerShareBufferFD, 0); + if (pCtx->pTemporalLayerShareBufferAddr == MAP_FAILED) { + ALOGE("%s: Failed to mmap for nTemporalLayerShareBufferFD", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + memset(pCtx->pTemporalLayerShareBufferAddr, 0, sizeof(TemporalLayerShareBuffer)); + } + + if (pCtx->videoInstInfo.specificInfo.enc.bRoiInfoSupport == VIDEO_TRUE) { + if (ion_alloc_fd(pCtx->hIONHandle, sizeof(RoiInfoShareBuffer), + 0, ION_HEAP_SYSTEM_MASK, ION_FLAG_CACHED, &(pCtx->nRoiShareBufferFD)) < 0 ) { + ALOGE("%s: Failed to ion_alloc_fd for nRoiShareBufferFD", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + pCtx->pRoiShareBufferAddr = mmap(NULL, sizeof(RoiInfoShareBuffer), + PROT_READ | PROT_WRITE, MAP_SHARED, pCtx->nRoiShareBufferFD, 0); + if (pCtx->pRoiShareBufferAddr == MAP_FAILED) { + ALOGE("%s: Failed to mmap for nRoiShareBufferFD", __func__); + goto EXIT_QUERYCAP_FAIL; + } + + memset(pCtx->pRoiShareBufferAddr, 0, sizeof(RoiInfoShareBuffer)); + } + + return (void *)pCtx; + +EXIT_QUERYCAP_FAIL: + if (pCtx->pInMutex != NULL) { + pthread_mutex_destroy(pCtx->pInMutex); + free(pCtx->pInMutex); + } + + if (pCtx->pOutMutex != NULL) { + pthread_mutex_destroy(pCtx->pOutMutex); + free(pCtx->pOutMutex); + } + + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + + if (pCtx->pTemporalLayerShareBufferAddr != NULL) { + munmap(pCtx->pTemporalLayerShareBufferAddr, sizeof(TemporalLayerShareBuffer)); + pCtx->pTemporalLayerShareBufferAddr = NULL; + } + + /* free a ion_buffer */ + if (pCtx->nTemporalLayerShareBufferFD > 0) { + close(pCtx->nTemporalLayerShareBufferFD); + pCtx->nTemporalLayerShareBufferFD = -1; + } + } + + if (pCtx->videoInstInfo.specificInfo.enc.bRoiInfoSupport == VIDEO_TRUE) { + + if (pCtx->pRoiShareBufferAddr != NULL) { + munmap(pCtx->pRoiShareBufferAddr, sizeof(RoiInfoShareBuffer)); + pCtx->pRoiShareBufferAddr = NULL; + } + + /* free a ion_buffer */ + if (pCtx->nRoiShareBufferFD > 0) { + close(pCtx->nRoiShareBufferFD); + pCtx->nRoiShareBufferFD = -1; + } + } + + /* free a ion_client */ + if (pCtx->hIONHandle > 0) { + ion_close(pCtx->hIONHandle); + pCtx->hIONHandle = -1; + } + + exynos_v4l2_close(pCtx->hEnc); + +EXIT_OPEN_FAIL: + free(pCtx); + +EXIT_ALLOC_FAIL: + return NULL; +} + +/* + * [Encoder OPS] Finalize + */ +static ExynosVideoErrorType MFC_Encoder_Finalize(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoPlane *pVideoPlane = 
NULL; + pthread_mutex_t *pMutex = NULL; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i, j; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + if (pCtx->pTemporalLayerShareBufferAddr != NULL) { + munmap(pCtx->pTemporalLayerShareBufferAddr, sizeof(TemporalLayerShareBuffer)); + pCtx->pTemporalLayerShareBufferAddr = NULL; + } + + /* free a ion_buffer */ + if (pCtx->nTemporalLayerShareBufferFD > 0) { + close(pCtx->nTemporalLayerShareBufferFD); + pCtx->nTemporalLayerShareBufferFD = -1; + } + } + + if (pCtx->videoInstInfo.specificInfo.enc.bRoiInfoSupport == VIDEO_TRUE) { + if (pCtx->pRoiShareBufferAddr != NULL) { + munmap(pCtx->pRoiShareBufferAddr, sizeof(RoiInfoShareBuffer)); + pCtx->pRoiShareBufferAddr = NULL; + } + + /* free a ion_buffer */ + if (pCtx->nRoiShareBufferFD > 0) { + close(pCtx->nRoiShareBufferFD); + pCtx->nRoiShareBufferFD = -1; + } + } + + /* free a ion_client */ + if (pCtx->hIONHandle > 0) { + ion_close(pCtx->hIONHandle); + pCtx->hIONHandle = -1; + } + + if (pCtx->pOutMutex != NULL) { + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_destroy(pMutex); + free(pMutex); + pCtx->pOutMutex = NULL; + } + + if (pCtx->pInMutex != NULL) { + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_destroy(pMutex); + free(pMutex); + pCtx->pInMutex = NULL; + } + + if (pCtx->bShareInbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nInbufs; i++) { + for (j = 0; j < pCtx->nInbufPlanes; j++) { + pVideoPlane = &pCtx->pInbuf[i].planes[j]; + if (pVideoPlane->addr != NULL) { + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + pVideoPlane->addr = NULL; + pVideoPlane->allocSize = 0; + pVideoPlane->dataSize = 0; + } + + pCtx->pInbuf[i].pGeometry = NULL; + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + pCtx->pInbuf[i].bRegistered = VIDEO_FALSE; + } + } + } + + if (pCtx->bShareOutbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nOutbufs; i++) { + for (j = 0; j < pCtx->nOutbufPlanes; j++) { + pVideoPlane = &pCtx->pOutbuf[i].planes[j]; + if (pVideoPlane->addr != NULL) { + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + pVideoPlane->addr = NULL; + pVideoPlane->allocSize = 0; + pVideoPlane->dataSize = 0; + } + + pCtx->pOutbuf[i].pGeometry = NULL; + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + pCtx->pOutbuf[i].bRegistered = VIDEO_FALSE; + } + } + } + + if (pCtx->pInbuf != NULL) { + free(pCtx->pInbuf); + pCtx->pInbuf = NULL; + } + + if (pCtx->pOutbuf != NULL) { + free(pCtx->pOutbuf); + pCtx->pOutbuf = NULL; + } + + if (pCtx->hEnc >= 0) + exynos_v4l2_close(pCtx->hEnc); + + free(pCtx); + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Extended Control + */ +static ExynosVideoErrorType MFC_Encoder_Set_EncParam ( + void *pHandle, + ExynosVideoEncParam *pEncParam) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoEncInitParam *pInitParam = NULL; + ExynosVideoEncCommonParam *pCommonParam = NULL; + + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + int i; + struct v4l2_ext_control ext_ctrl[MAX_CTRL_NUM]; + struct v4l2_ext_controls ext_ctrls; + + if ((pCtx == NULL) || (pEncParam == NULL)) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pInitParam = &pEncParam->initParam; + pCommonParam = &pEncParam->commonParam; + + /* common parameters */ + ext_ctrl[0].id = V4L2_CID_MPEG_VIDEO_GOP_SIZE; + ext_ctrl[0].value = pCommonParam->IDRPeriod; + 
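+ /* ext_ctrl[1]..[7] below complete the codec-independent control set; the per-codec switch further down continues filling ext_ctrl[] from index 8 */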
ext_ctrl[1].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MODE; + ext_ctrl[1].value = pCommonParam->SliceMode; /* 0: one, 1: fixed #mb, 2: fixed #bytes */ + ext_ctrl[2].id = V4L2_CID_MPEG_VIDEO_CYCLIC_INTRA_REFRESH_MB; + ext_ctrl[2].value = pCommonParam->RandomIntraMBRefresh; + ext_ctrl[3].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING; + ext_ctrl[3].value = pCommonParam->PadControlOn; + ext_ctrl[4].id = V4L2_CID_MPEG_MFC51_VIDEO_PADDING_YUV; + ext_ctrl[4].value = pCommonParam->CrPadVal; + ext_ctrl[4].value |= (pCommonParam->CbPadVal << 8); + ext_ctrl[4].value |= (pCommonParam->LumaPadVal << 16); + ext_ctrl[5].id = V4L2_CID_MPEG_VIDEO_FRAME_RC_ENABLE; + ext_ctrl[5].value = pCommonParam->EnableFRMRateControl; + ext_ctrl[6].id = V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE; + ext_ctrl[6].value = pCommonParam->EnableMBRateControl; + ext_ctrl[7].id = V4L2_CID_MPEG_VIDEO_BITRATE; + + /* FIXME temporary fix */ + if (pCommonParam->Bitrate) + ext_ctrl[7].value = pCommonParam->Bitrate; + else + ext_ctrl[7].value = 1; /* just for testing Movie studio */ + + /* codec specific parameters */ + switch (pEncParam->eCompressionFormat) { + case VIDEO_CODING_AVC: + { + ExynosVideoEncH264Param *pH264Param = &pEncParam->codecParam.h264; + + /* common parameters but id is depends on codec */ + ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_H264_I_FRAME_QP; + ext_ctrl[8].value = pCommonParam->FrameQp; + ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_H264_P_FRAME_QP; + ext_ctrl[9].value = pCommonParam->FrameQp_P; + ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; /* QP range : I frame */ + ext_ctrl[10].value = GET_H264_QP_VALUE(pCommonParam->QpRange.QpMax_I); + ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_H264_MIN_QP; + ext_ctrl[11].value = GET_H264_QP_VALUE(pCommonParam->QpRange.QpMin_I); + ext_ctrl[12].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; + ext_ctrl[12].value = pCommonParam->CBRPeriodRf; + + /* H.264 specific parameters */ + switch (pCommonParam->SliceMode) { + case 0: + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; + ext_ctrl[13].value = 1; /* default */ + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; + ext_ctrl[14].value = 2800; /* based on MFC6.x */ + break; + case 1: + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; + ext_ctrl[13].value = pH264Param->SliceArgument; + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; + ext_ctrl[14].value = 2800; /* based on MFC6.x */ + break; + case 2: + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; + ext_ctrl[13].value = 1; /* default */ + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; + ext_ctrl[14].value = pH264Param->SliceArgument; + break; + default: + break; + } + + ext_ctrl[15].id = V4L2_CID_MPEG_VIDEO_H264_PROFILE; + ext_ctrl[15].value = pH264Param->ProfileIDC; + ext_ctrl[16].id = V4L2_CID_MPEG_VIDEO_H264_LEVEL; + ext_ctrl[16].value = pH264Param->LevelIDC; + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_NUM_REF_PIC_FOR_P; + ext_ctrl[17].value = pH264Param->NumberRefForPframes; + /* + * It should be set using h264Param->NumberBFrames after being handled by appl. 
+ */ + ext_ctrl[18].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; + ext_ctrl[18].value = pH264Param->NumberBFrames; + ext_ctrl[19].id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_MODE; + ext_ctrl[19].value = pH264Param->LoopFilterDisable; + ext_ctrl[20].id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_ALPHA; + ext_ctrl[20].value = pH264Param->LoopFilterAlphaC0Offset; + ext_ctrl[21].id = V4L2_CID_MPEG_VIDEO_H264_LOOP_FILTER_BETA; + ext_ctrl[21].value = pH264Param->LoopFilterBetaOffset; + ext_ctrl[22].id = V4L2_CID_MPEG_VIDEO_H264_ENTROPY_MODE; + ext_ctrl[22].value = pH264Param->SymbolMode; + ext_ctrl[23].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_INTERLACE; + ext_ctrl[23].value = pH264Param->PictureInterlace; + ext_ctrl[24].id = V4L2_CID_MPEG_VIDEO_H264_8X8_TRANSFORM; + ext_ctrl[24].value = pH264Param->Transform8x8Mode; + ext_ctrl[25].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE; + ext_ctrl[25].value = pH264Param->FrameRate; + ext_ctrl[26].id = V4L2_CID_MPEG_VIDEO_H264_B_FRAME_QP; + ext_ctrl[26].value = pH264Param->FrameQp_B; + ext_ctrl[27].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_DARK; + ext_ctrl[27].value = pH264Param->DarkDisable; + ext_ctrl[28].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_SMOOTH; + ext_ctrl[28].value = pH264Param->SmoothDisable; + ext_ctrl[29].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_STATIC; + ext_ctrl[29].value = pH264Param->StaticDisable; + ext_ctrl[30].id = V4L2_CID_MPEG_MFC51_VIDEO_H264_ADAPTIVE_RC_ACTIVITY; + ext_ctrl[30].value = pH264Param->ActivityDisable; + + /* doesn't have to be set */ + ext_ctrl[31].id = V4L2_CID_MPEG_VIDEO_GOP_CLOSURE; + ext_ctrl[31].value = 1; + ext_ctrl[32].id = V4L2_CID_MPEG_VIDEO_H264_I_PERIOD; + ext_ctrl[32].value = 0; + ext_ctrl[33].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; + ext_ctrl[33].value = 0; + ext_ctrl[34].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; + + if (pH264Param->HeaderWithIFrame == 0) { + /* default */ + ext_ctrl[34].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; /* 0: seperated header */ + } else { + ext_ctrl[34].value = V4L2_MPEG_VIDEO_HEADER_MODE_JOINED_WITH_1ST_FRAME; /* 1: header + first frame */ + } + ext_ctrl[35].id = V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_ENABLE; + ext_ctrl[35].value = pH264Param->SarEnable; + ext_ctrl[36].id = V4L2_CID_MPEG_VIDEO_H264_VUI_SAR_IDC; + ext_ctrl[36].value = pH264Param->SarIndex; + ext_ctrl[37].id = V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_WIDTH; + ext_ctrl[37].value = pH264Param->SarWidth; + ext_ctrl[38].id = V4L2_CID_MPEG_VIDEO_H264_VUI_EXT_SAR_HEIGHT; + ext_ctrl[38].value = pH264Param->SarHeight; + + /* Initial parameters : Frame Skip */ + switch (pInitParam->FrameSkip) { + case VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT: + ext_ctrl[39].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[39].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; + break; + case VIDEO_FRAME_SKIP_MODE_BUF_LIMIT: + ext_ctrl[39].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[39].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; + break; + default: + /* VIDEO_FRAME_SKIP_MODE_DISABLE (default) */ + ext_ctrl[39].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[39].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; + break; + } + + /* SVC is not supported yet */ + ext_ctrl[40].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING; + ext_ctrl[40].value = 0; + switch (pH264Param->HierarType) { + case 1: + ext_ctrl[41].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE; + ext_ctrl[41].value = V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_B; + break; + case 0: + default: + ext_ctrl[41].id = 
V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_TYPE; + ext_ctrl[41].value = V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P; + break; + } + ext_ctrl[42].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER; + ext_ctrl[42].value = pH264Param->TemporalSVC.nTemporalLayerCount; + ext_ctrl[43].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[43].value = (0 << 16 | 29); + ext_ctrl[44].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[44].value = (1 << 16 | 29); + ext_ctrl[45].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[45].value = (2 << 16 | 29); + + ext_ctrl[46].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FRAME_PACKING; + ext_ctrl[46].value = 0; + ext_ctrl[47].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FP_CURRENT_FRAME_0; + ext_ctrl[47].value = 0; + ext_ctrl[48].id = V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRANGEMENT_TYPE; + ext_ctrl[48].value = V4L2_MPEG_VIDEO_H264_SEI_FP_TYPE_SIDE_BY_SIDE; + + /* FMO is not supported yet */ + ext_ctrl[49].id = V4L2_CID_MPEG_VIDEO_H264_FMO; + ext_ctrl[49].value = 0; + ext_ctrl[50].id = V4L2_CID_MPEG_VIDEO_H264_FMO_MAP_TYPE; + ext_ctrl[50].value = V4L2_MPEG_VIDEO_H264_FMO_MAP_TYPE_INTERLEAVED_SLICES; + ext_ctrl[51].id = V4L2_CID_MPEG_VIDEO_H264_FMO_SLICE_GROUP; + ext_ctrl[51].value = 4; + ext_ctrl[52].id = V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH; + ext_ctrl[52].value = (0 << 30 | 0); + ext_ctrl[53].id = V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH; + ext_ctrl[53].value = (1 << 30 | 0); + ext_ctrl[54].id = V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH; + ext_ctrl[54].value = (2 << 30 | 0); + ext_ctrl[55].id = V4L2_CID_MPEG_VIDEO_H264_FMO_RUN_LENGTH; + ext_ctrl[55].value = (3 << 30 | 0); + ext_ctrl[56].id = V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_DIRECTION; + ext_ctrl[56].value = V4L2_MPEG_VIDEO_H264_FMO_CHANGE_DIR_RIGHT; + ext_ctrl[57].id = V4L2_CID_MPEG_VIDEO_H264_FMO_CHANGE_RATE; + ext_ctrl[57].value = 0; + + /* ASO is not supported yet */ + ext_ctrl[58].id = V4L2_CID_MPEG_VIDEO_H264_ASO; + ext_ctrl[58].value = 0; + for (i = 0; i < 32; i++) { + ext_ctrl[59 + i].id = V4L2_CID_MPEG_VIDEO_H264_ASO_SLICE_ORDER; + ext_ctrl[59 + i].value = (i << 16 | 0); + } + ext_ctrls.count = H264_CTRL_NUM; + + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT0; + ext_ctrl[i].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[0]; + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT1; + ext_ctrl[i + 1].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[1]; + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT2; + ext_ctrl[i + 2].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[2]; + ext_ctrl[i + 3].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT3; + ext_ctrl[i + 3].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[3]; + ext_ctrl[i + 4].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT4; + ext_ctrl[i + 4].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[4]; + ext_ctrl[i + 5].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT5; + ext_ctrl[i + 5].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[5]; + ext_ctrl[i + 6].id = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT6; + ext_ctrl[i + 6].value = pH264Param->TemporalSVC.nTemporalLayerBitrateRatio[6]; + ext_ctrls.count += 7; + } + +#ifdef CID_SUPPORT + if (pCtx->videoInstInfo.specificInfo.enc.bSkypeSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + + 
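+ /* Skype-related controls are appended after the base H.264 set; 'i' indexes the next free ext_ctrl[] slot */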
/* VUI RESRICTION ENABLE */ + ext_ctrl[i].id = V4L2_CID_MPEG_MFC_H264_VUI_RESTRICTION_ENABLE; + ext_ctrl[i].value = pH264Param->VuiRestrictionEnable; + + /* H264 ENABLE LTR */ + ext_ctrl[i + 1].id = V4L2_CID_MPEG_MFC_H264_ENABLE_LTR; + ext_ctrl[i + 1].value = pH264Param->LTREnable; + + /* FRAME LEVEL QP ENABLE */ + ext_ctrl[i + 2].id = V4L2_CID_MPEG_MFC_CONFIG_QP_ENABLE; + ext_ctrl[i + 2].value = pCommonParam->EnableFRMQpControl; + + /* CONFIG QP VALUE */ + ext_ctrl[i + 3].id = V4L2_CID_MPEG_MFC_CONFIG_QP; + ext_ctrl[i + 3].value = pCommonParam->FrameQp; + + ext_ctrls.count += 4; + } +#endif +#ifdef USE_MFC_MEDIA + if (pCtx->videoInstInfo.specificInfo.enc.bRoiInfoSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_ROI_ENABLE; + ext_ctrl[i].value = pH264Param->ROIEnable; + + ext_ctrls.count += 1; + } +#endif + /* optional : if these are not set, set value are same as I frame */ + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_H264_MIN_QP_P; /* P frame */ + ext_ctrl[i].value = GET_H264_QP_VALUE(pCommonParam->QpRange.QpMin_P); + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP_P; + ext_ctrl[i + 1].value = GET_H264_QP_VALUE(pCommonParam->QpRange.QpMax_P); + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_H264_MIN_QP_B; /* B frame */ + ext_ctrl[i + 2].value = GET_H264_QP_VALUE(pCommonParam->QpRange.QpMin_B); + ext_ctrl[i + 3].id = V4L2_CID_MPEG_VIDEO_H264_MAX_QP_B; + ext_ctrl[i + 3].value = GET_H264_QP_VALUE(pCommonParam->QpRange.QpMax_B); + + ext_ctrls.count += 4; + } + break; + } + + case VIDEO_CODING_MPEG4: + { + ExynosVideoEncMpeg4Param *pMpeg4Param = &pEncParam->codecParam.mpeg4; + + /* common parameters but id is depends on codec */ + ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_MPEG4_I_FRAME_QP; + ext_ctrl[8].value = pCommonParam->FrameQp; + ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_MPEG4_P_FRAME_QP; + ext_ctrl[9].value = pCommonParam->FrameQp_P; + ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP; /* I frame */ + ext_ctrl[10].value = GET_MPEG4_QP_VALUE(pCommonParam->QpRange.QpMax_I); + ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP; + ext_ctrl[11].value = GET_MPEG4_QP_VALUE(pCommonParam->QpRange.QpMin_I); + ext_ctrl[12].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; + ext_ctrl[12].value = pCommonParam->CBRPeriodRf; + + /* MPEG4 specific parameters */ + switch (pCommonParam->SliceMode) { + case 0: + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; + ext_ctrl[13].value = 1; /* default */ + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; + ext_ctrl[14].value = 2800; /* based on MFC6.x */ + break; + case 1: + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; + ext_ctrl[13].value = pMpeg4Param->SliceArgument; + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; + ext_ctrl[14].value = 2800; /* based on MFC6.x */ + break; + case 2: + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB; + ext_ctrl[13].value = 1; /* default */ + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_BYTES; + ext_ctrl[14].value = pMpeg4Param->SliceArgument; + break; + default: + break; + } + + ext_ctrl[15].id = V4L2_CID_MPEG_VIDEO_MPEG4_PROFILE; + ext_ctrl[15].value = pMpeg4Param->ProfileIDC; + ext_ctrl[16].id = V4L2_CID_MPEG_VIDEO_MPEG4_LEVEL; + ext_ctrl[16].value = pMpeg4Param->LevelIDC; + ext_ctrl[17].id = V4L2_CID_MPEG_VIDEO_MPEG4_QPEL; + ext_ctrl[17].value = pMpeg4Param->DisableQpelME; + + /* + * It should be set using mpeg4Param->NumberBFrames 
after being handled by appl. + */ + ext_ctrl[18].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; + ext_ctrl[18].value = pMpeg4Param->NumberBFrames; + + ext_ctrl[19].id = V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_TIME_RES; + ext_ctrl[19].value = pMpeg4Param->TimeIncreamentRes; + ext_ctrl[20].id = V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_FRM_DELTA; + ext_ctrl[20].value = pMpeg4Param->VopTimeIncreament; + ext_ctrl[21].id = V4L2_CID_MPEG_VIDEO_MPEG4_B_FRAME_QP; + ext_ctrl[21].value = pMpeg4Param->FrameQp_B; + ext_ctrl[22].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; + ext_ctrl[22].value = 0; + ext_ctrl[23].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; + ext_ctrl[23].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; + ext_ctrl[24].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; + ext_ctrl[24].value = 1; + + /* Initial parameters : Frame Skip */ + switch (pInitParam->FrameSkip) { + case VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT: + ext_ctrl[25].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[25].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; + break; + case VIDEO_FRAME_SKIP_MODE_BUF_LIMIT: + ext_ctrl[25].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[25].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; + break; + default: + /* VIDEO_FRAME_SKIP_MODE_DISABLE (default) */ + ext_ctrl[25].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[25].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; + break; + } + ext_ctrls.count = MPEG4_CTRL_NUM; + + /* optional : if these are not set, set value are same as I frame */ + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_P; /* P frame */ + ext_ctrl[i].value = GET_MPEG4_QP_VALUE(pCommonParam->QpRange.QpMin_P); + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_P; + ext_ctrl[i + 1].value = GET_MPEG4_QP_VALUE(pCommonParam->QpRange.QpMax_P); + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_B; /* B frame */ + ext_ctrl[i + 2].value = GET_MPEG4_QP_VALUE(pCommonParam->QpRange.QpMin_B); + ext_ctrl[i + 3].id = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_B; + ext_ctrl[i + 3].value = GET_MPEG4_QP_VALUE(pCommonParam->QpRange.QpMax_B); + + ext_ctrls.count += 4; + } + break; + } + + case VIDEO_CODING_H263: + { + ExynosVideoEncH263Param *pH263Param = &pEncParam->codecParam.h263; + + /* common parameters but id is depends on codec */ + ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_H263_I_FRAME_QP; + ext_ctrl[8].value = pCommonParam->FrameQp; + ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_H263_P_FRAME_QP; + ext_ctrl[9].value = pCommonParam->FrameQp_P; + ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_H263_MAX_QP; /* I frame */ + ext_ctrl[10].value = GET_H263_QP_VALUE(pCommonParam->QpRange.QpMax_I); + ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_H263_MIN_QP; + ext_ctrl[11].value = GET_H263_QP_VALUE(pCommonParam->QpRange.QpMin_I); + ext_ctrl[12].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; + ext_ctrl[12].value = pCommonParam->CBRPeriodRf; + + /* H263 specific parameters */ + ext_ctrl[13].id = V4L2_CID_MPEG_MFC51_VIDEO_H263_RC_FRAME_RATE; + ext_ctrl[13].value = pH263Param->FrameRate; + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; + ext_ctrl[14].value = 0; + ext_ctrl[15].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; + ext_ctrl[15].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; + ext_ctrl[16].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; + ext_ctrl[16].value = 1; + + /* Initial parameters : Frame Skip */ + switch (pInitParam->FrameSkip) { + case VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT: + ext_ctrl[17].id = 
V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; + break; + case VIDEO_FRAME_SKIP_MODE_BUF_LIMIT: + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; + break; + default: + /* VIDEO_FRAME_SKIP_MODE_DISABLE (default) */ + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; + break; + } + ext_ctrls.count = H263_CTRL_NUM; + + /* optional : if these are not set, set value are same as I frame */ + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_H263_MIN_QP_P; /* P frame */ + ext_ctrl[i].value = GET_H263_QP_VALUE(pCommonParam->QpRange.QpMin_P); + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_H263_MAX_QP_P; + ext_ctrl[i + 1].value = GET_H263_QP_VALUE(pCommonParam->QpRange.QpMax_P); + + ext_ctrls.count += 2; + } + break; + } + + case VIDEO_CODING_VP8: + { + ExynosVideoEncVp8Param *pVp8Param = &pEncParam->codecParam.vp8; + + /* common parameters but id is depends on codec */ + ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_VP8_I_FRAME_QP; + ext_ctrl[8].value = pCommonParam->FrameQp; + ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_VP8_P_FRAME_QP; + ext_ctrl[9].value = pCommonParam->FrameQp_P; + ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_VP8_MAX_QP; /* I frame */ + ext_ctrl[10].value = GET_VP8_QP_VALUE(pCommonParam->QpRange.QpMax_I); + ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_VP8_MIN_QP; + ext_ctrl[11].value = GET_VP8_QP_VALUE(pCommonParam->QpRange.QpMin_I); + ext_ctrl[12].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; + ext_ctrl[12].value = pCommonParam->CBRPeriodRf; + + /* VP8 specific parameters */ + ext_ctrl[13].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_RC_FRAME_RATE; + ext_ctrl[13].value = pVp8Param->FrameRate; + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; + ext_ctrl[14].value = 0; + ext_ctrl[15].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; + ext_ctrl[15].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; + ext_ctrl[16].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_FIXED_TARGET_BIT; + ext_ctrl[16].value = 1; + + /* Initial parameters : Frame Skip */ + switch (pInitParam->FrameSkip) { + case VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT: + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; + break; + case VIDEO_FRAME_SKIP_MODE_BUF_LIMIT: + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; + break; + default: + /* VIDEO_FRAME_SKIP_MODE_DISABLE (default) */ + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; + break; + } + + ext_ctrl[18].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_VERSION; + ext_ctrl[18].value = pVp8Param->Vp8Version; + + ext_ctrl[19].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_OF_PARTITIONS; + ext_ctrl[19].value = pVp8Param->Vp8NumberOfPartitions; + + ext_ctrl[20].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_LEVEL; + ext_ctrl[20].value = pVp8Param->Vp8FilterLevel; + + ext_ctrl[21].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_SHARPNESS; + ext_ctrl[21].value = pVp8Param->Vp8FilterSharpness; + + ext_ctrl[22].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_GOLDEN_FRAMESEL; + ext_ctrl[22].value = pVp8Param->Vp8GoldenFrameSel; + + ext_ctrl[23].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_ENABLE; + ext_ctrl[23].value = 0; + + 
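+        /*
+         * Note (assumption from the pattern below): each HIERARCHY_QP_LAYERn
+         * value appears to pack the temporal layer index in the upper 16 bits
+         * and a fixed per-layer QP (37) in the lower 16 bits, i.e.
+         * (layer << 16 | qp). The HEVC and VP9 paths use the same packing
+         * with default QPs of 29 and 90 respectively.
+         */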
ext_ctrl[24].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER0; + ext_ctrl[24].value = (0 << 16 | 37); + + ext_ctrl[25].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER1; + ext_ctrl[25].value = (1 << 16 | 37); + + ext_ctrl[26].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER2; + ext_ctrl[26].value = (2 << 16 | 37); + + ext_ctrl[27].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_REF_NUMBER_FOR_PFRAMES; + ext_ctrl[27].value = pVp8Param->RefNumberForPFrame; + + ext_ctrl[28].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_DISABLE_INTRA_MD4X4; + ext_ctrl[28].value = pVp8Param->DisableIntraMd4x4; + + ext_ctrl[29].id = V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_TEMPORAL_LAYER; + ext_ctrl[29].value = pVp8Param->TemporalSVC.nTemporalLayerCount; + ext_ctrls.count = VP8_CTRL_NUM; + + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT0; + ext_ctrl[i].value = pVp8Param->TemporalSVC.nTemporalLayerBitrateRatio[0]; + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT1; + ext_ctrl[i + 1].value = pVp8Param->TemporalSVC.nTemporalLayerBitrateRatio[1]; + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT2; + ext_ctrl[i + 2].value = pVp8Param->TemporalSVC.nTemporalLayerBitrateRatio[2]; + + ext_ctrls.count += 3; + } + + /* optional : if these are not set, set value are same as I frame */ + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_VP8_MIN_QP_P; /* P frame */ + ext_ctrl[i].value = GET_VP8_QP_VALUE(pCommonParam->QpRange.QpMin_P); + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_VP8_MAX_QP_P; + ext_ctrl[i + 1].value = GET_VP8_QP_VALUE(pCommonParam->QpRange.QpMax_P); + + ext_ctrls.count += 2; + } + break; + } +#ifdef USE_HEVCENC_SUPPORT + case VIDEO_CODING_HEVC: + { + ExynosVideoEncHevcParam *pHevcParam = &pEncParam->codecParam.hevc; + + /* common parameters but id is depends on codec */ + ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_QP; + ext_ctrl[8].value = pCommonParam->FrameQp; + ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_QP; + ext_ctrl[9].value = pCommonParam->FrameQp_P; + ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP; /* I frame */ + ext_ctrl[10].value = GET_HEVC_QP_VALUE(pCommonParam->QpRange.QpMax_I); + ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP; + ext_ctrl[11].value = GET_HEVC_QP_VALUE(pCommonParam->QpRange.QpMin_I); + ext_ctrl[12].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; + ext_ctrl[12].value = pCommonParam->CBRPeriodRf; + + /* HEVC specific parameters */ + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_QP; + ext_ctrl[13].value = pHevcParam->FrameQp_B; + ext_ctrl[14].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_RC_FRAME_RATE; + ext_ctrl[14].value = pHevcParam->FrameRate; + + /* Initial parameters : Frame Skip */ + switch (pInitParam->FrameSkip) { + case VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT: + ext_ctrl[15].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[15].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; + break; + case VIDEO_FRAME_SKIP_MODE_BUF_LIMIT: + ext_ctrl[15].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[15].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; + break; + default: + /* VIDEO_FRAME_SKIP_MODE_DISABLE (default) */ + ext_ctrl[15].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[15].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; + break; + } + + ext_ctrl[16].id = 
V4L2_CID_MPEG_VIDEO_HEVC_PROFILE; + ext_ctrl[16].value = pHevcParam->ProfileIDC; + + ext_ctrl[17].id = V4L2_CID_MPEG_VIDEO_HEVC_LEVEL; + ext_ctrl[17].value = pHevcParam->LevelIDC; + + ext_ctrl[18].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TIER_FLAG; + ext_ctrl[18].value = pHevcParam->TierIDC; + + ext_ctrl[19].id = V4L2_CID_MPEG_VIDEO_B_FRAMES; + ext_ctrl[19].value = pHevcParam->NumberBFrames; + + ext_ctrl[20].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_PARTITION_DEPTH; + ext_ctrl[20].value = pHevcParam->MaxPartitionDepth; + + ext_ctrl[21].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REF_NUMBER_FOR_PFRAMES; + ext_ctrl[21].value = pHevcParam->NumberRefForPframes; + + ext_ctrl[22].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_DISABLE; + ext_ctrl[22].value = pHevcParam->LoopFilterDisable; + + ext_ctrl[23].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_SLICE_BOUNDARY; + ext_ctrl[23].value = pHevcParam->LoopFilterSliceFlag; + + ext_ctrl[24].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_TC_OFFSET_DIV2; + ext_ctrl[24].value = pHevcParam->LoopFilterTcOffset; + + ext_ctrl[25].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_BETA_OFFSET_DIV2; + ext_ctrl[25].value = pHevcParam->LoopFilterBetaOffset; + + ext_ctrl[26].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LTR_ENABLE; + ext_ctrl[26].value = pHevcParam->LongtermRefEnable; + + ext_ctrl[27].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_USER_REF; + ext_ctrl[27].value = pHevcParam->LongtermUserRef; + + ext_ctrl[28].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STORE_REF; + ext_ctrl[28].value = pHevcParam->LongtermStoreRef; + + /* should be set V4L2_CID_MPEG_VIDEO_MB_RC_ENABLE first */ + ext_ctrl[29].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_DARK; + ext_ctrl[29].value = pHevcParam->DarkDisable; + + ext_ctrl[30].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_SMOOTH; + ext_ctrl[30].value = pHevcParam->SmoothDisable; + + ext_ctrl[31].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_STATIC; + ext_ctrl[31].value = pHevcParam->StaticDisable; + + ext_ctrl[32].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_ACTIVITY; + ext_ctrl[32].value = pHevcParam->ActivityDisable; + + /* doesn't have to be set */ + ext_ctrl[33].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_TYPE; + ext_ctrl[33].value = 2; + ext_ctrl[34].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_PERIOD; + ext_ctrl[34].value = 0; + ext_ctrl[35].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LOSSLESS_CU_ENABLE; + ext_ctrl[35].value = 0; + ext_ctrl[36].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_CONST_INTRA_PRED_ENABLE; + ext_ctrl[36].value = 0; + ext_ctrl[37].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WAVEFRONT_ENABLE; + ext_ctrl[37].value = 0; + ext_ctrl[38].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIGN_DATA_HIDING; + ext_ctrl[38].value = 1; + ext_ctrl[39].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_GENERAL_PB_ENABLE; + ext_ctrl[39].value = 1; + ext_ctrl[40].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TEMPORAL_ID_ENABLE; + ext_ctrl[40].value = 1; + ext_ctrl[41].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STRONG_SMOTHING_FLAG; + ext_ctrl[41].value = 1; + ext_ctrl[42].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_NUM_MERGE_MV_MINUS1; + ext_ctrl[42].value = 4; + ext_ctrl[43].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_INTRA_PU_SPLIT; + ext_ctrl[43].value = 0; + ext_ctrl[44].id = V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_TMV_PREDICTION; + ext_ctrl[44].value = 0; + ext_ctrl[45].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; + ext_ctrl[45].value = 0; + ext_ctrl[46].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; + ext_ctrl[46].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; + + /* SVC is not supported yet */ + ext_ctrl[47].id = 
V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_QP_ENABLE; + ext_ctrl[47].value = 0; + ext_ctrl[48].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_TYPE; + ext_ctrl[48].value = V4L2_MPEG_VIDEO_H264_HIERARCHICAL_CODING_P; + ext_ctrl[49].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER; + ext_ctrl[49].value = pHevcParam->TemporalSVC.nTemporalLayerCount; + ext_ctrl[50].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[50].value = (0 << 16 | 29); + ext_ctrl[51].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[51].value = (1 << 16 | 29); + ext_ctrl[52].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[52].value = (2 << 16 | 29); + ext_ctrls.count = HEVC_CTRL_NUM; + +#ifdef CID_SUPPORT + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT0; + ext_ctrl[i].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[0]; + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT1; + ext_ctrl[i + 1].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[1]; + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT2; + ext_ctrl[i + 2].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[2]; + ext_ctrl[i + 3].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT3; + ext_ctrl[i + 3].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[3]; + ext_ctrl[i + 4].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT4; + ext_ctrl[i + 4].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[4]; + ext_ctrl[i + 5].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT5; + ext_ctrl[i + 5].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[5]; + ext_ctrl[i + 6].id = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT6; + ext_ctrl[i + 6].value = pHevcParam->TemporalSVC.nTemporalLayerBitrateRatio[6]; + ext_ctrls.count += 7; + } +#endif +#ifdef USE_MFC_MEDIA + if (pCtx->videoInstInfo.specificInfo.enc.bRoiInfoSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_ROI_ENABLE; + ext_ctrl[i].value = pHevcParam->ROIEnable; + + ext_ctrls.count += 1; + } +#endif + + /* optional : if these are not set, set value are same as I frame */ + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_P; /* P frame */ + ext_ctrl[i].value = GET_HEVC_QP_VALUE(pCommonParam->QpRange.QpMin_P); + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_P; + ext_ctrl[i + 1].value = GET_HEVC_QP_VALUE(pCommonParam->QpRange.QpMax_P); + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_B; /* B frame */ + ext_ctrl[i + 2].value = GET_HEVC_QP_VALUE(pCommonParam->QpRange.QpMin_B); + ext_ctrl[i + 3].id = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_B; + ext_ctrl[i + 3].value = GET_HEVC_QP_VALUE(pCommonParam->QpRange.QpMax_B); + + ext_ctrls.count += 4; + } + break; + } +#endif +#ifdef USE_VP9ENC_SUPPORT + case VIDEO_CODING_VP9: + { + ExynosVideoEncVp9Param *pVp9Param = &pEncParam->codecParam.vp9; + + /* VP9 specific parameters */ + ext_ctrl[8].id = V4L2_CID_MPEG_VIDEO_VP9_I_FRAME_QP; + ext_ctrl[8].value = pCommonParam->FrameQp; + + ext_ctrl[9].id = V4L2_CID_MPEG_VIDEO_VP9_P_FRAME_QP; + ext_ctrl[9].value = pCommonParam->FrameQp_P; + + ext_ctrl[10].id = V4L2_CID_MPEG_VIDEO_VP9_MAX_QP; /* I frame */ + ext_ctrl[10].value = GET_VP9_QP_VALUE(pCommonParam->QpRange.QpMax_I); + + 
ext_ctrl[11].id = V4L2_CID_MPEG_VIDEO_VP9_MIN_QP; + ext_ctrl[11].value = GET_VP9_QP_VALUE(pCommonParam->QpRange.QpMin_I); + + ext_ctrl[12].id = V4L2_CID_MPEG_MFC51_VIDEO_RC_REACTION_COEFF; + ext_ctrl[12].value = pCommonParam->CBRPeriodRf; + + ext_ctrl[13].id = V4L2_CID_MPEG_VIDEO_VP9_RC_FRAME_RATE; + ext_ctrl[13].value = pVp9Param->FrameRate; + + ext_ctrl[14].id = V4L2_CID_MPEG_VIDEO_VBV_SIZE; + ext_ctrl[14].value = 0; + + ext_ctrl[15].id = V4L2_CID_MPEG_VIDEO_HEADER_MODE; + ext_ctrl[15].value = V4L2_MPEG_VIDEO_HEADER_MODE_SEPARATE; + + ext_ctrl[16].id = V4L2_CID_MPEG_VIDEO_VP9_DISABLE_INTRA_PU_SPLIT; + ext_ctrl[16].value = 0; + + /* Initial parameters : Frame Skip */ + switch (pInitParam->FrameSkip) { + case VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT: + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT; + break; + case VIDEO_FRAME_SKIP_MODE_BUF_LIMIT: + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_BUF_LIMIT; + break; + default: + /* VIDEO_FRAME_SKIP_MODE_DISABLE (default) */ + ext_ctrl[17].id = V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE; + ext_ctrl[17].value = V4L2_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE_DISABLED; + break; + } + + ext_ctrl[18].id = V4L2_CID_MPEG_VIDEO_VP9_VERSION; + ext_ctrl[18].value = pVp9Param->Vp9Version; + + ext_ctrl[19].id = V4L2_CID_MPEG_VIDEO_VP9_MAX_PARTITION_DEPTH; + ext_ctrl[19].value = pVp9Param->MaxPartitionDepth; + + ext_ctrl[20].id = V4L2_CID_MPEG_VIDEO_VP9_GOLDEN_FRAMESEL; + ext_ctrl[20].value = pVp9Param->Vp9GoldenFrameSel; + + ext_ctrl[21].id = V4L2_CID_MPEG_VIDEO_VP9_GF_REFRESH_PERIOD; + ext_ctrl[21].value = pVp9Param->Vp9GFRefreshPeriod; + + ext_ctrl[22].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHY_QP_ENABLE; + ext_ctrl[22].value = 0; + + ext_ctrl[23].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[23].value = (0 << 16 | 90); + + ext_ctrl[24].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[24].value = (1 << 16 | 90); + + ext_ctrl[25].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_QP; + ext_ctrl[25].value = (2 << 16 | 90); + + ext_ctrl[26].id = V4L2_CID_MPEG_VIDEO_VP9_REF_NUMBER_FOR_PFRAMES; + ext_ctrl[26].value = pVp9Param->RefNumberForPFrame; + + ext_ctrl[27].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER; + ext_ctrl[27].value = pVp9Param->TemporalSVC.nTemporalLayerCount; + + ext_ctrl[28].id = V4L2_CID_MPEG_VIDEO_DISABLE_IVF_HEADER; + ext_ctrl[28].value = 1; + ext_ctrls.count = VP9_CTRL_NUM; + + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT0; + ext_ctrl[i].value = pVp9Param->TemporalSVC.nTemporalLayerBitrateRatio[0]; + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT1; + ext_ctrl[i + 1].value = pVp9Param->TemporalSVC.nTemporalLayerBitrateRatio[1]; + ext_ctrl[i + 2].id = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT2; + ext_ctrl[i + 2].value = pVp9Param->TemporalSVC.nTemporalLayerBitrateRatio[2]; + + ext_ctrls.count += 3; + } + + /* optional : if these are not set, set value are same as I frame */ + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + i = ext_ctrls.count; + ext_ctrl[i].id = V4L2_CID_MPEG_VIDEO_VP9_MIN_QP_P; /* P frame */ + ext_ctrl[i].value = GET_VP9_QP_VALUE(pCommonParam->QpRange.QpMin_P); + ext_ctrl[i + 1].id = V4L2_CID_MPEG_VIDEO_VP9_MAX_QP_P; + 
ext_ctrl[i + 1].value = GET_VP9_QP_VALUE(pCommonParam->QpRange.QpMax_P); + + ext_ctrls.count += 2; + } + break; + } +#endif + default: + ALOGE("[%s] Undefined codec type",__func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + ext_ctrls.ctrl_class = V4L2_CTRL_CLASS_MPEG; + ext_ctrls.controls = ext_ctrl; + + if (exynos_v4l2_s_ext_ctrl(pCtx->hEnc, &ext_ctrls) != 0) { + ALOGE("%s: Failed to s_ext_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Frame Tag + */ +static ExynosVideoErrorType MFC_Encoder_Set_FrameTag( + void *pHandle, + int frameTag) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG, frameTag) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Get Frame Tag + */ +static int MFC_Encoder_Get_FrameTag(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + int frameTag = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + if (exynos_v4l2_g_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG, &frameTag) != 0) { + ALOGE("%s: Failed to g_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return frameTag; +} + +/* + * [Encoder OPS] Set Frame Type + */ +static ExynosVideoErrorType MFC_Encoder_Set_FrameType( + void *pHandle, + ExynosVideoFrameType frameType) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FORCE_FRAME_TYPE, frameType) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Frame Rate + */ +static ExynosVideoErrorType MFC_Encoder_Set_FrameRate( + void *pHandle, + int frameRate) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE_CH, frameRate) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Bit Rate + */ +static ExynosVideoErrorType MFC_Encoder_Set_BitRate( + void *pHandle, + int bitRate) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_BIT_RATE_CH, bitRate) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Quantization Min/Max + */ +static ExynosVideoErrorType MFC_Encoder_Set_QuantizationRange( + 
void *pHandle, ExynosVideoQPRange qpRange) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + int cids[3][2], values[3][2]; /* I, P, B : min & max */ + int i; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __FUNCTION__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (qpRange.QpMin_I > qpRange.QpMax_I) { + ALOGE("%s: QP(I) range(%d, %d) is wrong", __FUNCTION__, qpRange.QpMin_I, qpRange.QpMax_I); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (qpRange.QpMin_P > qpRange.QpMax_P) { + ALOGE("%s: QP(P) range(%d, %d) is wrong", __FUNCTION__, qpRange.QpMin_P, qpRange.QpMax_P); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(cids, 0, sizeof(cids)); + memset(values, 0, sizeof(values)); + + /* codec specific parameters */ + /* common parameters but id is depends on codec */ + switch (pCtx->videoInstInfo.eCodecType) { + case VIDEO_CODING_AVC: + { + if (qpRange.QpMin_B > qpRange.QpMax_B) { + ALOGE("%s: QP(B) range(%d, %d) is wrong", __FUNCTION__, qpRange.QpMin_B, qpRange.QpMax_B); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + cids[0][0] = V4L2_CID_MPEG_VIDEO_H264_MIN_QP; + cids[0][1] = V4L2_CID_MPEG_VIDEO_H264_MAX_QP; + + values[0][0] = GET_H264_QP_VALUE(qpRange.QpMin_I); + values[0][1] = GET_H264_QP_VALUE(qpRange.QpMax_I); + + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + cids[1][0] = V4L2_CID_MPEG_VIDEO_H264_MIN_QP_P; + cids[1][1] = V4L2_CID_MPEG_VIDEO_H264_MAX_QP_P; + cids[2][0] = V4L2_CID_MPEG_VIDEO_H264_MIN_QP_B; + cids[2][1] = V4L2_CID_MPEG_VIDEO_H264_MAX_QP_B; + + values[1][0] = GET_H264_QP_VALUE(qpRange.QpMin_P); + values[1][1] = GET_H264_QP_VALUE(qpRange.QpMax_P); + values[2][0] = GET_H264_QP_VALUE(qpRange.QpMin_B); + values[2][1] = GET_H264_QP_VALUE(qpRange.QpMax_B); + } + break; + } + case VIDEO_CODING_MPEG4: + { + if (qpRange.QpMin_B > qpRange.QpMax_B) { + ALOGE("%s: QP(B) range(%d, %d) is wrong", __FUNCTION__, qpRange.QpMin_B, qpRange.QpMax_B); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + cids[0][0] = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP; + cids[0][1] = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP; + + values[0][0] = GET_MPEG4_QP_VALUE(qpRange.QpMin_I); + values[0][1] = GET_MPEG4_QP_VALUE(qpRange.QpMax_I); + + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + cids[1][0] = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_P; + cids[1][1] = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_P; + cids[2][0] = V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_B; + cids[2][1] = V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_B; + + values[1][0] = GET_MPEG4_QP_VALUE(qpRange.QpMin_P); + values[1][1] = GET_MPEG4_QP_VALUE(qpRange.QpMax_P); + values[2][0] = GET_MPEG4_QP_VALUE(qpRange.QpMin_B); + values[2][1] = GET_MPEG4_QP_VALUE(qpRange.QpMax_B); + } + break; + } + case VIDEO_CODING_H263: + { + cids[0][0] = V4L2_CID_MPEG_VIDEO_H263_MIN_QP; + cids[0][1] = V4L2_CID_MPEG_VIDEO_H263_MAX_QP; + + values[0][0] = GET_H263_QP_VALUE(qpRange.QpMin_I); + values[0][1] = GET_H263_QP_VALUE(qpRange.QpMax_I); + + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + cids[1][0] = V4L2_CID_MPEG_VIDEO_H263_MIN_QP_P; + cids[1][1] = V4L2_CID_MPEG_VIDEO_H263_MAX_QP_P; + + values[1][0] = GET_H263_QP_VALUE(qpRange.QpMin_P); + values[1][1] = GET_H263_QP_VALUE(qpRange.QpMax_P); + } + break; + } + case VIDEO_CODING_VP8: + { + cids[0][0] = V4L2_CID_MPEG_VIDEO_VP8_MIN_QP; + cids[0][1] = V4L2_CID_MPEG_VIDEO_VP8_MAX_QP; + + values[0][0] = GET_VP8_QP_VALUE(qpRange.QpMin_I); + values[0][1] = 
GET_VP8_QP_VALUE(qpRange.QpMax_I); + + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + cids[1][0] = V4L2_CID_MPEG_VIDEO_VP8_MIN_QP_P; + cids[1][1] = V4L2_CID_MPEG_VIDEO_VP8_MAX_QP_P; + + values[1][0] = GET_VP8_QP_VALUE(qpRange.QpMin_P); + values[1][1] = GET_VP8_QP_VALUE(qpRange.QpMax_P); + } + break; + } +#ifdef USE_HEVCENC_SUPPORT + case VIDEO_CODING_HEVC: + { + if (qpRange.QpMin_B > qpRange.QpMax_B) { + ALOGE("%s: QP(B) range(%d, %d) is wrong", __FUNCTION__, qpRange.QpMin_B, qpRange.QpMax_B); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + cids[0][0] = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP; + cids[0][1] = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP; + + values[0][0] = GET_HEVC_QP_VALUE(qpRange.QpMin_I); + values[0][1] = GET_HEVC_QP_VALUE(qpRange.QpMax_I); + + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + cids[1][0] = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_P; + cids[1][1] = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_P; + cids[2][0] = V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_B; + cids[2][1] = V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_B; + + values[1][0] = GET_HEVC_QP_VALUE(qpRange.QpMin_P); + values[1][1] = GET_HEVC_QP_VALUE(qpRange.QpMax_P); + values[2][0] = GET_HEVC_QP_VALUE(qpRange.QpMin_B); + values[2][1] = GET_HEVC_QP_VALUE(qpRange.QpMax_B); + } + break; + } +#endif +#ifdef USE_VP9ENC_SUPPORT + case VIDEO_CODING_VP9: + { + cids[0][0] = V4L2_CID_MPEG_VIDEO_VP9_MIN_QP; + cids[0][1] = V4L2_CID_MPEG_VIDEO_VP9_MAX_QP; + + values[0][0] = GET_VP9_QP_VALUE(qpRange.QpMin_I); + values[0][1] = GET_VP9_QP_VALUE(qpRange.QpMax_I); + + if (pCtx->videoInstInfo.specificInfo.enc.bQpRangePBSupport == VIDEO_TRUE) { + cids[1][0] = V4L2_CID_MPEG_VIDEO_VP9_MIN_QP_P; + cids[1][1] = V4L2_CID_MPEG_VIDEO_VP9_MAX_QP_P; + + values[1][0] = GET_VP9_QP_VALUE(qpRange.QpMin_P); + values[1][1] = GET_VP9_QP_VALUE(qpRange.QpMax_P); + } + break; + } +#endif + default: + { + ALOGE("[%s] Undefined codec type", __FUNCTION__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + break; + } + + for (i = 0; i < (int)(sizeof(cids)/sizeof(cids[0])); i++) { + if (cids[i][0] == 0) + break; + + ALOGV("%s: QP[%d] range (%d / %d)", __FUNCTION__, i, values[i][0], values[i][1]); + + /* keep a calling sequence as Max->Min because dirver has a restriction */ + if (exynos_v4l2_s_ctrl(pCtx->hEnc, cids[i][1], values[i][1]) != 0) { + ALOGE("%s: Failed to s_ctrl for max value", __FUNCTION__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, cids[i][0], values[i][0]) != 0) { + ALOGE("%s: Failed to s_ctrl for min value", __FUNCTION__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Frame Skip + */ +static ExynosVideoErrorType MFC_Encoder_Set_FrameSkip( + void *pHandle, + int frameSkip) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_SKIP_MODE, frameSkip) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set IDR Period + */ +static ExynosVideoErrorType MFC_Encoder_Set_IDRPeriod( + void *pHandle, + int IDRPeriod) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must 
be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_I_PERIOD_CH, IDRPeriod) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Enable Prepend SPS and PPS to every IDR Frames + */ +static ExynosVideoErrorType MFC_Encoder_Enable_PrependSpsPpsToIdr(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + switch ((int)pCtx->videoInstInfo.eCodecType) { + case VIDEO_CODING_AVC: + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_VIDEO_H264_PREPEND_SPSPPS_TO_IDR, 1) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + break; +#if defined(USE_HEVCENC_SUPPORT) && defined(CID_SUPPORT) + case VIDEO_CODING_HEVC: + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_VIDEO_HEVC_PREPEND_SPSPPS_TO_IDR, 1) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + break; +#endif + default: + ALOGE("%s: codec(%x) can't support PrependSpsPpsToIdr", __func__, pCtx->videoInstInfo.eCodecType); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Qos Ratio + */ +static ExynosVideoErrorType MFC_Encoder_Set_QosRatio( + void *pHandle, + int ratio) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_VIDEO_QOS_RATIO, ratio) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Layer Change + */ +static ExynosVideoErrorType MFC_Encoder_Set_LayerChange( + void *pHandle, + TemporalLayerShareBuffer TemporalSVC) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + TemporalLayerShareBuffer *pTLSB = NULL; + unsigned int CID = 0; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pTLSB = (TemporalLayerShareBuffer *)pCtx->pTemporalLayerShareBufferAddr; + if (pCtx->videoInstInfo.specificInfo.enc.bTemporalSvcSupport == VIDEO_FALSE) { + ALOGE("%s: Layer Change is not supported :: codec type(%x), F/W ver(%x)", __func__, pCtx->videoInstInfo.eCodecType, pCtx->videoInstInfo.HwVersion); + ret = VIDEO_ERROR_NOSUPPORT; + goto EXIT; + } + + switch ((int)pCtx->videoInstInfo.eCodecType) { + case VIDEO_CODING_AVC: + CID = V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_CH; + break; +#if defined(USE_HEVCENC_SUPPORT) && defined(CID_SUPPORT) + case VIDEO_CODING_HEVC: + CID = V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_CH; + break; +#endif + case VIDEO_CODING_VP8: + CID = V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_CH; + break; +#ifdef USE_VP9ENC_SUPPORT + case VIDEO_CODING_VP9: + CID = V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_CH; + break; +#endif + default: + ALOGE("%s: this codec type is not supported(%x), F/W ver(%x)", __func__, pCtx->videoInstInfo.eCodecType, pCtx->videoInstInfo.HwVersion); + ret = VIDEO_ERROR_NOSUPPORT; + goto EXIT; + break; + 
} + + memcpy(pTLSB, &TemporalSVC, sizeof(TemporalLayerShareBuffer)); + if (exynos_v4l2_s_ctrl(pCtx->hEnc, CID, pCtx->nTemporalLayerShareBufferFD) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +#ifdef CID_SUPPORT +/* + * [Encoder OPS] Set Dynamic QP Control + */ +static ExynosVideoErrorType MFC_Encoder_Set_DynamicQpControl(void *pHandle, int nQp) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC_CONFIG_QP, nQp) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Mark LTR-frame + */ +static ExynosVideoErrorType MFC_Encoder_Set_MarkLTRFrame(void *pHandle, int nLongTermFrmIdx) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC_H264_MARK_LTR, nLongTermFrmIdx) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Used LTR-frame + */ +static ExynosVideoErrorType MFC_Encoder_Set_UsedLTRFrame(void *pHandle, int nUsedLTRFrameNum) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC_H264_USE_LTR, nUsedLTRFrameNum) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Set Base PID + */ +static ExynosVideoErrorType MFC_Encoder_Set_BasePID(void *pHandle, int nPID) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC_H264_BASE_PRIORITY, nPID) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} +#endif // CID_SUPPORT + +#ifdef USE_MFC_MEDIA +/* + * [Encoder OPS] Set Roi Information + */ +static ExynosVideoErrorType MFC_Encoder_Set_RoiInfo( + void *pHandle, + RoiInfoShareBuffer *pRoiInfo) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + RoiInfoShareBuffer *pRISB = NULL; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pRISB = (RoiInfoShareBuffer *)pCtx->pRoiShareBufferAddr; + if (pCtx->videoInstInfo.specificInfo.enc.bRoiInfoSupport == VIDEO_FALSE) { + ALOGE("%s: ROI Info setting is not supported :: codec type(%x), F/W ver(%x)", __func__, pCtx->videoInstInfo.eCodecType, pCtx->videoInstInfo.HwVersion); + ret = VIDEO_ERROR_NOSUPPORT; + goto EXIT; + } + + memcpy(pRISB, pRoiInfo, sizeof(RoiInfoShareBuffer)); + if 
(exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_VIDEO_ROI_CONTROL, pCtx->nRoiShareBufferFD) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} +#endif // USE_MFC_MEDIA + +/* + * [Encoder Buffer OPS] Enable Cacheable (Input) + */ +static ExynosVideoErrorType MFC_Encoder_Enable_Cacheable_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_CACHEABLE, 2) != 0) { + ALOGE("%s: Failed V4L2_CID_CACHEABLE", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Enable Cacheable (Output) + */ +static ExynosVideoErrorType MFC_Encoder_Enable_Cacheable_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_CACHEABLE, 1) != 0) { + ALOGE("%s: Failed V4L2_CID_CACHEABLE", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Set Shareable Buffer (Input) + */ +static ExynosVideoErrorType MFC_Encoder_Set_Shareable_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pCtx->bShareInbuf = VIDEO_TRUE; + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Set Shareable Buffer (Output) + */ +static ExynosVideoErrorType MFC_Encoder_Set_Shareable_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + pCtx->bShareOutbuf = VIDEO_TRUE; + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Get Buffer (Input) + */ +static ExynosVideoErrorType MFC_Encoder_Get_Buffer_Inbuf( + void *pHandle, + int nIndex, + ExynosVideoBuffer **pBuffer) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + *pBuffer = NULL; + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + if (pCtx->nInbufs <= nIndex) { + *pBuffer = NULL; + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + *pBuffer = (ExynosVideoBuffer *)&pCtx->pInbuf[nIndex]; + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Get Buffer (Output) + */ +static ExynosVideoErrorType MFC_Encoder_Get_Buffer_Outbuf( + void *pHandle, + int nIndex, + ExynosVideoBuffer **pBuffer) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + *pBuffer = NULL; + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + if (pCtx->nOutbufs <= nIndex) { + *pBuffer = NULL; + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + *pBuffer = (ExynosVideoBuffer *)&pCtx->pOutbuf[nIndex]; + 
+EXIT:
+    return ret;
+}
+
+/*
+ * [Encoder Buffer OPS] Set Geometry (Src)
+ */
+static ExynosVideoErrorType MFC_Encoder_Set_Geometry_Inbuf(
+    void                *pHandle,
+    ExynosVideoGeometry *bufferConf)
+{
+    ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle;
+    ExynosVideoErrorType ret = VIDEO_ERROR_NONE;
+
+    struct v4l2_format fmt;
+
+    if (pCtx == NULL) {
+        ALOGE("%s: Video context info must be supplied", __func__);
+        ret = VIDEO_ERROR_BADPARAM;
+        goto EXIT;
+    }
+
+    if (bufferConf == NULL) {
+        ALOGE("%s: Buffer geometry must be supplied", __func__);
+        ret = VIDEO_ERROR_BADPARAM;
+        goto EXIT;
+    }
+
+    memset(&fmt, 0, sizeof(fmt));
+
+    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+    fmt.fmt.pix_mp.pixelformat = __ColorFormatType_To_V4L2PixelFormat(bufferConf->eColorFormat, pCtx->videoInstInfo.HwVersion);
+    fmt.fmt.pix_mp.width = bufferConf->nFrameWidth;
+    fmt.fmt.pix_mp.height = bufferConf->nFrameHeight;
+    fmt.fmt.pix_mp.plane_fmt[0].bytesperline = bufferConf->nStride;
+    fmt.fmt.pix_mp.num_planes = bufferConf->nPlaneCnt;
+
+    if (exynos_v4l2_s_fmt(pCtx->hEnc, &fmt) != 0) {
+        ALOGE("%s: Failed to s_fmt", __func__);
+        ret = VIDEO_ERROR_APIFAIL;
+        goto EXIT;
+    }
+
+    memcpy(&pCtx->inbufGeometry, bufferConf, sizeof(pCtx->inbufGeometry));
+    pCtx->nInbufPlanes = bufferConf->nPlaneCnt;
+
+EXIT:
+    return ret;
+}
+
+/*
+ * [Encoder Buffer OPS] Get Geometry (Src)
+ */
+static ExynosVideoErrorType MFC_Encoder_Get_Geometry_Inbuf(
+    void                *pHandle,
+    ExynosVideoGeometry *bufferConf)
+{
+    ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle;
+    ExynosVideoErrorType ret = VIDEO_ERROR_NONE;
+
+    struct v4l2_format fmt;
+
+    if (pCtx == NULL) {
+        ALOGE("%s: Video context info must be supplied", __func__);
+        ret = VIDEO_ERROR_BADPARAM;
+        goto EXIT;
+    }
+
+    if (bufferConf == NULL) {
+        ALOGE("%s: Buffer geometry must be supplied", __func__);
+        ret = VIDEO_ERROR_BADPARAM;
+        goto EXIT;
+    }
+
+    memset(&fmt, 0, sizeof(fmt));
+
+    fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+    if (exynos_v4l2_g_fmt(pCtx->hEnc, &fmt) != 0) {
+        ALOGE("%s: Failed to g_fmt", __func__);
+        ret = VIDEO_ERROR_APIFAIL;
+        goto EXIT;
+    }
+
+    bufferConf->nFrameWidth = fmt.fmt.pix_mp.width;
+    bufferConf->nFrameHeight = fmt.fmt.pix_mp.height;
+    bufferConf->nSizeImage = fmt.fmt.pix_mp.plane_fmt[0].sizeimage;
+
+EXIT:
+    return ret;
+}
+
+/*
+ * [Encoder Buffer OPS] Set Geometry (Dst)
+ */
+static ExynosVideoErrorType MFC_Encoder_Set_Geometry_Outbuf(
+    void                *pHandle,
+    ExynosVideoGeometry *bufferConf)
+{
+    ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle;
+    ExynosVideoErrorType ret = VIDEO_ERROR_NONE;
+
+    struct v4l2_format fmt;
+
+    if (pCtx == NULL) {
+        ALOGE("%s: Video context info must be supplied", __func__);
+        ret = VIDEO_ERROR_BADPARAM;
+        goto EXIT;
+    }
+
+    if (bufferConf == NULL) {
+        ALOGE("%s: Buffer geometry must be supplied", __func__);
+        ret = VIDEO_ERROR_BADPARAM;
+        goto EXIT;
+    }
+
+    memset(&fmt, 0, sizeof(fmt));
+
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    fmt.fmt.pix_mp.pixelformat = __CodingType_To_V4L2PixelFormat(bufferConf->eCompressionFormat);
+    fmt.fmt.pix_mp.plane_fmt[0].sizeimage = bufferConf->nSizeImage;
+
+    if (exynos_v4l2_s_fmt(pCtx->hEnc, &fmt) != 0) {
+        ALOGE("%s: Failed to s_fmt", __func__);
+        ret = VIDEO_ERROR_APIFAIL;
+        goto EXIT;
+    }
+
+    memcpy(&pCtx->outbufGeometry, bufferConf, sizeof(pCtx->outbufGeometry));
+    pCtx->nOutbufPlanes = bufferConf->nPlaneCnt;
+
+EXIT:
+    return ret;
+}
+
+/*
+ * [Encoder Buffer OPS] Get Geometry (Dst)
+ */
+static ExynosVideoErrorType 
MFC_Encoder_Get_Geometry_Outbuf( + void *pHandle, + ExynosVideoGeometry *bufferConf) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_format fmt; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (bufferConf == NULL) { + ALOGE("%s: Buffer geometry must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&fmt, 0, sizeof(fmt)); + + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + if (exynos_v4l2_g_fmt(pCtx->hEnc, &fmt) != 0) { + ALOGE("%s: Failed to g_fmt", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + bufferConf->nSizeImage = fmt.fmt.pix_mp.plane_fmt[0].sizeimage; + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Setup (Src) + */ +static ExynosVideoErrorType MFC_Encoder_Setup_Inbuf( + void *pHandle, + unsigned int nBufferCount) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoPlane *pVideoPlane = NULL; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_requestbuffers req; + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + int i, j; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (nBufferCount == 0) { + nBufferCount = MAX_INPUTBUFFER_COUNT; + ALOGV("%s: Change buffer count %d", __func__, nBufferCount); + } + + memset(&req, 0, sizeof(req)); + + req.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + req.count = nBufferCount; + + if (pCtx->bShareInbuf == VIDEO_TRUE) + req.memory = pCtx->videoInstInfo.nMemoryType; + else + req.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_reqbufs(pCtx->hEnc, &req) != 0) { + ALOGE("Failed to require buffer"); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pCtx->nInbufs = (int)req.count; + + pCtx->pInbuf = malloc(sizeof(*pCtx->pInbuf) * pCtx->nInbufs); + if (pCtx->pInbuf == NULL) { + ALOGE("%s: Failed to allocate input buffer context", __func__); + ret = VIDEO_ERROR_NOMEM; + goto EXIT; + } + memset(pCtx->pInbuf, 0, sizeof(*pCtx->pInbuf) * pCtx->nInbufs); + + memset(&buf, 0, sizeof(buf)); + + if (pCtx->bShareInbuf == VIDEO_FALSE) { + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.memory = V4L2_MEMORY_MMAP; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + for (i = 0; i < pCtx->nInbufs; i++) { + buf.index = i; + if (exynos_v4l2_querybuf(pCtx->hEnc, &buf) != 0) { + ALOGE("%s: Failed to querybuf", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + for (j = 0; j < pCtx->nInbufPlanes; j++) { + pVideoPlane = &pCtx->pInbuf[i].planes[j]; + pVideoPlane->addr = mmap(NULL, + buf.m.planes[j].length, PROT_READ | PROT_WRITE, + MAP_SHARED, pCtx->hEnc, buf.m.planes[j].m.mem_offset); + + if (pVideoPlane->addr == MAP_FAILED) { + ALOGE("%s: Failed to map", __func__); + ret = VIDEO_ERROR_MAPFAIL; + goto EXIT; + } + + pVideoPlane->allocSize = buf.m.planes[j].length; + pVideoPlane->dataSize = 0; + } + + pCtx->pInbuf[i].pGeometry = &pCtx->inbufGeometry; + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + pCtx->pInbuf[i].bRegistered = VIDEO_TRUE; + + } + } else { + for (i = 0; i < pCtx->nInbufs; i++) { + pCtx->pInbuf[i].pGeometry = &pCtx->inbufGeometry; + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + pCtx->pInbuf[i].bRegistered = VIDEO_FALSE; + } + } + + return ret; + +EXIT: + if ((pCtx != NULL) && (pCtx->pInbuf != NULL)) { + if (pCtx->bShareInbuf == VIDEO_FALSE) { + for (i = 0; i < 
pCtx->nInbufs; i++) { + for (j = 0; j < pCtx->nInbufPlanes; j++) { + pVideoPlane = &pCtx->pInbuf[i].planes[j]; + if (pVideoPlane->addr == MAP_FAILED) { + pVideoPlane->addr = NULL; + break; + } + + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + } + } + } + + free(pCtx->pInbuf); + pCtx->pInbuf = NULL; + } + + return ret; +} + +/* + * [Encoder Buffer OPS] Setup (Dst) + */ +static ExynosVideoErrorType MFC_Encoder_Setup_Outbuf( + void *pHandle, + unsigned int nBufferCount) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoPlane *pVideoPlane = NULL; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct v4l2_requestbuffers req; + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + int i, j; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (nBufferCount == 0) { + nBufferCount = MAX_OUTPUTBUFFER_COUNT; + ALOGV("%s: Change buffer count %d", __func__, nBufferCount); + } + + memset(&req, 0, sizeof(req)); + + req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + req.count = nBufferCount; + + if (pCtx->bShareOutbuf == VIDEO_TRUE) + req.memory = pCtx->videoInstInfo.nMemoryType; + else + req.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_reqbufs(pCtx->hEnc, &req) != 0) { + ALOGE("%s: Failed to reqbuf", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pCtx->nOutbufs = req.count; + + pCtx->pOutbuf = malloc(sizeof(*pCtx->pOutbuf) * pCtx->nOutbufs); + if (pCtx->pOutbuf == NULL) { + ALOGE("%s: Failed to allocate output buffer context", __func__); + ret = VIDEO_ERROR_NOMEM; + goto EXIT; + } + memset(pCtx->pOutbuf, 0, sizeof(*pCtx->pOutbuf) * pCtx->nOutbufs); + + memset(&buf, 0, sizeof(buf)); + + if (pCtx->bShareOutbuf == VIDEO_FALSE) { + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.memory = V4L2_MEMORY_MMAP; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + for (i = 0; i < pCtx->nOutbufs; i++) { + buf.index = i; + if (exynos_v4l2_querybuf(pCtx->hEnc, &buf) != 0) { + ALOGE("%s: Failed to querybuf", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + for (j = 0; j < pCtx->nOutbufPlanes; j++) { + pVideoPlane = &pCtx->pOutbuf[i].planes[j]; + pVideoPlane->addr = mmap(NULL, + buf.m.planes[j].length, PROT_READ | PROT_WRITE, + MAP_SHARED, pCtx->hEnc, buf.m.planes[j].m.mem_offset); + + if (pVideoPlane->addr == MAP_FAILED) { + ALOGE("%s: Failed to map", __func__); + ret = VIDEO_ERROR_MAPFAIL; + goto EXIT; + } + + pVideoPlane->allocSize = buf.m.planes[j].length; + pVideoPlane->dataSize = 0; + } + + pCtx->pOutbuf[i].pGeometry = &pCtx->outbufGeometry; + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + pCtx->pOutbuf[i].bRegistered = VIDEO_TRUE; + } + } else { + for (i = 0; i < pCtx->nOutbufs; i++ ) { + pCtx->pOutbuf[i].pGeometry = &pCtx->outbufGeometry; + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + pCtx->pOutbuf[i].bRegistered = VIDEO_FALSE; + } + } + + return ret; + +EXIT: + if ((pCtx != NULL) && (pCtx->pOutbuf != NULL)) { + if (pCtx->bShareOutbuf == VIDEO_FALSE) { + for (i = 0; i < pCtx->nOutbufs; i++) { + for (j = 0; j < pCtx->nOutbufPlanes; j++) { + pVideoPlane = &pCtx->pOutbuf[i].planes[j]; + if (pVideoPlane->addr == MAP_FAILED) { + pVideoPlane->addr = NULL; + break; + } + + munmap(pVideoPlane->addr, pVideoPlane->allocSize); + } + } + } + + free(pCtx->pOutbuf); + pCtx->pOutbuf = NULL; + } + + return ret; +} + +/* + * [Encoder Buffer OPS] Run (src) + */ +static ExynosVideoErrorType MFC_Encoder_Run_Inbuf(void *pHandle) +{ + 
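+    /*
+     * In the V4L2 mem2mem model the OUTPUT_MPLANE queue carries source (raw)
+     * frames from userspace to the encoder, so streaming it on here lets the
+     * driver start consuming queued input buffers; the encoded bitstream is
+     * returned on the CAPTURE_MPLANE queue (see MFC_Encoder_Run_Outbuf).
+     */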
ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) { + if (exynos_v4l2_streamon(pCtx->hEnc, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) != 0) { + ALOGE("%s: Failed to streamon for input buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonInbuf = VIDEO_TRUE; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Run (Dst) + */ +static ExynosVideoErrorType MFC_Encoder_Run_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + if (exynos_v4l2_streamon(pCtx->hEnc, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) != 0) { + ALOGE("%s: Failed to streamon for output buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonOutbuf = VIDEO_TRUE; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Stop (Src) + */ +static ExynosVideoErrorType MFC_Encoder_Stop_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_TRUE) { + if (exynos_v4l2_streamoff(pCtx->hEnc, V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) != 0) { + ALOGE("%s: Failed to streamoff for input buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonInbuf = VIDEO_FALSE; + } + + for (i = 0; i < pCtx->nInbufs; i++) { + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Stop (Dst) + */ +static ExynosVideoErrorType MFC_Encoder_Stop_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_TRUE) { + if (exynos_v4l2_streamoff(pCtx->hEnc, V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) != 0) { + ALOGE("%s: Failed to streamoff for output buffer", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->bStreamonOutbuf = VIDEO_FALSE; + } + + for (i = 0; i < pCtx->nOutbufs; i++) { + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Wait (Src) + */ +static ExynosVideoErrorType MFC_Encoder_Wait_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct pollfd poll_events; + int poll_state; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + poll_events.fd = pCtx->hEnc; + poll_events.events = POLLOUT | POLLERR; + poll_events.revents = 0; + + do { + poll_state = poll((struct pollfd*)&poll_events, 1, VIDEO_ENCODER_POLL_TIMEOUT); + if (poll_state > 0) { + if (poll_events.revents & POLLOUT) { + break; + } else { + ALOGE("%s: Poll return error", __func__); + ret = VIDEO_ERROR_POLL; + break; + } + } else if 
(poll_state < 0) { + ALOGE("%s: Poll state error", __func__); + ret = VIDEO_ERROR_POLL; + break; + } + } while (poll_state == 0); + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Wait (Dst) + */ +static ExynosVideoErrorType MFC_Encoder_Wait_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + struct pollfd poll_events; + int poll_state; + int bframe_count = 0; // FIXME + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + poll_events.fd = pCtx->hEnc; + poll_events.events = POLLIN | POLLERR; + poll_events.revents = 0; + + do { + poll_state = poll((struct pollfd*)&poll_events, 1, VIDEO_ENCODER_POLL_TIMEOUT); + if (poll_state > 0) { + if (poll_events.revents & POLLIN) { + break; + } else { + ALOGE("%s: Poll return error", __func__); + ret = VIDEO_ERROR_POLL; + break; + } + } else if (poll_state < 0) { + ALOGE("%s: Poll state error", __func__); + ret = VIDEO_ERROR_POLL; + break; + } else { + bframe_count++; // FIXME + } + } while (poll_state == 0 && bframe_count < 5); // FIXME + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Encoder_Register_Inbuf( + void *pHandle, + ExynosVideoPlane *planes, + int nPlanes) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex; + + if ((pCtx == NULL) || (planes == NULL) || (nPlanes != pCtx->nInbufPlanes)) { + ALOGE("%s: input params must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + if (pCtx->pInbuf[nIndex].bRegistered == VIDEO_FALSE) { + int plane; + for (plane = 0; plane < nPlanes; plane++) { + pCtx->pInbuf[nIndex].planes[plane].addr = planes[plane].addr; + pCtx->pInbuf[nIndex].planes[plane].allocSize = planes[plane].allocSize; + pCtx->pInbuf[nIndex].planes[plane].fd = planes[plane].fd; + } + pCtx->pInbuf[nIndex].bRegistered = VIDEO_TRUE; + break; + } + } + + if (nIndex == pCtx->nInbufs) { + ALOGE("%s: can not find non-registered input buffer", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Encoder_Register_Outbuf( + void *pHandle, + ExynosVideoPlane *planes, + int nPlanes) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex; + + if ((pCtx == NULL) || (planes == NULL) || (nPlanes != pCtx->nOutbufPlanes)) { + ALOGE("%s: params must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + if (pCtx->pOutbuf[nIndex].bRegistered == VIDEO_FALSE) { + int plane; + for (plane = 0; plane < nPlanes; plane++) { + pCtx->pOutbuf[nIndex].planes[plane].addr = planes[plane].addr; + pCtx->pOutbuf[nIndex].planes[plane].allocSize = planes[plane].allocSize; + pCtx->pOutbuf[nIndex].planes[plane].fd = planes[plane].fd; + } + pCtx->pOutbuf[nIndex].bRegistered = VIDEO_TRUE; + break; + } + } + + if (nIndex == pCtx->nOutbufs) { + ALOGE("%s: can not find non-registered output buffer", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Encoder_Clear_RegisteredBuffer_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex = -1, plane; + + if (pCtx == NULL) { + ALOGE("%s: Video context 
info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + for (plane = 0; plane < pCtx->nInbufPlanes; plane++) + pCtx->pInbuf[nIndex].planes[plane].addr = NULL; + pCtx->pInbuf[nIndex].bRegistered = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Encoder_Clear_RegisteredBuffer_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int nIndex = -1, plane; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + for (plane = 0; plane < pCtx->nOutbufPlanes; plane++) + pCtx->pOutbuf[nIndex].planes[plane].addr = NULL; + pCtx->pOutbuf[nIndex].bRegistered = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Find (Input) + */ +static int MFC_Encoder_Find_Inbuf( + void *pHandle, + unsigned char *pBuffer) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + if (pCtx->pInbuf[nIndex].bQueued == VIDEO_FALSE) { + if ((pBuffer == NULL) || + (pCtx->pInbuf[nIndex].planes[0].addr == pBuffer)) + break; + } + } + + if (nIndex == pCtx->nInbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Encoder Buffer OPS] Find (Output) + */ +static int MFC_Encoder_Find_Outbuf( + void *pHandle, + unsigned char *pBuffer) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + if (pCtx->pOutbuf[nIndex].bQueued == VIDEO_FALSE) { + if ((pBuffer == NULL) || + (pCtx->pOutbuf[nIndex].planes[0].addr == pBuffer)) + break; + } + } + + if (nIndex == pCtx->nOutbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Encoder Buffer OPS] Enqueue (Input) + */ +static ExynosVideoErrorType MFC_Encoder_Enqueue_Inbuf( + void *pHandle, + void *pBuffer[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int index, i, flags = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nInbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nInbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + index = MFC_Encoder_Find_Inbuf(pCtx, pBuffer[0]); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGW("%s: Matching Buffer index not found", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + buf.index = index; + if (pCtx->bShareInbuf == VIDEO_TRUE) { + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + 
buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pCtx->pInbuf[buf.index].planes[i].fd; + + buf.m.planes[i].length = pCtx->pInbuf[index].planes[i].allocSize; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + } + } else { + buf.memory = V4L2_MEMORY_MMAP; + for (i = 0; i < nPlanes; i++) { + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + } + } + + if (dataSize[0] <= 0) { + flags = EMPTY_DATA | LAST_FRAME; + ALOGD("%s: EMPTY DATA", __FUNCTION__); + } else { + if ((((OMX_BUFFERHEADERTYPE *)pPrivate)->nFlags & OMX_BUFFERFLAG_EOS) == OMX_BUFFERFLAG_EOS) + flags = LAST_FRAME; + + if (flags & LAST_FRAME) + ALOGD("%s: DATA with flags(0x%x)", __FUNCTION__, flags); + } +#ifdef USE_ORIGINAL_HEADER + buf.reserved2 = flags; +#else + buf.input = flags; +#endif + + signed long long sec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp / 1E6); + signed long long usec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp) - (sec * 1E6); + buf.timestamp.tv_sec = (long)sec; + buf.timestamp.tv_usec = (long)usec; + + pCtx->pInbuf[buf.index].pPrivate = pPrivate; + pCtx->pInbuf[buf.index].bQueued = VIDEO_TRUE; + + if (pCtx->videoInstInfo.eCodecType == VIDEO_CODING_VP8) { + int oldFrameRate = 0; + int curFrameRate = 0; + int64_t curDuration = 0; + + curDuration = ((int64_t)((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp - pCtx->oldTimeStamp); + if ((curDuration > 0) && (pCtx->oldDuration > 0)) { + oldFrameRate = (1E6 / pCtx->oldDuration); + curFrameRate = (1E6 / curDuration); + + if (((curFrameRate - oldFrameRate) >= FRAME_RATE_CHANGE_THRESH_HOLD) || + ((oldFrameRate - curFrameRate) >= FRAME_RATE_CHANGE_THRESH_HOLD)) { + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE_CH, curFrameRate) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->oldFrameRate = curFrameRate; + } + } + + if (curDuration > 0) + pCtx->oldDuration = curDuration; + pCtx->oldTimeStamp = (int64_t)((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp; + } + + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hEnc, &buf) != 0) { + ALOGE("%s: Failed to enqueue input buffer", __func__); + pthread_mutex_lock(pMutex); + pCtx->pInbuf[buf.index].pPrivate = NULL; + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Enqueue (Output) + */ +static ExynosVideoErrorType MFC_Encoder_Enqueue_Outbuf( + void *pHandle, + void *pBuffer[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int i, index; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nOutbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nOutbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + index = MFC_Encoder_Find_Outbuf(pCtx, pBuffer[0]); + if (index == 
-1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + buf.index = index; + if (pCtx->bShareOutbuf == VIDEO_TRUE) { + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pCtx->pOutbuf[index].planes[i].fd; + + buf.m.planes[i].length = pCtx->pOutbuf[index].planes[i].allocSize; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + } + } else { + buf.memory = V4L2_MEMORY_MMAP; + } + + pCtx->pOutbuf[buf.index].pPrivate = pPrivate; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_TRUE; + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hEnc, &buf) != 0) { + ALOGE("%s: Failed to enqueue output buffer", __func__); + pthread_mutex_lock(pMutex); + pCtx->pOutbuf[buf.index].pPrivate = NULL; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Enqueue All (Output) + */ +static ExynosVideoErrorType MFC_Encoder_Enqueue_All_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + void *pBuffer[VIDEO_BUFFER_MAX_PLANES] = {NULL, }; + unsigned int dataSize[VIDEO_BUFFER_MAX_PLANES] = {0, }; + + int i; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (i = 0; i < pCtx->nOutbufs; i++) { + ret = MFC_Encoder_Enqueue_Outbuf(pCtx, pBuffer, dataSize, 1, NULL); + if (ret != VIDEO_ERROR_NONE) + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] Dequeue (Input) + */ +static ExynosVideoBuffer *MFC_Encoder_Dequeue_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoBuffer *pInbuf = NULL; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) { + pInbuf = NULL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + if (pCtx->bShareInbuf == VIDEO_TRUE) + buf.memory = pCtx->videoInstInfo.nMemoryType; + else + buf.memory = V4L2_MEMORY_MMAP; + + if (exynos_v4l2_dqbuf(pCtx->hEnc, &buf) != 0) { + pInbuf = NULL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + + pInbuf = &pCtx->pInbuf[buf.index]; + if (pInbuf->bQueued == VIDEO_FALSE) { + pInbuf = NULL; + pthread_mutex_unlock(pMutex); + goto EXIT; + } + + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + +EXIT: + return pInbuf; +} + +/* + * [Encoder Buffer OPS] Dequeue (Output) + */ +static ExynosVideoBuffer *MFC_Encoder_Dequeue_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoBuffer *pOutbuf = NULL; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + int value, plane; + int ret = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + 
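As a rough usage sketch (the function name and handling are illustrative, and pOutbufOps/pHandle are assumed to come from the usual Register/Init sequence), a client drains one encoded bitstream buffer and re-queues it for reuse like this:

#include <stdio.h>
#include "ExynosVideoApi.h"

/* Dequeue one filled bitstream buffer, consume it, then give it back. */
static void drain_one_bitstream_buffer(ExynosVideoEncBufferOps *pOutbufOps, void *pHandle)
{
    void *addr[VIDEO_BUFFER_MAX_PLANES];
    unsigned int size[VIDEO_BUFFER_MAX_PLANES];
    ExynosVideoBuffer *pBuf = pOutbufOps->Dequeue(pHandle);

    if ((pBuf == NULL) || (pBuf == (ExynosVideoBuffer *)VIDEO_ERROR_DQBUF_EIO))
        return;                               /* nothing ready, or MFC H/W error */

    /* dataSize came from v4l2 bytesused, frameType from the KEY/P/B buf.flags. */
    printf("encoded %u bytes, frame type %d\n",
           pBuf->planes[0].dataSize, (int)pBuf->frameType);

    /* ... hand pBuf->planes[0].addr to the consumer here, then re-queue the
     * same buffer (looked up again by its plane-0 address) for reuse.        */
    addr[0] = pBuf->planes[0].addr;
    size[0] = 0;
    pOutbufOps->Enqueue(pHandle, addr, size, 1, NULL);
}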
pOutbuf = NULL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + if (pCtx->bShareOutbuf == VIDEO_TRUE) + buf.memory = pCtx->videoInstInfo.nMemoryType; + else + buf.memory = V4L2_MEMORY_MMAP; + + /* returning DQBUF_EIO means MFC H/W status is invalid */ + ret = exynos_v4l2_dqbuf(pCtx->hEnc, &buf); + if (ret != 0) { + if (errno == EIO) + pOutbuf = (ExynosVideoBuffer *)VIDEO_ERROR_DQBUF_EIO; + else + pOutbuf = NULL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + + pOutbuf = &pCtx->pOutbuf[buf.index]; + if (pOutbuf->bQueued == VIDEO_FALSE) { + pOutbuf = NULL; + pthread_mutex_unlock(pMutex); + goto EXIT; + } + + for (plane = 0; plane < pCtx->nOutbufPlanes; plane++) + pOutbuf->planes[plane].dataSize = buf.m.planes[plane].bytesused; + + switch (buf.flags & (0x7 << 3)) { + case V4L2_BUF_FLAG_KEYFRAME: + pOutbuf->frameType = VIDEO_FRAME_I; + break; + case V4L2_BUF_FLAG_PFRAME: + pOutbuf->frameType = VIDEO_FRAME_P; + break; + case V4L2_BUF_FLAG_BFRAME: + pOutbuf->frameType = VIDEO_FRAME_B; + break; + default: + ALOGI("%s: encoded frame type is = %d",__func__, (buf.flags & (0x7 << 3))); + pOutbuf->frameType = VIDEO_FRAME_OTHERS; + break; + }; + + pOutbuf->bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + +EXIT: + return pOutbuf; +} + +static ExynosVideoErrorType MFC_Encoder_Clear_Queued_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (i = 0; i < pCtx->nInbufs; i++) { + pCtx->pInbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + +static ExynosVideoErrorType MFC_Encoder_Clear_Queued_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int i = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + for (i = 0; i < pCtx->nOutbufs; i++) { + pCtx->pOutbuf[i].bQueued = VIDEO_FALSE; + } + +EXIT: + return ret; +} + + +/* + * [Encoder Buffer OPS] FindIndex (Input) + */ +static int MFC_Encoder_FindEmpty_Inbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nInbufs; nIndex++) { + if (pCtx->pInbuf[nIndex].bQueued == VIDEO_FALSE) { + break; + } + } + + if (nIndex == pCtx->nInbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Encoder Buffer OPS] FindIndex (Output) + */ +static int MFC_Encoder_FindEmpty_Outbuf(void *pHandle) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + int nIndex = -1; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + goto EXIT; + } + + for (nIndex = 0; nIndex < pCtx->nOutbufs; nIndex++) { + if (pCtx->pOutbuf[nIndex].bQueued == VIDEO_FALSE) + break; + } + + if (nIndex == pCtx->nInbufs) + nIndex = -1; + +EXIT: + return nIndex; +} + +/* + * [Encoder Buffer OPS] ExtensionEnqueue (Input) + */ +static ExynosVideoErrorType MFC_Encoder_ExtensionEnqueue_Inbuf( + void *pHandle, + void *pBuffer[], + int pFd[], + unsigned int allocLen[], + unsigned int 
dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int index, i, flags = 0; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nInbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nInbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + index = MFC_Encoder_FindEmpty_Inbuf(pCtx); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + buf.index = index; + buf.memory = pCtx->videoInstInfo.nMemoryType; + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pFd[i]; + + buf.m.planes[i].length = allocLen[i]; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + + /* Temporary storage for Dequeue */ + pCtx->pInbuf[buf.index].planes[i].addr = pBuffer[i]; + pCtx->pInbuf[buf.index].planes[i].fd = pFd[i]; + pCtx->pInbuf[buf.index].planes[i].allocSize = allocLen[i]; + } + + if (dataSize[0] <= 0) { + flags = EMPTY_DATA | LAST_FRAME; + ALOGD("%s: EMPTY DATA", __FUNCTION__); + } else { + if ((((OMX_BUFFERHEADERTYPE *)pPrivate)->nFlags & OMX_BUFFERFLAG_EOS) == OMX_BUFFERFLAG_EOS) + flags = LAST_FRAME; + + if (flags & LAST_FRAME) + ALOGD("%s: DATA with flags(0x%x)", __FUNCTION__, flags); + } +#ifdef USE_ORIGINAL_HEADER + buf.reserved2 = flags; +#else + buf.input = flags; +#endif + + signed long long sec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp / 1E6); + signed long long usec = (((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp) - (sec * 1E6); + buf.timestamp.tv_sec = (long)sec; + buf.timestamp.tv_usec = (long)usec; + + pCtx->pInbuf[buf.index].pPrivate = pPrivate; + pCtx->pInbuf[buf.index].bQueued = VIDEO_TRUE; + + if (pCtx->videoInstInfo.eCodecType == VIDEO_CODING_VP8) { + int oldFrameRate = 0; + int curFrameRate = 0; + int64_t curDuration = 0; + + curDuration = ((int64_t)((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp - pCtx->oldTimeStamp); + if ((curDuration > 0) && (pCtx->oldDuration > 0)) { + oldFrameRate = (1E6 / pCtx->oldDuration); + curFrameRate = (1E6 / curDuration); + + if (((curFrameRate - oldFrameRate) >= FRAME_RATE_CHANGE_THRESH_HOLD) || + ((oldFrameRate - curFrameRate) >= FRAME_RATE_CHANGE_THRESH_HOLD)) { + if (exynos_v4l2_s_ctrl(pCtx->hEnc, V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE_CH, curFrameRate) != 0) { + ALOGE("%s: Failed to s_ctrl", __func__); + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + pCtx->oldFrameRate = curFrameRate; + } + } + + if (curDuration > 0) + pCtx->oldDuration = curDuration; + pCtx->oldTimeStamp = (int64_t)((OMX_BUFFERHEADERTYPE *)pPrivate)->nTimeStamp; + } + + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hEnc, &buf) != 0) { + ALOGE("%s: Failed to enqueue input buffer", __func__); + pthread_mutex_lock(pMutex); + pCtx->pInbuf[buf.index].pPrivate = NULL; + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + 
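The enqueue paths above convert the OMX nTimeStamp, which is in microseconds, into the struct timeval that V4L2 expects. A standalone integer version of that split, shown only for clarity:

#include <sys/time.h>

/* Whole seconds plus the microsecond remainder, matching the sec/usec split
 * done right before QBUF. */
static struct timeval omx_usec_to_timeval(long long nTimeStampUs)
{
    struct timeval tv;

    tv.tv_sec  = (long)(nTimeStampUs / 1000000LL);
    tv.tv_usec = (long)(nTimeStampUs - ((long long)tv.tv_sec * 1000000LL));
    return tv;
}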
pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] ExtensionDequeue (Input) + */ +static ExynosVideoErrorType MFC_Encoder_ExtensionDequeue_Inbuf( + void *pHandle, + ExynosVideoBuffer *pVideoBuffer) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonInbuf == VIDEO_FALSE) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nInbufPlanes;; + buf.memory = pCtx->videoInstInfo.nMemoryType; + if (exynos_v4l2_dqbuf(pCtx->hEnc, &buf) != 0) { + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pInMutex; + pthread_mutex_lock(pMutex); + + if (pCtx->pInbuf[buf.index].bQueued == VIDEO_TRUE) + memcpy(pVideoBuffer, &pCtx->pInbuf[buf.index], sizeof(ExynosVideoBuffer)); + else + ret = VIDEO_ERROR_NOBUFFERS; + memset(&pCtx->pInbuf[buf.index], 0, sizeof(ExynosVideoBuffer)); + + pCtx->pInbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + +EXIT: + return ret; +} + +/* + * [Encoder Buffer OPS] ExtensionEnqueue (Output) + */ +static ExynosVideoErrorType MFC_Encoder_ExtensionEnqueue_Outbuf( + void *pHandle, + void *pBuffer[], + int pFd[], + unsigned int allocLen[], + unsigned int dataSize[], + int nPlanes, + void *pPrivate) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + pthread_mutex_t *pMutex = NULL; + + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + struct v4l2_buffer buf; + int index, i; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->nOutbufPlanes < nPlanes) { + ALOGE("%s: Number of max planes : %d, nPlanes : %d", __func__, + pCtx->nOutbufPlanes, nPlanes); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + index = MFC_Encoder_FindEmpty_Outbuf(pCtx); + if (index == -1) { + pthread_mutex_unlock(pMutex); + ALOGE("%s: Failed to get index", __func__); + ret = VIDEO_ERROR_NOBUFFERS; + goto EXIT; + } + + buf.index = index; + buf.memory = pCtx->videoInstInfo.nMemoryType; + + for (i = 0; i < nPlanes; i++) { + if (buf.memory == V4L2_MEMORY_USERPTR) + buf.m.planes[i].m.userptr = (unsigned long)pBuffer[i]; + else + buf.m.planes[i].m.fd = pFd[i]; + + buf.m.planes[i].length = allocLen[i]; + buf.m.planes[i].bytesused = dataSize[i]; + buf.m.planes[i].data_offset = 0; + + /* Temporary storage for Dequeue */ + pCtx->pOutbuf[buf.index].planes[i].addr = pBuffer[i]; + pCtx->pOutbuf[buf.index].planes[i].fd = pFd[i]; + pCtx->pOutbuf[buf.index].planes[i].allocSize = allocLen[i]; + } + + pCtx->pOutbuf[buf.index].pPrivate = pPrivate; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_TRUE; + pthread_mutex_unlock(pMutex); + + if (exynos_v4l2_qbuf(pCtx->hEnc, &buf) != 0) { + ALOGE("%s: Failed to enqueue output buffer", __func__); + pthread_mutex_lock(pMutex); + 
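A sketch of how the ExtensionEnqueue path might be driven with imported dmabuf fds; the wrapper, fd values and two-plane layout (e.g. an NV12M luma/chroma pair) are assumptions, and pOmxHeader again has to be a real OMX_BUFFERHEADERTYPE:

#include "ExynosVideoApi.h"

/* Hypothetical wrapper: share two dmabuf planes with the encoder without
 * registering them up front. */
static ExynosVideoErrorType queue_imported_frame(
    ExynosVideoEncBufferOps *pInbufOps, void *pHandle, void *pOmxHeader,
    void *pLuma, void *pChroma, int fdLuma, int fdChroma,
    unsigned int lenLuma, unsigned int lenChroma)
{
    void         *addr[VIDEO_BUFFER_MAX_PLANES];
    int            fd[VIDEO_BUFFER_MAX_PLANES];
    unsigned int alloc[VIDEO_BUFFER_MAX_PLANES];
    unsigned int  used[VIDEO_BUFFER_MAX_PLANES];

    addr[0]  = pLuma;    addr[1]  = pChroma;   /* stored for the later dequeue; used
                                                  directly only for USERPTR sharing */
    fd[0]    = fdLuma;   fd[1]    = fdChroma;  /* what the driver receives otherwise */
    alloc[0] = lenLuma;  alloc[1] = lenChroma;
    used[0]  = lenLuma;  used[1]  = lenChroma;

    return pInbufOps->ExtensionEnqueue(pHandle, addr, fd, alloc, used,
                                       2 /* e.g. NV12M */, pOmxHeader);
}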
pCtx->pOutbuf[buf.index].pPrivate = NULL; + pCtx->pOutbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + +EXIT: + return ret; +} + + +/* + * [Encoder Buffer OPS] ExtensionDequeue (Output) + */ +static ExynosVideoErrorType MFC_Encoder_ExtensionDequeue_Outbuf( + void *pHandle, + ExynosVideoBuffer *pVideoBuffer) +{ + ExynosVideoEncContext *pCtx = (ExynosVideoEncContext *)pHandle; + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + ExynosVideoBuffer *pOutbuf = NULL; + pthread_mutex_t *pMutex = NULL; + struct v4l2_buffer buf; + struct v4l2_plane planes[VIDEO_BUFFER_MAX_PLANES]; + int value, plane; + + if (pCtx == NULL) { + ALOGE("%s: Video context info must be supplied", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + if (pCtx->bStreamonOutbuf == VIDEO_FALSE) { + pOutbuf = NULL; + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + memset(&buf, 0, sizeof(buf)); + buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; + buf.m.planes = planes; + buf.length = pCtx->nOutbufPlanes; + + if (pCtx->bShareOutbuf == VIDEO_TRUE) + buf.memory = pCtx->videoInstInfo.nMemoryType; + else + buf.memory = V4L2_MEMORY_MMAP; + + /* returning DQBUF_EIO means MFC H/W status is invalid */ + if (exynos_v4l2_dqbuf(pCtx->hEnc, &buf) != 0) { + if (errno == EIO) + ret = VIDEO_ERROR_DQBUF_EIO; + else + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pMutex = (pthread_mutex_t*)pCtx->pOutMutex; + pthread_mutex_lock(pMutex); + + pOutbuf = &pCtx->pOutbuf[buf.index]; + for (plane = 0; plane < pCtx->nOutbufPlanes; plane++) + pOutbuf->planes[plane].dataSize = buf.m.planes[plane].bytesused; + + switch (buf.flags & (0x7 << 3)) { + case V4L2_BUF_FLAG_KEYFRAME: + pOutbuf->frameType = VIDEO_FRAME_I; + break; + case V4L2_BUF_FLAG_PFRAME: + pOutbuf->frameType = VIDEO_FRAME_P; + break; + case V4L2_BUF_FLAG_BFRAME: + pOutbuf->frameType = VIDEO_FRAME_B; + break; + default: + ALOGI("%s: encoded frame type is = %d",__func__, (buf.flags & (0x7 << 3))); + pOutbuf->frameType = VIDEO_FRAME_OTHERS; + break; + }; + + if (pCtx->pOutbuf[buf.index].bQueued == VIDEO_TRUE) + memcpy(pVideoBuffer, pOutbuf, sizeof(ExynosVideoBuffer)); + else + ret = VIDEO_ERROR_NOBUFFERS; + memset(pOutbuf, 0, sizeof(ExynosVideoBuffer)); + + pCtx->pOutbuf[buf.index].bQueued = VIDEO_FALSE; + pthread_mutex_unlock(pMutex); + +EXIT: + return ret; +} + +/* + * [Encoder OPS] Common + */ +static ExynosVideoEncOps defEncOps = { + .nSize = 0, + .Init = MFC_Encoder_Init, + .Finalize = MFC_Encoder_Finalize, + .Set_EncParam = MFC_Encoder_Set_EncParam, + .Set_FrameType = MFC_Encoder_Set_FrameType, + .Set_FrameRate = MFC_Encoder_Set_FrameRate, + .Set_BitRate = MFC_Encoder_Set_BitRate, + .Set_QpRange = MFC_Encoder_Set_QuantizationRange, + .Set_FrameSkip = MFC_Encoder_Set_FrameSkip, + .Set_IDRPeriod = MFC_Encoder_Set_IDRPeriod, + .Set_FrameTag = MFC_Encoder_Set_FrameTag, + .Get_FrameTag = MFC_Encoder_Get_FrameTag, + .Enable_PrependSpsPpsToIdr = MFC_Encoder_Enable_PrependSpsPpsToIdr, + .Set_QosRatio = MFC_Encoder_Set_QosRatio, + .Set_LayerChange = MFC_Encoder_Set_LayerChange, +#ifdef CID_SUPPORT + .Set_DynamicQpControl = MFC_Encoder_Set_DynamicQpControl, + .Set_MarkLTRFrame = MFC_Encoder_Set_MarkLTRFrame, + .Set_UsedLTRFrame = MFC_Encoder_Set_UsedLTRFrame, + .Set_BasePID = MFC_Encoder_Set_BasePID, +#endif +#ifdef USE_MFC_MEDIA + .Set_RoiInfo = MFC_Encoder_Set_RoiInfo, +#endif +}; + +/* + * [Encoder Buffer OPS] Input + */ +static ExynosVideoEncBufferOps defInbufOps = { + .nSize = 0, + .Enable_Cacheable = 
MFC_Encoder_Enable_Cacheable_Inbuf, + .Set_Shareable = MFC_Encoder_Set_Shareable_Inbuf, + .Get_Buffer = NULL, + .Set_Geometry = MFC_Encoder_Set_Geometry_Inbuf, + .Get_Geometry = MFC_Encoder_Get_Geometry_Inbuf, + .Setup = MFC_Encoder_Setup_Inbuf, + .Run = MFC_Encoder_Run_Inbuf, + .Stop = MFC_Encoder_Stop_Inbuf, + .Enqueue = MFC_Encoder_Enqueue_Inbuf, + .Enqueue_All = NULL, + .Dequeue = MFC_Encoder_Dequeue_Inbuf, + .Register = MFC_Encoder_Register_Inbuf, + .Clear_RegisteredBuffer = MFC_Encoder_Clear_RegisteredBuffer_Inbuf, + .Clear_Queue = MFC_Encoder_Clear_Queued_Inbuf, + .ExtensionEnqueue = MFC_Encoder_ExtensionEnqueue_Inbuf, + .ExtensionDequeue = MFC_Encoder_ExtensionDequeue_Inbuf, +}; + +/* + * [Encoder Buffer OPS] Output + */ +static ExynosVideoEncBufferOps defOutbufOps = { + .nSize = 0, + .Enable_Cacheable = MFC_Encoder_Enable_Cacheable_Outbuf, + .Set_Shareable = MFC_Encoder_Set_Shareable_Outbuf, + .Get_Buffer = MFC_Encoder_Get_Buffer_Outbuf, + .Set_Geometry = MFC_Encoder_Set_Geometry_Outbuf, + .Get_Geometry = MFC_Encoder_Get_Geometry_Outbuf, + .Setup = MFC_Encoder_Setup_Outbuf, + .Run = MFC_Encoder_Run_Outbuf, + .Stop = MFC_Encoder_Stop_Outbuf, + .Enqueue = MFC_Encoder_Enqueue_Outbuf, + .Enqueue_All = NULL, + .Dequeue = MFC_Encoder_Dequeue_Outbuf, + .Register = MFC_Encoder_Register_Outbuf, + .Clear_RegisteredBuffer = MFC_Encoder_Clear_RegisteredBuffer_Outbuf, + .Clear_Queue = MFC_Encoder_Clear_Queued_Outbuf, + .ExtensionEnqueue = MFC_Encoder_ExtensionEnqueue_Outbuf, + .ExtensionDequeue = MFC_Encoder_ExtensionDequeue_Outbuf, +}; + +ExynosVideoErrorType MFC_Exynos_Video_GetInstInfo_Encoder( + ExynosVideoInstInfo *pVideoInstInfo) +{ + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + int hEnc = -1; + int mode = 0, version = 0; + + if (pVideoInstInfo == NULL) { + ALOGE("%s: bad parameter", __func__); + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + +#ifdef USE_HEVC_HWIP + if (pVideoInstInfo->eCodecType == VIDEO_CODING_HEVC) + hEnc = exynos_v4l2_open_devname(VIDEO_HEVC_ENCODER_NAME, O_RDWR, 0); + else +#endif + hEnc = exynos_v4l2_open_devname(VIDEO_ENCODER_NAME, O_RDWR, 0); + + if (hEnc < 0) { + ALOGE("%s: Failed to open decoder device", __func__); + ret = VIDEO_ERROR_OPENFAIL; + goto EXIT; + } + + if (exynos_v4l2_g_ctrl(hEnc, V4L2_CID_MPEG_MFC_GET_VERSION_INFO, &version) != 0) { + ALOGW("%s: MFC version information is not available", __func__); +#ifdef USE_HEVC_HWIP + if (pVideoInstInfo->eCodecType == VIDEO_CODING_HEVC) + pVideoInstInfo->HwVersion = (int)HEVC_10; + else +#endif + pVideoInstInfo->HwVersion = (int)MFC_65; + } else { + pVideoInstInfo->HwVersion = version; + } + + if (exynos_v4l2_g_ctrl(hEnc, V4L2_CID_MPEG_MFC_GET_EXT_INFO, &mode) != 0) { + pVideoInstInfo->specificInfo.enc.bRGBSupport = VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.nSpareSize = 0; + pVideoInstInfo->specificInfo.enc.bTemporalSvcSupport = VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.bRoiInfoSupport = VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.bQpRangePBSupport = VIDEO_FALSE; + goto EXIT; + } + + pVideoInstInfo->specificInfo.enc.bQpRangePBSupport = (mode & (0x1 << 5))? VIDEO_TRUE:VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.bRoiInfoSupport = (mode & (0x1 << 4))? VIDEO_TRUE:VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.bSkypeSupport = (mode & (0x1 << 3))? VIDEO_TRUE:VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.bTemporalSvcSupport = (mode & (0x1 << 2))? VIDEO_TRUE:VIDEO_FALSE; + pVideoInstInfo->specificInfo.enc.bRGBSupport = (mode & (0x1 << 0))? 
VIDEO_TRUE:VIDEO_FALSE; + if (mode & (0x1 << 1)) { + if (exynos_v4l2_g_ctrl(hEnc, V4L2_CID_MPEG_MFC_GET_EXTRA_BUFFER_SIZE, &(pVideoInstInfo->specificInfo.enc.nSpareSize)) != 0) { + ALOGE("%s: g_ctrl is failed(V4L2_CID_MPEG_MFC_GET_EXTRA_BUFFER_SIZE)", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + } + + pVideoInstInfo->SwVersion = 0; +#ifdef CID_SUPPORT + if (pVideoInstInfo->specificInfo.enc.bSkypeSupport == VIDEO_TRUE) { + int swVersion = 0; + + if (exynos_v4l2_g_ctrl(hEnc, V4L2_CID_MPEG_MFC_GET_DRIVER_INFO, &swVersion) != 0) { + ALOGE("%s: g_ctrl is failed(V4L2_CID_MPEG_MFC_GET_EXTRA_BUFFER_SIZE)", __func__); + ret = VIDEO_ERROR_APIFAIL; + goto EXIT; + } + + pVideoInstInfo->SwVersion = (unsigned long long)swVersion; + } +#endif + + __Set_SupportFormat(pVideoInstInfo); + +EXIT: + if (hEnc >= 0) + exynos_v4l2_close(hEnc); + + return ret; +} + +int MFC_Exynos_Video_Register_Encoder( + ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps) +{ + ExynosVideoErrorType ret = VIDEO_ERROR_NONE; + + if ((pEncOps == NULL) || (pInbufOps == NULL) || (pOutbufOps == NULL)) { + ret = VIDEO_ERROR_BADPARAM; + goto EXIT; + } + + defEncOps.nSize = sizeof(defEncOps); + defInbufOps.nSize = sizeof(defInbufOps); + defOutbufOps.nSize = sizeof(defOutbufOps); + + memcpy((char *)pEncOps + sizeof(pEncOps->nSize), (char *)&defEncOps + sizeof(defEncOps.nSize), + pEncOps->nSize - sizeof(pEncOps->nSize)); + + memcpy((char *)pInbufOps + sizeof(pInbufOps->nSize), (char *)&defInbufOps + sizeof(defInbufOps.nSize), + pInbufOps->nSize - sizeof(pInbufOps->nSize)); + + memcpy((char *)pOutbufOps + sizeof(pOutbufOps->nSize), (char *)&defOutbufOps + sizeof(defOutbufOps.nSize), + pOutbufOps->nSize - sizeof(pOutbufOps->nSize)); + +EXIT: + return ret; +} diff --git a/libvideocodec/include/ExynosVideoApi.h b/libvideocodec/include/ExynosVideoApi.h new file mode 100644 index 0000000..3814676 --- /dev/null +++ b/libvideocodec/include/ExynosVideoApi.h @@ -0,0 +1,555 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
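Taken together, a plausible bring-up sequence for this encoder API looks roughly like the following; the function is illustrative, the codec and memory values are placeholders, and the remaining ExynosVideoInstInfo fields (nWidth, nHeight, and so on) would also have to be filled before Init():

#include <string.h>
#include "ExynosVideoApi.h"
#include "ExynosVideoEnc.h"

/* Illustrative open path: publish the default ops, query the H/W, then Init. */
static void *open_mfc_encoder(ExynosVideoEncOps *pEncOps,
                              ExynosVideoEncBufferOps *pInbufOps,
                              ExynosVideoEncBufferOps *pOutbufOps,
                              ExynosVideoInstInfo *pInstInfo)
{
    memset(pEncOps, 0, sizeof(*pEncOps));
    memset(pInbufOps, 0, sizeof(*pInbufOps));
    memset(pOutbufOps, 0, sizeof(*pOutbufOps));

    /* Register_Encoder copies according to nSize, so the caller states how
     * much of each ops struct it understands before calling it.            */
    pEncOps->nSize    = sizeof(*pEncOps);
    pInbufOps->nSize  = sizeof(*pInbufOps);
    pOutbufOps->nSize = sizeof(*pOutbufOps);

    if (MFC_Exynos_Video_Register_Encoder(pEncOps, pInbufOps, pOutbufOps) != VIDEO_ERROR_NONE)
        return NULL;

    /* Fills HwVersion and the enc.b*Support capability flags. */
    pInstInfo->eCodecType  = VIDEO_CODING_AVC;      /* placeholder codec  */
    pInstInfo->nMemoryType = V4L2_MEMORY_DMABUF;    /* placeholder memory */
    if (MFC_Exynos_Video_GetInstInfo_Encoder(pInstInfo) != VIDEO_ERROR_NONE)
        return NULL;

    return pEncOps->Init(pInstInfo);                /* encoder handle or NULL */
}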
+ */ + +#ifndef _EXYNOS_VIDEO_API_H_ +#define _EXYNOS_VIDEO_API_H_ + +#include "exynos_v4l2.h" + +/* Fixed */ +#define VIDEO_BUFFER_MAX_PLANES 3 +#define VIDEO_BUFFER_MAX_NUM 32 +#define LAST_FRAME 0x80000000 +#define EMPTY_DATA 0x40000000 +#define CSD_FRAME 0x20000000 + +/* Temporal SVC */ +#define VIDEO_MIN_TEMPORAL_LAYERS 1 +#define VIDEO_MAX_TEMPORAL_LAYERS 7 + +typedef enum _ExynosVideoBoolType { + VIDEO_FALSE = 0, + VIDEO_TRUE = 1, +} ExynosVideoBoolType; + +typedef enum _ExynosVideoErrorType { + VIDEO_ERROR_NONE = 1, + VIDEO_ERROR_BADPARAM = -1, + VIDEO_ERROR_OPENFAIL = -2, + VIDEO_ERROR_NOMEM = -3, + VIDEO_ERROR_APIFAIL = -4, + VIDEO_ERROR_MAPFAIL = -5, + VIDEO_ERROR_NOBUFFERS = -6, + VIDEO_ERROR_POLL = -7, + VIDEO_ERROR_DQBUF_EIO = -8, + VIDEO_ERROR_NOSUPPORT = -9, + VIDEO_ERROR_HEADERINFO = -10, + VIDEO_ERROR_WRONGBUFFERSIZE = -11, +} ExynosVideoErrorType; + +typedef enum _ExynosVideoCodingType { + VIDEO_CODING_UNKNOWN = 0, + VIDEO_CODING_MPEG2, + VIDEO_CODING_H263, + VIDEO_CODING_MPEG4, + VIDEO_CODING_VC1, + VIDEO_CODING_VC1_RCV, + VIDEO_CODING_AVC, + VIDEO_CODING_MVC, + VIDEO_CODING_VP8, + VIDEO_CODING_HEVC, + VIDEO_CODING_VP9, + VIDEO_CODING_RESERVED, +} ExynosVideoCodingType; + +typedef enum _ExynosVideoColorFormatType { + VIDEO_COLORFORMAT_UNKNOWN = 0, + VIDEO_COLORFORMAT_NV12, + VIDEO_COLORFORMAT_NV12M, + VIDEO_COLORFORMAT_NV21M, + VIDEO_COLORFORMAT_NV12_TILED, + VIDEO_COLORFORMAT_NV12M_TILED, + VIDEO_COLORFORMAT_I420, + VIDEO_COLORFORMAT_I420M, + VIDEO_COLORFORMAT_YV12M, + VIDEO_COLORFORMAT_ARGB8888, + VIDEO_COLORFORMAT_BGRA8888, + VIDEO_COLORFORMAT_RGBA8888, + VIDEO_COLORFORMAT_MAX, +} ExynosVideoColorFormatType; + +typedef enum _ExynosVideoFrameType { + VIDEO_FRAME_NOT_CODED = 0, + VIDEO_FRAME_I = 0x1 << 0, + VIDEO_FRAME_P = 0x1 << 1, + VIDEO_FRAME_B = 0x1 << 2, + VIDEO_FRAME_SKIPPED = 0x1 << 3, + VIDEO_FRAME_CORRUPT = 0x1 << 4, + VIDEO_FRAME_OTHERS = 0x1 << 5, +} ExynosVideoFrameType; + +typedef enum _ExynosVideoFrameStatusType { + VIDEO_FRAME_STATUS_UNKNOWN = 0, + VIDEO_FRAME_STATUS_DECODING_ONLY, + VIDEO_FRAME_STATUS_DISPLAY_DECODING, + VIDEO_FRAME_STATUS_DISPLAY_ONLY, + VIDEO_FRAME_STATUS_DECODING_FINISHED, + VIDEO_FRAME_STATUS_CHANGE_RESOL, + VIDEO_FRAME_STATUS_ENABLED_S3D, + VIDEO_FRAME_STATUS_LAST_FRAME, +} ExynosVideoFrameStatusType; + +typedef enum _ExynosVideoFrameSkipMode { + VIDEO_FRAME_SKIP_DISABLED = 0, + VIDEO_FRAME_SKIP_MODE_LEVEL_LIMIT, + VIDEO_FRAME_SKIP_MODE_BUF_LIMIT, +} ExynosVideoFrameSkipMode; + +typedef enum _ExynosVideoMFCVersion { + MFC_ERROR = 0, + MFC_51 = 0x51, + MFC_61 = 0x61, + MFC_65 = 0x65, + MFC_72 = 0x72, + MFC_723 = 0x723, + MFC_77 = 0x77, + MFC_78 = 0x78, + MFC_78D = 0x78D, + MFC_80 = 0x80, + MFC_90 = 0x90, + MFC_92 = 0x92, + MFC_100 = 0xA0, + MFC_101 = 0xA01, + MFC_1010 = 0xA0A0, + MFC_1011 = 0xA0B0, + MFC_1020 = 0xA140, +} ExynosVideoMFCVersion; + +typedef enum _ExynosVideoHEVCVersion { + HEVC_ERROR = 0, + HEVC_10 = 0x10, +} ExynosVideoHEVCVersion; + +typedef enum _ExynosVideoSecurityType { + VIDEO_NORMAL = 0, + VIDEO_SECURE = 1, +} ExynosVideoSecurityType; + +typedef enum _ExynosFilledDataType { + DATA_8BIT = 0x00, + DATA_10BIT = 0x01, + DATA_8BIT_WITH_2BIT = 0x11, +} ExynosFilledDataType; + +typedef struct _ExynosVideoRect { + unsigned int nTop; + unsigned int nLeft; + unsigned int nWidth; + unsigned int nHeight; +} ExynosVideoRect; + +typedef struct _ExynosVideoGeometry { + unsigned int nFrameWidth; + unsigned int nFrameHeight; + unsigned int nStride; + unsigned int nSizeImage; + unsigned int 
nAlignPlaneSize[VIDEO_BUFFER_MAX_PLANES]; + unsigned int nPlaneCnt; + ExynosVideoRect cropRect; + ExynosVideoCodingType eCompressionFormat; + ExynosVideoColorFormatType eColorFormat; + ExynosVideoBoolType bInterlaced; + ExynosFilledDataType eFilledDataType; +} ExynosVideoGeometry; + +typedef struct _ExynosVideoPlane { + void *addr; + unsigned int allocSize; + unsigned int dataSize; + unsigned long offset; + int fd; +} ExynosVideoPlane; + +typedef struct _ReleaseDPB { + int fd; + int fd1; + int fd2; +} ReleaseDPB; + +typedef struct _PrivateDataShareBuffer { + int index; + ReleaseDPB dpbFD[VIDEO_BUFFER_MAX_NUM]; +} PrivateDataShareBuffer; + +typedef struct _ExynosVideoBuffer { + ExynosVideoPlane planes[VIDEO_BUFFER_MAX_PLANES]; + ExynosVideoGeometry *pGeometry; + ExynosVideoFrameStatusType displayStatus; + ExynosVideoFrameType frameType; + int interlacedType; + ExynosVideoBoolType bQueued; + ExynosVideoBoolType bSlotUsed; + ExynosVideoBoolType bRegistered; + void *pPrivate; + PrivateDataShareBuffer PDSB; + int nIndexUseCnt; +} ExynosVideoBuffer; + +typedef struct _ExynosVideoFramePacking{ + int available; + unsigned int arrangement_id; + int arrangement_cancel_flag; + unsigned char arrangement_type; + int quincunx_sampling_flag; + unsigned char content_interpretation_type; + int spatial_flipping_flag; + int frame0_flipped_flag; + int field_views_flag; + int current_frame_is_frame0_flag; + unsigned char frame0_grid_pos_x; + unsigned char frame0_grid_pos_y; + unsigned char frame1_grid_pos_x; + unsigned char frame1_grid_pos_y; +} ExynosVideoFramePacking; + +typedef struct _ExynosVideoQPRange { + int QpMin_I; + int QpMax_I; + int QpMin_P; + int QpMax_P; + int QpMin_B; + int QpMax_B; +} ExynosVideoQPRange; + +/* for Temporal SVC */ +typedef struct _TemporalLayerShareBuffer { + /* In case of H.264 codec, + * nTemporalLayerCount contains the following data format. 
+ * -------------------------------------------------------------------- + * | Temporal SVC Coding Style (16bit) | Temporal Layer Count (16bit) | + * -------------------------------------------------------------------- + * - Temporal SVC Coding Style Value + * 0 : default + * 1 : custom Temporal SVC mode(for MCD) + * + * - Temporal Layer Count Value + * 0 : Temporal SVC is disabled + * 1 ~ MAX : Temporal SVC is enabled + */ + unsigned int nTemporalLayerCount; + unsigned int nTemporalLayerBitrateRatio[VIDEO_MAX_TEMPORAL_LAYERS]; +} TemporalLayerShareBuffer; + +/* for Roi Info */ +typedef struct _RoiInfoShareBuffer { + unsigned long long pRoiMBInfo; /* For 32/64 bit compatibility */ + int nRoiMBInfoSize; + int nUpperQpOffset; + int nLowerQpOffset; + ExynosVideoBoolType bUseRoiInfo; +} RoiInfoShareBuffer; + +typedef struct _ExynosVideoEncInitParam{ + /* Initial parameters */ + ExynosVideoFrameSkipMode FrameSkip; /* [IN] frame skip mode */ + int FMO; + int ASO; +}ExynosVideoEncInitParam; + +typedef struct _ExynosVideoEncCommonParam{ + /* common parameters */ + int SourceWidth; /* [IN] width of video to be encoded */ + int SourceHeight; /* [IN] height of video to be encoded */ + int IDRPeriod; /* [IN] GOP number(interval of I-frame) */ + int SliceMode; /* [IN] Multi slice mode */ + int RandomIntraMBRefresh; /* [IN] cyclic intra refresh */ + int EnableFRMRateControl; /* [IN] frame based rate control enable */ + int EnableMBRateControl; /* [IN] Enable macroblock-level rate control */ + int Bitrate; /* [IN] rate control parameter(bit rate) */ + int FrameQp; /* [IN] The quantization parameter of the frame */ + int FrameQp_P; /* [IN] The quantization parameter of the P frame */ + int EnableFRMQpControl; /* [IN] Enable quantization parameter per frame */ + ExynosVideoQPRange QpRange; /* [IN] Quantization range */ + int CBRPeriodRf; /* [IN] Reaction coefficient parameter for rate control */ + int PadControlOn; /* [IN] Enable padding control */ + int LumaPadVal; /* [IN] Luma pel value used to fill padding area */ + int CbPadVal; /* [IN] CB pel value used to fill padding area */ + int CrPadVal; /* [IN] CR pel value used to fill padding area */ + int FrameMap; /* [IN] Encoding input mode(tile mode or linear mode) */ +}ExynosVideoEncCommonParam; + +typedef struct _ExynosVideoEncH264Param{ + /* H.264 specific parameters */ + int ProfileIDC; /* [IN] profile */ + int LevelIDC; /* [IN] level */ + int FrameQp_B; /* [IN] The quantization parameter of the B frame */ + int FrameRate; /* [IN] rate control parameter(frame rate) */ + int SliceArgument; /* [IN] MB number or byte number */ + int NumberBFrames; /* [IN] The number of consecutive B frame inserted */ + int NumberReferenceFrames; /* [IN] The number of reference pictures used */ + int NumberRefForPframes; /* [IN] The number of reference pictures used for encoding P pictures */ + int LoopFilterDisable; /* [IN] disable the loop filter */ + int LoopFilterAlphaC0Offset; /* [IN] Alpha & C0 offset for H.264 loop filter */ + int LoopFilterBetaOffset; /* [IN] Beta offset for H.264 loop filter */ + int SymbolMode; /* [IN] The mode of entropy coding(CABAC, CAVLC) */ + int PictureInterlace; /* [IN] Enables the interlace mode */ + int Transform8x8Mode; /* [IN] Allow 8x8 transform(This is allowed only for high profile) */ + int DarkDisable; /* [IN] Disable adaptive rate control on dark region */ + int SmoothDisable; /* [IN] Disable adaptive rate control on smooth region */ + int StaticDisable; /* [IN] Disable adaptive rate control on static region */ + int 
ActivityDisable; /* [IN] Disable adaptive rate control on high activity region */ + TemporalLayerShareBuffer TemporalSVC; /* [IN] Temporal SVC */ + int HierarType; /* [IN] Hierarchal P & B */ + int VuiRestrictionEnable; /* [IN] Num Reorder Frames 0 enable */ + int HeaderWithIFrame; /* [IN] Header With I-Frame 0:disable, 1:enable*/ + int SarEnable; /* [IN] SarEnable */ + int SarIndex; /* [IN] SarIndex */ + int SarWidth; /* [IN] SarWidth */ + int SarHeight; /* [IN] SarHeight */ + int LTREnable; /* [IN] LTR frames */ + int ROIEnable; /* [IN] ROIEnable */ +} ExynosVideoEncH264Param; + +typedef struct _ExynosVideoEncMpeg4Param { + /* MPEG4 specific parameters */ + int ProfileIDC; /* [IN] profile */ + int LevelIDC; /* [IN] level */ + int FrameQp_B; /* [IN] The quantization parameter of the B frame */ + int TimeIncreamentRes; /* [IN] frame rate */ + int VopTimeIncreament; /* [IN] frame rate */ + int SliceArgument; /* [IN] MB number or byte number */ + int NumberBFrames; /* [IN] The number of consecutive B frame inserted */ + int DisableQpelME; /* [IN] disable quarter-pixel motion estimation */ +} ExynosVideoEncMpeg4Param; + +typedef struct _ExynosVideoEncH263Param { + /* H.263 specific parameters */ + int FrameRate; /* [IN] rate control parameter(frame rate) */ +} ExynosVideoEncH263Param; + +typedef struct _ExynosVideoEncVp8Param { + /* VP8 specific parameters */ + int FrameRate; /* [IN] rate control parameter(frame rate) */ + int Vp8Version; /* [IN] vp8 version */ + int Vp8NumberOfPartitions; /* [IN] number of partitions */ + int Vp8FilterLevel; /* [IN] filter level */ + int Vp8FilterSharpness; /* [IN] filter sharpness */ + int Vp8GoldenFrameSel; /* [IN] indication of golden frame */ + int Vp8GFRefreshPeriod; /* [IN] refresh period of golden frame */ + int RefNumberForPFrame; /* [IN] number of refernce picture for p frame */ + int DisableIntraMd4x4; /* [IN] prevent intra 4x4 mode */ + TemporalLayerShareBuffer TemporalSVC; /* [IN] Temporal SVC */ +} ExynosVideoEncVp8Param; + +typedef struct _ExynosVideoEncHevcParam{ + /* HEVC specific parameters */ + int ProfileIDC; /* [IN] profile */ + int TierIDC; /* [IN] tier flag(MAIN, HIGH) */ + int LevelIDC; /* [IN] level */ + int FrameQp_B; /* [IN] The quantization parameter of the B frame */ + int FrameRate; /* [IN] rate control parameter(frame rate) */ + int MaxPartitionDepth; /* [IN] Max partition depth */ + int NumberBFrames; /* [IN] The number of consecutive B frame inserted */ + int NumberReferenceFrames; /* [IN] The number of reference pictures used */ + int NumberRefForPframes; /* [IN] The number of reference pictures used for encoding P pictures */ + int LoopFilterDisable; /* [IN] disable the loop filter */ + int LoopFilterSliceFlag; /* [IN] in loop filter, select across or not slice boundary */ + int LoopFilterTcOffset; /* [IN] TC offset for HEVC loop filter */ + int LoopFilterBetaOffset; /* [IN] Beta offset for HEVC loop filter */ + int LongtermRefEnable; /* [IN] long term reference enable */ + int LongtermUserRef; /* [IN] use long term reference index (0 or 1) */ + int LongtermStoreRef; /* [IN] use long term frame index (0 or 1) */ + int DarkDisable; /* [IN] Disable adaptive rate control on dark region */ + int SmoothDisable; /* [IN] Disable adaptive rate control on smooth region */ + int StaticDisable; /* [IN] Disable adaptive rate control on static region */ + int ActivityDisable; /* [IN] Disable adaptive rate control on high activity region */ + TemporalLayerShareBuffer TemporalSVC; /* [IN] Temporal SVC */ + int ROIEnable; /* [IN] ROIEnable 
*/ +} ExynosVideoEncHevcParam; + +typedef struct _ExynosVideoEncVp9Param { + /* VP9 specific parameters */ + int FrameRate; /* [IN] rate control parameter(frame rate) */ + int Vp9Version; /* [IN] vp8 version */ + int Vp9GoldenFrameSel; /* [IN] indication of golden frame */ + int Vp9GFRefreshPeriod; /* [IN] refresh period of golden frame */ + int RefNumberForPFrame; /* [IN] number of refernce picture for p frame */ + int MaxPartitionDepth; /* [IN] Max partition depth */ + TemporalLayerShareBuffer TemporalSVC; /* [IN] Temporal SVC */ +} ExynosVideoEncVp9Param; + +typedef union _ExynosVideoEncCodecParam { + ExynosVideoEncH264Param h264; + ExynosVideoEncMpeg4Param mpeg4; + ExynosVideoEncH263Param h263; + ExynosVideoEncVp8Param vp8; + ExynosVideoEncHevcParam hevc; + ExynosVideoEncVp9Param vp9; +} ExynosVideoEncCodecParam; + +typedef struct _ExynosVideoEncParam { + ExynosVideoCodingType eCompressionFormat; + ExynosVideoEncInitParam initParam; + ExynosVideoEncCommonParam commonParam; + ExynosVideoEncCodecParam codecParam; +} ExynosVideoEncParam; + +typedef struct _ExynosVideoDecInstInfo { + ExynosVideoBoolType bDualDPBSupport; + ExynosVideoBoolType bDynamicDPBSupport; + ExynosVideoBoolType bLastFrameSupport; + ExynosVideoBoolType bSkypeSupport; +} ExynosVideoDecInstInfo; + +typedef struct _ExynosVideoEncInstInfo { + ExynosVideoBoolType bRGBSupport; + int nSpareSize; + ExynosVideoBoolType bTemporalSvcSupport; + ExynosVideoBoolType bSkypeSupport; + ExynosVideoBoolType bRoiInfoSupport; + ExynosVideoBoolType bQpRangePBSupport; +} ExynosVideoEncInstInfo; + +typedef struct _ExynosVideoInstInfo { + unsigned int nSize; + + unsigned int nWidth; + unsigned int nHeight; + unsigned int nBitrate; + unsigned int xFramerate; + int nMemoryType; + ExynosVideoCodingType eCodecType; + int HwVersion; + unsigned long long SwVersion; + ExynosVideoSecurityType eSecurityType; + ExynosVideoColorFormatType supportFormat[VIDEO_COLORFORMAT_MAX]; + + union { + ExynosVideoDecInstInfo dec; + ExynosVideoEncInstInfo enc; + } specificInfo; +} ExynosVideoInstInfo; + +typedef struct _ExynosVideoDecOps { + unsigned int nSize; + + void * (*Init)(ExynosVideoInstInfo *pVideoInfo); + ExynosVideoErrorType (*Finalize)(void *pHandle); + + /* Add new ops at the end of structure, no order change */ + ExynosVideoErrorType (*Set_FrameTag)(void *pHandle, int frameTag); + int (*Get_FrameTag)(void *pHandle); + int (*Get_ActualBufferCount)(void *pHandle); + ExynosVideoErrorType (*Set_DisplayDelay)(void *pHandle, int delay); + ExynosVideoErrorType (*Set_IFrameDecoding)(void *pHandle); + ExynosVideoErrorType (*Enable_PackedPB)(void *pHandle); + ExynosVideoErrorType (*Enable_LoopFilter)(void *pHandle); + ExynosVideoErrorType (*Enable_SliceMode)(void *pHandle); + ExynosVideoErrorType (*Enable_SEIParsing)(void *pHandle); + ExynosVideoErrorType (*Get_FramePackingInfo)(void *pHandle, ExynosVideoFramePacking *pFramepacking); + ExynosVideoErrorType (*Set_ImmediateDisplay)(void *pHandle); + ExynosVideoErrorType (*Enable_DTSMode)(void *pHandle); + ExynosVideoErrorType (*Set_QosRatio)(void *pHandle, int ratio); + ExynosVideoErrorType (*Enable_DualDPBMode)(void *pHandle); + ExynosVideoErrorType (*Enable_DynamicDPB)(void *pHandle); + ExynosVideoErrorType (*Set_BufferProcessType)(void *pHandle, int bufferProcessType); +} ExynosVideoDecOps; + +typedef struct _ExynosVideoEncOps { + unsigned int nSize; + void * (*Init)(ExynosVideoInstInfo *pVideoInfo); + ExynosVideoErrorType (*Finalize)(void *pHandle); + + /* Add new ops at the end of structure, no order change */ 
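A small packing sketch for the temporal-SVC layout described above, handed to the Set_LayerChange op declared just below; the coding style, layer count and ratio values are example numbers only, and the exact semantics of the ratios are driver-defined:

#include <string.h>
#include "ExynosVideoApi.h"

/* Example: request three temporal layers with the default coding style. */
static ExynosVideoErrorType request_three_temporal_layers(ExynosVideoEncOps *pEncOps,
                                                          void *pHandle)
{
    TemporalLayerShareBuffer svc;
    unsigned int nCodingStyle = 0;   /* 0: default, 1: custom (MCD) mode */
    unsigned int nLayerCount  = 3;   /* 0 would disable temporal SVC     */

    memset(&svc, 0, sizeof(svc));
    svc.nTemporalLayerCount = (nCodingStyle << 16) | (nLayerCount & 0xFFFF);

    /* Example per-layer bitrate ratios. */
    svc.nTemporalLayerBitrateRatio[0] = 60;
    svc.nTemporalLayerBitrateRatio[1] = 25;
    svc.nTemporalLayerBitrateRatio[2] = 15;

    return pEncOps->Set_LayerChange(pHandle, svc);
}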
+ ExynosVideoErrorType (*Set_EncParam)(void *pHandle, ExynosVideoEncParam*encParam); + ExynosVideoErrorType (*Set_FrameTag)(void *pHandle, int frameTag); + int (*Get_FrameTag)(void *pHandle); + ExynosVideoErrorType (*Set_FrameType)(void *pHandle, ExynosVideoFrameType frameType); + ExynosVideoErrorType (*Set_FrameRate)(void *pHandle, int frameRate); + ExynosVideoErrorType (*Set_BitRate)(void *pHandle, int bitRate); + ExynosVideoErrorType (*Set_QpRange)(void *pHandle, ExynosVideoQPRange qpRange); + ExynosVideoErrorType (*Set_FrameSkip)(void *pHandle, int frameSkip); + ExynosVideoErrorType (*Set_IDRPeriod)(void *pHandle, int period); + ExynosVideoErrorType (*Enable_PrependSpsPpsToIdr)(void *pHandle); + ExynosVideoErrorType (*Set_QosRatio)(void *pHandle, int ratio); + ExynosVideoErrorType (*Set_LayerChange)(void *pHandle, TemporalLayerShareBuffer TemporalSVC); + ExynosVideoErrorType (*Set_DynamicQpControl)(void *pHandle, int nQp); + ExynosVideoErrorType (*Set_MarkLTRFrame)(void *pHandle, int nLongTermFrmIdx); + ExynosVideoErrorType (*Set_UsedLTRFrame)(void *pHandle, int nUsedLTRFrameNum); + ExynosVideoErrorType (*Set_BasePID)(void *pHandle, int nPID); + ExynosVideoErrorType (*Set_RoiInfo)(void *pHandle, RoiInfoShareBuffer *pRoiInfo); +} ExynosVideoEncOps; + +typedef struct _ExynosVideoDecBufferOps { + unsigned int nSize; + + /* Add new ops at the end of structure, no order change */ + ExynosVideoErrorType (*Enable_Cacheable)(void *pHandle); + ExynosVideoErrorType (*Set_Shareable)(void *pHandle); + ExynosVideoErrorType (*Get_Buffer)(void *pHandle, int nIndex, ExynosVideoBuffer **pBuffer); + ExynosVideoErrorType (*Set_Geometry)(void *pHandle, ExynosVideoGeometry *bufferConf); + ExynosVideoErrorType (*Get_Geometry)(void *pHandle, ExynosVideoGeometry *bufferConf); + ExynosVideoErrorType (*Setup)(void *pHandle, unsigned int nBufferCount); + ExynosVideoErrorType (*Run)(void *pHandle); + ExynosVideoErrorType (*Stop)(void *pHandle); + ExynosVideoErrorType (*Enqueue)(void *pHandle, void *pBuffer[], unsigned int dataSize[], int nPlanes, void *pPrivate); + ExynosVideoErrorType (*Enqueue_All)(void *pHandle); + ExynosVideoBuffer * (*Dequeue)(void *pHandle); + ExynosVideoErrorType (*Register)(void *pHandle, ExynosVideoPlane *planes, int nPlanes); + ExynosVideoErrorType (*Clear_RegisteredBuffer)(void *pHandle); + ExynosVideoErrorType (*Clear_Queue)(void *pHandle); + ExynosVideoErrorType (*Cleanup_Buffer)(void *pHandle); + ExynosVideoErrorType (*Apply_RegisteredBuffer)(void *pHandle); + ExynosVideoErrorType (*ExtensionEnqueue)(void *pHandle, void *pBuffer[], int pFd[], unsigned int allocLen[], unsigned int dataSize[], int nPlanes, void *pPrivate); + ExynosVideoErrorType (*ExtensionDequeue)(void *pHandle, ExynosVideoBuffer *pVideoBuffer); +} ExynosVideoDecBufferOps; + +typedef struct _ExynosVideoEncBufferOps { + unsigned int nSize; + + /* Add new ops at the end of structure, no order change */ + ExynosVideoErrorType (*Enable_Cacheable)(void *pHandle); + ExynosVideoErrorType (*Set_Shareable)(void *pHandle); + ExynosVideoErrorType (*Get_Buffer)(void *pHandle, int nIndex, ExynosVideoBuffer **pBuffer); + ExynosVideoErrorType (*Set_Geometry)(void *pHandle, ExynosVideoGeometry *bufferConf); + ExynosVideoErrorType (*Get_Geometry)(void *pHandle, ExynosVideoGeometry *bufferConf); + ExynosVideoErrorType (*Setup)(void *pHandle, unsigned int nBufferCount); + ExynosVideoErrorType (*Run)(void *pHandle); + ExynosVideoErrorType (*Stop)(void *pHandle); + ExynosVideoErrorType (*Enqueue)(void *pHandle, void *pBuffer[], unsigned 
int dataSize[], int nPlanes, void *pPrivate); + ExynosVideoErrorType (*Enqueue_All)(void *pHandle); + ExynosVideoBuffer * (*Dequeue)(void *pHandle); + ExynosVideoErrorType (*Register)(void *pHandle, ExynosVideoPlane *planes, int nPlanes); + ExynosVideoErrorType (*Clear_RegisteredBuffer)(void *pHandle); + ExynosVideoErrorType (*Clear_Queue)(void *pHandle); + ExynosVideoErrorType (*ExtensionEnqueue)(void *pHandle, void *pBuffer[], int pFd[], unsigned int allocLen[], unsigned int dataSize[], int nPlanes, void *pPrivate); + ExynosVideoErrorType (*ExtensionDequeue)(void *pHandle, ExynosVideoBuffer *pVideoBuffer); +} ExynosVideoEncBufferOps; + +ExynosVideoErrorType Exynos_Video_GetInstInfo( + ExynosVideoInstInfo *pVideoInstInfo, ExynosVideoBoolType bIsDec); + +int Exynos_Video_Register_Decoder( + ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps); + +int Exynos_Video_Register_Encoder( + ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps); + +void Exynos_Video_Unregister_Decoder( + ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps); + +void Exynos_Video_Unregister_Encoder( + ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps); +#endif /* _EXYNOS_VIDEO_API_H_ */ diff --git a/libvideocodec/include/ExynosVideoDec.h b/libvideocodec/include/ExynosVideoDec.h new file mode 100644 index 0000000..81d0a57 --- /dev/null +++ b/libvideocodec/include/ExynosVideoDec.h @@ -0,0 +1,70 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _EXYNOS_VIDEO_DEC_H_ +#define _EXYNOS_VIDEO_DEC_H_ + +/* Configurable */ +/* Normal Node */ +#define VIDEO_MFC_DECODER_NAME "s5p-mfc-dec" +#define VIDEO_HEVC_DECODER_NAME "exynos-hevc-dec" +/* Secure Node */ +#define VIDEO_MFC_SECURE_DECODER_NAME "s5p-mfc-dec-secure" +#define VIDEO_HEVC_SECURE_DECODER_NAME "exynos-hevc-dec-secure" + +#define VIDEO_DECODER_INBUF_SIZE (1920 * 1080 * 3 / 2) +#define VIDEO_DECODER_DEFAULT_INBUF_PLANES 1 +#define VIDEO_DECODER_DEFAULT_OUTBUF_PLANES 2 +#define VIDEO_DECODER_POLL_TIMEOUT 25 + +#define OPERATE_BIT(x, mask, shift) ((x & (mask << shift)) >> shift) +#define FRAME_PACK_SEI_INFO_NUM 4 + + +typedef struct _ExynosVideoDecContext { + int hDec; + ExynosVideoBoolType bShareInbuf; + ExynosVideoBoolType bShareOutbuf; + ExynosVideoBuffer *pInbuf; + ExynosVideoBuffer *pOutbuf; + ExynosVideoGeometry inbufGeometry; + ExynosVideoGeometry outbufGeometry; + int nInbufs; + int nInbufPlanes; + int nOutbufs; + int nOutbufPlanes; + ExynosVideoBoolType bStreamonInbuf; + ExynosVideoBoolType bStreamonOutbuf; + void *pPrivate; + void *pInMutex; + void *pOutMutex; + ExynosVideoInstInfo videoInstInfo; + + int hIONHandle; + int nPrivateDataShareFD; + void *pPrivateDataShareAddress; +} ExynosVideoDecContext; + +ExynosVideoErrorType MFC_Exynos_Video_GetInstInfo_Decoder( + ExynosVideoInstInfo *pVideoInstInfo); + +int MFC_Exynos_Video_Register_Decoder( + ExynosVideoDecOps *pDecOps, + ExynosVideoDecBufferOps *pInbufOps, + ExynosVideoDecBufferOps *pOutbufOps); + +#endif /* _EXYNOS_VIDEO_DEC_H_ */ diff --git a/libvideocodec/include/ExynosVideoEnc.h b/libvideocodec/include/ExynosVideoEnc.h new file mode 100644 index 0000000..8bdbada --- /dev/null +++ b/libvideocodec/include/ExynosVideoEnc.h @@ -0,0 +1,74 @@ +/* + * + * Copyright 2012 Samsung Electronics S.LSI Co. LTD + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
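The Wait helpers earlier in this patch are built on the plain poll() pattern below; this standalone version (with fd assumed to be an open MFC node) mirrors the unbounded input-side variant and the 25 ms VIDEO_*_POLL_TIMEOUT values:

#include <poll.h>

/* Block until the MFC node signals a ready buffer (POLLIN) or an error. */
static int wait_for_mfc(int fd, int timeout_ms)   /* timeout_ms: e.g. 25 */
{
    struct pollfd pfd;

    pfd.fd      = fd;
    pfd.events  = POLLIN | POLLERR;
    pfd.revents = 0;

    for (;;) {
        int n = poll(&pfd, 1, timeout_ms);

        if (n < 0)
            return -1;                              /* poll() failure        */
        if (n == 0)
            continue;                               /* timeout: keep waiting */
        return (pfd.revents & POLLIN) ? 0 : -1;     /* ready vs. POLLERR     */
    }
}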
+ */ + +#ifndef _EXYNOS_VIDEO_ENC_H_ +#define _EXYNOS_VIDEO_ENC_H_ + +/* Configurable */ +/* Normal Node */ +#define VIDEO_ENCODER_NAME "s5p-mfc-enc" +#define VIDEO_HEVC_ENCODER_NAME "exynos-hevc-enc" +/* Secure Node */ +#define VIDEO_SECURE_ENCODER_NAME "s5p-mfc-enc-secure" +#define VIDEO_HEVC_SECURE_ENCODER_NAME "exynos-hevc-enc-secure" + +#define VIDEO_ENCODER_DEFAULT_INBUF_PLANES 2 +#define VIDEO_ENCODER_DEFAULT_OUTBUF_PLANES 1 +#define VIDEO_ENCODER_POLL_TIMEOUT 25 + +#define FRAME_RATE_CHANGE_THRESH_HOLD 5 + +typedef struct _ExynosVideoEncContext { + int hEnc; + ExynosVideoBoolType bShareInbuf; + ExynosVideoBoolType bShareOutbuf; + ExynosVideoBuffer *pInbuf; + ExynosVideoBuffer *pOutbuf; + + /* FIXME : temp */ + ExynosVideoGeometry inbufGeometry; + ExynosVideoGeometry outbufGeometry; + int nInbufs; + int nInbufPlanes; + int nOutbufs; + int nOutbufPlanes; + ExynosVideoBoolType bStreamonInbuf; + ExynosVideoBoolType bStreamonOutbuf; + void *pPrivate; + void *pInMutex; + void *pOutMutex; + ExynosVideoInstInfo videoInstInfo; + + int hIONHandle; + int nTemporalLayerShareBufferFD; + void *pTemporalLayerShareBufferAddr; + int nRoiShareBufferFD; + void *pRoiShareBufferAddr; + int oldFrameRate; + int64_t oldTimeStamp; + int64_t oldDuration; +} ExynosVideoEncContext; + +ExynosVideoErrorType MFC_Exynos_Video_GetInstInfo_Encoder( + ExynosVideoInstInfo *pVideoInstInfo); + +int MFC_Exynos_Video_Register_Encoder( + ExynosVideoEncOps *pEncOps, + ExynosVideoEncBufferOps *pInbufOps, + ExynosVideoEncBufferOps *pOutbufOps); + +#endif /* _EXYNOS_VIDEO_ENC_H_ */ diff --git a/libvideocodec/mfc_headers/exynos_mfc_media.h b/libvideocodec/mfc_headers/exynos_mfc_media.h new file mode 100644 index 0000000..c50ac81 --- /dev/null +++ b/libvideocodec/mfc_headers/exynos_mfc_media.h @@ -0,0 +1,283 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! 
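The oldTimeStamp/oldDuration fields above back the VP8 frame-rate tracking seen in the enqueue paths; a simplified, arithmetic-only version of that check (the threshold literal mirrors FRAME_RATE_CHANGE_THRESH_HOLD, and no driver call is made here) might look like:

#include <stdint.h>
#include <stdlib.h>

/* Returns 1 and writes the new rate when the estimated fps moved by at least
 * the threshold; the caller would then issue the FRAME_RATE_CH s_ctrl. */
static int framerate_changed(int64_t oldDurationUs, int64_t curDurationUs,
                             int *pNewFrameRate)
{
    int oldFps, curFps;

    if ((oldDurationUs <= 0) || (curDurationUs <= 0))
        return 0;                                /* not enough history yet */

    oldFps = (int)(1000000 / oldDurationUs);     /* 1E6 / duration(us)     */
    curFps = (int)(1000000 / curDurationUs);

    if (abs(curFps - oldFps) >= 5) {             /* FRAME_RATE_CHANGE_THRESH_HOLD */
        *pNewFrameRate = curFps;
        return 1;
    }
    return 0;
}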
+ *** + **************************************************************************** + ****************************************************************************/ +#ifndef __EXYNOS_MFC_MEDIA_H +#define __EXYNOS_MFC_MEDIA_H __FILE__ +#define V4L2_PIX_FMT_RGB32X v4l2_fourcc('R', 'G', 'B', 'X') +#define V4L2_PIX_FMT_YUV444_2P v4l2_fourcc('Y', 'U', '2', 'P') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_YVU444_2P v4l2_fourcc('Y', 'V', '2', 'P') +#define V4L2_PIX_FMT_YUV444_3P v4l2_fourcc('Y', 'U', '3', 'P') +#define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') +#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') +#define V4L2_PIX_FMT_FIMV v4l2_fourcc('F', 'I', 'M', 'V') +#define V4L2_PIX_FMT_FIMV1 v4l2_fourcc('F', 'I', 'M', '1') +#define V4L2_PIX_FMT_FIMV2 v4l2_fourcc('F', 'I', 'M', '2') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_FIMV3 v4l2_fourcc('F', 'I', 'M', '3') +#define V4L2_PIX_FMT_FIMV4 v4l2_fourcc('F', 'I', 'M', '4') +#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') +#define V4L2_PIX_FMT_VP9 v4l2_fourcc('V', 'P', '9', '0') +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_PIX_FMT_HEVC v4l2_fourcc('H', 'E', 'V', 'C') +#define V4L2_CID_MPEG_MFC_BASE (V4L2_CTRL_CLASS_MPEG | 0x2000) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_AVAIL (V4L2_CID_MPEG_MFC_BASE + 1) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_ARRGMENT_ID (V4L2_CID_MPEG_MFC_BASE + 2) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_INFO (V4L2_CID_MPEG_MFC_BASE + 3) +#define V4L2_CID_MPEG_VIDEO_H264_SEI_FP_GRID_POS (V4L2_CID_MPEG_MFC_BASE + 4) +#define V4L2_CID_MPEG_MFC51_VIDEO_PACKED_PB (V4L2_CID_MPEG_MFC_BASE + 5) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TAG (V4L2_CID_MPEG_MFC_BASE + 6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_ENABLE (V4L2_CID_MPEG_MFC_BASE + 7) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_LUMA (V4L2_CID_MPEG_MFC_BASE + 8) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_CHROMA (V4L2_CID_MPEG_MFC_BASE + 9) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_LUMA_BOT (V4L2_CID_MPEG_MFC_BASE + 10) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_DATA_CHROMA_BOT (V4L2_CID_MPEG_MFC_BASE + 11) +#define V4L2_CID_MPEG_MFC51_VIDEO_CRC_GENERATED (V4L2_CID_MPEG_MFC_BASE + 12) +#define V4L2_CID_MPEG_MFC51_VIDEO_CHECK_STATE (V4L2_CID_MPEG_MFC_BASE + 13) +#define V4L2_CID_MPEG_MFC51_VIDEO_DISPLAY_STATUS (V4L2_CID_MPEG_MFC_BASE + 14) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_LUMA_ADDR (V4L2_CID_MPEG_MFC_BASE + 15) +#define V4L2_CID_MPEG_MFC51_VIDEO_CHROMA_ADDR (V4L2_CID_MPEG_MFC_BASE + 16) +#define V4L2_CID_MPEG_MFC51_VIDEO_STREAM_SIZE (V4L2_CID_MPEG_MFC_BASE + 17) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_COUNT (V4L2_CID_MPEG_MFC_BASE + 18) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_TYPE (V4L2_CID_MPEG_MFC_BASE + 19) +enum v4l2_mpeg_mfc51_video_frame_type { + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_NOT_CODED = 0, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_I_FRAME = 1, +/* WARNING: DO NOT EDIT, 
AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_P_FRAME = 2, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_B_FRAME = 3, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_SKIPPED = 4, + V4L2_MPEG_MFC51_VIDEO_FRAME_TYPE_OTHERS = 5, +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +}; +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_INTERLACE (V4L2_CID_MPEG_MFC_BASE + 20) +#define V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 21) +#define V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_TIME_RES (V4L2_CID_MPEG_MFC_BASE + 22) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_MPEG4_VOP_FRM_DELTA (V4L2_CID_MPEG_MFC_BASE + 23) +#define V4L2_CID_MPEG_MFC51_VIDEO_H263_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 24) +#define V4L2_CID_MPEG_MFC6X_VIDEO_FRAME_DELTA (V4L2_CID_MPEG_MFC_BASE + 25) +#define V4L2_CID_MPEG_MFC51_VIDEO_I_PERIOD_CH V4L2_CID_MPEG_VIDEO_GOP_SIZE +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE_CH V4L2_CID_MPEG_MFC51_VIDEO_H264_RC_FRAME_RATE +#define V4L2_CID_MPEG_MFC51_VIDEO_BIT_RATE_CH V4L2_CID_MPEG_VIDEO_BITRATE +#define V4L2_MPEG_VIDEO_MPEG4_LEVEL_6 8 +#define V4L2_MPEG_VIDEO_HEADER_MODE_AT_THE_READY 2 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_MPEG_VIDEO_MULTI_SLICE_MODE_MAX_MB_ROW 3 +#define V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_HIGH 17 +#define V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_S_B V4L2_MPEG_VIDEO_H264_LOOP_FILTER_MODE_DISABLED_AT_SLICE_BOUNDARY +#define V4L2_CID_MPEG_VIDEO_H264_MVC_VIEW_ID (V4L2_CID_MPEG_MFC_BASE + 42) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_STATUS (V4L2_CID_MPEG_MFC_BASE + 43) +#define V4L2_CID_MPEG_MFC51_VIDEO_I_FRAME_DECODING (V4L2_CID_MPEG_MFC_BASE + 44) +#define V4L2_CID_MPEG_MFC51_VIDEO_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 45) +#define V4L2_CID_MPEG_VIDEO_H264_PREPEND_SPSPPS_TO_IDR (V4L2_CID_MPEG_MFC_BASE + 46) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_DECODER_IMMEDIATE_DISPLAY (V4L2_CID_MPEG_MFC_BASE + 47) +#define V4L2_CID_MPEG_VIDEO_DECODER_DECODING_TIMESTAMP_MODE (V4L2_CID_MPEG_MFC_BASE + 48) +#define V4L2_CID_MPEG_VIDEO_DECODER_WAIT_DECODING_START (V4L2_CID_MPEG_MFC_BASE + 49) +#define V4L2_CID_MPEG_VIDEO_QOS_RATIO (V4L2_CID_MPEG_MFC_BASE + 50) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 51) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 52) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 53) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 54) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 55) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT3 (V4L2_CID_MPEG_MFC_BASE + 56) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT4 (V4L2_CID_MPEG_MFC_BASE + 57) +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT5 (V4L2_CID_MPEG_MFC_BASE + 58) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_HIERARCHICAL_CODING_LAYER_BIT6 
(V4L2_CID_MPEG_MFC_BASE + 59) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_VERSION (V4L2_CID_MPEG_MFC_BASE + 60) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 61) +#define V4L2_CID_MPEG_VIDEO_VP8_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 62) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP8_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 63) +#define V4L2_CID_MPEG_VIDEO_VP8_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 64) +#define V4L2_CID_MPEG_VIDEO_VP8_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 65) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_OF_PARTITIONS (V4L2_CID_MPEG_MFC_BASE + 66) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_LEVEL (V4L2_CID_MPEG_MFC_BASE + 67) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_FILTER_SHARPNESS (V4L2_CID_MPEG_MFC_BASE + 68) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_GOLDEN_FRAMESEL (V4L2_CID_MPEG_MFC_BASE + 69) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_GF_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 70) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 71) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER0 (V4L2_CID_MPEG_MFC_BASE + 72) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER1 (V4L2_CID_MPEG_MFC_BASE + 73) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_HIERARCHY_QP_LAYER2 (V4L2_CID_MPEG_MFC_BASE + 74) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 75) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_DISABLE_INTRA_MD4X4 (V4L2_CID_MPEG_MFC_BASE + 76) +#define V4L2_CID_MPEG_MFC70_VIDEO_VP8_NUM_TEMPORAL_LAYER (V4L2_CID_MPEG_MFC_BASE + 77) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT (V4L2_CID_MPEG_MFC_BASE + 78) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 79) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 80) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 81) +#define V4L2_CID_MPEG_VIDEO_VP8_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 82) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_GET_VERSION_INFO (V4L2_CID_MPEG_MFC_BASE + 91) +#define V4L2_CID_MPEG_MFC_GET_EXTRA_BUFFER_SIZE (V4L2_CID_MPEG_MFC_BASE + 92) +#define V4L2_CID_MPEG_MFC_SET_DUAL_DPB_MODE (V4L2_CID_MPEG_MFC_BASE + 93) +#define V4L2_CID_MPEG_MFC_SET_DYNAMIC_DPB_MODE (V4L2_CID_MPEG_MFC_BASE + 95) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_SET_USER_SHARED_HANDLE (V4L2_CID_MPEG_MFC_BASE + 96) +#define V4L2_CID_MPEG_MFC_GET_EXT_INFO (V4L2_CID_MPEG_MFC_BASE + 97) +#define V4L2_CID_MPEG_MFC_SET_BUF_PROCESS_TYPE (V4L2_CID_MPEG_MFC_BASE + 98) +#define V4L2_CID_MPEG_MFC_GET_10BIT_INFO (V4L2_CID_MPEG_MFC_BASE + 99) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_H264_ENABLE_LTR (V4L2_CID_MPEG_MFC_BASE + 100) +#define V4L2_CID_MPEG_MFC_H264_MARK_LTR (V4L2_CID_MPEG_MFC_BASE + 101) +#define V4L2_CID_MPEG_MFC_H264_USE_LTR (V4L2_CID_MPEG_MFC_BASE + 102) +#define V4L2_CID_MPEG_VIDEO_MULTI_SLICE_MAX_MB_ROW (V4L2_CID_MPEG_MFC_BASE + 103) +/* WARNING: DO NOT EDIT, AUTO-GENERATED 
CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_H264_BASE_PRIORITY (V4L2_CID_MPEG_MFC_BASE + 104) +#define V4L2_CID_MPEG_MFC_CONFIG_QP (V4L2_CID_MPEG_MFC_BASE + 105) +#define V4L2_CID_MPEG_MFC_H264_VUI_RESTRICTION_ENABLE (V4L2_CID_MPEG_MFC_BASE + 106) +#define V4L2_CID_MPEG_MFC_GET_DRIVER_INFO (V4L2_CID_MPEG_MFC_BASE + 107) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC_CONFIG_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 108) +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 110) +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 111) +#define V4L2_CID_MPEG_VIDEO_HEVC_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 112) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 113) +#define V4L2_CID_MPEG_VIDEO_HEVC_B_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 114) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 115) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_TYPE (V4L2_CID_MPEG_MFC_BASE + 116) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +enum v4l2_mpeg_video_hevc_hierarchical_coding_type { + V4L2_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_B = 0, + V4L2_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_P = 1, +}; +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 117) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 118) +#define V4L2_CID_MPEG_VIDEO_HEVC_PROFILE (V4L2_CID_MPEG_MFC_BASE + 120) +#define V4L2_CID_MPEG_VIDEO_HEVC_LEVEL (V4L2_CID_MPEG_MFC_BASE + 121) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 122) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TIER_FLAG (V4L2_CID_MPEG_MFC_BASE + 123) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_PARTITION_DEPTH (V4L2_CID_MPEG_MFC_BASE + 124) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 125) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_DISABLE (V4L2_CID_MPEG_MFC_BASE + 126) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_SLICE_BOUNDARY (V4L2_CID_MPEG_MFC_BASE + 127) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_BETA_OFFSET_DIV2 (V4L2_CID_MPEG_MFC_BASE + 128) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LF_TC_OFFSET_DIV2 (V4L2_CID_MPEG_MFC_BASE + 129) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_TYPE (V4L2_CID_MPEG_MFC_BASE + 130) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 131) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LOSSLESS_CU_ENABLE (V4L2_CID_MPEG_MFC_BASE + 132) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_CONST_INTRA_PRED_ENABLE (V4L2_CID_MPEG_MFC_BASE + 133) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WAVEFRONT_ENABLE (V4L2_CID_MPEG_MFC_BASE + 134) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_LTR_ENABLE (V4L2_CID_MPEG_MFC_BASE + 135) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_USER_REF (V4L2_CID_MPEG_MFC_BASE + 136) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STORE_REF (V4L2_CID_MPEG_MFC_BASE + 137) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define 
V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIGN_DATA_HIDING (V4L2_CID_MPEG_MFC_BASE + 138) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_GENERAL_PB_ENABLE (V4L2_CID_MPEG_MFC_BASE + 139) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_TEMPORAL_ID_ENABLE (V4L2_CID_MPEG_MFC_BASE + 140) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_STRONG_SMOTHING_FLAG (V4L2_CID_MPEG_MFC_BASE + 141) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_MAX_NUM_MERGE_MV_MINUS1 (V4L2_CID_MPEG_MFC_BASE + 142) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_DARK (V4L2_CID_MPEG_MFC_BASE + 143) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_SMOOTH (V4L2_CID_MPEG_MFC_BASE + 144) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_STATIC (V4L2_CID_MPEG_MFC_BASE + 145) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_ADAPTIVE_RC_ACTIVITY (V4L2_CID_MPEG_MFC_BASE + 146) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_INTRA_PU_SPLIT (V4L2_CID_MPEG_MFC_BASE + 147) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_DISABLE_TMV_PREDICTION (V4L2_CID_MPEG_MFC_BASE + 148) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_WITHOUT_STARTCODE_ENABLE (V4L2_CID_MPEG_MFC_BASE + 149) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_QP_INDEX_CR (V4L2_CID_MPEG_MFC_BASE + 150) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_QP_INDEX_CB (V4L2_CID_MPEG_MFC_BASE + 151) +#define V4L2_CID_MPEG_MFC90_VIDEO_HEVC_SIZE_OF_LENGTH_FIELD (V4L2_CID_MPEG_MFC_BASE + 152) +#define V4L2_CID_MPEG_VIDEO_HEVC_PREPEND_SPSPPS_TO_IDR (V4L2_CID_MPEG_MFC_BASE + 153) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 154) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 155) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 156) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 157) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT3 (V4L2_CID_MPEG_MFC_BASE + 158) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT4 (V4L2_CID_MPEG_MFC_BASE + 159) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT5 (V4L2_CID_MPEG_MFC_BASE + 160) +#define V4L2_CID_MPEG_VIDEO_HEVC_HIERARCHICAL_CODING_LAYER_BIT6 (V4L2_CID_MPEG_MFC_BASE + 161) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_VERSION (V4L2_CID_MPEG_MFC_BASE + 163) +#define V4L2_CID_MPEG_VIDEO_VP9_RC_FRAME_RATE (V4L2_CID_MPEG_MFC_BASE + 164) +#define V4L2_CID_MPEG_VIDEO_VP9_MIN_QP (V4L2_CID_MPEG_MFC_BASE + 165) +#define V4L2_CID_MPEG_VIDEO_VP9_MAX_QP (V4L2_CID_MPEG_MFC_BASE + 166) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_I_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 167) +#define V4L2_CID_MPEG_VIDEO_VP9_P_FRAME_QP (V4L2_CID_MPEG_MFC_BASE + 168) +#define V4L2_CID_MPEG_VIDEO_VP9_GOLDEN_FRAMESEL (V4L2_CID_MPEG_MFC_BASE + 169) +#define V4L2_CID_MPEG_VIDEO_VP9_GF_REFRESH_PERIOD (V4L2_CID_MPEG_MFC_BASE + 170) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHY_QP_ENABLE (V4L2_CID_MPEG_MFC_BASE + 171) +#define 
V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_QP (V4L2_CID_MPEG_MFC_BASE + 172) +#define V4L2_CID_MPEG_VIDEO_VP9_REF_NUMBER_FOR_PFRAMES (V4L2_CID_MPEG_MFC_BASE + 173) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER (V4L2_CID_MPEG_MFC_BASE + 174) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_CH (V4L2_CID_MPEG_MFC_BASE + 175) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT0 (V4L2_CID_MPEG_MFC_BASE + 176) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT1 (V4L2_CID_MPEG_MFC_BASE + 177) +#define V4L2_CID_MPEG_VIDEO_VP9_HIERARCHICAL_CODING_LAYER_BIT2 (V4L2_CID_MPEG_MFC_BASE + 178) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_MAX_PARTITION_DEPTH (V4L2_CID_MPEG_MFC_BASE + 179) +#define V4L2_CID_MPEG_VIDEO_VP9_DISABLE_INTRA_PU_SPLIT (V4L2_CID_MPEG_MFC_BASE + 180) +#define V4L2_CID_MPEG_VIDEO_DISABLE_IVF_HEADER (V4L2_CID_MPEG_MFC_BASE + 181) +#define V4L2_CID_MPEG_VIDEO_ROI_CONTROL (V4L2_CID_MPEG_MFC_BASE + 190) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_ROI_ENABLE (V4L2_CID_MPEG_MFC_BASE + 191) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 201) +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 202) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 203) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H263_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 204) +#define V4L2_CID_MPEG_VIDEO_VP8_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 205) +#define V4L2_CID_MPEG_VIDEO_VP9_MAX_QP_P (V4L2_CID_MPEG_MFC_BASE + 206) +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 207) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 208) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 209) +#define V4L2_CID_MPEG_VIDEO_H263_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 210) +#define V4L2_CID_MPEG_VIDEO_VP8_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 211) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_VP9_MIN_QP_P (V4L2_CID_MPEG_MFC_BASE + 212) +#define V4L2_CID_MPEG_VIDEO_H264_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 213) +#define V4L2_CID_MPEG_VIDEO_HEVC_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 214) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MAX_QP_B (V4L2_CID_MPEG_MFC_BASE + 215) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_CID_MPEG_VIDEO_H264_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 216) +#define V4L2_CID_MPEG_VIDEO_HEVC_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 217) +#define V4L2_CID_MPEG_VIDEO_MPEG4_MIN_QP_B (V4L2_CID_MPEG_MFC_BASE + 218) +#endif +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ diff --git a/libvirtualdisplay/Android.mk b/libvirtualdisplay/Android.mk new file mode 100644 index 0000000..b7bd74b --- /dev/null +++ b/libvirtualdisplay/Android.mk @@ -0,0 +1,52 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false + +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils libexynosv4l2 \ + libhwcutils libdisplay libmpp libsync + +LOCAL_CFLAGS += -DLOG_TAG=\"virtual\" +LOCAL_CFLAGS += -DHLOG_CODE=3 + +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp + +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppdisplay +else +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libdisplay +endif + +LOCAL_SRC_FILES := \ + ExynosVirtualDisplay.cpp + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libvirtualdisplaymodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libvirtualdisplay + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) diff --git a/libvirtualdisplay/ExynosVirtualDisplay.cpp b/libvirtualdisplay/ExynosVirtualDisplay.cpp new file mode 100644 index 0000000..da2f86b --- /dev/null +++ b/libvirtualdisplay/ExynosVirtualDisplay.cpp @@ -0,0 +1,608 @@ +#define LOG_NDEBUG 0 +#undef LOG_TAG +#define LOG_TAG "ExynosVirtualDisplay" +#include "ExynosHWC.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosG2DWrapper.h" +#include "ExynosVirtualDisplay.h" +#include + +ExynosVirtualDisplay::ExynosVirtualDisplay(struct exynos5_hwc_composer_device_1_t *pdev) : + ExynosDisplay(1), + mWidth(0), + mHeight(0), + mDisplayWidth(0), + mDisplayHeight(0), + mIsWFDState(false), + mIsRotationState(false), + mIsSecureDRM(false), + mIsNormalDRM(false), + mPhysicallyLinearBuffer(NULL), + mPhysicallyLinearBufferAddr(0), + mPresentationMode(0), + mDeviceOrientation(0), + mFrameBufferTargetTransform(0), + mCompositionType(COMPOSITION_GLES), + mPrevCompositionType(COMPOSITION_GLES), + mGLESFormat(HAL_PIXEL_FORMAT_RGBA_8888), + mSinkUsage(GRALLOC_USAGE_HW_COMPOSER) +{ + this->mHwc = pdev; + mMPPs[0] = new ExynosMPPModule(this, WFD_GSC_IDX); + mG2D = new ExynosG2DWrapper(NULL, NULL, this); + + for (int i = 0; i < NUM_FB_TARGET; i++) { + fbTargetInfo[i].fd = -1; + fbTargetInfo[i].mappedAddr = 0; + fbTargetInfo[i].mapSize = 0; + } + + memset(mDstHandles, 0x0, sizeof(int) * MAX_BUFFER_COUNT); + mPrevDisplayFrame.left = 0; + mPrevDisplayFrame.top = 0; + mPrevDisplayFrame.right = 0; + mPrevDisplayFrame.bottom = 0; + memset(mPrevFbHandle, 0x0, sizeof(int) * NUM_FRAME_BUFFER); +} + +ExynosVirtualDisplay::~ExynosVirtualDisplay() +{ + delete mMPPs[0]; + delete mG2D; + unmapAddrFBTarget(); +} + +int ExynosVirtualDisplay::prepare(hwc_display_contents_1_t* contents) +{ + ALOGV("preparing %u layers for virtual", contents->numHwLayers); + hwc_layer_1_t *video_layer = NULL; + int ret = 0; + mIsRotationState = false; + hwc_layer_1_t *overlay_layer = 
NULL; + hwc_layer_1_t *fb_layer = NULL; + + mCompositionType = COMPOSITION_GLES; + mIsSecureDRM = false; + mIsNormalDRM = false; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.flags & HWC_SCREENSHOT_ANIMATOR_LAYER) { + ALOGV("include rotation animation layer"); + mIsRotationState = true; + overlay_layer = &layer; + break; + } + + if (layer.flags & HWC_SKIP_LAYER) { + ALOGV("include skipped layer"); + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(h->flags) != NORMAL_DRM || !mIsWFDState) + continue; + } else + continue; + } + + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if(mMPPs[0]->isProcessingSupported(layer, h->format, false) > 0) { + if (getDrmMode(h->flags) == SECURE_DRM) { + layer.compositionType = HWC_OVERLAY; + mIsSecureDRM = true; + overlay_layer = &layer; + ALOGV("include secure drm layer"); + continue; + } + if (getDrmMode(h->flags) == NORMAL_DRM) { + layer.compositionType = HWC_OVERLAY; + mIsNormalDRM = true; + overlay_layer = &layer; + ALOGV("include normal drm layer"); + continue; + } +#ifdef VIRTUAL_DISPLAY_VIDEO_IS_OVERLAY + if ((h->flags & GRALLOC_USAGE_EXTERNAL_DISP) && mIsWFDState && + (h->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M || + h->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV || + h->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED)) { + layer.compositionType = HWC_OVERLAY; + overlay_layer = &layer; + ALOGV("include normal video layer as overlay"); + continue; + } +#endif + } + if (layer.compositionType == HWC_FRAMEBUFFER) { + fb_layer = &layer; + ALOGV("include fb layer"); + } + } + } + + if (overlay_layer && fb_layer) + mCompositionType = COMPOSITION_MIXED; + else if (overlay_layer) + mCompositionType = COMPOSITION_HWC; + + ALOGV("mCompositionType 0x%x, mPrevCompositionType 0x%x, overlay_layer 0x%x, fb_layer 0x%x", + mCompositionType, mPrevCompositionType, overlay_layer, fb_layer); + + if (mPrevCompositionType != mCompositionType) { + ExynosMPPModule &gsc = *mMPPs[0]; + gsc.mDstBuffers[gsc.mCurrentBuf] = NULL; + gsc.mDstBufFence[gsc.mCurrentBuf] = -1; + gsc.cleanupM2M(); + } + + mSinkUsage = GRALLOC_USAGE_HW_COMPOSER; + + if (mIsSecureDRM) + mSinkUsage |= GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | + GRALLOC_USAGE_PROTECTED | + GRALLOC_USAGE_PHYSICALLY_LINEAR; + else if (mIsNormalDRM) + mSinkUsage |= GRALLOC_USAGE_PRIVATE_NONSECURE; + ALOGV("Sink Buffer's Usage: 0x%x", mSinkUsage); + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + ALOGV("\tlayer %u: framebuffer target", i); + + if (!mIsSecureDRM && !mIsNormalDRM) + calcDisplayRect(layer); + layer.transform = mFrameBufferTargetTransform; + continue; + } + + if (mIsRotationState) { + layer.compositionType = HWC_OVERLAY; + layer.flags = HWC_SKIP_RENDERING; + if (mIsSecureDRM) + ret = true; + continue; + } + + if (layer.compositionType == HWC_BACKGROUND) { + ALOGV("\tlayer %u: background layer", i); + dumpLayer(&layer); + continue; + } + + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + + if (overlay_layer && (h->flags & GRALLOC_USAGE_EXTERNAL_DISP)) { + if (mMPPs[0]->isProcessingSupported(layer, h->format, false) > 0) { + if (!video_layer) { + if (mIsSecureDRM) + ret = mG2D->InitSecureG2D(); + video_layer = &layer; + calcDisplayRect(layer); + + 
ALOGV("\tlayer %u: video layer, mIsSecureDRM %d, mPhysicallyLinearBuffer 0x%x", + i, mIsSecureDRM, mPhysicallyLinearBuffer); + dumpLayer(&layer); + continue; + } + } else { + layer.compositionType = HWC_OVERLAY; + layer.flags = HWC_SKIP_RENDERING; + ALOGV("\tlayer %u: skip drm layer", i); + continue; + } + } + } + layer.compositionType = HWC_FRAMEBUFFER; + dumpLayer(&layer); + } + + if (!ret && mPhysicallyLinearBufferAddr) { + mG2D->TerminateSecureG2D(); + unmapAddrFBTarget(); + } + + return 0; +} + +bool ExynosVirtualDisplay::isLayerFullSize(hwc_layer_1_t *layer) +{ + if (layer == NULL) { + ALOGE("layer is null"); + return false; + } + + if (layer->displayFrame.left == 0 && + layer->displayFrame.top == 0 && + layer->displayFrame.right == mWidth && + layer->displayFrame.bottom == mHeight) { + return true; + } else { + return false; + } +} + +bool ExynosVirtualDisplay::isLayerResized(hwc_layer_1_t *layer) +{ + if (layer == NULL) { + ALOGE("layer is null"); + return false; + } + + if (layer->displayFrame.left == mPrevDisplayFrame.left && + layer->displayFrame.top == mPrevDisplayFrame.top && + layer->displayFrame.right == mPrevDisplayFrame.right && + layer->displayFrame.bottom == mPrevDisplayFrame.bottom) { + return false; + } else { + mPrevDisplayFrame.left = layer->displayFrame.left; + mPrevDisplayFrame.top = layer->displayFrame.top; + mPrevDisplayFrame.right = layer->displayFrame.right; + mPrevDisplayFrame.bottom = layer->displayFrame.bottom; + return true; + } +} + +bool ExynosVirtualDisplay::isNewHandle(void *dstHandle) +{ + int i = 0; + for (i = 0; i < MAX_BUFFER_COUNT; i++) { + if (mDstHandles[i] == dstHandle) { + return false; + } else if (mDstHandles[i] == NULL) { + mDstHandles[i] = dstHandle; + break; + } + } + + if (i == MAX_BUFFER_COUNT) { + memset(mDstHandles, 0x0, sizeof(int) * MAX_BUFFER_COUNT); + mDstHandles[0] = dstHandle; + } + return true; +} + +int ExynosVirtualDisplay::set(hwc_display_contents_1_t* contents) +{ + hwc_layer_1_t *overlay_layer = NULL; + hwc_layer_1_t *target_layer = NULL; + hwc_layer_1_t *fb_layer[NUM_FRAME_BUFFER] = {NULL}; + int number_of_fb = 0; + int IsNormalDRMWithSkipLayer = false; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.flags & HWC_SKIP_LAYER) { + ALOGV("skipping layer %d", i); + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(h->flags) == NORMAL_DRM && mIsWFDState) { + ALOGV("skipped normal drm layer %d", i); + IsNormalDRMWithSkipLayer = true; + } + } + continue; + } + + if (layer.compositionType == HWC_FRAMEBUFFER) { + if (!layer.handle) + continue; + ALOGV("framebuffer layer %d", i); + fb_layer[number_of_fb++] = &layer; + if (number_of_fb >= NUM_FRAME_BUFFER) + number_of_fb = NUM_FRAME_BUFFER-1; + } + + if (layer.compositionType == HWC_OVERLAY) { + if (!layer.handle) + continue; + + if (layer.flags & HWC_SKIP_RENDERING) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + + ALOGV("overlay layer %d", i); + overlay_layer = &layer; + continue; + } + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + if (!layer.handle) + continue; + + ALOGV("FB target layer %d", i); + + target_layer = &layer; + continue; + } + } + + if (contents->outbuf == NULL) { + ALOGE("BufferQueue is abandoned"); + return 0; + } + + if (target_layer) { + int ret = 0; + ExynosMPPModule &gsc = *mMPPs[0]; + gsc.mDstBuffers[gsc.mCurrentBuf] = contents->outbuf; + gsc.mDstBufFence[gsc.mCurrentBuf] = contents->outbufAcquireFenceFd; 
+ private_handle_t *dstHandle = private_handle_t::dynamicCast(contents->outbuf); + + if (IsNormalDRMWithSkipLayer) { + if (target_layer->acquireFenceFd >= 0) + contents->retireFenceFd = target_layer->acquireFenceFd; + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } + mG2D->runCompositor(*target_layer, dstHandle, 0, 0xff, 0xff000000, BLIT_OP_SOLID_FILL, true, 0, 0, 0); + } else if (mCompositionType == COMPOSITION_GLES) { + ALOGV("COMPOSITION_GLES"); + if (target_layer->acquireFenceFd >= 0) + contents->retireFenceFd = target_layer->acquireFenceFd; + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } + } else if (overlay_layer && mCompositionType == COMPOSITION_MIXED) { + void *newFbHandle[NUM_FRAME_BUFFER] = {NULL}; + for(size_t i = 0; i < number_of_fb; i++) { + newFbHandle[i] = (void *)fb_layer[i]->handle; + } + + if (isLayerResized(overlay_layer) || + (!isLayerFullSize(overlay_layer) && number_of_fb > 0 && memcmp(mPrevFbHandle, newFbHandle, sizeof(int) * NUM_FRAME_BUFFER) != 0)) { + memset(mDstHandles, 0x0, sizeof(int) * MAX_BUFFER_COUNT); + } + + if (isNewHandle(dstHandle)) { + if (mIsSecureDRM) { + private_handle_t *secureHandle = private_handle_t::dynamicCast(mPhysicallyLinearBuffer); + ret = mG2D->runSecureCompositor(*target_layer, dstHandle, secureHandle, 0xff, 0xff000000, BLIT_OP_SOLID_FILL, true); + } else { + ret = mG2D->runCompositor(*target_layer, dstHandle, 0, 0xff, 0xff000000, BLIT_OP_SOLID_FILL, true, 0, 0, 0); + } + } + if (number_of_fb > 0) { + ALOGV("COMPOSITION_MIXED"); + ret = gsc.processM2M(*overlay_layer, dstHandle->format, NULL, false); + if (ret < 0) + ALOGE("failed to configure gscaler for video layer"); + + if (gsc.mDstConfig.releaseFenceFd >= 0) { + if (sync_wait(gsc.mDstConfig.releaseFenceFd, 1000) < 0) + ALOGE("sync_wait error"); + close(gsc.mDstConfig.releaseFenceFd); + gsc.mDstConfig.releaseFenceFd = -1; + } + if (target_layer->acquireFenceFd > 0) { + close(target_layer->acquireFenceFd); + target_layer->acquireFenceFd = -1; + } + + if (mIsSecureDRM) { + ALOGV("Secure DRM playback"); + private_handle_t *targetBufferHandle = private_handle_t::dynamicCast(target_layer->handle); + void* srcAddr = getMappedAddrFBTarget(targetBufferHandle->fd); + private_handle_t *secureHandle = private_handle_t::dynamicCast(mPhysicallyLinearBuffer); + + if (memcmp(mPrevFbHandle, newFbHandle, sizeof(int) * NUM_FRAME_BUFFER) != 0) { + ALOGV("Buffer of fb layer is changed"); + memcpy(mPrevFbHandle, newFbHandle, sizeof(int) * NUM_FRAME_BUFFER); + if ((srcAddr != NULL) && mPhysicallyLinearBufferAddr) { + memcpy((void *)mPhysicallyLinearBufferAddr, (void *)srcAddr, mWidth * mHeight * 4); + } else { + ALOGE("can't memcpy for secure G2D input buffer"); + } + } + + ret = mG2D->runSecureCompositor(*target_layer, dstHandle, secureHandle, 0xff, + 0, BLIT_OP_SRC_OVER, false); + if (ret < 0) { + mG2D->TerminateSecureG2D(); + unmapAddrFBTarget(); + ALOGE("runSecureCompositor is failed"); + } + } else { /* Normal video layer + Blending */ + ALOGV("Normal DRM playback"); + ret = mG2D->runCompositor(*target_layer, dstHandle, 0, 0xff, 0, + BLIT_OP_SRC_OVER, false, 0, 0, 0); + if (ret < 0) { + ALOGE("runCompositor is failed"); + } + + if (target_layer->releaseFenceFd > 0) { + close(target_layer->releaseFenceFd); + target_layer->releaseFenceFd = -1; + } + } + } + } else if (overlay_layer) { + ALOGV("COMPOSITION_HWC"); + ret = 
gsc.processM2M(*overlay_layer, dstHandle->format, NULL, false); + if (ret < 0) + ALOGE("failed to configure gscaler for video layer"); + contents->retireFenceFd = gsc.mDstConfig.releaseFenceFd; + if (target_layer->acquireFenceFd > 0) { + close(target_layer->acquireFenceFd); + target_layer->acquireFenceFd = -1; + } + } else { + ALOGV("animation layer skip"); + if (target_layer->acquireFenceFd >= 0) + contents->retireFenceFd = target_layer->acquireFenceFd; + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } + } + } + + mPrevCompositionType = mCompositionType; + + return 0; +} + +void ExynosVirtualDisplay::calcDisplayRect(hwc_layer_1_t &layer) +{ + bool needToTransform = false; + unsigned int newTransform = 0; + unsigned int calc_w = (mWidth - mDisplayWidth) >> 1; + unsigned int calc_h = (mHeight - mDisplayHeight) >> 1; + + if (layer.compositionType) { + if (mPresentationMode) { + /* Use EXTERNAL_TB directly (DRM-extention) */ + newTransform = layer.transform; + needToTransform = false; + } else if (mFrameBufferTargetTransform) { + switch(mFrameBufferTargetTransform) { + case HAL_TRANSFORM_ROT_90: + newTransform = 0; + needToTransform = true; + break; + case HAL_TRANSFORM_ROT_180: + newTransform = HAL_TRANSFORM_ROT_90; + needToTransform = false; + break; + case HAL_TRANSFORM_ROT_270: + newTransform = HAL_TRANSFORM_ROT_180; + needToTransform = true; + break; + default: + newTransform = 0; + needToTransform = false; + break; + } + } else { + switch(mDeviceOrientation) { + case 1: /* HAL_TRANSFORM_ROT_90 */ + newTransform = HAL_TRANSFORM_ROT_270; + needToTransform = false; + break; + case 3: /* HAL_TRANSFORM_ROT_270 */ + newTransform = HAL_TRANSFORM_ROT_90; + needToTransform = false; + break; + default: /* Default | HAL_TRANSFORM_ROT_180 */ + newTransform = 0; + needToTransform = false; + break; + } + } + + if (layer.compositionType == HWC_OVERLAY) { + if (needToTransform) { + mHwc->mVirtualDisplayRect.left = layer.displayFrame.left + calc_h; + mHwc->mVirtualDisplayRect.top = layer.displayFrame.top + calc_w; + mHwc->mVirtualDisplayRect.width = WIDTH(layer.displayFrame) - (calc_h << 1); + mHwc->mVirtualDisplayRect.height = HEIGHT(layer.displayFrame) - (calc_w << 1); + } else { + mHwc->mVirtualDisplayRect.left = layer.displayFrame.left + calc_w; + mHwc->mVirtualDisplayRect.top = layer.displayFrame.top + calc_h; + mHwc->mVirtualDisplayRect.width = WIDTH(layer.displayFrame) - (calc_w << 1); + mHwc->mVirtualDisplayRect.height = HEIGHT(layer.displayFrame) - (calc_h << 1); + } + + if (layer.displayFrame.left < 0 || layer.displayFrame.top < 0 || + mWidth < (unsigned int)WIDTH(layer.displayFrame) || mHeight < (unsigned int)HEIGHT(layer.displayFrame)) { + if (needToTransform) { + mHwc->mVirtualDisplayRect.left = 0 + calc_h; + mHwc->mVirtualDisplayRect.top = 0 + calc_w; + + mHwc->mVirtualDisplayRect.width = mWidth - (calc_h << 1); + mHwc->mVirtualDisplayRect.height = mHeight - (calc_w << 1); + } else { + mHwc->mVirtualDisplayRect.left = 0 + calc_w; + mHwc->mVirtualDisplayRect.top = 0 + calc_h; + + mHwc->mVirtualDisplayRect.width = mWidth - (calc_w << 1); + mHwc->mVirtualDisplayRect.height = mHeight - (calc_h << 1); + } + } + } else { /* HWC_FRAMEBUFFER_TARGET */ + if (needToTransform) { + mHwc->mVirtualDisplayRect.width = (mDisplayHeight * mDisplayHeight) / mDisplayWidth; + mHwc->mVirtualDisplayRect.height = mDisplayHeight; + mHwc->mVirtualDisplayRect.left = (mDisplayWidth - mHwc->mVirtualDisplayRect.width) / 2; + 
mHwc->mVirtualDisplayRect.top = 0; + } else { + mHwc->mVirtualDisplayRect.left = 0; + mHwc->mVirtualDisplayRect.top = 0; + mHwc->mVirtualDisplayRect.width = mDisplayWidth; + mHwc->mVirtualDisplayRect.height = mDisplayHeight; + } + } + } +} + +void* ExynosVirtualDisplay::getMappedAddrFBTarget(int fd) +{ + for (int i = 0; i < NUM_FB_TARGET; i++) { + if (fbTargetInfo[i].fd == fd) + return fbTargetInfo[i].mappedAddr; + + if (fbTargetInfo[i].fd == -1) { + fbTargetInfo[i].fd = fd; + fbTargetInfo[i].mappedAddr = mmap(NULL, mWidth * mHeight * 4, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0); + fbTargetInfo[i].mapSize = mWidth * mHeight * 4; + return fbTargetInfo[i].mappedAddr; + } + } + return 0; +} + +void ExynosVirtualDisplay::unmapAddrFBTarget() +{ + for (int i = 0; i < NUM_FB_TARGET; i++) { + if (fbTargetInfo[i].fd != -1) { + munmap((void *)fbTargetInfo[i].mappedAddr, fbTargetInfo[i].mapSize); + fbTargetInfo[i].fd = -1; + fbTargetInfo[i].mappedAddr = 0; + fbTargetInfo[i].mapSize = 0; + } + } +} + +void ExynosVirtualDisplay::init() +{ + +} + +void ExynosVirtualDisplay::init(hwc_display_contents_1_t* contents) +{ + init(); +} + +void ExynosVirtualDisplay::deInit() +{ + ExynosMPPModule &gsc = *mMPPs[0]; + gsc.mDstBuffers[gsc.mCurrentBuf] = NULL; + gsc.mDstBufFence[gsc.mCurrentBuf] = -1; + gsc.cleanupM2M(); + mG2D->TerminateSecureG2D(); + unmapAddrFBTarget(); + mPrevCompositionType = COMPOSITION_GLES; +} + +int ExynosVirtualDisplay::blank() +{ + return 0; +} + +int ExynosVirtualDisplay::getConfig() +{ + return 0; +} + +int32_t ExynosVirtualDisplay::getDisplayAttributes(const uint32_t attribute) +{ + return 0; +} diff --git a/libvirtualdisplay/ExynosVirtualDisplay.h b/libvirtualdisplay/ExynosVirtualDisplay.h new file mode 100644 index 0000000..1107756 --- /dev/null +++ b/libvirtualdisplay/ExynosVirtualDisplay.h @@ -0,0 +1,79 @@ +#ifndef EXYNOS_VIRTUAL_DISPLAY_H +#define EXYNOS_VIRTUAL_DISPLAY_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +#define NUM_FB_TARGET 4 +#define NUM_FRAME_BUFFER 5 +#define HWC_SKIP_RENDERING 0x80000000 +#define MAX_BUFFER_COUNT 8 + +class ExynosG2DWrapper; + +class ExynosVirtualDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosVirtualDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosVirtualDisplay(); + + virtual int prepare(hwc_display_contents_1_t* contents); + virtual int set(hwc_display_contents_1_t* contents); + virtual void calcDisplayRect(hwc_layer_1_t &layer); + virtual void* getMappedAddrFBTarget(int fd); + virtual void unmapAddrFBTarget(); + virtual int blank(); + virtual int getConfig(); + virtual int32_t getDisplayAttributes(const uint32_t attribute); + bool isNewHandle(void *dstHandle); + bool isLayerResized(hwc_layer_1_t *layer); + bool isLayerFullSize(hwc_layer_1_t *layer); + virtual void init(); + virtual void init(hwc_display_contents_1_t* contents); + virtual void deInit(); + + /* Fields */ + enum CompositionType { + COMPOSITION_UNKNOWN = 0, + COMPOSITION_GLES = 1, + COMPOSITION_HWC = 2, + COMPOSITION_MIXED = COMPOSITION_GLES | COMPOSITION_HWC + }; + + struct FB_TARGET_INFO { + int32_t fd; + void *mappedAddr; + int mapSize; + }; + + unsigned int mWidth; + unsigned int mHeight; + unsigned int mDisplayWidth; + unsigned int mDisplayHeight; + + bool mIsWFDState; + bool mIsRotationState; + bool mIsSecureDRM; + bool mIsNormalDRM; + buffer_handle_t mPhysicallyLinearBuffer; + unsigned long mPhysicallyLinearBufferAddr; + struct FB_TARGET_INFO fbTargetInfo[NUM_FB_TARGET]; + + bool mPresentationMode; + unsigned int 
mDeviceOrientation; + unsigned int mFrameBufferTargetTransform; + + CompositionType mCompositionType; + CompositionType mPrevCompositionType; + int mGLESFormat; + int mSinkUsage; + + ExynosMPPModule *mMPPs[1]; + ExynosG2DWrapper *mG2D; + + void* mDstHandles[MAX_BUFFER_COUNT]; + hwc_rect_t mPrevDisplayFrame; + void* mPrevFbHandle[NUM_FRAME_BUFFER]; +}; + +#endif diff --git a/libvirtualdisplay/NOTICE b/libvirtualdisplay/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libvirtualdisplay/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libvppdisplay/Android.mk b/libvppdisplay/Android.mk new file mode 100644 index 0000000..ef311a1 --- /dev/null +++ b/libvppdisplay/Android.mk @@ -0,0 +1,74 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
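+# Note added in review, not part of the original makefile: this builds the
+# VPP/DECON display library (LOCAL_MODULE := libdisplay) from ExynosDisplay.cpp,
+# ExynosOverlayDisplay.cpp and ExynosDisplayResourceManager.cpp, plus
+# ExynosSecondaryDisplay.cpp when BOARD_USES_DUAL_DISPLAY is true. The HDMI and
+# virtual-display include paths are selected by BOARD_HDMI_INCAPABLE,
+# BOARD_USES_VIRTUAL_DISPLAY and BOARD_USES_VPP, which a device normally sets in
+# its BoardConfig.mk.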
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libexynosv4l2 libsync libhwcutils +#ifeq ($(BOARD_USES_FIMC), true) +#LOCAL_SHARED_LIBRARIES += libexynosfimc +#else +#LOCAL_SHARED_LIBRARIES += libexynosgscaler +#endif + +ifeq ($(BOARD_USES_FB_PHY_LINEAR),true) + LOCAL_SHARED_LIBRARIES += libfimg + LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/libfimg4x +endif + +LOCAL_C_INCLUDES := \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwc \ + $(LOCAL_PATH)/../libhwcutils \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule + +ifeq ($(BOARD_HDMI_INCAPABLE), true) + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libhdmi_dummy +else + LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvpphdmi +endif + +ifeq ($(BOARD_USES_VIRTUAL_DISPLAY), true) +ifeq ($(BOARD_USES_VPP), true) +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvppvirtualdisplay +else +LOCAL_C_INCLUDES += $(LOCAL_PATH)/../libvirtualdisplay +endif +endif + +LOCAL_SRC_FILES := \ + ExynosDisplay.cpp \ + ExynosOverlayDisplay.cpp \ + ExynosDisplayResourceManager.cpp + +ifeq ($(BOARD_USES_DUAL_DISPLAY), true) +LOCAL_SRC_FILES += ExynosSecondaryDisplay.cpp +endif + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libdisplay + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libvppdisplay/ExynosDisplay.cpp b/libvppdisplay/ExynosDisplay.cpp new file mode 100644 index 0000000..469c063 --- /dev/null +++ b/libvppdisplay/ExynosDisplay.cpp @@ -0,0 +1,3404 @@ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS + +//#define LOG_NDEBUG 0 +#define LOG_TAG "display" +#include "ExynosDisplay.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosHWCDebug.h" +#include +#include +#include +#if defined(USES_DUAL_DISPLAY) +#include "ExynosSecondaryDisplayModule.h" +#endif + +int getGCD(int a, int b) +{ + if (b == 0) + return a; + else + return getGCD(b, a%b); +} + +int getLCM(int a, int b) +{ + return (a*b)/getGCD(a,b); +} + +bool frameChanged(decon_frame *f1, decon_frame *f2) +{ + return f1->x != f2->x || + f1->y != f2->y || + f1->w != f2->w || + f1->h != f2->h || + f1->f_w != f2->f_w || + f1->f_h != f2->f_h; +} + +bool winConfigChanged(decon_win_config *c1, decon_win_config *c2) +{ + return c1->state != c2->state || + c1->fd_idma[0] != c2->fd_idma[0] || + c1->fd_idma[1] != c2->fd_idma[1] || + c1->fd_idma[2] != c2->fd_idma[2] || + frameChanged(&c1->src, &c2->src) || + frameChanged(&c1->dst, &c2->dst) || + c1->format != c2->format || + c1->blending != c2->blending || + c1->plane_alpha != c2->plane_alpha; +} + +void ExynosDisplay::dumpConfig(decon_win_config &c) +{ + DISPLAY_LOGD(eDebugWinConfig, "\tstate = %u", c.state); + if (c.state == c.DECON_WIN_STATE_COLOR) { + DISPLAY_LOGD(eDebugWinConfig, "\t\tcolor = %u", c.color); + } else if (c.state != c.DECON_WIN_STATE_DISABLED) { + DISPLAY_LOGD(eDebugWinConfig, "\t\tfd = %d, dma = %u " + "src_f_w = %u, src_f_h = %u, src_x = %d, src_y = %d, src_w = %u, src_h = %u, " + "dst_f_w = %u, dst_f_h = %u, dst_x = %d, dst_y = 
%d, dst_w = %u, dst_h = %u, " + "format = %u, blending = %u, protection = %u, transparent(x:%d, y:%d, w:%d, h:%d), " + "block(x:%d, y:%d, w:%d, h:%d)", + c.fd_idma[0], c.idma_type, + c.src.f_w, c.src.f_h, c.src.x, c.src.y, c.src.w, c.src.h, + c.dst.f_w, c.dst.f_h, c.dst.x, c.dst.y, c.dst.w, c.dst.h, + c.format, c.blending, c.protection, + c.transparent_area.x, c.transparent_area.y, c.transparent_area.w, c.transparent_area.h, + c.covered_opaque_area.x, c.covered_opaque_area.y, c.covered_opaque_area.w, c.covered_opaque_area.h); + } +} + +void ExynosDisplay::dumpConfig(decon_win_config &c, android::String8& result) +{ + result.appendFormat("\tstate = %u", c.state); + if (c.state == c.DECON_WIN_STATE_COLOR) { + result.appendFormat("\t\tcolor = %u", c.color); + } else { + result.appendFormat("\t\tfd = %d, dma = %u " + "src_f_w = %u, src_f_h = %u, src_x = %d, src_y = %d, src_w = %u, src_h = %u, " + "dst_f_w = %u, dst_f_h = %u, dst_x = %d, dst_y = %d, dst_w = %u, dst_h = %u, " + "format = %u, blending = %u, protection = %u, transparent(x:%d, y:%d, w:%d, h:%d), " + "block(x:%d, y:%d, w:%d, h:%d)\n", + c.fd_idma[0], c.idma_type, + c.src.f_w, c.src.f_h, c.src.x, c.src.y, c.src.w, c.src.h, + c.dst.f_w, c.dst.f_h, c.dst.x, c.dst.y, c.dst.w, c.dst.h, + c.format, c.blending, c.protection, + c.transparent_area.x, c.transparent_area.y, c.transparent_area.w, c.transparent_area.h, + c.covered_opaque_area.x, c.covered_opaque_area.y, c.covered_opaque_area.w, c.covered_opaque_area.h); + } +} + +enum decon_pixel_format halFormatToS3CFormat(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + return DECON_PIXEL_FORMAT_RGBA_8888; + case HAL_PIXEL_FORMAT_RGBX_8888: + return DECON_PIXEL_FORMAT_RGBX_8888; + case HAL_PIXEL_FORMAT_RGB_565: + return DECON_PIXEL_FORMAT_RGB_565; + case HAL_PIXEL_FORMAT_BGRA_8888: + return DECON_PIXEL_FORMAT_BGRA_8888; +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: + return DECON_PIXEL_FORMAT_BGRX_8888; +#endif + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + return DECON_PIXEL_FORMAT_YVU420M; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + return DECON_PIXEL_FORMAT_YUV420M; + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + return DECON_PIXEL_FORMAT_NV21M; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + return DECON_PIXEL_FORMAT_NV21; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B: + return DECON_PIXEL_FORMAT_NV12M; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B: + return DECON_PIXEL_FORMAT_NV12N_10B; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN: + return DECON_PIXEL_FORMAT_NV12N; + default: + return DECON_PIXEL_FORMAT_MAX; + } +} + +int S3CFormatToHalFormat(enum decon_pixel_format format) +{ + switch (format) { + case DECON_PIXEL_FORMAT_RGBA_8888: + return HAL_PIXEL_FORMAT_RGBA_8888; + case DECON_PIXEL_FORMAT_RGBX_8888: + return HAL_PIXEL_FORMAT_RGBX_8888; + case DECON_PIXEL_FORMAT_RGB_565: + return HAL_PIXEL_FORMAT_RGB_565; + case DECON_PIXEL_FORMAT_BGRA_8888: + return HAL_PIXEL_FORMAT_BGRA_8888; +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case DECON_PIXEL_FORMAT_BGRX_8888: + return HAL_PIXEL_FORMAT_BGRX_8888; +#endif + case DECON_PIXEL_FORMAT_YVU420M: + return HAL_PIXEL_FORMAT_EXYNOS_YV12_M; + case DECON_PIXEL_FORMAT_YUV420M: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M; + /* HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL ?? 
*/ + case DECON_PIXEL_FORMAT_NV21M: + return HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M; + case DECON_PIXEL_FORMAT_NV21: + return HAL_PIXEL_FORMAT_YCrCb_420_SP; + /* HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV, HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_S10B */ + case DECON_PIXEL_FORMAT_NV12M: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M; + case DECON_PIXEL_FORMAT_NV12N: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN; + case DECON_PIXEL_FORMAT_NV12N_10B: + return HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SPN_S10B; + default: + return -1; + } +} + +bool isFormatSupported(int format) +{ + return halFormatToS3CFormat(format) < DECON_PIXEL_FORMAT_MAX; +} + +enum decon_blending halBlendingToS3CBlending(int32_t blending) +{ + switch (blending) { + case HWC_BLENDING_NONE: + return DECON_BLENDING_NONE; + case HWC_BLENDING_PREMULT: + return DECON_BLENDING_PREMULT; + case HWC_BLENDING_COVERAGE: + return DECON_BLENDING_COVERAGE; + + default: + return DECON_BLENDING_MAX; + } +} + +bool isBlendingSupported(int32_t blending) +{ + return halBlendingToS3CBlending(blending) < DECON_BLENDING_MAX; +} + +#define NUMA(a) (sizeof(a) / sizeof(a [0])) +const char *deconFormat2str(uint32_t format) +{ + android::String8 result; + + for (unsigned int n1 = 0; n1 < NUMA(deconFormat); n1++) { + if (format == deconFormat[n1].format) { + return deconFormat[n1].desc; + } + } + + result.appendFormat("? %08x", format); + return result; +} + +enum vpp_rotate halTransformToHWRot(uint32_t halTransform) +{ + switch (halTransform) { + case HAL_TRANSFORM_FLIP_H: + return VPP_ROT_YFLIP; + case HAL_TRANSFORM_FLIP_V: + return VPP_ROT_XFLIP; + case HAL_TRANSFORM_ROT_180: + return VPP_ROT_180; + case HAL_TRANSFORM_ROT_90: + return VPP_ROT_90; + case (HAL_TRANSFORM_ROT_90|HAL_TRANSFORM_FLIP_H): + /* + * HAL: HAL_TRANSFORM_FLIP_H -> HAL_TRANSFORM_ROT_90 + * VPP: ROT_90 -> XFLIP + */ + return VPP_ROT_90_XFLIP; + case (HAL_TRANSFORM_ROT_90|HAL_TRANSFORM_FLIP_V): + /* + * HAL: HAL_TRANSFORM_FLIP_V -> HAL_TRANSFORM_ROT_90 + * VPP: ROT_90 -> YFLIP + */ + return VPP_ROT_90_YFLIP; + case HAL_TRANSFORM_ROT_270: + return VPP_ROT_270; + default: + return VPP_ROT_NORMAL; + } +} + +ExynosDisplay::ExynosDisplay(int __unused numGSCs) + : mDisplayFd(-1), + mType(0), + mPanelType(PANEL_LEGACY), + mDSCHSliceNum(WINUPDATE_DSC_H_SLICE_NUM), + mDSCYSliceSize(WINUPDATE_DSC_Y_SLICE_SIZE), + mXres(0), + mYres(0), + mXdpi(0), + mYdpi(0), + mVsyncPeriod(0), + mBlanked(true), + mHwc(NULL), + mAllocDevice(NULL), + mGrallocModule(NULL), + mLastFbWindow(NO_FB_NEEDED), + mVirtualOverlayFlag(0), + mBypassSkipStaticLayer(false), + mMPPLayers(0), + mYuvLayers(0), + mHasDrmSurface(false), + mFbNeeded(false), + mFirstFb(0), + mLastFb(0), + mFbWindow(0), + mForceFb(false), + mForceOverlayLayerIndex(-1), + mAllowedOverlays(5), + mOtfMode(OTF_OFF), + mGscUsed(false), + mMaxWindowOverlapCnt(NUM_HW_WINDOWS), + mUseSecureDMA(false), + mExternalMPPDstFormat(HAL_PIXEL_FORMAT_RGBX_8888), + mSkipStaticInitFlag(false), + mNumStaticLayers(0), + mLastRetireFenceFd(-1), + mFbPreAssigned(false), + mActiveConfigIndex(0), + mWinData(NULL) +{ + memset(mLastMPPMap, 0, sizeof(mLastMPPMap)); + memset(mLastHandles, 0, sizeof(mLastHandles)); + memset(&mLastConfigData, 0, sizeof(mLastConfigData)); + memset(mLastLayerHandles, 0, sizeof(mLastLayerHandles)); + memset(&mFbUpdateRegion, 0, sizeof(mFbUpdateRegion)); + + mPreProcessedInfo.mHasDrmSurface = false; + mCheckIntMPP = new ExynosMPPModule(NULL, MPP_VGR, 0); + + mWinData = (struct decon_win_config_data *)malloc(sizeof(*mWinData)); + if (mWinData == NULL) 
+ DISPLAY_LOGE("Fail to allocate mWinData"); +} +ExynosDisplay::ExynosDisplay(uint32_t type, struct exynos5_hwc_composer_device_1_t *pdev) + : mDisplayFd(-1), + mType(type), + mPanelType(PANEL_LEGACY), + mDSCHSliceNum(WINUPDATE_DSC_H_SLICE_NUM), + mDSCYSliceSize(WINUPDATE_DSC_Y_SLICE_SIZE), + mXres(0), + mYres(0), + mXdpi(0), + mYdpi(0), + mVsyncPeriod(0), + mBlanked(true), + mHwc(pdev), + mAllocDevice(NULL), + mGrallocModule(NULL), + mLastFbWindow(NO_FB_NEEDED), + mVirtualOverlayFlag(0), + mBypassSkipStaticLayer(false), + mMPPLayers(0), + mYuvLayers(0), + mHasDrmSurface(false), + mFbNeeded(false), + mFirstFb(0), + mLastFb(0), + mFbWindow(0), + mForceFb(false), + mForceOverlayLayerIndex(-1), + mAllowedOverlays(5), + mOtfMode(OTF_OFF), + mGscUsed(false), + mMaxWindowOverlapCnt(NUM_HW_WINDOWS), + mUseSecureDMA(false), + mExternalMPPDstFormat(HAL_PIXEL_FORMAT_RGBX_8888), + mSkipStaticInitFlag(false), + mNumStaticLayers(0), + mLastRetireFenceFd(-1), + mFbPreAssigned(false), + mActiveConfigIndex(0), + mWinData(NULL) +{ + memset(mLastMPPMap, 0, sizeof(mLastMPPMap)); + memset(mLastHandles, 0, sizeof(mLastHandles)); + memset(&mLastConfigData, 0, sizeof(mLastConfigData)); + memset(mLastLayerHandles, 0, sizeof(mLastLayerHandles)); + + switch (mType) { + case EXYNOS_VIRTUAL_DISPLAY: + mDisplayName = android::String8("VirtualDisplay"); + break; + case EXYNOS_EXTERNAL_DISPLAY: + mDisplayName = android::String8("ExternalDisplay"); + break; + case EXYNOS_PRIMARY_DISPLAY: + mDisplayName = android::String8("PrimaryDisplay"); + break; +#if defined(USES_DUAL_DISPLAY) + case EXYNOS_SECONDARY_DISPLAY: + mDisplayName = android::String8("SecondaryDisplay"); + break; +#endif + default: + mDisplayName = android::String8("Unknown"); + break; + } + memset(&mFbUpdateRegion, 0, sizeof(mFbUpdateRegion)); + + mPreProcessedInfo.mHasDrmSurface = false; + mCheckIntMPP = new ExynosMPPModule(NULL, MPP_VGR, 0); + + mWinData = (struct decon_win_config_data *)malloc(sizeof(*mWinData)); + if (mWinData == NULL) + DISPLAY_LOGE("Fail to allocate mWinData"); +} + +ExynosDisplay::~ExynosDisplay() +{ + if (!mLayerInfos.isEmpty()) { + for (size_t i = 0; i < mLayerInfos.size(); i++) { + delete mLayerInfos[i]; + } + mLayerInfos.clear(); + } + if (mCheckIntMPP != NULL) { + delete mCheckIntMPP; + mCheckIntMPP = NULL; + } + + if (mWinData != NULL) + free(mWinData); +} + +int ExynosDisplay::prepare(hwc_display_contents_1_t *contents) +{ + ATRACE_CALL(); + DISPLAY_LOGD(eDebugDefault, "preparing %u layers for FIMD", contents->numHwLayers); + + if (!mForceFb) + skipStaticLayers(contents); + + if (mVirtualOverlayFlag) + mFbNeeded = 0; + + if (!mFbNeeded) + mFbWindow = NO_FB_NEEDED; + + return 0; +} + +int ExynosDisplay::set(hwc_display_contents_1_t *contents) +{ + hwc_layer_1_t *fb_layer = NULL; + int err = 0; + + if (mFbWindow != NO_FB_NEEDED) { + if (contents->numHwLayers >= 1 && + contents->hwLayers[contents->numHwLayers - 1].compositionType == HWC_FRAMEBUFFER_TARGET) + fb_layer = &contents->hwLayers[contents->numHwLayers - 1]; + + if (CC_UNLIKELY(!fb_layer)) { + DISPLAY_LOGE("framebuffer target expected, but not provided"); + err = -EINVAL; + } else { + DISPLAY_LOGD(eDebugDefault, "framebuffer target buffer:"); + dumpLayer(eDebugDefault, fb_layer); + } + } + + int fence; + if (!err) { + fence = postFrame(contents); + if (fence < 0) + err = fence; + } + +#if defined(USES_DUAL_DISPLAY) + if (mType != EXYNOS_SECONDARY_DISPLAY) + { +#endif + if (err) + fence = clearDisplay(); + + if (fence == 0) { + /* + * WIN_CONFIG is skipped, not error + */ + 
fence = -1; + if (mLastRetireFenceFd >= 0) { + int dup_fd = dup(mLastRetireFenceFd); + if (dup_fd >= 0) { + fence = dup_fd; + mLastRetireFenceFd = dup_fd; + dupFence(fence, contents); + } else { + DISPLAY_LOGW("mLastRetireFenceFd dup failed: %s", strerror(errno)); + mLastRetireFenceFd = -1; + } + } else { + ALOGE("WIN_CONFIG is skipped, but mLastRetireFenceFd is not valid"); + } + } else { + mLastRetireFenceFd = fence; + dupFence(fence, contents); + } +#if defined(USES_DUAL_DISPLAY) + } +#endif + + return err; +} + +void ExynosDisplay::dupFence(int fence, hwc_display_contents_1_t *contents) +{ + if (contents == NULL) + return; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle != NULL) + handle = private_handle_t::dynamicCast(layer.handle); + + if ((mVirtualOverlayFlag == true) && (layer.compositionType == HWC_OVERLAY) && + ((handle != NULL) && (getDrmMode(handle->flags) == NO_DRM)) && + (mFirstFb <= i) && (i <= mLastFb)) + continue; + + if (!(layer.flags & HWC_SKIP_RENDERING) && ((layer.compositionType == HWC_OVERLAY) || + ((mFbNeeded == true || this->mVirtualOverlayFlag) && layer.compositionType == HWC_FRAMEBUFFER_TARGET))) { + int dup_fd = dup(fence); + DISPLAY_LOGD(eDebugFence, "%d layer[type: %d, dst: %d, %d, %d, %d] fence is duplicated(%d)", + i, layer.compositionType, + layer.displayFrame.left, layer.displayFrame.top, + layer.displayFrame.right, layer.displayFrame.bottom, + dup_fd); + if (dup_fd < 0) + DISPLAY_LOGW("release fence dup failed: %s", strerror(errno)); + if (mLayerInfos[i]->mInternalMPP != NULL) { + ExynosMPPModule *exynosMPP = mLayerInfos[i]->mInternalMPP; + if (mLayerInfos[i]->mInternalMPP->mDstBufFence[0] >= 0) + close(mLayerInfos[i]->mInternalMPP->mDstBufFence[0]); + exynosMPP->mDstBufFence[0] = dup(fence); + } + if (mLayerInfos[i]->mExternalMPP != NULL) { + ExynosMPPModule *exysnosMPP = mLayerInfos[i]->mExternalMPP; + if (exysnosMPP->mDstBufFence[exysnosMPP->mCurrentBuf] >= 0) { + close (exysnosMPP->mDstBufFence[exysnosMPP->mCurrentBuf]); + exysnosMPP->mDstBufFence[exysnosMPP->mCurrentBuf] = -1; + } + exysnosMPP->mDstBufFence[exysnosMPP->mCurrentBuf] = dup_fd; + exysnosMPP->mCurrentBuf = (exysnosMPP->mCurrentBuf + 1) % exysnosMPP->mNumAvailableDstBuffers; + } else { + if (this->mVirtualOverlayFlag && (layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { + if (layer.releaseFenceFd >= 0) + close(layer.releaseFenceFd); + } + layer.releaseFenceFd = dup_fd; + } + } + } + +#if defined(USES_DUAL_DISPLAY) + if (mType == EXYNOS_SECONDARY_DISPLAY) + contents->retireFenceFd = dup(fence); + else + contents->retireFenceFd = fence; +#else + contents->retireFenceFd = fence; +#endif +} + +void ExynosDisplay::dump(android::String8& result) +{ + result.append( + " type | handle | color | blend | pa | format | position | size | intMPP | extMPP \n" + "----------+--------------|----------+-------+----+---------------+---------------+----------------------------------\n"); + // 8_______ | 12__________ | 8_______ | 5____ | 2_ | 13___________ | [5____,5____] | [5____,5____] | [2_,2_] | [2_,2_]\n" + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + struct decon_win_config &config = mLastConfigData.config[i]; + if ((config.state == config.DECON_WIN_STATE_DISABLED) && + (mLastMPPMap[i].internal_mpp.type == -1) && + (mLastMPPMap[i].external_mpp.type == -1)){ + result.appendFormat(" %8s | %12s | %8s | %5s | %2s | %13s | %13s | %13s", + "OVERLAY", "-", "-", "-", "-", "-", "-", "-"); + } + 
else { + if (config.state == config.DECON_WIN_STATE_COLOR) + result.appendFormat(" %8s | %12s | %8x | %5s | %2s | %13s", "COLOR", + "-", config.color, "-", "-", "-"); + else + result.appendFormat(" %8s | %12" PRIxPTR " | %8s | %5x | %2x | %13s", + mLastFbWindow == i ? "FB" : "OVERLAY", + intptr_t(mLastHandles[i]), + "-", config.blending, config.plane_alpha, deconFormat2str(config.format)); + + result.appendFormat(" | [%5d,%5d] | [%5u,%5u]", config.dst.x, config.dst.y, + config.dst.w, config.dst.h); + } + if (mLastMPPMap[i].internal_mpp.type == -1) { + result.appendFormat(" | [%2s,%2s]", "-", "-"); + } else { + result.appendFormat(" | [%2d,%2d]", mLastMPPMap[i].internal_mpp.type, mLastMPPMap[i].internal_mpp.index); + } + + if (mLastMPPMap[i].external_mpp.type == -1) { + result.appendFormat(" | [%2s,%2s]", "-", "-"); + } else { + result.appendFormat(" | [%2d,%2d]", mLastMPPMap[i].external_mpp.type, mLastMPPMap[i].external_mpp.index); + } + result.append("\n"); + } +} + +void ExynosDisplay::freeMPP() +{ +} + +void ExynosDisplay::doPreProcessing(hwc_display_contents_1_t* contents) +{ + mPreProcessedInfo.mHasDrmSurface = false; + mForceOverlayLayerIndex = -1; + this->mHasDrmSurface = false; + mYuvLayers = 0; + mLayersNeedScaling = false; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (h->flags & GRALLOC_USAGE_PROTECTED) { + mPreProcessedInfo.mHasDrmSurface = true; + this->mHasDrmSurface = true; + mForceOverlayLayerIndex = i; + } + if (!isFormatRgb(h->format)) { + this->mYuvLayers++; + } + if (isScaled(layer)) + mLayersNeedScaling = true; + } + } +} + +void ExynosDisplay::allocateLayerInfos(hwc_display_contents_1_t* contents) +{ + if (contents == NULL) + return; + + if (!mLayerInfos.isEmpty()) { + for (size_t i = 0; i < mLayerInfos.size(); i++) { + delete mLayerInfos[i]; + } + mLayerInfos.clear(); + } + + for (size_t i= 0; i < contents->numHwLayers; i++) { + ExynosLayerInfo *layerInfo = new ExynosLayerInfo(); + memset(layerInfo, 0, sizeof(ExynosLayerInfo)); + layerInfo->mDmaType = -1; + mLayerInfos.push(layerInfo); + } + + mForceFb = mHwc->force_gpu; + + doPreProcessing(contents); +} + +void ExynosDisplay::dumpLayerInfo(android::String8& result) +{ + if (!mLayerInfos.isEmpty()) { + result.append( + " type | CheckOverlayFlag | CheckMPPFlag | Comp | mWinIndex | mDmaType | mIntMPP | mExtMPP \n" + "------------+------------------+--------------+------+-----------+----------+-----------+----------\n"); + // 10________ | 8_______ | 8_______ | 3__ | 9________ | 8_______ | [3__, 2_] | [3__, 2_]\n" + for (size_t i = 0; i < mLayerInfos.size(); i++) { + unsigned int type = mLayerInfos[i]->compositionType; + static char const* compositionTypeName[] = { + "GLES", + "HWC", + "BACKGROUND", + "FB TARGET", + "UNKNOWN"}; + + if (type >= NELEM(compositionTypeName)) + type = NELEM(compositionTypeName) - 1; + result.appendFormat( + " %10s | 0x%8x | 0x%8x | %1s | %9d | %8d", + compositionTypeName[type], + mLayerInfos[i]->mCheckOverlayFlag, mLayerInfos[i]->mCheckMPPFlag, + mLayerInfos[i]->mCompressed ? 
"Y" : "N", + mLayerInfos[i]->mWindowIndex, mLayerInfos[i]->mDmaType); + + if (mLayerInfos[i]->mInternalMPP == NULL) + result.appendFormat(" | [%3s, %2s]", "-", "-"); + else { + result.appendFormat(" | [%3s, %2d]", mLayerInfos[i]->mInternalMPP->getName().string(), mLayerInfos[i]->mInternalMPP->mIndex); + } + + if (mLayerInfos[i]->mExternalMPP == NULL) + result.appendFormat(" | [%3s, %2s]", "-", "-"); + else { + result.appendFormat(" | [%3s, %2d]", mLayerInfos[i]->mExternalMPP->getName().string(), mLayerInfos[i]->mExternalMPP->mIndex); + } + result.append("\n"); + } + } + result.append("\n"); +} + +bool ExynosDisplay::handleTotalBandwidthOverload(hwc_display_contents_1_t *contents) +{ + bool changed = false; + bool addFB = true; + if (mHwc->totPixels >= FIMD_TOTAL_BW_LIMIT) { + changed = true; + if (mFbNeeded) { + for (int i = mFirstFb - 1; i >= 0; i--) { + if (mForceOverlayLayerIndex == 0 && i == 0) + break; + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eInsufficientBandwidth; + mLayerInfos[i]->mInternalMPP = NULL; + mLayerInfos[i]->mExternalMPP = NULL; + mFirstFb = (size_t)i; + mHwc->totPixels -= WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + if (mHwc->totPixels < FIMD_TOTAL_BW_LIMIT) + break; + } + if (mHwc->totPixels >= FIMD_TOTAL_BW_LIMIT) { + for (size_t i = mLastFb + 1; i < contents->numHwLayers - 1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eInsufficientBandwidth; + mLayerInfos[i]->mInternalMPP = NULL; + mLayerInfos[i]->mExternalMPP = NULL; + mLastFb = i; + mHwc->totPixels -= WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + if (mHwc->totPixels < FIMD_TOTAL_BW_LIMIT) + break; + } + } + } else { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY && + mForceOverlayLayerIndex != (int)i) { + layer.compositionType = HWC_FRAMEBUFFER; + mLastFb = max(mLastFb, i); + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eInsufficientBandwidth; + mLayerInfos[i]->mInternalMPP = NULL; + mLayerInfos[i]->mExternalMPP = NULL; + if (addFB) { + addFB = false; + mHwc->totPixels += mXres * mYres; + } + mHwc->totPixels -= WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + if (mHwc->totPixels < FIMD_TOTAL_BW_LIMIT) + break; + } + } + if (mForceOverlayLayerIndex == 0) + mFirstFb = 1; + else + mFirstFb = 0; + } + mFbNeeded = true; + } + + return changed; +} + +int ExynosDisplay::clearDisplay() +{ + struct decon_win_config_data win_data; + memset(&win_data, 0, sizeof(win_data)); + + int ret = ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, &win_data); + LOG_ALWAYS_FATAL_IF(ret < 0, + "%s ioctl S3CFB_WIN_CONFIG failed to clear screen: %s", + mDisplayName.string(), strerror(errno)); + // the causes of an empty config failing are all unrecoverable + + return win_data.fence; +} + +int ExynosDisplay::getCompModeSwitch() +{ + unsigned int tot_win_size = 0, updateFps = 0; + unsigned int lcd_size = this->mXres * this->mYres; + uint64_t TimeStampDiff; + float Temp; + + if (!mHwc->hwc_ctrl.dynamic_recomp_mode) { + mHwc->LastModeSwitchTimeStamp = 0; + mHwc->CompModeSwitch = NO_MODE_SWITCH; + return 0; + } + + /* initialize the Timestamps */ + if 
(!mHwc->LastModeSwitchTimeStamp) { + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + mHwc->CompModeSwitch = NO_MODE_SWITCH; + return 0; + } + + /* If video layer is there, skip the mode switch */ + if (mYuvLayers || mLayersNeedScaling) { + if (mHwc->CompModeSwitch != HWC_2_GLES) { + return 0; + } else { + mHwc->CompModeSwitch = GLES_2_HWC; + mHwc->updateCallCnt = 0; + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + DISPLAY_LOGI("[DYNAMIC_RECOMP] GLES_2_HWC by video layer"); + return GLES_2_HWC; + } + } + + /* Mode Switch is not required if total pixels are not more than the threshold */ + if ((uint32_t)mHwc->incomingPixels <= lcd_size * HWC_FIMD_BW_TH) { + if (mHwc->CompModeSwitch != HWC_2_GLES) { + return 0; + } else { + mHwc->CompModeSwitch = GLES_2_HWC; + mHwc->updateCallCnt = 0; + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + DISPLAY_LOGI("[DYNAMIC_RECOMP] GLES_2_HWC by BW check"); + return GLES_2_HWC; + } + } + + /* + * There will be at least one composition call per one minute (because of time update) + * To minimize the analysis overhead, just analyze it once in a second + */ + TimeStampDiff = systemTime(SYSTEM_TIME_MONOTONIC) - mHwc->LastModeSwitchTimeStamp; + + /* + * previous CompModeSwitch was GLES_2_HWC: check fps every 250ms from LastModeSwitchTimeStamp + * previous CompModeSwitch was HWC_2_GLES: check immediately + */ + if ((mHwc->CompModeSwitch != HWC_2_GLES) && (TimeStampDiff < (VSYNC_INTERVAL * 15))) { + return 0; + } + mHwc->LastModeSwitchTimeStamp = mHwc->LastUpdateTimeStamp; + if ((mHwc->update_event_cnt != 1) && // This is not called by hwc_update_stat_thread + (mHwc->CompModeSwitch == HWC_2_GLES) && (mHwc->updateCallCnt == 1)) { + DISPLAY_LOGI("[DYNAMIC_RECOMP] first frame after HWC_2_GLES"); + updateFps = HWC_FPS_TH; + } else { + Temp = (VSYNC_INTERVAL * 60) / TimeStampDiff; + updateFps = (int)(mHwc->updateCallCnt * Temp + 0.5); + } + mHwc->updateCallCnt = 0; + /* + * FPS estimation. 
+ * If FPS is lower than HWC_FPS_TH, try to switch the mode to GLES + */ + if (updateFps < HWC_FPS_TH) { + if (mHwc->CompModeSwitch != HWC_2_GLES) { + mHwc->CompModeSwitch = HWC_2_GLES; + DISPLAY_LOGI("[DYNAMIC_RECOMP] HWC_2_GLES by low FPS(%d)", updateFps); + return HWC_2_GLES; + } else { + return 0; + } + } else { + if (mHwc->CompModeSwitch == HWC_2_GLES) { + mHwc->CompModeSwitch = GLES_2_HWC; + DISPLAY_LOGI("[DYNAMIC_RECOMP] GLES_2_HWC by high FPS(%d)", updateFps); + return GLES_2_HWC; + } else { + return 0; + } + } + + return 0; +} + +int32_t ExynosDisplay::getDisplayAttributes(const uint32_t attribute, uint32_t __unused config) +{ + switch(attribute) { + case HWC_DISPLAY_VSYNC_PERIOD: + return this->mVsyncPeriod; + + case HWC_DISPLAY_WIDTH: +#if defined(USES_DUAL_DISPLAY) + if ((mType == EXYNOS_PRIMARY_DISPLAY) || (mType == EXYNOS_SECONDARY_DISPLAY)) + return this->mXres/2; + else + return mXres; +#else + return this->mXres; +#endif + + case HWC_DISPLAY_HEIGHT: + return this->mYres; + + case HWC_DISPLAY_DPI_X: + return this->mXdpi; + + case HWC_DISPLAY_DPI_Y: + return this->mYdpi; + + default: + DISPLAY_LOGE("unknown display attribute %u", attribute); + return -EINVAL; + } +} + +bool ExynosDisplay::isOverlaySupportedByIDMA(hwc_layer_1_t __unused &layer, size_t __unused index) +{ + if (isCompressed(layer)) + return false; + else + return true; +} + +void ExynosDisplay::getIDMAMinSize(hwc_layer_1_t __unused &layer, int *w, int *h) +{ + *w = 1; + *h = 1; +} + +bool ExynosDisplay::isOverlaySupported(hwc_layer_1_t &layer, size_t index, bool useVPPOverlay, + ExynosMPPModule** supportedInternalMPP, ExynosMPPModule** supportedExternalMPP) +{ + int mMPPIndex = 0; + int ret = 0; + ExynosMPPModule* transitionInternalMPP = NULL; + private_handle_t *handle = NULL; + int handleFormat = 0; + bool firstFrameFramebufferTarget = false; + + DISPLAY_LOGD(eDebugOverlaySupported, "isOverlaySupported:: index(%d), useVPPOverlay(%d)", index, useVPPOverlay); + + if (layer.flags & HWC_SKIP_LAYER) { + mLayerInfos[index]->mCheckOverlayFlag |= eSkipLayer; + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: skipping", index); + return false; + } + + if (!layer.planeAlpha) + return true; + + if (index == 0 && layer.planeAlpha < 255) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: eUnsupportedPlaneAlpha", index); + mLayerInfos[index]->mCheckOverlayFlag |= eUnsupportedPlaneAlpha; + return false; + } + + if (layer.handle) { + handle = private_handle_t::dynamicCast(layer.handle); + handleFormat = handle->format; + } + + if ((layer.compositionType != HWC_FRAMEBUFFER_TARGET) && !handle) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: handle is NULL, type is %d", index, layer.compositionType); + mLayerInfos[index]->mCheckOverlayFlag |= eInvalidHandle; + return false; + } + + if (!handle && (layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { + firstFrameFramebufferTarget = true; + handleFormat = HAL_PIXEL_FORMAT_RGBA_8888; + } + + if (handle && (getDrmMode(handle->flags) == NO_DRM) && + (isFloat(layer.sourceCropf.left) || isFloat(layer.sourceCropf.top) || + isFloat(layer.sourceCropf.right - layer.sourceCropf.left) || + isFloat(layer.sourceCropf.bottom - layer.sourceCropf.top))) { + if (isSourceCropfSupported(layer) == false) + return false; + } + + if (!isBlendingSupported(layer.blending)) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: blending %d not supported", index, layer.blending); + mLayerInfos[index]->mCheckOverlayFlag |= eUnsupportedBlending; + return false; + } + + int32_t bpp = 
formatToBpp(handleFormat); + int32_t left = max(layer.displayFrame.left, 0); + int32_t right = min(layer.displayFrame.right, mXres); + uint32_t visible_width = 0; + + if ((bpp == 16) && + ((layer.displayFrame.left % 2 != 0) || (layer.displayFrame.right % 2 != 0))) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: eNotAlignedDstPosition", index); + mLayerInfos[index]->mCheckOverlayFlag |= eNotAlignedDstPosition; + return false; + } + + visible_width = (right - left) * bpp / 8; + if (visible_width < BURSTLEN_BYTES) { +#ifdef USE_DRM_BURST_LEN + if (handle && (getDrmMode(handle->flags) != NO_DRM)) { + if (visible_width < DRM_BURSTLEN_BYTES) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: visible area is too narrow", index); + mLayerInfos[index]->mCheckOverlayFlag |= eUnsupportedDstWidth; + return false; + } + } else { +#endif + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: visible area is too narrow", index); + mLayerInfos[index]->mCheckOverlayFlag |= eUnsupportedDstWidth; + return false; +#ifdef USE_DRM_BURST_LEN + } +#endif + } + + if (!isProcessingRequired(layer) && !useVPPOverlay) + return true; + + hwc_layer_1_t extMPPOutLayer = layer; + int originalHandleFormt = handleFormat; + int dst_format = handleFormat; + bool isBothMPPUsed = isBothMPPProcessingRequired(layer, &extMPPOutLayer); + DISPLAY_LOGD(eDebugOverlaySupported, "isOverlaySupported:: index(%d), isBothMPPUsed(%d)", index, isBothMPPUsed); + + if (isBothMPPUsed) { + if ((*supportedInternalMPP != NULL) && (*supportedExternalMPP != NULL)) + return true; + } else { + if ((*supportedInternalMPP != NULL) || (*supportedExternalMPP != NULL && !useVPPOverlay)) + return true; + } + + if (*supportedExternalMPP == NULL && isBothMPPUsed) + { + /* extMPPOutLayer is output of ExtMPP + * The output of ExtMPP is the input of IntMPP + */ + if (!isFormatRgb(handleFormat) && + (WIDTH(extMPPOutLayer.displayFrame) % mCheckIntMPP->getCropWidthAlign(layer) != 0 || + HEIGHT(extMPPOutLayer.displayFrame) % mCheckIntMPP->getCropHeightAlign(layer) != 0 || + !(mCheckIntMPP->isFormatSupportedByMPP(handleFormat)) || + !(mCheckIntMPP->isCSCSupportedByMPP(handleFormat, HAL_PIXEL_FORMAT_RGBX_8888, layer.dataSpace)))) + dst_format = mExternalMPPDstFormat; + + /* extMPPOutLayer is output of ExtMPP */ + for (size_t i = 0; i < mExternalMPPs.size(); i++) + { + ExynosMPPModule* externalMPP = mExternalMPPs[i]; + if (externalMPP->mState == MPP_STATE_FREE) { + ret = externalMPP->isProcessingSupported(extMPPOutLayer, dst_format); + if (ret > 0) { + *supportedExternalMPP = externalMPP; + break; + } else { + mLayerInfos[index]->mCheckMPPFlag |= -ret; + } + } + } + + /* Can't find valid externalMPP */ + if (*supportedExternalMPP == NULL) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: Can't find valid externalMPP", index); + mLayerInfos[index]->mCheckOverlayFlag |= eInsufficientMPP; + return false; + } + } + if (*supportedInternalMPP == NULL) { + for (size_t i = 0; i < mInternalMPPs.size(); i++) + { + ExynosMPPModule* internalMPP = mInternalMPPs[i]; + hwc_layer_1_t extMPPTempOutLayer = extMPPOutLayer; + if (isBothMPPUsed) { + if (internalMPP->mType == MPP_VPP_G) + continue; + /* extMPPOutLayer is output of ExtMPP + * The output of ExtMPP is the input of IntMPP + */ + if (!isFormatRgb(handleFormat) && + (WIDTH(extMPPTempOutLayer.displayFrame) % internalMPP->getCropWidthAlign(layer) != 0 || + HEIGHT(extMPPTempOutLayer.displayFrame) % internalMPP->getCropHeightAlign(layer) != 0 || + !(internalMPP->isFormatSupportedByMPP(handleFormat)))) + dst_format = 
mExternalMPPDstFormat; + + extMPPTempOutLayer.sourceCropf.left = extMPPOutLayer.displayFrame.left; + extMPPTempOutLayer.sourceCropf.top = extMPPOutLayer.displayFrame.top; + extMPPTempOutLayer.sourceCropf.right = extMPPOutLayer.displayFrame.right; + extMPPTempOutLayer.sourceCropf.bottom = extMPPOutLayer.displayFrame.bottom; + extMPPTempOutLayer.displayFrame.left = layer.displayFrame.left; + extMPPTempOutLayer.displayFrame.top = layer.displayFrame.top; + extMPPTempOutLayer.displayFrame.right = layer.displayFrame.right; + extMPPTempOutLayer.displayFrame.bottom = layer.displayFrame.bottom; + ((private_handle_t *)extMPPTempOutLayer.handle)->format = dst_format; + extMPPTempOutLayer.transform = 0; + } + ExynosDisplay *addedDisplay = (mHwc->hdmi_hpd ? (ExynosDisplay *)mHwc->externalDisplay : (ExynosDisplay *)mHwc->virtualDisplay); + ExynosDisplay *otherDisplay = (mType ? (ExynosDisplay *)mHwc->primaryDisplay : addedDisplay); + + /* + * If MPP was assigned to other Device in previous frame + * then doesn't assign it untill it is cleared + */ + if ((internalMPP->mState == MPP_STATE_FREE) && + (internalMPP->mDisplay == NULL || internalMPP->mDisplay == this)) { + /* InternalMPP doesn't need to check dst_format. Set dst_format with source format */ + if (firstFrameFramebufferTarget) + ret = 1; + else { + ret = internalMPP->isProcessingSupported(extMPPTempOutLayer, ((private_handle_t *)extMPPTempOutLayer.handle)->format); + handle->format = originalHandleFormt; + } + if (ret > 0) { + *supportedInternalMPP = internalMPP; + return true; + } else { + mLayerInfos[index]->mCheckMPPFlag |= -ret; + } + } else if (internalMPP->wasUsedByDisplay(otherDisplay)) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: internalMPP[%d, %d] was used by other device", index, internalMPP->mType, internalMPP->mIndex); + if (transitionInternalMPP == NULL) + transitionInternalMPP = internalMPP; + } + if (handle) + handle->format = originalHandleFormt; + } + } + + if ((*supportedInternalMPP == NULL) && (useVPPOverlay == true) && !isProcessingRequired(layer)) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: eInsufficientMPP", index); + mLayerInfos[index]->mCheckOverlayFlag |= eInsufficientMPP; + if (transitionInternalMPP != NULL) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: internalMPP[%d, %d] transition is started", index, transitionInternalMPP->mType, transitionInternalMPP->mIndex); + transitionInternalMPP->startTransition(this); + } + return false; + } + + /* Can't find valid internalMPP */ + if (isBothMPPProcessingRequired(layer) && *supportedInternalMPP == NULL) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: Can't find valid internalMPP", index); + if (transitionInternalMPP != NULL) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: internalMPP[%d, %d] transition is started", index, transitionInternalMPP->mType, transitionInternalMPP->mIndex); + transitionInternalMPP->startTransition(this); + } + mLayerInfos[index]->mCheckOverlayFlag |= eInsufficientMPP; + return false; + } + + if (*supportedExternalMPP == NULL) { + for (size_t i = 0; i < mExternalMPPs.size(); i++) + { + ExynosMPPModule* externalMPP = mExternalMPPs[i]; + int dst_format = handleFormat; + if (!isFormatRgb(handleFormat)) + dst_format = mExternalMPPDstFormat; + + if (externalMPP->mState == MPP_STATE_FREE) { + if (firstFrameFramebufferTarget) + ret = 1; + else + ret = externalMPP->isProcessingSupported(layer, dst_format); + if (ret > 0) { + *supportedExternalMPP = externalMPP; + if (useVPPOverlay) + break; + return true; + } else { + 
mLayerInfos[index]->mCheckMPPFlag |= -ret; + } + } + } + } + + if (*supportedExternalMPP != NULL && useVPPOverlay == true && *supportedInternalMPP == NULL) { + int originalHandleFormt = handleFormat; + dst_format = handleFormat; + if (!isFormatRgb(handleFormat)) + dst_format = mExternalMPPDstFormat; + for (size_t i = 0; i < mInternalMPPs.size(); i++) + { + extMPPOutLayer = layer; + /* extMPPOutLayer is output of ExtMPP + * The output of ExtMPP is the input of IntMPP + */ + extMPPOutLayer.sourceCropf.left = layer.displayFrame.left; + extMPPOutLayer.sourceCropf.top = layer.displayFrame.top; + extMPPOutLayer.sourceCropf.right = layer.displayFrame.right; + extMPPOutLayer.sourceCropf.bottom = layer.displayFrame.bottom; + extMPPOutLayer.transform = 0; + if (handle) + ((private_handle_t *)extMPPOutLayer.handle)->format = dst_format; + ExynosMPPModule* internalMPP = mInternalMPPs[i]; + + /* + * If MPP was assigned to other Device in previous frame + * then doesn't assign it untill it is cleared + */ + if ((internalMPP->mState == MPP_STATE_FREE) && + (internalMPP->mDisplay == NULL || internalMPP->mDisplay == this)) { + if (firstFrameFramebufferTarget) + ret = 1; + else { + ret = internalMPP->isProcessingSupported(extMPPOutLayer, ((private_handle_t *)extMPPOutLayer.handle)->format); + handle->format = originalHandleFormt; + } + if (ret > 0) { + *supportedInternalMPP = internalMPP; + return true; + } else { + mLayerInfos[index]->mCheckMPPFlag |= -ret; + } + } else { + ExynosDisplay *addedDisplay = (mHwc->hdmi_hpd ? (ExynosDisplay *)mHwc->externalDisplay : (ExynosDisplay *)mHwc->virtualDisplay); + ExynosDisplay *otherDisplay = (mType ? (ExynosDisplay *)mHwc->primaryDisplay : addedDisplay); + if (firstFrameFramebufferTarget) { + if ((internalMPP->wasUsedByDisplay(otherDisplay)) && (transitionInternalMPP == NULL)) + transitionInternalMPP = internalMPP; + } else { + if ((internalMPP->wasUsedByDisplay(otherDisplay)) && + ((transitionInternalMPP == NULL) || + ((transitionInternalMPP->isProcessingSupported(extMPPOutLayer, ((private_handle_t *)extMPPOutLayer.handle)->format) < 0) && + (internalMPP->isProcessingSupported(extMPPOutLayer, ((private_handle_t *)extMPPOutLayer.handle)->format) > 0)))) + transitionInternalMPP = internalMPP; + } + } + + if (handle) + handle->format = originalHandleFormt; + } + } + + /* Transit display for next frame */ + if ((*supportedInternalMPP == NULL) && (transitionInternalMPP != NULL)) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: internalMPP[%d, %d] transition is started", index, transitionInternalMPP->mType, transitionInternalMPP->mIndex); + transitionInternalMPP->startTransition(this); + } + + /* Can't find valid MPP */ + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: can't find valid MPP", index); + mLayerInfos[index]->mCheckOverlayFlag |= eInsufficientMPP; + return false; + +} + +void ExynosDisplay::configureHandle(private_handle_t *handle, size_t index, + hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg) +{ + int isOpaque = 0; + + if (handle == NULL) + return; + + if ((layer.flags & HWC_SET_OPAQUE) && handle && (handle->format == HAL_PIXEL_FORMAT_RGBA_8888) + && (layer.compositionType == HWC_OVERLAY)) { + handle->format = HAL_PIXEL_FORMAT_RGBX_8888; + isOpaque = 1; + } + hwc_frect_t &sourceCrop = layer.sourceCropf; + hwc_rect_t &displayFrame = layer.compositionType == HWC_FRAMEBUFFER_TARGET ? 
mFbUpdateRegion : layer.displayFrame; + int32_t blending = layer.blending; + int32_t planeAlpha = layer.planeAlpha; + uint32_t x, y; + uint32_t w = WIDTH(displayFrame); + uint32_t h = HEIGHT(displayFrame); + uint8_t bpp = formatToBpp(handle->format); + uint32_t offset = ((uint32_t)sourceCrop.top * handle->stride + (uint32_t)sourceCrop.left) * bpp / 8; + ExynosMPPModule* internalMPP = mLayerInfos[index]->mInternalMPP; + ExynosMPPModule* externalMPP = mLayerInfos[index]->mExternalMPP; + +#ifdef USES_DECON_AFBC_DECODER + cfg.compression = isCompressed(layer); +#endif + + if (displayFrame.left < 0) { + unsigned int crop = -displayFrame.left; + DISPLAY_LOGD(eDebugWinConfig, "layer off left side of screen; cropping %u pixels from left edge", + crop); + x = 0; + w -= crop; + offset += crop * bpp / 8; + } else { + x = displayFrame.left; + } + + if (displayFrame.right > this->mXres) { + unsigned int crop = displayFrame.right - this->mXres; + DISPLAY_LOGD(eDebugWinConfig, "layer off right side of screen; cropping %u pixels from right edge", + crop); + w -= crop; + } + + if (displayFrame.top < 0) { + unsigned int crop = -displayFrame.top; + DISPLAY_LOGD(eDebugWinConfig, "layer off top side of screen; cropping %u pixels from top edge", + crop); + y = 0; + h -= crop; + offset += handle->stride * crop * bpp / 8; + } else { + y = displayFrame.top; + } + + if (displayFrame.bottom > this->mYres) { + int crop = displayFrame.bottom - this->mYres; + DISPLAY_LOGD(eDebugWinConfig, "layer off bottom side of screen; cropping %u pixels from bottom edge", + crop); + h -= crop; + } + + cfg.fd_idma[0] = handle->fd; + cfg.fd_idma[1] = handle->fd1; + cfg.fd_idma[2] = handle->fd2; + if (mLayerInfos[index]->mDmaType == -1) { + cfg.state = cfg.DECON_WIN_STATE_DISABLED; + } else { + cfg.state = cfg.DECON_WIN_STATE_BUFFER; + cfg.idma_type = (decon_idma_type)mLayerInfos[index]->mDmaType; + } + cfg.dst.x = x; + cfg.dst.y = y; + cfg.dst.w = w; + cfg.dst.h = h; + cfg.dst.f_w = mXres; + cfg.dst.f_h = mYres; + cfg.format = halFormatToS3CFormat(handle->format); + + cfg.src.f_w = handle->stride; + cfg.src.f_h = handle->vstride; + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV) { + if (handle->fd2 >= 0) { + void *metaData = NULL; + int interlacedType = -1; + metaData = mmap(0, 64, PROT_READ|PROT_WRITE, MAP_SHARED, handle->fd2, 0); + if (metaData) + interlacedType = *(int *)metaData; + else + interlacedType = -1; + + if (interlacedType == V4L2_FIELD_INTERLACED_TB || + interlacedType == V4L2_FIELD_INTERLACED_BT) { + cfg.src.f_w = handle->stride * 2; + cfg.src.f_h = handle->vstride / 2; + } + if (metaData) + munmap(metaData, 64); + } + } + + cfg.src.x = (int)sourceCrop.left; + cfg.src.y = (int)sourceCrop.top; + + if (cfg.src.x < 0) + cfg.src.x = 0; + if (cfg.src.y < 0) + cfg.src.y = 0; + + if (internalMPP != NULL) { + if (cfg.src.f_w > (unsigned int)internalMPP->getMaxWidth(layer)) + cfg.src.f_w = (unsigned int)internalMPP->getMaxWidth(layer); + if (cfg.src.f_h > (unsigned int)internalMPP->getMaxHeight(layer)) + cfg.src.f_h = (unsigned int)internalMPP->getMaxHeight(layer); + cfg.src.f_w = ALIGN_DOWN((unsigned int)cfg.src.f_w, internalMPP->getSrcWidthAlign(layer)); + cfg.src.f_h = ALIGN_DOWN((unsigned int)cfg.src.f_h, internalMPP->getSrcHeightAlign(layer)); + + cfg.src.x = ALIGN((unsigned int)sourceCrop.left, internalMPP->getSrcXOffsetAlign(layer)); + cfg.src.y = ALIGN((unsigned int)sourceCrop.top, internalMPP->getSrcYOffsetAlign(layer)); + } + + cfg.src.w = WIDTH(sourceCrop) - (cfg.src.x - (uint32_t)sourceCrop.left); 
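+    /* Descriptive note: cfg.src.w/h above are derived from the (possibly
+     * alignment-shifted) src.x/y. The checks below additionally clamp the
+     * crop so it never extends past the source frame (f_w/f_h) and, when an
+     * internal MPP handles this layer, respects its maximum crop size and
+     * crop alignment. */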
+ if (cfg.src.x + cfg.src.w > cfg.src.f_w) + cfg.src.w = cfg.src.f_w - cfg.src.x; + cfg.src.h = HEIGHT(sourceCrop) - (cfg.src.y - (uint32_t)sourceCrop.top); + if (cfg.src.y + cfg.src.h > cfg.src.f_h) + cfg.src.h = cfg.src.f_h - cfg.src.y; + + if (internalMPP != NULL) { + if (cfg.src.w > (unsigned int)internalMPP->getMaxCropWidth(layer)) + cfg.src.w = (unsigned int)internalMPP->getMaxCropWidth(layer); + if (cfg.src.h > (unsigned int)internalMPP->getMaxCropHeight(layer)) + cfg.src.h = (unsigned int)internalMPP->getMaxCropHeight(layer); + cfg.src.w = ALIGN_DOWN(cfg.src.w, internalMPP->getCropWidthAlign(layer)); + cfg.src.h = ALIGN_DOWN(cfg.src.h, internalMPP->getCropHeightAlign(layer)); + } + + if (isSrcCropFloat(layer.sourceCropf)) + { + if (internalMPP != NULL) { + exynos_mpp_img srcImg; + internalMPP->adjustSourceImage(layer, srcImg); + cfg.src.f_w = srcImg.fw; + cfg.src.f_h = srcImg.fh; + cfg.src.x = srcImg.x; + cfg.src.y = srcImg.y; + cfg.src.w = srcImg.w; + cfg.src.h = srcImg.h; + } else { + if (externalMPP == NULL) + ALOGE("float sourceCrop should be handled by MPP"); + } +#if 0 + ALOGD("x = %7.1f, 0x%8x", sourceCrop.left, cfg.src.x); + ALOGD("y = %7.1f, 0x%8x", sourceCrop.top, cfg.src.y); + ALOGD("w = %7.1f, 0x%8x", sourceCrop.right - sourceCrop.left, cfg.src.w); + ALOGD("h = %7.1f, 0x%8x", sourceCrop.bottom - sourceCrop.top, cfg.src.h); +#endif + } + + cfg.blending = halBlendingToS3CBlending(blending); + cfg.fence_fd = fence_fd; + cfg.plane_alpha = 255; + if (planeAlpha && (planeAlpha < 255)) { + cfg.plane_alpha = planeAlpha; + } + if (mLayerInfos[index]->mInternalMPP) { + cfg.vpp_parm.rot = (vpp_rotate)halTransformToHWRot(layer.transform); + cfg.vpp_parm.eq_mode = isFullRangeColor(layer) ? BT_601_WIDE : BT_601_NARROW; + + if ((!mLayerInfos[index]->mExternalMPP && + (mHwc->mS3DMode == S3D_MODE_READY || mHwc->mS3DMode == S3D_MODE_RUNNING) && + !isFormatRgb(handle->format)) && + mType == EXYNOS_PRIMARY_DISPLAY) { + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + cfg.src.w /= 2; + else if (S3DFormat == S3D_TB) + cfg.src.h /= 2; + } + } + /* transparent region coordinates is on source buffer */ + getLayerRegion(layer, cfg.transparent_area, eTransparentRegion); + cfg.transparent_area.x += cfg.dst.x; + cfg.transparent_area.y += cfg.dst.y; + + /* opaque region coordinates is on screen */ + getLayerRegion(layer, cfg.covered_opaque_area, eCoveredOpaqueRegion); + + if (isOpaque && (handle->format == HAL_PIXEL_FORMAT_RGBX_8888)) { + handle->format = HAL_PIXEL_FORMAT_RGBA_8888; + isOpaque = 0; + } +} + +void ExynosDisplay::configureOverlay(hwc_layer_1_t *layer, size_t index, decon_win_config &cfg) +{ + if (layer->compositionType == HWC_BACKGROUND) { + hwc_color_t color = layer->backgroundColor; + cfg.state = cfg.DECON_WIN_STATE_COLOR; + cfg.color = (color.r << 16) | (color.g << 8) | color.b; + cfg.dst.x = 0; + cfg.dst.y = 0; + cfg.dst.w = this->mXres; + cfg.dst.h = this->mYres; + return; + } + private_handle_t *handle = private_handle_t::dynamicCast(layer->handle); + hwc_frect_t originalCrop = layer->sourceCropf; + if (layer->compositionType == HWC_FRAMEBUFFER_TARGET) { + /* Adjust FbUpdateRegion */ + int minCropWidth = 0; + int minCropHeight = 0; + int cropWidthAlign = 1; + if (mLayerInfos[index]->mInternalMPP != NULL) { + minCropWidth = mLayerInfos[index]->mInternalMPP->getMinWidth(*layer); + minCropHeight = mLayerInfos[index]->mInternalMPP->getMinHeight(*layer); + cropWidthAlign = mLayerInfos[index]->mInternalMPP->getCropWidthAlign(*layer); + } else { + 
getIDMAMinSize(*layer, &minCropWidth, &minCropHeight); + } +#if defined(USES_DUAL_DISPLAY) + int32_t minLeftPosition = (mType != EXYNOS_SECONDARY_DISPLAY)? 0:(mXres/2); + int32_t maxRightPosition = (mType == EXYNOS_PRIMARY_DISPLAY)?(mXres/2):mXres; +#else + int32_t minLeftPosition = 0; + int32_t maxRightPosition = mXres; +#endif + if (mFbUpdateRegion.left < minLeftPosition) mFbUpdateRegion.left = minLeftPosition; + if (mFbUpdateRegion.right < minLeftPosition) mFbUpdateRegion.right = minLeftPosition; + if (mFbUpdateRegion.left > maxRightPosition) mFbUpdateRegion.left = maxRightPosition; + if (mFbUpdateRegion.right > maxRightPosition) mFbUpdateRegion.right = maxRightPosition; + if (mFbUpdateRegion.top < 0) mFbUpdateRegion.top = 0; + if (mFbUpdateRegion.bottom < 0) mFbUpdateRegion.bottom = 0; + if (mFbUpdateRegion.top > mYres) mFbUpdateRegion.top = mYres; + if (mFbUpdateRegion.bottom > mYres) mFbUpdateRegion.bottom = mYres; + + if ((WIDTH(mFbUpdateRegion) % cropWidthAlign) != 0) { + mFbUpdateRegion.left = ALIGN_DOWN(mFbUpdateRegion.left, cropWidthAlign); + mFbUpdateRegion.right = ALIGN_UP(mFbUpdateRegion.right, cropWidthAlign); + } + if (WIDTH(mFbUpdateRegion) < minCropWidth) { +#if defined(USES_DUAL_DISPLAY) + if (mFbUpdateRegion.left + minCropWidth <= maxRightPosition) + mFbUpdateRegion.right = mFbUpdateRegion.left + minCropWidth; + else + mFbUpdateRegion.left = mFbUpdateRegion.right - minCropWidth; +#else + if (mFbUpdateRegion.left + minCropWidth <= mXres) + mFbUpdateRegion.right = mFbUpdateRegion.left + minCropWidth; + else + mFbUpdateRegion.left = mFbUpdateRegion.right - minCropWidth; +#endif + } + if (HEIGHT(mFbUpdateRegion) < minCropHeight) { + if (mFbUpdateRegion.top + minCropHeight <= mYres) + mFbUpdateRegion.bottom = mFbUpdateRegion.top + minCropHeight; + else + mFbUpdateRegion.top = mFbUpdateRegion.bottom - minCropHeight; + } + + if ((mFbUpdateRegion.left >= minLeftPosition) && (mFbUpdateRegion.top >= 0) && + (mFbUpdateRegion.right <= maxRightPosition) && (mFbUpdateRegion.bottom <= mYres)) { +#ifdef USES_DUAL_DISPLAY + if (mType == EXYNOS_SECONDARY_DISPLAY) { + layer->sourceCropf.left = (double)mFbUpdateRegion.left - (mXres/2); + layer->sourceCropf.right = (double)mFbUpdateRegion.right - (mXres/2); + } else { + layer->sourceCropf.left = (double)mFbUpdateRegion.left; + layer->sourceCropf.right = (double)mFbUpdateRegion.right; + } +#else + layer->sourceCropf.left = (double)mFbUpdateRegion.left; + layer->sourceCropf.right = (double)mFbUpdateRegion.right; +#endif + layer->sourceCropf.top = (double)mFbUpdateRegion.top; + layer->sourceCropf.bottom = (double)mFbUpdateRegion.bottom; + } else { + mFbUpdateRegion = layer->displayFrame; + } + } + configureHandle(handle, index, *layer, layer->acquireFenceFd, cfg); + layer->sourceCropf = originalCrop; +} + +int ExynosDisplay::handleWindowUpdate(hwc_display_contents_1_t __unused *contents, + struct decon_win_config __unused *config) +{ + int layerIdx = -1; + int updatedWinCnt = 0; + int totalWinCnt = 0; + int bitsPerPixel = 0; + size_t winUpdateInfoIdx; + hwc_rect updateRect = {this->mXres, this->mYres, 0, 0}; + hwc_rect currentRect = {0, 0, 0, 0}; + bool burstLengthCheckDone = false; + int alignAdjustment = 1; + int intersectionWidth = 0; + int xAlign = 0; + int wAlign = 0; + int yAlign = 0; + int hAlign = 0; + +#if defined(USES_DUAL_DISPLAY) + return -eWindowUpdateDisabled; +#endif + + + char value[PROPERTY_VALUE_MAX]; + property_get("debug.hwc.winupdate", value, NULL); + + if (!(!strcmp(value, "1") || !strcmp(value, "true"))) + return 
-eWindowUpdateDisabled; + + if (DECON_WIN_UPDATE_IDX < 0) + return -eWindowUpdateInvalidIndex; + winUpdateInfoIdx = DECON_WIN_UPDATE_IDX; + + if (contents->flags & HWC_GEOMETRY_CHANGED) + return -eWindowUpdateGeometryChanged; + + if (mPanelType == PANEL_DSC) { + xAlign = this->mXres / mDSCHSliceNum; + wAlign = this->mXres / mDSCHSliceNum; + yAlign = mDSCYSliceSize; + hAlign = mDSCYSliceSize; + } else { + xAlign = WINUPDATE_X_ALIGNMENT; + wAlign = WINUPDATE_W_ALIGNMENT; + yAlign = 1; + hAlign = 1; + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER) + continue; + + if (!mFbNeeded && contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + int32_t windowIndex = mLayerInfos[i]->mWindowIndex; + if ((windowIndex < 0) || (windowIndex > MAX_DECON_WIN)) + return -eWindowUpdateInvalidConfig; + + if (config[windowIndex].state != config[windowIndex].DECON_WIN_STATE_DISABLED) { + totalWinCnt++; + + if (winConfigChanged(&config[windowIndex], &this->mLastConfigData.config[windowIndex])) { + updatedWinCnt++; + + currentRect.left = config[windowIndex].dst.x; + currentRect.right = config[windowIndex].dst.x + config[windowIndex].dst.w; + currentRect.top = config[windowIndex].dst.y; + currentRect.bottom = config[windowIndex].dst.y + config[windowIndex].dst.h; + + if (hwcHasApiVersion((hwc_composer_device_1_t*)mHwc, HWC_DEVICE_API_VERSION_1_5)) + { + private_handle_t *handle = NULL; + hwc_rect damageRect = {0, 0, 0, 0}; + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + unsigned int damageRegionMod = getLayerRegion(layer, damageRect, eDamageRegion); + + if (damageRegionMod == eDamageRegionSkip) + continue; + + if (handle && !isScaled(layer) && !isRotated(layer) && (damageRegionMod == eDamageRegionPartial)) { + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE][surfaceDamage] layer w(%4d) h(%4d), dirty (%4d, %4d) - (%4d, %4d)", + handle->width, handle->height, damageRect.left, damageRect.top, damageRect.right, damageRect.bottom); + + currentRect.left = config[windowIndex].dst.x - (int32_t)layer.sourceCropf.left + damageRect.left; + currentRect.right = config[windowIndex].dst.x - (int32_t)layer.sourceCropf.left + damageRect.right; + currentRect.top = config[windowIndex].dst.y - (int32_t)layer.sourceCropf.top + damageRect.top; + currentRect.bottom = config[windowIndex].dst.y - (int32_t)layer.sourceCropf.top + damageRect.bottom; + adjustRect(currentRect, mXres, mYres); + + } + } + + if ((currentRect.left > currentRect.right) || (currentRect.top > currentRect.bottom)) { + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] window(%d) layer(%d) invalid region (%4d, %4d) - (%4d, %4d)", + i, layerIdx, currentRect.left, currentRect.top, currentRect.right, currentRect.bottom); + return -eWindowUpdateInvalidRegion; + } + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] Updated Window(%d) Layer(%d) (%4d, %4d) - (%4d, %4d)", + windowIndex, i, currentRect.left, currentRect.top, currentRect.right, currentRect.bottom); + updateRect = expand(updateRect, currentRect); + } + } + } + if (updatedWinCnt == 0) + return -eWindowUpdateNotUpdated; + + /* Alignment check */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER) + continue; + + if (!mFbNeeded && contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + + int32_t windowIndex = mLayerInfos[i]->mWindowIndex; + currentRect.left = 
config[windowIndex].dst.x; + currentRect.right = config[windowIndex].dst.x + config[windowIndex].dst.w; + currentRect.top = config[windowIndex].dst.y; + currentRect.bottom = config[windowIndex].dst.y + config[windowIndex].dst.h; + + if ((config[windowIndex].state != config[windowIndex].DECON_WIN_STATE_DISABLED) && + intersect(currentRect, updateRect)) { + private_handle_t *handle = NULL; + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + else + return -eWindowUpdateInvalidConfig; + + int originalFormat = handle->format; + int originalTransform = layer.transform; + if (mLayerInfos[i]->mInternalMPP != NULL) { + /* VPP scaling case */ + if ((config[windowIndex].src.w != config[windowIndex].dst.w) || + (config[windowIndex].src.h != config[windowIndex].dst.h)) + return -eWindowUpdateUnsupportedUseCase; + + handle->format = S3CFormatToHalFormat(config[windowIndex].format); + if (handle->format >= 0) { + /* rotation was handled by externalMPP */ + if (mLayerInfos[i]->mExternalMPP != NULL) + layer.transform = 0; + xAlign = getLCM(xAlign, mLayerInfos[i]->mInternalMPP->getSrcXOffsetAlign(layer)); + yAlign = getLCM(yAlign, mLayerInfos[i]->mInternalMPP->getSrcYOffsetAlign(layer)); + wAlign = getLCM(wAlign, mLayerInfos[i]->mInternalMPP->getCropWidthAlign(layer)); + hAlign = getLCM(hAlign, mLayerInfos[i]->mInternalMPP->getCropHeightAlign(layer)); + } else { + handle->format = originalFormat; + layer.transform = originalTransform; + return -eWindowUpdateInvalidConfig; + } + } + handle->format = originalFormat; + layer.transform = originalTransform; + } + } + + updateRect.left = ALIGN_DOWN(updateRect.left, xAlign); + updateRect.top = ALIGN_DOWN(updateRect.top, yAlign); + + if (HEIGHT(updateRect) < WINUPDATE_MIN_HEIGHT) { + if (updateRect.top + WINUPDATE_MIN_HEIGHT <= mYres) + updateRect.bottom = updateRect.top + WINUPDATE_MIN_HEIGHT; + else + updateRect.top = updateRect.bottom - WINUPDATE_MIN_HEIGHT; + } + + if ((100 * (WIDTH(updateRect) * HEIGHT(updateRect)) / (this->mXres * this->mYres)) > WINUPDATE_THRESHOLD) + return -eWindowUpdateOverThreshold; + + alignAdjustment = getLCM(alignAdjustment, xAlign); + alignAdjustment = getLCM(alignAdjustment, wAlign); + + while (1) { + burstLengthCheckDone = true; + updateRect.left = ALIGN_DOWN(updateRect.left, xAlign); + if ((WIDTH(updateRect) % wAlign) != 0) + updateRect.right = updateRect.left + ALIGN_DOWN(WIDTH(updateRect), wAlign) + wAlign; + updateRect.top = ALIGN_DOWN(updateRect.top, yAlign); + if ((HEIGHT(updateRect) % hAlign) != 0) + updateRect.bottom = updateRect.top + ALIGN_DOWN(HEIGHT(updateRect), hAlign) + hAlign; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER) + continue; + if (!mFbNeeded && contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + int32_t windowIndex = mLayerInfos[i]->mWindowIndex; + if (config[windowIndex].state != config[windowIndex].DECON_WIN_STATE_DISABLED) { + enum decon_pixel_format fmt = config[windowIndex].format; + if (fmt == DECON_PIXEL_FORMAT_RGBA_5551 || fmt == DECON_PIXEL_FORMAT_RGB_565) + bitsPerPixel = 16; + else if (fmt == DECON_PIXEL_FORMAT_NV12 || fmt == DECON_PIXEL_FORMAT_NV21 || + fmt == DECON_PIXEL_FORMAT_NV12M || fmt == DECON_PIXEL_FORMAT_NV21M) + bitsPerPixel = 12; + else + bitsPerPixel = 32; + + currentRect.left = config[windowIndex].dst.x; + currentRect.right = config[windowIndex].dst.x + config[windowIndex].dst.w; + currentRect.top = 
config[windowIndex].dst.y; + currentRect.bottom = config[windowIndex].dst.y + config[windowIndex].dst.h; + + intersectionWidth = WIDTH(intersection(currentRect, updateRect)); + + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] win[%d] left(%d) right(%d) intersection(%d)", windowIndex, currentRect.left, currentRect.right, intersectionWidth); + + if (intersectionWidth != 0 && (size_t)((intersectionWidth * bitsPerPixel) / 8) < BURSTLEN_BYTES) { +#ifdef USE_DRM_BURST_LEN + if (mHasDrmSurface) { + if ((size_t)((intersectionWidth * bitsPerPixel) / 8) < DRM_BURSTLEN_BYTES) { + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] win[%d] insufficient burst length (%d)*(%d) < %d", windowIndex, intersectionWidth, bitsPerPixel, BURSTLEN_BYTES); + burstLengthCheckDone = false; + break; + + } + } else { +#endif + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] win[%d] insufficient burst length (%d)*(%d) < %d", windowIndex, intersectionWidth, bitsPerPixel, BURSTLEN_BYTES); + burstLengthCheckDone = false; + break; +#ifdef USE_DRM_BURST_LEN + } +#endif + } + } + } + + if (burstLengthCheckDone) + break; + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] Adjusting update width. current left(%d) right(%d)", updateRect.left, updateRect.right); + if (updateRect.left >= alignAdjustment) { + updateRect.left -= alignAdjustment; + } else if (updateRect.right + alignAdjustment <= this->mXres) { + updateRect.right += alignAdjustment; + } else { + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] Error during update width adjustment"); + return -eWindowUpdateAdjustmentFail; + } + } + + config[winUpdateInfoIdx].state = config[winUpdateInfoIdx].DECON_WIN_STATE_UPDATE; + config[winUpdateInfoIdx].dst.x = ALIGN_DOWN(updateRect.left, xAlign); + if ((WIDTH(updateRect) % wAlign) != 0) + updateRect.right = updateRect.left + ALIGN_DOWN(WIDTH(updateRect), wAlign) + wAlign; + config[winUpdateInfoIdx].dst.w = WIDTH(updateRect); + + config[winUpdateInfoIdx].dst.y = ALIGN_DOWN(updateRect.top, yAlign); + if ((HEIGHT(updateRect) % hAlign) != 0) + updateRect.bottom = updateRect.top + ALIGN_DOWN(HEIGHT(updateRect), hAlign) + hAlign; + config[winUpdateInfoIdx].dst.h = HEIGHT(updateRect); + + /* Final check */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER) + continue; + + if (!mFbNeeded && contents->hwLayers[i].compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + + int32_t windowIndex = mLayerInfos[i]->mWindowIndex; + currentRect.left = config[windowIndex].dst.x; + currentRect.right = config[windowIndex].dst.x + config[windowIndex].dst.w; + currentRect.top = config[windowIndex].dst.y; + currentRect.bottom = config[windowIndex].dst.y + config[windowIndex].dst.h; + + if ((config[windowIndex].state != config[windowIndex].DECON_WIN_STATE_DISABLED) && + intersect(currentRect, updateRect)) { + private_handle_t *handle = NULL; + hwc_rect intersect_rect = intersection(currentRect, updateRect); + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + else + return -eWindowUpdateInvalidConfig; + + int originalFormat = handle->format; + int originalTransform = layer.transform; + if (mLayerInfos[i]->mInternalMPP != NULL) { + handle->format = S3CFormatToHalFormat(config[windowIndex].format); + /* rotation was handled by externalMPP */ + if (mLayerInfos[i]->mExternalMPP != NULL) + layer.transform = 0; + if (((mLayerInfos[i]->mInternalMPP->getSrcXOffsetAlign(layer) % intersect_rect.left) != 0) || + 
((mLayerInfos[i]->mInternalMPP->getSrcYOffsetAlign(layer) % intersect_rect.top) != 0) || + ((mLayerInfos[i]->mInternalMPP->getCropWidthAlign(layer) % WIDTH(intersect_rect)) != 0) || + ((mLayerInfos[i]->mInternalMPP->getCropHeightAlign(layer) % HEIGHT(intersect_rect)) != 0)) { + handle->format = originalFormat; + layer.transform = originalTransform; + config[winUpdateInfoIdx].state = config[winUpdateInfoIdx].DECON_WIN_STATE_DISABLED; + return -eWindowUpdateAdjustmentFail; + } + } + handle->format = originalFormat; + layer.transform = originalTransform; + } + } + + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] UpdateRegion cfg (%4d, %4d) w(%4d) h(%4d) updatedWindowCnt(%d)", + config[winUpdateInfoIdx].dst.x, config[winUpdateInfoIdx].dst.y, config[winUpdateInfoIdx].dst.w, config[winUpdateInfoIdx].dst.h, updatedWinCnt); + + /* Disable block mode if window update region is not full screen */ + if ((config[winUpdateInfoIdx].dst.x != 0) || (config[winUpdateInfoIdx].dst.y != 0) || + (config[winUpdateInfoIdx].dst.w != (uint32_t)mXres) || (config[winUpdateInfoIdx].dst.h != (uint32_t)mXres)) { + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + memset(&config[i].transparent_area, 0, sizeof(config[i].transparent_area)); + memset(&config[i].covered_opaque_area, 0, sizeof(config[i].covered_opaque_area)); + } + } + + return 1; +} + +void ExynosDisplay::getLayerRegion(hwc_layer_1_t &layer, decon_win_rect &rect_area, uint32_t regionType) +{ + hwc_rect_t const *hwcRects = NULL; + unsigned int numRects = 0; + switch (regionType) { + case eTransparentRegion: + hwcRects = layer.transparentRegion.rects; + numRects = layer.transparentRegion.numRects; + break; + case eCoveredOpaqueRegion: + hwcRects = layer.coveredOpaqueRegion.rects; + numRects = layer.coveredOpaqueRegion.numRects; + break; + default: + ALOGE("%s:: Invalid regionType (%d)", __func__, regionType); + return; + } + + rect_area.x = rect_area.y = rect_area.w = rect_area.h = 0; + if (hwcRects != NULL) { + for (size_t j = 0; j < numRects; j++) { + hwc_rect_t rect; + rect.left = hwcRects[j].left; + rect.top = hwcRects[j].top; + rect.right = hwcRects[j].right; + rect.bottom = hwcRects[j].bottom; + adjustRect(rect, mXres, mYres); + /* Find the largest rect */ + if ((rect_area.w * rect_area.h) < + (uint32_t)(WIDTH(rect) * HEIGHT(rect))) { + rect_area.x = rect.left; + rect_area.y = rect.top; + rect_area.w = WIDTH(rect); + rect_area.h = HEIGHT(rect); + } + } + } +} + +unsigned int ExynosDisplay::getLayerRegion(hwc_layer_1_t &layer, hwc_rect &rect_area, uint32_t regionType) { + hwc_rect_t const *hwcRects = NULL; + unsigned int numRects = 0; + + switch (regionType) { + case eDamageRegion: + hwcRects = layer.surfaceDamage.rects; + numRects = layer.surfaceDamage.numRects; + break; + default: + ALOGE("%s:: Invalid regionType (%d)", __func__, regionType); + return eDamageRegionError; + } + + if ((numRects == 0) || (hwcRects == NULL)) + return eDamageRegionFull; + + if ((numRects == 1) && (hwcRects[0].left == 0) && (hwcRects[0].top == 0) && + (hwcRects[0].right == 0) && (hwcRects[0].bottom == 0)) + return eDamageRegionSkip; + + rect_area.left = INT_MAX; + rect_area.top = INT_MAX; + rect_area.right = rect_area.bottom = 0; + if (hwcRects != NULL) { + for (size_t j = 0; j < numRects; j++) { + hwc_rect_t rect; + rect.left = hwcRects[j].left; + rect.top = hwcRects[j].top; + rect.right = hwcRects[j].right; + rect.bottom = hwcRects[j].bottom; + adjustRect(rect, INT_MAX, INT_MAX); + /* Get sums of rects */ + rect_area = expand(rect_area, rect); + } + } + + return 
eDamageRegionPartial; +} + +bool ExynosDisplay::getPreviousDRMDMA(int *dma) +{ + *dma = -1; + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if (mLastConfigData.config[i].protection == 1) { + *dma = (int)mLastConfigData.config[i].idma_type; + return true; + } + } + return false; +} + +int ExynosDisplay::winconfigIoctl(decon_win_config_data *win_data) +{ + ATRACE_CALL(); + return ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, win_data); +} + +int ExynosDisplay::postFrame(hwc_display_contents_1_t* contents) +{ + ATRACE_CALL(); + + if (mWinData == NULL) { + DISPLAY_LOGE("mWinData is not valid"); + return -1; + } + struct decon_win_config *config = mWinData->config; + int win_map = 0; + int tot_ovly_wins = 0; + uint32_t rectCount = 0; + int ret = 0; + + memset(mLastHandles, 0, sizeof(mLastHandles)); + memset(mLastMPPMap, 0, sizeof(mLastMPPMap)); + memset(config, 0, sizeof(mWinData->config)); + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + config[i].fence_fd = -1; + mLastMPPMap[i].internal_mpp.type = -1; + mLastMPPMap[i].external_mpp.type = -1; + } + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV + && rectCount < mBackUpFrect.size()) + layer.sourceCropf = mBackUpFrect[rectCount++]; + } + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + int32_t window_index = mLayerInfos[i]->mWindowIndex; + private_handle_t *handle = NULL; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + + if ((layer.flags & HWC_SKIP_RENDERING) || + ((layer.compositionType == HWC_OVERLAY) && + ((window_index < 0) || (window_index >= NUM_HW_WINDOWS)))) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + + if ((window_index < 0) || (window_index >= NUM_HW_WINDOWS)) { + android::String8 result; + DISPLAY_LOGE("window of layer %d was not assigned (window_index: %d)", i, window_index); + dumpContents(result, contents); + ALOGE(result.string()); + result.clear(); + dumpLayerInfo(result); + ALOGE(result.string()); + } + continue; + } + + if ((mVirtualOverlayFlag == true) && (layer.compositionType == HWC_OVERLAY) && + handle && (getDrmMode(handle->flags) == NO_DRM) && + (mFirstFb <= i) && (i <= mLastFb)) + continue; + + if (((layer.compositionType == HWC_OVERLAY) || + (mFbNeeded == true && layer.compositionType == HWC_FRAMEBUFFER_TARGET))) { + mLastHandles[window_index] = layer.handle; + + if (handle && (getDrmMode(handle->flags) == SECURE_DRM)) + config[window_index].protection = 1; + else + config[window_index].protection = 0; + + if (mLayerInfos[i]->mInternalMPP != NULL) { + mLastMPPMap[window_index].internal_mpp.type = mLayerInfos[i]->mInternalMPP->mType; + mLastMPPMap[window_index].internal_mpp.index = mLayerInfos[i]->mInternalMPP->mIndex; + } + if (mLayerInfos[i]->mExternalMPP != NULL) { + mLastMPPMap[window_index].external_mpp.type = mLayerInfos[i]->mExternalMPP->mType; + mLastMPPMap[window_index].external_mpp.index = mLayerInfos[i]->mExternalMPP->mIndex; + if (postMPPM2M(layer, config, window_index, i) < 0) + continue; + } else { + configureOverlay(&layer, i, config[window_index]); + } + } + if (window_index == 0 && config[window_index].blending != DECON_BLENDING_NONE) { + DISPLAY_LOGD(eDebugWinConfig, "blending not supported on window 0; forcing 
BLENDING_NONE"); + config[window_index].blending = DECON_BLENDING_NONE; + } + if ((window_index < DECON_WIN_UPDATE_IDX) && + (config[window_index].state != config[window_index].DECON_WIN_STATE_DISABLED) && + (config[window_index].src.w == 0 || config[window_index].src.h == 0 || + config[window_index].dst.w == 0 || config[window_index].dst.h == 0)) { + config[window_index].state = config[window_index].DECON_WIN_STATE_DISABLED; + } + } + + if (this->mVirtualOverlayFlag) { + handleStaticLayers(contents, *mWinData, tot_ovly_wins); + } + + if ((ret = handleWindowUpdate(contents, config)) < 0) + DISPLAY_LOGD(eDebugWindowUpdate, "[WIN_UPDATE] UpdateRegion is FullScreen, ret(%d)", ret); + +#if defined(USES_DUAL_DISPLAY) + if (mType == EXYNOS_PRIMARY_DISPLAY) { + int8_t indexLastConfig = 0; + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if (config[i].state == config[i].DECON_WIN_STATE_DISABLED) { + indexLastConfig = i; + break; + } + } + if (mHwc->secondaryDisplay->mEnabled) { + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + struct decon_win_config &secondary_config = mHwc->secondaryDisplay->mLastConfigData.config[i]; + int8_t index = 0; + if (indexLastConfig == NUM_HW_WINDOWS) { + DISPLAY_LOGE("primaryDisplay last config index is not valid(primaryLastIndex: %d)", + indexLastConfig); + break; + } + if (i == (NUM_HW_WINDOWS - 1)) + index = i; + else + index = indexLastConfig + i; + DISPLAY_LOGD(eDebugWinConfig, "secondary_config window %u configuration:", i); + dumpConfig(secondary_config); + if (secondary_config.state != secondary_config.DECON_WIN_STATE_DISABLED) { + if (index >= NUM_HW_WINDOWS) { + DISPLAY_LOGE("secondaryDisplay config index is not valid(primaryLastIndex: %d, index:%d", + indexLastConfig, index); + } else { + memcpy(&config[index],&secondary_config, sizeof(struct decon_win_config)); + mLastHandles[index] = mHwc->secondaryDisplay->mLastHandles[i]; + } + } + } + } + } + + if (mType != EXYNOS_SECONDARY_DISPLAY) + { +#endif + for (size_t i = 0; i <= NUM_HW_WINDOWS; i++) { + DISPLAY_LOGD(eDebugWinConfig, "window %u configuration:", i); + dumpConfig(config[i]); + } + + if (checkConfigValidation(config) < 0) { + android::String8 result; + DISPLAY_LOGE("WIN_CONFIG is not valid"); + for (size_t i = 0; i <= MAX_DECON_WIN; i++) { + result.appendFormat("window %zu configuration:\n", i); + dumpConfig(config[i], result); + } + ALOGE(result.string()); + result.clear(); + dumpContents(result, contents); + ALOGE(result.string()); + result.clear(); + dumpLayerInfo(result); + ALOGE(result.string()); + } + + if (checkConfigChanged(*mWinData, mLastConfigData) == false) { + ret = 0; + } else { + ret = winconfigIoctl(mWinData); + if (ret < 0) { + DISPLAY_LOGE("ioctl S3CFB_WIN_CONFIG failed: %s", strerror(errno)); + } else { + ret = mWinData->fence; + memcpy(&(this->mLastConfigData), mWinData, sizeof(*mWinData)); + } + } + + /* + * Acquire fence of all of OVERLAY layers(including layers for secondary LCD) + * should be closed even if WIN_CONFIG is skipped + */ + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + if (config[i].fence_fd != -1) + close(config[i].fence_fd); + } +#if defined(USES_DUAL_DISPLAY) + } else { + memcpy(&(this->mLastConfigData), mWinData, sizeof(*mWinData)); + } +#endif + if (contents->numHwLayers == 1) { + hwc_layer_1_t &layer = contents->hwLayers[0]; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + } + rectCount = 0; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = 
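+ /* Illustrative sketch (not from the vendor source): the commit path above only issues
+  * S3CFB_WIN_CONFIG when the new window configuration differs from the one already on
+  * screen, and only latches it as "last" on success.  The gate in isolation (the types
+  * and helper names here are placeholders, not the HAL's):
+  *
+  *   #include <sys/ioctl.h>
+  *   // Returns the retire fence from the kernel, 0 if nothing changed, <0 on error.
+  *   static int commitIfChanged(int fd, unsigned long req, WinConfigData &next, WinConfigData &last)
+  *   {
+  *       if (!configChanged(last, next))
+  *           return 0;                          // skip the ioctl entirely
+  *       if (ioctl(fd, req, &next) < 0)
+  *           return -1;                         // keep 'last' untouched on failure
+  *       last = next;                           // remember what DECON is now showing
+  *       return next.fence;
+  *   }
+  */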
contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV + && rectCount < mOriginFrect.size()) + layer.sourceCropf = mOriginFrect[rectCount++]; + } + } + mOriginFrect.clear(); + mBackUpFrect.clear(); + + if (!this->mVirtualOverlayFlag && (ret >= 0)) + this->mLastFbWindow = mFbWindow; + + return ret; +} + +void ExynosDisplay::skipStaticLayers(hwc_display_contents_1_t* contents) +{ + mVirtualOverlayFlag = 0; + int win_map = 0; + int fbIndex = contents->numHwLayers - 1; + + if (!mHwc->hwc_ctrl.skip_static_layer_mode) + return; + + if (mBypassSkipStaticLayer) + return; + + if (contents->flags & HWC_GEOMETRY_CHANGED) { + mSkipStaticInitFlag = false; + return; + } + + if (!mFbNeeded || ((mLastFb - mFirstFb + 1) > NUM_VIRT_OVER)) { + mSkipStaticInitFlag = false; + return; + } + + if (mSkipStaticInitFlag) { + if (mNumStaticLayers != (mLastFb - mFirstFb + 1)) { + mSkipStaticInitFlag = false; + return; + } + + for (size_t i = mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (!layer.handle || (layer.flags & HWC_SKIP_LAYER) || (mLastLayerHandles[i - mFirstFb] != layer.handle)) { + mSkipStaticInitFlag = false; + return; + } + } + + if ((mLastFbWindow >= NUM_HW_WINDOWS) || (fbIndex < 0)) { + mSkipStaticInitFlag = false; + DISPLAY_LOGE("skipStaticLayers:: invalid mLastFbWindow(%d), fbIndex(%d)", mLastFbWindow, fbIndex); + return; + } + /* DMA mapping is changed */ + if (mLastConfigData.config[mLastFbWindow].idma_type != mLayerInfos[fbIndex]->mDmaType) { + mSkipStaticInitFlag = false; + return; + } + + mVirtualOverlayFlag = 1; + for (size_t i = 0; i < contents->numHwLayers-1; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_FRAMEBUFFER) { + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSkipStaticLayer; + } + } + mLastFbWindow = mFbWindow; + return; + } + + mSkipStaticInitFlag = true; + for (size_t i = 0; i < NUM_VIRT_OVER; i++) + mLastLayerHandles[i] = 0; + + for (size_t i = mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + mLastLayerHandles[i - mFirstFb] = layer.handle; + } + mNumStaticLayers = (mLastFb - mFirstFb + 1); + + return; +} + +void ExynosDisplay::dumpMPPs(android::String8& result) +{ + result.appendFormat("displayType(%d)\n", mType); + result.appendFormat("Internal MPPs number: %zu\n", mInternalMPPs.size()); + result.append( + " mType | mIndex | mState \n" + "-------+--------+-----------\n"); + // 5____ | 6_____ | 9________ \n + + for (size_t i = 0; i < mInternalMPPs.size(); i++) { + ExynosMPPModule* internalMPP = mInternalMPPs[i]; + result.appendFormat(" %5d | %6d | %9d \n", + internalMPP->mType, internalMPP->mIndex, internalMPP->mState); + } + + result.append("\n"); + result.appendFormat("External MPPs number: %zu\n", mExternalMPPs.size()); + result.append( + " mType | mIndex | mState \n" + "-------+--------+-----------\n"); + // 5____ | 6_____ | 9________ \n + + for (size_t i = 0; i < mExternalMPPs.size(); i++) { + ExynosMPPModule* internalMPP = mExternalMPPs[i]; + result.appendFormat(" %5d | %6d | %9d \n", + internalMPP->mType, internalMPP->mIndex, internalMPP->mState); + } +} + +void ExynosDisplay::preAssignFbTarget(hwc_display_contents_1_t *contents, bool assign) +{ + ExynosMPPModule* supportedInternalMPP = NULL; + ExynosMPPModule* supportedExternalMPP = NULL; + + 
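+ /* Illustrative sketch (not from the vendor source): skipStaticLayers() above caches the
+  * buffer handles of the GLES span and keeps re-using the previous framebuffer window
+  * while those handles stay identical.  The comparison at its core:
+  *
+  *   #include <cstddef>
+  *   // 'cached' holds the handles captured on the previous frame, 'now' this frame's.
+  *   static bool sameStaticStack(const void *const *cached, const void *const *now, std::size_t n)
+  *   {
+  *       for (std::size_t i = 0; i < n; ++i)
+  *           if (cached[i] != now[i])
+  *               return false;                  // any new buffer invalidates the cache
+  *       return true;
+  *   }
+  */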
int fbIndex = contents->numHwLayers - 1; + hwc_layer_1_t &layer = contents->hwLayers[fbIndex]; + mFbPreAssigned = false; + + if (!assign) + return; + + if (layer.compositionType != HWC_FRAMEBUFFER_TARGET) { + ALOGE("preAssignFbTarget: FRAMEBUFFER_TARGET is not set properly"); + return; + } + + bool ret = isOverlaySupported(layer, fbIndex, true, &supportedInternalMPP, &supportedExternalMPP); + if (ret && (supportedInternalMPP != NULL) && (supportedExternalMPP == NULL)) { + DISPLAY_LOGD(eDebugResourceAssigning, "Preassigning FramebufferTarget with internalMPP(%d, %d)", supportedInternalMPP->mType, supportedInternalMPP->mIndex); + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + mLayerInfos[fbIndex]->mInternalMPP = supportedInternalMPP; + mLayerInfos[fbIndex]->mDmaType = getDeconDMAType(mLayerInfos[fbIndex]->mInternalMPP); + for (size_t i = 0; i < mInternalMPPs.size(); i++) { + if ((ExynosMPPModule *)mInternalMPPs[i] == supportedInternalMPP) { + mInternalMPPs.removeItemsAt(i); + } + } + mFbPreAssigned = true; + } else { + ALOGE("preAssignFbTarget: preassigning FB failed"); + return; + } +} + +void ExynosDisplay::determineYuvOverlay(hwc_display_contents_1_t *contents) +{ + mYuvLayers = 0; + bool useVPPOverlayFlag = false, hasDrmLayer = mHasDrmSurface; + uint32_t rectCount = 0; + int drmLayerIndex = mForceOverlayLayerIndex; + size_t i; + mForceOverlayLayerIndex = -1; + mHasDrmSurface = false; + + for (size_t j = 0; j < contents->numHwLayers; j++) { + i = hasDrmLayer ? ((j + drmLayerIndex) % contents->numHwLayers) : j; + + ExynosMPPModule* supportedInternalMPP = NULL; + ExynosMPPModule* supportedExternalMPP = NULL; + hwc_layer_1_t &layer = contents->hwLayers[i]; + useVPPOverlayFlag = false; + hwc_frect_t origin; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (getDrmMode(handle->flags) != NO_DRM) { + useVPPOverlayFlag = true; + if (mHwc->hdmi_hpd && (!mHwc->video_playback_status)) { + layer.flags |= HWC_SKIP_RENDERING; + continue; + } else + layer.flags &= ~HWC_SKIP_RENDERING; + } + + /* check yuv surface */ + if (!isFormatRgb(handle->format)) { + if (mForceFb && (getDrmMode(handle->flags) == NO_DRM)) { + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eForceFbEnabled; + continue; + } + /* HACK: force integer source crop */ + layer.sourceCropf.top = (int)layer.sourceCropf.top; + layer.sourceCropf.left = (int)layer.sourceCropf.left; + layer.sourceCropf.bottom = (int)(layer.sourceCropf.bottom + 0.9); + layer.sourceCropf.right = (int)(layer.sourceCropf.right + 0.9); + + /* support to process interlaced color format data */ + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV) { + void *metaData = NULL; + int interlacedType = -1; + mOriginFrect.push(layer.sourceCropf); + + if (handle->fd2 >= 0) { + metaData = mmap(0, 64, PROT_READ|PROT_WRITE, MAP_SHARED, handle->fd2, 0); + if (metaData) + interlacedType = *(int *)metaData; + else + interlacedType = -1; + } + + if (interlacedType == V4L2_FIELD_INTERLACED_BT) { + if ((int)layer.sourceCropf.left < (int)(handle->stride)) { + layer.sourceCropf.left = (int)layer.sourceCropf.left + handle->stride; + layer.sourceCropf.right = (int)layer.sourceCropf.right + handle->stride; + } + } + if (interlacedType == V4L2_FIELD_INTERLACED_TB || interlacedType == V4L2_FIELD_INTERLACED_BT) { + layer.sourceCropf.top = (int)(layer.sourceCropf.top)/2; + layer.sourceCropf.bottom = (int)(layer.sourceCropf.bottom)/2; 
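+ /* Illustrative sketch (not from the vendor source): for interlaced NV12 content the code
+  * above presents one field to the pipeline, so the vertical crop is halved and, for the
+  * bottom-field-first case, the horizontal crop is shifted by one luma stride:
+  *
+  *   struct Crop { float left, top, right, bottom; };
+  *   static Crop toFieldCrop(Crop c, bool bottomFieldFirst, int lumaStride)
+  *   {
+  *       if (bottomFieldFirst && static_cast<int>(c.left) < lumaStride) {
+  *           c.left  += lumaStride;             // step onto the bottom field's samples
+  *           c.right += lumaStride;
+  *       }
+  *       c.top    = static_cast<int>(c.top) / 2;        // one field = half the lines
+  *       c.bottom = static_cast<int>(c.bottom) / 2;
+  *       return c;
+  *   }
+  */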
+ } + mBackUpFrect.push(layer.sourceCropf); + + if (metaData) + munmap(metaData, 64); + } + + if (isOverlaySupported(contents->hwLayers[i], i, useVPPOverlayFlag, &supportedInternalMPP, &supportedExternalMPP)) { + this->mYuvLayers++; + if (this->mHasDrmSurface == false) { + /* Assign MPP */ + if (supportedExternalMPP != NULL) + supportedExternalMPP->mState = MPP_STATE_ASSIGNED; + if (supportedInternalMPP != NULL) + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + + mForceOverlayLayerIndex = i; + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->mExternalMPP = supportedExternalMPP; + mLayerInfos[i]->mInternalMPP = supportedInternalMPP; + mLayerInfos[i]->compositionType = layer.compositionType; + + if ((getDrmMode(handle->flags) != NO_DRM) && + isBothMPPProcessingRequired(layer) && + (supportedInternalMPP != NULL)) { + layer.displayFrame.right = layer.displayFrame.left + + ALIGN_DOWN(WIDTH(layer.displayFrame), supportedInternalMPP->getCropWidthAlign(layer)); + layer.displayFrame.bottom = layer.displayFrame.top + + ALIGN_DOWN(HEIGHT(layer.displayFrame), supportedInternalMPP->getCropHeightAlign(layer)); + } + + if ((getDrmMode(handle->flags) != NO_DRM) && + (supportedInternalMPP != NULL)) { + if (WIDTH(layer.displayFrame) < supportedInternalMPP->getMinWidth(layer)) { + ALOGE("determineYuvOverlay layer %d displayFrame width %d is smaller than vpp minWidth %d", + i, WIDTH(layer.displayFrame), supportedInternalMPP->getMinWidth(layer)); + layer.displayFrame.right = layer.displayFrame.left + + ALIGN_DOWN(WIDTH(layer.displayFrame), supportedInternalMPP->getMinWidth(layer)); + } + if (HEIGHT(layer.displayFrame) < supportedInternalMPP->getMinHeight(layer)) { + ALOGE("determineYuvOverlay layer %d displayFrame height %d is smaller than vpp minHeight %d", + i, HEIGHT(layer.displayFrame), supportedInternalMPP->getMinHeight(layer)); + layer.displayFrame.bottom = layer.displayFrame.top + + ALIGN_DOWN(HEIGHT(layer.displayFrame), supportedInternalMPP->getMinHeight(layer)); + } + } + } + } else { + if (getDrmMode(handle->flags) != NO_DRM) { + /* This layer should be overlay but HWC can't handle it */ + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + layer.flags |= HWC_SKIP_RENDERING; + } + } + } + + if (getDrmMode(handle->flags) != NO_DRM) { + this->mHasDrmSurface = true; + mForceOverlayLayerIndex = i; + } + } + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV + && rectCount < mOriginFrect.size()) + layer.sourceCropf = mOriginFrect[rectCount++]; + } + } +} + +void ExynosDisplay::determineSupportedOverlays(hwc_display_contents_1_t *contents) +{ + bool videoLayer = false; + + mFbNeeded = false; + mFirstFb = ~0; + mLastFb = 0; + uint32_t rectCount = 0; + + // find unsupported overlays + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + mLayerInfos[i]->mCompressed = isCompressed(layer); + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: framebuffer target", i); + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + + if (layer.compositionType == HWC_BACKGROUND && !mForceFb) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: background supported", i); + dumpLayer(eDebugOverlaySupported, &contents->hwLayers[i]); + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + + if (layer.flags & 
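+ /* Illustrative sketch (not from the vendor source): the displayFrame fix-ups above lean on
+  * the ALIGN_DOWN/ALIGN_UP style helpers used throughout this HAL; assuming the conventional
+  * definitions, they behave like:
+  *
+  *   // Round x down / up to a multiple of a (a > 0).
+  *   static inline int alignDown(int x, int a) { return x - (x % a); }
+  *   static inline int alignUp(int x, int a)   { return alignDown(x + a - 1, a); }
+  *   // alignDown(1077, 16) == 1072,  alignUp(1077, 16) == 1088.
+  */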
HWC_SKIP_RENDERING) { + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + continue; + } + + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV + && rectCount < mBackUpFrect.size()) + layer.sourceCropf = mBackUpFrect[rectCount++]; + + ExynosMPPModule* supportedInternalMPP = NULL; + ExynosMPPModule* supportedExternalMPP = NULL; + + if ((int)get_yuv_planes(halFormatToV4L2Format(handle->format)) > 0) { + videoLayer = true; + if (!mHwc->hdmi_hpd && mHwc->mS3DMode == S3D_MODE_READY) + mHwc->mS3DMode = S3D_MODE_RUNNING; + } + mHwc->incomingPixels += WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer(%d), type=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x, " + "{%7.1f,%7.1f,%7.1f,%7.1f}, {%d,%d,%d,%d}", i, + layer.compositionType, layer.flags, layer.handle, layer.transform, + layer.blending, + layer.sourceCropf.left, + layer.sourceCropf.top, + layer.sourceCropf.right, + layer.sourceCropf.bottom, + layer.displayFrame.left, + layer.displayFrame.top, + layer.displayFrame.right, + layer.displayFrame.bottom); + /* Video layer's compositionType was set in determineYuvOverlay */ + if (!isFormatRgb(handle->format) && layer.compositionType == HWC_OVERLAY) + continue; + + if(((getDrmMode(handle->flags) != NO_DRM) || + (!mForceFb && (!mHwc->hwc_ctrl.dynamic_recomp_mode || mHwc->CompModeSwitch != HWC_2_GLES))) && + isOverlaySupported(contents->hwLayers[i], i, false, &supportedInternalMPP, &supportedExternalMPP)) { + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: overlay supported", i); + if (supportedExternalMPP != NULL) { + supportedExternalMPP->mState = MPP_STATE_ASSIGNED; + mLayerInfos[i]->mExternalMPP = supportedExternalMPP; + } + if (supportedInternalMPP != NULL) { + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + mLayerInfos[i]->mInternalMPP = supportedInternalMPP; + } + + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + + dumpLayer(eDebugOverlaySupported, &contents->hwLayers[i]); + continue; + } else { + ExynosMPPModule *dummyInternal = NULL; + ExynosMPPModule *dummyExternal = NULL; + DISPLAY_LOGD(eDebugOverlaySupported, "\tlayer %u: overlay is not supported, dynamic_recomp_mode(%d), CompModeSwitch(%d)", i, mHwc->hwc_ctrl.dynamic_recomp_mode, mHwc->CompModeSwitch); + if (mForceFb) + mLayerInfos[i]->mCheckOverlayFlag |= eForceFbEnabled; + else if (mHwc->hwc_ctrl.dynamic_recomp_mode && mHwc->CompModeSwitch == HWC_2_GLES) + mLayerInfos[i]->mCheckOverlayFlag |= eDynamicRecomposition; + else if (isOverlaySupported(contents->hwLayers[i], i, false, &dummyInternal, &dummyExternal)) + mLayerInfos[i]->mCheckOverlayFlag |= eUnknown; + } + } else { + mLayerInfos[i]->mCheckOverlayFlag |= eInvalidHandle; + } + + if (!mFbNeeded) { + mFirstFb = i; + mFbNeeded = true; + } + mLastFb = i; + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + + dumpLayer(eDebugOverlaySupported, &contents->hwLayers[i]); + } + + if (!mHwc->hdmi_hpd && mHwc->mS3DMode == S3D_MODE_RUNNING && !videoLayer) + mHwc->mS3DMode = S3D_MODE_DISABLED; + hwc_rect_t base_rect = {0, 0, 0, 0}; + hwc_rect_t intersect_rect; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY) { + if (i == 0) { + base_rect = layer.displayFrame; + } else if 
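+ /* Illustrative sketch (not from the vendor source): stripped of the device specifics, the
+  * per-layer branch above decides between a hardware overlay and GLES composition roughly
+  * like this (DRM-protected layers bypass the policy switches):
+  *
+  *   enum class Path { Overlay, Gles };
+  *   static Path choosePath(bool isDrm, bool forceGles, bool dynamicRecompToGles, bool overlaySupported)
+  *   {
+  *       if (!isDrm && (forceGles || dynamicRecompToGles))
+  *           return Path::Gles;                 // policy overrides capability
+  *       return overlaySupported ? Path::Overlay : Path::Gles;
+  *   }
+  */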
(hasPlaneAlpha(layer)) { + //if alpha layer is not completely overlapped with base layer, bypass the alpha layer to GLES. + intersect_rect = intersection(base_rect, layer.displayFrame); + if (!rectEqual(intersect_rect, layer.displayFrame)) { + layer.compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = layer.compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eUnsupportedBlending; + mFirstFb = min(mFirstFb, i); + mLastFb = max(mLastFb, i); + mFbNeeded = true; + break; + } + } + } else { + // if one of the bottom layer is HWC_FRAMEBUFFER type, no need to force the alpha layer to FRAMEBUFFER type. + break; + } + } + mFirstFb = min(mFirstFb, (size_t)NUM_HW_WINDOWS-1); + // can't composite overlays sandwiched between framebuffers + if (mFbNeeded) { + private_handle_t *handle = NULL; + for (size_t i = mFirstFb; i < mLastFb; i++) { + if (contents->hwLayers[i].flags & HWC_SKIP_RENDERING) + continue; + + hwc_layer_1_t &layer = contents->hwLayers[i]; + handle = NULL; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + if (handle && getDrmMode(handle->flags) != NO_DRM) { + layer.hints = HWC_HINT_CLEAR_FB; + } else { + contents->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = contents->hwLayers[i].compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSandwitchedBetweenGLES; + if (mLayerInfos[i]->mExternalMPP != NULL) { + mLayerInfos[i]->mExternalMPP->mState = MPP_STATE_FREE; + mLayerInfos[i]->mExternalMPP = NULL; + } + if (mLayerInfos[i]->mInternalMPP != NULL) { + mLayerInfos[i]->mInternalMPP->mState = MPP_STATE_FREE; + mLayerInfos[i]->mInternalMPP = NULL; + } + } + } + } +} + +void ExynosDisplay::determineBandwidthSupport(hwc_display_contents_1_t *contents) +{ + // Incrementally try to add our supported layers to hardware windows. + // If adding a layer would violate a hardware constraint, force it + // into the framebuffer and try again. (Revisiting the entire list is + // necessary because adding a layer to the framebuffer can cause other + // windows to retroactively violate constraints.) 
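+ /* Illustrative sketch (not from the vendor source): the "sandwich" pass above pushes every
+  * non-DRM overlay that sits between the first and last GLES layers back to GLES, since the
+  * single framebuffer target cannot be interleaved with overlays in z-order.  In outline:
+  *
+  *   #include <cstddef>
+  *   static bool mustJoinFramebuffer(std::size_t i, std::size_t firstFb, std::size_t lastFb, bool isDrm)
+  *   {
+  *       return !isDrm && i > firstFb && i < lastFb;    // strictly inside the GLES span
+  *   }
+  */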
+ bool changed; + this->mBypassSkipStaticLayer = false; + unsigned int cannotComposeFlag = 0; + int internalDMAsUsed = 0; + int retry = 0; + int fbIndex = contents->numHwLayers - 1; + + // Initialize to inverse values so that + // min(left, l) = l, min(top, t) = t + // max(right, r) = r, max(bottom, b) = b + // for all l, t, r, b + mFbUpdateRegion.left = mXres; + mFbUpdateRegion.top = mYres; + mFbUpdateRegion.right = 0; + mFbUpdateRegion.bottom = 0; + + do { + uint32_t win_idx = 0; + size_t windows_left; + unsigned int directFbNum = 0; + int videoOverlays = 0; + mHwc->totPixels = 0; + ExynosMPPModule* supportedInternalMPP = NULL; + ExynosMPPModule* supportedExternalMPP = NULL; + bool ret = 0; + + for (size_t i = 0; i < mInternalMPPs.size(); i++) { + if (mInternalMPPs[i]->mState != MPP_STATE_TRANSITION && + (!mHasDrmSurface || + (mLayerInfos[mForceOverlayLayerIndex]->mInternalMPP != mInternalMPPs[i]))) { + mInternalMPPs[i]->mState = MPP_STATE_FREE; + } + } + + for (size_t i = 0; i < mExternalMPPs.size(); i++) { + if (mExternalMPPs[i]->mState != MPP_STATE_TRANSITION && + (!mHasDrmSurface || + (mLayerInfos[mForceOverlayLayerIndex]->mExternalMPP != mExternalMPPs[i]))) { + mExternalMPPs[i]->mState = MPP_STATE_FREE; + } + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + if (!mHasDrmSurface || (int)i != mForceOverlayLayerIndex) { + mLayerInfos[i]->mInternalMPP = NULL; + mLayerInfos[i]->mExternalMPP = NULL; + } + } + + changed = false; + mMPPLayers = 0; + + if (mFbPreAssigned) { + DISPLAY_LOGD(eDebugResourceAssigning, "fb has been pre-assigned already"); + windows_left = min(mAllowedOverlays, mHwc->hwc_ctrl.max_num_ovly) - 1; + } else if (mFbNeeded && (contents->numHwLayers - 1 > 0)) { + hwc_layer_1_t &layer = contents->hwLayers[fbIndex]; + if (mUseSecureDMA && (mLastFb == (contents->numHwLayers - 2)) && isOverlaySupportedByIDMA(layer, fbIndex)) { + /* FramebufferTarget is the top layer, Secure DMA is used */ + windows_left = min(mAllowedOverlays, mHwc->hwc_ctrl.max_num_ovly); + mLayerInfos[contents->numHwLayers - 1]->mDmaType = IDMA_SECURE; + } else if ((mInternalDMAs.size() > 0) && isOverlaySupportedByIDMA(layer, fbIndex)) { + /* Internal DMA is used */ + windows_left = min(mAllowedOverlays, mHwc->hwc_ctrl.max_num_ovly) - 1; + mLayerInfos[contents->numHwLayers - 1]->mDmaType = mInternalDMAs[directFbNum]; + win_idx = (win_idx == mFirstFb) ? (win_idx + 1) : win_idx; + directFbNum++; + } else { + /* VPP should be used for DMA */ + windows_left = min(mAllowedOverlays, mHwc->hwc_ctrl.max_num_ovly) - 1; + ret = isOverlaySupported(layer, fbIndex, true, &supportedInternalMPP, &supportedExternalMPP); + if (ret && (supportedInternalMPP != NULL)) { + DISPLAY_LOGD(eDebugResourceAssigning, "FramebufferTarget internalMPP(%d, %d)", supportedInternalMPP->mType, supportedInternalMPP->mIndex); + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + mLayerInfos[fbIndex]->mInternalMPP = supportedInternalMPP; + mLayerInfos[fbIndex]->mDmaType = getDeconDMAType(mLayerInfos[fbIndex]->mInternalMPP); + if (mLayerInfos[fbIndex]->mDmaType >= MAX_DECON_DMA_TYPE) { + ALOGE("getDeconDMAType with InternalMPP for FramebufferTarget failed (MPP type: %d, MPP index: %d)", + mLayerInfos[fbIndex]->mInternalMPP->mType, mLayerInfos[fbIndex]->mInternalMPP->mIndex); + mLayerInfos[fbIndex]->mDmaType = 0; + } + win_idx = (win_idx == mFirstFb) ? 
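+ /* Illustrative sketch (not from the vendor source): each pass of the retry loop above starts
+  * from a window budget and reserves one window for the framebuffer target unless Secure DMA
+  * can carry it as the top layer.  The budgeting alone (parameter names are placeholders):
+  *
+  *   static int overlayBudget(int allowedOverlays, int maxOverlayLimit, bool fbNeeded, bool fbOnSecureDma)
+  *   {
+  *       int budget = (allowedOverlays < maxOverlayLimit) ? allowedOverlays : maxOverlayLimit;
+  *       if (fbNeeded && !fbOnSecureDma)
+  *           --budget;                          // one window is spent on the FB target
+  *       return budget;
+  *   }
+  */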
(win_idx + 1) : win_idx; + } else { + ALOGE("VPP should be assigned to FramebufferTarget but it was failed ret(%d)", ret); + } + } + mHwc->totPixels += mXres * mYres; + } else { + windows_left = min(mAllowedOverlays, mHwc->hwc_ctrl.max_num_ovly); + } + + DISPLAY_LOGD(eDebugResourceAssigning, "determineBandwidthSupport:: retry(%d), mAllowedOverlays(%d), windows_left(%d), win_idx(%d)", + retry, mAllowedOverlays, windows_left, win_idx); + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + bool isTopLayer = (i == contents->numHwLayers - 2) ? true:false; + private_handle_t *handle = NULL; + + if (layer.flags & HWC_SKIP_RENDERING) + continue; + + if ((layer.flags & HWC_SKIP_LAYER) || + (layer.compositionType == HWC_FRAMEBUFFER_TARGET)) + continue; + + if (!layer.planeAlpha) + continue; + + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + else + continue; + + // we've already accounted for the framebuffer above + if (layer.compositionType == HWC_FRAMEBUFFER) + continue; + + // only layer 0 can be HWC_BACKGROUND, so we can + // unconditionally allow it without extra checks + if (layer.compositionType == HWC_BACKGROUND) { + windows_left--; + continue; + } + + cannotComposeFlag = 0; + bool can_compose = windows_left && (win_idx < NUM_HW_WINDOWS); + if (mUseSecureDMA && !isCompressed(layer) && isTopLayer) + can_compose = true; + else if (windows_left <= 0 || (win_idx >= NUM_HW_WINDOWS)) + cannotComposeFlag |= eInsufficientWindow; + if (!isFormatRgb(handle->format) && videoOverlays >= mHwc->hwc_ctrl.num_of_video_ovly) { + can_compose = false; + cannotComposeFlag |= eInsufficientMPP; + } + + /* mInternalMPP, mExternalMPP could be set by determineYuvOverlay */ + supportedInternalMPP = mLayerInfos[i]->mInternalMPP; + supportedExternalMPP = mLayerInfos[i]->mExternalMPP; + + if (can_compose && !isProcessingRequired(layer) && isOverlaySupportedByIDMA(layer, i) && + (directFbNum < mInternalDMAs.size() || (mUseSecureDMA && isTopLayer))) { + if (directFbNum < mInternalDMAs.size()) + directFbNum++; + DISPLAY_LOGD(eDebugResourceAssigning, "layer(%d) is directFB", i); + } else if (can_compose && isOverlaySupported(layer, i, + !isProcessingRequired(layer) | + (directFbNum >= mInternalDMAs.size() && !(mUseSecureDMA && isTopLayer)), + &supportedInternalMPP, &supportedExternalMPP)) { + DISPLAY_LOGD(eDebugResourceAssigning, "layer(%d) is OVERLAY ",i); + if (supportedInternalMPP != NULL) { + DISPLAY_LOGD(eDebugResourceAssigning, "layer(%d) is OVERLAY internalMPP(%d, %d)", i, supportedInternalMPP->mType, supportedInternalMPP->mIndex); + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + mLayerInfos[i]->mInternalMPP = supportedInternalMPP; + mLayerInfos[i]->mDmaType = getDeconDMAType(mLayerInfos[i]->mInternalMPP); + } + if (supportedExternalMPP != NULL) { + DISPLAY_LOGD(eDebugResourceAssigning, "layer(%d) is OVERLAY externalMPP(%d, %d)", i, supportedExternalMPP->mType, supportedExternalMPP->mIndex); + supportedExternalMPP->mState = MPP_STATE_ASSIGNED; + mLayerInfos[i]->mExternalMPP = supportedExternalMPP; + if ((supportedInternalMPP == NULL) && isOverlaySupportedByIDMA(layer, i) && + ((directFbNum < mInternalDMAs.size()) || (mUseSecureDMA && isTopLayer))) { + if (directFbNum < mInternalDMAs.size()) { + mLayerInfos[i]->mDmaType = mInternalDMAs[directFbNum]; + directFbNum++; + } else { + mLayerInfos[i]->mDmaType = IDMA_SECURE; + } + } + } + mMPPLayers++; + if (!isFormatRgb(handle->format)) + videoOverlays++; + } else { + 
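+ /* Illustrative sketch (not from the vendor source): when a layer cannot take a window, the
+  * loop above records why as bits in mCheckOverlayFlag (eInsufficientWindow, eInsufficientMPP
+  * and friends), which the dump paths print later.  The accumulation pattern:
+  *
+  *   static unsigned whyNotComposable(bool windowLeft, bool mppLeft,
+  *                                    unsigned noWindowBit, unsigned noMppBit)
+  *   {
+  *       unsigned flags = 0;
+  *       if (!windowLeft) flags |= noWindowBit;
+  *       if (!mppLeft)    flags |= noMppBit;
+  *       return flags;                          // 0 means the layer is still composable
+  *   }
+  */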
DISPLAY_LOGD(eDebugResourceAssigning, "layer(%d) is changed to FRAMEBUFFER", i); + can_compose = false; + } + + if (!can_compose) { + size_t changed_index = i; + hwc_layer_1_t *layer_for_gles = &contents->hwLayers[i]; + if (getDrmMode(handle->flags) != NO_DRM) { + int j = 0; + for (j = i - 1; j >= 0 ; j--) { + layer_for_gles = &contents->hwLayers[j]; + if (layer_for_gles->compositionType == HWC_OVERLAY) { + changed_index = j; + break; + } + } + if (j < 0) { + mFirstFb = 0; + changed_index = 0; + layer_for_gles = &contents->hwLayers[changed_index]; + } + } + layer_for_gles->compositionType = HWC_FRAMEBUFFER; + mLayerInfos[changed_index]->compositionType = layer_for_gles->compositionType; + mLayerInfos[changed_index]->mCheckOverlayFlag |= cannotComposeFlag; + if (mLayerInfos[changed_index]->mInternalMPP != NULL) + mLayerInfos[changed_index]->mInternalMPP->mState = MPP_STATE_FREE; + if (mLayerInfos[changed_index]->mExternalMPP != NULL) + mLayerInfos[changed_index]->mExternalMPP->mState = MPP_STATE_FREE; + mLayerInfos[changed_index]->mInternalMPP = NULL; + mLayerInfos[changed_index]->mExternalMPP = NULL; + if (!mFbNeeded) { + mFirstFb = mLastFb = changed_index; + mFbNeeded = true; + mHwc->totPixels += mXres * mYres; + } + else { + mFirstFb = min(changed_index, mFirstFb); + mLastFb = max(changed_index, mLastFb); + } + changed = true; + mFirstFb = min(mFirstFb, (size_t)NUM_HW_WINDOWS-1); + break; + } else { + mHwc->totPixels += WIDTH(layer.displayFrame) * HEIGHT(layer.displayFrame); + } + + if ((mUseSecureDMA == false) || (isTopLayer == false)) { + win_idx++; + /* FB Target is not top layer */ + if (mFbNeeded && ((mUseSecureDMA && !isCompressed(layer))|| (mLastFb != (contents->numHwLayers - 2)))) + win_idx = (win_idx == mFirstFb) ? (win_idx + 1) : win_idx; + win_idx = min((size_t)win_idx, (size_t)(NUM_HW_WINDOWS - 1)); + windows_left--; + } + } + + if (changed) + for (size_t i = mFirstFb; i < mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle) + handle = private_handle_t::dynamicCast(layer.handle); + if (handle && getDrmMode(handle->flags) != NO_DRM) { + layer.hints = HWC_HINT_CLEAR_FB; + } else { + contents->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + mLayerInfos[i]->compositionType = contents->hwLayers[i].compositionType; + mLayerInfos[i]->mCheckOverlayFlag |= eSandwitchedBetweenGLES; + if (mLayerInfos[i]->mInternalMPP != NULL) + mLayerInfos[i]->mInternalMPP->mState = MPP_STATE_FREE; + if (mLayerInfos[i]->mExternalMPP != NULL) + mLayerInfos[i]->mExternalMPP->mState = MPP_STATE_FREE; + mLayerInfos[i]->mInternalMPP = NULL; + mLayerInfos[i]->mExternalMPP = NULL; + if (handle && !isFormatRgb(handle->format)) + videoOverlays--; + } + } + if (handleTotalBandwidthOverload(contents)) + changed = true; + retry++; + + if (retry > 100) { + DISPLAY_LOGE("%s", "retry 100, can't allocate vpp, dump error state"); + android::String8 result; + result.clear(); + dumpLayerInfo(result); + DISPLAY_LOGE("%s", result.string()); + break; + } + } while(changed); + + uint32_t rectCount = 0; + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + if (handle->format == HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_PRIV + && rectCount < mOriginFrect.size()) + layer.sourceCropf = mOriginFrect[rectCount++]; + } + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; 
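+ /* Illustrative sketch (not from the vendor source): the overall shape of
+  * determineBandwidthSupport() is "assign greedily; if something no longer fits, demote one
+  * layer to GLES and start over", with a hard cap so a pathological layer list cannot spin
+  * forever.  Skeleton only, where assignOnePass() and logErrorState() are hypothetical
+  * stand-ins:
+  *
+  *   bool changed;
+  *   int retry = 0;
+  *   do {
+  *       changed = assignOnePass();             // true if a layer was demoted this pass
+  *       if (++retry > 100) {                   // same cap the HAL uses
+  *           logErrorState();
+  *           break;
+  *       }
+  *   } while (changed);
+  */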
+ if (layer.compositionType == HWC_FRAMEBUFFER) + mFbUpdateRegion = expand(mFbUpdateRegion, layer.displayFrame); + } + + for (size_t i = mLastFb + 1; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY && layer.planeAlpha > 0 && layer.planeAlpha < 255) + mFbUpdateRegion = expand(mFbUpdateRegion, contents->hwLayers[i].displayFrame); + } + + int minWidth = 0; +#ifdef USE_DRM_BURST_LEN + if (mHasDrmSurface) + minWidth = DRM_BURSTLEN_BYTES * 8 / formatToBpp(HAL_PIXEL_FORMAT_RGBA_8888); + else +#endif + minWidth = BURSTLEN_BYTES * 8 / formatToBpp(HAL_PIXEL_FORMAT_RGBA_8888); + + int w = WIDTH(mFbUpdateRegion); + if (w < minWidth) { +#if defined(USES_DUAL_DISPLAY) + uint32_t maxRightPosition = (mType == EXYNOS_PRIMARY_DISPLAY)?(mXres/2):mXres; + if (mFbUpdateRegion.left + minWidth <= maxRightPosition) + mFbUpdateRegion.right = mFbUpdateRegion.left + minWidth; + else + mFbUpdateRegion.left = mFbUpdateRegion.right - minWidth; +#else + if (mFbUpdateRegion.left + minWidth <= mXres) + mFbUpdateRegion.right = mFbUpdateRegion.left + minWidth; + else + mFbUpdateRegion.left = mFbUpdateRegion.right - minWidth; +#endif + } +} + +void ExynosDisplay::assignWindows(hwc_display_contents_1_t *contents) +{ + unsigned int nextWindow = 0; + unsigned int directFbNum = 0; + bool isTopLayer = false; + + hwc_layer_1_t &fbLayer = contents->hwLayers[contents->numHwLayers - 1]; + if (mFbNeeded && (contents->numHwLayers - 1 > 0)) { + /* FramebufferTarget is the top layer */ + if (mUseSecureDMA && !isCompressed(fbLayer) && mLastFb == (contents->numHwLayers - 2)) + mLayerInfos[contents->numHwLayers - 1]->mDmaType = IDMA_SECURE; + else if (mInternalDMAs.size() > 0 && !isCompressed(fbLayer)) { + mLayerInfos[contents->numHwLayers - 1]->mDmaType = mInternalDMAs[directFbNum]; + directFbNum++; + } else { + /* mDmaType was set by determineBandwidthSupport() */ + } + + DISPLAY_LOGD(eDebugResourceAssigning, "assigning layer %u to DMA %u", contents->numHwLayers - 1, mLayerInfos[contents->numHwLayers - 1]->mDmaType); + } + + if (mFbNeeded && mUseSecureDMA && !isCompressed(fbLayer) && (mLastFb == (contents->numHwLayers - 2))) + mFbWindow = NUM_HW_WINDOWS - 1; + else + mFbWindow = NO_FB_NEEDED; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + mLayerInfos[i]->mWindowIndex = -1; + isTopLayer = (i == contents->numHwLayers - 2) ? 
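+ /* Illustrative sketch (not from the vendor source): the clamp above keeps the framebuffer
+  * update region at least minWidth pixels wide (the DMA burst-length floor), growing to the
+  * right when possible and otherwise sliding left from the panel edge:
+  *
+  *   struct Span { int left, right; };
+  *   static Span widenToMinWidth(Span s, int minWidth, int panelWidth)
+  *   {
+  *       if (s.right - s.left >= minWidth)
+  *           return s;                          // already wide enough
+  *       if (s.left + minWidth <= panelWidth)
+  *           s.right = s.left + minWidth;       // extend toward the right edge
+  *       else
+  *           s.left = s.right - minWidth;       // pinned at the right edge, extend left
+  *       return s;
+  *   }
+  */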
true:false; + + if (mFbNeeded && (mFbWindow != NUM_HW_WINDOWS - 1)) { + if (i == mLastFb) { + mFbWindow = nextWindow; + nextWindow++; + continue; + } + } + + if (layer.flags & HWC_SKIP_RENDERING) + continue; + + if (!layer.planeAlpha) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + continue; + } + + if (layer.compositionType != HWC_FRAMEBUFFER) { + if (mFbNeeded && (layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { + DISPLAY_LOGD(eDebugResourceAssigning, "assigning framebuffer target %u to window %u", i, nextWindow); + mLayerInfos[i]->mWindowIndex = mFbWindow; + if (mLayerInfos[i]->mInternalMPP != NULL) + mLayerInfos[i]->mInternalMPP->setDisplay(this); + continue; + } + if (layer.compositionType == HWC_OVERLAY) { + if ((!isProcessingRequired(layer) || + ((mLayerInfos[i]->mInternalMPP == NULL) && (mLayerInfos[i]->mExternalMPP != NULL))) && + isOverlaySupportedByIDMA(layer, i) && (directFbNum < mInternalDMAs.size() || (mUseSecureDMA && isTopLayer))) + { + if (directFbNum < mInternalDMAs.size()) { + DISPLAY_LOGD(eDebugResourceAssigning, "assigning layer %u to DMA %u", i, mInternalDMAs[directFbNum]); + mLayerInfos[i]->mDmaType = mInternalDMAs[directFbNum]; + mLayerInfos[i]->mWindowIndex = nextWindow; + directFbNum++; + } else { + DISPLAY_LOGD(eDebugResourceAssigning, "assigning layer %u to DMA %u", i, IDMA_SECURE); + mLayerInfos[i]->mDmaType = IDMA_SECURE; + mLayerInfos[i]->mWindowIndex = NUM_HW_WINDOWS - 1; + } + } else { + DISPLAY_LOGD(eDebugResourceAssigning, "%u layer can't use internalDMA, isProcessingRequired(%d)", i, isProcessingRequired(layer)); + unsigned int dmaType = 0; + mLayerInfos[i]->mWindowIndex = nextWindow; + if (mLayerInfos[i]->mInternalMPP != NULL) { + mLayerInfos[i]->mInternalMPP->setDisplay(this); + mLayerInfos[i]->mDmaType = getDeconDMAType(mLayerInfos[i]->mInternalMPP); + DISPLAY_LOGD(eDebugResourceAssigning, "assigning layer %u to DMA %u", i, mLayerInfos[i]->mDmaType); + } else { + /* Find unused DMA connected with VPP */ + for (size_t j = 0; j < mInternalMPPs.size(); j++ ) + { + if ((mInternalMPPs[j]->mState == MPP_STATE_FREE) && + ((mInternalMPPs[j]->mDisplay == NULL) || (mInternalMPPs[j]->mDisplay == this))) { + mLayerInfos[i]->mInternalMPP = mInternalMPPs[j]; + mLayerInfos[i]->mDmaType = getDeconDMAType(mLayerInfos[i]->mInternalMPP); + mLayerInfos[i]->mInternalMPP->setDisplay(this); + mLayerInfos[i]->mInternalMPP->mState = MPP_STATE_ASSIGNED; + DISPLAY_LOGD(eDebugResourceAssigning, "assigning layer %u to DMA %u", i, mLayerInfos[i]->mDmaType); + break; + } + } + } + } + if (mLayerInfos[i]->mExternalMPP != NULL) + mLayerInfos[i]->mExternalMPP->setDisplay(this); + } + if (mLayerInfos[i]->mWindowIndex != NUM_HW_WINDOWS - 1) + nextWindow++; + } + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_FRAMEBUFFER || + layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + mLayerInfos[i]->mWindowIndex = mFbWindow; + if (contents->numHwLayers - 1 > 0) { + mLayerInfos[i]->mDmaType = mLayerInfos[contents->numHwLayers - 1]->mDmaType; + } + } + } +} + +int ExynosDisplay::postMPPM2M(hwc_layer_1_t &layer, struct decon_win_config *config, int win_map, int index) +{ + //exynos5_hwc_post_data_t *pdata = &mPostData; + //int gsc_idx = pdata->gsc_map[index].idx; + int dst_format = mExternalMPPDstFormat; + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + ExynosMPPModule *exynosMPP = 
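+ /* Illustrative sketch (not from the vendor source): postMPPM2M(), which begins above and
+  * continues below, temporarily rewrites the layer (crop, transform, format, displayFrame)
+  * so the DECON window is configured against the MPP's output buffer, then restores the
+  * original fields for later passes.  The save/rewrite/restore shape in miniature:
+  *
+  *   template <typename T, typename Fn>
+  *   static void withTemporaryValue(T &slot, const T &temp, Fn &&use)
+  *   {
+  *       T saved = slot;
+  *       slot = temp;                           // expose the post-MPP view of the layer
+  *       use();
+  *       slot = saved;                          // put the original metadata back
+  *   }
+  */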
mLayerInfos[index]->mExternalMPP; + ExynosMPPModule *exynosInternalMPP = mLayerInfos[index]->mInternalMPP; + hwc_layer_1_t extMPPOutLayer; + + if (exynosMPP == NULL) { + DISPLAY_LOGE("postMPPM2M is called but externMPP is NULL"); + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + return -1; + } + + hwc_frect_t sourceCrop = { 0, 0, + (float)WIDTH(layer.displayFrame), (float)HEIGHT(layer.displayFrame) }; + int originalTransform = layer.transform; + hwc_rect_t originalDisplayFrame = layer.displayFrame; + + bool bothMPPUsed = isBothMPPProcessingRequired(layer, &extMPPOutLayer); + if (bothMPPUsed) { + sourceCrop.right = extMPPOutLayer.displayFrame.right; + sourceCrop.bottom = extMPPOutLayer.displayFrame.bottom; + layer.displayFrame = extMPPOutLayer.displayFrame; + } + + if (mType == EXYNOS_PRIMARY_DISPLAY) { + handle->flags &= ~GRALLOC_USAGE_VIDEO_EXT; + if (mHwc->mS3DMode == S3D_MODE_READY || mHwc->mS3DMode == S3D_MODE_RUNNING) { + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + exynosMPP->mS3DMode = S3D_SBS; + else if (S3DFormat == S3D_TB) + exynosMPP->mS3DMode = S3D_TB; + } else { + exynosMPP->mS3DMode = S3D_NONE; + } + } + + /* OFF_Screen to ON_Screen changes */ + if (getDrmMode(handle->flags) != NO_DRM) + recalculateDisplayFrame(layer, mXres, mYres); + + if (mType != EXYNOS_VIRTUAL_DISPLAY && + (isFormatRgb(handle->format) || + (bothMPPUsed && !isFormatRgb(handle->format) && + exynosInternalMPP != NULL && + exynosInternalMPP->isCSCSupportedByMPP(handle->format, HAL_PIXEL_FORMAT_RGBX_8888, layer.dataSpace) && + exynosInternalMPP->isFormatSupportedByMPP(handle->format) && + WIDTH(extMPPOutLayer.displayFrame) % exynosInternalMPP->getCropWidthAlign(layer) == 0 && + HEIGHT(extMPPOutLayer.displayFrame) % exynosInternalMPP->getCropHeightAlign(layer) == 0))) + dst_format = handle->format; + int err = exynosMPP->processM2M(layer, dst_format, &sourceCrop); + + /* Restore displayFrame*/ + layer.displayFrame = originalDisplayFrame; + + if (err < 0) { + DISPLAY_LOGE("failed to configure MPP (type:%u, index:%u) for layer %u", + exynosMPP->mType, exynosMPP->mIndex, index); + return -1; + } + + buffer_handle_t dst_buf = exynosMPP->mDstBuffers[exynosMPP->mCurrentBuf]; + private_handle_t *dst_handle = + private_handle_t::dynamicCast(dst_buf); + int fence = exynosMPP->mDstConfig.releaseFenceFd; + hwc_frect originalCrop = layer.sourceCropf; + int originalFormat = handle->format; + + /* ExtMPP out is the input of Decon + * and Trsform was processed by ExtMPP + */ + layer.sourceCropf = sourceCrop; + layer.transform = 0; + handle->format = dst_format; + configureHandle(dst_handle, index, layer, fence, config[win_map]); + + /* Restore sourceCropf and transform */ + layer.sourceCropf = originalCrop; + layer.transform = originalTransform; + handle->format = originalFormat; + return 0; +} + +void ExynosDisplay::handleStaticLayers(hwc_display_contents_1_t *contents, struct decon_win_config_data &win_data, int __unused tot_ovly_wins) +{ + int win_map = 0; + if (mLastFbWindow >= NUM_HW_WINDOWS) { + DISPLAY_LOGE("handleStaticLayers:: invalid mLastFbWindow(%d)", mLastFbWindow); + return; + } + win_map = mLastFbWindow; + DISPLAY_LOGD(eDebugSkipStaicLayer, "[USE] SKIP_STATIC_LAYER_COMP, mLastFbWindow(%d), win_map(%d)\n", mLastFbWindow, win_map); + + memcpy(&win_data.config[win_map], + &mLastConfigData.config[win_map], sizeof(struct decon_win_config)); + win_data.config[win_map].fence_fd = -1; + + for (size_t i = 
mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle == NULL) + continue; + else + handle = private_handle_t::dynamicCast(layer.handle); + + if ((getDrmMode(handle->flags) == NO_DRM) && + (layer.compositionType == HWC_OVERLAY)) { + DISPLAY_LOGD(eDebugSkipStaicLayer, "[SKIP_STATIC_LAYER_COMP] layer.handle: 0x%p, layer.acquireFenceFd: %d\n", layer.handle, layer.acquireFenceFd); + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + } + } +} + +bool ExynosDisplay::multipleRGBScaling(int format) +{ + return isFormatRgb(format) && + mMPPLayers >= 1; +} + +bool ExynosDisplay::isProcessingRequired(hwc_layer_1_t &layer) +{ + if (!layer.handle) + return false; + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + return !(isFormatRgb(handle->format)) || isScaled(layer) || isTransformed(layer) || isCompressed(layer) || + isFloat(layer.sourceCropf.left) || isFloat(layer.sourceCropf.top) || + isFloat(layer.sourceCropf.right - layer.sourceCropf.left) || + isFloat(layer.sourceCropf.bottom - layer.sourceCropf.top); +} + +bool ExynosDisplay::isBothMPPProcessingRequired(hwc_layer_1_t &layer) +{ + bool needDoubleOperation = false; + private_handle_t *srcHandle = NULL; + + if (layer.handle == NULL) + return false; + + srcHandle = private_handle_t::dynamicCast(layer.handle); + if ((mExternalMPPs.size() == 0) || (mInternalMPPs.size() == 0)) + return false; + + /* Check scale ratio */ + int maxUpscaleExt = mExternalMPPs[0]->getMaxUpscale(); + int maxUpscaleInt = mCheckIntMPP->getMaxUpscale(); + bool rot90or270 = !!(layer.transform & HAL_TRANSFORM_ROT_90); + int srcW = WIDTH(layer.sourceCropf), srcH = HEIGHT(layer.sourceCropf); + int dstW, dstH; + if (rot90or270) { + dstW = HEIGHT(layer.displayFrame); + dstH = WIDTH(layer.displayFrame); + } else { + dstW = WIDTH(layer.displayFrame); + dstH = HEIGHT(layer.displayFrame); + } + needDoubleOperation = ((dstW > srcW * maxUpscaleExt) && (dstW <= srcW * maxUpscaleExt * maxUpscaleInt)) || + ((dstH > srcH * maxUpscaleExt) && (dstH <= srcH * maxUpscaleExt * maxUpscaleInt)); + + int maxDownscaleExt = mExternalMPPs[0]->getMaxDownscale(layer); + int maxDownscaleInt = mCheckIntMPP->getMaxDownscale(layer); + + needDoubleOperation |= ((dstW < srcW / maxDownscaleExt) && (dstW >= srcW / (maxDownscaleExt * maxDownscaleExt))) || + ((dstH < srcH / maxDownscaleExt) && (dstH >= srcH / (maxDownscaleExt * maxDownscaleExt))); + + /* + * Both VPP and MSC should be used if + * MSC should be used for DRM contents + */ + if (getDrmMode(srcHandle->flags) != NO_DRM) { + bool supportVPP = false; + for (size_t i = 0; i < mInternalMPPs.size(); i++ ) + { + if (mInternalMPPs[i]->isProcessingSupported(layer, srcHandle->format) > 0) { + supportVPP = true; + break; + } + } + if (supportVPP == false) + needDoubleOperation |= true; + } + + /* + * UHD case + * Destination format should be RGB if dstW or dstH is not aligned. + * VPP is not required at this time. 
+ */ + if (!isFormatRgb(srcHandle->format) && + (srcW >= UHD_WIDTH || srcH >= UHD_HEIGHT) && + (mInternalMPPs.size() > 0) && + (dstW % mCheckIntMPP->getCropWidthAlign(layer) == 0) && + (dstH % mCheckIntMPP->getCropHeightAlign(layer) == 0)) { + needDoubleOperation |= true; + } + + return needDoubleOperation; +} + +bool ExynosDisplay::isBothMPPProcessingRequired(hwc_layer_1_t &layer, hwc_layer_1_t *extMPPOutLayer) +{ + bool needDoubleOperation = false; + if (layer.handle == NULL) + return false; + + private_handle_t *srcHandle = private_handle_t::dynamicCast(layer.handle); + if ((mExternalMPPs.size() == 0) || (mInternalMPPs.size() == 0)) + return false; + + int maxUpscaleExt = mExternalMPPs[0]->getMaxUpscale(); + int maxUpscaleInt = mCheckIntMPP->getMaxUpscale(); + bool rot90or270 = !!(layer.transform & HAL_TRANSFORM_ROT_90); + int srcW = WIDTH(layer.sourceCropf), srcH = HEIGHT(layer.sourceCropf); + int dstW, dstH; + if (rot90or270) { + dstW = HEIGHT(layer.displayFrame); + dstH = WIDTH(layer.displayFrame); + } else { + dstW = WIDTH(layer.displayFrame); + dstH = HEIGHT(layer.displayFrame); + } + + int maxDownscaleExt = mExternalMPPs[0]->getMaxDownscale(layer); + int maxDownscaleInt = mCheckIntMPP->getMaxDownscale(layer); + + needDoubleOperation = isBothMPPProcessingRequired(layer); + + /* set extMPPOutLayer */ + if (needDoubleOperation && extMPPOutLayer != NULL) { + memcpy(extMPPOutLayer, &layer, sizeof(hwc_layer_1_t)); + extMPPOutLayer->displayFrame.left = 0; + extMPPOutLayer->displayFrame.top = 0; + + if (dstW > srcW * maxUpscaleExt) { + extMPPOutLayer->displayFrame.right = ALIGN_UP((int)ceilf((float)dstW/ maxUpscaleInt), + mExternalMPPs[0]->getDstWidthAlign(srcHandle->format)); + } else if (dstW < srcW / maxDownscaleExt) { + extMPPOutLayer->displayFrame.right = ALIGN(dstW * maxDownscaleInt, + mExternalMPPs[0]->getDstWidthAlign(srcHandle->format)); + } else { + extMPPOutLayer->displayFrame.right = dstW; + } + + if (dstH > srcH * maxUpscaleExt) { + extMPPOutLayer->displayFrame.bottom = ALIGN_UP((int)ceilf((float)dstH/ maxUpscaleInt), + mExternalMPPs[0]->getDstHeightAlign(srcHandle->format)); + } else if (dstH < srcH / maxDownscaleExt) { + extMPPOutLayer->displayFrame.bottom = ALIGN(dstH * maxDownscaleInt, + mExternalMPPs[0]->getDstHeightAlign(srcHandle->format)); + } else { + extMPPOutLayer->displayFrame.bottom = dstH; + } + + if (rot90or270) { + dstW = extMPPOutLayer->displayFrame.bottom; + dstH = extMPPOutLayer->displayFrame.right; + extMPPOutLayer->displayFrame.right = dstW; + extMPPOutLayer->displayFrame.bottom = dstH; + } + } + + return needDoubleOperation; +} + +bool ExynosDisplay::isSourceCropfSupported(hwc_layer_1_t layer) +{ + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + unsigned int bpp = formatToBpp(handle->format); + + /* HACK: Disable overlay if the layer have float position or size */ +#if 0 + if ((isFormatRgb(handle->format) && (bpp == 32)) || + isFormatYUV420(handle->format)) + return true; +#endif + + return false; +} + +bool ExynosDisplay::checkConfigChanged(struct decon_win_config_data &lastConfigData, struct decon_win_config_data &newConfigData) +{ + for (size_t i = 0; i <= MAX_DECON_WIN; i++) { + if ((lastConfigData.config[i].state != newConfigData.config[i].state) || + (lastConfigData.config[i].fd_idma[0] != newConfigData.config[i].fd_idma[0]) || + (lastConfigData.config[i].fd_idma[1] != newConfigData.config[i].fd_idma[1]) || + (lastConfigData.config[i].fd_idma[2] != newConfigData.config[i].fd_idma[2]) || + (lastConfigData.config[i].dst.x != 
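+ /* Illustrative sketch (not from the vendor source): getDeconDMAType() below maps an internal
+  * MPP (type plus instance index) onto the DECON IDMA channel hard-wired to it, with -1 for
+  * "no direct channel".  The same idea, assuming, unlike the explicit switch in the HAL, that
+  * IDMA_G0..IDMA_G3 are consecutive enum values:
+  *
+  *   static int dmaChannelFor(int mppType, int mppIndex)
+  *   {
+  *       switch (mppType) {
+  *       case MPP_VG:    return IDMA_VG0  + mppIndex;   // video DMA channels
+  *       case MPP_VGR:   return IDMA_VGR0 + mppIndex;   // video + rotation DMA channels
+  *       case MPP_VPP_G: return (mppIndex >= 0 && mppIndex <= 3)
+  *                           ? IDMA_G0 + mppIndex : -1; // graphics DMA (assumes consecutive values)
+  *       default:        return -1;
+  *       }
+  *   }
+  */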
newConfigData.config[i].dst.x) || + (lastConfigData.config[i].dst.y != newConfigData.config[i].dst.y) || + (lastConfigData.config[i].dst.w != newConfigData.config[i].dst.w) || + (lastConfigData.config[i].dst.h != newConfigData.config[i].dst.h) || + (lastConfigData.config[i].src.x != newConfigData.config[i].src.x) || + (lastConfigData.config[i].src.y != newConfigData.config[i].src.y) || + (lastConfigData.config[i].src.w != newConfigData.config[i].src.w) || + (lastConfigData.config[i].src.h != newConfigData.config[i].src.h) || + (lastConfigData.config[i].format != newConfigData.config[i].format) || + (lastConfigData.config[i].blending != newConfigData.config[i].blending) || + (lastConfigData.config[i].plane_alpha != newConfigData.config[i].plane_alpha)) + return true; + } + return false; +} + +void ExynosDisplay::removeIDMA(decon_idma_type idma) +{ + for (size_t i = mInternalDMAs.size(); i-- > 0;) { + if (mInternalDMAs[i] == (unsigned int)idma) { + mInternalDMAs.removeItemsAt(i); + } + } +} + +int ExynosDisplay::getDisplayConfigs(uint32_t *configs, size_t *numConfigs) +{ + configs[0] = 0; + *numConfigs = 1; + return 0; +} + +int ExynosDisplay::getDeconDMAType(ExynosMPPModule* internalMPP) +{ + if (internalMPP->mType == MPP_VG) + return IDMA_VG0 + internalMPP->mIndex; + else if (internalMPP->mType == MPP_VGR) + return IDMA_VGR0 + internalMPP->mIndex; + else if (internalMPP->mType == MPP_VPP_G) { + switch (internalMPP->mIndex) { + case 0: + return IDMA_G0; + case 1: + return IDMA_G1; + case 2: + return IDMA_G2; + case 3: + return IDMA_G3; + default: + return -1; + } + } else + return -1; +} + +void ExynosDisplay::dumpContents(android::String8& result, hwc_display_contents_1_t *contents) +{ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + result.appendFormat("[%zu] type=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x, " + "{%7.1f,%7.1f,%7.1f,%7.1f}, {%d,%d,%d,%d}\n", + i, + layer.compositionType, layer.flags, layer.handle, layer.transform, + layer.blending, + layer.sourceCropf.left, + layer.sourceCropf.top, + layer.sourceCropf.right, + layer.sourceCropf.bottom, + layer.displayFrame.left, + layer.displayFrame.top, + layer.displayFrame.right, + layer.displayFrame.bottom); + + } +} + +int ExynosDisplay::checkConfigValidation(decon_win_config *config) +{ + bool flagValidConfig = true; + for (size_t i = 0; i < MAX_DECON_WIN; i++) { + if (config[i].state != config[i].DECON_WIN_STATE_DISABLED) { + /* multiple dma mapping */ + for (size_t j = (i+1); j < MAX_DECON_WIN; j++) { + if ((config[i].state == config[i].DECON_WIN_STATE_BUFFER) && + (config[j].state == config[j].DECON_WIN_STATE_BUFFER)) { + if (config[i].idma_type == config[j].idma_type) { + ALOGE("WIN_CONFIG error: duplicated dma(%d) between win%d, win%d", + config[i].idma_type, i, j); + config[j].state = config[j].DECON_WIN_STATE_DISABLED; + flagValidConfig = false; + } + } + } + if ((config[i].src.x < 0) || (config[i].src.y < 0)|| + (config[i].dst.x < 0) || (config[i].dst.y < 0)|| + (config[i].dst.x + config[i].dst.w > (uint32_t)mXres) || + (config[i].dst.y + config[i].dst.h > (uint32_t)mYres)) { + ALOGE("WIN_CONFIG error: invalid pos or size win%d", i); + config[i].state = config[i].DECON_WIN_STATE_DISABLED; + flagValidConfig = false; + } + + if (i >= NUM_HW_WINDOWS) { + ALOGE("WIN_CONFIG error: invalid window number win%d", i); + config[i].state = config[i].DECON_WIN_STATE_DISABLED; + flagValidConfig = false; + } + } + } + + if (flagValidConfig) + return 0; + else + return -1; +} + +int 
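+ /* Illustrative sketch (not from the vendor source): checkConfigValidation() above rejects a
+  * WIN_CONFIG in which two enabled windows name the same IDMA channel, because one DMA engine
+  * cannot feed two windows in the same frame.  The pairwise scan on its own:
+  *
+  *   #include <cstddef>
+  *   // idma[i] is window i's channel; enabled[i] says whether the window carries a buffer.
+  *   static bool hasDuplicateDma(const int *idma, const bool *enabled, std::size_t n)
+  *   {
+  *       for (std::size_t i = 0; i < n; ++i)
+  *           for (std::size_t j = i + 1; j < n; ++j)
+  *               if (enabled[i] && enabled[j] && idma[i] == idma[j])
+  *                   return true;
+  *       return false;
+  *   }
+  */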
ExynosDisplay::setPowerMode(int mode) +{ + return ioctl(this->mDisplayFd, S3CFB_POWER_MODE, &mode); +} diff --git a/libvppdisplay/ExynosDisplay.h b/libvppdisplay/ExynosDisplay.h new file mode 100644 index 0000000..5bd4cbd --- /dev/null +++ b/libvppdisplay/ExynosDisplay.h @@ -0,0 +1,264 @@ +#ifndef EXYNOS_DISPLAY_H +#define EXYNOS_DISPLAY_H + +#include +#include +#include +#include "ExynosHWC.h" +#include "ExynosDisplayResourceManager.h" + +class ExynosMPPModule; + +#define HWC_SKIP_RENDERING 0x80000000 +enum { + eSkipLayer = 0x00000001, + eUnsupportedPlaneAlpha = 0x00000002, + eInvalidHandle = 0x00000004, + eHasFloatSrcCrop = 0x00000008, + eUnsupportedDstWidth = 0x00000010, + eUnsupportedCoordinate = 0x00000020, + eUnsupportedFormat = 0x00000040, + eUnsupportedBlending = 0x00000080, + eDynamicRecomposition = 0x00000100, + eForceFbEnabled = 0x00000200, + eSandwitchedBetweenGLES = 0x00000400, + eHasPopupVideo = 0x00000800, + eHasDRMVideo = 0x00001000, + eInsufficientBandwidth = 0x00002000, + eInsufficientOverlapCount = 0x00004000, + eInsufficientWindow = 0x00008000, + eInsufficientMPP = 0x00010000, + eSwitchingLocalPath = 0x00020000, + eRGBLayerDuringVideoPlayback = 0x00040000, + eSkipStaticLayer = 0x00080000, + eNotAlignedDstPosition = 0x00100000, + eUnSupportedUseCase = 0x00200000, + eMPPUnsupported = 0x40000000, + eUnknown = 0x80000000, +}; + +enum { + eWindowUpdateDisabled = 0, + eWindowUpdateInvalidIndex = 1, + eWindowUpdateGeometryChanged = 2, + eWindowUpdateInvalidRegion = 3, + eWindowUpdateNotUpdated = 4, + eWindowUpdateOverThreshold = 5, + eWindowUpdateAdjustmentFail = 6, + eWindowUpdateInvalidConfig = 7, + eWindowUpdateUnsupportedUseCase = 8, + eWindowUpdateUnknownError, +}; + +enum { + eDamageRegionFull = 0, + eDamageRegionPartial, + eDamageRegionSkip, + eDamageRegionError, +}; + +const struct deconFormat { + uint32_t format; + const char *desc; +} deconFormat[] = { + {DECON_PIXEL_FORMAT_ARGB_8888, "ARGB8888"}, + {DECON_PIXEL_FORMAT_ABGR_8888, "ABGR8888"}, + {DECON_PIXEL_FORMAT_RGBA_8888, "RGBA8888"}, + {DECON_PIXEL_FORMAT_BGRA_8888, "BGRA8888"}, + {DECON_PIXEL_FORMAT_XRGB_8888, "XRGB8888"}, + {DECON_PIXEL_FORMAT_XBGR_8888, "XBGR8888"}, + {DECON_PIXEL_FORMAT_RGBX_8888, "RGBX8888"}, + {DECON_PIXEL_FORMAT_BGRX_8888, "BGRX8888"}, + {DECON_PIXEL_FORMAT_RGBA_5551, "RGBA5551"}, + {DECON_PIXEL_FORMAT_RGB_565, "RGB565"}, + {DECON_PIXEL_FORMAT_NV16, "FORMATNV16"}, + {DECON_PIXEL_FORMAT_NV61, "FORMATNV61"}, + {DECON_PIXEL_FORMAT_YVU422_3P, "YVU4223P"}, + {DECON_PIXEL_FORMAT_NV12, "FORMATNV12"}, + {DECON_PIXEL_FORMAT_NV21, "FORMATNV21"}, + {DECON_PIXEL_FORMAT_NV12M, "FORMATNV12M"}, + {DECON_PIXEL_FORMAT_NV21M, "FORMATNV21M"}, + {DECON_PIXEL_FORMAT_YUV420, "FORMATYUV420"}, + {DECON_PIXEL_FORMAT_YVU420, "FORMATYVU420"}, + {DECON_PIXEL_FORMAT_YUV420M, "FORMATYUV420M"}, + {DECON_PIXEL_FORMAT_YVU420M, "FORMATYVU420M"}, +}; + +enum { + EXYNOS_PRIMARY_DISPLAY = 0, +#if defined(USES_DUAL_DISPLAY) + EXYNOS_SECONDARY_DISPLAY, +#endif + EXYNOS_EXTERNAL_DISPLAY, + EXYNOS_VIRTUAL_DISPLAY +}; + +enum regionType { + eTransparentRegion = 0, + eCoveredOpaqueRegion = 1, + eDamageRegion = 2, +}; + +class ExynosLayerInfo { + public: + int32_t compositionType; + uint32_t mCheckOverlayFlag; + uint32_t mCheckMPPFlag; + int32_t mWindowIndex; + int32_t mDmaType; + bool mCompressed; + ExynosMPPModule *mExternalMPP; /* For MSC, GSC, FIMC ... 
*/ + ExynosMPPModule *mInternalMPP; /* For VPP */ +}; + +class ExynosPreProcessedInfo { + public: + bool mHasDrmSurface; +}; + +struct exynos_mpp_map_t { + exynos_mpp_t internal_mpp; + exynos_mpp_t external_mpp; +}; + +enum decon_pixel_format halFormatToS3CFormat(int format); +bool isFormatSupported(int format); +enum decon_blending halBlendingToS3CBlending(int32_t blending); +bool isBlendingSupported(int32_t blending); +const char *deconFormat2str(uint32_t format); +bool winConfigChanged(decon_win_config *c1, decon_win_config *c2); +bool frameChanged(decon_frame *f1, decon_frame *f2); +enum vpp_rotate halTransformToHWRot(uint32_t halTransform); + +class ExynosDisplay { + public: + /* Methods */ + ExynosDisplay(int numMPPs); + ExynosDisplay(uint32_t type, struct exynos5_hwc_composer_device_1_t *pdev); + virtual ~ExynosDisplay(); + + virtual int prepare(hwc_display_contents_1_t *contents); + virtual int set(hwc_display_contents_1_t *contents); + virtual int setPowerMode(int mode); + virtual void dump(android::String8& result); + virtual void freeMPP(); + virtual void allocateLayerInfos(hwc_display_contents_1_t* contents); + virtual void dumpLayerInfo(android::String8& result); + + virtual bool handleTotalBandwidthOverload(hwc_display_contents_1_t *contents); + virtual int clearDisplay(); + int getCompModeSwitch(); + virtual int32_t getDisplayAttributes(const uint32_t attribute, uint32_t config = 0); + virtual void preAssignFbTarget(hwc_display_contents_1_t *contents, bool assign); + virtual void determineYuvOverlay(hwc_display_contents_1_t *contents); + virtual void determineSupportedOverlays(hwc_display_contents_1_t *contents); + virtual void determineBandwidthSupport(hwc_display_contents_1_t *contents); + virtual void assignWindows(hwc_display_contents_1_t *contents); + bool getPreviousDRMDMA(int *dma); + void removeIDMA(decon_idma_type idma); + void dumpMPPs(android::String8& result); + void dumpConfig(decon_win_config &c); + void dumpConfig(decon_win_config &c, android::String8& result); + void dumpContents(android::String8& result, hwc_display_contents_1_t *contents); + int checkConfigValidation(decon_win_config *config); + + virtual int getDisplayConfigs(uint32_t *configs, size_t *numConfigs); + virtual int getActiveConfig() {return mActiveConfigIndex;}; + virtual int setActiveConfig(int __unused index) {return 0;}; + virtual void dupFence(int fence, hwc_display_contents_1_t *contents); + + /* Fields */ + int mDisplayFd; + uint32_t mType; + int mPanelType; + int32_t mDSCHSliceNum; + int32_t mDSCYSliceSize; + int32_t mXres; + int32_t mYres; + int32_t mXdpi; + int32_t mYdpi; + int32_t mVsyncPeriod; + bool mBlanked; + struct exynos5_hwc_composer_device_1_t *mHwc; + alloc_device_t *mAllocDevice; + const private_module_t *mGrallocModule; + android::Mutex mLayerInfoMutex; + android::Vector mLayerInfos; + ExynosMPPVector mInternalMPPs; + android::Vector< ExynosMPPModule* > mExternalMPPs; + android::Vector mBackUpFrect; + android::Vector mOriginFrect; + + /* For SkipStatic Layer feature */ + size_t mLastFbWindow; + const void *mLastLayerHandles[NUM_VIRT_OVER]; + int mVirtualOverlayFlag; + bool mBypassSkipStaticLayer; + + int mMPPLayers; + int mYuvLayers; + bool mHasDrmSurface; + bool mLayersNeedScaling; + + bool mFbNeeded; + size_t mFirstFb; + size_t mLastFb; + size_t mFbWindow; + bool mForceFb; + hwc_rect mFbUpdateRegion; + int mForceOverlayLayerIndex; + int mAllowedOverlays; + android::Vector< unsigned int > mInternalDMAs; + + /* For dump last information */ + struct decon_win_config_data 
mLastConfigData; + exynos_mpp_map_t mLastMPPMap[NUM_HW_WINDOWS]; + const void *mLastHandles[NUM_HW_WINDOWS]; + + /* mOtfMode is not used. + * It is only for compatibility with ExynosMPP */ + int mOtfMode; + /* Below members are not used. + * They are only for compatibility with HWC */ + bool mGscUsed; + size_t mMaxWindowOverlapCnt; + android::String8 mDisplayName; + bool mUseSecureDMA; + + uint32_t mExternalMPPDstFormat; + bool mSkipStaticInitFlag; + size_t mNumStaticLayers; + int mLastRetireFenceFd; + + ExynosPreProcessedInfo mPreProcessedInfo; + + bool mFbPreAssigned; + uint32_t mActiveConfigIndex; + ExynosMPPModule *mCheckIntMPP; + struct decon_win_config_data *mWinData; + protected: + /* Methods */ + void skipStaticLayers(hwc_display_contents_1_t *contents); + virtual int handleWindowUpdate(hwc_display_contents_1_t *contents, struct decon_win_config *config); + void getLayerRegion(hwc_layer_1_t &layer, decon_win_rect &rect_area, uint32_t regionType); + unsigned int getLayerRegion(hwc_layer_1_t &layer, hwc_rect &rect_area, uint32_t regionType); + + virtual void handleStaticLayers(hwc_display_contents_1_t *contents, struct decon_win_config_data &win_data, int tot_ovly_wins); + virtual void configureHandle(private_handle_t *handle, size_t index, hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg); + virtual int postMPPM2M(hwc_layer_1_t &layer, struct decon_win_config *config, int win_map, int index); + virtual void configureOverlay(hwc_layer_1_t *layer, size_t index, decon_win_config &cfg); + virtual bool isOverlaySupported(hwc_layer_1_t &layer, size_t index, bool useVPPOverlay, ExynosMPPModule** supportedInternalMPP, ExynosMPPModule** supportedExternalMPP); + virtual int postFrame(hwc_display_contents_1_t *contents); + virtual int winconfigIoctl(decon_win_config_data *win_data); + virtual bool isProcessingRequired(hwc_layer_1_t &layer); + virtual bool isBothMPPProcessingRequired(hwc_layer_1_t &layer); + virtual bool isBothMPPProcessingRequired(hwc_layer_1_t &layer, hwc_layer_1_t *extMPPOutLayer); + virtual bool multipleRGBScaling(int format); + virtual void doPreProcessing(hwc_display_contents_1_t* contents); + virtual bool isSourceCropfSupported(hwc_layer_1_t layer); + virtual int getDeconDMAType(ExynosMPPModule* internalMPP); + virtual bool isOverlaySupportedByIDMA(hwc_layer_1_t &layer, size_t index); + virtual void getIDMAMinSize(hwc_layer_1_t &layer, int *w, int *h); + bool checkConfigChanged(struct decon_win_config_data &lastConfigData, struct decon_win_config_data &newConfigData); +}; +#endif diff --git a/libvppdisplay/ExynosDisplayResourceManager.cpp b/libvppdisplay/ExynosDisplayResourceManager.cpp new file mode 100644 index 0000000..0d8ea9f --- /dev/null +++ b/libvppdisplay/ExynosDisplayResourceManager.cpp @@ -0,0 +1,558 @@ +#define LOG_TAG "DisplayResourceManager" +//#define LOG_NDEBUG 0 + +#include "ExynosDisplayResourceManager.h" +#include "ExynosMPPModule.h" +#include "ExynosPrimaryDisplay.h" +#include "ExynosExternalDisplay.h" +#if defined(USES_DUAL_DISPLAY) +#include "ExynosSecondaryDisplayModule.h" +#endif +#ifdef USES_VIRTUAL_DISPLAY +#include "ExynosVirtualDisplay.h" +#endif +#include "ExynosHWCDebug.h" + +ExynosMPPVector::ExynosMPPVector() { +} + +ExynosMPPVector::ExynosMPPVector(const ExynosMPPVector& rhs) + : android::SortedVector(rhs) { +} + +int ExynosMPPVector::do_compare(const void* lhs, + const void* rhs) const +{ + if (lhs == NULL || rhs == NULL) + return 0; + + const ExynosMPPModule* l = *((ExynosMPPModule**)(lhs)); + const ExynosMPPModule* r = 
*((ExynosMPPModule**)(rhs)); + uint8_t mppNum = sizeof(VPP_ASSIGN_ORDER)/sizeof(VPP_ASSIGN_ORDER[0]); + + if (l == NULL || r == NULL) + return 0; + + if (l->mType != r->mType) { + uint8_t lhsOrder = 0; + uint8_t rhsOrder = 0; + + for (uint8_t i = 0; i < mppNum; i++) { + if (l->mType == VPP_ASSIGN_ORDER[i]) { + lhsOrder = i; + break; + } + } + for (uint8_t i = 0; i < mppNum; i++) { + if (r->mType == VPP_ASSIGN_ORDER[i]) { + rhsOrder = i; + break; + } + } + return lhsOrder - rhsOrder; + } + + return l->mIndex - r->mIndex; +} + +ExynosDisplayResourceManager::ExynosDisplayResourceManager(struct exynos5_hwc_composer_device_1_t *pdev) + : mHwc(pdev), + mNeedsReserveFbTargetPrimary(false) +{ +#ifdef DISABLE_IDMA_SECURE + mHwc->primaryDisplay->mUseSecureDMA = false; +#if defined(USES_DUAL_DISPLAY) + mHwc->secondaryDisplay->mUseSecureDMA = false; +#endif +#else +#if defined(USES_DUAL_DISPLAY) + mHwc->primaryDisplay->mUseSecureDMA = false; + mHwc->secondaryDisplay->mUseSecureDMA = true; +#else + mHwc->primaryDisplay->mUseSecureDMA = true; +#endif +#endif + mHwc->externalDisplay->mUseSecureDMA = false; +#ifdef USES_VIRTUAL_DISPLAY + mHwc->virtualDisplay->mUseSecureDMA = false; +#endif + ExynosMPP::mainDisplayWidth = mHwc->primaryDisplay->mXres; + if (ExynosMPP::mainDisplayWidth <= 0) { + ExynosMPP::mainDisplayWidth = 1440; + } +} + +ExynosDisplayResourceManager::~ExynosDisplayResourceManager() +{ + if (!mInternalMPPs.isEmpty()) { + for (size_t i = 0; i < mInternalMPPs.size(); i++) { + delete mInternalMPPs[i]; + } + mInternalMPPs.clear(); + } + if (!mExternalMPPs.isEmpty()) { + for (size_t i = 0; i < mExternalMPPs.size(); i++) { + delete mExternalMPPs[i]; + } + mExternalMPPs.clear(); + } +} + +void ExynosDisplayResourceManager::removeUnAssignedIntMpp(ExynosMPPVector &internalMPPs) +{ + for (size_t i = internalMPPs.size(); i-- > 0;) { + ExynosMPPModule* exynosMPP = (ExynosMPPModule *)internalMPPs[i]; + if (exynosMPP->mState == MPP_STATE_FREE || exynosMPP->mState == MPP_STATE_TRANSITION) + internalMPPs.removeItemsAt(i); + } +} + +void ExynosDisplayResourceManager::addExternalMpp(hwc_display_contents_1_t** contents) +{ +#if defined(USES_DUAL_DISPLAY) + hwc_display_contents_1_t *fimd_contents = contents[HWC_DISPLAY_PRIMARY0]; + hwc_display_contents_1_t *fimd_contents1 = contents[HWC_DISPLAY_PRIMARY1]; +#else + hwc_display_contents_1_t *fimd_contents = contents[HWC_DISPLAY_PRIMARY]; +#endif + hwc_display_contents_1_t *hdmi_contents = contents[HWC_DISPLAY_EXTERNAL]; +#ifdef USES_VIRTUAL_DISPLAY + hwc_display_contents_1_t *virtual_contents = contents[HWC_DISPLAY_VIRTUAL]; +#endif + + hwc_display_contents_1_t *secondary_contents = hdmi_contents; + ExynosDisplay* secondary_display = mHwc->externalDisplay; + + if (mExternalMPPs.size() == 0) + return; + + ExynosMPPModule* exynosMPP = (ExynosMPPModule *)mExternalMPPs[FIMD_EXT_MPP_IDX]; + if (fimd_contents && + ((mHwc->primaryDisplay->mHasDrmSurface) || + (mHwc->primaryDisplay->mYuvLayers > 0) || + ((fimd_contents->flags & HWC_GEOMETRY_CHANGED) == 0))) { + mHwc->primaryDisplay->mExternalMPPs.add(exynosMPP); + } + +#if defined(USES_DUAL_DISPLAY) + if (mHwc->hdmi_hpd == false) { + secondary_contents = fimd_contents1; + secondary_display = mHwc->secondaryDisplay; + } +#endif + + if (secondary_contents) { + exynosMPP = (ExynosMPPModule *)mExternalMPPs[HDMI_EXT_MPP_IDX]; + secondary_display->mExternalMPPs.add(exynosMPP); + } + +#ifdef USES_VIRTUAL_DISPLAY + if (virtual_contents) { + exynosMPP = (ExynosMPPModule *)mExternalMPPs[WFD_EXT_MPP_IDX]; + 
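+            /* The WFD display always gets this first external MSC; the USES_2MSC_FOR_WFD / USES_3MSC_FOR_WFD branches below add one or two more so blending and scaling do not have to share a single block. */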
mHwc->virtualDisplay->mExternalMPPs.add(exynosMPP); +#ifdef USES_2MSC_FOR_WFD + /* 1st is for blending and 2nd is for scaling */ + exynosMPP = (ExynosMPPModule *)mExternalMPPs[WFD_EXT_MPP_IDX + 1]; + mHwc->virtualDisplay->mExternalMPPs.add(exynosMPP); +#endif +#ifdef USES_3MSC_FOR_WFD + /* To prevent lack of MSC, WFD use 3 external MPPs */ + exynosMPP = (ExynosMPPModule *)mExternalMPPs[WFD_EXT_MPP_IDX + 1]; + mHwc->virtualDisplay->mExternalMPPs.add(exynosMPP); + + exynosMPP = (ExynosMPPModule *)mExternalMPPs[WFD_EXT_MPP_IDX + 2]; + mHwc->virtualDisplay->mExternalMPPs.add(exynosMPP); +#endif + } +#endif +} + +void ExynosDisplayResourceManager::addUnAssignedIntMpp(ExynosDisplay *display) +{ + for (size_t i = 0; i < mInternalMPPs.size(); i++) { + ExynosMPPModule* exynosMPP = (ExynosMPPModule *)mInternalMPPs[i]; + if ((exynosMPP->mState == MPP_STATE_FREE) && (exynosMPP->mCanBeUsed) && (exynosMPP->isAssignable(display))) + display->mInternalMPPs.add(exynosMPP); + } +} + +void ExynosDisplayResourceManager::cleanupMPPs() +{ + for (size_t i = 0; i < mExternalMPPs.size(); i++) + { + mExternalMPPs[i]->preAssignDisplay(NULL); + if (mExternalMPPs[i]->mState == MPP_STATE_FREE) + mExternalMPPs[i]->cleanupM2M(); + } + for (size_t i = 0; i < mInternalMPPs.size(); i++) + { + mInternalMPPs[i]->preAssignDisplay(NULL); + if (mInternalMPPs[i]->mState == MPP_STATE_FREE) + mInternalMPPs[i]->cleanupInternalMPP(); + } +} + +void ExynosDisplayResourceManager::dumpMPPs(android::String8& result) +{ + result.appendFormat("ExynosDisplayResourceManager Internal MPPs number: %zu\n", mInternalMPPs.size()); + result.append( + " mType | mIndex | mState | mCanBeUsed | mDisplay \n" + "-------+--------+--------------------------------\n"); + // 5____ | 6_____ | 6_____ | 10________ | 8_______ \n + for (size_t i = 0; i < mInternalMPPs.size(); i++) + { + ExynosMPPModule* internalMPP = mInternalMPPs[i]; + result.appendFormat(" %5d | %6d | %6d | %10d | %8d\n", + internalMPP->mType, internalMPP->mIndex, + internalMPP->mState, internalMPP->mCanBeUsed, (internalMPP->mDisplay == NULL)? 
-1: internalMPP->mDisplay->mType); + } + + result.appendFormat("ExynosDisplayResourceManager External MPPs number: %zu\n", mExternalMPPs.size()); + result.append( + " mType | mIndex | mState | mCanBeUsed \n" + "-------+--------+----------------------\n"); + // 5____ | 6_____ | 6_____ | 10________\n + + for (size_t i = 0; i < mExternalMPPs.size(); i++) + { + ExynosMPPModule* externalMPP = mExternalMPPs[i]; + result.appendFormat(" %5d | %6d | %6d | %10d\n", + externalMPP->mType, externalMPP->mIndex, + externalMPP->mState, externalMPP->mCanBeUsed); + } +} + +void ExynosDisplayResourceManager::printDisplyInfos(size_t type) +{ + if (hwcCheckDebugMessages(eDebugResourceManager) == false) + return; + + android::String8 result; + + dumpMPPs(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + if (type == EXYNOS_PRIMARY_DISPLAY) { + HDEBUGLOGD(eDebugResourceManager, "Primary display"); + result.clear(); + mHwc->primaryDisplay->dumpLayerInfo(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + + result.clear(); + mHwc->primaryDisplay->dumpMPPs(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); +#if defined(USES_DUAL_DISPLAY) + } else if (type == EXYNOS_SECONDARY_DISPLAY) { + HDEBUGLOGD(eDebugResourceManager, "Secondary display"); + result.clear(); + mHwc->secondaryDisplay->dumpLayerInfo(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + + result.clear(); + mHwc->secondaryDisplay->dumpMPPs(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); +#endif + } else if (type == EXYNOS_EXTERNAL_DISPLAY) { + HDEBUGLOGD(eDebugResourceManager, "External display"); + result.clear(); + mHwc->externalDisplay->dumpLayerInfo(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + + result.clear(); + mHwc->externalDisplay->dumpMPPs(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + } +#ifdef USES_VIRTUAL_DISPLAY + else if (type == EXYNOS_VIRTUAL_DISPLAY) { + HDEBUGLOGD(eDebugResourceManager, "Virtual display"); + result.clear(); + mHwc->virtualDisplay->dumpLayerInfo(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + + result.clear(); + mHwc->virtualDisplay->dumpMPPs(result); + HDEBUGLOGD(eDebugResourceManager, "%s", result.string()); + } +#endif +} + +void ExynosDisplayResourceManager::doPreProcessing(hwc_display_contents_1_t* contents, ExynosDisplay* display, + int *previous_drm_dma, ExynosMPPModule **previousDRMInternalMPP) +{ + if ((contents == NULL) || (display == NULL)) + return; + + android::Mutex::Autolock lock(display->mLayerInfoMutex); + /* + * Allocate LayerInfos of each DisplayDevice + * because assignResources will set data of LayerInfos + */ + display->allocateLayerInfos(contents); + display->mInternalMPPs.clear(); + display->mExternalMPPs.clear(); + + if (display->getPreviousDRMDMA(previous_drm_dma)) { + if (getInternalMPPFromDMA((unsigned int)*previous_drm_dma, previousDRMInternalMPP) < 0) { + *previous_drm_dma = -1; + } + } +} + +void ExynosDisplayResourceManager::handleHighPriorityLayers(hwc_display_contents_1_t* contents, ExynosDisplay* display, + int previous_drm_dma, ExynosMPPModule *previousDRMInternalMPP, bool reserveFbTarget) +{ + if ((contents == NULL) || (display == NULL)) + return; + + android::Mutex::Autolock lock(display->mLayerInfoMutex); + + /* Don't use the VPP that was used for DRM at the previous frame */ + if ((display->mPreProcessedInfo.mHasDrmSurface == false) && + (previous_drm_dma > 0) && (previousDRMInternalMPP != NULL)) { + 
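+        /* A DRM DMA was in use last frame but the DRM surface is gone now, so keep that VPP out of this frame's assignment instead of handing it straight to another layer. */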
previousDRMInternalMPP->mCanBeUsed = false; + } + + /* + * Assign all of MPPs to the diplay for Video + */ + addUnAssignedIntMpp(display); + display->preAssignFbTarget(contents, reserveFbTarget); + display->determineYuvOverlay(contents); + + // Remove all MPPs that were not assigned + removeUnAssignedIntMpp(display->mInternalMPPs); + + /* Check whether VPP for DRM is changed */ + if ((previous_drm_dma > 0) && + (mHwc->primaryDisplay->mHasDrmSurface == true) && (display->mForceOverlayLayerIndex >= 0) && + (previousDRMInternalMPP != NULL)) { + if (previousDRMInternalMPP != display->mLayerInfos[display->mForceOverlayLayerIndex]->mInternalMPP) + previousDRMInternalMPP->mCanBeUsed = false; + } + + HDEBUGLOGD(eDebugResourceManager, "Display(%d):: after determineYuvOverlay", display->mType); + printDisplyInfos(display->mType); +} + +void ExynosDisplayResourceManager::handleLowPriorityLayers(hwc_display_contents_1_t* contents, ExynosDisplay* display) +{ + if ((contents == NULL) || (display == NULL)) + return; + + android::Mutex::Autolock lock(display->mLayerInfoMutex); + + /* + * Assign the rest MPPs to the display for UI + */ + addUnAssignedIntMpp(display); + display->determineSupportedOverlays(contents); + HDEBUGLOGD(eDebugResourceManager, "Display(%d):: after determineSupportedOverlays", display->mType); + printDisplyInfos(display->mType); + display->mAllowedOverlays = display->mInternalMPPs.size() + display->mInternalDMAs.size(); + display->determineBandwidthSupport(contents); + + HDEBUGLOGD(eDebugResourceManager, "Display(%d):: after determineBandwidthSupport", display->mType); + printDisplyInfos(display->mType); + display->assignWindows(contents); + + // Remove all MPPs that were not assigned + removeUnAssignedIntMpp(display->mInternalMPPs); + + HDEBUGLOGD(eDebugResourceManager, "Display(%d):: after assignWindows", display->mType); + printDisplyInfos(display->mType); +} + +int ExynosDisplayResourceManager::assignResources(size_t numDisplays, hwc_display_contents_1_t** displays) +{ + mNeedsReserveFbTargetPrimary = false; + if (!numDisplays || !displays) + return 0; + +#if defined(USES_DUAL_DISPLAY) + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY0]; + hwc_display_contents_1_t *fimd_contents1 = displays[HWC_DISPLAY_PRIMARY1]; +#else + hwc_display_contents_1_t *fimd_contents = displays[HWC_DISPLAY_PRIMARY]; +#endif + hwc_display_contents_1_t *hdmi_contents = displays[HWC_DISPLAY_EXTERNAL]; +#ifdef USES_VIRTUAL_DISPLAY + hwc_display_contents_1_t *virtual_contents = displays[HWC_DISPLAY_VIRTUAL]; +#endif + + hwc_display_contents_1_t *secondary_contents = hdmi_contents; + ExynosDisplay* secondary_display = mHwc->externalDisplay; + + int primary_previous_drm_dma = -1; + int secondary_previous_drm_dma = -1; + int virtual_previous_drm_dma = -1; + ExynosMPPModule *previousDRMInternalMPPPrimary = NULL; + ExynosMPPModule *previousDRMInternalMPPSecondary = NULL; + ExynosMPPModule *previousDRMInternalMPPVirtual = NULL; + +#if defined(USES_DUAL_DISPLAY) + if (mHwc->hdmi_hpd == false) { + secondary_contents = fimd_contents1; + secondary_display = mHwc->secondaryDisplay; + } +#endif + + if ((mHwc->hdmi_hpd == false) && (mHwc->externalDisplay->isIONBufferAllocated())) { + bool noExtVideoBuffer = true; + for (size_t i = 0; i < mExternalMPPs.size(); i++) { + if (!mExternalMPPs[i]->checkNoExtVideoBuffer()) + noExtVideoBuffer = false; + } + + if (noExtVideoBuffer) + mHwc->externalDisplay->freeExtVideoBuffers(); + } + + /* Clear assigned flag */ + for (size_t i = 0; i < 
mInternalMPPs.size(); i++) { + if (mInternalMPPs[i]->mState != MPP_STATE_TRANSITION) + mInternalMPPs[i]->mState = MPP_STATE_FREE; + mInternalMPPs[i]->mCanBeUsed = true; + } + for (size_t i = 0; i < mExternalMPPs.size(); i++) { + if (mExternalMPPs[i]->mState != MPP_STATE_TRANSITION) + mExternalMPPs[i]->mState = MPP_STATE_FREE; + } + + if (fimd_contents) { + doPreProcessing(fimd_contents, mHwc->primaryDisplay, &primary_previous_drm_dma, &previousDRMInternalMPPPrimary); + } + if (secondary_contents) { + doPreProcessing(secondary_contents, secondary_display, &secondary_previous_drm_dma, &previousDRMInternalMPPSecondary); + } +#ifdef USES_VIRTUAL_DISPLAY + if (virtual_contents) { + doPreProcessing(virtual_contents, mHwc->virtualDisplay, &virtual_previous_drm_dma, &previousDRMInternalMPPVirtual); + } +#endif + + preAssignResource(); + addExternalMpp(displays); + + if (fimd_contents) { + handleHighPriorityLayers(fimd_contents, mHwc->primaryDisplay, + primary_previous_drm_dma, previousDRMInternalMPPPrimary, mNeedsReserveFbTargetPrimary); + } + + if (secondary_contents) { + handleHighPriorityLayers(secondary_contents, secondary_display, + secondary_previous_drm_dma, previousDRMInternalMPPSecondary, false); + } + +#ifdef USES_VIRTUAL_DISPLAY + if (virtual_contents) { + handleHighPriorityLayers(virtual_contents, mHwc->virtualDisplay, + virtual_previous_drm_dma, previousDRMInternalMPPVirtual, false); + } +#endif + + if (fimd_contents) { + handleLowPriorityLayers(fimd_contents, mHwc->primaryDisplay); + } + + if (secondary_contents) { + /* It's mirror mode */ + if ((mHwc->hdmi_hpd == true) && fimd_contents && (fimd_contents->numHwLayers > 0) && + (fimd_contents->numHwLayers == secondary_contents->numHwLayers) && + (fimd_contents->hwLayers[0].handle == secondary_contents->hwLayers[0].handle)) + secondary_display->mForceFb = true; + + handleLowPriorityLayers(secondary_contents, secondary_display); + } + +#ifdef USES_VIRTUAL_DISPLAY + if (virtual_contents) { +#ifndef USES_OVERLAY_FOR_WFD_UI_MIRROR + /* It's mirror mode */ + if (fimd_contents && (fimd_contents->numHwLayers > 0) && + (fimd_contents->numHwLayers == virtual_contents->numHwLayers) && + (fimd_contents->hwLayers[0].handle == virtual_contents->hwLayers[0].handle)) + mHwc->virtualDisplay->mForceFb = true; +#endif + handleLowPriorityLayers(virtual_contents, mHwc->virtualDisplay); + } +#endif + + return 0; +} + +void ExynosDisplayResourceManager::preAssignResource() +{ + return; +} + +bool ExynosDisplayResourceManager::preAssignIntMpp(ExynosDisplay *display, unsigned int mppType) +{ + int lastFreeIntMppIndex = -1; + int lastReservedIntMppIndex = -1; + int lastPreemptableIntMppIndex = -1; + for (size_t i = 0; i < mInternalMPPs.size(); i++) { + ExynosMPPModule* exynosMPP = (ExynosMPPModule *)mInternalMPPs[i]; + if ((exynosMPP->mType == mppType) && (exynosMPP->mState == MPP_STATE_FREE)) { + if (exynosMPP->mDisplay == display) + lastReservedIntMppIndex = i; + else if (exynosMPP->mDisplay == NULL) + lastFreeIntMppIndex = i; + else + lastPreemptableIntMppIndex = i; + } + } + if (lastReservedIntMppIndex != -1) { + ((ExynosMPPModule *)mInternalMPPs[lastReservedIntMppIndex])->preAssignDisplay(display); + return true; + } else if (lastFreeIntMppIndex != -1) { + ((ExynosMPPModule *)mInternalMPPs[lastFreeIntMppIndex])->preAssignDisplay(display); + return true; + } else if (lastPreemptableIntMppIndex != -1) { + ((ExynosMPPModule *)mInternalMPPs[lastPreemptableIntMppIndex])->preAssignDisplay(display); + return true; + } + return false; +} + +int 
ExynosDisplayResourceManager::getInternalMPPFromDMA(unsigned int dma, ExynosMPPModule** internalMPP) +{ + unsigned int mpp_type; + unsigned int mpp_index; + *internalMPP = NULL; + + switch (dma) { + case IDMA_G0: + case IDMA_G1: + case IDMA_G2: + case IDMA_G3: + /* VPP is always used in DRM playback case */ + return -1; + case IDMA_VG0: + mpp_type = MPP_VG; + mpp_index = 0; + break; + case IDMA_VG1: + mpp_type = MPP_VG; + mpp_index = 1; + break; + case IDMA_VGR0: + mpp_type = MPP_VGR; + mpp_index = 0; + break; + case IDMA_VGR1: + mpp_type = MPP_VGR; + mpp_index = 1; + break; + default: + return -1; + } + + for (size_t i = 0; i < mInternalMPPs.size(); i++) + { + ExynosMPPModule* exynosMPP = (ExynosMPPModule *)mInternalMPPs[i]; + if ((exynosMPP->mType == mpp_type) && (exynosMPP->mIndex == mpp_index)) { + *internalMPP = exynosMPP; + return 1; + } + } + return -1; +} diff --git a/libvppdisplay/ExynosDisplayResourceManager.h b/libvppdisplay/ExynosDisplayResourceManager.h new file mode 100644 index 0000000..db9b46a --- /dev/null +++ b/libvppdisplay/ExynosDisplayResourceManager.h @@ -0,0 +1,45 @@ +#ifndef EXYNOS_RESOURCE_MANAGER_H +#define EXYNOS_RESOURCE_MANAGER_H + +#include +#include +#include + +class ExynosMPPModule; +class ExynosDisplay; + +class ExynosMPPVector : public android::SortedVector< ExynosMPPModule* > { + public: + ExynosMPPVector(); + ExynosMPPVector(const ExynosMPPVector& rhs); + virtual int do_compare(const void* lhs, const void* rhs) const; +}; + +class ExynosDisplayResourceManager { + public: + ExynosDisplayResourceManager(struct exynos5_hwc_composer_device_1_t *pdev); + virtual ~ExynosDisplayResourceManager(); + ExynosMPPVector mInternalMPPs; + android::Vector< ExynosMPPModule* > mExternalMPPs; + struct exynos5_hwc_composer_device_1_t *mHwc; + virtual int assignResources(size_t numDisplays, hwc_display_contents_1_t** displays); + void cleanupMPPs(); + void dumpMPPs(android::String8& result); + protected: + bool mNeedsReserveFbTargetPrimary; + + void removeUnAssignedIntMpp(ExynosMPPVector &internalMPPs); + void addUnAssignedIntMpp(ExynosDisplay *display); + void addExternalMpp(hwc_display_contents_1_t** contents); + virtual void preAssignResource(); + virtual bool preAssignIntMpp(ExynosDisplay *display, unsigned int mppType); + void doPreProcessing(hwc_display_contents_1_t* contents, ExynosDisplay* display, + int *previous_drm_dma, ExynosMPPModule **previousDRMInternalMPP); + void handleHighPriorityLayers(hwc_display_contents_1_t* contents, ExynosDisplay* display, + int previous_drm_dma, ExynosMPPModule *previousDRMInternalMPP, bool reserveFbTarget); + void handleLowPriorityLayers(hwc_display_contents_1_t* contents, ExynosDisplay* display); + void printDisplyInfos(size_t type); + int getInternalMPPFromDMA(unsigned int dma, ExynosMPPModule** internalMPP); +}; + +#endif diff --git a/libvppdisplay/ExynosOverlayDisplay.cpp b/libvppdisplay/ExynosOverlayDisplay.cpp new file mode 100644 index 0000000..f9a6e0f --- /dev/null +++ b/libvppdisplay/ExynosOverlayDisplay.cpp @@ -0,0 +1,101 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "display" +#include "ExynosOverlayDisplay.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosHWCDebug.h" +#if defined(USES_DUAL_DISPLAY) +#include "ExynosSecondaryDisplayModule.h" +#endif + +#ifdef G2D_COMPOSITION +#include "ExynosG2DWrapper.h" +#endif + +ExynosOverlayDisplay::ExynosOverlayDisplay(int __unused numMPPs, struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(EXYNOS_PRIMARY_DISPLAY, pdev) +{ + this->mHwc = 
pdev; +#ifndef USES_DUAL_DISPLAY + mInternalDMAs.add(IDMA_G0); +#endif + mInternalDMAs.add(IDMA_G1); +} + +void ExynosOverlayDisplay::doPreProcessing(hwc_display_contents_1_t* contents) +{ + mInternalDMAs.clear(); +#ifndef USES_DUAL_DISPLAY + mInternalDMAs.add(IDMA_G0); +#endif + mInternalDMAs.add(IDMA_G1); + ExynosDisplay::doPreProcessing(contents); +} + +ExynosOverlayDisplay::~ExynosOverlayDisplay() +{ +} + +#if defined(USES_DUAL_DISPLAY) +int ExynosOverlayDisplay::set_dual(hwc_display_contents_1_t *contentsPrimary, hwc_display_contents_1_t *contentsSecondary) +{ + hwc_layer_1_t *fb_layer = NULL; + int err = 0; + + if (mFbWindow != NO_FB_NEEDED) { + if (contentsPrimary->numHwLayers >= 1 && + contentsPrimary->hwLayers[contentsPrimary->numHwLayers - 1].compositionType == HWC_FRAMEBUFFER_TARGET) + fb_layer = &contentsPrimary->hwLayers[contentsPrimary->numHwLayers - 1]; + + if (CC_UNLIKELY(!fb_layer)) { + DISPLAY_LOGE("framebuffer target expected, but not provided"); + err = -EINVAL; + } else { + DISPLAY_LOGD(eDebugDefault, "framebuffer target buffer:"); + dumpLayer(fb_layer); + } + } + + int fence; + if (!err) { + fence = postFrame(contentsPrimary); + if (fence < 0) + err = fence; + } + + if (err) + fence = clearDisplay(); + + if (fence == 0) { + /* + * WIN_CONFIG is skipped, not error + */ + fence = -1; + if (mLastRetireFenceFd >= 0) { + int dup_fd = dup(mLastRetireFenceFd); + if (dup_fd >= 0) { + fence = dup_fd; + mLastRetireFenceFd = dup_fd; + dupFence(fence, contentsPrimary); + } else { + DISPLAY_LOGW("mLastRetireFenceFd dup failed: %s", strerror(errno)); + mLastRetireFenceFd = -1; + } + } else { + ALOGE("WIN_CONFIG is skipped, but mLastRetireFenceFd is not valid"); + } + } else { + mLastRetireFenceFd = fence; + dupFence(fence, contentsPrimary); + } + mHwc->secondaryDisplay->mLastRetireFenceFd = fence; + mHwc->secondaryDisplay->dupFence(fence, contentsSecondary); + + return err; +} +#else +int ExynosOverlayDisplay::set_dual(hwc_display_contents_1_t __unused *contentsPrimary, hwc_display_contents_1_t __unused *contentsSecondary) +{ + return -1; +} +#endif diff --git a/libvppdisplay/ExynosOverlayDisplay.h b/libvppdisplay/ExynosOverlayDisplay.h new file mode 100644 index 0000000..8d1fdee --- /dev/null +++ b/libvppdisplay/ExynosOverlayDisplay.h @@ -0,0 +1,26 @@ +#ifndef EXYNOS_PRIMARY_DISPLAY_H +#define EXYNOS_PRIMARY_DISPLAY_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +#define S3D_ERROR -1 +#ifndef HDMI_INCAPABLE +#define HDMI_PRESET_DEFAULT V4L2_DV_1080P60 +#endif +#define HDMI_PRESET_ERROR -1 + +class ExynosMPPModule; + +class ExynosOverlayDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosOverlayDisplay(int numGSCs, struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosOverlayDisplay(); + int set_dual(hwc_display_contents_1_t *contentsPrimary, hwc_display_contents_1_t *contentsSecondary); + protected: + virtual void doPreProcessing(hwc_display_contents_1_t* contents); + +}; + +#endif diff --git a/libvppdisplay/ExynosSecondaryDisplay.cpp b/libvppdisplay/ExynosSecondaryDisplay.cpp new file mode 100644 index 0000000..ffd81da --- /dev/null +++ b/libvppdisplay/ExynosSecondaryDisplay.cpp @@ -0,0 +1,100 @@ +#include "ExynosSecondaryDisplay.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosPrimaryDisplay.h" + +#define LOG_TAG "display" +class ExynosPrimaryDisplay; + +ExynosSecondaryDisplay::ExynosSecondaryDisplay(struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(EXYNOS_SECONDARY_DISPLAY, pdev) +{ + this->mHwc = pdev; + 
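+    /* The secondary display of the dual-display configuration copies its geometry and vsync period from primaryDisplay below; its layers are then shifted right by half that width (mXres/2) in doPreProcessing()/set() before being posted, e.g. a layer with displayFrame.left == 0 is handed down with left == mXres/2. */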
mXres = pdev->primaryDisplay->mXres; + mYres = pdev->primaryDisplay->mYres; + mXdpi = pdev->primaryDisplay->mXdpi; + mYdpi = pdev->primaryDisplay->mYdpi; + mVsyncPeriod = pdev->primaryDisplay->mVsyncPeriod; + + ALOGD("using\n" + "xres = %d px\n" + "yres = %d px\n" + "xdpi = %d dpi\n" + "ydpi = %d dpi\n" + "vsyncPeriod = %d msec\n", + mXres, mYres, mXdpi, mYdpi, mVsyncPeriod); +} + +ExynosSecondaryDisplay::~ExynosSecondaryDisplay() +{ + disable(); +} + +int ExynosSecondaryDisplay::enable() +{ + if (mEnabled) + return 0; + + /* To do: Should be implemented */ + + mEnabled = true; + + return 0; +} + +int ExynosSecondaryDisplay::disable() +{ + if (!mEnabled) + return 0; + + /* To do: Should be implemented */ + + mEnabled = false; + + return 0; +} + +void ExynosSecondaryDisplay::doPreProcessing(hwc_display_contents_1_t* contents) +{ + mInternalDMAs.clear(); + mInternalDMAs.add(IDMA_G0); + ExynosDisplay::doPreProcessing(contents); + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.displayFrame.left += (mXres/2); + layer.displayFrame.right += (mXres/2); + } +} + +int ExynosSecondaryDisplay::prepare(hwc_display_contents_1_t *contents) +{ + ExynosDisplay::prepare(contents); + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.displayFrame.left -= (mXres/2); + layer.displayFrame.right -= (mXres/2); + } + return 0; +} + +int ExynosSecondaryDisplay::set(hwc_display_contents_1_t *contents) +{ + int ret = 0; + /* Change position */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.displayFrame.left += (mXres/2); + layer.displayFrame.right += (mXres/2); + } + + ret = ExynosDisplay::set(contents); + + /* Restore position */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + layer.displayFrame.left -= (mXres/2); + layer.displayFrame.right -= (mXres/2); + } + + return ret; +} diff --git a/libvppdisplay/ExynosSecondaryDisplay.h b/libvppdisplay/ExynosSecondaryDisplay.h new file mode 100644 index 0000000..02f3de2 --- /dev/null +++ b/libvppdisplay/ExynosSecondaryDisplay.h @@ -0,0 +1,25 @@ +#ifndef EXYNOS_SECONDARY_DISPLAY_H +#define EXYNOS_SECONDARY_DISPLAY_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +class ExynosMPPModule; + +class ExynosSecondaryDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosSecondaryDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosSecondaryDisplay(); + int enable(); + int disable(); + virtual int prepare(hwc_display_contents_1_t *contents); + virtual int set(hwc_display_contents_1_t *contents); + + bool mEnabled; + protected: + virtual void doPreProcessing(hwc_display_contents_1_t* contents); + +}; + +#endif diff --git a/libvppdisplay/NOTICE b/libvppdisplay/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libvppdisplay/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libvppdisplay_tiny/Android.mk b/libvppdisplay_tiny/Android.mk new file mode 100644 index 0000000..aa9a13e --- /dev/null +++ b/libvppdisplay_tiny/Android.mk @@ -0,0 +1,45 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
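+# Tiny display HAL variant: builds libdisplay from the reduced ExynosDisplay/ExynosOverlayDisplay/ExynosPrimaryDisplay sources, which only post the framebuffer target window and do no MPP/VPP resource management.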
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libexynosv4l2 libsync + + +LOCAL_CFLAGS += -DHLOG_CODE=1 +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwc_tiny \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule + + + + +LOCAL_SRC_FILES := \ + ExynosDisplay.cpp \ + ExynosOverlayDisplay.cpp \ + ExynosPrimaryDisplay.cpp + + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libdisplay + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libvppdisplay_tiny/ExynosDisplay.cpp b/libvppdisplay_tiny/ExynosDisplay.cpp new file mode 100644 index 0000000..8e6ddf7 --- /dev/null +++ b/libvppdisplay_tiny/ExynosDisplay.cpp @@ -0,0 +1,491 @@ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS + +//#define LOG_NDEBUG 0 +#define LOG_TAG "display" +#include "ExynosDisplay.h" + +#include +#include +#include + +#define DISPLAY_LOGD(msg, ...) ALOGD("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGV(msg, ...) ALOGV("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGI(msg, ...) ALOGI("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGW(msg, ...) ALOGW("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) +#define DISPLAY_LOGE(msg, ...) ALOGE("[%s] " msg, mDisplayName.string(), ##__VA_ARGS__) + +uint8_t formatToBpp(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + case HAL_PIXEL_FORMAT_RGBX_8888: + case HAL_PIXEL_FORMAT_BGRA_8888: +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: +#endif + return 32; + case HAL_PIXEL_FORMAT_RGB_565: + return 16; + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + case HAL_PIXEL_FORMAT_YV12: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M_TILED: + return 12; + default: + ALOGW("unrecognized pixel format %u", format); + return 0; + } +} + +bool frameChanged(decon_frame *f1, decon_frame *f2) +{ + return f1->x != f2->x || + f1->y != f2->y || + f1->w != f2->w || + f1->h != f2->h || + f1->f_w != f2->f_w || + f1->f_h != f2->f_h; +} + +bool winConfigChanged(decon_win_config *c1, decon_win_config *c2) +{ + return c1->state != c2->state || + c1->fd_idma[0] != c2->fd_idma[0] || + c1->fd_idma[1] != c2->fd_idma[1] || + c1->fd_idma[2] != c2->fd_idma[2] || + frameChanged(&c1->src, &c2->src) || + frameChanged(&c1->dst, &c2->dst) || + c1->format != c2->format || + c1->blending != c2->blending || + c1->plane_alpha != c2->plane_alpha; +} + +void ExynosDisplay::dumpConfig(decon_win_config &c) +{ + DISPLAY_LOGV("\tstate = %u", c.state); + if (c.state == c.DECON_WIN_STATE_BUFFER) { + DISPLAY_LOGV("\t\tfd = %d, dma = %u " + "src_f_w = %u, src_f_h = %u, src_x = %d, src_y = %d, src_w = %u, src_h = %u, " + "dst_f_w = %u, dst_f_h = %u, dst_x = %d, dst_y = %d, dst_w = %u, dst_h = %u, " + "format = %u, blending = %u, protection = %u", + c.fd_idma[0], c.idma_type, + c.src.f_w, c.src.f_h, c.src.x, c.src.y, c.src.w, c.src.h, + c.dst.f_w, c.dst.f_h, c.dst.x, c.dst.y, c.dst.w, c.dst.h, + c.format, c.blending, c.protection); + } + else if (c.state == c.DECON_WIN_STATE_COLOR) { + 
DISPLAY_LOGV("\t\tcolor = %u", c.color); + } +} + +enum decon_pixel_format halFormatToS3CFormat(int format) +{ + switch (format) { + case HAL_PIXEL_FORMAT_RGBA_8888: + return DECON_PIXEL_FORMAT_RGBA_8888; + case HAL_PIXEL_FORMAT_RGBX_8888: + return DECON_PIXEL_FORMAT_RGBX_8888; + case HAL_PIXEL_FORMAT_RGB_565: + return DECON_PIXEL_FORMAT_RGB_565; + case HAL_PIXEL_FORMAT_BGRA_8888: + return DECON_PIXEL_FORMAT_BGRA_8888; +#ifdef EXYNOS_SUPPORT_BGRX_8888 + case HAL_PIXEL_FORMAT_BGRX_8888: + return DECON_PIXEL_FORMAT_BGRX_8888; +#endif + case HAL_PIXEL_FORMAT_EXYNOS_YV12_M: + return DECON_PIXEL_FORMAT_YVU420M; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_P_M: + return DECON_PIXEL_FORMAT_YUV420M; + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M: + case HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP_M_FULL: + return DECON_PIXEL_FORMAT_NV21M; + case HAL_PIXEL_FORMAT_YCrCb_420_SP: + return DECON_PIXEL_FORMAT_NV21; + case HAL_PIXEL_FORMAT_EXYNOS_YCbCr_420_SP_M: + return DECON_PIXEL_FORMAT_NV12M; + default: + return DECON_PIXEL_FORMAT_MAX; + } +} + +bool isFormatSupported(int format) +{ + return halFormatToS3CFormat(format) < DECON_PIXEL_FORMAT_MAX; +} + +enum decon_blending halBlendingToS3CBlending(int32_t blending) +{ + switch (blending) { + case HWC_BLENDING_NONE: + return DECON_BLENDING_NONE; + case HWC_BLENDING_PREMULT: + return DECON_BLENDING_PREMULT; + case HWC_BLENDING_COVERAGE: + return DECON_BLENDING_COVERAGE; + + default: + return DECON_BLENDING_MAX; + } +} + +bool isBlendingSupported(int32_t blending) +{ + return halBlendingToS3CBlending(blending) < DECON_BLENDING_MAX; +} + +#define NUMA(a) (sizeof(a) / sizeof(a [0])) +const char *deconFormat2str(uint32_t format) +{ + android::String8 result; + + for (unsigned int n1 = 0; n1 < NUMA(deconFormat); n1++) { + if (format == deconFormat[n1].format) { + return deconFormat[n1].desc; + } + } + + result.appendFormat("? %08x", format); + return result; +} + +ExynosDisplay::ExynosDisplay(int numGSCs) + : mDisplayFd(-1), + mType(0), + mXres(0), + mYres(0), + mXdpi(0), + mYdpi(0), + mVsyncPeriod(0), + mBlanked(true), + mHwc(NULL) + +{ + + +} +ExynosDisplay::ExynosDisplay(uint32_t type, struct exynos5_hwc_composer_device_1_t *pdev) + : mDisplayFd(-1), + mType(type), + mXres(0), + mYres(0), + mXdpi(0), + mYdpi(0), + mVsyncPeriod(0), + mBlanked(true), + mHwc(pdev) + +{ + + switch (mType) { + case EXYNOS_VIRTUAL_DISPLAY: + mDisplayName = android::String8("VirtualDisplay"); + break; + case EXYNOS_EXTERNAL_DISPLAY: + mDisplayName = android::String8("ExternalDisplay"); + break; + case EXYNOS_PRIMARY_DISPLAY: + default: + mDisplayName = android::String8("PrimaryDisplay"); + } + +} + +ExynosDisplay::~ExynosDisplay() +{ +} + + +int ExynosDisplay::prepare(hwc_display_contents_1_t *contents) +{ + ATRACE_CALL(); + DISPLAY_LOGV("preparing %u layers for FIMD", contents->numHwLayers); + + mFbNeeded = false; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + //ALOGD("### ExynosDisplay::prepare [%d/%d] - compositionType %d", i, contents->numHwLayers, layer.compositionType); + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + mFbNeeded = true; + } else if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + ALOGD("### ExynosDisplay::prepare [%d/%d] - OVERLAY layer detected. 
changing into GLES", i, contents->numHwLayers); + layer.compositionType = HWC_FRAMEBUFFER; + } + } + + + return 0; +} + +int ExynosDisplay::set(hwc_display_contents_1_t *contents) +{ + hwc_layer_1_t *fb_layer = NULL; + int err = 0; + + if (mFbNeeded) { + if (contents->numHwLayers >= 1 && + contents->hwLayers[contents->numHwLayers - 1].compositionType == HWC_FRAMEBUFFER_TARGET) { + fb_layer = &contents->hwLayers[contents->numHwLayers - 1]; + } + if (CC_UNLIKELY(!fb_layer)) { + DISPLAY_LOGE("framebuffer target expected, but not provided"); + err = -EINVAL; + } else { + DISPLAY_LOGV("framebuffer target buffer:"); + } + } + + + int fence; + if (!err) { + fence = postFrame(contents); + if (fence < 0) + err = fence; + } + + if (err) + fence = clearDisplay(); + + if (fence == 0) { + /* + * Only happens in mLocalExternalDisplayPause scenario, changing S3D mode. + * not error + */ + fence = -1; + } else { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + + if (!(layer.flags & HWC_SKIP_RENDERING) && (mFbNeeded == true && layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { + int dup_fd = dup(fence); + if (dup_fd < 0) + ALOGD("release fence dup failed: %s", strerror(errno)); + layer.releaseFenceFd = dup_fd; + } + } + } + + contents->retireFenceFd = fence; + + return err; +} + +void ExynosDisplay::dump(android::String8& result) +{ + return; +} + + +void ExynosDisplay::dumpLayerInfo(android::String8& result) +{ + result.append("\n"); +} + + +int ExynosDisplay::clearDisplay() +{ + struct decon_win_config_data win_data; + memset(&win_data, 0, sizeof(win_data)); + + int ret = ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, &win_data); + LOG_ALWAYS_FATAL_IF(ret < 0, + "%s ioctl S3CFB_WIN_CONFIG failed to clear screen: %s", + mDisplayName.string(), strerror(errno)); + // the causes of an empty config failing are all unrecoverable + + return win_data.fence; +} + + +int32_t ExynosDisplay::getDisplayAttributes(const uint32_t attribute) +{ + switch(attribute) { + case HWC_DISPLAY_VSYNC_PERIOD: + return this->mVsyncPeriod; + + case HWC_DISPLAY_WIDTH: + return this->mXres; + + case HWC_DISPLAY_HEIGHT: + return this->mYres; + + case HWC_DISPLAY_DPI_X: + return this->mXdpi; + + case HWC_DISPLAY_DPI_Y: + return this->mYdpi; + + default: + DISPLAY_LOGE("unknown display attribute %u", attribute); + return -EINVAL; + } +} + +void ExynosDisplay::configureHandle(private_handle_t *handle, size_t index, + hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg) +{ + hwc_frect_t &sourceCrop = layer.sourceCropf; + hwc_rect_t &displayFrame = layer.displayFrame; + int32_t blending = layer.blending; + int32_t planeAlpha = layer.planeAlpha; + uint32_t x, y; + uint32_t w = WIDTH(displayFrame); + uint32_t h = HEIGHT(displayFrame); + uint8_t bpp = formatToBpp(handle->format); + uint32_t offset = ((uint32_t)sourceCrop.top * handle->stride + (uint32_t)sourceCrop.left) * bpp / 8; + if (displayFrame.left < 0) { + unsigned int crop = -displayFrame.left; + DISPLAY_LOGV("layer off left side of screen; cropping %u pixels from left edge", + crop); + x = 0; + w -= crop; + offset += crop * bpp / 8; + } else { + x = displayFrame.left; + } + + if (displayFrame.right > this->mXres) { + unsigned int crop = displayFrame.right - this->mXres; + DISPLAY_LOGV("layer off right side of screen; cropping %u pixels from right edge", + crop); + w -= crop; + } + + if (displayFrame.top < 0) { + unsigned int crop = -displayFrame.top; + DISPLAY_LOGV("layer off top side of screen; cropping %u pixels from top 
edge", + crop); + y = 0; + h -= crop; + offset += handle->stride * crop * bpp / 8; + } else { + y = displayFrame.top; + } + + if (displayFrame.bottom > this->mYres) { + int crop = displayFrame.bottom - this->mYres; + DISPLAY_LOGV("layer off bottom side of screen; cropping %u pixels from bottom edge", + crop); + h -= crop; + } + cfg.state = cfg.DECON_WIN_STATE_BUFFER; + cfg.fd_idma[0] = handle->fd; + cfg.fd_idma[1] = handle->fd1; + cfg.fd_idma[2] = handle->fd2; + cfg.idma_type = IDMA_G1; + cfg.dst.x = x; + cfg.dst.y = y; + cfg.dst.w = w; + cfg.dst.h = h; + cfg.dst.f_w = w; + cfg.dst.f_h = h; + cfg.format = halFormatToS3CFormat(handle->format); + + cfg.src.f_w = handle->stride; + cfg.src.f_h = handle->vstride; + cfg.src.x = (int)sourceCrop.left; + cfg.src.y = (int)sourceCrop.top; + + cfg.src.w = WIDTH(sourceCrop) - (cfg.src.x - (uint32_t)sourceCrop.left); + if (cfg.src.x + cfg.src.w > cfg.src.f_w) + cfg.src.w = cfg.src.f_w - cfg.src.x; + cfg.src.h = HEIGHT(sourceCrop) - (cfg.src.y - (uint32_t)sourceCrop.top); + if (cfg.src.y + cfg.src.h > cfg.src.f_h) + cfg.src.h = cfg.src.f_h - cfg.src.y; + cfg.blending = halBlendingToS3CBlending(blending); + cfg.fence_fd = fence_fd; + cfg.plane_alpha = 255; + if (planeAlpha && (planeAlpha < 255)) { + cfg.plane_alpha = planeAlpha; + } +} + +void ExynosDisplay::configureOverlay(hwc_layer_1_t *layer, size_t index, decon_win_config &cfg) +{ + if (layer->compositionType == HWC_BACKGROUND) { + hwc_color_t color = layer->backgroundColor; + cfg.state = cfg.DECON_WIN_STATE_COLOR; + cfg.color = (color.r << 16) | (color.g << 8) | color.b; + cfg.dst.x = 0; + cfg.dst.y = 0; + cfg.dst.w = this->mXres; + cfg.dst.h = this->mYres; + return; + } + private_handle_t *handle = private_handle_t::dynamicCast(layer->handle); + configureHandle(handle, index, *layer, layer->acquireFenceFd, cfg); +} + + +int ExynosDisplay::winconfigIoctl(decon_win_config_data *win_data) +{ + ATRACE_CALL(); + return ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, win_data); +} + +int ExynosDisplay::postFrame(hwc_display_contents_1_t* contents) +{ + ATRACE_CALL(); + struct decon_win_config_data win_data; + struct decon_win_config *config = win_data.config; + int win_map = 0; + memset(config, 0, sizeof(win_data.config)); + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + config[i].fence_fd = -1; + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + size_t window_index = 0; + + + if (layer.flags & HWC_SKIP_RENDERING) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + continue; + } + + + if (mFbNeeded == true && layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + config[window_index].protection = 0; + + configureOverlay(&layer, i, config[window_index]); + } + if (window_index == 0 && config[window_index].blending != DECON_BLENDING_NONE) { + DISPLAY_LOGV("blending not supported on window 0; forcing BLENDING_NONE"); + config[window_index].blending = DECON_BLENDING_NONE; + } + } + + int ret = winconfigIoctl(&win_data); + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) + if (config[i].fence_fd != -1) + close(config[i].fence_fd); + + if (ret < 0) { + DISPLAY_LOGE("ioctl S3CFB_WIN_CONFIG failed: %s", strerror(errno)); + return ret; + } + if (contents->numHwLayers == 1) { + hwc_layer_1_t &layer = contents->hwLayers[0]; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + 
layer.releaseFenceFd = -1; + } + + + return win_data.fence; +} + diff --git a/libvppdisplay_tiny/ExynosDisplay.h b/libvppdisplay_tiny/ExynosDisplay.h new file mode 100644 index 0000000..b1cf679 --- /dev/null +++ b/libvppdisplay_tiny/ExynosDisplay.h @@ -0,0 +1,114 @@ +#ifndef EXYNOS_DISPLAY_H +#define EXYNOS_DISPLAY_H + +#include +#include +#include +#include "ExynosHWC.h" + +inline int WIDTH(const hwc_rect &rect) { return rect.right - rect.left; } +inline int HEIGHT(const hwc_rect &rect) { return rect.bottom - rect.top; } +inline int WIDTH(const hwc_frect_t &rect) { return (int)(rect.right - rect.left); } +inline int HEIGHT(const hwc_frect_t &rect) { return (int)(rect.bottom - rect.top); } + + +#define HWC_SKIP_RENDERING 0x80000000 + +const struct deconFormat { + uint32_t format; + const char *desc; +} deconFormat[] = { + {DECON_PIXEL_FORMAT_ARGB_8888, "ARGB8888"}, + {DECON_PIXEL_FORMAT_ABGR_8888, "ABGR8888"}, + {DECON_PIXEL_FORMAT_RGBA_8888, "RGBA8888"}, + {DECON_PIXEL_FORMAT_BGRA_8888, "BGRA8888"}, + {DECON_PIXEL_FORMAT_XRGB_8888, "XRGB8888"}, + {DECON_PIXEL_FORMAT_XBGR_8888, "XBGR8888"}, + {DECON_PIXEL_FORMAT_RGBX_8888, "RGBX8888"}, + {DECON_PIXEL_FORMAT_BGRX_8888, "BGRX8888"}, + {DECON_PIXEL_FORMAT_RGBA_5551, "RGBA5551"}, + {DECON_PIXEL_FORMAT_RGB_565, "RGB565"}, + {DECON_PIXEL_FORMAT_NV16, "FORMATNV16"}, + {DECON_PIXEL_FORMAT_NV61, "FORMATNV61"}, + {DECON_PIXEL_FORMAT_YVU422_3P, "YVU4223P"}, + {DECON_PIXEL_FORMAT_NV12, "FORMATNV12"}, + {DECON_PIXEL_FORMAT_NV21, "FORMATNV21"}, + {DECON_PIXEL_FORMAT_NV12M, "FORMATNV12M"}, + {DECON_PIXEL_FORMAT_NV21M, "FORMATNV21M"}, + {DECON_PIXEL_FORMAT_YUV420, "FORMATYUV420"}, + {DECON_PIXEL_FORMAT_YVU420, "FORMATYVU420"}, + {DECON_PIXEL_FORMAT_YUV420M, "FORMATYUV420M"}, + {DECON_PIXEL_FORMAT_YVU420M, "FORMATYVU420M"}, +}; + +enum { + EXYNOS_PRIMARY_DISPLAY = 0, +#if defined(USES_DUAL_DISPLAY) + EXYNOS_SECONDARY_DISPLAY, +#endif + EXYNOS_EXTERNAL_DISPLAY, + EXYNOS_VIRTUAL_DISPLAY +}; + +class ExynosLayerInfo { + public: + int32_t compositionType; + uint32_t mCheckOverlayFlag; + uint32_t mCheckMPPFlag; + int32_t mWindowIndex; + uint32_t mDmaType; +}; + + +enum decon_pixel_format halFormatToS3CFormat(int format); +bool isFormatSupported(int format); +enum decon_blending halBlendingToS3CBlending(int32_t blending); +bool isBlendingSupported(int32_t blending); +const char *deconFormat2str(uint32_t format); +bool winConfigChanged(decon_win_config *c1, decon_win_config *c2); +bool frameChanged(decon_frame *f1, decon_frame *f2); + +class ExynosDisplay { + public: + /* Methods */ + ExynosDisplay(int numMPPs); + ExynosDisplay(uint32_t type, struct exynos5_hwc_composer_device_1_t *pdev); + virtual ~ExynosDisplay(); + + virtual int prepare(hwc_display_contents_1_t *contents); + virtual int set(hwc_display_contents_1_t *contents); + virtual void dump(android::String8& result); + virtual void dumpLayerInfo(android::String8& result); + + virtual int clearDisplay(); + virtual int32_t getDisplayAttributes(const uint32_t attribute); + void dumpConfig(decon_win_config &c); + + /* Fields */ + int mDisplayFd; + uint32_t mType; + int32_t mXres; + int32_t mYres; + int32_t mXdpi; + int32_t mYdpi; + int32_t mVsyncPeriod; + bool mBlanked; + struct exynos5_hwc_composer_device_1_t *mHwc; + + + + bool mFbNeeded; + + android::String8 mDisplayName; + + + + protected: + /* Methods */ + virtual void configureHandle(private_handle_t *handle, size_t index, hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg); + virtual void configureOverlay(hwc_layer_1_t *layer, size_t index, 
decon_win_config &cfg); + + virtual int postFrame(hwc_display_contents_1_t *contents); + virtual int winconfigIoctl(decon_win_config_data *win_data); +}; +#endif diff --git a/libvppdisplay_tiny/ExynosOverlayDisplay.cpp b/libvppdisplay_tiny/ExynosOverlayDisplay.cpp new file mode 100644 index 0000000..b605543 --- /dev/null +++ b/libvppdisplay_tiny/ExynosOverlayDisplay.cpp @@ -0,0 +1,12 @@ +#include "ExynosOverlayDisplay.h" + +ExynosOverlayDisplay::ExynosOverlayDisplay(int __UNUSED__ numMPPs, struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(EXYNOS_PRIMARY_DISPLAY, pdev) +{ + this->mHwc = pdev; +} + +ExynosOverlayDisplay::~ExynosOverlayDisplay() +{ +} + diff --git a/libvppdisplay_tiny/ExynosOverlayDisplay.h b/libvppdisplay_tiny/ExynosOverlayDisplay.h new file mode 100644 index 0000000..61b26e6 --- /dev/null +++ b/libvppdisplay_tiny/ExynosOverlayDisplay.h @@ -0,0 +1,14 @@ +#ifndef EXYNOS_PRIMARY_DISPLAY_H +#define EXYNOS_PRIMARY_DISPLAY_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" + +class ExynosOverlayDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosOverlayDisplay(int numGSCs, struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosOverlayDisplay(); +}; + +#endif diff --git a/libvppdisplay_tiny/ExynosPrimaryDisplay.cpp b/libvppdisplay_tiny/ExynosPrimaryDisplay.cpp new file mode 100644 index 0000000..ec51e21 --- /dev/null +++ b/libvppdisplay_tiny/ExynosPrimaryDisplay.cpp @@ -0,0 +1,10 @@ +#include "ExynosPrimaryDisplay.h" + +ExynosPrimaryDisplay::ExynosPrimaryDisplay(int numGSCs, struct exynos5_hwc_composer_device_1_t *pdev) : + ExynosOverlayDisplay(numGSCs, pdev) +{ +} + +ExynosPrimaryDisplay::~ExynosPrimaryDisplay() +{ +} \ No newline at end of file diff --git a/libvppdisplay_tiny/ExynosPrimaryDisplay.h b/libvppdisplay_tiny/ExynosPrimaryDisplay.h new file mode 100644 index 0000000..5d7255d --- /dev/null +++ b/libvppdisplay_tiny/ExynosPrimaryDisplay.h @@ -0,0 +1,13 @@ +#ifndef EXYNOS_DISPLAY_MODULE_H +#define EXYNOS_DISPLAY_MODULE_H + +#include "ExynosOverlayDisplay.h" + +class ExynosPrimaryDisplay : public ExynosOverlayDisplay { + public: + ExynosPrimaryDisplay(int numGSCs, struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosPrimaryDisplay(); + +}; + +#endif diff --git a/libvppdisplay_tiny/NOTICE b/libvppdisplay_tiny/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libvppdisplay_tiny/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/libvpphdmi/Android.mk b/libvpphdmi/Android.mk new file mode 100644 index 0000000..5f2872a --- /dev/null +++ b/libvpphdmi/Android.mk @@ -0,0 +1,58 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
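For reference, the libvppdisplay_tiny classes added above (ExynosDisplay, ExynosOverlayDisplay, ExynosPrimaryDisplay) are driven by the HWC1 prepare/set entry points in ExynosHWC.cpp, which is not part of this hunk. The sketch below is illustrative only, not vendor code: hwc_context_t is a hypothetical stand-in for exynos5_hwc_composer_device_1_t, and the function names are made up for the example.

#include "ExynosPrimaryDisplay.h"   // from libvppdisplay_tiny above

// Hypothetical owner struct standing in for exynos5_hwc_composer_device_1_t,
// whose real definition lives in ExynosHWC.h.
struct hwc_context_t {
    ExynosPrimaryDisplay *primaryDisplay;
};

// prepare() decides per-layer composition (GLES fallback vs. overlay window);
// set() posts the frame via S3CFB_WIN_CONFIG and hands fences back to
// SurfaceFlinger through retireFenceFd / releaseFenceFd (see ExynosDisplay::set
// earlier in this patch).
static int hwc_prepare_primary(hwc_context_t *ctx, hwc_display_contents_1_t *contents)
{
    if (contents == NULL)
        return 0;
    return ctx->primaryDisplay->prepare(contents);
}

static int hwc_set_primary(hwc_context_t *ctx, hwc_display_contents_1_t *contents)
{
    if (contents == NULL)
        return 0;
    return ctx->primaryDisplay->set(contents);
}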
+ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libexynosv4l2 libsync libhwcutils libdisplay libmpp + +LOCAL_CFLAGS += -DLOG_TAG=\"hdmi\" + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(LOCAL_PATH)/../libvppdisplay \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp \ + $(TOP)/system/core/libsync/include + +ifeq ($(filter 3.10, $(TARGET_LINUX_KERNEL_VERSION)), 3.10) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/kernel-3.10-headers +else +ifeq ($(filter 3.18, $(TARGET_LINUX_KERNEL_VERSION)), 3.18) +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/kernel-3.18-headers +else +LOCAL_C_INCLUDES += $(TOP)/hardware/samsung_slsi/exynos/kernel-3.4-headers +endif +endif + +LOCAL_SRC_FILES := \ + ExynosExternalDisplay.cpp dv_timings.c + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhdmimodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libhdmi + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libvpphdmi/ExynosExternalDisplay.cpp b/libvpphdmi/ExynosExternalDisplay.cpp new file mode 100644 index 0000000..cc9fd89 --- /dev/null +++ b/libvpphdmi/ExynosExternalDisplay.cpp @@ -0,0 +1,1151 @@ +//#define LOG_NDEBUG 0 +#include "ExynosHWC.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosExternalDisplay.h" +#include "ExynosSecondaryDisplayModule.h" +#include "decon_tv.h" +#include + +extern struct v4l2_dv_timings dv_timings[]; +bool is_same_dv_timings(const struct v4l2_dv_timings *t1, + const struct v4l2_dv_timings *t2) +{ + if (t1->type == t2->type && + t1->bt.width == t2->bt.width && + t1->bt.height == t2->bt.height && + t1->bt.interlaced == t2->bt.interlaced && + t1->bt.polarities == t2->bt.polarities && + t1->bt.pixelclock == t2->bt.pixelclock && + t1->bt.hfrontporch == t2->bt.hfrontporch && + t1->bt.vfrontporch == t2->bt.vfrontporch && + t1->bt.vsync == t2->bt.vsync && + t1->bt.vbackporch == t2->bt.vbackporch && + (!t1->bt.interlaced || + (t1->bt.il_vfrontporch == t2->bt.il_vfrontporch && + t1->bt.il_vsync == t2->bt.il_vsync && + t1->bt.il_vbackporch == t2->bt.il_vbackporch))) + return true; + return false; +} + +int ExynosExternalDisplay::getDVTimingsIndex(int preset) +{ + for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + if (preset == preset_index_mappings[i].preset) + return preset_index_mappings[i].dv_timings_index; + } + return -1; +} + +ExynosExternalDisplay::ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev) + : ExynosDisplay(EXYNOS_EXTERNAL_DISPLAY, pdev), + mEnabled(false), + mBlanked(false), + mUseSubtitles(false), + mReserveMemFd(-1), + mDRMTempBuffer(NULL), + mFlagIONBufferAllocated(false) +{ + mXres = 0; + mYres = 0; + mXdpi = 0; + mYdpi = 0; + mVsyncPeriod = 0; + mInternalDMAs.add(IDMA_G3); + mReserveMemFd = open(HDMI_RESERVE_MEM_DEV_NAME, O_RDWR); + if (mReserveMemFd < 0) + ALOGE("Fail to open hdmi_reserve_mem_fd %s, error(%d)", HDMI_RESERVE_MEM_DEV_NAME, mReserveMemFd); + else + ALOGI("Open %s", 
HDMI_RESERVE_MEM_DEV_NAME); +} + +ExynosExternalDisplay::~ExynosExternalDisplay() +{ + if (mDRMTempBuffer != NULL) { + mAllocDevice->free(mAllocDevice, mDRMTempBuffer); + mDRMTempBuffer = NULL; + } + if (mReserveMemFd > 0) + close(mReserveMemFd); +} + +void ExynosExternalDisplay::allocateLayerInfos(hwc_display_contents_1_t* contents) +{ + ExynosDisplay::allocateLayerInfos(contents); +} + +int ExynosExternalDisplay::prepare(hwc_display_contents_1_t* contents) +{ + ExynosDisplay::prepare(contents); + return 0; +} + +int ExynosExternalDisplay::postMPPM2M(hwc_layer_1_t &layer, struct decon_win_config *config, int win_map, int index) +{ + int err = 0; + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if ((mHasDrmSurface == true) & (getDrmMode(handle->flags) != NO_DRM)) { + if (checkIONBufferPrepared() == false) { + ALOGV("skip DRM video"); + handle->flags &= ~GRALLOC_USAGE_VIDEO_EXT; + err = configureDRMSkipHandle(config[win_map]); + config[win_map].idma_type = (decon_idma_type)mLayerInfos[index]->mDmaType; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + return err; + } else { + handle->flags |= GRALLOC_USAGE_VIDEO_EXT; + } + } + + return ExynosDisplay::postMPPM2M(layer, config, win_map, index); +} + +void ExynosExternalDisplay::handleStaticLayers(hwc_display_contents_1_t *contents, struct decon_win_config_data &win_data, int __unused tot_ovly_wins) +{ + int win_map = 0; + if (mLastFbWindow >= NUM_HW_WINDOWS - 1) { + ALOGE("handleStaticLayers:: invalid mLastFbWindow(%d)", mLastFbWindow); + return; + } + win_map = mLastFbWindow + 1; + ALOGV("[USE] SKIP_STATIC_LAYER_COMP, mLastFbWindow(%d), win_map(%d)\n", mLastFbWindow, win_map); + + memcpy(&win_data.config[win_map], + &mLastConfigData.config[win_map], sizeof(struct decon_win_config)); + win_data.config[win_map].fence_fd = -1; + + for (size_t i = mFirstFb; i <= mLastFb; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.compositionType == HWC_OVERLAY) { + ALOGV("[SKIP_STATIC_LAYER_COMP] %d layer.handle: 0x%p, layer.acquireFenceFd: %d\n", i, layer.handle, layer.acquireFenceFd); + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + } + } +} + +int ExynosExternalDisplay::postFrame(hwc_display_contents_1_t* contents) +{ + struct decon_win_config_data win_data; + struct decon_win_config *config = win_data.config; + int win_map = 0; + int tot_ovly_wins = 0; + bool hdmiDisabled = false; + + memset(mLastHandles, 0, sizeof(mLastHandles)); + memset(mLastMPPMap, 0, sizeof(mLastMPPMap)); + memset(config, 0, sizeof(win_data.config)); + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + config[i].fence_fd = -1; + mLastMPPMap[i].internal_mpp.type = -1; + mLastMPPMap[i].external_mpp.type = -1; + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle != NULL) + handle = private_handle_t::dynamicCast(layer.handle); + // window 0 is background layer + size_t window_index = mLayerInfos[i]->mWindowIndex + 1; + + if ((layer.flags & HWC_SKIP_RENDERING) && + ((handle == NULL) || (getDrmMode(handle->flags) == NO_DRM))) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + continue; + } + + if ((layer.compositionType == HWC_OVERLAY) || + (mFbNeeded == true && layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { 
+ mLastHandles[window_index] = layer.handle; + + if (handle == NULL) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + continue; + } + + if (getDrmMode(handle->flags) == SECURE_DRM) + config[window_index].protection = 1; + else + config[window_index].protection = 0; + + if ((int)i == mForceOverlayLayerIndex && + mHwc->mS3DMode != S3D_MODE_DISABLED && mHwc->mHdmiResolutionChanged) { + if (isPresetSupported(mHwc->mHdmiPreset)) { + mHwc->mS3DMode = S3D_MODE_RUNNING; + setPreset(mHwc->mHdmiPreset); + /* + * HDMI was disabled by setPreset + * This frame will be handled from next frame + */ + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + layer.flags = HWC_SKIP_RENDERING; + hdmiDisabled = true; + continue; + } else { + mHwc->mS3DMode = S3D_MODE_RUNNING; + mHwc->mHdmiResolutionChanged = false; + mHwc->mHdmiResolutionHandled = true; + } + } + if (mLayerInfos[i]->mInternalMPP != NULL) { + mLastMPPMap[window_index].internal_mpp.type = mLayerInfos[i]->mInternalMPP->mType; + mLastMPPMap[window_index].internal_mpp.index = mLayerInfos[i]->mInternalMPP->mIndex; + } + if (mLayerInfos[i]->mExternalMPP != NULL) { + mLastMPPMap[window_index].external_mpp.type = mLayerInfos[i]->mExternalMPP->mType; + mLastMPPMap[window_index].external_mpp.index = mLayerInfos[i]->mExternalMPP->mIndex; + if ((int)i == mForceOverlayLayerIndex && mHwc->mS3DMode == S3D_MODE_RUNNING) { + if (isPresetSupported(mHwc->mHdmiPreset)) { + mLayerInfos[i]->mExternalMPP->mS3DMode = S3D_NONE; + } else { + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + mLayerInfos[i]->mExternalMPP->mS3DMode = S3D_SBS; + else if (S3DFormat == S3D_TB) + mLayerInfos[i]->mExternalMPP->mS3DMode = S3D_TB; + } + } + if (postMPPM2M(layer, config, window_index, i) < 0) + continue; + } else { + configureOverlay(&layer, i, config[window_index]); + } + } + if (window_index == 0 && config[window_index].blending != DECON_BLENDING_NONE) { + ALOGV("blending not supported on window 0; forcing BLENDING_NONE"); + config[window_index].blending = DECON_BLENDING_NONE; + } + } + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + ALOGV("external display: window %u configuration:", i); + dumpConfig(config[i]); + } + + if (hdmiDisabled) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + } + return 0; + } + + if (this->mVirtualOverlayFlag) { + handleStaticLayers(contents, win_data, tot_ovly_wins); + } + + if (contents->numHwLayers == 1) { + hwc_layer_1_t &layer = contents->hwLayers[0]; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + } + + if (checkConfigChanged(win_data, mLastConfigData) == false) + { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + size_t window_index = mLayerInfos[i]->mWindowIndex; + + if ((layer.compositionType == HWC_OVERLAY) || + (layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + + if (mLayerInfos[i]->mExternalMPP != NULL) { + mLayerInfos[i]->mExternalMPP->mCurrentBuf = (mLayerInfos[i]->mExternalMPP->mCurrentBuf + 1) % mLayerInfos[i]->mExternalMPP->mNumAvailableDstBuffers; + } + } + } + return 0; + } + + int ret = 
ioctl(this->mDisplayFd, S3CFB_WIN_CONFIG, &win_data); + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) + if (config[i].fence_fd != -1) + close(config[i].fence_fd); + if (ret < 0) { + ALOGE("ioctl S3CFB_WIN_CONFIG failed: %s", strerror(errno)); + return ret; + } + + memcpy(&(this->mLastConfigData), &win_data, sizeof(win_data)); + + if (!this->mVirtualOverlayFlag) + this->mLastFbWindow = mFbWindow; + + if ((mYuvLayers != 0) && (mDRMTempBuffer != NULL)) { + mAllocDevice->free(mAllocDevice, mDRMTempBuffer); + mDRMTempBuffer = NULL; + } + return win_data.fence; +} + +int ExynosExternalDisplay::set(hwc_display_contents_1_t* contents) +{ + int err = 0; + bool drm_skipped = false; + + if (!mEnabled || mBlanked) { + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.acquireFenceFd >= 0) { + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + } + } + return 0; + } + + if ((mHasDrmSurface == true) && (mForceOverlayLayerIndex != -1) && + (mLayerInfos[mForceOverlayLayerIndex]->mExternalMPP != NULL)) { + hwc_layer_1_t &layer = contents->hwLayers[mForceOverlayLayerIndex]; + if (mFlagIONBufferAllocated == false) { + layer.flags |= HWC_SKIP_RENDERING; + drm_skipped = true; + } else { + layer.flags &= ~(HWC_SKIP_RENDERING); + } + } + err = ExynosDisplay::set(contents); + + /* HDMI was disabled to change S3D mode */ + if (mEnabled == false) + return 0; + + /* Restore flags */ + if (drm_skipped) { + if ((mHasDrmSurface == true) && (mForceOverlayLayerIndex != -1) && + (mLayerInfos[mForceOverlayLayerIndex]->mExternalMPP != NULL)) { + hwc_layer_1_t &layer = contents->hwLayers[mForceOverlayLayerIndex]; + layer.flags &= ~(HWC_SKIP_RENDERING); + } + } + + if (this->mYuvLayers == 0 && !mHwc->local_external_display_pause) { + if (mHwc->mS3DMode == S3D_MODE_RUNNING && contents->numHwLayers > 1) { + int preset = convert3DTo2D(mHwc->mHdmiCurrentPreset); + if (isPresetSupported(preset)) { + ALOGI("S3D video is removed, Set Resolution(%d)", preset); + setPreset(preset); + mHwc->mS3DMode = S3D_MODE_STOPPING; + mHwc->mHdmiPreset = preset; + if (mHwc->procs) + mHwc->procs->invalidate(mHwc->procs); + } else { + ALOGI("S3D video is removed, Resolution(%d) is not supported. 
mHdmiCurrentPreset(%d)", preset, mHwc->mHdmiCurrentPreset); + mHwc->mS3DMode = S3D_MODE_DISABLED; + mHwc->mHdmiPreset = mHwc->mHdmiCurrentPreset; + } + } + } + + return err; +} + +void ExynosExternalDisplay::determineYuvOverlay(hwc_display_contents_1_t *contents) +{ + mForceOverlayLayerIndex = -1; + mHasDrmSurface = false; + mYuvLayers = 0; + bool useVPPOverlayFlag = false; + + for (size_t i = 0; i < contents->numHwLayers; i++) { + ExynosMPPModule* supportedInternalMPP = NULL; + ExynosMPPModule* supportedExternalMPP = NULL; + + hwc_layer_1_t &layer = contents->hwLayers[i]; + useVPPOverlayFlag = false; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (getDrmMode(handle->flags) != NO_DRM) + useVPPOverlayFlag = true; +#if defined(GSC_VIDEO) + /* check yuv surface */ + if (!mForceFb && !isFormatRgb(handle->format)) { + if (isOverlaySupported(contents->hwLayers[i], i, useVPPOverlayFlag, &supportedInternalMPP, &supportedExternalMPP)) { + this->mYuvLayers++; + if (this->mHasDrmSurface == false) { + /* Assign MPP */ + if (supportedExternalMPP != NULL) + supportedExternalMPP->mState = MPP_STATE_ASSIGNED; + if (supportedInternalMPP != NULL) + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + + mForceOverlayLayerIndex = i; + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->mExternalMPP = supportedExternalMPP; + mLayerInfos[i]->mInternalMPP = supportedInternalMPP; + mLayerInfos[i]->compositionType = layer.compositionType; + + if ((mHwc->mS3DMode != S3D_MODE_DISABLED) && + mHwc->mHdmiResolutionChanged) + mHwc->mS3DMode = S3D_MODE_RUNNING; + /* Set destination size as full screen */ + if (mHwc->mS3DMode != S3D_MODE_DISABLED) { + layer.displayFrame.left = 0; + layer.displayFrame.top = 0; + layer.displayFrame.right = mXres; + layer.displayFrame.bottom = mYres; + } + + if ((getDrmMode(handle->flags) != NO_DRM) && + isBothMPPProcessingRequired(layer) && + (supportedInternalMPP != NULL)) { + layer.displayFrame.right = layer.displayFrame.left + + ALIGN_DOWN(WIDTH(layer.displayFrame), supportedInternalMPP->getCropWidthAlign(layer)); + layer.displayFrame.bottom = layer.displayFrame.top + + ALIGN_DOWN(HEIGHT(layer.displayFrame), supportedInternalMPP->getCropHeightAlign(layer)); + layer.flags &= ~HWC_SKIP_RENDERING; + } + + if ((getDrmMode(handle->flags) != NO_DRM) && + (supportedInternalMPP != NULL)) { + if (WIDTH(layer.displayFrame) < supportedInternalMPP->getMinWidth(layer)) { + ALOGE("determineYuvOverlay layer %d displayFrame width %d is smaller than vpp minWidth %d", + i, WIDTH(layer.displayFrame), supportedInternalMPP->getMinWidth(layer)); + layer.displayFrame.right = layer.displayFrame.left + + ALIGN_DOWN(WIDTH(layer.displayFrame), supportedInternalMPP->getMinWidth(layer)); + } + if (HEIGHT(layer.displayFrame) < supportedInternalMPP->getMinHeight(layer)) { + ALOGE("determineYuvOverlay layer %d displayFrame height %d is smaller than vpp minHeight %d", + i, HEIGHT(layer.displayFrame), supportedInternalMPP->getMinHeight(layer)); + layer.displayFrame.bottom = layer.displayFrame.top + + ALIGN_DOWN(HEIGHT(layer.displayFrame), supportedInternalMPP->getMinHeight(layer)); + } + } + } + } else { + if (getDrmMode(handle->flags) != NO_DRM) { + /* This layer should be overlay but HWC can't handle it */ + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + layer.flags |= HWC_SKIP_RENDERING; + } + } + + if (getDrmMode(handle->flags) != NO_DRM) { + this->mHasDrmSurface = true; + mForceOverlayLayerIndex = i; + } + } 
+#endif + } + } +} + + +void ExynosExternalDisplay::determineSupportedOverlays(hwc_display_contents_1_t *contents) +{ +#if defined(GSC_VIDEO) + if ((mHwc->mS3DMode != S3D_MODE_DISABLED) && (this->mYuvLayers == 1) && !mUseSubtitles) { + // UI layers will be skiped when S3D video is playing + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (mForceOverlayLayerIndex != (int)i) { + layer.compositionType = HWC_OVERLAY; + layer.flags = HWC_SKIP_RENDERING; + } + } + } +#endif + +#if !defined(GSC_VIDEO) + mForceFb = true; +#endif + + ExynosDisplay::determineSupportedOverlays(contents); + /* + * If GSC_VIDEO is not defined, + * all of layers are GLES except DRM video + */ +} + +void ExynosExternalDisplay::configureHandle(private_handle_t *handle, size_t index, + hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg) +{ + ExynosDisplay::configureHandle(handle, index, layer, fence_fd, cfg); + if ((mHwc->mS3DMode == S3D_MODE_RUNNING) && + ((int)index == mForceOverlayLayerIndex) && + (isPresetSupported(mHwc->mHdmiPreset) == false) && + (mLayerInfos[index]->mInternalMPP != NULL) && + (mLayerInfos[index]->mExternalMPP == NULL)) { + int S3DFormat = getS3DFormat(mHwc->mHdmiPreset); + if (S3DFormat == S3D_SBS) + cfg.src.w /= 2; + else if (S3DFormat == S3D_TB) + cfg.src.h /= 2; + } +} + +int ExynosExternalDisplay::openHdmi() +{ + int ret = 0; + int sw_fd; + + if (mHwc->externalDisplay->mDisplayFd > 0) + ret = mHwc->externalDisplay->mDisplayFd; + else { + mHwc->externalDisplay->mDisplayFd = open("/dev/graphics/fb1", O_RDWR); + if (mHwc->externalDisplay->mDisplayFd < 0) { + ALOGE("failed to open framebuffer for externalDisplay"); + } + ret = mHwc->externalDisplay->mDisplayFd; + } + + ALOGD("open fd for HDMI(%d)", ret); + + return ret; +} + +void ExynosExternalDisplay::closeHdmi() +{ + if (mDisplayFd > 0) { + close(mDisplayFd); + ALOGD("Close fd for HDMI"); + } + mDisplayFd = -1; +} + +void ExynosExternalDisplay::setHdmiStatus(bool status) +{ + if (status) { +#if defined(USES_VIRTUAL_DISPLAY) + char value[PROPERTY_VALUE_MAX]; + property_get("wlan.wfd.status", value, "disconnected"); + bool bWFDDisconnected = !strcmp(value, "disconnected"); + + if (bWFDDisconnected) { +#endif + if (mEnabled == false && mHwc->mS3DMode != S3D_MODE_DISABLED) + mHwc->mHdmiResolutionChanged = true; + + if (mEnabled == false) + requestIONMemory(); + enable(); +#if defined(USES_VIRTUAL_DISPLAY) + } +#endif + } else { + disable(); + closeHdmi(); + + if (mDRMTempBuffer != NULL) { + mAllocDevice->free(mAllocDevice, mDRMTempBuffer); + mDRMTempBuffer = NULL; + } + } +} + +bool ExynosExternalDisplay::isPresetSupported(unsigned int preset) +{ + bool found = false; + int index = 0; + int ret = 0; + exynos_hdmi_data hdmi_data; + int dv_timings_index = getDVTimingsIndex(preset); + + if (dv_timings_index < 0) { + ALOGE("%s: unsupported preset, %d", __func__, preset); + return -1; + } + + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_ENUM_PRESET; + while (true) { + hdmi_data.etimings.index = index++; + ret = ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data); + + if (ret < 0) { + if (errno == EINVAL) + break; + ALOGE("%s: enum_dv_timings error, %d", __func__, errno); + return -1; + } + + ALOGV("%s: %d width=%d height=%d", + __func__, hdmi_data.etimings.index, + hdmi_data.etimings.timings.bt.width, hdmi_data.etimings.timings.bt.height); + + if (is_same_dv_timings(&hdmi_data.etimings.timings, &dv_timings[dv_timings_index])) { + mXres = hdmi_data.etimings.timings.bt.width; + mYres 
= hdmi_data.etimings.timings.bt.height; + found = true; + mHwc->mHdmiCurrentPreset = preset; + break; + } + } + return found; +} + +int ExynosExternalDisplay::getConfig() +{ + if (!mHwc->hdmi_hpd) + return -1; + + exynos_hdmi_data hdmi_data; + int dv_timings_index = 0; + + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_PRESET; + if (ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: g_dv_timings error, %d", __func__, errno); + return -1; + } + + if (hwcHasApiVersion((hwc_composer_device_1_t*)mHwc, HWC_DEVICE_API_VERSION_1_4) == false) + mActiveConfigIndex = 0; + else { + /* + * getConfig is called only if cable is connected + * mActiveConfigIndex is 0 at this time + */ + mActiveConfigIndex = 0; + } + + for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + dv_timings_index = preset_index_mappings[i].dv_timings_index; + if (is_same_dv_timings(&hdmi_data.timings, &dv_timings[dv_timings_index])) { + float refreshRate = (float)((float)hdmi_data.timings.bt.pixelclock / + ((hdmi_data.timings.bt.width + hdmi_data.timings.bt.hfrontporch + hdmi_data.timings.bt.hsync + hdmi_data.timings.bt.hbackporch) * + (hdmi_data.timings.bt.height + hdmi_data.timings.bt.vfrontporch + hdmi_data.timings.bt.vsync + hdmi_data.timings.bt.vbackporch))); + mXres = hdmi_data.timings.bt.width; + mYres = hdmi_data.timings.bt.height; + mVsyncPeriod = 1000000000 / refreshRate; + mHwc->mHdmiCurrentPreset = preset_index_mappings[i].preset; + break; + } + } + ALOGD("HDMI resolution is (%d x %d)", mXres, mYres); + + return 0; +} + +int ExynosExternalDisplay::getDisplayConfigs(uint32_t *configs, size_t *numConfigs) +{ + int ret = 0; + if (!mHwc->hdmi_hpd) + return -1; + + exynos_hdmi_data hdmi_data; + size_t index = 0; + + cleanConfigurations(); + + /* configs store the index of mConfigurations */ + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_ENUM_PRESET; + while (index < (*numConfigs)) { + hdmi_data.etimings.index = index; + ret = ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data); + + if (ret < 0) { + if (errno == EINVAL) { + ALOGI("%s:: Total configurations %d", __func__, index); + break; + } + ALOGE("%s: enum_dv_timings error, %d", __func__, errno); + return -1; + } + + for (size_t i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) { + int dv_timings_index = preset_index_mappings[i].dv_timings_index; + if (is_same_dv_timings(&hdmi_data.etimings.timings, &dv_timings[dv_timings_index])) { + mConfigurations.push_back(dv_timings_index); + configs[mConfigurations.size() - 1] = dv_timings_index; + break; + } + } + index++; + } + + ALOGD("HDMI resolution is (%d x %d)", mXres, mYres); + *numConfigs = mConfigurations.size(); + dumpConfigurations(); + return 0; +} + +int ExynosExternalDisplay::getActiveConfig() +{ + if (!mHwc->hdmi_hpd) + return -1; + + return mActiveConfigIndex; +} + +void ExynosExternalDisplay::setHdmiResolution(int resolution, int s3dMode) +{ + if (resolution == 0) + resolution = mHwc->mHdmiCurrentPreset; + if (s3dMode == S3D_NONE) { + if (mHwc->mHdmiCurrentPreset == resolution) + return; + mHwc->mHdmiPreset = resolution; + mHwc->mHdmiResolutionChanged = true; + mHwc->procs->invalidate(mHwc->procs); + return; + } + + switch (resolution) { + case HDMI_720P_60: + resolution = S3D_720P_60_BASE + s3dMode; + break; + case HDMI_720P_59_94: + resolution = S3D_720P_59_94_BASE + s3dMode; + break; + case HDMI_720P_50: + resolution = S3D_720P_50_BASE + s3dMode; + break; + case HDMI_1080P_24: + resolution = S3D_1080P_24_BASE + s3dMode; + break; + case HDMI_1080P_23_98: + resolution = 
S3D_1080P_23_98_BASE + s3dMode; + break; + case HDMI_1080P_30: + resolution = S3D_1080P_30_BASE + s3dMode; + break; + case HDMI_1080I_60: + if (s3dMode != S3D_SBS) + return; + resolution = V4L2_DV_1080I60_SB_HALF; + break; + case HDMI_1080I_59_94: + if (s3dMode != S3D_SBS) + return; + resolution = V4L2_DV_1080I59_94_SB_HALF; + break; + case HDMI_1080P_60: + if (s3dMode != S3D_SBS && s3dMode != S3D_TB) + return; + resolution = S3D_1080P_60_BASE + s3dMode; + break; + default: + return; + } + mHwc->mHdmiPreset = resolution; + mHwc->mHdmiResolutionChanged = true; + mHwc->mS3DMode = S3D_MODE_READY; + mHwc->procs->invalidate(mHwc->procs); +} + +int ExynosExternalDisplay::setActiveConfig(int index) +{ + if (!mHwc->hdmi_hpd) + return -1; + /* Find Preset with index*/ + int preset = -1; + unsigned int s3dMode = S3D_NONE; + preset = (int)preset_index_mappings[mConfigurations[index]].preset; + + if (preset < 0) { + ALOGE("%s:: Unsupported preset, index(%d)", __func__, index); + return -1; + } + + v4l2_dv_timings dv_timing = dv_timings[preset_index_mappings[mConfigurations[index]].dv_timings_index]; + if (dv_timing.type == V4L2_DV_BT_SB_HALF) + s3dMode = S3D_SBS; + else if (dv_timing.type == V4L2_DV_BT_TB) + s3dMode = S3D_TB; + else + s3dMode = S3D_NONE; + + setHdmiResolution(preset, s3dMode); + mActiveConfigIndex = index; + return 0; +} + +int32_t ExynosExternalDisplay::getDisplayAttributes(const uint32_t attribute, uint32_t config) +{ + if (config >= SUPPORTED_DV_TIMINGS_NUM) { + ALOGE("%s:: Invalid config(%d), mConfigurations(%d)", __func__, config, mConfigurations.size()); + return -EINVAL; + } + + v4l2_dv_timings dv_timing = dv_timings[preset_index_mappings[config].dv_timings_index]; + switch(attribute) { + case HWC_DISPLAY_VSYNC_PERIOD: + { + float refreshRate = (float)((float)dv_timing.bt.pixelclock / + ((dv_timing.bt.width + dv_timing.bt.hfrontporch + dv_timing.bt.hsync + dv_timing.bt.hbackporch) * + (dv_timing.bt.height + dv_timing.bt.vfrontporch + dv_timing.bt.vsync + dv_timing.bt.vbackporch))); + return (1000000000/refreshRate); + } + case HWC_DISPLAY_WIDTH: + return dv_timing.bt.width; + + case HWC_DISPLAY_HEIGHT: + return dv_timing.bt.height; + + case HWC_DISPLAY_DPI_X: + return this->mXdpi; + + case HWC_DISPLAY_DPI_Y: + return this->mYdpi; + + default: + ALOGE("unknown display attribute %u", attribute); + return -EINVAL; + } +} + +void ExynosExternalDisplay::cleanConfigurations() +{ + mConfigurations.clear(); +} + +void ExynosExternalDisplay::dumpConfigurations() +{ + ALOGI("External display configurations:: total(%d), active configuration(%d)", + mConfigurations.size(), mActiveConfigIndex); + for (size_t i = 0; i < mConfigurations.size(); i++ ) { + unsigned int dv_timings_index = preset_index_mappings[mConfigurations[i]].dv_timings_index; + v4l2_dv_timings configuration = dv_timings[dv_timings_index]; + float refresh_rate = (float)((float)configuration.bt.pixelclock / + ((configuration.bt.width + configuration.bt.hfrontporch + configuration.bt.hsync + configuration.bt.hbackporch) * + (configuration.bt.height + configuration.bt.vfrontporch + configuration.bt.vsync + configuration.bt.vbackporch))); + uint32_t vsyncPeriod = 1000000000 / refresh_rate; + ALOGI("%d : type(%d), %d x %d, fps(%f), vsyncPeriod(%d)", i, configuration.type, configuration.bt.width, + configuration.bt.height, + refresh_rate, vsyncPeriod); + } +} + +int ExynosExternalDisplay::enable() +{ + if (mEnabled) + return 0; + + if (mBlanked) + return 0; + + char value[PROPERTY_VALUE_MAX]; + 
property_get("persist.hdmi.hdcp_enabled", value, "1"); + int hdcp_enabled = atoi(value); + ALOGD("%s:: hdcp_enabled (%d)", __func__, hdcp_enabled); + + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_HDCP; + hdmi_data.hdcp = hdcp_enabled; + + if ((mDisplayFd < 0) && (openHdmi() < 0)) + return -1; + + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to set HDCP status %d", __func__, errno); + } + + /* "2" is RGB601_16_235 */ + property_get("persist.hdmi.color_range", value, "2"); + int color_range = atoi(value); + +#if 0 // This should be changed + if (exynos_v4l2_s_ctrl(mMixerLayers[mUiIndex].fd, V4L2_CID_TV_SET_COLOR_RANGE, + color_range) < 0) + ALOGE("%s: s_ctrl(CID_TV_COLOR_RANGE) failed %d", __func__, errno); +#endif + + int err = ioctl(mDisplayFd, FBIOBLANK, FB_BLANK_UNBLANK); + if (err < 0) { + if (errno == EBUSY) + ALOGI("unblank ioctl failed (display already unblanked)"); + else + ALOGE("unblank ioctl failed: %s", strerror(errno)); + return -errno; + } + + mEnabled = true; + return 0; +} + +void ExynosExternalDisplay::disable() +{ + if (!mEnabled) + return; + + blank(); + + mEnabled = false; + checkIONBufferPrepared(); +} + +void ExynosExternalDisplay::setPreset(int preset) +{ + mHwc->mHdmiResolutionChanged = false; + mHwc->mHdmiResolutionHandled = false; + mHwc->hdmi_hpd = false; + int dv_timings_index = getDVTimingsIndex(preset); + if (dv_timings_index < 0) { + ALOGE("invalid preset(%d)", preset); + return; + } + + disable(); + + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_PRESET; + hdmi_data.timings = dv_timings[dv_timings_index]; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) != -1) { + if (mHwc->procs) + mHwc->procs->hotplug(mHwc->procs, HWC_DISPLAY_EXTERNAL, false); + } +} + +int ExynosExternalDisplay::convert3DTo2D(int preset) +{ + switch (preset) { + case V4L2_DV_720P60_FP: + case V4L2_DV_720P60_SB_HALF: + case V4L2_DV_720P60_TB: + return V4L2_DV_720P60; + case V4L2_DV_720P50_FP: + case V4L2_DV_720P50_SB_HALF: + case V4L2_DV_720P50_TB: + return V4L2_DV_720P50; + case V4L2_DV_1080P60_SB_HALF: + case V4L2_DV_1080P60_TB: + return V4L2_DV_1080P60; + case V4L2_DV_1080P30_FP: + case V4L2_DV_1080P30_SB_HALF: + case V4L2_DV_1080P30_TB: + return V4L2_DV_1080P30; + default: + return HDMI_PRESET_ERROR; + } +} + +void ExynosExternalDisplay::setHdcpStatus(int status) +{ + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_HDCP; + hdmi_data.hdcp = !!status; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to set HDCP status %d", __func__, errno); + } +} + +void ExynosExternalDisplay::setAudioChannel(uint32_t channels) +{ + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_AUDIO; + hdmi_data.audio_info = channels; + if (ioctl(this->mDisplayFd, EXYNOS_SET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to set audio channels %d", __func__, errno); + } +} + +uint32_t ExynosExternalDisplay::getAudioChannel() +{ + int channels = 0; + + exynos_hdmi_data hdmi_data; + hdmi_data.state = hdmi_data.EXYNOS_HDMI_STATE_AUDIO; + if (ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: failed to get audio channels %d", __func__, errno); + } + channels = hdmi_data.audio_info; + + return channels; +} + +int ExynosExternalDisplay::getCecPaddr() +{ + if (!mHwc->hdmi_hpd) + return -1; + + exynos_hdmi_data hdmi_data; + + hdmi_data.state = 
hdmi_data.EXYNOS_HDMI_STATE_CEC_ADDR; + if (ioctl(this->mDisplayFd, EXYNOS_GET_HDMI_CONFIG, &hdmi_data) < 0) { + ALOGE("%s: g_dv_timings error, %d", __func__, errno); + return -1; + } + + return (int)hdmi_data.cec_addr; +} + +int ExynosExternalDisplay::blank() +{ + int fence = clearDisplay(); + if (fence >= 0) + close(fence); + int err = ioctl(mDisplayFd, FBIOBLANK, FB_BLANK_POWERDOWN); + if (err < 0) { + if (errno == EBUSY) + ALOGI("blank ioctl failed (display already blanked)"); + else + ALOGE("blank ioctl failed: %s", strerror(errno)); + return -errno; + } + + return 0; +} + +int ExynosExternalDisplay::clearDisplay() +{ + if (!mEnabled) + return 0; + return ExynosDisplay::clearDisplay(); +} + +void ExynosExternalDisplay::requestIONMemory() +{ + if (mReserveMemFd > 0) { + unsigned int value; + char buffer[4096]; + memset(buffer, 0, sizeof(buffer)); + int err = lseek(mReserveMemFd, 0, SEEK_SET); + err = read(mReserveMemFd, buffer, sizeof(buffer)); + value = atoi(buffer); + + if ((err > 0) && (value == 0)) { + memset(buffer, 0, sizeof(buffer)); + buffer[0] = '2'; + if (write(mReserveMemFd, buffer, sizeof(buffer)) < 0) + ALOGE("fail to request isolation of memmory for HDMI"); + else + ALOGV("isolation of memmory for HDMI was requested"); + } else { + if (err < 0) + ALOGE("fail to read hdmi_reserve_mem_fd"); + else + ALOGE("ion memmory for HDMI is isolated already"); + } + } +} +void ExynosExternalDisplay::freeIONMemory() +{ + if ((mHwc->hdmi_hpd == false) && (mReserveMemFd > 0)) { + unsigned int value; + char buffer[4096]; + int ret = 0; + memset(buffer, 0, sizeof(buffer)); + int err = lseek(mReserveMemFd, 0, SEEK_SET); + err = read(mReserveMemFd, buffer, sizeof(buffer)); + value = atoi(buffer); + if ((err > 0) && (value == 1)) { + memset(buffer, 0, sizeof(buffer)); + buffer[0] = '0'; + if (write(mReserveMemFd, buffer, sizeof(buffer)) < 0) + ALOGE("fail to request isolation of memmory for HDMI"); + else + ALOGV("deisolation of memmory for HDMI was requested"); + } else { + if (err < 0) + ALOGE("fail to read hdmi_reserve_mem_fd"); + else + ALOGE("ion memmory for HDMI is deisolated already"); + } + mFlagIONBufferAllocated = false; + } +} +bool ExynosExternalDisplay::checkIONBufferPrepared() +{ + if (mFlagIONBufferAllocated) + return true; + + if ((mReserveMemFd > 0)) { + unsigned int value; + char buffer[4096]; + int ret = 0; + memset(buffer, 0, sizeof(buffer)); + int err = lseek(mReserveMemFd, 0, SEEK_SET); + err = read(mReserveMemFd, buffer, sizeof(buffer)); + value = atoi(buffer); + + if ((err > 0) && (value == 1)) { + mFlagIONBufferAllocated = true; + return true; + } else { + mFlagIONBufferAllocated = false; + return false; + } + return false; + } else { + /* isolation of video_ext is not used */ + mFlagIONBufferAllocated = true; + return true; + } +} + +int ExynosExternalDisplay::configureDRMSkipHandle(decon_win_config &cfg) +{ + int err = 0; + private_handle_t *dst_handle = NULL; + + if (mDRMTempBuffer == NULL) { + int dst_stride; + int usage = GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | + GRALLOC_USAGE_HW_COMPOSER; + + err = mAllocDevice->alloc(mAllocDevice, 32, 32, HAL_PIXEL_FORMAT_BGRA_8888, + usage, &mDRMTempBuffer, &dst_stride); + if (err < 0) { + ALOGE("failed to allocate destination buffer(%dx%d): %s", 32, 32, + strerror(-err)); + return err; + } else { + ALOGV("temBuffer for DRM video was allocated"); + } + } + + dst_handle = private_handle_t::dynamicCast(mDRMTempBuffer); + cfg.state = cfg.DECON_WIN_STATE_BUFFER; + cfg.fd_idma[0] = dst_handle->fd; + 
cfg.fd_idma[1] = dst_handle->fd1; + cfg.fd_idma[2] = dst_handle->fd2; + cfg.dst.f_w = dst_handle->stride; + cfg.dst.f_h = dst_handle->vstride; + cfg.dst.x = 0; + cfg.dst.y = 0; + cfg.dst.w = cfg.dst.f_w; + cfg.dst.h = cfg.dst.f_h; + cfg.format = halFormatToS3CFormat(HAL_PIXEL_FORMAT_RGBX_8888); + + cfg.src.f_w = dst_handle->stride; + cfg.src.f_h = dst_handle->vstride; + cfg.src.x = 0; + cfg.src.y = 0; + cfg.src.w = cfg.dst.f_w; + cfg.src.h = cfg.dst.f_h; + cfg.blending = DECON_BLENDING_NONE; + cfg.fence_fd = -1; + cfg.plane_alpha = 255; + + return 0; +} + +void ExynosExternalDisplay::freeExtVideoBuffers() +{ + if (mFlagIONBufferAllocated) + freeIONMemory(); +} + +void ExynosExternalDisplay::doPreProcessing(hwc_display_contents_1_t* contents) +{ + mInternalDMAs.clear(); + mInternalDMAs.add(IDMA_G3); + ExynosDisplay::doPreProcessing(contents); +} diff --git a/libvpphdmi/ExynosExternalDisplay.h b/libvpphdmi/ExynosExternalDisplay.h new file mode 100644 index 0000000..fd277d5 --- /dev/null +++ b/libvpphdmi/ExynosExternalDisplay.h @@ -0,0 +1,147 @@ +#ifndef EXYNOS_VPP_HDMI_H +#define EXYNOS_VPP_HDMI_H + +#include +#include "ExynosHWC.h" +#include "ExynosDisplay.h" +#include +#include "videodev2_exynos_hdmi.h" + +#define MAX_HDMI_VIDEO_LAYERS 1 + +#define SUPPORTED_DV_TIMINGS_NUM 28 + +struct preset_index_mapping { + int preset; + int dv_timings_index; +}; + +const struct preset_index_mapping preset_index_mappings[SUPPORTED_DV_TIMINGS_NUM] = { + {V4L2_DV_480P59_94, 0}, + {V4L2_DV_576P50, 1}, + {V4L2_DV_720P50, 2}, + {V4L2_DV_720P60, 3}, + {V4L2_DV_1080I50, 4}, + {V4L2_DV_1080I60, 5}, + {V4L2_DV_1080P24, 6}, + {V4L2_DV_1080P25, 7}, + {V4L2_DV_1080P30, 8}, + {V4L2_DV_1080P50, 9}, + {V4L2_DV_1080P60, 10}, + {V4L2_DV_2160P24, 11}, + {V4L2_DV_2160P25, 12}, + {V4L2_DV_2160P30, 13}, + {V4L2_DV_2160P24_1, 14}, + {V4L2_DV_720P60_SB_HALF, 15}, + {V4L2_DV_720P60_TB, 16}, + {V4L2_DV_720P50_SB_HALF, 17}, + {V4L2_DV_720P50_TB, 18}, + {V4L2_DV_1080P24_FP, 19}, + {V4L2_DV_1080P24_SB_HALF, 20}, + {V4L2_DV_1080P24_TB, 21}, + {V4L2_DV_1080I60_SB_HALF, 22}, + {V4L2_DV_1080I50_SB_HALF, 23}, + {V4L2_DV_1080P60_SB_HALF, 24}, + {V4L2_DV_1080P60_TB, 25}, + {V4L2_DV_1080P30_SB_HALF, 26}, + {V4L2_DV_1080P30_TB, 27} +}; + +enum { + HDMI_RESOLUTION_BASE = 0, + HDMI_480P_59_94, + HDMI_576P_50, + HDMI_720P_24, + HDMI_720P_25, + HDMI_720P_30, + HDMI_720P_50, + HDMI_720P_59_94, + HDMI_720P_60, + HDMI_1080I_29_97, + HDMI_1080I_30, + HDMI_1080I_25, + HDMI_1080I_50, + HDMI_1080I_60, + HDMI_1080P_24, + HDMI_1080P_25, + HDMI_1080P_30, + HDMI_1080P_50, + HDMI_1080P_60, + HDMI_480P_60, + HDMI_1080I_59_94, + HDMI_1080P_59_94, + HDMI_1080P_23_98, + HDMI_2160P_30 = 47, +}; + +#define S3D_720P_60_BASE 22 +#define S3D_720P_59_94_BASE 25 +#define S3D_720P_50_BASE 28 +#define S3D_1080P_24_BASE 31 +#define S3D_1080P_23_98_BASE 34 +#define S3D_1080P_60_BASE 39 +#define S3D_1080P_30_BASE 42 + +class ExynosExternalDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosExternalDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosExternalDisplay(); + + void setHdmiStatus(bool status); + + bool isPresetSupported(unsigned int preset); + int getConfig(); + int enable(); + void disable(); + void setPreset(int preset); + int convert3DTo2D(int preset); + void setHdcpStatus(int status); + void setAudioChannel(uint32_t channels); + uint32_t getAudioChannel(); + int getCecPaddr(); + bool isIONBufferAllocated() {return mFlagIONBufferAllocated;}; + + virtual int openHdmi(); + virtual void closeHdmi(); + virtual int blank(); + virtual 
int prepare(hwc_display_contents_1_t* contents); + virtual int set(hwc_display_contents_1_t* contents); + virtual void allocateLayerInfos(hwc_display_contents_1_t* contents); + virtual int32_t getDisplayAttributes(const uint32_t attribute, uint32_t config = 0); + virtual void determineYuvOverlay(hwc_display_contents_1_t *contents); + virtual void determineSupportedOverlays(hwc_display_contents_1_t *contents); + virtual int clearDisplay(); + virtual void freeExtVideoBuffers(); + virtual int getDisplayConfigs(uint32_t *configs, size_t *numConfigs); + virtual int getActiveConfig(); + virtual int setActiveConfig(int index); + + bool mEnabled; + bool mBlanked; + + bool mUseSubtitles; + int mReserveMemFd; + android::Vector< unsigned int > mConfigurations; + + protected: + void skipUILayers(hwc_display_contents_1_t *contents); + int getDVTimingsIndex(int preset); + virtual void handleStaticLayers(hwc_display_contents_1_t *contents, struct decon_win_config_data &win_data, int tot_ovly_wins); + void cleanConfigurations(); + void dumpConfigurations(); + virtual void configureHandle(private_handle_t *handle, size_t index, hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg); + virtual int postMPPM2M(hwc_layer_1_t &layer, struct decon_win_config *config, int win_map, int index); + virtual int postFrame(hwc_display_contents_1_t *contents); + virtual void doPreProcessing(hwc_display_contents_1_t* contents); + private: + buffer_handle_t mDRMTempBuffer; + bool mFlagIONBufferAllocated; + void requestIONMemory(); + void freeIONMemory(); + bool checkIONBufferPrepared(); + int configureDRMSkipHandle(decon_win_config &cfg); + void setHdmiResolution(int resolution, int s3dMode); +}; + +#endif diff --git a/libvpphdmi/NOTICE b/libvpphdmi/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libvpphdmi/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + diff --git a/libvpphdmi/decon_tv.h b/libvpphdmi/decon_tv.h new file mode 100644 index 0000000..b5b6e49 --- /dev/null +++ b/libvpphdmi/decon_tv.h @@ -0,0 +1,24 @@ +#ifndef SAMSUNG_DECON_TV_H +#define SAMSUNG_DECON_TV_H + +struct exynos_hdmi_data { + enum { + EXYNOS_HDMI_STATE_PRESET = 0, + EXYNOS_HDMI_STATE_ENUM_PRESET, + EXYNOS_HDMI_STATE_CEC_ADDR, + EXYNOS_HDMI_STATE_HDCP, + EXYNOS_HDMI_STATE_AUDIO, + } state; + struct v4l2_dv_timings timings; + struct v4l2_enum_dv_timings etimings; + __u32 cec_addr; + __u32 audio_info; + int hdcp; +}; + +#define EXYNOS_GET_HDMI_CONFIG _IOW('F', 220, \ + struct exynos_hdmi_data) +#define EXYNOS_SET_HDMI_CONFIG _IOW('F', 221, \ + struct exynos_hdmi_data) + +#endif /* SAMSUNG_DECON_TV_H */ diff --git a/libvpphdmi/dv_timings.c b/libvpphdmi/dv_timings.c new file mode 100644 index 0000000..273cbb9 --- /dev/null +++ b/libvpphdmi/dv_timings.c @@ -0,0 +1,35 @@ +#include +#include "videodev2_exynos_hdmi.h" +#include "decon_tv.h" + +const struct v4l2_dv_timings dv_timings[] = { + {.type = V4L2_DV_BT_656_1120, .bt = {720, 480, 0, 0, 27000000, 16, 62, 60, 9, 6, 30, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_720X480P59_94 */ + {.type = V4L2_DV_BT_656_1120, .bt = {720, 576, 0, 0, 27000000, 12, 64, 68, 5, 5, 39, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_720X576P50 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_1280X720P50 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1280X720P60 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 528, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080I50 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS | V4L2_DV_FL_HALF_LINE, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080I60 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P24 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 528, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P25 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P30 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 528, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P50 */ + {.type = V4L2_DV_BT_656_1120, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P60 */ + {.type = V4L2_DV_BT_656_1120, .bt = {3840, 2160, 0, V4L2_DV_HSYNC_POS_POL,
297000000, 1276, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_3840X2160P24 */ + {.type = V4L2_DV_BT_656_1120, .bt = {3840, 2160, 0, V4L2_DV_HSYNC_POS_POL, 297000000, 1056, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_3840X2160P25 */ + {.type = V4L2_DV_BT_656_1120, .bt = {3840, 2160, 0, V4L2_DV_HSYNC_POS_POL, 297000000, 176, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_3840X2160P30 */ + {.type = V4L2_DV_BT_656_1120, .bt = {4096, 2160, 0, V4L2_DV_HSYNC_POS_POL, 297000000, 1020, 88, 296, 8, 10, 72, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_4096X2160P24 */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1280X720P60_SB_HALF */ + {.type = V4L2_DV_BT_TB, .bt = {1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 110, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1280X720P60_TB */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_1280X720P50_SB_HALF */ + {.type = V4L2_DV_BT_TB, .bt = {1280, 720, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 440, 40, 220, 5, 5, 20, 0, 0, 0, V4L2_DV_BT_STD_CEA861, 0, {0, }}}, /* V4L2_DV_BT_CEA_1280X720P50_TB */ + {.type = V4L2_DV_BT_FP, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P24_FP */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P24_SB_HALF */ + {.type = V4L2_DV_BT_TB, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 638, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P24_TB */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS | V4L2_DV_FL_HALF_LINE, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080I60_SB_HALF */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1920, 1080, 1, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 528, 44, 148, 2, 5, 15, 2, 5, 16, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_HALF_LINE, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080I50_SB_HALF */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P60_SB_HALF */ + {.type = V4L2_DV_BT_TB, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 148500000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_DMT | V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P60_TB */ + {.type = V4L2_DV_BT_SB_HALF, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, 
V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}}, /* V4L2_DV_BT_CEA_1920X1080P30_SB_HALF */ + {.type = V4L2_DV_BT_TB, .bt = {1920, 1080, 0, V4L2_DV_HSYNC_POS_POL | V4L2_DV_VSYNC_POS_POL, 74250000, 88, 44, 148, 4, 5, 36, 0, 0, 0, V4L2_DV_BT_STD_CEA861, V4L2_DV_FL_CAN_REDUCE_FPS, {0, }}} /* V4L2_DV_BT_CEA_1920X1080P30_TB */ +}; + diff --git a/libvpphdmi/videodev2_exynos_hdmi.h b/libvpphdmi/videodev2_exynos_hdmi.h new file mode 100644 index 0000000..26639da --- /dev/null +++ b/libvpphdmi/videodev2_exynos_hdmi.h @@ -0,0 +1,86 @@ +/**************************************************************************** + **************************************************************************** + *** + *** This header was automatically generated from a Linux kernel header + *** of the same name, to make information necessary for userspace to + *** call into the kernel available to libc. It contains only constants, + *** structures, and macros generated from the original header, and thus, + *** contains no copyrightable information. + *** + *** To edit the content of this header, modify the corresponding + *** source file (e.g. under external/kernel-headers/original/) then + *** run bionic/libc/kernel/tools/update_all.py + *** + *** Any manual change here will be lost the next time this script will + *** be run. You've been warned! + *** + **************************************************************************** + ****************************************************************************/ +#ifndef __LINUX_VIDEODEV2_EXYNOS_HDMI_H +#define __LINUX_VIDEODEV2_EXYNOS_HDMI_H +#define V4L2_DV_BT_SB_HALF (1 << 8) +#define V4L2_DV_BT_TB (1 << 6) +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_BT_FP (1 << 0) +#define V4L2_DV_INVALID 0 +#define V4L2_DV_480P59_94 1 +#define V4L2_DV_576P50 2 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P24 3 +#define V4L2_DV_720P25 4 +#define V4L2_DV_720P30 5 +#define V4L2_DV_720P50 6 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P59_94 7 +#define V4L2_DV_720P60 8 +#define V4L2_DV_1080I29_97 9 +#define V4L2_DV_1080I30 10 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080I25 11 +#define V4L2_DV_1080I50 12 +#define V4L2_DV_1080I60 13 +#define V4L2_DV_1080P24 14 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P25 15 +#define V4L2_DV_1080P30 16 +#define V4L2_DV_1080P50 17 +#define V4L2_DV_1080P60 18 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_480P60 19 +#define V4L2_DV_1080I59_94 20 +#define V4L2_DV_1080P59_94 21 +#define V4L2_DV_720P60_FP 22 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P60_SB_HALF 23 +#define V4L2_DV_720P60_TB 24 +#define V4L2_DV_720P59_94_FP 25 +#define V4L2_DV_720P59_94_SB_HALF 26 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_720P59_94_TB 27 +#define V4L2_DV_720P50_FP 28 +#define V4L2_DV_720P50_SB_HALF 29 +#define V4L2_DV_720P50_TB 30 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P24_FP 31 +#define V4L2_DV_1080P24_SB_HALF 32 +#define V4L2_DV_1080P24_TB 33 +#define V4L2_DV_1080P23_98_FP 34 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P23_98_SB_HALF 35 +#define 
V4L2_DV_1080P23_98_TB 36 +#define V4L2_DV_1080I60_SB_HALF 37 +#define V4L2_DV_1080I59_94_SB_HALF 38 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080I50_SB_HALF 39 +#define V4L2_DV_1080P60_SB_HALF 40 +#define V4L2_DV_1080P60_TB 41 +#define V4L2_DV_1080P30_FP 42 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_1080P30_SB_HALF 43 +#define V4L2_DV_1080P30_TB 44 +#define V4L2_DV_2160P24 45 +#define V4L2_DV_2160P25 46 +/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */ +#define V4L2_DV_2160P30 47 +#define V4L2_DV_2160P24_1 48 +#endif diff --git a/libvppvirtualdisplay/Android.mk b/libvppvirtualdisplay/Android.mk new file mode 100644 index 0000000..a710050 --- /dev/null +++ b/libvppvirtualdisplay/Android.mk @@ -0,0 +1,52 @@ +# Copyright (C) 2008 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_PRELINK_MODULE := false +LOCAL_SHARED_LIBRARIES := liblog libutils libcutils libexynosutils \ + libexynosv4l2 libsync libhwcutils libdisplay libmpp libion_exynos libdisplay + +LOCAL_CFLAGS += -DLOG_TAG=\"virtualdisplay\" + +LOCAL_C_INCLUDES := \ + $(LOCAL_PATH)/../include \ + $(LOCAL_PATH)/../libhwcutils \ + $(LOCAL_PATH)/../libvppdisplay \ + $(LOCAL_PATH)/../libhwc \ + $(TOP)/hardware/samsung_slsi/$(TARGET_BOARD_PLATFORM)/include \ + $(TOP)/hardware/samsung_slsi/exynos/libhwc \ + $(TOP)/hardware/samsung_slsi/exynos/libhwcService \ + $(TOP)/hardware/samsung_slsi/exynos/libexynosutils \ + $(TOP)/hardware/samsung_slsi/exynos/libvppdisplay \ + $(TOP)/hardware/samsung_slsi/exynos/libvpphdmi \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/include \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libhwcutilsmodule \ + $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libdisplaymodule \ + $(TOP)/hardware/samsung_slsi/exynos/libmpp \ + $(TOP)/system/core/libsync/include + +LOCAL_SRC_FILES := \ + ExynosVirtualDisplay.cpp + +include $(TOP)/hardware/samsung_slsi/$(TARGET_SOC)/libvirtualdisplaymodule/Android.mk + +LOCAL_MODULE_TAGS := eng +LOCAL_MODULE := libvirtualdisplay + +include $(TOP)/hardware/samsung_slsi/exynos/BoardConfigCFlags.mk +include $(BUILD_SHARED_LIBRARY) + diff --git a/libvppvirtualdisplay/ExynosVirtualDisplay.cpp b/libvppvirtualdisplay/ExynosVirtualDisplay.cpp new file mode 100644 index 0000000..a2404b1 --- /dev/null +++ b/libvppvirtualdisplay/ExynosVirtualDisplay.cpp @@ -0,0 +1,1107 @@ +#define ATRACE_TAG ATRACE_TAG_GRAPHICS + +//#define LOG_NDEBUG 0 +#define LOG_TAG "virtualdisplay" +#include "exynos_format.h" +#include "ExynosHWC.h" +#include "ExynosHWCUtils.h" +#include "ExynosMPPModule.h" +#include "ExynosVirtualDisplay.h" +#include "decon_tv.h" +#ifdef USES_DISABLE_COMPOSITIONTYPE_GLES +#include "ExynosPrimaryDisplay.h" +#endif +#include +#include + +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM +#define WAIT_COUNT_FOR_ISOLATION 50 +#endif + 
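Note: the V4L2_DV_* preset codes defined above are resolved to rows of the dv_timings[] table (libvpphdmi/dv_timings.c) through the preset_index_mappings array declared in ExynosExternalDisplay.h. The following is only a hypothetical sketch of that lookup; the actual getDVTimingsIndex() implementation lives in ExynosExternalDisplay.cpp, which is not part of this hunk.

    // Illustrative sketch only (not part of the imported BSP sources): map a
    // V4L2_DV_* preset onto its index in dv_timings[] via preset_index_mappings.
    static int lookupDVTimingsIndex(int preset)
    {
        for (int i = 0; i < SUPPORTED_DV_TIMINGS_NUM; i++) {
            if (preset_index_mappings[i].preset == preset)
                return preset_index_mappings[i].dv_timings_index;
        }
        return -1; /* preset is not one of the 28 supported timings */
    }
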
+ExynosVirtualDisplay::ExynosVirtualDisplay(struct exynos5_hwc_composer_device_1_t *pdev) : + ExynosDisplay(EXYNOS_VIRTUAL_DISPLAY, pdev), + mWidth(0), + mHeight(0), + mDisplayWidth(0), + mDisplayHeight(0), + mIsWFDState(false), + mIsRotationState(false), + mPresentationMode(0), + mDeviceOrientation(0), + mFrameBufferTargetTransform(0), + mCompositionType(COMPOSITION_GLES), + mPrevCompositionType(COMPOSITION_GLES), + mGLESFormat(HAL_PIXEL_FORMAT_RGBA_8888), + mSinkUsage(GRALLOC_USAGE_HW_COMPOSER), + mIsSecureDRM(false), + mIsNormalDRM(false) +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + , mSMemFd(-1), + mSMemProtected(false), + mReserveMemFd(-1), + mFlagIONBufferAllocated(false) +#endif +{ + mXres = 0; + mYres = 0; + mXdpi = 0; + mYdpi = 0; +#ifdef USES_DISABLE_COMPOSITIONTYPE_GLES + mExternalMPPforCSC = new ExynosMPPModule(this, MPP_MSC, 0); + mExternalMPPforCSC->setAllocDevice(pdev->primaryDisplay->mAllocDevice); +#endif + + mOverlayLayer = NULL; + mFBTargetLayer = NULL; + memset(mFBLayer, 0x0, sizeof(hwc_layer_1_t *) * NUM_FRAME_BUFFER); + mNumFB = 0; + +#ifdef USES_VDS_BGRA8888 + mForceDoubleOperation = false; + mExternalMPPDstFormat = HAL_PIXEL_FORMAT_RGBA_8888; +#endif + +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + mSMemFd = open(SMEM_PATH, O_RDWR); + if (mSMemFd < 0) + ALOGE("Fail to open smem_fd %s, error(%d)", SMEM_PATH, mSMemFd); + else + ALOGI("Open %s", SMEM_PATH); + + mReserveMemFd = open(HDMI_RESERVE_MEM_DEV_NAME, O_RDWR); + if (mReserveMemFd < 0) + ALOGE("Fail to open hdmi_reserve_mem_fd %s, error(%d)", HDMI_RESERVE_MEM_DEV_NAME, mReserveMemFd); + else + ALOGI("Open %s", HDMI_RESERVE_MEM_DEV_NAME); +#endif +} + +ExynosVirtualDisplay::~ExynosVirtualDisplay() +{ +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + if (mSMemFd > 0) + close(mSMemFd); + if (mReserveMemFd > 0) + close(mReserveMemFd); +#endif +} + +void ExynosVirtualDisplay::allocateLayerInfos(hwc_display_contents_1_t* contents) +{ + ExynosDisplay::allocateLayerInfos(contents); + +} + +void ExynosVirtualDisplay::setSinkBufferUsage() +{ + ALOGV("setSinkBufferUsage() mSinkUsage 0x%x, mIsSecureDRM %d, mIsNormalDRM %d", + mSinkUsage, mIsSecureDRM, mIsNormalDRM); + mSinkUsage = GRALLOC_USAGE_HW_COMPOSER; + + if (mIsSecureDRM) { + mSinkUsage |= GRALLOC_USAGE_SW_READ_NEVER | + GRALLOC_USAGE_SW_WRITE_NEVER | + GRALLOC_USAGE_PROTECTED; +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + if (mReserveMemFd > 0) { + mSinkUsage |= GRALLOC_USAGE_VIDEO_EXT; + if (!mSMemProtected) { + setMemoryProtection(1); + mSMemProtected = true; + } + } +#endif + } else if (mIsNormalDRM) + mSinkUsage |= GRALLOC_USAGE_PRIVATE_NONSECURE; + +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + if (mReserveMemFd > 0 && !mIsSecureDRM && mSMemProtected) { + setMemoryProtection(0); + mSMemProtected = false; + } +#endif + + ALOGV("Sink Buffer's Usage: 0x%x", mSinkUsage); +} + +int ExynosVirtualDisplay::prepare(hwc_display_contents_1_t* contents) +{ + ATRACE_CALL(); + ALOGV("prepare %u layers for virtual, outbuf %p", contents->numHwLayers, contents->outbuf); + + int ret = 0; + mCompositionType = COMPOSITION_GLES; + mOverlayLayer = NULL; + mFBTargetLayer = NULL; + mNumFB = 0; + + determineSkipLayer(contents); + + /* determine composition type */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER) { + mNumFB++; + if (!layer.handle) + continue; + + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + ALOGV("FB layer %d f=%x, w=%d, h=%d, s=%d, vs=%d, " + "{%.1f,%.1f,%.1f,%.1f}, {%d,%d,%d,%d}" + "type=%d, 
flags=%08x, handle=%p, tr=%02x, blend=%04x", + i, h->format, h->width, h->height, h->stride, h->vstride, + layer.sourceCropf.left, layer.sourceCropf.top, + layer.sourceCropf.right, layer.sourceCropf.bottom, + layer.displayFrame.left, layer.displayFrame.top, + layer.displayFrame.right, layer.displayFrame.bottom, + layer.compositionType, layer.flags, layer.handle, layer.transform, layer.blending); + } + + if (layer.compositionType == HWC_OVERLAY) { + if (!layer.handle) + continue; + + if (layer.flags & HWC_SKIP_RENDERING) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + ALOGV("Overlay layer %d f=%x, w=%d, h=%d, s=%d, vs=%d, " + "{%.1f,%.1f,%.1f,%.1f}, {%d,%d,%d,%d}" + "type=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x", + i, h->format, h->width, h->height, h->stride, h->vstride, + layer.sourceCropf.left, layer.sourceCropf.top, + layer.sourceCropf.right, layer.sourceCropf.bottom, + layer.displayFrame.left, layer.displayFrame.top, + layer.displayFrame.right, layer.displayFrame.bottom, + layer.compositionType, layer.flags, layer.handle, layer.transform, layer.blending); + + mOverlayLayer = &layer; + continue; + } + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + if (!layer.handle) + continue; + + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + ALOGV("FB target layer %d f=%x, w=%d, h=%d, s=%d, vs=%d, " + "{%.1f,%.1f,%.1f,%.1f}, {%d,%d,%d,%d}" + "type=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x", + i, h->format, h->width, h->height, h->stride, h->vstride, + layer.sourceCropf.left, layer.sourceCropf.top, + layer.sourceCropf.right, layer.sourceCropf.bottom, + layer.displayFrame.left, layer.displayFrame.top, + layer.displayFrame.right, layer.displayFrame.bottom, + layer.compositionType, layer.flags, layer.handle, layer.transform, layer.blending); + + mFBTargetLayer = &layer; + continue; + } + } + + if (mOverlayLayer && (mNumFB > 0)) + mCompositionType = COMPOSITION_MIXED; + else if (mOverlayLayer || mIsRotationState) + mCompositionType = COMPOSITION_HWC; + ALOGV("mCompositionType 0x%x, mPrevCompositionType 0x%x, overlay_layer 0x%p, mNumFB %d", + mCompositionType, mPrevCompositionType, mOverlayLayer, mNumFB); + + if (mCompositionType == COMPOSITION_GLES) + blank(); + else + unblank(); +#ifdef USES_DISABLE_COMPOSITIONTYPE_GLES + if (mCompositionType == COMPOSITION_GLES) { + mHwc->mVirtualDisplayRect.left = 0; + mHwc->mVirtualDisplayRect.top = 0; + mHwc->mVirtualDisplayRect.width = mWidth; + mHwc->mVirtualDisplayRect.height = mHeight; + mCompositionType = COMPOSITION_MIXED; + } +#endif + +#ifdef USES_VDS_BGRA8888 + if (mCompositionType != COMPOSITION_GLES && contents->outbuf) { + private_handle_t *outbuf_handle = private_handle_t::dynamicCast(contents->outbuf); + if (outbuf_handle) { + mGLESFormat = outbuf_handle->format; + } + } +#endif + + setSinkBufferUsage(); + + return 0; +} + +void ExynosVirtualDisplay::configureHandle(private_handle_t *handle, size_t index, + hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg) +{ + ExynosDisplay::configureHandle(handle, index, layer, fence_fd, cfg); +#ifdef USES_VDS_BGRA8888 + if (mForceDoubleOperation == true && handle->format == HAL_PIXEL_FORMAT_RGBA_8888) + cfg.format = halFormatToS3CFormat(HAL_PIXEL_FORMAT_BGRA_8888); +#endif +} + +void ExynosVirtualDisplay::configureWriteBack(hwc_display_contents_1_t __unused *contents, + decon_win_config_data __unused &win_data) +{ +} + +int 
ExynosVirtualDisplay::postFrame(hwc_display_contents_1_t* contents) +{ +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + ATRACE_CALL(); + struct decon_win_config_data win_data; + struct decon_win_config *config = win_data.config; + int win_map = 0; + int tot_ovly_wins = 0; + bool hasSecureLayer = false; + + memset(mLastHandles, 0, sizeof(mLastHandles)); + memset(mLastMPPMap, 0, sizeof(mLastMPPMap)); + memset(config, 0, sizeof(win_data.config)); + + if (contents->outbuf) { + configureWriteBack(contents, win_data); + } + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + config[i].fence_fd = -1; + mLastMPPMap[i].internal_mpp.type = -1; + mLastMPPMap[i].external_mpp.type = -1; + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + private_handle_t *handle = NULL; + if (layer.handle != NULL) + handle = private_handle_t::dynamicCast(layer.handle); + int32_t window_index = mLayerInfos[i]->mWindowIndex + DECON_EXT_BASE_WINDOW; + + if ((layer.flags & HWC_SKIP_RENDERING) || handle == NULL || + ((layer.compositionType == HWC_OVERLAY) && + ((window_index < 0) || (window_index > MAX_DECON_WIN)))) { + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + layer.acquireFenceFd = -1; + layer.releaseFenceFd = -1; + + if ((window_index < 0) || (window_index > MAX_DECON_WIN)) { + android::String8 result; + ALOGW("window of layer %d was not assigned (window_index: %d)", i, window_index); + dumpContents(result, contents); + ALOGW(result.string()); + result.clear(); + dumpLayerInfo(result); + ALOGW(result.string()); + } + + continue; + } + + if ((layer.compositionType == HWC_OVERLAY) || + (mFbNeeded == true && layer.compositionType == HWC_FRAMEBUFFER_TARGET)) { + mLastHandles[window_index] = layer.handle; + + if ((getDrmMode(handle->flags) == SECURE_DRM) && layer.compositionType != HWC_FRAMEBUFFER_TARGET) { + config[window_index].protection = 1; + hasSecureLayer = true; + } + else + config[window_index].protection = 0; + + if (mLayerInfos[i]->mInternalMPP != NULL) { + mLastMPPMap[window_index].internal_mpp.type = mLayerInfos[i]->mInternalMPP->mType; + mLastMPPMap[window_index].internal_mpp.index = mLayerInfos[i]->mInternalMPP->mIndex; + } + if (mLayerInfos[i]->mExternalMPP != NULL) { + mLastMPPMap[window_index].external_mpp.type = mLayerInfos[i]->mExternalMPP->mType; + mLastMPPMap[window_index].external_mpp.index = mLayerInfos[i]->mExternalMPP->mIndex; + if (postMPPM2M(layer, config, window_index, i) < 0) + continue; + } else { + configureOverlay(&layer, i, config[window_index]); + } + } + if (window_index == 0 && config[window_index].blending != DECON_BLENDING_NONE) { + ALOGV("blending not supported on window 0; forcing BLENDING_NONE"); + config[window_index].blending = DECON_BLENDING_NONE; + } + } + + for (size_t i = 0; i < NUM_HW_WINDOWS; i++) { + ALOGV("window %u configuration:", i); + dumpConfig(config[i]); + } + + /* This should be enabled */ +#if 0 + if (this->mVirtualOverlayFlag) { + handleStaticLayers(contents, win_data, tot_ovly_wins); + } +#endif + + if (mIsSecureDRM != hasSecureLayer) { + ALOGW("secure state mismatch (mIsSecureDRM: %d, hasSecureLayer: %d)", mIsSecureDRM, hasSecureLayer); + private_handle_t *outbuf_handle = private_handle_t::dynamicCast(contents->outbuf); + if (outbuf_handle) + ALOGW("outbuf format: 0x%x, mCompositionType: %d", outbuf_handle->format, mCompositionType); + } + + int ret = winconfigIoctl(&win_data); + ALOGV("ExynosDisplay::postFrame() ioctl(S3CFB_WIN_CONFIG) ret %d", ret); + + for (size_t i = 0; i < 
NUM_HW_WINDOWS; i++) + if (config[i].fence_fd != -1) + close(config[i].fence_fd); + if (ret < 0) { + ALOGE("ioctl S3CFB_WIN_CONFIG failed: %s", strerror(errno)); + return ret; + } + if (contents->numHwLayers == 1) { + hwc_layer_1_t &layer = contents->hwLayers[0]; + if (layer.acquireFenceFd >= 0) + close(layer.acquireFenceFd); + } + + memcpy(&(this->mLastConfigData), &win_data, sizeof(win_data)); + + if (!this->mVirtualOverlayFlag) + this->mLastFbWindow = mFbWindow; + + return win_data.fence; +#else + return 0; +#endif +} + +void ExynosVirtualDisplay::processGles(hwc_display_contents_1_t* contents) +{ + ALOGV("processGles, mFBTargetLayer->acquireFenceFd %d, mFBTargetLayer->releaseFenceFd %d, contents->outbufAcquireFenceFd %d", + mFBTargetLayer->acquireFenceFd, mFBTargetLayer->releaseFenceFd, contents->outbufAcquireFenceFd); + int ret = 0; + if (mFBTargetLayer != NULL && mFBTargetLayer->acquireFenceFd >= 0) + contents->retireFenceFd = mFBTargetLayer->acquireFenceFd; + + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } +} + +void ExynosVirtualDisplay::processHwc(hwc_display_contents_1_t* contents) +{ + ALOGV("processHwc, mFBTargetLayer->acquireFenceFd %d, mFBTargetLayer->releaseFenceFd %d, contents->outbufAcquireFenceFd %d", + mFBTargetLayer->acquireFenceFd, mFBTargetLayer->releaseFenceFd, contents->outbufAcquireFenceFd); + + ExynosDisplay::set(contents); + + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } +} + +void ExynosVirtualDisplay::processMixed(hwc_display_contents_1_t* contents) +{ + ALOGV("processMixed, mFBTargetLayer->acquireFenceFd %d, mFBTargetLayer->releaseFenceFd %d, contents->outbufAcquireFenceFd %d", + mFBTargetLayer->acquireFenceFd, mFBTargetLayer->releaseFenceFd, contents->outbufAcquireFenceFd); + + ExynosDisplay::set(contents); + + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } +} + +int ExynosVirtualDisplay::set(hwc_display_contents_1_t* contents) +{ + ALOGV("set %u layers for virtual, mCompositionType %d, contents->outbuf %p", + contents->numHwLayers, mCompositionType, contents->outbuf); + mOverlayLayer = NULL; + mFBTargetLayer = NULL; + mNumFB = 0; + int IsNormalDRMWithSkipLayer = false; + int err = 0; + private_handle_t *outBufHandle = private_handle_t::dynamicCast(contents->outbuf); + + /* Find normal drm layer with HWC_SKIP_LAYER */ + /* HDCP disabled and normal DRM playback */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + if (layer.flags & HWC_SKIP_LAYER) { + ALOGV("skipping layer %d", i); + if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(h->flags) == NORMAL_DRM && mIsWFDState) { + ALOGV("skipped normal drm layer %d", i); + IsNormalDRMWithSkipLayer = true; + } + } + continue; + } + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER) { + if (!layer.handle) + continue; + ALOGV("framebuffer layer %d", i); + mNumFB++; + } + + if (layer.compositionType == HWC_OVERLAY) { + if (!layer.handle) + continue; + + if (layer.flags & HWC_SKIP_RENDERING) { + layer.releaseFenceFd = layer.acquireFenceFd; + continue; + } + + if (outBufHandle == NULL && layer.acquireFenceFd != -1) { + close(layer.acquireFenceFd); + layer.acquireFenceFd 
= -1; + continue; + } + + ALOGV("overlay layer %d", i); + mOverlayLayer = &layer; + continue; + } + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + if (!layer.handle) + continue; + + ALOGV("FB target layer %d", i); + + mFBTargetLayer = &layer; + continue; + } + } + + if (outBufHandle == NULL) { + ALOGE("set, outbuf is invalid, no overlay"); + mOverlayLayer = NULL; + mCompositionType = COMPOSITION_GLES; + } + + if (mFBTargetLayer && IsNormalDRMWithSkipLayer) { + if (mFBTargetLayer->acquireFenceFd >= 0) + contents->retireFenceFd = mFBTargetLayer->acquireFenceFd; + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } + } else if (mFBTargetLayer && mCompositionType == COMPOSITION_GLES) { + processGles(contents); + } else if (mFBTargetLayer && mOverlayLayer && mNumFB > 0 && mCompositionType == COMPOSITION_MIXED) { + processMixed(contents); + } else if (mOverlayLayer) { + processHwc(contents); +#ifdef USES_DISABLE_COMPOSITIONTYPE_GLES + } else if (mOverlayLayer == NULL && mCompositionType == COMPOSITION_MIXED) { + ALOGV("Use Scaler to copy from SCRATCH buffer to SINK buffer"); + int ret = 0; + buffer_handle_t tempBuffers = mExternalMPPforCSC->mDstBuffers[mExternalMPPforCSC->mCurrentBuf]; + int tempFence = mExternalMPPforCSC->mDstBufFence[mExternalMPPforCSC->mCurrentBuf]; + + mExternalMPPforCSC->mDstBuffers[mExternalMPPforCSC->mCurrentBuf] = contents->outbuf; + mExternalMPPforCSC->mDstBufFence[mExternalMPPforCSC->mCurrentBuf] = contents->outbufAcquireFenceFd; + + ret = mExternalMPPforCSC->processM2M(*mFBTargetLayer, outBufHandle->format, NULL, false); + if (ret < 0) + ALOGE("failed to configure scaler"); + contents->outbufAcquireFenceFd = -1; + contents->retireFenceFd = mExternalMPPforCSC->mDstConfig.releaseFenceFd; + mExternalMPPforCSC->mDstBuffers[mExternalMPPforCSC->mCurrentBuf] = tempBuffers; + mExternalMPPforCSC->mDstBufFence[mExternalMPPforCSC->mCurrentBuf] = tempFence; + + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } +#endif + } else { + ALOGV("animation layer skip"); + if (mFBTargetLayer && mFBTargetLayer->acquireFenceFd >= 0) + contents->retireFenceFd = mFBTargetLayer->acquireFenceFd; + if (contents->outbufAcquireFenceFd >= 0) { + close(contents->outbufAcquireFenceFd); + contents->outbufAcquireFenceFd = -1; + } + } + + mPrevCompositionType = mCompositionType; + + return err; +} + +#ifdef USES_VDS_OTHERFORMAT +bool ExynosVirtualDisplay::isSupportGLESformat() +{ + return false; +} +#endif + +void ExynosVirtualDisplay::determineSkipLayer(hwc_display_contents_1_t *contents) +{ + /* If there is rotation animation layer, */ + /* all layer is HWC_OVERLAY and HWC_SKIP_RENDERING */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) { + continue; + } + + if (mIsRotationState) { + // normal layers can be skip layer. 
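+ // DRM-protected layers are deliberately left untouched here so protected video keeps + // flowing to the sink; every other layer is forced to HWC_OVERLAY with + // HWC_SKIP_RENDERING below, so no ordinary content is composited during the rotation animation.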
+ if (layer.handle) { + private_handle_t *h = private_handle_t::dynamicCast(layer.handle); + if (getDrmMode(h->flags) != NO_DRM) + continue; + } + layer.compositionType = HWC_OVERLAY; + layer.flags = HWC_SKIP_RENDERING; + } + + if (!layer.handle) + layer.compositionType = HWC_FRAMEBUFFER; + } +} + +void ExynosVirtualDisplay::determineYuvOverlay(hwc_display_contents_1_t *contents) +{ + ALOGV("ExynosVirtualDisplay::determineYuvOverlay"); + + mForceOverlayLayerIndex = -1; + mHasDrmSurface = false; + mYuvLayers = 0; + mIsRotationState = false; + mIsSecureDRM = false; + mIsNormalDRM = false; + bool useVPPOverlayFlag = false; + + if (mDisplayFd < 0) { + ALOGE("determineYuvOverlay, mDisplayFd is invalid , no overlay"); + return; + } + +#ifdef USES_VDS_OTHERFORMAT + if (!isSupportGLESformat()) { + ALOGE("determineYuvOverlay, GLES format is not suppoted, no overlay"); + return; + } +#endif + + /* find rotation animation layer */ + for (size_t i = 0; i < contents->numHwLayers; i++) { + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.flags & HWC_SCREENSHOT_ANIMATOR_LAYER) { + ALOGV("include rotation animation layer"); + mIsRotationState = true; + return; + } + } + + private_handle_t *outBufHandle = private_handle_t::dynamicCast(contents->outbuf); + if (outBufHandle == NULL) { + ALOGE("determineYuvOverlay, outbuf is invalid, no overlay"); + return; + } + + for (size_t i = 0; i < contents->numHwLayers; i++) { + ExynosMPPModule* supportedInternalMPP = NULL; + ExynosMPPModule* supportedExternalMPP = NULL; + hwc_layer_1_t &layer = contents->hwLayers[i]; + + if (layer.compositionType == HWC_FRAMEBUFFER_TARGET) + continue; + + useVPPOverlayFlag = false; + if (layer.handle) { + private_handle_t *handle = private_handle_t::dynamicCast(layer.handle); + + if (getDrmMode(handle->flags) != NO_DRM) { + useVPPOverlayFlag = true; + layer.flags &= ~HWC_SKIP_RENDERING; + } + + /* check yuv surface */ + if (!mForceFb && !isFormatRgb(handle->format)) { + + /* HACK: force integer source crop */ + layer.sourceCropf.top = floor(layer.sourceCropf.top); + layer.sourceCropf.left = floor(layer.sourceCropf.left); + layer.sourceCropf.bottom = ceil(layer.sourceCropf.bottom); + layer.sourceCropf.right = ceil(layer.sourceCropf.right); + + if (isOverlaySupported(contents->hwLayers[i], i, useVPPOverlayFlag, &supportedInternalMPP, &supportedExternalMPP)) { + this->mYuvLayers++; + if (this->mHasDrmSurface == false) { + /* Assign MPP */ + if (supportedExternalMPP != NULL) + supportedExternalMPP->mState = MPP_STATE_ASSIGNED; + if (supportedInternalMPP != NULL) + supportedInternalMPP->mState = MPP_STATE_ASSIGNED; + + mForceOverlayLayerIndex = i; + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->mExternalMPP = supportedExternalMPP; + mLayerInfos[i]->mInternalMPP = supportedInternalMPP; + mLayerInfos[i]->compositionType = layer.compositionType; + + if ((getDrmMode(handle->flags) != NO_DRM) && + isBothMPPProcessingRequired(layer) && + (supportedInternalMPP != NULL)) { + layer.displayFrame.right = layer.displayFrame.left + + ALIGN_DOWN(WIDTH(layer.displayFrame), supportedInternalMPP->getCropWidthAlign(layer)); + layer.displayFrame.bottom = layer.displayFrame.top + + ALIGN_DOWN(HEIGHT(layer.displayFrame), supportedInternalMPP->getCropHeightAlign(layer)); + } + + if ((getDrmMode(handle->flags) != NO_DRM) && + (supportedInternalMPP != NULL)) { + if (WIDTH(layer.displayFrame) < supportedInternalMPP->getMinWidth(layer)) { + ALOGE("determineYuvOverlay layer %d displayFrame width %d is smaller than vpp minWidth %d", + i, 
WIDTH(layer.displayFrame), supportedInternalMPP->getMinWidth(layer)); + layer.displayFrame.right = layer.displayFrame.left + + ALIGN_DOWN(WIDTH(layer.displayFrame), supportedInternalMPP->getMinWidth(layer)); + } + if (HEIGHT(layer.displayFrame) < supportedInternalMPP->getMinHeight(layer)) { + ALOGE("determineYuvOverlay layer %d displayFrame height %d is smaller than vpp minHeight %d", + i, HEIGHT(layer.displayFrame), supportedInternalMPP->getMinHeight(layer)); + layer.displayFrame.bottom = layer.displayFrame.top + + ALIGN_DOWN(HEIGHT(layer.displayFrame), supportedInternalMPP->getMinHeight(layer)); + } + } + + if (getDrmMode(handle->flags) == SECURE_DRM) { + mIsSecureDRM = true; + mOverlayLayer = &layer; + calcDisplayRect(layer); + ALOGV("include secure drm layer"); + } + if (getDrmMode(handle->flags) == NORMAL_DRM) { + mIsNormalDRM = true; + mOverlayLayer = &layer; + calcDisplayRect(layer); + ALOGV("include normal drm layer"); + } + } + } else { + if (getDrmMode(handle->flags) != NO_DRM) { + /* This layer should be overlay but HWC can't handle it */ + layer.compositionType = HWC_OVERLAY; + mLayerInfos[i]->compositionType = layer.compositionType; + layer.flags |= HWC_SKIP_RENDERING; + } + } + if (getDrmMode(handle->flags) != NO_DRM) { + this->mHasDrmSurface = true; + mForceOverlayLayerIndex = i; + } + } + } + } +} + +void ExynosVirtualDisplay::determineSupportedOverlays(hwc_display_contents_1_t *contents) +{ + ALOGV("ExynosVirtualDisplay::determineSupportedOverlays"); + + if (mDisplayFd < 0) { + ALOGE("determineSupportedOverlays, mDisplayFd is invalid , no overlay"); + return; + } + +#ifdef USES_VDS_OTHERFORMAT + if (!isSupportGLESformat()) { + ALOGE("determineSupportedOverlays, GLES format is not suppoted, no overlay"); + return; + } +#endif + + if (mIsRotationState) + return; + + private_handle_t *outBufHandle = private_handle_t::dynamicCast(contents->outbuf); + if (outBufHandle == NULL) { + ALOGE("determineSupportedOverlays, outbuf is invalid, no overlay"); + return; + } + + ExynosDisplay::determineSupportedOverlays(contents); +} + +void ExynosVirtualDisplay::determineBandwidthSupport(hwc_display_contents_1_t *contents) +{ + ALOGV("ExynosVirtualDisplay::determineBandwidthSupport"); + + if (mDisplayFd < 0) { + ALOGE("determineBandwidthSupport, mDisplayFd is invalid , no overlay"); + return; + } + +#ifdef USES_VDS_OTHERFORMAT + if (!isSupportGLESformat()) { + ALOGE("determineBandwidthSupport, GLES format is not suppoted, no overlay"); + return; + } +#endif + + if (mIsRotationState) + return; + + private_handle_t *outBufHandle = private_handle_t::dynamicCast(contents->outbuf); + if (outBufHandle == NULL) { + ALOGE("determineBandwidthSupport, outbuf is invalid, no overlay"); + return; + } + + ExynosDisplay::determineBandwidthSupport(contents); +} + +#ifdef USES_VDS_BGRA8888 +bool ExynosVirtualDisplay::isBothMPPProcessingRequired(hwc_layer_1_t &layer) +{ + if (mForceDoubleOperation) + return true; + else + return ExynosDisplay::isBothMPPProcessingRequired(layer); +} +#endif + +void ExynosVirtualDisplay::calcDisplayRect(hwc_layer_1_t &layer) +{ + bool needToTransform = false; + unsigned int newTransform = 0; + unsigned int calc_w = (mWidth - mDisplayWidth) >> 1; + unsigned int calc_h = (mHeight - mDisplayHeight) >> 1; + + if (layer.compositionType) { + if (mPresentationMode) { + /* Use EXTERNAL_TB directly (DRM-extention) */ + newTransform = layer.transform; + needToTransform = false; + } else if (mFrameBufferTargetTransform) { + switch(mFrameBufferTargetTransform) { + case 
HAL_TRANSFORM_ROT_90: + newTransform = 0; + needToTransform = true; + break; + case HAL_TRANSFORM_ROT_180: + newTransform = HAL_TRANSFORM_ROT_90; + needToTransform = false; + break; + case HAL_TRANSFORM_ROT_270: + newTransform = HAL_TRANSFORM_ROT_180; + needToTransform = true; + break; + default: + newTransform = 0; + needToTransform = false; + break; + } + } else { + switch(mDeviceOrientation) { + case 1: /* HAL_TRANSFORM_ROT_90 */ + newTransform = HAL_TRANSFORM_ROT_270; + needToTransform = false; + break; + case 3: /* HAL_TRANSFORM_ROT_270 */ + newTransform = HAL_TRANSFORM_ROT_90; + needToTransform = false; + break; + default: /* Default | HAL_TRANSFORM_ROT_180 */ + newTransform = 0; + needToTransform = false; + break; + } + } + + if (layer.compositionType == HWC_OVERLAY) { + if (needToTransform) { + mHwc->mVirtualDisplayRect.left = layer.displayFrame.left + calc_h; + mHwc->mVirtualDisplayRect.top = layer.displayFrame.top + calc_w; + mHwc->mVirtualDisplayRect.width = WIDTH(layer.displayFrame) - (calc_h << 1); + mHwc->mVirtualDisplayRect.height = HEIGHT(layer.displayFrame) - (calc_w << 1); + } else { + mHwc->mVirtualDisplayRect.left = layer.displayFrame.left + calc_w; + mHwc->mVirtualDisplayRect.top = layer.displayFrame.top + calc_h; + mHwc->mVirtualDisplayRect.width = WIDTH(layer.displayFrame) - (calc_w << 1); + mHwc->mVirtualDisplayRect.height = HEIGHT(layer.displayFrame) - (calc_h << 1); + } + + if (layer.displayFrame.left < 0 || layer.displayFrame.top < 0 || + mWidth < (unsigned int)WIDTH(layer.displayFrame) || mHeight < (unsigned int)HEIGHT(layer.displayFrame)) { + if (needToTransform) { + mHwc->mVirtualDisplayRect.left = 0 + calc_h; + mHwc->mVirtualDisplayRect.top = 0 + calc_w; + + mHwc->mVirtualDisplayRect.width = mWidth - (calc_h << 1); + mHwc->mVirtualDisplayRect.height = mHeight - (calc_w << 1); + } else { + mHwc->mVirtualDisplayRect.left = 0 + calc_w; + mHwc->mVirtualDisplayRect.top = 0 + calc_h; + + mHwc->mVirtualDisplayRect.width = mWidth - (calc_w << 1); + mHwc->mVirtualDisplayRect.height = mHeight - (calc_h << 1); + } + } + } else { /* HWC_FRAMEBUFFER_TARGET */ + if (needToTransform) { + mHwc->mVirtualDisplayRect.width = (mDisplayHeight * mDisplayHeight) / mDisplayWidth; + mHwc->mVirtualDisplayRect.height = mDisplayHeight; + mHwc->mVirtualDisplayRect.left = (mDisplayWidth - mHwc->mVirtualDisplayRect.width) / 2; + mHwc->mVirtualDisplayRect.top = 0; + } else { + mHwc->mVirtualDisplayRect.left = 0; + mHwc->mVirtualDisplayRect.top = 0; + mHwc->mVirtualDisplayRect.width = mDisplayWidth; + mHwc->mVirtualDisplayRect.height = mDisplayHeight; + } + } + } +} + +void ExynosVirtualDisplay::init(hwc_display_contents_1_t __unused *contents) +{ + ALOGV("ExynosVirtualDisplay::init() mDisplayFd %d", mDisplayFd); + +#ifdef USES_VDS_BGRA8888 + private_handle_t *handle = private_handle_t::dynamicCast(contents->outbuf); + if (handle) { + if (handle->format == HAL_PIXEL_FORMAT_BGRA_8888) { + mForceDoubleOperation = false; + } else { + mForceDoubleOperation = true; + } + } + ALOGV("ExynosVirtualDisplay::init() mForceDoubleOperation %d", mForceDoubleOperation); +#endif + +#ifdef USES_VIRTUAL_DISPLAY_DECON_EXT_WB + int ret = 0; + if (mDisplayFd < 0) { + mDisplayFd = open(DECON_WB_DEV_NAME, O_RDWR); + if (mDisplayFd < 0) { + ALOGE("failed to open decon ext wb for virtualDisplay"); + } else { + ALOGD("open fd for WFD(%d)", mDisplayFd); + int subdev_fd; + struct v4l2_subdev_format sd_fmt; + char devname[32]; + + sd_fmt.pad = DECON_PAD_WB; + sd_fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE; + 
sd_fmt.format.width = mWidth; + sd_fmt.format.height = mHeight; + + snprintf(devname, sizeof(devname), DECON_WB_SUBDEV_NAME); + + subdev_fd = exynos_subdev_open_devname(devname, O_RDWR); + if (subdev_fd < 0) + ALOGE("failed to open subdev for virtualDisplay"); + else { + ret = exynos_subdev_s_fmt(subdev_fd, &sd_fmt); + close(subdev_fd); + if (ret < 0) { + ALOGE("failed to subdev s_fmt for virtualDisplay"); + close(mDisplayFd); + mDisplayFd = -1; + } + } + } + } +#endif +} + +void ExynosVirtualDisplay::deInit() +{ + ALOGV("ExynosVirtualDisplay::deInit(), mDisplayFd %d", mDisplayFd); + + blank(); + + if (mDisplayFd > 0) { + close(mDisplayFd); + ALOGD("Close fd for WFD"); + } + mDisplayFd = -1; +} + +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM +void ExynosVirtualDisplay::requestIONMemory() +{ + ALOGV("ExynosVirtualDisplay::requestIONMemory()"); + /* prepare reserved memory */ + if (mReserveMemFd > 0) { + unsigned int value; + int waitCount = WAIT_COUNT_FOR_ISOLATION; + char buffer[4096]; + memset(buffer, 0, sizeof(buffer)); + int err = lseek(mReserveMemFd, 0, SEEK_SET); + err = read(mReserveMemFd, buffer, sizeof(buffer)); + value = atoi(buffer); + + if ((err > 0) && (value == 0)) { + memset(buffer, 0, sizeof(buffer)); + buffer[0] = '2'; + if (write(mReserveMemFd, buffer, sizeof(buffer)) < 0) + ALOGE("fail to request isolation of memory for WFD"); + else { + while (!checkIONBufferPrepared()) { + usleep(20000); + if (--waitCount) + ALOGV("wait count for isolation: %d", waitCount); + else { + ALOGE("fail to request isolation (time out)"); + break; + } + } + if (mFlagIONBufferAllocated) + ALOGI("video ext ion memory for WFD was isolated"); + } + } else { + if (err < 0) + ALOGE("fail to read mReserveMemFd"); + else + ALOGE("ion memory for WFD is isolated already"); + } + } +} + +void ExynosVirtualDisplay::freeIONMemory() +{ + ALOGV("ExynosVirtualDisplay::freeIONMemory()"); + if (mReserveMemFd > 0 && mFlagIONBufferAllocated) { + unsigned int value; + char buffer[4096]; + memset(buffer, 0, sizeof(buffer)); + int err = lseek(mReserveMemFd, 0, SEEK_SET); + err = read(mReserveMemFd, buffer, sizeof(buffer)); + value = atoi(buffer); + if ((err > 0) && (value == 1)) { + setMemoryProtection(0); + memset(buffer, 0, sizeof(buffer)); + buffer[0] = '0'; + if (write(mReserveMemFd, buffer, sizeof(buffer)) < 0) + ALOGE("fail to request deisolation of memory for WFD"); + else + ALOGI("deisolation of memory for WFD was requested"); + } else { + if (err < 0) + ALOGE("fail to read mReserveMemFd"); + else + ALOGE("ion memory for WFD is deisolated already"); + } + mFlagIONBufferAllocated = false; + } +} + +bool ExynosVirtualDisplay::checkIONBufferPrepared() +{ + ALOGV("ExynosVirtualDisplay::checkIONBufferPrepared()"); + if (mReserveMemFd > 0) { + unsigned int value; + char buffer[4096]; + memset(buffer, 0, sizeof(buffer)); + int err = lseek(mReserveMemFd, 0, SEEK_SET); + err = read(mReserveMemFd, buffer, sizeof(buffer)); + value = atoi(buffer); + + if ((err > 0) && (value == 1)) { + mFlagIONBufferAllocated = true; + return true; + } else { + mFlagIONBufferAllocated = false; + return false; + } + } + return false; +} + +void ExynosVirtualDisplay::setMemoryProtection(int protection) +{ + ALOGV("ExynosVirtualDisplay::setMemoryProtection() protection %d", protection); + int ret = 0; + + if (mSMemFd > 0) { + ret = ioctl(mSMemFd, SECMEM_IOC_SET_VIDEO_EXT_PROC, &protection); + if (ret < 0) + ALOGE("Protection failed, ret(%d)", ret); + } +} +#endif + +void ExynosVirtualDisplay::setWFDOutputResolution(unsigned int width, unsigned int height, + unsigned int disp_w, unsigned int
disp_h) +{ + mWidth = width; + mHeight = height; + mDisplayWidth = disp_w; + mDisplayHeight = disp_h; + mXres = width; + mYres = height; +} + +#ifdef USES_VDS_OTHERFORMAT +void ExynosVirtualDisplay::setVDSGlesFormat(int format) +{ + ALOGV("ExynosVirtualDisplay::setVDSGlesFormat(), format %d", format); + mGLESFormat = format; +} +#endif + +void ExynosVirtualDisplay::setPriContents(hwc_display_contents_1_t __unused *contents) +{ + +} + +int ExynosVirtualDisplay::blank() +{ + ALOGV("ExynosVirtualDisplay::blank(), mDisplayFd %d, mBlanked %d", mDisplayFd, mBlanked); + int ret = 0; + + if (mDisplayFd > 0 && !mBlanked) { + ret = ioctl(mDisplayFd, FBIOBLANK, FB_BLANK_POWERDOWN); + if (ret < 0) + ALOGE("failed blank for virtualDisplay"); + else + mBlanked = true; + } + return ret; +} + +int ExynosVirtualDisplay::unblank() +{ + ALOGV("ExynosVirtualDisplay::unblank(), mDisplayFd %d, mBlanked %d", mDisplayFd, mBlanked); + int ret = 0; + if (mDisplayFd > 0 && mBlanked) { + ret = ioctl(mDisplayFd, FBIOBLANK, FB_BLANK_UNBLANK); + if (ret < 0) + ALOGE("failed unblank for virtualDisplay"); + else + mBlanked = false; + } + return ret; +} + +int ExynosVirtualDisplay::getConfig() +{ + return 0; +} + +int32_t ExynosVirtualDisplay::getDisplayAttributes(const uint32_t __unused attribute) +{ + return 0; +} + diff --git a/libvppvirtualdisplay/ExynosVirtualDisplay.h b/libvppvirtualdisplay/ExynosVirtualDisplay.h new file mode 100644 index 0000000..1b26425 --- /dev/null +++ b/libvppvirtualdisplay/ExynosVirtualDisplay.h @@ -0,0 +1,123 @@ +#ifndef EXYNOS_VPP_VIRTUALDISPLAY_H +#define EXYNOS_VPP_VIRTUALDISPLAY_H + +#include "ExynosHWC.h" +#include "ExynosDisplay.h" +#include "../../exynos/kernel-3.10-headers/videodev2.h" +#ifdef USES_VDS_BGRA8888 +#include "ExynosMPPModule.h" +#endif + +#define NUM_FRAME_BUFFER 5 +#define HWC_SKIP_RENDERING 0x80000000 +#define MAX_BUFFER_COUNT 8 + +#define MAX_VIRTUALDISPLAY_VIDEO_LAYERS 1 + +class ExynosVirtualDisplay : public ExynosDisplay { + public: + /* Methods */ + ExynosVirtualDisplay(struct exynos5_hwc_composer_device_1_t *pdev); + ~ExynosVirtualDisplay(); + + virtual int32_t getDisplayAttributes(const uint32_t attribute); + virtual void configureWriteBack(hwc_display_contents_1_t *contents, decon_win_config_data &win_data); + + virtual int blank(); + virtual int unblank(); + + virtual int getConfig(); + + virtual int prepare(hwc_display_contents_1_t* contents); + virtual int set(hwc_display_contents_1_t* contents); + + virtual void allocateLayerInfos(hwc_display_contents_1_t* contents); + virtual void determineYuvOverlay(hwc_display_contents_1_t *contents); + virtual void determineSupportedOverlays(hwc_display_contents_1_t *contents); + virtual void determineBandwidthSupport(hwc_display_contents_1_t *contents); + + virtual void init(hwc_display_contents_1_t* contents); + virtual void deInit(); + + void setWFDOutputResolution(unsigned int width, unsigned int height, unsigned int disp_w, unsigned int disp_h); +#ifdef USES_VDS_OTHERFORMAT + void setVDSGlesFormat(int format); +#endif + void setPriContents(hwc_display_contents_1_t* contents); + + enum CompositionType { + COMPOSITION_UNKNOWN = 0, + COMPOSITION_GLES = 1, + COMPOSITION_HWC = 2, + COMPOSITION_MIXED = COMPOSITION_GLES | COMPOSITION_HWC + }; + + unsigned int mWidth; + unsigned int mHeight; + unsigned int mDisplayWidth; + unsigned int mDisplayHeight; + + bool mIsWFDState; + bool mIsRotationState; + + bool mPresentationMode; + unsigned int mDeviceOrientation; + unsigned int mFrameBufferTargetTransform; + + CompositionType 
mCompositionType; + CompositionType mPrevCompositionType; + int mGLESFormat; + int mSinkUsage; + + protected: + void setSinkBufferUsage(); + void processGles(hwc_display_contents_1_t* contents); + void processHwc(hwc_display_contents_1_t* contents); + void processMixed(hwc_display_contents_1_t* contents); + + virtual void configureHandle(private_handle_t *handle, size_t index, hwc_layer_1_t &layer, int fence_fd, decon_win_config &cfg); + virtual int postFrame(hwc_display_contents_1_t *contents); +#ifdef USES_VDS_BGRA8888 + virtual bool isBothMPPProcessingRequired(hwc_layer_1_t &layer); +#endif + virtual void determineSkipLayer(hwc_display_contents_1_t *contents); +#ifdef USES_VDS_OTHERFORMAT + virtual bool isSupportGLESformat(); +#endif + bool mIsSecureDRM; + bool mIsNormalDRM; + + hwc_layer_1_t *mOverlayLayer; + hwc_layer_1_t *mFBTargetLayer; + hwc_layer_1_t *mFBLayer[NUM_FRAME_BUFFER]; + size_t mNumFB; + + void calcDisplayRect(hwc_layer_1_t &layer); + +#ifdef USES_DISABLE_COMPOSITIONTYPE_GLES + ExynosMPPModule *mExternalMPPforCSC; +#endif + +#ifdef USES_VDS_BGRA8888 + bool mForceDoubleOperation; + ExynosMPPModule *mExternalMPPforCSC; +#endif + +#ifdef USE_VIDEO_EXT_FOR_WFD_DRM + protected: + int mSMemFd; + bool mSMemProtected; + + void setMemoryProtection(int protection); + + int mReserveMemFd; + bool mFlagIONBufferAllocated; + bool checkIONBufferPrepared(); + public: + void requestIONMemory(); + void freeIONMemory(); +#endif + +}; + +#endif diff --git a/libvppvirtualdisplay/NOTICE b/libvppvirtualdisplay/NOTICE new file mode 100644 index 0000000..316b4eb --- /dev/null +++ b/libvppvirtualdisplay/NOTICE @@ -0,0 +1,190 @@ + + Copyright (c) 2014, The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + diff --git a/rpmbd/Android.mk b/rpmbd/Android.mk new file mode 100644 index 0000000..6cce3d9 --- /dev/null +++ b/rpmbd/Android.mk @@ -0,0 +1,37 @@ +# +# Copyright (C) 2015 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +LOCAL_PATH := $(call my-dir) + +ifeq ($(BOARD_USES_RPMB), true) +include $(CLEAR_VARS) + +LOCAL_MODULE := rpmbd +LOCAL_SRC_FILES := \ + rpmbd.c +LOCAL_SHARED_LIBRARIES := libc libcutils +LOCAL_MODULE_TAGS := optional +ifeq ($(BOARD_USES_MMC_RPMB), true) +LOCAL_CFLAGS := -DUSE_MMC_RPMB +endif + +ifeq ($(TARGET_IS_64_BIT), true) +LOCAL_CFLAGS += -DTARGET_IS_64_BIT +endif + +include $(BUILD_EXECUTABLE) + +endif diff --git a/rpmbd/rpmbd.c b/rpmbd/rpmbd.c new file mode 100644 index 0000000..d833432 --- /dev/null +++ b/rpmbd/rpmbd.c @@ -0,0 +1,517 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdarg.h>
+
+/* header names below are assumed from the socket, ioctl and Android log calls used in this file */
+#include <errno.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/socket.h>
+#include <sys/un.h>
+#include <sys/ioctl.h>
+#include <cutils/log.h>
+#include <android/log.h>
+#include "rpmbd.h"
+
+/* Logging function for outputting to stderr or log */
+void log_print(int level, char *format, ...)
+{
+    if (level >= 0 && level <= LOG_MAX) {
+        static int levels[5] = {
+            ANDROID_LOG_DEBUG, ANDROID_LOG_INFO, ANDROID_LOG_WARN,
+            ANDROID_LOG_ERROR, ANDROID_LOG_FATAL
+        };
+        va_list ap;
+        va_start(ap, format);
+        __android_log_vprint(levels[level], APP_NAME, format, ap);
+        va_end(ap);
+
+    }
+}
+
+static void title(void)
+{
+    printf("%s", program_version);
+}
+
+static void dump_packet(unsigned char *data, int len)
+{
+    unsigned char s[17];
+    int i, j;
+
+    s[16]='\0';
+    for (i = 0; i < len; i += 16) {
+        log_print(ERROR, "%06x :", i);
+        for (j=0; j<16; j++) {
+            log_print(ERROR, " %02x", data[i+j]);
+            s[j] = (data[i+j]<' ' ? '.' : (data[i+j]>'}' ? '.' : data[i+j]));
+        }
+        log_print(ERROR, " |%s|\n",s);
+    }
+    log_print(ERROR, "\n");
+}
+
+static void swap_packet(u8 *p, u8 *d)
+{
+    int i;
+    for (i = 0; i < RPMB_PACKET_SIZE; i++)
+        d[i] = p[RPMB_PACKET_SIZE - 1 - i];
+}
+
+int rpmbd_code_for_cipher_string(char *user_name)
+{
+    int code = 0;
+
+    /* One RPMB 1KB area is used per code number.
+       Codes 1 and 2 shouldn't be used because they are already used by the bootloader.
+       Code numbers must be 3 or higher. */
+    if (strncmp(user_name, "unittest", 10) == 0)
+        code = 3;
+    /* Will add additional scenario */
+
+    return code;
+}
+
+/* The beginning of everything */
+int main(int argc, char **argv)
+{
+    int server_sock, rval, connfd = 0, ret, code = 0, cnt = 0;
+    struct sockaddr_un serv_addr;
+    Rpmb_Req *req;
+#ifdef USE_MMC_RPMB
+    MMC_Ioctl_Command *ic;
+    u8 *result_buf = NULL;
+#else
+    Scsi_Ioctl_Command *ic;
+#endif
+    char buf[4118];
+    u16 address = 0;
+    struct rpmb_packet packet;
+
+    /* display application header */
+    title();
+    log_print(INFO, "*************** start rpmb daemon *************** \n");
+
+    req = (Rpmb_Req *)malloc(sizeof(Rpmb_Req));
+    if (req == NULL) {
+        log_print(ERROR, "Memory allocation fail");
+        exit(1);
+    }
+
+    server_sock = socket(AF_UNIX, SOCK_STREAM, 0);
+    if (server_sock < 0) {
+        log_print(ERROR, "Can't open stream socket");
+        free(req);
+        exit(1);
+    }
+
+    bzero(&serv_addr, sizeof(serv_addr));
+    serv_addr.sun_family = AF_UNIX;
+
+    unlink("/data/app/rpmbd");
+    strncpy(serv_addr.sun_path, "/data/app/rpmbd", sizeof(serv_addr.sun_path) - 1);
+
+    if (bind(server_sock, (struct sockaddr*)&serv_addr, sizeof(serv_addr)) == -1) {
+        log_print(ERROR, "Can't bind stream socket");
+        close(server_sock);
+        free(req);
+        exit(1);
+    }
+    log_print(INFO, "Socket has name %s\n", serv_addr.sun_path);
+
+    if (listen(server_sock, 5) == -1) {
+        log_print(ERROR, "Can't listen for connection socket");
+        close(server_sock);
+        free(req);
+        exit(1);
+    }
+
+    if (chmod("/data/app/rpmbd", 0660) < 0) {
+        log_print(ERROR, "chmod fail");
+        close(server_sock);
+        free(req);
+        exit(1);
+    }
+
+#ifdef USE_MMC_RPMB
+    dev = open("/dev/block/mmcblk0rpmb", O_RDWR);
+#else
+    dev = open("/dev/block/sdb", O_RDWR);
+#endif
+    if (dev < 0) {
+        log_print(ERROR, " fail");
+        close(server_sock);
+        free(req);
+        exit(1);
+    }
+
+    while(1) {
+        connfd = accept(server_sock, 0, 0);
+        if (connfd == -1) {
+            log_print(ERROR, "Err accept");
+            continue;
+        }
+
+        bzero(buf, sizeof(buf));
+        if ((rval = read(connfd, buf, sizeof(buf))) <= 0) {
+            log_print(ERROR, "Reading stream message");
+            goto con;
+        } else {
+            req = (Rpmb_Req *)buf;
+
+            log_print(INFO, "type: %x", req->type); +
log_print(INFO, "user: %s\n", req->user); + + ret = 0; + switch(req->type) { + case GET_WRITE_COUNTER: + if (dev == 0) { + log_print(ERROR, " fail"); + ret = -1; + break; + } + + if (req->data_len != RPMB_PACKET_SIZE) { + log_print(ERROR, "data len is invalid"); + ret = -1; + break; + } + + if (cnt != 0) { + log_print(ERROR, "already lock: user: %s", req->user); + ret = ERROR_RETRY; + break; + } + cnt++; + +#ifdef USE_MMC_RPMB + ic = (MMC_Ioctl_Command *)malloc(sizeof(MMC_Ioctl_Command) + req->data_len); + if (ic == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + break; + } + + /* Security OUT protocol */ + mmc_cmd_init(ic); + ic->write_flag = true; + ic->flags = MMC_RSP_R1; + ic->opcode = MMC_WRITE_MULTIPLE_BLOCK; + ic->data_ptr = (unsigned long)(buf + sizeof(Rpmb_Req)); + + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + + /* Security IN protocol */ + memset(ic->data_ptr, 0x0, RPMB_PACKET_SIZE); + ic->write_flag = false; + ic->opcode = MMC_READ_MULTIPLE_BLOCK; + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + memcpy(buf + sizeof(Rpmb_Req), ic->data_ptr, req->data_len); +#else + ic = (Scsi_Ioctl_Command *)malloc(sizeof(Scsi_Ioctl_Command) + req->data_len); + if (ic == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + break; + } + memcpy(ic->data , buf + sizeof(Rpmb_Req), req->data_len); + ic->outlen = req->data_len; + + /* Security OUT protocol */ + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_OUT, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + + /* Security IN protocol */ + memset(ic->data, 0x0, req->data_len); + ic->inlen = req->data_len; + + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_IN, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + memcpy(buf + sizeof(Rpmb_Req), ic->data, req->data_len); +#endif + free(ic); + break; + case WRITE_DATA: + if (dev == 0) { + log_print(ERROR, " fail"); + ret = -1; + break; + } + + if (cnt <= 0) { + log_print(ERROR, "wrong cnt: %d", cnt); + ret = -1; + break; + } + + if ((req->data_len < RPMB_PACKET_SIZE) || (req->data_len > RPMB_PACKET_SIZE * (RPMB_AREA/DATA_SIZE))) { + log_print(ERROR, "data len is invalid"); + ret = -1; + break; + } + +#ifdef USE_MMC_RPMB + ic = (MMC_Ioctl_Command *)malloc(sizeof(MMC_Ioctl_Command) + req->data_len); + if (ic == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + break; + } + + /* Security OUT protocol */ + mmc_cmd_init(ic); + ic->write_flag = RELIABLE_WRITE_REQ_SET; + ic->flags = MMC_RSP_R1; + ic->blocks = req->data_len/RPMB_PACKET_SIZE; + ic->opcode = MMC_WRITE_MULTIPLE_BLOCK; + ic->data_ptr = (unsigned long)(buf + sizeof(Rpmb_Req)); + + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + goto wout; + } + + result_buf = (u8 *)malloc(RPMB_PACKET_SIZE); + if (result_buf == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + goto wout; + } + ic->write_flag = true; + ic->blocks = 1; + ic->data_ptr = (unsigned long)result_buf; + memset(&packet, 0x0, RPMB_PACKET_SIZE); + packet.request = RESULT_READ_REQ; + swap_packet((unsigned char *)&packet, result_buf); + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(result_buf); + goto wout; + } + + /* Security IN protocol */ + memset((void *)result_buf, 0, RPMB_PACKET_SIZE); + ic->write_flag = false; + ic->blocks = 
1; + ic->opcode = MMC_READ_MULTIPLE_BLOCK; + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(result_buf); + goto wout; + } + memcpy(buf + sizeof(Rpmb_Req), result_buf, RPMB_PACKET_SIZE); + free(result_buf); +#else + ic = (Scsi_Ioctl_Command *)malloc(sizeof(Scsi_Ioctl_Command) + req->data_len); + if (ic == NULL) { + log_print(ERROR, "memory allocation fail"); + goto wout; + } + memcpy(ic->data , buf + sizeof(Rpmb_Req), req->data_len); + ic->outlen = req->data_len; + + /* Security OUT protocol */ + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_OUT, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + goto wout; + } + + memset(ic->data, 0x0, req->data_len); + memset(&packet, 0x0, RPMB_PACKET_SIZE); + packet.request = RESULT_READ_REQ; + swap_packet((unsigned char *)&packet, ic->data); + ic->outlen = RPMB_PACKET_SIZE; + + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_OUT, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + goto wout; + } + + /* Security IN protocol */ + memset(ic->data, 0x0, req->data_len); + ic->inlen = RPMB_PACKET_SIZE; + + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_IN, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + goto wout; + } + memcpy(buf + sizeof(Rpmb_Req), ic->data, req->data_len); +#endif +wout: + cnt--; + free(ic); + break; + case READ_DATA: + if (dev == 0) { + log_print(ERROR, " fail"); + ret = -1; + break; + } + + if ((req->data_len < RPMB_PACKET_SIZE) || (req->data_len > RPMB_PACKET_SIZE * (RPMB_AREA/DATA_SIZE))) { + log_print(ERROR, "data len is invalid"); + ret = -1; + break; + } + +#ifdef USE_MMC_RPMB + ic = (MMC_Ioctl_Command *)malloc(sizeof(MMC_Ioctl_Command) + req->data_len); + if (ic == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + break; + } + + /* Security OUT protocol */ + mmc_cmd_init(ic); + ic->write_flag = true; + ic->flags = MMC_RSP_R1; + ic->opcode = MMC_WRITE_MULTIPLE_BLOCK; + ic->data_ptr = (unsigned long)(buf + sizeof(Rpmb_Req)); + + result_buf = (u8 *)malloc(RPMB_PACKET_SIZE); + if (result_buf == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + free(ic); + break; + } + + /* At read data with MMC, block count has to '0' */ + swap_packet((u8 *)((addr_size_t)ic->data_ptr), result_buf); + ((struct rpmb_packet *)(result_buf))->count = 0; + swap_packet(result_buf, (u8 *)((addr_size_t)ic->data_ptr)); + free(result_buf); + + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + + /* Security IN protocol */ + memset(ic->data_ptr, 0x0, req->data_len); + ic->write_flag = false; + ic->opcode = MMC_READ_MULTIPLE_BLOCK; + ic->blocks = req->data_len/RPMB_PACKET_SIZE; + + ret = ioctl(dev, MMC_IOC_CMD, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + memcpy(buf + sizeof(Rpmb_Req), ic->data_ptr, req->data_len); +#else + ic = (Scsi_Ioctl_Command *)malloc(sizeof(Scsi_Ioctl_Command) + req->data_len); + if (ic == NULL) { + log_print(ERROR, "memory allocation fail"); + ret = -1; + break; + } + memcpy(ic->data , buf + sizeof(Rpmb_Req), RPMB_PACKET_SIZE); + ic->outlen = RPMB_PACKET_SIZE; + + /* Security OUT protocol */ + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_OUT, ic); + if (ret != 0) { + log_print(ERROR, " ioctl fail!"); + free(ic); + break; + } + + /* Security IN protocol */ + memset(ic->data, 0x0, req->data_len); + ic->inlen = req->data_len; + ret = ioctl(dev, SCSI_IOCTL_SECURITY_PROTOCOL_IN, ic); + if (ret != 0) { + log_print(ERROR, " ioctl 
fail!"); + free(ic); + break; + } + memcpy(buf + sizeof(Rpmb_Req), ic->data, req->data_len); +#endif + + free(ic); + break; + case GET_ADDRESS: + code = rpmbd_code_for_cipher_string(req->user); + if (!code) { + log_print(ERROR, "Invalid user"); + ret = -1; + break; + } + address = (code - 1) * RPMB_AREA/DATA_SIZE; + memcpy(buf + sizeof(Rpmb_Req), &address, sizeof(address)); + + break; + default: + log_print(ERROR, "Invalid request type!"); + break; + } + + req->ret = ret; + rval = 0; + memcpy(buf, req, sizeof(Rpmb_Req)); + if ((rval = write(connfd, buf, sizeof(buf))) < 0) + log_print(ERROR, "socket write fail!"); + } +con: + close(connfd); + connfd = 0; + } + + if (dev) + close(dev); + + close(server_sock); + unlink("/data/app/rpmbd"); + free(req); + + log_print(INFO,"\n *************** end rpmb daemon *************** \n"); + + return 0; +} diff --git a/rpmbd/rpmbd.h b/rpmbd/rpmbd.h new file mode 100644 index 0000000..362d2d7 --- /dev/null +++ b/rpmbd/rpmbd.h @@ -0,0 +1,172 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __RPMBD_H__ +#define __RPMBD_H__ + +/* Version number of this source */ +#define APP_NAME "rpmbd" +#define RPMB_PACKET_SIZE 512 + +#define GET_WRITE_COUNTER 1 +#define WRITE_DATA 2 +#define READ_DATA 3 +#define GET_ADDRESS 4 + +#define AUTHEN_KEY_PROGRAM_RES 0x0100 +#define AUTHEN_KEY_PROGRAM_REQ 0x0001 +#define RESULT_READ_REQ 0x0005 +#define RPMB_END_ADDRESS 0x4000 +#define ERROR_RETRY 0xff + +#define RPMB_AREA 1024 +#define DATA_SIZE 256 + +int dev; + +const char *program_version = +APP_NAME "\n" +"version 0.0"; + +typedef unsigned char bool; +typedef unsigned short u16; +typedef unsigned char u8; +typedef unsigned int u32; +typedef unsigned long long u64; +#ifdef TARGET_IS_64_BIT +/* define address size type for 32bit and 64bit address */ +typedef uint64_t addr_size_t; +#else +typedef uint32_t addr_size_t; +#endif + +typedef struct rpmb_req { + unsigned int type; + unsigned char user[10]; + unsigned int ret; + unsigned int data_len; + unsigned char rpmb_data[0]; +} Rpmb_Req; + +#ifdef USE_MMC_RPMB + +#define AUTHEN_DATA_WRITE_RES 0x0300 +#define AUTHEN_DATA_READ_RES 0x0400 +#define RELIABLE_WRITE_REQ_SET (1 << 31) + +#define true 1 +#define false 0 + +#define MMC_RSP_PRESENT (1 << 0) +#define MMC_RSP_CRC (1 << 2) /* expect valid crc */ +#define MMC_RSP_OPCODE (1 << 4) /* response contains opcode */ + +#define MMC_RSP_NONE (0) +#define MMC_RSP_R1 (MMC_RSP_PRESENT|MMC_RSP_CRC|MMC_RSP_OPCODE) + +#define MMC_BLOCK_MAJOR 179 +#define MMC_IOC_CMD _IOWR(MMC_BLOCK_MAJOR, 0, MMC_Ioctl_Command) +#define MMC_READ_MULTIPLE_BLOCK 18 /* adtc [31:0] data addr R1 */ +#define MMC_WRITE_MULTIPLE_BLOCK 25 /* adtc R1 */ + +typedef struct mmc_ioc_cmd { + /* Implies direction of data. true = write, false = read */ + int write_flag; + + /* Application-specific command. 
true = precede with CMD55 */ + int is_acmd; + + u32 opcode; + u32 arg; + u32 response[4]; /* CMD response */ + unsigned int flags; + unsigned int blksz; + unsigned int blocks; + + /* + * Sleep at least postsleep_min_us useconds, and at most + * postsleep_max_us useconds *after* issuing command. Needed for + * some read commands for which cards have no other way of indicating + * they're ready for the next command (i.e. there is no equivalent of + * a "busy" indicator for read operations). + */ + unsigned int postsleep_min_us; + unsigned int postsleep_max_us; + + /* + * Override driver-computed timeouts. Note the difference in units! + */ + unsigned int data_timeout_ns; + unsigned int cmd_timeout_ms; + + /* + * For 64-bit machines, the next member, ``__u64 data_ptr``, wants to + * be 8-byte aligned. Make sure this struct is the same size when + * built for 32-bit. + */ + u32 __pad; + + /* DAT buffer */ + u64 data_ptr; +} MMC_Ioctl_Command; + +static void mmc_cmd_init(MMC_Ioctl_Command *cmd) +{ + cmd->is_acmd = false; + cmd->arg = 0; + cmd->flags = MMC_RSP_R1; + cmd->blksz = RPMB_PACKET_SIZE; + cmd->blocks = 1; + cmd->postsleep_min_us = 0; + cmd->postsleep_max_us = 0; + cmd->data_timeout_ns = 0; + cmd->cmd_timeout_ms = 0; +} + +#else +#define SCSI_IOCTL_SECURITY_PROTOCOL_IN 7 +#define SCSI_IOCTL_SECURITY_PROTOCOL_OUT 8 + +typedef struct scsi_ioctl_command { + unsigned int inlen; + unsigned int outlen; + unsigned char data[0]; +} Scsi_Ioctl_Command; +#endif + +struct rpmb_packet { + u16 request; + u16 result; + u16 count; + u16 address; + u32 write_counter; + u8 nonce[16]; + u8 data[256]; + u8 Key_MAC[32]; + u8 stuff[196]; +}; + +/* Logging information */ +enum log_level { + DEBUG = 0, + INFO = 1, + WARNING = 2, + ERROR = 3, + FATAL = 4, + LOG_MAX = 4, +}; + +#endif // __RPMBD_H__ -- 2.20.1
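Usage note (not part of the patch): rpmbd speaks a fixed-size request/response protocol over its UNIX socket. A client sends one 4118-byte buffer that starts with an Rpmb_Req header and reads the same-sized buffer back; the result code comes back in ret and any payload follows the header. The sketch below is a hypothetical GET_ADDRESS client written against the definitions in rpmbd.h above ("unittest" is the only user name the daemon currently maps to a code, and the caller must satisfy the 0660 permissions the daemon sets on /data/app/rpmbd); it is an illustration under those assumptions, not code shipped in this import.

#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/un.h>

#define GET_ADDRESS 4              /* request type, as defined in rpmbd.h */

struct rpmb_req {                  /* client-side copy of Rpmb_Req from rpmbd.h */
    unsigned int type;
    unsigned char user[10];
    unsigned int ret;
    unsigned int data_len;
    unsigned char rpmb_data[0];
};

int main(void)
{
    struct sockaddr_un addr;
    char buf[4118];                /* same transfer size the daemon uses */
    struct rpmb_req *req = (struct rpmb_req *)buf;
    unsigned short address;
    int fd;

    /* build a GET_ADDRESS request for the "unittest" user */
    memset(buf, 0, sizeof(buf));
    req->type = GET_ADDRESS;
    strncpy((char *)req->user, "unittest", sizeof(req->user) - 1);

    fd = socket(AF_UNIX, SOCK_STREAM, 0);
    if (fd < 0)
        return 1;

    memset(&addr, 0, sizeof(addr));
    addr.sun_family = AF_UNIX;
    strncpy(addr.sun_path, "/data/app/rpmbd", sizeof(addr.sun_path) - 1);
    if (connect(fd, (struct sockaddr *)&addr, sizeof(addr)) < 0) {
        close(fd);
        return 1;
    }

    /* the daemon reads one fixed-size buffer and writes one back */
    if (write(fd, buf, sizeof(buf)) < 0 || read(fd, buf, sizeof(buf)) <= 0) {
        close(fd);
        return 1;
    }

    /* on success the daemon places the 16-bit start address right after the header */
    memcpy(&address, buf + sizeof(struct rpmb_req), sizeof(address));
    printf("rpmbd ret=%u, RPMB start address=0x%04x\n", req->ret, address);

    close(fd);
    return 0;
}

For the "unittest" user the daemon computes code 3, so the returned address is (3 - 1) * RPMB_AREA / DATA_SIZE = 8 RPMB blocks from the start of the partition.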