diff --git a/aosp_diff/caas/hardware/interfaces/camera/0001-add-property-check-to-load-camera-HAL.patch b/aosp_diff/caas/hardware/interfaces/camera/0001-add-property-check-to-load-camera-HAL.patch new file mode 100644 index 0000000000..f247c26db3 --- /dev/null +++ b/aosp_diff/caas/hardware/interfaces/camera/0001-add-property-check-to-load-camera-HAL.patch @@ -0,0 +1,46 @@ +From b1e2f4f645e3b0aaf0216461f6f0f925f8dc3d72 Mon Sep 17 00:00:00 2001 +From: gkdeepa +Date: Mon, 11 Oct 2021 15:36:21 +0530 +Subject: [PATCH] add property check to load camera HAL + +Tracked-On: +--- + .../2.4/default/ExternalCameraProviderImpl_2_4.cpp | 2 ++ + .../provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp | 8 ++++++++ + 2 files changed, 10 insertions(+) + +diff --git a/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp b/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp +index 64a51f614..23653b336 100644 +--- a/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp ++++ b/camera/provider/2.4/default/ExternalCameraProviderImpl_2_4.cpp +@@ -213,6 +213,8 @@ void ExternalCameraProviderImpl_2_4::addExternalCamera(const char* devName) { + ALOGI("ExtCam: adding %s to External Camera HAL!", devName); + Mutex::Autolock _l(mLock); + std::string deviceName; ++ //set the camera property as External usb cam usage ++ property_set("vendor.camera.external","USBV"); + std::string cameraId = std::to_string(mCfg.cameraIdOffset + + std::atoi(devName + kDevicePrefixLen)); + if (mPreferredHal3MinorVersion == 6) { +diff --git a/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp b/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp +index 4cff1b79a..6a799afc7 100644 +--- a/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp ++++ b/camera/provider/2.4/default/LegacyCameraProviderImpl_2_4.cpp +@@ -57,6 +57,14 @@ bool matchDeviceName(const hidl_string& deviceName, std::string* deviceVersion, + if (cameraId != nullptr) { + *cameraId = sm[2]; + } ++ //get camera property to check if external camera is detected ++ char mode[PROPERTY_VALUE_MAX]; ++ if(property_get("vendor.camera.external", mode, nullptr) > 0){ ++ if (!strcmp(mode, "USBV")){ ++ ALOGE("Donot load legacy camera as USB is PT and video nodes with guest"); ++ return false; ++ } ++ } + return true; + } + return false; +-- +2.17.1 + diff --git a/bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-civ-changes-on-top-of-vhal-changes.patch b/bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-civ-changes-on-top-of-vhal-changes.patch new file mode 100644 index 0000000000..efcd42ee31 --- /dev/null +++ b/bsp_diff/common/vendor/intel/external/project-celadon/camera-vhal/0001-civ-changes-on-top-of-vhal-changes.patch @@ -0,0 +1,1170 @@ +From 4f09f32e2e961d1390a8d768c1389fd1e6579026 Mon Sep 17 00:00:00 2001 +From: gkdeepa +Date: Mon, 4 Oct 2021 11:45:46 +0530 +Subject: [PATCH] civ changes on top of vhal changes + +commit 34c8052e40ceeba0d4c12b5d31cffa57454f5eee +--- + virtualcamera/Android.mk | 51 ++++-- + virtualcamera/include/CameraSocketCommand.h | 2 +- + .../include/CameraSocketServerThread.h | 18 ++ + virtualcamera/include/GrallocModule.h | 32 +++- + virtualcamera/include/VirtualCameraFactory.h | 18 +- + virtualcamera/include/VirtualFakeCamera3.h | 12 +- + virtualcamera/include/fake-pipeline2/Sensor.h | 10 +- + .../src/CameraSocketServerThread.cpp | 159 ++++++++++++++++-- + virtualcamera/src/VirtualCameraFactory.cpp | 55 ++++-- + virtualcamera/src/VirtualFakeCamera3.cpp | 81 ++++++++- + 
virtualcamera/src/fake-pipeline2/Sensor.cpp | 28 ++- + 11 files changed, 409 insertions(+), 57 deletions(-) + +diff --git a/virtualcamera/Android.mk b/virtualcamera/Android.mk +index 8acc35a..019eb78 100644 +--- a/virtualcamera/Android.mk ++++ b/virtualcamera/Android.mk +@@ -12,17 +12,19 @@ + # See the License for the specific language governing permissions and + # limitations under the License. + +-ifeq ($(TARGET_USE_CAMERA_VHAL), true) ++#ifeq ($(TARGET_USE_CAMERA_VHAL), true) + LOCAL_PATH := $(call my-dir) + + include $(CLEAR_VARS) + ++ifneq ($(TARGET_BOARD_PLATFORM), celadon) + ####### Build FFmpeg modules from prebuilt libs ######### + + FFMPEG_PREBUILD := prebuilts/ffmpeg-4.2.2/android-x86_64 + FFMPEG_LIB_PATH := ${FFMPEG_PREBUILD}/lib + + include $(CLEAR_VARS) ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MODULE := libavcodec + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so +@@ -32,6 +34,7 @@ LOCAL_MODULE_CLASS := SHARED_LIBRARIES + include $(BUILD_PREBUILT) + + include $(CLEAR_VARS) ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MODULE := libswresample + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so +@@ -42,6 +45,7 @@ include $(BUILD_PREBUILT) + + include $(CLEAR_VARS) + LOCAL_MODULE := libavutil ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so + LOCAL_PROPRIETARY_MODULE := true +@@ -50,6 +54,7 @@ LOCAL_MODULE_CLASS := SHARED_LIBRARIES + include $(BUILD_PREBUILT) + + include $(CLEAR_VARS) ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MODULE := libavdevice + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so +@@ -59,6 +64,7 @@ LOCAL_MODULE_CLASS := SHARED_LIBRARIES + include $(BUILD_PREBUILT) + + include $(CLEAR_VARS) ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MODULE := libavfilter + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so +@@ -68,6 +74,7 @@ LOCAL_MODULE_CLASS := SHARED_LIBRARIES + include $(BUILD_PREBUILT) + + include $(CLEAR_VARS) ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MODULE := libavformat + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so +@@ -77,6 +84,7 @@ LOCAL_MODULE_CLASS := SHARED_LIBRARIES + include $(BUILD_PREBUILT) + + include $(CLEAR_VARS) ++LOCAL_CHECK_ELF_FILES := false + LOCAL_MODULE := libswscale + LOCAL_MULTILIB := 64 + LOCAL_SRC_FILES := $(FFMPEG_LIB_PATH)/$(LOCAL_MODULE).so +@@ -85,12 +93,18 @@ LOCAL_MODULE_SUFFIX := .so + LOCAL_MODULE_CLASS := SHARED_LIBRARIES + include $(BUILD_PREBUILT) + ########################################################## ++endif + + include $(CLEAR_VARS) + + ##################### Build camera-vhal ####################### + ++ifeq ($(TARGET_BOARD_PLATFORM), celadon) ++LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM) ++else + LOCAL_MODULE := camera.$(TARGET_PRODUCT) ++endif ++ + LOCAL_MULTILIB := 64 + LOCAL_VENDOR_MODULE := true + +@@ -108,9 +122,10 @@ camera_vhal_src := \ + src/Exif.cpp \ + src/Thumbnail.cpp \ + src/CameraSocketServerThread.cpp \ +- src/CameraSocketCommand.cpp \ +- src/CGCodec.cpp +- ++ src/CameraSocketCommand.cpp ++ifneq ($(TARGET_BOARD_PLATFORM), celadon) ++camera_vhal_src += src/CGCodec.cpp ++endif + camera_vhal_c_includes := external/libjpeg-turbo \ + external/libexif \ + external/libyuv/files/include \ +@@ -122,6 +137,9 @@ camera_vhal_c_includes := external/libjpeg-turbo \ + $(LOCAL_PATH)/$(FFMPEG_PREBUILD)/include \ + $(call include-path-for, camera) + ++ifeq ($(TARGET_BOARD_PLATFORM), celadon) 
++camera_vhal_c_includes += $(INTEL_MINIGBM)/cros_gralloc ++endif + camera_vhal_shared_libraries := \ + libbinder \ + libexif \ +@@ -136,14 +154,17 @@ camera_vhal_shared_libraries := \ + libjpeg \ + libcamera_metadata \ + libhardware \ +- libsync \ +- libavcodec \ ++ libsync ++ ++ifneq ($(TARGET_BOARD_PLATFORM), celadon) ++camera_vhal_shared_libraries += libavcodec \ + libavdevice \ + libavfilter \ + libavformat \ + libavutil \ + libswresample \ +- libswscale ++ libswscale ++endif + + camera_vhal_static_libraries := \ + android.hardware.camera.common@1.0-helper \ +@@ -158,6 +179,12 @@ ifeq ($(BOARD_USES_GRALLOC1), true) + camera_vhal_cflags += -DUSE_GRALLOC1 + endif + ++ifeq ($(TARGET_BOARD_PLATFORM), celadon) ++camera_vhal_cflags += -DGRALLOC_MAPPER4 ++else ++camera_vhal_cflags += -DENABLE_FFMPEG ++endif ++ + LOCAL_MODULE_RELATIVE_PATH := ${camera_vhal_module_relative_path} + LOCAL_CFLAGS := ${camera_vhal_cflags} + LOCAL_CPPFLAGS += -std=c++17 +@@ -200,8 +227,8 @@ jpeg_shared_libraries := \ + jpeg_c_includes := external/libjpeg-turbo \ + external/libexif \ + frameworks/native/include \ +- $(LOCAL_PATH)/include \ +- $(LOCAL_PATH)/include/jpeg-stub \ ++ $(LOCAL_PATH)/include \ ++ $(LOCAL_PATH)/include/jpeg-stub \ + + jpeg_src := \ + src/jpeg-stub/Compressor.cpp \ +@@ -217,10 +244,14 @@ LOCAL_SHARED_LIBRARIES := ${jpeg_shared_libraries} + LOCAL_C_INCLUDES += ${jpeg_c_includes} + LOCAL_SRC_FILES := ${jpeg_src} + ++ifeq ($(TARGET_BOARD_PLATFORM), celadon) ++LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM).jpeg ++else + LOCAL_MODULE := camera.$(TARGET_PRODUCT).jpeg ++endif + + include $(BUILD_SHARED_LIBRARY) + + ###################################################### + +-endif # TARGET_USE_CAMERA_VHAL ++#endif # TARGET_USE_CAMERA_VHAL +diff --git a/virtualcamera/include/CameraSocketCommand.h b/virtualcamera/include/CameraSocketCommand.h +index 93d63ef..07b835f 100644 +--- a/virtualcamera/include/CameraSocketCommand.h ++++ b/virtualcamera/include/CameraSocketCommand.h +@@ -32,7 +32,7 @@ namespace android { + + namespace socket { + +-enum class VideoCodecType { kH264 = 0 }; ++enum class VideoCodecType { kH264 = 0, kI420 }; + enum class FrameResolution { k480p = 0, k720p, k1080p }; + + struct CameraFrameInfo { +diff --git a/virtualcamera/include/CameraSocketServerThread.h b/virtualcamera/include/CameraSocketServerThread.h +index cf376d5..e8aa5de 100644 +--- a/virtualcamera/include/CameraSocketServerThread.h ++++ b/virtualcamera/include/CameraSocketServerThread.h +@@ -28,16 +28,31 @@ + #include + #include + #include ++#ifdef ENABLE_FFMPEG + #include "CGCodec.h" ++#endif + #include "CameraSocketCommand.h" ++#include + + namespace android { + ++enum tranSock ++{ ++ UNIX = 0, ++ TCP = 1, ++ VSOCK = 2, ++}; ++ + class VirtualCameraFactory; + class CameraSocketServerThread : public Thread { + public: ++#ifdef ENABLE_FFMPEG + CameraSocketServerThread(std::string suffix, std::shared_ptr decoder, + std::atomic &state); ++#else ++ CameraSocketServerThread(std::string suffix, ++ std::atomic &state); ++#endif + ~CameraSocketServerThread(); + + virtual void requestExit(); +@@ -55,8 +70,11 @@ private: + int mSocketServerFd = -1; + std::string mSocketPath; + int mClientFd = -1; ++ ssize_t size_update = 0; + ++#ifdef ENABLE_FFMPEG + std::shared_ptr mVideoDecoder; ++#endif + std::atomic &mCameraSessionState; + + // maximum size of a H264 packet in any aggregation packet is 65535 bytes. 
+diff --git a/virtualcamera/include/GrallocModule.h b/virtualcamera/include/GrallocModule.h +index ca9cab7..9a1ec1e 100644 +--- a/virtualcamera/include/GrallocModule.h ++++ b/virtualcamera/include/GrallocModule.h +@@ -15,8 +15,10 @@ + + #ifdef USE_GRALLOC1 + #include ++#ifndef GRALLOC_MAPPER4 + #include + #endif ++#endif + + class GrallocModule { + public: +@@ -110,8 +112,10 @@ public: + int32_t fenceFd = -1; + int error = m_gralloc1_unlock(m_gralloc1_device, handle, &fenceFd); + if (!error) { ++#ifndef GRALLOC_MAPPER4 + sync_wait(fenceFd, -1); + close(fenceFd); ++#endif + } + return error; + } +@@ -125,7 +129,25 @@ public: + } + } + } +- ++#ifdef GRALLOC_MAPPER4 ++ int importBuffer(buffer_handle_t handle, buffer_handle_t *outBuffer) { ++ switch (m_major_version) { ++ case 1: ++#ifdef USE_GRALLOC1 ++ { ++ return m_gralloc1_importbuffer(m_gralloc1_device, handle, outBuffer); ++ } ++#endif ++ default: { ++ ALOGE( ++ "[Gralloc] no gralloc module to import; unknown gralloc major " ++ "version (%d)", ++ m_major_version); ++ return -1; ++ } ++ } ++ } ++#endif + private: + GrallocModule() { + const hw_module_t *module = nullptr; +@@ -152,6 +174,11 @@ private: + m_gralloc1_getNumFlexPlanes = + (GRALLOC1_PFN_GET_NUM_FLEX_PLANES)m_gralloc1_device->getFunction( + m_gralloc1_device, GRALLOC1_FUNCTION_GET_NUM_FLEX_PLANES); ++#ifdef GRALLOC_MAPPER4 ++ m_gralloc1_importbuffer = (GRALLOC1_PFN_IMPORT_BUFFER)m_gralloc1_device->getFunction( ++ m_gralloc1_device, GRALLOC1_FUNCTION_IMPORT_BUFFER); ++ ++#endif + break; + #endif + default: +@@ -167,6 +194,9 @@ private: + GRALLOC1_PFN_UNLOCK m_gralloc1_unlock = nullptr; + GRALLOC1_PFN_LOCK_FLEX m_gralloc1_lockflex = nullptr; + GRALLOC1_PFN_GET_NUM_FLEX_PLANES m_gralloc1_getNumFlexPlanes = nullptr; ++#ifdef GRALLOC_MAPPER4 ++ GRALLOC1_PFN_IMPORT_BUFFER m_gralloc1_importbuffer=nullptr; ++#endif + #endif + }; + +diff --git a/virtualcamera/include/VirtualCameraFactory.h b/virtualcamera/include/VirtualCameraFactory.h +index a5b388d..183ce48 100644 +--- a/virtualcamera/include/VirtualCameraFactory.h ++++ b/virtualcamera/include/VirtualCameraFactory.h +@@ -26,8 +26,9 @@ + #include + #include + #include "CameraSocketServerThread.h" ++#ifdef ENABLE_FFMPEG + #include "CGCodec.h" +- ++#endif + namespace android { + + class CameraSocketServerThread; +@@ -178,8 +179,13 @@ private: + * true, it will be created as if it were a camera on the back of the phone. + * Otherwise, it will be front-facing. + */ ++#ifdef ENABLE_FFMPEG + void createFakeCamera(std::shared_ptr socket_server, + std::shared_ptr decoder, bool backCamera); ++#else ++ void createFakeCamera(std::shared_ptr socket_server, ++ bool backCamera); ++#endif + /* + * Waits till remote-props has done setup, timeout after 500ms. 
+ */ +@@ -223,13 +229,19 @@ public: + static struct hw_module_methods_t mCameraModuleMethods; + + private: ++#ifdef ENABLE_FFMPEG + // NV12 Decoder + std::shared_ptr mDecoder; +- ++#endif + // Socket server + std::shared_ptr mSocketServer; +- ++#ifdef ENABLE_FFMPEG ++ // NV12 Decoder ++ std::shared_ptr mDecoder; + bool createSocketServer(std::shared_ptr decoder); ++#else ++ bool createSocketServer(); ++#endif + }; + + }; // end of namespace android +diff --git a/virtualcamera/include/VirtualFakeCamera3.h b/virtualcamera/include/VirtualFakeCamera3.h +index ec45e00..0b17f8a 100644 +--- a/virtualcamera/include/VirtualFakeCamera3.h ++++ b/virtualcamera/include/VirtualFakeCamera3.h +@@ -33,7 +33,9 @@ + #include + #include + #include ++#ifdef ENABLE_FFMPEG + #include "CGCodec.h" ++#endif + #include "CameraSocketServerThread.h" + #include "CameraSocketCommand.h" + +@@ -52,11 +54,16 @@ namespace android { + */ + class VirtualFakeCamera3 : public VirtualCamera3, private Sensor::SensorListener { + public: ++#ifdef ENABLE_FFMPEG + VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, + std::shared_ptr socket_server, + std::shared_ptr decoder, + std::atomic &state); +- ++#else ++ VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, ++ std::shared_ptr socket_server, ++ std::atomic &state); ++#endif + virtual ~VirtualFakeCamera3(); + + /**************************************************************************** +@@ -194,9 +201,10 @@ private: + + // socket server + std::shared_ptr mSocketServer; ++#ifdef ENABLE_FFMPEG + // NV12 Video decoder handle + std::shared_ptr mDecoder = nullptr; +- ++#endif + std::atomic &mCameraSessionState; + + bool createSocketServer(bool facing_back); +diff --git a/virtualcamera/include/fake-pipeline2/Sensor.h b/virtualcamera/include/fake-pipeline2/Sensor.h +index ca198c5..b83e771 100644 +--- a/virtualcamera/include/fake-pipeline2/Sensor.h ++++ b/virtualcamera/include/fake-pipeline2/Sensor.h +@@ -78,8 +78,10 @@ + #include "utils/Thread.h" + #include "utils/Mutex.h" + #include "utils/Timers.h" ++#ifdef ENABLE_FFMPEG + #include "CGCodec.h" + #include "CGLog.h" ++#endif + #include + #include + #include +@@ -98,7 +100,11 @@ class Sensor : private Thread, public virtual RefBase { + public: + // width: Width of pixel array + // height: Height of pixel array ++#ifdef ENABLE_FFMPEG + Sensor(uint32_t width, uint32_t height, std::shared_ptr decoder = nullptr); ++#else ++ Sensor(uint32_t width, uint32_t height); ++#endif + ~Sensor(); + + /* +@@ -270,9 +276,9 @@ private: + // vHAL buffer + int mSrcWidth = 640; + int mSrcHeight = 480; +- ++#ifdef ENABLE_FFMPEG + std::shared_ptr mDecoder = {}; +- ++#endif + bool getNV12Frames(uint8_t *out_buf, int *out_size, std::chrono::milliseconds timeout_ms = 5ms); + void dump_yuv(uint8_t *img1, size_t img1_size, uint8_t *img2, size_t img2_size, + const std::string &filename); +diff --git a/virtualcamera/src/CameraSocketServerThread.cpp b/virtualcamera/src/CameraSocketServerThread.cpp +index 116580c..62d4f25 100644 +--- a/virtualcamera/src/CameraSocketServerThread.cpp ++++ b/virtualcamera/src/CameraSocketServerThread.cpp +@@ -42,6 +42,7 @@ + #include "CameraSocketServerThread.h" + #include "VirtualBuffer.h" + #include "VirtualCameraFactory.h" ++#include + #include + + android::ClientVideoBuffer *android::ClientVideoBuffer::ic_instance = 0; +@@ -49,11 +50,18 @@ android::ClientVideoBuffer *android::ClientVideoBuffer::ic_instance = 0; + namespace android { + + using namespace socket; ++#ifdef ENABLE_FFMPEG 
+ CameraSocketServerThread::CameraSocketServerThread(std::string suffix, + std::shared_ptr decoder, + std::atomic &state) + : Thread(/*canCallJava*/ false), mRunning{true}, mSocketServerFd{-1}, + mVideoDecoder{decoder}, mCameraSessionState{state} { ++#else ++CameraSocketServerThread::CameraSocketServerThread(std::string suffix, ++ std::atomic &state) ++ : Thread(/*canCallJava*/ false), mRunning{true}, mSocketServerFd{-1}, ++ mCameraSessionState{state} { ++#endif + std::string sock_path = "/ipc/camera-socket" + suffix; + char *k8s_env_value = getenv("K8S_ENV"); + mSocketPath = (k8s_env_value != NULL && !strcmp(k8s_env_value, "true")) +@@ -119,9 +127,38 @@ void CameraSocketServerThread::clearBuffer(char *buffer, int width, int height) + } + + bool CameraSocketServerThread::threadLoop() { +- mSocketServerFd = ::socket(AF_UNIX, SOCK_STREAM, 0); +- if (mSocketServerFd < 0) { +- ALOGE("%s:%d Fail to construct camera socket with error: %s", __FUNCTION__, __LINE__, ++ struct sockaddr_un addr_un; ++ memset(&addr_un, 0, sizeof(addr_un)); ++ addr_un.sun_family = AF_UNIX; ++ int ret = 0; ++ int new_client_fd =-1; ++ int so_reuseaddr = 1; ++ struct sockaddr_vm addr_vm ; ++ struct sockaddr_in addr_ip; ++ int trans_mode = 0; ++ char mode[PROPERTY_VALUE_MAX]; ++ ++ if ((property_get("ro.vendor.camera.transference", mode, nullptr) > 0) ){ ++ if (!strcmp(mode, "TCP")) { ++ trans_mode = TCP; ++ }else if (!strcmp(mode, "UNIX")) { ++ trans_mode = UNIX; ++ }else if (!strcmp(mode, "VSOCK")) { ++ trans_mode = VSOCK; ++ } ++ } ++ else{ ++ //Fall back to unix socket by default ++ //trans_mode = UNIX; ++ //Deepa to do ++ trans_mode = VSOCK; ++ ALOGVV("%s: falling back to UNIX as the trans mode is not set",__FUNCTION__); ++ } ++ if(trans_mode == UNIX) ++ { ++ mSocketServerFd = ::socket(AF_UNIX, SOCK_STREAM, 0); ++ if (mSocketServerFd < 0) { ++ ALOGE("%s:%d Fail to construct camera socket with error: %s", __FUNCTION__, __LINE__, + strerror(errno)); + return false; + } +@@ -161,18 +198,93 @@ bool CameraSocketServerThread::threadLoop() { + chmod(mSocketPath.c_str(), mod); + stat(mSocketPath.c_str(), &st); + +- ret = listen(mSocketServerFd, 5); +- if (ret < 0) { +- ALOGE("%s Failed to listen on %s", __FUNCTION__, mSocketPath.c_str()); +- return false; ++ ret = listen(mSocketServerFd, 5); ++ if (ret < 0) { ++ ALOGE("%s Failed to listen on %s", __FUNCTION__, mSocketPath.c_str()); ++ return false; ++ } + } ++ else if(trans_mode == TCP){ ++ int ret = 0; ++ int new_client_fd =-1; ++ int port = 8085; ++ int so_reuseaddr = 1; ++ ++ mSocketServerFd = ::socket(AF_INET, SOCK_STREAM, 0); ++ if (mSocketServerFd < 0) { ++ ALOGE(LOG_TAG " %s:Line:[%d] Fail to construct camera socket with error: [%s]", ++ __FUNCTION__, __LINE__, strerror(errno)); ++ return false; ++ } ++ if (setsockopt(mSocketServerFd, SOL_SOCKET, SO_REUSEADDR, &so_reuseaddr, ++ sizeof(int)) < 0) { ++ ALOGE(LOG_TAG " %s setsockopt(SO_REUSEADDR) failed. : %d\n", __func__, ++ mSocketServerFd); ++ return false; ++ } ++ addr_ip.sin_family = AF_INET; ++ addr_ip.sin_addr.s_addr = htonl(INADDR_ANY); ++ addr_ip.sin_port = htons(port); ++ ++ ret = ::bind(mSocketServerFd, (struct sockaddr *)&addr_ip, ++ sizeof(struct sockaddr_in)); ++ if (ret < 0) { ++ ALOGE(LOG_TAG " %s Failed to bind port(%d). 
ret: %d, %s", __func__, port, ret, ++ strerror(errno)); ++ return false; ++ } ++ ret = listen(mSocketServerFd, 5); ++ if (ret < 0) { ++ ALOGE("%s Failed to listen on ", __FUNCTION__); ++ return false; ++ } ++ }else{ ++ memset(&addr_ip, 0, sizeof(addr_ip)); ++ addr_vm.svm_family = AF_VSOCK; ++ addr_vm.svm_port = 1982; ++ addr_vm.svm_cid = 3; ++ //addr_vm.svm_port = htons(1234); ++ //addr_vm.svm_cid = 4; ++ int ret = 0; ++ int port = 1234; ++ int so_reuseaddr = 1; ++ size_update = 0; ++ mSocketServerFd = ::socket(AF_VSOCK, SOCK_STREAM, 0); ++ if (mSocketServerFd < 0) { ++ ALOGE(LOG_TAG " %s:Line:[%d] Fail to construct camera socket with error: [%s]", ++ __FUNCTION__, __LINE__, strerror(errno)); ++ return false; ++ } ++ ret = ::bind(mSocketServerFd, (struct sockaddr *)&addr_vm, ++ sizeof(struct sockaddr_vm)); ++ if (ret < 0) { ++ ALOGE(LOG_TAG " %s Failed to bind port(%d). ret: %d, %s", __func__, port, ret, ++ strerror(errno)); ++ return false; ++ } ++ ret = listen(mSocketServerFd, 32); ++ if (ret < 0) { ++ ALOGE("%s Failed to listen on ", __FUNCTION__); ++ return false; ++ } + ++ } + while (mRunning) { + ALOGI(LOG_TAG " %s: Wait for camera client to connect. . .", __FUNCTION__); + +- socklen_t alen = sizeof(struct sockaddr_un); +- +- int new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_un, &alen); ++ if (trans_mode == TCP) { ++ socklen_t alen = sizeof(struct sockaddr_in); ++ new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_ip, &alen); ++ } ++ else if(trans_mode == VSOCK){ ++ socklen_t alen = sizeof(struct sockaddr_vm); ++ new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_vm, &alen); ++ } ++ else ++ { ++ socklen_t alen = sizeof(struct sockaddr_un); ++ new_client_fd = ::accept(mSocketServerFd, (struct sockaddr *)&addr_un, &alen); ++ } + ALOGI(LOG_TAG " %s: Accepted client: [%d]", __FUNCTION__, new_client_fd); + if (new_client_fd < 0) { + ALOGE(LOG_TAG " %s: Fail to accept client. 
Error: [%s]", __FUNCTION__, strerror(errno)); +@@ -206,19 +318,37 @@ bool CameraSocketServerThread::threadLoop() { + mClientFd = -1; + clearBuffer(fbuffer, 640, 480); + break; +- } else if (event & POLLIN) { // preview / record ++ } else if ((event & POLLIN) || (trans_mode == VSOCK) || (trans_mode == TCP) ) { // preview / record + // data is available in socket => read data + if (gIsInFrameI420) { + ssize_t size = 0; + +- if ((size = recv(mClientFd, (char *)fbuffer, 460800, MSG_WAITALL)) > 0) { ++ //in VSOCk case the MSG_WAITALL is not helping in getting the complete buffer ++ if(trans_mode == VSOCK) ++ { ++ while(size_update != 460800){ ++ size = recv(mClientFd, (char *)fbuffer+size_update, 460800, 0); ++ size_update += size; ++ if (size_update == 460800){ ++ handle->clientRevCount++; ++ size_update = 0; ++ ALOGVV(LOG_TAG ++ "[I420] %s: Packet rev %d and " ++ "size %zd", ++ __FUNCTION__, handle->clientRevCount, size); ++ } ++ } ++ }else{ ++ if ((size = recv(mClientFd, (char *)fbuffer, 460800, MSG_WAITALL)) > 0) { + handle->clientRevCount++; + ALOGVV(LOG_TAG +- "[I420] %s: Pocket rev %d and " ++ "[I420] %s: Packet rev %d and " + "size %zd", + __FUNCTION__, handle->clientRevCount, size); ++ } + } + } else if (gIsInFrameH264) { // default H264 ++#ifdef ENABLE_FFMPEG + size_t recv_frame_size = 0; + ssize_t size = 0; + if ((size = recv(mClientFd, (char *)&recv_frame_size, sizeof(size_t), +@@ -243,6 +373,7 @@ bool CameraSocketServerThread::threadLoop() { + case CameraSessionState::kCameraOpened: + mCameraSessionState = CameraSessionState::kDecodingStarted; + ALOGVV("%s [H264] Decoding started now.", __func__); ++ [[fallthrough]]; + case CameraSessionState::kDecodingStarted: + mVideoDecoder->decode(mSocketBuffer.data(), mSocketBufferSize); + handle->clientRevCount++; +@@ -258,12 +389,14 @@ bool CameraSocketServerThread::threadLoop() { + case CameraSessionState::kDecodingStopped: + ALOGVV("%s [H264] Decoding is already stopped, skip the packets", + __func__); ++ [[fallthrough]]; + default: + ALOGE("%s [H264] Invalid Camera session state!", __func__); + break; + } + } + } ++#endif + } else { + ALOGE("%s: only H264, I420 input frames supported", __FUNCTION__); + } +diff --git a/virtualcamera/src/VirtualCameraFactory.cpp b/virtualcamera/src/VirtualCameraFactory.cpp +index ce2fecd..8454fa2 100644 +--- a/virtualcamera/src/VirtualCameraFactory.cpp ++++ b/virtualcamera/src/VirtualCameraFactory.cpp +@@ -19,14 +19,15 @@ + * available for emulation. 
+ */ + +-//#define LOG_NDEBUG 0 ++#define LOG_NDEBUG 0 + #define LOG_TAG "VirtualCamera_Factory" + + #include "VirtualCameraFactory.h" + #include "VirtualFakeCamera3.h" + #include "CameraSocketServerThread.h" ++#ifdef ENABLE_FFMPEG + #include "CGCodec.h" +- ++#endif + #include + #include + +@@ -52,7 +53,8 @@ void VirtualCameraFactory::readSystemProperties() { + + property_get("ro.vendor.camera.in_frame_format.i420", prop_val, "false"); + gIsInFrameI420 = !strcmp(prop_val, "true"); +- ++//Deepa to do ++ gIsInFrameI420 = true; + property_get("ro.vendor.camera.decode.vaapi", prop_val, "false"); + gUseVaapi = !strcmp(prop_val, "true"); + +@@ -105,19 +107,33 @@ VirtualCameraFactory::VirtualCameraFactory() + if (gIsInFrameH264) { + // create decoder + ALOGV("%s Creating decoder.", __func__); ++#ifdef ENABLE_FFMPEG + mDecoder = std::make_shared(); ++#endif + } + + // create socket server who push packets to decoder ++#ifdef ENABLE_FFMPEG + createSocketServer(mDecoder); ++#else ++ createSocketServer(); ++#endif + ALOGV("%s socket server created: ", __func__); + + // Create fake cameras, if enabled. + if (isFakeCameraEmulationOn(/* backCamera */ true)) { ++#ifdef ENABLE_FFMPEG + createFakeCamera(mSocketServer, mDecoder, /* backCamera */ true); ++#else ++ createFakeCamera(mSocketServer, /* backCamera */ true); ++#endif + } + if (isFakeCameraEmulationOn(/* backCamera */ false)) { ++#ifdef ENABLE_FFMPEG + createFakeCamera(mSocketServer, mDecoder, /* backCamera */ false); ++#else ++ createFakeCamera(mSocketServer, /* backCamera */ false); ++#endif + } + + ALOGI("%d cameras are being virtual. %d of them are fake cameras.", mVirtualCameraNum, +@@ -125,20 +141,28 @@ VirtualCameraFactory::VirtualCameraFactory() + + mConstructedOK = true; + } +- ++#ifdef ENABLE_FFMPEG + bool VirtualCameraFactory::createSocketServer(std::shared_ptr decoder) { ++#else ++bool VirtualCameraFactory::createSocketServer() { ++#endif + ALOGV("%s: E", __FUNCTION__); + + char id[PROPERTY_VALUE_MAX] = {0}; ++#ifdef ENABLE_FFMPEG + if (property_get("ro.boot.container.id", id, "") > 0) { + mSocketServer = + std::make_shared(id, decoder, std::ref(mCameraSessionState)); +- +- mSocketServer->run("FrontBackCameraSocketServerThread"); + } else + ALOGE("%s: FATAL: container id is not set!!", __func__); + + ALOGV("%s: X", __FUNCTION__); ++#else ++ ALOGE("Deepa CameraSocketServerThread run"); ++ mSocketServer = ++ std::make_shared(id, std::ref(mCameraSessionState)); ++#endif ++ mSocketServer->run("FrontBackCameraSocketServerThread"); + // TODO need to return false if error. + return true; + } +@@ -265,10 +289,14 @@ int VirtualCameraFactory::open_legacy(const struct hw_module_t *module, const ch + /******************************************************************************** + * Internal API + *******************************************************************************/ +- ++#ifdef ENABLE_FFMPEG + void VirtualCameraFactory::createFakeCamera(std::shared_ptr socket_server, + std::shared_ptr decoder, + bool backCamera) { ++#else ++void VirtualCameraFactory::createFakeCamera(std::shared_ptr socket_server, ++ bool backCamera) { ++#endif + int halVersion = getCameraHalVersion(backCamera); + + /* +@@ -282,8 +310,13 @@ void VirtualCameraFactory::createFakeCamera(std::shared_ptr 0) && ++ if ((property_get("ro.vendor.remote.sf.fake_camera", prop, nullptr) > 0) && + (!strcmp(prop, "both") || !strcmp(prop, backCamera ? 
"back" : "front"))) { + return true; + } else { + return false; + } ++ return true; + } + + int VirtualCameraFactory::getCameraHalVersion(bool backCamera) { +@@ -355,7 +390,7 @@ int VirtualCameraFactory::getCameraHalVersion(bool backCamera) { + * doesn't exist, it is assumed we are working with HAL v1. + */ + char prop[PROPERTY_VALUE_MAX]; +- const char *propQuery = backCamera ? "remote.sf.back_camera_hal" : "remote.sf.front_camera_hal"; ++ const char *propQuery = backCamera ? "ro.vendor.remote.sf.back_camera_hal" : "ro.vendor.remote.sf.front_camera_hal"; + if (property_get(propQuery, prop, nullptr) > 0) { + char *propEnd = prop; + int val = strtol(prop, &propEnd, 10); +diff --git a/virtualcamera/src/VirtualFakeCamera3.cpp b/virtualcamera/src/VirtualFakeCamera3.cpp +index 55cd0ac..3743cd1 100644 +--- a/virtualcamera/src/VirtualFakeCamera3.cpp ++++ b/virtualcamera/src/VirtualFakeCamera3.cpp +@@ -22,6 +22,7 @@ + #include + + //#define LOG_NNDEBUG 0 ++//#define LOG_NDEBUG 0 + #define LOG_TAG "VirtualFakeCamera3: " + #include + #include +@@ -52,6 +53,10 @@ + using namespace std; + using namespace chrono; + using namespace chrono_literals; ++buffer_handle_t bufferHandle; ++buffer_handle_t bufferHandle1; ++buffer_handle_t bufferHandle2; ++buffer_handle_t bufferHandle_3; + + namespace android { + +@@ -96,7 +101,7 @@ const float VirtualFakeCamera3::kExposureWanderMax = 1; + /** + * Camera device lifecycle methods + */ +- ++#ifdef ENABLE_FFMPEG + VirtualFakeCamera3::VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, + std::shared_ptr socket_server, + std::shared_ptr decoder, +@@ -106,6 +111,15 @@ VirtualFakeCamera3::VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_ + mSocketServer(socket_server), + mDecoder(decoder), + mCameraSessionState{state} { ++#else ++VirtualFakeCamera3::VirtualFakeCamera3(int cameraId, bool facingBack, struct hw_module_t *module, ++ std::shared_ptr socket_server, ++ std::atomic &state) ++ : VirtualCamera3(cameraId, module), ++ mFacingBack(facingBack), ++ mSocketServer(socket_server), ++ mCameraSessionState{state} { ++#endif + ALOGI("Constructing virtual fake camera 3: ID %d, facing %s", mCameraID, + facingBack ? "back" : "front"); + +@@ -170,6 +184,15 @@ status_t VirtualFakeCamera3::sendCommandToClient(socket::CameraOperation operati + ALOGE("%s: We're not connected to client yet!", __FUNCTION__); + return INVALID_OPERATION; + } ++ char mode[PROPERTY_VALUE_MAX]; ++ //incase vsock add yuv command ++ //Deepa :to do ++ //if ((property_get("ro.vendor.camera.transference", mode, nullptr) > 0)) ++ { ++ // if (!strcmp(mode, "VSOCK")) ++ ALOGE("%s:! Deepa sending Vsock ingo!", __FUNCTION__); ++ camera_config.frame_info.codec_type = VideoCodecType::kI420; ++ } + ALOGI("%s: Camera client fd %d!", __FUNCTION__, client_fd); + if (send(client_fd, &camera_config, sizeof(camera_config), 0) < 0) { + ALOGE(LOG_TAG "%s: Failed to send Camera Open command to client, err %s ", __FUNCTION__, +@@ -189,6 +212,7 @@ status_t VirtualFakeCamera3::connectCamera(hw_device_t **device) { + + if (gIsInFrameH264) { + const char *device_name = gUseVaapi ? "vaapi" : nullptr; ++#ifdef ENABLE_FFMPEG + // initialize decoder + if (mDecoder->init(VideoCodecType::kH264, FrameResolution::k480p, device_name, 0) < 0) { + ALOGE("%s VideoDecoder init failed. %s decoding", __func__, +@@ -197,8 +221,8 @@ status_t VirtualFakeCamera3::connectCamera(hw_device_t **device) { + ALOGI("%s VideoDecoder init done. Device: %s", __func__, + !device_name ? 
"SW" : device_name); + } ++#endif + } +- + ALOGI("%s Calling sendCommandToClient", __func__); + status_t ret; + if ((ret = sendCommandToClient(socket::CameraOperation::kOpen)) != OK) { +@@ -209,7 +233,11 @@ status_t VirtualFakeCamera3::connectCamera(hw_device_t **device) { + mCameraSessionState = socket::CameraSessionState::kCameraOpened; + + // create sensor who gets decoded frames and forwards them to framework ++#ifdef ENABLE_FFMPEG + mSensor = new Sensor(mSensorWidth, mSensorHeight, mDecoder); ++#else ++ mSensor = new Sensor(mSensorWidth, mSensorHeight); ++#endif + mSensor->setSensorListener(this); + + status_t res = mSensor->startUp(); +@@ -1014,7 +1042,19 @@ status_t VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ + if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { + if (destBuf.format == HAL_PIXEL_FORMAT_YCbCr_420_888) { + android_ycbcr ycbcr = android_ycbcr(); ++ bufferHandle2 = native_handle_clone(*(destBuf.buffer)); ++#ifdef GRALLOC_MAPPER4 ++ res = GrallocModule::getInstance().importBuffer(bufferHandle2, &bufferHandle1); ++ //res = GrallocModule::getInstance().importBuffer(*(destBuf.buffer), &bufferHandle1); ++ if (res!= OK) { ++ // ALOGE("%s: Gralloc importBuffer failed",__FUNCTION__); ++ } ++ res = GrallocModule::getInstance().lock_ycbcr(bufferHandle2, ++ //res = GrallocModule::getInstance().lock_ycbcr(bufferHandle1, ++#else + res = GrallocModule::getInstance().lock_ycbcr(*(destBuf.buffer), ++#endif ++ + #ifdef USE_GRALLOC1 + GRALLOC1_PRODUCER_USAGE_CPU_WRITE, + #else +@@ -1030,7 +1070,20 @@ status_t VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ + res = INVALID_OPERATION; + } + } else { ++#ifdef GRALLOC_MAPPER4 ++ bufferHandle_3 = native_handle_clone(*(destBuf.buffer)); ++ res = GrallocModule::getInstance().importBuffer(bufferHandle_3, &bufferHandle); ++ //res = GrallocModule::getInstance().importBuffer(*(destBuf.buffer), &bufferHandle); ++ if (res!= OK) { ++ ALOGE("%s: Gralloc importBuffer failed",__FUNCTION__); ++ } ++ ++ res = GrallocModule::getInstance().lock(bufferHandle_3, ++ //res = GrallocModule::getInstance().lock(bufferHandle, ++#else + res = GrallocModule::getInstance().lock(*(destBuf.buffer), ++#endif ++ + #ifdef USE_GRALLOC1 + GRALLOC1_PRODUCER_USAGE_CPU_WRITE, + #else +@@ -1062,8 +1115,23 @@ status_t VirtualFakeCamera3::processCaptureRequest(camera3_capture_request *requ + + sensorBuffers->push_back(destBuf); + buffers->push_back(srcBuf); ++#ifdef GRALLOC_MAPPER4 ++ if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) ++ { ++ GrallocModule::getInstance().unlock(bufferHandle2); ++ native_handle_close(bufferHandle2); ++ //GrallocModule::getInstance().release_handle(bufferHandle1); ++ //GrallocModule::getInstance().unlock(bufferHandle1); ++ } ++ else ++ { ++ GrallocModule::getInstance().unlock(bufferHandle_3); ++ native_handle_close(bufferHandle_3); ++ //GrallocModule::getInstance().release_handle(bufferHandle); ++ // GrallocModule::getInstance().unlock(bufferHandle); ++ } ++#endif + } +- + /** + * Wait for JPEG compressor to not be busy, if needed + */ +@@ -2697,8 +2765,9 @@ bool VirtualFakeCamera3::ReadoutThread::threadLoop() { + res); + // fallthrough for cleanup + } ++#ifndef GRALLOC_MAPPER4 + GrallocModule::getInstance().unlock(*(buf->buffer)); +- ++#endif + buf->status = goodBuffer ? 
CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; + buf->acquire_fence = -1; + buf->release_fence = -1; +@@ -2788,9 +2857,9 @@ bool VirtualFakeCamera3::ReadoutThread::threadLoop() { + + void VirtualFakeCamera3::ReadoutThread::onJpegDone(const StreamBuffer &jpegBuffer, bool success) { + Mutex::Autolock jl(mJpegLock); +- ++#ifndef GRALLOC_MAPPER4 + GrallocModule::getInstance().unlock(*(jpegBuffer.buffer)); +- ++#endif + mJpegHalBuffer.status = success ? CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR; + mJpegHalBuffer.acquire_fence = -1; + mJpegHalBuffer.release_fence = -1; +diff --git a/virtualcamera/src/fake-pipeline2/Sensor.cpp b/virtualcamera/src/fake-pipeline2/Sensor.cpp +index 068af5c..577cd91 100644 +--- a/virtualcamera/src/fake-pipeline2/Sensor.cpp ++++ b/virtualcamera/src/fake-pipeline2/Sensor.cpp +@@ -25,7 +25,9 @@ + #endif + + #include "fake-pipeline2/Sensor.h" ++#ifdef ENABLE_FFMPEG + #include "CGCodec.h" ++#endif + #include + #include + #include +@@ -111,16 +113,22 @@ float sqrtf_approx(float r) { + + return *(float *)(&r_i); + } +- ++#ifdef ENABLE_FFMPEG + Sensor::Sensor(uint32_t width, uint32_t height, std::shared_ptr decoder) ++#else ++Sensor::Sensor(uint32_t width, uint32_t height) ++#endif + : Thread(false), + mResolution{width, height}, + mActiveArray{0, 0, width, height}, + mRowReadoutTime(kFrameDurationRange[0] / height), + mExposureTime(kFrameDurationRange[0] - kMinVerticalBlank), + mFrameDuration(kFrameDurationRange[0]), +- mScene(width, height, kElectronsPerLuxSecond), +- mDecoder{decoder} {} ++ mScene(width, height, kElectronsPerLuxSecond) ++#ifdef ENABLE_FFMPEG ++ ,mDecoder{decoder} ++#endif ++ {} + + Sensor::~Sensor() { shutDown(); } + +@@ -474,7 +482,7 @@ void Sensor::dump_yuv(uint8_t *img1, size_t img1_size, uint8_t *img2, size_t img + fwrite(img2, img2_size, 1, f); + fclose(f); + } +- ++#ifdef ENABLE_FFMPEG + bool Sensor::getNV12Frames(uint8_t *out_buf, int *out_size, + std::chrono::milliseconds timeout_ms /* default 5ms */) { + auto cg_video_frame = std::make_shared(); +@@ -514,7 +522,7 @@ bool Sensor::getNV12Frames(uint8_t *out_buf, int *out_size, + + return true; + } +- ++#endif + void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t height) { + ALOGVV("%s: E", __FUNCTION__); + +@@ -539,7 +547,7 @@ void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h + + // Initialize to the size based on resolution. + out_size = destPrevBufSize; +- ++#ifdef ENABLE_FFMPEG + if (gIsInFrameH264) { + if (handle->clientBuf[handle->clientRevCount % 1].decoded) { + // Note: bufData already assigned in the function start +@@ -556,7 +564,7 @@ void Sensor::captureRGBA(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h + ulock.unlock(); + } + } +- ++#endif + int src_size = mSrcWidth * mSrcHeight; + int dstFrameSize = width * height; + +@@ -793,7 +801,7 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h + + // Initialize to the size based on resolution. 
+ out_size = mDstBufSize; +- ++#ifdef ENABLE_FFMPEG + if (gIsInFrameH264) { + if (handle->clientBuf[handle->clientRevCount % 1].decoded) { + // Note: bufData already assigned in the function start +@@ -809,7 +817,7 @@ void Sensor::captureNV12(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h + ulock.unlock(); + } + } +- ++#endif + // For default resolotion 640x480p + if (width == (uint32_t)mSrcWidth && height == (uint32_t)mSrcHeight) { + if (gIsInFrameI420) { +@@ -1002,6 +1010,7 @@ void Sensor::captureJPEG(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h + //Initialize to the size based on resolution. + out_size = mDstJpegBufSize; + ++#ifdef ENABLE_FFMPEG + if (gIsInFrameH264) { + if (handle->clientBuf[handle->clientRevCount % 1].decoded) { + //Note: bufData already assigned in the function start +@@ -1017,6 +1026,7 @@ void Sensor::captureJPEG(uint8_t *img, uint32_t gain, uint32_t width, uint32_t h + ulock.unlock(); + } + } ++#endif + + //For default resolution 640x480p + if (width == (uint32_t)mSrcWidth && height == (uint32_t)mSrcHeight) { +-- +2.17.1 +
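
For reference, the VSOCK path added above in CameraSocketServerThread.cpp has the vHAL bind AF_VSOCK port 1982 (CID 3) and read raw 640x480 I420 frames of 460800 bytes, looping on recv() because, as the patch comment notes, MSG_WAITALL does not return the complete buffer over VSOCK. A minimal host-side sender matching that protocol might look like the sketch below; the guest CID (3), port (1982), and frame geometry (640*480*3/2 = 460800 bytes) are taken from the patch, while the standalone program itself and the flat gray test frame are illustrative assumptions, not part of these patches.

// vsock_i420_sender.cpp - illustrative host-side sender sketch, not part of the patches.
#include <cstdint>
#include <cstdio>
#include <vector>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/vm_sockets.h>

int main() {
    constexpr unsigned kGuestCid  = 3;                 // assumption: guest CID hard-coded in the patch
    constexpr unsigned kVsockPort = 1982;              // port hard-coded in CameraSocketServerThread.cpp
    constexpr size_t   kFrameSize = 640 * 480 * 3 / 2; // one 480p I420 frame = 460800 bytes

    int fd = socket(AF_VSOCK, SOCK_STREAM, 0);
    if (fd < 0) { perror("socket"); return 1; }

    sockaddr_vm addr = {};
    addr.svm_family = AF_VSOCK;
    addr.svm_cid    = kGuestCid;
    addr.svm_port   = kVsockPort;
    if (connect(fd, reinterpret_cast<sockaddr *>(&addr), sizeof(addr)) < 0) {
        perror("connect");
        close(fd);
        return 1;
    }

    // Single mid-gray I420 frame; a real streamer would loop over captured camera frames.
    std::vector<uint8_t> frame(kFrameSize, 0x80);
    size_t sent = 0;
    while (sent < frame.size()) {
        // send() may return short writes, mirroring the partial-read loop on the vHAL side.
        ssize_t n = send(fd, frame.data() + sent, frame.size() - sent, 0);
        if (n <= 0) { perror("send"); break; }
        sent += static_cast<size_t>(n);
    }

    close(fd);
    return 0;
}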