From 867b8b7b729282c7e14e200ca277435329ebe747 Mon Sep 17 00:00:00 2001
From: liyujie <2352380935@qq.com>
Date: Thu, 28 Aug 2025 12:04:19 +0000
Subject: [PATCH] [3/4] Fix the green screen shown in the camera preview when
 a USB camera is opened

---
 android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h |   1 
 android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp |  34 ++++-
 android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp | 282 ++++++++++++++++++++++++++++++++++++++++------
 android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp |  14 ++
 android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c |  15 -
 5 files changed, 287 insertions(+), 59 deletions(-)

diff --git a/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp b/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp
old mode 100755
new mode 100644
index 4c78563..d556aba
--- a/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp
+++ b/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp
@@ -38,8 +38,8 @@
 // Other formats to consider in the future:
 // * V4L2_PIX_FMT_YVU420 (== YV12)
 // * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
-const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
-    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}};  // double braces required in C++11
+const std::array<uint32_t, /*size*/ 3> kSupportedFourCCs{
+    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16, V4L2_PIX_FMT_YUYV}};  // double braces required in C++11
 
 constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times.
 constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds
@@ -274,6 +274,7 @@
         switch (fmt.fourcc) {
             case V4L2_PIX_FMT_Z16: hasDepth = true; break;
             case V4L2_PIX_FMT_MJPEG: hasColor = true; break;
+            case V4L2_PIX_FMT_YUYV: hasColor = true; break;
             default: ALOGW("%s: Unsupported format found", __FUNCTION__);
         }
     }
@@ -705,6 +706,12 @@
     // For V4L2_PIX_FMT_MJPEG
     std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB,
                                             HAL_PIXEL_FORMAT_YCbCr_420_888,
                                             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+    // For V4L2_PIX_FMT_YUYV
+    std::array<int, /*size*/ 3> halYuyvFormats{{HAL_PIXEL_FORMAT_BLOB,
+                                                HAL_PIXEL_FORMAT_YCbCr_420_888,
+                                                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+    bool isMJPEG = false;
+    bool isYUYV = false;
 
     for (const auto& supportedFormat : mSupportedFormats) {
         switch (supportedFormat.fourcc) {
@@ -713,6 +720,11 @@
             break;
         case V4L2_PIX_FMT_MJPEG:
             hasColor = true;
+            isMJPEG = true;
+            break;
+        case V4L2_PIX_FMT_YUYV:
+            hasColor = true;
+            isYUYV = true;
             break;
         default:
             ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
@@ -729,11 +741,19 @@
                 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
     }
     if (hasColor) {
-        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
-                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
-                ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
-                ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        if (isMJPEG) {
+            initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        } else if (isYUYV) {
+            initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_YUYV, halYuyvFormats,
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        }
     }
 
     calculateMinFps(metadata);
diff --git a/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp b/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
old mode 100755
new mode 100644
index aa4dbbc..af11b73
--- a/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
+++ b/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
@@ -41,6 +41,125 @@
 namespace implementation {
 
 namespace {
+
+#define DBG_SAVE_OUTPUT 0
+#if DBG_SAVE_OUTPUT
+static int dq_yuv_count = 0;
+static int awjpec_count = 0;
+const int output_counts = 10;
+
+bool saveBuffers(char *str, void *p, unsigned int length, bool is_oneframe) {
+    int fd;
+    ALOGD("Debug to save a frame!");
+    if ((access(str, 0) != -1) && (is_oneframe)) {
+        ALOGD("File %s already exists!!!\n", str);
+    }
+    if (is_oneframe) {
+        fd = open(str, O_CREAT|O_RDWR|O_TRUNC, 0777);   // save a single frame
+    } else {
+        fd = open(str, O_CREAT|O_RDWR|O_APPEND, 0777);  // append successive frames
+    }
+    if (fd < 0) {
+        ALOGE("Open file error %s", strerror(errno));
+        return false;
+    }
+    if (write(fd, p, length)) {
+        ALOGD("Write file done");
+        close(fd);
+        return true;
+    } else {
+        ALOGE("Write file fail");
+        close(fd);
+        return false;
+    }
+}
+
+#endif
+
+void YUYVToNV21(uint8_t* image_in,
+                uint8_t* image_out,
+                int width,
+                int height) {
+
+    int pixNUM = width * height;
+
+    uint8_t *y = image_out;
+    uint8_t *uv = image_out + pixNUM;
+    uint8_t *start = image_in;
+    int j = 0, k = 0;
+
+    int index = 0;
+    for (j = 0; j < pixNUM * 2; j = j + 2) {
+        *(y + index) = *(start + j);
+        index++;
+    }
+
+    start = image_in;
+    int uv_index = 0;
+    for (j = 0; j < height; j = j + 2) {
+        for (k = j * width * 2 + 1; k < width * 2 * (j + 1); k = k + 4) {
+            *(uv + uv_index) = *(start + k + 2);
+            *(uv + uv_index + 1) = *(start + k);
+            uv_index += 2;
+        }
+    }
+}
+
+int NV21Scale(uint8_t *psrc_buf_y,
+              uint8_t *psrc_buf_uv,
+              int psrc_w,
+              int psrc_h,
+              uint8_t *pdst_buf,
+              uint8_t *pdst_buf_uv,
+              int pdst_w,
+              int pdst_h,
+              libyuv::FilterModeEnum pfmode) {
+    uint8_t *i420_buf1 = (uint8_t *)malloc((psrc_w * psrc_h * 3) >> 1);
+    if (i420_buf1 == nullptr) {
+        ALOGE("malloc i420_buf1 failed!");
+        return -1;
+    }
+    uint8_t *i420_buf2 = (uint8_t *)malloc((pdst_w * pdst_h * 3) >> 1);
+    if (i420_buf2 == nullptr) {
+        ALOGE("malloc i420_buf2 failed!");
+        free(i420_buf1);
+        return -1;
+    }
+
+    libyuv::NV12ToI420(psrc_buf_y, psrc_w,
+                       psrc_buf_uv, psrc_w,
+                       &i420_buf1[0], psrc_w,
+                       &i420_buf1[psrc_w * psrc_h], psrc_w >> 1,
+                       &i420_buf1[(psrc_w * psrc_h * 5) >> 2], psrc_w >> 1,
+                       psrc_w, psrc_h);
+
+    libyuv::I420Scale(&i420_buf1[0], psrc_w,
+                      &i420_buf1[psrc_w * psrc_h], psrc_w >> 1,
+                      &i420_buf1[(psrc_w * psrc_h * 5) >> 2], psrc_w >> 1,
+                      psrc_w, psrc_h,
+                      &i420_buf2[0], pdst_w,
+                      &i420_buf2[pdst_w * pdst_h], pdst_w >> 1,
+                      &i420_buf2[(pdst_w * pdst_h * 5) >> 2], pdst_w >> 1,
+                      pdst_w, pdst_h,
+                      pfmode);
+
+    libyuv::I420ToNV12(&i420_buf2[0], pdst_w,
+                       &i420_buf2[pdst_w * pdst_h], pdst_w >> 1,
+                       &i420_buf2[(pdst_w * pdst_h * 5) >> 2], pdst_w >> 1,
+                       pdst_buf, pdst_w,
+                       pdst_buf_uv, pdst_w,
+                       pdst_w, pdst_h);
+
+    if (i420_buf1 != nullptr) {
+        free(i420_buf1);
+    }
+    if (i420_buf2 != nullptr) {
+        free(i420_buf2);
+    }
+    return 0;
+}
+
 // Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
 static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;
 
@@ -484,7 +603,9 @@
         closeOutputThread();
     }
 
-    Libve_exit2(&mDecoder);
+    if (mDecoder != NULL) {
+        Libve_exit2(&mDecoder);
+    }
 
     Mutex::Autolock _l(mLock);
     // free all buffers
@@ -731,6 +852,13 @@
             }
             configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax);
         }
+    }
+
+    camera_metadata_entry entry =
+            mLatestReqSetting.find(ANDROID_JPEG_ORIENTATION);
+    int32_t jpegOrientation = (entry.count > 0) ? entry.data.i32[0] : 0;
+    if (jpegOrientation != mCfg.jpegOrientation) {
+        mLatestReqSetting.update(ANDROID_JPEG_ORIENTATION, &mCfg.jpegOrientation, 1);
     }
 
     status = importRequestLocked(request, allBufPtrs, allFences);
@@ -1758,9 +1886,8 @@
     EXIFInfo exifInfo;
     memset(&exifInfo, 0, sizeof(EXIFInfo));
 
-    //exifInfo.Orientation = jpegOrientation;
-    exifInfo.Orientation = 0;
-    exifInfo.ThumbWidth = thumbSize.width;
+    exifInfo.Orientation = jpegOrientation;
+    exifInfo.ThumbWidth = thumbSize.width;
     exifInfo.ThumbHeight = thumbSize.height;
 
     time_t t;
@@ -1823,6 +1950,12 @@
     exifInfo.ExposureMode = 0;
 
     int bufSize = 0;
+#if DBG_SAVE_OUTPUT
+    char awjpec_path[100];
+    sprintf(awjpec_path, "/data/camera/jpec_%d_%dx%d.bin",
+            awjpec_count++, mYu12Frame->mWidth, mYu12Frame->mHeight);
+    saveBuffers(awjpec_path, (void*)mYu12FrameLayout.y, mYu12Frame->mWidth * mYu12Frame->mHeight * 3 / 2, true);
+#endif
     ret = AWJpecEnc(&sjpegInfo, &exifInfo, bufPtr, &bufSize);
 
     /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
@@ -1877,7 +2010,9 @@
         return false;
     };
 
-    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
+    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG &&
+        req->frameIn->mFourcc != V4L2_PIX_FMT_Z16 &&
+        req->frameIn->mFourcc != V4L2_PIX_FMT_YUYV) {
         return onDeviceError("%s: do not support V4L2 format %c%c%c%c",
                 __FUNCTION__, req->frameIn->mFourcc & 0xFF,
                 (req->frameIn->mFourcc >> 8) & 0xFF,
@@ -1913,6 +2048,7 @@
                          &parent->mDataInfo,
                          &parent->mVideoConf);
         ATRACE_END();
+        //ALOGD("inDataSize = %d mYu12Frame wxh(%dx%d)", inDataSize, mYu12Frame->mWidth, mYu12Frame->mHeight);
 
         if (res != 0) {
             // For some webcam, the first few V4L2 frames might be malformed...
@@ -1926,10 +2062,25 @@
                 return true;
            }
        }
    }
+    else if (req->frameIn->mFourcc == V4L2_PIX_FMT_YUYV) {
+        ATRACE_BEGIN("YUY2toNV21");
+        YUYVToNV21(inData,
+                   static_cast<uint8_t*>(mYu12FrameLayout.y),
+                   mYu12Frame->mWidth,
+                   mYu12Frame->mHeight);
+        ATRACE_END();
+    }
 
-    ATRACE_BEGIN("Wait for BufferRequest done");
+#if DBG_SAVE_OUTPUT
+    char yuv_path[100];
+    dq_yuv_count = dq_yuv_count % output_counts;
+    sprintf(yuv_path, "/data/camera/nv21_%d_%dx%d.bin",
+            dq_yuv_count++, mYu12Frame->mWidth, mYu12Frame->mHeight);
+    int copySize = req->frameIn->mWidth * req->frameIn->mHeight * 3 / 2;
+    saveBuffers(yuv_path, (void*)mYu12FrameLayout.y, copySize, true);
+#endif
+
     res = waitForBufferRequestDone(&req->buffers);
-    ATRACE_END();
 
     if (res != 0) {
res %d", __FUNCTION__, res); @@ -1971,10 +2122,45 @@ case PixelFormat::Y16: case PixelFormat::YCBCR_420_888: case PixelFormat::YV12:{ - void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize); - int yuv_size = mYu12Frame->mWidth * mYu12Frame->mHeight * 3/2; - std::memcpy(outLayout, (void*)(mYu12FrameLayout.y),yuv_size); - + IMapper::Rect outRect {0, 0, + static_cast<int32_t>(halBuf.width), + static_cast<int32_t>(halBuf.height)}; + YCbCrLayout outLayout = sHandleImporter.lockYCbCr(*(halBuf.bufPtr), + halBuf.usage, + outRect); + ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d" + " w = %d h = %d" + " mYu12Frame w = %d h = %d", + __FUNCTION__, + outLayout.y, + outLayout.cb, + outLayout.cr, + outLayout.yStride, + outLayout.cStride, + outLayout.chromaStep, + halBuf.width, + halBuf.height, + mYu12Frame->mWidth, + mYu12Frame->mHeight); + if (mYu12Frame->mWidth != halBuf.width || mYu12Frame->mHeight != halBuf.height) { + int ret = NV21Scale(static_cast<uint8_t*>(mYu12FrameLayout.y), + static_cast<uint8_t*>(mYu12FrameLayout.cb), + mYu12Frame->mWidth, + mYu12Frame->mHeight, + static_cast<uint8_t*>(outLayout.y), + static_cast<uint8_t*>(outLayout.cr), + halBuf.width, + halBuf.height, + libyuv::FilterMode::kFilterNone); + if (ret != 0) { + ALOGE("%s: NV12Scale failed!!", __FUNCTION__); + } + } else { + int y_size = mYu12Frame->mWidth * mYu12Frame->mHeight; + int c_size = mYu12Frame->mWidth * mYu12Frame->mHeight / 2; + std::memcpy(outLayout.y, mYu12FrameLayout.y, y_size); + std::memcpy(outLayout.cr, mYu12FrameLayout.cb, c_size - 1); + } int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); if (relFence >= 0) { halBuf.acquireFence = relFence; @@ -2694,7 +2880,18 @@ } // Find the smallest format that matches the desired aspect ratio and is wide/high enough SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0}; + + bool hasMjpeg = false; for (const auto& fmt : mSupportedFormats) { + if (fmt.fourcc == V4L2_PIX_FMT_MJPEG) { + hasMjpeg = true; + break; + } + } + for (const auto& fmt : mSupportedFormats) { + if (hasMjpeg && fmt.fourcc != V4L2_PIX_FMT_MJPEG) { + continue; + } uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; if (dim >= maxDim) { float aspectRatio = ASPECT_RATIO(fmt); @@ -2709,6 +2906,9 @@ if (v4l2Fmt.width == 0) { // Cannot find exact good aspect ratio candidate, try to find a close one for (const auto& fmt : mSupportedFormats) { + if (hasMjpeg && fmt.fourcc != V4L2_PIX_FMT_MJPEG) { + continue; + } uint32_t dim = (mCroppingType == VERTICAL) ? 
             uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
             if (dim >= maxDim) {
                 float aspectRatio = ASPECT_RATIO(fmt);
@@ -2738,35 +2938,39 @@
         return Status::INTERNAL_ERROR;
     }
 
+    if (v4l2Fmt.fourcc == V4L2_PIX_FMT_MJPEG) {
+        memset(&mVideoConf, 0, sizeof(mVideoConf));
+        mVideoConf.memops = NULL;
+        mVideoConf.eOutputPixelFormat = PIXEL_FORMAT_NV21;
+        mVideoConf.bDisable3D = 1;
+        mVideoConf.bScaleDownEn = 0;
+        mVideoConf.bRotationEn = 0;
+        mVideoConf.bSecOutputEn = 0;
+        mVideoConf.bDispErrorFrame = 1;
+        mVideoConf.nVbvBufferSize = 0;
+        mVideoConf.nAlignStride = 32;
-    memset(&mVideoConf, 0, sizeof(mVideoConf));
-    mVideoConf.memops = NULL;
-    mVideoConf.eOutputPixelFormat = PIXEL_FORMAT_NV21;
-    mVideoConf.bDisable3D = 1;
-    mVideoConf.bScaleDownEn = 0;
-    mVideoConf.bRotationEn = 0;
-    mVideoConf.bSecOutputEn = 0;
-    mVideoConf.bDispErrorFrame = 1;
-    mVideoConf.nVbvBufferSize = 0;
-    mVideoConf.nAlignStride = 32;
-
-    mVideoInfo.eCodecFormat = VIDEO_CODEC_FORMAT_MJPEG;
-    mVideoInfo.nWidth = v4l2Fmt.width;
-    mVideoInfo.nHeight = v4l2Fmt.height;
-    ALOGD("FUNC:%s, Line:%d width = %d,height = %d,", __FUNCTION__, __LINE__,
-        mVideoInfo.nWidth,mVideoInfo.nHeight);
-    mVideoInfo.nFrameRate = mFrameRate;
-    mVideoInfo.nFrameDuration = 1000 * 1000 / mFrameRate;
-    mVideoInfo.nAspectRatio = 1000;
-    mVideoInfo.bIs3DStream = 0;
-    mVideoInfo.nCodecSpecificDataLen = 0;
-    mVideoInfo.pCodecSpecificData = NULL;
-    if(mDecoder != NULL){
-        Libve_exit2(&mDecoder);
-    }
-    Libve_init2(&mDecoder, &mVideoInfo, &mVideoConf);
-    if(mDecoder == NULL){
-        ALOGE("FUNC:%s, Line:%d ",__FUNCTION__,__LINE__);
+        mVideoInfo.eCodecFormat = VIDEO_CODEC_FORMAT_MJPEG;
+        mVideoInfo.nWidth = v4l2Fmt.width;
+        mVideoInfo.nHeight = v4l2Fmt.height;
+        ALOGD("FUNC:%s, Line:%d width = %d,height = %d,",
+              __FUNCTION__,
+              __LINE__,
+              mVideoInfo.nWidth,
+              mVideoInfo.nHeight);
+        mVideoInfo.nFrameRate = mFrameRate;
+        mVideoInfo.nFrameDuration = 1000 * 1000 / mFrameRate;
+        mVideoInfo.nAspectRatio = 1000;
+        mVideoInfo.bIs3DStream = 0;
+        mVideoInfo.nCodecSpecificDataLen = 0;
+        mVideoInfo.pCodecSpecificData = NULL;
+        if (mDecoder != NULL) {
+            Libve_exit2(&mDecoder);
+        }
+        Libve_init2(&mDecoder, &mVideoInfo, &mVideoConf);
+        if (mDecoder == NULL) {
+            ALOGE("FUNC:%s, Line:%d ", __FUNCTION__, __LINE__);
+        }
     }
 
     Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height};
diff --git a/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp b/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp
old mode 100755
new mode 100644
index e25deff..1ae32d0
--- a/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp
+++ b/android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp
@@ -164,6 +164,7 @@
 const int kDefaultNumStillBuffer = 2;
 const int kDefaultOrientation = 0; // suitable for natural landscape displays like tablet/TV
                                    // For phone devices 270 is better
+const int kDefaultJpegOrientation = 0;
 
 } // anonymous namespace
 
 const char* ExternalCameraConfig::kDefaultCfgPath = "/vendor/etc/external_camera_config.xml";
@@ -284,10 +285,19 @@
         ret.orientation = orientation->IntAttribute("degree", /*Default*/kDefaultOrientation);
     }
 
+    XMLElement *jpegOrientation = deviceCfg->FirstChildElement("JpegOrientation");
+    if (jpegOrientation == nullptr) {
+        ALOGI("%s: no jpeg orientation specified", __FUNCTION__);
+    } else {
+        ret.jpegOrientation = jpegOrientation->IntAttribute("degree",
+                /*Default*/kDefaultJpegOrientation);
+    }
+
     ALOGI("%s: external camera cfg loaded: maxJpgBufSize %d,"
-            " num video buffers %d, num still buffers %d, orientation %d",
+ " num video buffers %d, num still buffers %d, orientation %d jpeg orientation %d", __FUNCTION__, ret.maxJpegBufSize, - ret.numVideoBuffers, ret.numStillBuffers, ret.orientation); + ret.numVideoBuffers, ret.numStillBuffers, + ret.orientation, ret.jpegOrientation); for (const auto& limit : ret.fpsLimits) { ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__, limit.size.width, limit.size.height, limit.fpsUpperBound); diff --git a/android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c b/android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c old mode 100755 new mode 100644 index 6728a34..f086a2a --- a/android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c +++ b/android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c @@ -118,21 +118,14 @@ pVconfig->memops->flush_cache((void*)pPicture->pData1, ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/2); memcpy(outY, (void*)pPicture->pData0, - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)); + pVideoInfo->nWidth * pVideoInfo->nHeight); memcpy((char*)outU, (void*)pPicture->pData1, - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4); + pVideoInfo->nWidth * pVideoInfo->nHeight / 4); memcpy((char*)outV, (void*)(pPicture->pData1 + - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4), - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4); - - pVconfig->memops->flush_cache((char*)outY, - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)); - pVconfig->memops->flush_cache((char*)outU, - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4); - pVconfig->memops->flush_cache((char*)outV, - ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4); + pVideoInfo->nWidth * pVideoInfo->nHeight / 4), + pVideoInfo->nWidth * pVideoInfo->nHeight / 4); ReturnPicture(*mVideoDecoder, pPicture); } } diff --git a/android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h b/android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h old mode 100755 new mode 100644 index 341c622..09eb3e1 --- a/android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h +++ b/android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h @@ -87,6 +87,7 @@ // The value of android.sensor.orientation int32_t orientation; + int32_t jpegOrientation; private: ExternalCameraConfig(); -- Gitblit v1.6.2