liyujie
2025-08-28 867b8b7b729282c7e14e200ca277435329ebe747
[3/4] Fix the green screen shown on the camera preview UI when opening a USB camera
5 files changed, 346 lines changed
android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp (34 lines changed)
android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp (282 lines changed)
android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp (14 lines changed)
android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c (15 lines changed)
android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h (1 line changed)
android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDevice.cpp
old mode 100755
new mode 100644
@@ -38,8 +38,8 @@
 // Other formats to consider in the future:
 // * V4L2_PIX_FMT_YVU420 (== YV12)
 // * V4L2_PIX_FMT_YVYU (YVYU: can be converted to YV12 or other YUV420_888 formats)
-const std::array<uint32_t, /*size*/ 2> kSupportedFourCCs{
-    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16}}; // double braces required in C++11
+const std::array<uint32_t, /*size*/ 3> kSupportedFourCCs{
+    {V4L2_PIX_FMT_MJPEG, V4L2_PIX_FMT_Z16, V4L2_PIX_FMT_YUYV}}; // double braces required in C++11

 constexpr int MAX_RETRY = 5; // Allow retry v4l2 open failures a few times.
 constexpr int OPEN_RETRY_SLEEP_US = 100000; // 100ms * MAX_RETRY = 0.5 seconds
@@ -274,6 +274,7 @@
         switch (fmt.fourcc) {
             case V4L2_PIX_FMT_Z16: hasDepth = true; break;
             case V4L2_PIX_FMT_MJPEG: hasColor = true; break;
+            case V4L2_PIX_FMT_YUYV: hasColor = true; break;
             default: ALOGW("%s: Unsupported format found", __FUNCTION__);
         }
     }
@@ -705,6 +706,12 @@
     // For V4L2_PIX_FMT_MJPEG
     std::array<int, /*size*/ 3> halFormats{{HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
                                             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+    // For V4L2_PIX_FMT_YUYV
+    std::array<int, /*size*/ 3> halYuyvFormats{{HAL_PIXEL_FORMAT_BLOB,
+                                                HAL_PIXEL_FORMAT_YCbCr_420_888,
+                                                HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}};
+    bool isMJPEG = false;
+    bool isYUYV = false;

     for (const auto& supportedFormat : mSupportedFormats) {
         switch (supportedFormat.fourcc) {
@@ -713,6 +720,11 @@
                 break;
             case V4L2_PIX_FMT_MJPEG:
                 hasColor = true;
+                isMJPEG = true;
+                break;
+            case V4L2_PIX_FMT_YUYV:
+                hasColor = true;
+                isYUYV = true;
                 break;
             default:
                 ALOGW("%s: format %c%c%c%c is not supported!", __FUNCTION__,
@@ -729,11 +741,19 @@
                                       ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
     }
     if (hasColor) {
-        initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
-                                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-                                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
-                                    ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
-                                    ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        if (isMJPEG) {
+            initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_MJPEG, halFormats,
+                                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                                        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        } else if (isYUYV) {
+            initOutputCharskeysByFormat(metadata, V4L2_PIX_FMT_YUYV, halYuyvFormats,
+                                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                                        ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                                        ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
+        }
     }

     calculateMinFps(metadata);
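The format switches above report unsupported fourcc codes by unpacking them byte by byte with the "%c%c%c%c" pattern. For reference, a minimal standalone sketch of that unpacking; fourccToString is a hypothetical helper written for illustration, not part of the patch:

#include <cstdint>
#include <string>

// Hypothetical helper: unpack a V4L2 fourcc into the four ASCII characters
// logged by the ALOGW("%s: format %c%c%c%c is not supported!", ...) call above.
static std::string fourccToString(uint32_t fourcc) {
    std::string s(4, '\0');
    s[0] = static_cast<char>(fourcc & 0xFF);
    s[1] = static_cast<char>((fourcc >> 8) & 0xFF);
    s[2] = static_cast<char>((fourcc >> 16) & 0xFF);
    s[3] = static_cast<char>((fourcc >> 24) & 0xFF);
    return s;  // V4L2_PIX_FMT_YUYV (0x56595559) prints as "YUYV"
}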
android/hardware/interfaces/camera/device/3.4/default/ExternalCameraDeviceSession.cpp
old mode 100755
new mode 100644
@@ -41,6 +41,125 @@
 namespace implementation {

 namespace {
+
+#define DBG_SAVE_OUTPUT 0
+#if DBG_SAVE_OUTPUT
+static int dq_yuv_count = 0;
+static int awjpec_count = 0;
+const int output_counts = 10;
+
+bool saveBuffers(char *str, void *p, unsigned int length, bool is_oneframe) {
+    int fd;
+    ALOGD("Debug to save a frame!");
+    if ((access(str, 0) != -1) && (is_oneframe)) {
+        ALOGD("File %s is exists!!!\n", str);
+    }
+    if (is_oneframe) {
+        fd = open(str, O_CREAT|O_RDWR|O_TRUNC, 0777); // save one frame data
+    } else {
+        fd = open(str, O_CREAT|O_RDWR|O_APPEND, 0777); // save more frames
+    }
+    if (!fd) {
+        ALOGE("Open file error %s", strerror(errno));
+        return false;
+    }
+    if (write(fd, p, length)) {
+        ALOGE("Write file fail %s", strerror(errno));
+        close(fd);
+        return true;
+    } else {
+        ALOGE("Write file fail");
+        close(fd);
+        return false;
+    }
+}
+
+
+#endif
+
+void YUYVToNV21(uint8_t* image_in,
+                uint8_t* image_out,
+                int width,
+                int height) {
+
+    int pixNUM = width * height;
+
+    uint8_t *y = image_out;
+    uint8_t *uv = image_out + pixNUM;
+    uint8_t *start = image_in;
+    int j = 0, k = 0;
+
+    int index = 0;
+    for (j = 0; j < pixNUM * 2; j = j + 2) {
+        *(y + index) = *(start + j);
+        index++;
+    }
+
+    start = image_in;
+    int uv_index = 0;
+    for (j = 0; j < height; j = j + 2) {
+        for (k = j * width * 2 + 1; k < width * 2 * (j + 1); k = k + 4) {
+            *(uv + uv_index) = *(start + k + 2);
+            *(uv + uv_index + 1) = *(start + k);
+            uv_index += 2;
+        }
+    }
+}
+
+int NV21Scale(uint8_t *psrc_buf_y,
+              uint8_t *psrc_buf_uv,
+              int psrc_w,
+              int psrc_h,
+              uint8_t *pdst_buf,
+              uint8_t *pdst_buf_uv,
+              int pdst_w,
+              int pdst_h,
+              libyuv::FilterModeEnum pfmode) {
+    uint8_t *i420_buf1 = (uint8_t *)malloc((psrc_w * psrc_h * 3) >> 1);
+    if (i420_buf1 == nullptr) {
+        ALOGE("malloc i420_buf1 failed!");
+        return -1;
+    }
+    uint8_t *i420_buf2 = (uint8_t *)malloc((pdst_w * pdst_h * 3) >> 1);
+    if (i420_buf2 == nullptr) {
+        ALOGE("malloc i420_buf2 failed!");
+        return -1;
+    }
+
+    libyuv::NV12ToI420(psrc_buf_y, psrc_w,
+                       psrc_buf_uv, psrc_w,
+                       &i420_buf1[0], psrc_w,
+                       &i420_buf1[psrc_w * psrc_h], psrc_w >> 1,
+                       &i420_buf1[(psrc_w * psrc_h * 5) >> 2], psrc_w >> 1,
+                       psrc_w, psrc_h);
+
+    libyuv::I420Scale(&i420_buf1[0], psrc_w,
+                      &i420_buf1[psrc_w * psrc_h], psrc_w >> 1,
+                      &i420_buf1[(psrc_w * psrc_h * 5) >> 2], psrc_w >> 1,
+                      psrc_w, psrc_h,
+                      &i420_buf2[0], pdst_w,
+                      &i420_buf2[pdst_w * pdst_h], pdst_w >> 1,
+                      &i420_buf2[(pdst_w * pdst_h * 5) >> 2], pdst_w >> 1,
+                      pdst_w, pdst_h,
+                      pfmode);
+
+    libyuv::I420ToNV12(&i420_buf2[0], pdst_w,
+                       &i420_buf2[pdst_w * pdst_h], pdst_w >> 1,
+                       &i420_buf2[(pdst_w * pdst_h * 5) >> 2], pdst_w >> 1,
+                       pdst_buf, pdst_w,
+                       pdst_buf_uv, pdst_w,
+                       pdst_w, pdst_h);
+
+    if (i420_buf1 != nullptr) {
+        free(i420_buf1);
+    }
+    if (i420_buf2 != nullptr) {
+        free(i420_buf2);
+    }
+    return 0;
+}
+
+
 // Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
 static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

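The newly added YUYVToNV21() above converts packed 4:2:2 YUYV into semi-planar NV21: every other byte is a luma sample, and the chroma of every second row is kept with V written before U. Below is a minimal standalone sketch of the same layout transformation on a tiny test frame; yuyvToNv21 is a local re-implementation for illustration only, and the 4x2 frame and buffer sizes are assumptions:

#include <cstdint>
#include <cstdio>
#include <vector>

// Packed YUYV (YUY2): 2 bytes per pixel, [Y0 U0 Y1 V0] covers two horizontal pixels.
// NV21: a full-resolution Y plane followed by an interleaved V/U plane,
// so the output needs width * height * 3 / 2 bytes.
static void yuyvToNv21(const uint8_t* in, uint8_t* out, int width, int height) {
    uint8_t* y = out;
    uint8_t* vu = out + width * height;
    // Every other byte of the packed stream is a luma sample.
    for (int i = 0; i < width * height; ++i) {
        y[i] = in[2 * i];
    }
    // Keep chroma from every second row only (4:2:2 -> 4:2:0); V first for NV21.
    int vuIndex = 0;
    for (int row = 0; row < height; row += 2) {
        const uint8_t* line = in + row * width * 2;
        for (int col = 0; col < width * 2; col += 4) {
            vu[vuIndex++] = line[col + 3];  // V
            vu[vuIndex++] = line[col + 1];  // U
        }
    }
}

int main() {
    const int w = 4, h = 2;
    std::vector<uint8_t> yuyv(w * h * 2), nv21(w * h * 3 / 2);
    for (size_t i = 0; i < yuyv.size(); ++i) yuyv[i] = static_cast<uint8_t>(i);
    yuyvToNv21(yuyv.data(), nv21.data(), w, h);
    for (uint8_t b : nv21) printf("%u ", b);  // 8 luma bytes, then V/U pairs
    printf("\n");
    return 0;
}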
@@ -484,7 +603,9 @@
         closeOutputThread();
     }

-    Libve_exit2(&mDecoder);
+    if (mDecoder != NULL) {
+        Libve_exit2(&mDecoder);
+    }

     Mutex::Autolock _l(mLock);
     // free all buffers
@@ -731,6 +852,13 @@
             }
             configureV4l2StreamLocked(mV4l2StreamingFmt, requestFpsMax);
         }
+    }
+
+    camera_metadata_entry entry =
+            mLatestReqSetting.find(ANDROID_JPEG_ORIENTATION);
+    int jpegOrientation = entry.data.u8[0];
+    if (jpegOrientation != mCfg.jpegOrientation) {
+        mLatestReqSetting.update(ANDROID_JPEG_ORIENTATION, &mCfg.jpegOrientation, 1);
     }

     status = importRequestLocked(request, allBufPtrs, allFences);
@@ -1758,9 +1886,8 @@

     EXIFInfo exifInfo;
     memset(&exifInfo, 0, sizeof(EXIFInfo));
-    //exifInfo.Orientation = jpegOrientation;
-    exifInfo.Orientation = 0;
-    exifInfo.ThumbWidth = thumbSize.width;
+    exifInfo.Orientation = jpegOrientation;
+    exifInfo.ThumbWidth = thumbSize.width;
     exifInfo.ThumbHeight = thumbSize.height;

     time_t t;
@@ -1823,6 +1950,12 @@
     exifInfo.ExposureMode = 0;

     int bufSize = 0;
+#if DBG_SAVE_OUTPUT
+    char awjpec_path[100];
+    sprintf(awjpec_path, "/data/camera/jpec_%d_%dx%d.bin",
+            awjpec_count++, mYu12Frame->mWidth, mYu12Frame->mHeight);
+    saveBuffers(awjpec_path, (void*)mYu12FrameLayout.y, mYu12Frame->mWidth * mYu12Frame->mHeight * 3 / 2, true);
+#endif
     ret = AWJpecEnc(&sjpegInfo, &exifInfo, bufPtr, &bufSize);

     /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
@@ -1877,7 +2010,9 @@
         return false;
     };

-    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG && req->frameIn->mFourcc != V4L2_PIX_FMT_Z16) {
+    if (req->frameIn->mFourcc != V4L2_PIX_FMT_MJPEG &&
+        req->frameIn->mFourcc != V4L2_PIX_FMT_Z16 &&
+        req->frameIn->mFourcc != V4L2_PIX_FMT_YUYV) {
         return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                              req->frameIn->mFourcc & 0xFF,
                              (req->frameIn->mFourcc >> 8) & 0xFF,
@@ -1913,6 +2048,7 @@
                          &parent->mDataInfo,
                          &parent->mVideoConf);
         ATRACE_END();
+        //ALOGD("inDataSize = %d mYu12Frame wxh(%dx%d)", inDataSize, mYu12Frame->mWidth, mYu12Frame->mHeight);

         if (res != 0) {
             // For some webcam, the first few V4L2 frames might be malformed...
@@ -1926,10 +2062,25 @@
                 return true;
             }
         }
+        else if (req->frameIn->mFourcc == V4L2_PIX_FMT_YUYV) {
+            ATRACE_BEGIN("YUY2toNV21");
+            YUYVToNV21(inData,
+                       static_cast<uint8_t*>(mYu12FrameLayout.y),
+                       mYu12Frame->mWidth,
+                       mYu12Frame->mHeight);
+            ATRACE_END();
+        }

-        ATRACE_BEGIN("Wait for BufferRequest done");
+#if DBG_SAVE_OUTPUT
+        char yuv_path[100];
+        dq_yuv_count = dq_yuv_count % output_counts;
+        sprintf(yuv_path, "/data/camera/nv21_%d_%dx%d.bin",
+                dq_yuv_count++, mYu12Frame->mWidth, mYu12Frame->mHeight);
+        int copySize = req->frameIn->mWidth * req->frameIn->mHeight * 3 / 2;
+        saveBuffers(yuv_path, (void*)mYu12FrameLayout.y, copySize, true);
+#endif
+
         res = waitForBufferRequestDone(&req->buffers);
-        ATRACE_END();

         if (res != 0) {
             ALOGE("%s: wait for BufferRequest done failed! res %d", __FUNCTION__, res);
@@ -1971,10 +2122,45 @@
         case PixelFormat::Y16:
         case PixelFormat::YCBCR_420_888:
         case PixelFormat::YV12: {
-            void* outLayout = sHandleImporter.lock(*(halBuf.bufPtr), halBuf.usage, inDataSize);
-            int yuv_size = mYu12Frame->mWidth * mYu12Frame->mHeight * 3/2;
-            std::memcpy(outLayout, (void*)(mYu12FrameLayout.y), yuv_size);
-
+            IMapper::Rect outRect {0, 0,
+                                   static_cast<int32_t>(halBuf.width),
+                                   static_cast<int32_t>(halBuf.height)};
+            YCbCrLayout outLayout = sHandleImporter.lockYCbCr(*(halBuf.bufPtr),
+                                                              halBuf.usage,
+                                                              outRect);
+            ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d"
+                  " w = %d h = %d"
+                  " mYu12Frame w = %d h = %d",
+                  __FUNCTION__,
+                  outLayout.y,
+                  outLayout.cb,
+                  outLayout.cr,
+                  outLayout.yStride,
+                  outLayout.cStride,
+                  outLayout.chromaStep,
+                  halBuf.width,
+                  halBuf.height,
+                  mYu12Frame->mWidth,
+                  mYu12Frame->mHeight);
+            if (mYu12Frame->mWidth != halBuf.width || mYu12Frame->mHeight != halBuf.height) {
+                int ret = NV21Scale(static_cast<uint8_t*>(mYu12FrameLayout.y),
+                                    static_cast<uint8_t*>(mYu12FrameLayout.cb),
+                                    mYu12Frame->mWidth,
+                                    mYu12Frame->mHeight,
+                                    static_cast<uint8_t*>(outLayout.y),
+                                    static_cast<uint8_t*>(outLayout.cr),
+                                    halBuf.width,
+                                    halBuf.height,
+                                    libyuv::FilterMode::kFilterNone);
+                if (ret != 0) {
+                    ALOGE("%s: NV12Scale failed!!", __FUNCTION__);
+                }
+            } else {
+                int y_size = mYu12Frame->mWidth * mYu12Frame->mHeight;
+                int c_size = mYu12Frame->mWidth * mYu12Frame->mHeight / 2;
+                std::memcpy(outLayout.y, mYu12FrameLayout.y, y_size);
+                std::memcpy(outLayout.cr, mYu12FrameLayout.cb, c_size - 1);
+            }
            int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
            if (relFence >= 0) {
                halBuf.acquireFence = relFence;
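The equal-size branch above copies a full luma plane plus an interleaved chroma plane into the locked output buffer. A quick sketch of the plane arithmetic behind the y_size and c_size locals, using 1920x1080 as an assumed example resolution:

#include <cstdio>

int main() {
    // Semi-planar 4:2:0 (NV12/NV21): a full-resolution Y plane followed by an
    // interleaved chroma plane holding one chroma pair per 2x2 pixel block.
    const int width = 1920, height = 1080;
    const int ySize = width * height;        // 2073600 bytes of luma
    const int cSize = width * height / 2;    // 1036800 bytes of interleaved chroma
    printf("Y plane: %d bytes, chroma plane: %d bytes, total: %d bytes\n",
           ySize, cSize, ySize + cSize);     // total = width * height * 3 / 2
    return 0;
}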
@@ -2694,7 +2880,18 @@
     }
     // Find the smallest format that matches the desired aspect ratio and is wide/high enough
     SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0};
+
+    bool hasMjpeg = false;
     for (const auto& fmt : mSupportedFormats) {
+        if (fmt.fourcc == V4L2_PIX_FMT_MJPEG) {
+            hasMjpeg = true;
+            break;
+        }
+    }
+    for (const auto& fmt : mSupportedFormats) {
+        if (hasMjpeg && fmt.fourcc != V4L2_PIX_FMT_MJPEG) {
+            continue;
+        }
         uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
         if (dim >= maxDim) {
             float aspectRatio = ASPECT_RATIO(fmt);
@@ -2709,6 +2906,9 @@
     if (v4l2Fmt.width == 0) {
         // Cannot find exact good aspect ratio candidate, try to find a close one
         for (const auto& fmt : mSupportedFormats) {
+            if (hasMjpeg && fmt.fourcc != V4L2_PIX_FMT_MJPEG) {
+                continue;
+            }
             uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
             if (dim >= maxDim) {
                 float aspectRatio = ASPECT_RATIO(fmt);
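The two hunks above make the size search prefer MJPEG: if the camera reports any MJPEG format, only MJPEG entries are considered, otherwise every reported format (for example a YUYV-only webcam) stays in play. A minimal standalone sketch of that selection rule; the Fmt struct and candidateFormats are simplified stand-ins for SupportedV4L2Format and the member loops, not the HAL's own code:

#include <cstdint>
#include <vector>

// Simplified stand-in for the HAL's SupportedV4L2Format entries.
struct Fmt {
    uint32_t fourcc;
    uint32_t width;
    uint32_t height;
};

// Returns the formats the size search should consider: only MJPEG entries when
// the device exposes MJPEG at all, every entry otherwise.
static std::vector<Fmt> candidateFormats(const std::vector<Fmt>& supported,
                                         uint32_t mjpegFourcc) {
    bool hasMjpeg = false;
    for (const auto& fmt : supported) {
        if (fmt.fourcc == mjpegFourcc) {
            hasMjpeg = true;
            break;
        }
    }
    std::vector<Fmt> out;
    for (const auto& fmt : supported) {
        if (hasMjpeg && fmt.fourcc != mjpegFourcc) {
            continue;
        }
        out.push_back(fmt);
    }
    return out;
}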
@@ -2738,35 +2938,39 @@
         return Status::INTERNAL_ERROR;
     }

+    if (v4l2Fmt.fourcc == V4L2_PIX_FMT_MJPEG) {
+        memset(&mVideoConf, 0, sizeof(mVideoConf));
+        mVideoConf.memops = NULL;
+        mVideoConf.eOutputPixelFormat = PIXEL_FORMAT_NV21;
+        mVideoConf.bDisable3D = 1;
+        mVideoConf.bScaleDownEn = 0;
+        mVideoConf.bRotationEn = 0;
+        mVideoConf.bSecOutputEn = 0;
+        mVideoConf.bDispErrorFrame = 1;
+        mVideoConf.nVbvBufferSize = 0;
+        mVideoConf.nAlignStride = 32;

-    memset(&mVideoConf, 0, sizeof(mVideoConf));
-    mVideoConf.memops = NULL;
-    mVideoConf.eOutputPixelFormat = PIXEL_FORMAT_NV21;
-    mVideoConf.bDisable3D = 1;
-    mVideoConf.bScaleDownEn = 0;
-    mVideoConf.bRotationEn = 0;
-    mVideoConf.bSecOutputEn = 0;
-    mVideoConf.bDispErrorFrame = 1;
-    mVideoConf.nVbvBufferSize = 0;
-    mVideoConf.nAlignStride = 32;
-
-    mVideoInfo.eCodecFormat = VIDEO_CODEC_FORMAT_MJPEG;
-    mVideoInfo.nWidth = v4l2Fmt.width;
-    mVideoInfo.nHeight = v4l2Fmt.height;
-    ALOGD("FUNC:%s, Line:%d width = %d,height = %d,", __FUNCTION__, __LINE__,
-          mVideoInfo.nWidth,mVideoInfo.nHeight);
-    mVideoInfo.nFrameRate = mFrameRate;
-    mVideoInfo.nFrameDuration = 1000 * 1000 / mFrameRate;
-    mVideoInfo.nAspectRatio = 1000;
-    mVideoInfo.bIs3DStream = 0;
-    mVideoInfo.nCodecSpecificDataLen = 0;
-    mVideoInfo.pCodecSpecificData = NULL;
-    if(mDecoder != NULL){
-        Libve_exit2(&mDecoder);
-    }
-    Libve_init2(&mDecoder, &mVideoInfo, &mVideoConf);
-    if(mDecoder == NULL){
-        ALOGE("FUNC:%s, Line:%d ",__FUNCTION__,__LINE__);
+        mVideoInfo.eCodecFormat = VIDEO_CODEC_FORMAT_MJPEG;
+        mVideoInfo.nWidth = v4l2Fmt.width;
+        mVideoInfo.nHeight = v4l2Fmt.height;
+        ALOGD("FUNC:%s, Line:%d width = %d,height = %d,",
+              __FUNCTION__,
+              __LINE__,
+              mVideoInfo.nWidth,
+              mVideoInfo.nHeight);
+        mVideoInfo.nFrameRate = mFrameRate;
+        mVideoInfo.nFrameDuration = 1000 * 1000 / mFrameRate;
+        mVideoInfo.nAspectRatio = 1000;
+        mVideoInfo.bIs3DStream = 0;
+        mVideoInfo.nCodecSpecificDataLen = 0;
+        mVideoInfo.pCodecSpecificData = NULL;
+        if (mDecoder != NULL) {
+            Libve_exit2(&mDecoder);
+        }
+        Libve_init2(&mDecoder, &mVideoInfo, &mVideoConf);
+        if (mDecoder == NULL) {
+            ALOGE("FUNC:%s, Line:%d ", __FUNCTION__, __LINE__);
+        }
     }

     Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height};
android/hardware/interfaces/camera/device/3.4/default/ExternalCameraUtils.cpp
old mode 100755
new mode 100644
@@ -164,6 +164,7 @@
 const int kDefaultNumStillBuffer = 2;
 const int kDefaultOrientation = 0; // suitable for natural landscape displays like tablet/TV
                                    // For phone devices 270 is better
+const int kDefaultJpegOrientation = 0;
 } // anonymous namespace

 const char* ExternalCameraConfig::kDefaultCfgPath = "/vendor/etc/external_camera_config.xml";
@@ -284,10 +285,19 @@
         ret.orientation = orientation->IntAttribute("degree", /*Default*/kDefaultOrientation);
     }

+    XMLElement *jpegOrientation = deviceCfg->FirstChildElement("JpegOrientation");
+    if (jpegOrientation == nullptr) {
+        ALOGI("%s: no jpeg orientation specified", __FUNCTION__);
+    } else {
+        ret.jpegOrientation = jpegOrientation->IntAttribute("degree",
+                                                            /*Default*/kDefaultJpegOrientation);
+    }
+
     ALOGI("%s: external camera cfg loaded: maxJpgBufSize %d,"
-          " num video buffers %d, num still buffers %d, orientation %d",
+          " num video buffers %d, num still buffers %d, orientation %d jpeg orientation %d",
           __FUNCTION__, ret.maxJpegBufSize,
-          ret.numVideoBuffers, ret.numStillBuffers, ret.orientation);
+          ret.numVideoBuffers, ret.numStillBuffers,
+          ret.orientation, ret.jpegOrientation);
     for (const auto& limit : ret.fpsLimits) {
         ALOGI("%s: fpsLimitList: %dx%d@%f", __FUNCTION__,
               limit.size.width, limit.size.height, limit.fpsUpperBound);
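The config loader above reads an optional JpegOrientation element from external_camera_config.xml with tinyxml2, falling back to 0 degrees when it is absent. A minimal sketch of that lookup is below; the inline XML string and the surrounding Device element are assumptions made for illustration, while the element name, the degree attribute, and the default come from the patch:

#include <cstdio>
#include <tinyxml2.h>

int main() {
    // Assumed shape of the relevant fragment of external_camera_config.xml.
    const char* xml =
            "<Device>"
            "    <JpegOrientation degree=\"90\"/>"
            "</Device>";

    tinyxml2::XMLDocument doc;
    if (doc.Parse(xml) != tinyxml2::XML_SUCCESS) {
        return 1;
    }
    tinyxml2::XMLElement* deviceCfg = doc.FirstChildElement("Device");
    // Same pattern as the patch: a missing element keeps the default of 0 degrees.
    int jpegOrientation = 0;
    tinyxml2::XMLElement* elem = deviceCfg->FirstChildElement("JpegOrientation");
    if (elem != nullptr) {
        jpegOrientation = elem->IntAttribute("degree", /*defaultValue=*/0);
    }
    printf("jpegOrientation = %d\n", jpegOrientation);
    return 0;
}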
android/hardware/interfaces/camera/device/3.4/default/Libve_Decoder2.c
old mode 100755
new mode 100644
@@ -118,21 +118,14 @@
         pVconfig->memops->flush_cache((void*)pPicture->pData1,
                 ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/2);
         memcpy(outY, (void*)pPicture->pData0,
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight));
+                pVideoInfo->nWidth * pVideoInfo->nHeight);
         memcpy((char*)outU,
                 (void*)pPicture->pData1,
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4);
+                pVideoInfo->nWidth * pVideoInfo->nHeight / 4);
         memcpy((char*)outV,
                 (void*)(pPicture->pData1 +
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4),
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4);
-
-        pVconfig->memops->flush_cache((char*)outY,
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight));
-        pVconfig->memops->flush_cache((char*)outU,
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4);
-        pVconfig->memops->flush_cache((char*)outV,
-                ALIGN_16B(pVideoInfo->nWidth)*ALIGN_16B(pVideoInfo->nHeight)/4);
+                pVideoInfo->nWidth * pVideoInfo->nHeight / 4),
+                pVideoInfo->nWidth * pVideoInfo->nHeight / 4);
         ReturnPicture(*mVideoDecoder, pPicture);
     }
 }
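The change above copies only width * height worth of luma (and a quarter of that per chroma plane) instead of the 16-byte-aligned sizes, which keeps the memcpy within tightly packed destination planes when a dimension is not a multiple of 16. A small sketch of the size difference; the ALIGN_16B definition here is an assumption for illustration, the real macro comes from the decoder headers:

#include <cstdio>

// Assumed definition for illustration; the real macro lives in the decoder headers.
#define ALIGN_16B(x) (((x) + 15) & ~15)

int main() {
    // 1080 is not a multiple of 16, so the aligned copy size exceeds the
    // tightly packed plane size that width * height describes.
    const int w = 1920, h = 1080;
    printf("aligned Y size: %d bytes\n", ALIGN_16B(w) * ALIGN_16B(h));        // 1920 * 1088 = 2088960
    printf("packed  Y size: %d bytes\n", w * h);                              // 1920 * 1080 = 2073600
    printf("aligned chroma plane: %d bytes\n", ALIGN_16B(w) * ALIGN_16B(h) / 4);
    printf("packed  chroma plane: %d bytes\n", w * h / 4);
    return 0;
}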
android/hardware/interfaces/camera/device/3.4/default/include/ext_device_v3_4_impl/ExternalCameraUtils.h
old mode 100755
new mode 100644
@@ -87,6 +87,7 @@

     // The value of android.sensor.orientation
     int32_t orientation;
+    int32_t jpegOrientation;

 private:
     ExternalCameraConfig();