/*
 * Copyright (c) 2021 by Allwinnertech Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "CameraHALv3_CameraStream"
#include "camera_stream.h"

#include <fcntl.h>
#include <hardware/camera3.h>
#include <inttypes.h>
#include <string.h>
#include <sunxi_camera_v2.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>

#include <cstdlib>
#include <memory>

#include "CameraMetadata.h"
#include "metadata/metadata_common.h"
#include "stream_manager.h"

namespace v4l2_camera_hal {

template <typename T>
int CameraStream::inRange(T low, T high, T val) {
  return ((val - high) * (val - low) <= 0);
}
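
// Maps a crop rectangle expressed in sensor active-array coordinates onto
// this stream. When the stream is no larger than the 16-byte-aligned pixel
// array, the rectangle is shrunk by the width/height ratios and re-centered;
// otherwise it is passed through unchanged.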
void CameraStream::calProperRectSize(cam_crop_rect_t srcRect,
                                     cam_crop_rect_t* dstRect) {
  std::array<int32, 2> mActiveSize = {640, 480};
  int res = SingleTagValue(mStaticMetadata.get(),
                           ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
                           &mActiveSize);
  if (res) {
    HAL_LOGE("Cannot find ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE.");
  }
  int size_width = ALIGN_16B(mActiveSize[0]);
  int size_height = ALIGN_16B(mActiveSize[1]);
  double ratio_w = static_cast<double>(mWidth) /
                   static_cast<double>(size_width);
  double ratio_h = static_cast<double>(mHeight) /
                   static_cast<double>(size_height);
  if (inRange(0.0, 1.0, ratio_w) && inRange(0.0, 1.0, ratio_h)) {
    dstRect->width = ratio_w * srcRect.width;
    dstRect->height = ratio_h * srcRect.height;
    dstRect->left = (mWidth - dstRect->width) / 2;
    dstRect->top = (mHeight - dstRect->height) / 2;
  } else {
    dstRect->width = srcRect.width;
    dstRect->height = srcRect.height;
    dstRect->left = srcRect.left;
    dstRect->top = srcRect.top;
  }
}
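
// Factory method: picks the concrete stream class from the V4L2 stream
// serial (main vs. sub-0) and the mirror flag; unknown serials yield nullptr.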
CameraStream* CameraStream::NewCameraStream(std::shared_ptr<V4L2Stream> stream,
                                            int isBlob,
                                            int isMirror) {
  HAL_LOG_ENTER();

  STREAM_SERIAL ss_tmp = stream->getStreamSerial();
  HAL_LOGD("Device getStreamSerial %d, isBlob:%d, isMirror:%d.",
           ss_tmp, isBlob, isMirror);

  if (isMirror) {
    if (ss_tmp == MAIN_STREAM) {
      return new CameraMainMirrorStream(stream, isBlob);
    } else if (ss_tmp == SUB_0_STREAM) {
      return new CameraSubMirrorStream(stream, isBlob);
    } else {
      return nullptr;
    }
  } else {
    if (ss_tmp == MAIN_STREAM) {
      return new CameraMainStream(stream, isBlob);
    } else if (ss_tmp == SUB_0_STREAM) {
      return new CameraSubStream(stream, isBlob);
    } else {
      return nullptr;
    }
  }
}

CameraStream::CameraStream(std::shared_ptr<V4L2Stream> stream, int isBlob):
    isBlobFlag(isBlob),
    stream_(stream) {
  HAL_LOG_ENTER();
  manager_ = nullptr;
  mMetadata = nullptr;
  mStreamOn = false;
  initialized = false;
  mWidth = 0;
  mHeight = 0;
  mFormat = 0;
  mUsage = 0;
}

int CameraStream::configurateManager(StreamManager* manager) {
  HAL_LOG_ENTER();
  int res = 0;
  manager_ = manager;
  return res;
}

CameraStream::~CameraStream() {
  HAL_LOG_ENTER();
}
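
// Gathers the JPEG-related tags from the request metadata (GPS info,
// orientation, quality, thumbnail settings) into a JPEG_ENC_t and hands the
// source buffer to the hardware encoder. Cropping is enabled for still
// captures (BLOB) that are not video snapshots.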
int CameraStream::encodebuffer(void* dst_addr,
                               void* src_addr,
                               unsigned long mJpegBufferSizes) {
  int res = -ENOMEM;
  JPEG_ENC_t jpeg_enc;
  memset(&jpeg_enc, 0, sizeof(jpeg_enc));

  std::array<uint8_t, 33> mGpsMethod = {0};
  res = SingleTagValue(mMetadata,
                       ANDROID_JPEG_GPS_PROCESSING_METHOD,
                       &mGpsMethod);
  HAL_LOGV("%s: jpeg info:mGpsMethod:%s.",
           mStreamType.c_str(), reinterpret_cast<char*>(&mGpsMethod[0]));

  if (0 != mGpsMethod[0]) {
    std::array<double, 3> gpsCoordinates = {0, 0, 0};
    res = SingleTagValue(mMetadata,
                         ANDROID_JPEG_GPS_COORDINATES,
                         &gpsCoordinates);
    HAL_LOGV("%s: jpeg info:latitude:%f, longitude:%f, altitude:%f.",
             mStreamType.c_str(),
             gpsCoordinates[0],
             gpsCoordinates[1],
             gpsCoordinates[2]);
    int64 mGpsTimestamp = 0;
    res = SingleTagValue(mMetadata,
                         ANDROID_JPEG_GPS_TIMESTAMP,
                         &mGpsTimestamp);
    HAL_LOGV("%s: jpeg info:mGpsTimestamp:%" PRId64 ".",
             mStreamType.c_str(), mGpsTimestamp);

    jpeg_enc.enable_gps = 1;
    jpeg_enc.gps_latitude = gpsCoordinates[0];
    jpeg_enc.gps_longitude = gpsCoordinates[1];
    jpeg_enc.gps_altitude = gpsCoordinates[2];
    jpeg_enc.gps_timestamp = mGpsTimestamp;
    for (int i = 0; i < 33; i++) {
      jpeg_enc.gps_processing_method[i] = mGpsMethod[i];
    }
  } else {
    jpeg_enc.enable_gps = 0;
  }

  int32 mOrientation = 0;
  res = SingleTagValue(mMetadata,
                       ANDROID_JPEG_ORIENTATION,
                       &mOrientation);
  jpeg_enc.rotate = mOrientation;

  byte mQuality = 90;
  res = SingleTagValue(mMetadata,
                       ANDROID_JPEG_QUALITY,
                       &mQuality);
  HAL_LOGV("%s: jpeg info:mQuality:%d.", mStreamType.c_str(), mQuality);
  jpeg_enc.quality = mQuality;

  byte mThumbnailQuality = 90;
  res = SingleTagValue(mMetadata,
                       ANDROID_JPEG_THUMBNAIL_QUALITY,
                       &mThumbnailQuality);
  HAL_LOGV("%s: jpeg info:mThumbnailQuality:%d.",
           mStreamType.c_str(), mThumbnailQuality);

  std::array<int32, 2> mThumbnailSize = {320, 240};
  res = SingleTagValue(mMetadata,
                       ANDROID_JPEG_THUMBNAIL_SIZE,
                       &mThumbnailSize);
  HAL_LOGV("%s: jpeg info:mThumbnailSize:%dx%d.",
           mStreamType.c_str(),
           mThumbnailSize[0],
           mThumbnailSize[1]);

  jpeg_enc.pic_w = mWidth;
  jpeg_enc.pic_h = mHeight;
  jpeg_enc.thumbWidth = mThumbnailSize[0];
  jpeg_enc.thumbHeight = mThumbnailSize[1];

  uint8_t capture_intent = 0;
  if (mMetadata->exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
    capture_intent =
        mMetadata->find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
  }
  if (isBlobFlag && capture_intent !=
      ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT) {
    jpeg_enc.enable_crop = 1;
  } else {
    jpeg_enc.enable_crop = 0;
  }
  res = stream_->EncodeBuffer(dst_addr, src_addr, mJpegBufferSizes, jpeg_enc);
  if (res) {
    HAL_LOGE("%s: Device EncodeBuffer failed.", mStreamType.c_str());
  }
  return res;
}

int CameraStream::copy_ycbcr_buffer(android_ycbcr* dst_addr_ycbcr,
                                    void* src_addr) {
  HAL_LOGV("%s: enter", mStreamType.c_str());
  return stream_->CopyYCbCrBuffer(dst_addr_ycbcr, src_addr);
}

int CameraStream::start() {
  HAL_LOGV("%s: enter", mStreamType.c_str());
  int res = 0;
  if (initialized) {
    // Stream on first.
    if (!mStreamOn) {
      res = stream_->StreamOn();
      if (res) {
        HAL_LOGE("%s: Device failed to turn on stream.", mStreamType.c_str());
        return res;
      }
      mStreamOn = true;
    }
  }
  return res;
}

int CameraStream::stop() {
  HAL_LOGV("%s: enter", mStreamType.c_str());
  int res = 0;
  if (initialized) {
    // The stream must be on before it can be turned off.
    if (!mStreamOn) {
      HAL_LOGE("Device failed to turn off stream, turn on first.");
    } else {
      res = stream_->StreamOff();
      if (res) {
        HAL_LOGE("Device failed to turn off stream.");
        return res;
      }
      mStreamOn = false;
    }
  }
  return res;
}

int CameraStream::setFormat(uint32_t width,
                            uint32_t height,
                            int format,
                            uint32_t usage) {
  HAL_LOGD("%s:%d x %d, format:%d(%s), usage:%d(%s).",
           mStreamType.c_str(),
           width,
           height,
           format,
           getHalPixelFormatString(format).c_str(),
           usage,
           getGrallocUsageString(usage).c_str());
  mWidth = width;
  mHeight = height;
  mFormat = format;
  mUsage = usage;
  return 0;
}

int CameraStream::enqueueBuffer() {
  HAL_LOGV("%s", mStreamType.c_str());
  int res = 0;
  if (initialized) {
    res = stream_->EnqueueBuffer();
    if (res) {
      HAL_LOGE("Device failed to enqueue buffer.");
      return res;
    }
  }
  return res;
}

int CameraStream::dequeueBuffer(void** src_addr, struct timeval* ts) {
  HAL_LOGV("%s: enter", mStreamType.c_str());
  int res = 0;
  if (initialized) {
    res = stream_->WaitCameraReady();
    if (res != 0) {
      HAL_LOGW("%s: wait v4l2 buffer time out in dequeueBuffer!",
               mStreamType.c_str());
      return res;
    }

    res = stream_->DequeueBuffer(src_addr, ts);
    if (res) {
      HAL_LOGE("%s: Device failed to DequeueBuffer.", mStreamType.c_str());
      return res;
    }
  }
  return res;
}
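
// Translates the HAL flash mode into the V4L2 flash LED mode and, for
// still-capture modes, arms the take-picture flash control.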
int CameraStream::setIspFlash(int CameraFlashMode) {
  int res = 0;
  if (CameraFlashMode == CAM_FLASH_MODE_ON) {
    HAL_LOGV("%s: CAM_FLASH_MODE_ON", mStreamType.c_str());
    stream_->SetTakePictureCtrl(V4L2_TAKE_PICTURE_FLASH);
    stream_->SetFlashMode(V4L2_FLASH_LED_MODE_TORCH);
  } else if (CameraFlashMode == CAM_FLASH_MODE_AUTO) {
    HAL_LOGV("%s: CAM_FLASH_MODE_AUTO", mStreamType.c_str());
    stream_->SetTakePictureCtrl(V4L2_TAKE_PICTURE_FLASH);
    stream_->SetFlashMode(V4L2_FLASH_LED_MODE_AUTO);
  } else if (CameraFlashMode == CAM_FLASH_MODE_TORCH) {
    HAL_LOGV("%s: CAM_FLASH_MODE_TORCH", mStreamType.c_str());
    stream_->SetFlashMode(V4L2_FLASH_LED_MODE_TORCH);
  } else if (CameraFlashMode == CAM_FLASH_MODE_VIDEO_OFF) {
    HAL_LOGV("%s: CAM_FLASH_MODE_VIDEO_OFF", mStreamType.c_str());
    stream_->SetFlashMode(V4L2_FLASH_LED_MODE_NONE);
  } else if (CameraFlashMode == CAM_FLASH_MODE_OFF) {
    HAL_LOGV("%s: CAM_FLASH_MODE_OFF", mStreamType.c_str());
    stream_->SetTakePictureCtrl(V4L2_TAKE_PICTURE_STOP);
    stream_->SetFlashMode(V4L2_FLASH_LED_MODE_NONE);
  }
  return res;
}

int CameraStream::getFocusStatus() {
  return stream_->GetAutoFocusStatus();
}

int CameraStream::setFocusInit() {
  return stream_->SetAutoFocusInit();
}

int CameraStream::setIspFocus(int cameraFocusMode) {
  int res = -1;
  if (cameraFocusMode == CAM_FOCUS_MODE_STOP) {
    res = stream_->SetAutoFocusStop();
  } else if (cameraFocusMode == CAM_FOCUS_MODE_START) {
    res = stream_->SetAutoFocusStart();
  }
  return res;
}

int CameraStream::setIspCrop(cam_crop_rect_t camCropRect) {
  int res = -1;
  HAL_LOGV("%s left:%d top:%d width:%d height:%d",
           mStreamType.c_str(),
           camCropRect.left,
           camCropRect.top,
           camCropRect.width,
           camCropRect.height);
  if (!isBlobFlag) {
    res = stream_->SetCropRect(camCropRect);
  } else {
    cam_crop_rect_t jpegCropRect;
    memset(&jpegCropRect, 0, sizeof(cam_crop_rect_t));
    calProperRectSize(camCropRect, &jpegCropRect);
    res = stream_->SetJpegCropRect(jpegCropRect);
  }
  return res;
}

int CameraStream::setIspFocusRegions(cam_rect_t cam_regions) {
  HAL_LOG_ENTER();
  return stream_->SetAutoFocusRegions(cam_regions);
}

int CameraStream::flush() {
  int res = 0;
  if (initialized) {
    // The stream must be on before it can be flushed.
    if (!mStreamOn) {
      HAL_LOGE("%s: Device failed to flush stream, turn on first.",
               mStreamType.c_str());
    } else {
      res = stream_->flush();
      if (res) {
        HAL_LOGE("%s: Device failed to flush stream.", mStreamType.c_str());
        return res;
      }
    }
  }
  return res;
}
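
// Brings the stream up in the order the V4L2 pipeline expects: set capture
// parameters, negotiate the format (BLOB is captured as an
// implementation-defined format and JPEG-encoded later), allocate buffers,
// then stream on.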
int CameraStream::initialize(uint32_t width,
                             uint32_t height,
                             int format,
                             uint32_t usage,
                             bool mergeStreamFlag) {
  int res = -1;
  HAL_LOGD("%s: %d x %d, format:%d(%s), usage:%d.",
           mStreamType.c_str(),
           width,
           height,
           format,
           getHalPixelFormatString(format).c_str(),
           usage);

  res = stream_->SetParm(V4L2_MODE_VIDEO, width, height);
  if (res) {
    HAL_LOGE("%s: Failed to SetParm.", mStreamType.c_str());
  }

  uint32_t max_buffers = MAX_BUFFER_NUM;
  if (format == HAL_PIXEL_FORMAT_BLOB) {
    format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
  }
  StreamFormat stream_format(format, width, height);
  res = stream_->SetFormat(stream_format, &max_buffers, mergeStreamFlag);
  if (res) {
    HAL_LOGE("%s: Failed to set stream format: %d.",
             mStreamType.c_str(), res);
  }

  res = stream_->PrepareBuffer();
  if (res) {
    HAL_LOGE("%s: Failed to prepare buffer.", mStreamType.c_str());
    return -ENODEV;
  }
  initialized = true;
  // Stream on first.
  if (!mStreamOn) {
    res = stream_->StreamOn();
    if (res) {
      HAL_LOGE("%s: Device failed to turn on stream.", mStreamType.c_str());
      return res;
    }
    mStreamOn = true;
  }
  return res;
}
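
// Queues one output buffer for the given frame number; YUV and BLOB buffers
// are tracked in separate queues under separate locks.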
int CameraStream::request(buffer_handle_t* buffer,
                          uint32_t frameNumber,
                          CameraMetadata* metadata) {
  int res = 0;
  mMetadata = metadata;

  if (!isBlobFlag) {
    std::lock_guard<std::mutex> guard(yuv_buffer_queue_lock_);
    frame_bufferHandle_map_t tmp;
    tmp.bufferHandleptr = buffer;
    tmp.frameNum = frameNumber;
    frameNumber_buffers_map_queue_.push(tmp);
    HAL_LOGV("%s: frameNumber_buffers_map_queue_ buffer_handle_t*:%p, "
             "FrameNumber:%d pushed.", mStreamType.c_str(), buffer, frameNumber);
  } else {
    std::lock_guard<std::mutex> guard(blob_buffer_queue_lock_);
    frame_bufferHandle_map_t tmp;
    tmp.bufferHandleptr = buffer;
    tmp.frameNum = frameNumber;
    blob_frameNumber_buffers_map_queue_.push(tmp);
    HAL_LOGV("%s: blob_frameNumber_buffers_map_queue_ buffer_handle_t*:%p, "
             "FrameNumber:%d pushed.", mStreamType.c_str(), buffer, frameNumber);
  }

  return res;
}
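
// Pops the oldest queued (buffer, frame number) pair, preferring the YUV
// queue for non-BLOB streams; returns -ENOMEM when no buffer is queued.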
int CameraStream::getBuffer(buffer_handle_t** buffer,
                            uint32_t* frameNumber) {
  int res = -ENOMEM;
  HAL_LOGV("%s: frameNumber_buffers_map_queue_ has %zu buffer(s).",
           mStreamType.c_str(),
           frameNumber_buffers_map_queue_.size());
  HAL_LOGV("%s: blob_frameNumber_buffers_map_queue_ has %zu buffer(s).",
           mStreamType.c_str(),
           blob_frameNumber_buffers_map_queue_.size());

  if (frameNumber_buffers_map_queue_.size() && !isBlobFlag) {
    std::unique_lock<std::mutex> lock(yuv_buffer_queue_lock_);
    frame_bufferHandle_map_t tmp = frameNumber_buffers_map_queue_.front();
    *frameNumber = tmp.frameNum;
    *buffer = tmp.bufferHandleptr;
    frameNumber_buffers_map_queue_.pop();
    HAL_LOGV("%s: frameNumber_buffers_map_queue_ buffer_handle_t*:%p, "
             "FrameNumber:%d popped.",
             mStreamType.c_str(),
             *buffer,
             *frameNumber);
    res = 0;
  } else if (blob_frameNumber_buffers_map_queue_.size()) {
    std::unique_lock<std::mutex> lock(blob_buffer_queue_lock_);
    frame_bufferHandle_map_t tmp =
        blob_frameNumber_buffers_map_queue_.front();
    *frameNumber = tmp.frameNum;
    *buffer = tmp.bufferHandleptr;
    blob_frameNumber_buffers_map_queue_.pop();
    HAL_LOGV("%s: blob_frameNumber_buffers_map_queue_ buffer_handle_t*:%p, "
             "FrameNumber:%d popped.",
             mStreamType.c_str(),
             *buffer,
             *frameNumber);
    res = 0;
  }
  return res;
}
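
// Blocks until the YUV buffer queue is non-empty, waiting on the
// yuv_buffer_availabl_queue_ condition variable.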
int CameraStream::waitBuffer() {
  while (frameNumber_buffers_map_queue_.empty()) {
    std::unique_lock<std::mutex> lock(yuv_buffer_queue_lock_);
    HAL_LOGV("%s: Waiting on yuv_buffer_availabl_queue_.", mStreamType.c_str());
    yuv_buffer_availabl_queue_.wait(lock);
    HAL_LOGV("%s: The lock has been notified.", mStreamType.c_str());
  }
  return 0;
}

CameraMainStream::CameraMainStream(std::shared_ptr<V4L2Stream> stream,
                                   int isBlob):
    CameraStream(stream, isBlob) {
  HAL_LOG_ENTER();
  mStreamType = "CameraMainStream";
}

CameraMainStream::~CameraMainStream() {
  HAL_LOG_ENTER();
  std::unique_lock<std::mutex> lock(yuv_buffer_queue_lock_);
}

int CameraMainStream::start() {
  return CameraStream::start();
}

int CameraMainStream::stop() {
  return CameraStream::stop();
}

int CameraMainStream::flush() {
  return CameraStream::flush();
}

int CameraMainStream::initialize(uint32_t width,
                                 uint32_t height,
                                 int format,
                                 uint32_t usage,
                                 bool mergeStreamFlag) {
  return CameraStream::initialize(width,
                                  height,
                                  format,
                                  usage,
                                  mergeStreamFlag);
}

int CameraMainStream::setFormat(uint32_t width,
                                uint32_t height,
                                int format,
                                uint32_t usage) {
  return CameraStream::setFormat(width, height, format, usage);
}

int CameraMainStream::waitBuffer() {
  return CameraStream::waitBuffer();
}

int CameraMainStream::getBuffer(buffer_handle_t** buffer,
                                uint32_t* frameNumber) {
  return CameraStream::getBuffer(buffer, frameNumber);
}

int CameraMainStream::encodebuffer(void* dst_addr,
                                   void* src_addr,
                                   unsigned long mJpegBufferSizes) {
  return CameraStream::encodebuffer(dst_addr, src_addr, mJpegBufferSizes);
}

int CameraMainStream::copy_ycbcr_buffer(android_ycbcr* dst_addr_ycbcr,
                                        void* src_addr) {
  int res = -ENOMEM;
  res = CameraStream::copy_ycbcr_buffer(dst_addr_ycbcr, src_addr);
  if (res) {
    HAL_LOGE("Device CopyBuffer failed.");
  }
  return res;
}

int CameraMainStream::request(buffer_handle_t* buffer,
                              uint32_t frameNumber,
                              CameraMetadata* metadata) {
  int res = CameraStream::request(buffer, frameNumber, metadata);
  // The base request() returns non-zero only on failure.
  if (res) {
    HAL_LOGE("%s: request failed!", mStreamType.c_str());
    return res;
  }

  if (!mStreamOn) {
    res = stream_->StreamOn();
    if (res) {
      HAL_LOGE("%s: Device failed to turn on stream.", mStreamType.c_str());
      return res;
    }
    mStreamOn = true;
    manager_->start(stream_->getStreamSerial());
  }

  return res;
}

int CameraMainStream::enqueueBuffer() {
  return CameraStream::enqueueBuffer();
}

int CameraMainStream::dequeueBuffer(void** src_addr, struct timeval* ts) {
  return CameraStream::dequeueBuffer(src_addr, ts);
}

int CameraMainStream::setIspFlash(int CameraFlashMode) {
  HAL_LOGV("%s CameraFlashMode:%d", mStreamType.c_str(), CameraFlashMode);
  return CameraStream::setIspFlash(CameraFlashMode);
}

int CameraMainStream::getFocusStatus() {
  int res = CameraStream::getFocusStatus();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraMainStream::setFocusInit() {
  int res = CameraStream::setFocusInit();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraMainStream::setIspFocus(int cameraFocusMode) {
  int res = -1;
  res = CameraStream::setIspFocus(cameraFocusMode);
  HAL_LOGV("%s res:%d cameraFocusMode:%d",
           mStreamType.c_str(), res, cameraFocusMode);
  return res;
}

int CameraMainStream::setIspCrop(cam_crop_rect_t camCropRect) {
  int res = CameraStream::setIspCrop(camCropRect);
  return res;
}

int CameraMainStream::setIspFocusRegions(cam_rect_t cam_regions) {
  HAL_LOG_ENTER();
  int res = CameraStream::setIspFocusRegions(cam_regions);
  HAL_LOGV("%s SetAutoFocusRegions "
           "x_min:%d y_min:%d x_max:%d y_max:%d",
           mStreamType.c_str(),
           cam_regions.x_min,
           cam_regions.y_min,
           cam_regions.x_max,
           cam_regions.y_max);
  return res;
}

CameraSubStream::CameraSubStream(std::shared_ptr<V4L2Stream> stream,
                                 int isBlob):
    CameraStream(stream, isBlob) {
  HAL_LOG_ENTER();
  mStreamType = "CameraSubStream";
}

CameraSubStream::~CameraSubStream() {
  HAL_LOG_ENTER();
  std::unique_lock<std::mutex> lock(yuv_buffer_queue_lock_);
}

int CameraSubStream::start() {
  return CameraStream::start();
}

int CameraSubStream::stop() {
  return CameraStream::stop();
}

int CameraSubStream::flush() {
  return CameraStream::flush();
}

int CameraSubStream::initialize(uint32_t width,
                                uint32_t height,
                                int format,
                                uint32_t usage,
                                bool mergeStreamFlag) {
  return CameraStream::initialize(width,
                                  height,
                                  format,
                                  usage,
                                  mergeStreamFlag);
}

int CameraSubStream::setFormat(uint32_t width,
                               uint32_t height,
                               int format,
                               uint32_t usage) {
  return CameraStream::setFormat(width, height, format, usage);
}

int CameraSubStream::waitBuffer() {
  return CameraStream::waitBuffer();
}

int CameraSubStream::getBuffer(buffer_handle_t** buffer,
                               uint32_t* frameNumber) {
  return CameraStream::getBuffer(buffer, frameNumber);
}

int CameraSubStream::encodebuffer(void* dst_addr,
                                  void* src_addr,
                                  unsigned long mJpegBufferSizes) {
  return CameraStream::encodebuffer(dst_addr, src_addr, mJpegBufferSizes);
}

int CameraSubStream::copy_ycbcr_buffer(android_ycbcr* dst_addr_ycbcr,
                                       void* src_addr) {
  int res = -ENOMEM;
  res = CameraStream::copy_ycbcr_buffer(dst_addr_ycbcr, src_addr);
  if (res) {
    HAL_LOGE("Device CopyBuffer failed.");
  }
  return res;
}

int CameraSubStream::request(buffer_handle_t* buffer,
                             uint32_t frameNumber,
                             CameraMetadata* metadata) {
  return CameraStream::request(buffer, frameNumber, metadata);
}

int CameraSubStream::enqueueBuffer() {
  return CameraStream::enqueueBuffer();
}

int CameraSubStream::dequeueBuffer(void** src_addr, struct timeval* ts) {
  return CameraStream::dequeueBuffer(src_addr, ts);
}

int CameraSubStream::setIspFlash(int CameraFlashMode) {
  HAL_LOGV("%s CameraFlashMode:%d", mStreamType.c_str(), CameraFlashMode);
  return CameraStream::setIspFlash(CameraFlashMode);
}

int CameraSubStream::getFocusStatus() {
  int res = CameraStream::getFocusStatus();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraSubStream::setFocusInit() {
  int res = CameraStream::setFocusInit();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraSubStream::setIspFocus(int cameraFocusMode) {
  HAL_LOG_ENTER();
  int res = -1;
  res = CameraStream::setIspFocus(cameraFocusMode);
  HAL_LOGV("%s res:%d cameraFocusMode:%d",
           mStreamType.c_str(), res, cameraFocusMode);
  return res;
}

int CameraSubStream::setIspCrop(cam_crop_rect_t camCropRect) {
  int res = CameraStream::setIspCrop(camCropRect);
  return res;
}

int CameraSubStream::setIspFocusRegions(cam_rect_t cam_regions) {
  HAL_LOG_ENTER();
  int res = CameraStream::setIspFocusRegions(cam_regions);
  HAL_LOGV("%s SetAutoFocusRegions "
           "x_min:%d y_min:%d x_max:%d y_max:%d",
           mStreamType.c_str(),
           cam_regions.x_min,
           cam_regions.y_min,
           cam_regions.x_max,
           cam_regions.y_max);
  return res;
}

CameraMainMirrorStream::CameraMainMirrorStream(
    std::shared_ptr<V4L2Stream> stream, int isBlob):
    CameraStream(stream, isBlob) {
  HAL_LOG_ENTER();
  mStreamType = "CameraMainMirrorStream";
}

CameraMainMirrorStream::~CameraMainMirrorStream() {
  HAL_LOG_ENTER();
  std::unique_lock<std::mutex> lock(yuv_buffer_queue_lock_);
}

int CameraMainMirrorStream::start() {
  HAL_LOG_ENTER();
  return 0;
}

int CameraMainMirrorStream::stop() {
  HAL_LOG_ENTER();
  return 0;
}

int CameraMainMirrorStream::flush() {
  return 0;
}

int CameraMainMirrorStream::initialize(uint32_t /*width*/,
                                       uint32_t /*height*/,
                                       int /*format*/,
                                       uint32_t /*usage*/,
                                       bool /*mergeStreamFlag*/) {
  initialized = true;
  return 0;
}

int CameraMainMirrorStream::setFormat(uint32_t width,
                                      uint32_t height,
                                      int format,
                                      uint32_t usage) {
  return CameraStream::setFormat(width, height, format, usage);
}

int CameraMainMirrorStream::waitBuffer() {
  return 0;
}

int CameraMainMirrorStream::getBuffer(buffer_handle_t** buffer,
                                      uint32_t* frameNumber) {
  return CameraStream::getBuffer(buffer, frameNumber);
}

int CameraMainMirrorStream::encodebuffer(void* dst_addr,
                                         void* src_addr,
                                         unsigned long mJpegBufferSizes) {
  return CameraStream::encodebuffer(dst_addr, src_addr, mJpegBufferSizes);
}

int CameraMainMirrorStream::copy_ycbcr_buffer(android_ycbcr* dst_addr_ycbcr,
                                              void* src_addr) {
  int res = -ENOMEM;
  res = CameraStream::copy_ycbcr_buffer(dst_addr_ycbcr, src_addr);
  if (res) {
    HAL_LOGE("%s: Device CopyBuffer failed.", mStreamType.c_str());
  }
  return res;
}

int CameraMainMirrorStream::request(buffer_handle_t* buffer,
                                    uint32_t frameNumber,
                                    CameraMetadata* metadata) {
  return CameraStream::request(buffer, frameNumber, metadata);
}

int CameraMainMirrorStream::enqueueBuffer() {
  return 0;
}

int CameraMainMirrorStream::dequeueBuffer(void** /*src_addr*/,
                                          struct timeval* /*ts*/) {
  HAL_LOG_ENTER();
  return 0;
}

int CameraMainMirrorStream::setIspFlash(int CameraFlashMode) {
  HAL_LOGV("%s CameraFlashMode:%d", mStreamType.c_str(), CameraFlashMode);
  return 0;
}

int CameraMainMirrorStream::getFocusStatus() {
  int res = CameraStream::getFocusStatus();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraMainMirrorStream::setFocusInit() {
  int res = CameraStream::setFocusInit();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraMainMirrorStream::setIspFocus(int cameraFocusMode) {
  HAL_LOG_ENTER();
  int res = -1;
  HAL_LOGV("%s not implemented cameraFocusMode = %d",
           mStreamType.c_str(), cameraFocusMode);
  return res;
}

int CameraMainMirrorStream::setIspCrop(cam_crop_rect_t camCropRect) {
  int res = CameraStream::setIspCrop(camCropRect);
  return res;
}

int CameraMainMirrorStream::setIspFocusRegions(cam_rect_t cam_regions) {
  HAL_LOG_ENTER();
  int res = -1;
  HAL_LOGV("%s not implemented "
           "x_min:%d y_min:%d x_max:%d y_max:%d",
           mStreamType.c_str(),
           cam_regions.x_min,
           cam_regions.y_min,
           cam_regions.x_max,
           cam_regions.y_max);
  return res;
}

CameraSubMirrorStream::CameraSubMirrorStream(std::shared_ptr<V4L2Stream> stream,
                                             int isBlob):
    CameraStream(stream, isBlob) {
  HAL_LOG_ENTER();
  mStreamType = "CameraSubMirrorStream";
  mScaleFlag = false;
}

CameraSubMirrorStream::~CameraSubMirrorStream() {
  HAL_LOG_ENTER();
  std::unique_lock<std::mutex> lock(yuv_buffer_queue_lock_);
}

int CameraSubMirrorStream::start() {
  HAL_LOG_ENTER();
  return 0;
}

int CameraSubMirrorStream::stop() {
  HAL_LOG_ENTER();
  return 0;
}

int CameraSubMirrorStream::flush() {
  return 0;
}

int CameraSubMirrorStream::setScaleFlag() {
  HAL_LOG_ENTER();
  mScaleFlag = true;
  return 0;
}

int CameraSubMirrorStream::initialize(uint32_t /*width*/,
                                      uint32_t /*height*/,
                                      int /*format*/,
                                      uint32_t /*usage*/,
                                      bool /*mergeStreamFlag*/) {
  int res = 0;
  initialized = true;
  return res;
}

int CameraSubMirrorStream::setFormat(uint32_t width,
                                     uint32_t height,
                                     int format,
                                     uint32_t usage) {
  return CameraStream::setFormat(width, height, format, usage);
}

int CameraSubMirrorStream::waitBuffer() {
  return 0;
}

int CameraSubMirrorStream::getBuffer(buffer_handle_t** buffer,
                                     uint32_t* frameNumber) {
  return CameraStream::getBuffer(buffer, frameNumber);
}

int CameraSubMirrorStream::encodebuffer(void* dst_addr,
                                        void* src_addr,
                                        unsigned long mJpegBufferSizes) {
  return CameraStream::encodebuffer(dst_addr, src_addr, mJpegBufferSizes);
}
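
// Nearest-neighbor YUV420SP downscale. Only integer, equal width/height
// ratios are scaled; a ratio of 1 degenerates to a plain copy (same size) or
// a top-left crop. Interleaved UV pairs are moved as 16-bit units.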
int CameraSubMirrorStream::yuv420spDownScale(void* psrc,
                                             android_ycbcr* dst_addr_ycbcr,
                                             int src_w,
                                             int src_h,
                                             int dst_w,
                                             int dst_h) {
  HAL_LOG_ENTER();
  char* psrc_y = static_cast<char*>(psrc);
  char* pdst_y = static_cast<char*>(dst_addr_ycbcr->y);
  char* psrc_uv = static_cast<char*>(psrc) + src_w * src_h;
  char* pdst_uv = static_cast<char*>(dst_addr_ycbcr->cr);
  int scale = 1;
  int scale_w = src_w / dst_w;
  int scale_h = src_h / dst_h;
  int h, w;

  if (dst_w > src_w || dst_h > src_h) {
    HAL_LOGE("error size, %dx%d -> %dx%d\n", src_w, src_h, dst_w, dst_h);
    return -1;
  }

  if (scale_w == scale_h) {
    scale = scale_w;
  }

  HAL_LOGV("scale = %d\n", scale);

  if (scale == 1) {
    if ((src_w == dst_w) && (src_h == dst_h)) {
      // Same size: copy the Y plane, then the interleaved UV plane.
      memcpy(pdst_y, psrc_y, dst_w * dst_h);
      memcpy(pdst_uv, psrc_uv, dst_w * dst_h / 2);
    } else {
      // Crop: copy the top-left dst_w x dst_h window row by row.
      for (h = 0; h < dst_h; h++) {
        memcpy(pdst_y + h * dst_w, psrc_y + h * src_w, dst_w);
      }
      for (h = 0; h < dst_h / 2; h++) {
        memcpy(pdst_uv + h * dst_w, psrc_uv + h * src_w, dst_w);
      }
    }

    return 0;
  }

  // Nearest-neighbor subsampling of the Y plane.
  for (h = 0; h < dst_h; h++) {
    for (w = 0; w < dst_w; w++) {
      *(pdst_y + h * dst_w + w) = *(psrc_y + h * scale * src_w + w * scale);
    }
  }
  // Each UV pair is two bytes, so the byte offsets (h * dst_w + w) are
  // halved to index the interleaved plane in 16-bit units.
  for (h = 0; h < dst_h / 2; h++) {
    for (w = 0; w < dst_w; w += 2) {
      *(reinterpret_cast<short*>(pdst_uv) + (h * dst_w + w) / 2) =
          *(reinterpret_cast<short*>(psrc_uv) +
            (h * scale * src_w + w * scale) / 2);
    }
  }
  return 0;
}

int CameraSubMirrorStream::copy_ycbcr_buffer(android_ycbcr* dst_addr_ycbcr,
                                             void* src_addr) {
  int res = -ENOMEM;
  if (mScaleFlag) {
    res = yuv420spDownScale(src_addr,
                            dst_addr_ycbcr,
                            stream_->GetDeviceWidth(),
                            stream_->GetDeviceHeight(),
                            mWidth,
                            mHeight);
  } else {
    res = CameraStream::copy_ycbcr_buffer(dst_addr_ycbcr, src_addr);
  }
  if (res) {
    HAL_LOGE("Device CopyBuffer failed.");
  }
  return res;
}

int CameraSubMirrorStream::request(buffer_handle_t* buffer,
                                   uint32_t frameNumber,
                                   CameraMetadata* metadata) {
  return CameraStream::request(buffer, frameNumber, metadata);
}

int CameraSubMirrorStream::enqueueBuffer() {
  return 0;
}

int CameraSubMirrorStream::dequeueBuffer(void** /*src_addr*/,
                                         struct timeval* /*ts*/) {
  HAL_LOG_ENTER();
  return 0;
}

int CameraSubMirrorStream::setIspFlash(int CameraFlashMode) {
  HAL_LOGV("%s CameraFlashMode:%d", mStreamType.c_str(), CameraFlashMode);
  return 0;
}

int CameraSubMirrorStream::getFocusStatus() {
  int res = CameraStream::getFocusStatus();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraSubMirrorStream::setFocusInit() {
  int res = CameraStream::setFocusInit();
  HAL_LOGV("%s res:%d", mStreamType.c_str(), res);
  return res;
}

int CameraSubMirrorStream::setIspFocus(int cameraFocusMode) {
  HAL_LOG_ENTER();
  int res = -1;
  HAL_LOGV("%s not implemented cameraFocusMode = %d",
           mStreamType.c_str(), cameraFocusMode);
  return res;
}

int CameraSubMirrorStream::setIspCrop(cam_crop_rect_t camCropRect) {
  int res = CameraStream::setIspCrop(camCropRect);
  return res;
}

int CameraSubMirrorStream::setIspFocusRegions(cam_rect_t cam_regions) {
  HAL_LOG_ENTER();
  int res = -1;
  HAL_LOGV("%s not implemented "
           "x_min:%d y_min:%d x_max:%d y_max:%d",
           mStreamType.c_str(),
           cam_regions.x_min,
           cam_regions.y_min,
           cam_regions.x_max,
           cam_regions.y_max);
  return res;
}

}  // namespace v4l2_camera_hal