/*
 * Copyright (c) 2021 by Allwinnertech Co., Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "CameraHALv3_V4L2Camera"
#include "v4l2_camera.h"

#include <fcntl.h>
#include <hardware/camera3.h>
#include <linux/videodev2.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>

#include <algorithm>
#include <cstdlib>
#include <memory>
#include <utility>
#include <vector>

#include "CameraMetadata.h"
#include "camera_config.h"
#include "function_thread.h"
#include "metadata/metadata_common.h"
#include "stream_format.h"
#include "v4l2_metadata_factory.h"

namespace v4l2_camera_hal {

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;

// Helper function for managing metadata.
static std::vector<int32_t> UNUSED_FUNCTION(getMetadataKeys)(
    const camera_metadata_t* metadata) {
  std::vector<int32_t> keys;
  size_t num_entries = get_camera_metadata_entry_count(metadata);
  for (size_t i = 0; i < num_entries; ++i) {
    camera_metadata_ro_entry_t entry;
    get_camera_metadata_ro_entry(metadata, i, &entry);
    keys.push_back(entry.tag);
  }
  return keys;
}

V4L2Camera* V4L2Camera::NewV4L2Camera(
    int id,
    std::shared_ptr<CCameraConfig> pCameraCfg,
    bool mergeStreamStatus) {
  HAL_LOG_ENTER();
  // Select one stream to fill the metadata; the path has already
  // picked the stream used for querying formats.
  std::shared_ptr<V4L2Wrapper> v4l2_wrapper(V4L2Wrapper::NewV4L2Wrapper(
      id, pCameraCfg));
  if (!v4l2_wrapper) {
    HAL_LOGE("Failed to initialize V4L2 wrapper.");
    return nullptr;
  }
  HAL_LOGD("v4l2_wrapper.count %ld", v4l2_wrapper.use_count());

  std::unique_ptr<Metadata> metadata;
  int res = GetV4L2Metadata(v4l2_wrapper, &metadata, pCameraCfg);
  if (res) {
    HAL_LOGE("Failed to initialize V4L2 metadata: %d", res);
    return nullptr;
  }

  HAL_LOGD("v4l2_wrapper.count %ld", v4l2_wrapper.use_count());
  return new V4L2Camera(id,
                        std::move(v4l2_wrapper),
                        std::move(metadata),
                        std::move(pCameraCfg),
                        mergeStreamStatus);
}

V4L2Camera::V4L2Camera(int id,
                       std::shared_ptr<V4L2Wrapper> v4l2_wrapper,
                       std::unique_ptr<Metadata> metadata,
                       std::shared_ptr<CCameraConfig> pCameraCfg,
                       bool mergeStreamStatus)
    : default_camera_hal::Camera(id),
      mCameraConfig(std::move(pCameraCfg)),
      device_(std::move(v4l2_wrapper)),
      device_id_(id),
      mgSituationMode(INIT),
      metadata_(std::move(metadata)),
      camera_fd(-1),
      in_flight_buffer_count(0),
      buffers_in_flight_flag_(false),
      max_input_streams_(0),
      max_output_streams_({{0, 0, 0}}),
      FlashStatusFlag(0),
      MarkFrameNum(0),
      curFlashStateE(CAM_FLASH_STATE_MAX),
      curFlashModeE(CAM_FLASH_MODE_MAX),
      merge_stream_status_(mergeStreamStatus),
      is_merge_stream_flag(false) {
  HAL_LOG_ENTER();
  instance = std::shared_ptr<V4L2Camera>(this);

  // Note: v4l2_wrapper was moved into device_ above, so this logs the
  // moved-from count.
  HAL_LOGD("v4l2_wrapper.count %ld", v4l2_wrapper.use_count());

  memset(&mBeffer_status, 0, sizeof(mBeffer_status));
  memset(&mStream_status, 0, sizeof(mStream_status));
  for (int i = 0; i < MAX_STREAM; i++) {
    mCameraStream[i] = nullptr;
  }

  camFocusImpl.focus_frame_num = -1;
  camFocusImpl.focused_times = -1;
  camFocusImpl.update_status = -1;
  camFocusImpl.status_flag = -1;
  camFocusImpl.focus_status = -1;
  camFocusImpl.update_status_times = -1;
  camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_MAX;
  camFocusImpl.camera_focus_trigger = CAM_FOCUS_TRIGGER_MAX;
  memset(&camAreaRoi, 0, sizeof(cam_area_t));
  memset(&camCropRect, 0, sizeof(cam_crop_rect_t));
  mStream_status.pstream = OPEN_STREAM;
}

V4L2Camera::~V4L2Camera() {
  HAL_LOG_ENTER();
}

int V4L2Camera::connect() {
  HAL_LOG_ENTER();
  return 0;
}

void V4L2Camera::disconnect() {
  HAL_LOG_ENTER();
  HAL_LOGD("before disconnect.");
  // Stop all streams.
  if (mStreamManager_ != nullptr) {
    for (int ss = 0; ss < MAX_STREAM; ss++) {
      mStreamManager_->stop((STREAM_SERIAL)ss);
    }
  }
  HAL_LOGD("after disconnect.");
  in_flight_buffer_count = 0;
  mStreamManager_.reset();
  flushRequests(-ENODEV);
  mBeffer_status.pbuffers = INIT_BUFFER;
  mBeffer_status.vbuffers = INIT_BUFFER;
  if (mCameraConfig->supportFlashMode()) {
    std::lock_guard<std::mutex> guard(camera_fd_lock_);
    if (camera_fd != -1) {
      int ret_close = 0;
      ret_close = ::close(camera_fd);
      HAL_LOGD("mCameraFd:%d, ret_close: %d, errno:%s",
               camera_fd, ret_close, strerror(errno));
      camera_fd = -1;
    }
  }
  // TODO(b/29158098): Inform service of any flashes that are available again
  // because this camera is no longer in use.
}

int V4L2Camera::flushBuffers() {
  HAL_LOG_ENTER();
  int res = 0;
  return res;
}

int V4L2Camera::flushRequests(int err) {
  HAL_LOG_ENTER();
  // Note(Calvin): this ran into trouble in picture mode.
  // This is not strictly necessary, but prevents a buildup of aborted
  // requests in the in_flight_ map. These should be cleared
  // whenever the stream is turned off.
  HAL_LOGV("We hold %zu request(s) in Camera HAL,"
           " let us send them to the framework.",
           in_flight_.size());
  for (int i = (MAX_BUFFER_NUM - 1); i >= 0; i--) {
    auto brequest = in_flight_.find(i);
    if (brequest != in_flight_.end()) {
      HAL_LOGD("Send No.%d request to the framework request queue.", i);
      completeRequest(brequest->second, err);
      in_flight_.erase(i);
    } else {
      HAL_LOGD("No.%d request had already been called back.", i);
      in_flight_.erase(i);
    }
  }
  HAL_LOG_EXIT();
  return 0;
}

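// Note: flushRequestsForCTS below completes requests in ascending
// frame_number order where it can. This matches the assumption (taken
// from the CTS-oriented naming, not from any spec text in this file)
// that the framework expects results in submission order.
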
int V4L2Camera::flushRequestsForCTS(int err) {
  HAL_LOG_ENTER();
  uint32_t mFrameNum = 0;
  std::shared_ptr<default_camera_hal::CaptureRequest> mFrameNumRequest;
  // This is not strictly necessary, but prevents a buildup of aborted
  // requests in the in_flight_ map. These should be cleared
  // whenever the stream is turned off.
  {
    std::lock_guard<std::mutex> guard(request_queue_stream_lock_);
    while (!request_queue_pstream_.empty()) {
      request_queue_pstream_.pop();
    }
    while (!request_queue_tstream_.empty()) {
      request_queue_tstream_.pop();
    }
    while (!request_queue_vstream_.empty()) {
      request_queue_vstream_.pop();
    }
  }

  HAL_LOGD("We hold %zu request(s) in Camera HAL,"
           " let us send them to the framework.", in_flight_.size());
  {
    std::lock_guard<std::mutex> guard(in_flight_lock_);
    for (int i = (MAX_BUFFER_NUM - 1); i >= 0; i--) {
      auto brequest = in_flight_.find(i);
      if (brequest != in_flight_.end()) {
        HAL_LOGD("Send No.%d request to the framework request queue,"
                 " frame_number:%d.", i, brequest->second->frame_number);
        if ((brequest->second->frame_number < mFrameNum) &&
            (0 != mFrameNum)) {
          completeRequest(brequest->second, err);
          completeRequest(mFrameNumRequest, err);
        } else if ((brequest->second->frame_number > mFrameNum) &&
                   (0 != mFrameNum)) {
          completeRequest(mFrameNumRequest, err);
          completeRequest(brequest->second, err);
        }
        mFrameNum = brequest->second->frame_number;
        mFrameNumRequest = brequest->second;
        // Erase the buffer while holding in_flight_lock_.
        in_flight_.erase(i);
      } else {
        // Erase the buffer while holding in_flight_lock_.
        // Buffers left in the csi driver would otherwise delay the request.
        in_flight_.erase(i);
        HAL_LOGD("No.%d request had already been called back.", i);
      }
    }
  }
  HAL_LOG_EXIT();
  return 0;
}

int V4L2Camera::initStaticInfo(CameraMetadata* out) {
  HAL_LOG_ENTER();

  int focus_supported = 0;
  int crop_regions_supported = 0;
  if (mCameraConfig->supportFocusMode()) {
    focus_supported = 1;
  }
  if (mCameraConfig->supportZoom()) {
    crop_regions_supported = 1;
  }
  int res = metadata_->FillStaticMetadata(out,
                                          device_id_,
                                          focus_supported,
                                          crop_regions_supported);
  if (res) {
    HAL_LOGE("Failed to get static metadata.");
    return res;
  }

  if (mCameraConfig->supportFlashMode() && device_id_ == 0) {
    HAL_LOGV("initStaticInfo");
    std::vector<uint8_t> avail_ae_modes;
    avail_ae_modes.push_back(ANDROID_CONTROL_AE_MODE_ON);
    avail_ae_modes.push_back(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
    avail_ae_modes.push_back(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    out->update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
                avail_ae_modes.data(),
                avail_ae_modes.size());
  }

  if (mCameraConfig->supportFocusMode()) {
    std::vector<uint8_t> avail_af_modes;
    HAL_LOGV("initStaticInfo avail_af_modes");
    avail_af_modes.push_back(ANDROID_CONTROL_AF_MODE_OFF);
    avail_af_modes.push_back(ANDROID_CONTROL_AF_MODE_AUTO);
    out->update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
                avail_af_modes.data(),
                avail_af_modes.size());
  }

  // Extract max streams for use in verifying stream configs.
  res = SingleTagValue(
      *out, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS, &max_input_streams_);
  if (res) {
    HAL_LOGE("Failed to get max num input streams from static metadata.");
    return res;
  }
  res = SingleTagValue(
      *out, ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, &max_output_streams_);
  if (res) {
    HAL_LOGE("Failed to get max num output streams from static metadata.");
    return res;
  }

  return 0;
}

int V4L2Camera::initTemplate(int type, CameraMetadata* out) {
  HAL_LOG_ENTER();
  return metadata_->GetRequestTemplate(type, out);
}

void V4L2Camera::initDeviceInfo(camera_info_t* info) {
  HAL_LOG_ENTER();

  // TODO(b/31044975): move this into device interface.
  // For now, just constants.
  info->resource_cost = 100;
  info->conflicting_devices = nullptr;
  info->conflicting_devices_length = 0;
}

int V4L2Camera::initDevice() {
  HAL_LOG_ENTER();
  return 0;
}

int V4L2Camera::closeFlashTorch() {
  std::lock_guard<std::mutex> guard(camera_fd_lock_);
  if (camera_fd != -1) {
    int ret_close = 0;
    ret_close = ::close(camera_fd);
    HAL_LOGD("mCameraFd:%d, ret_close: %d, errno:%s",
             camera_fd, ret_close, strerror(errno));
    camera_fd = -1;
    return 1;
  } else {
    return 0;
  }
}

int V4L2Camera::setFlashTorchMode(bool enabled) {
  int retVal = 0;
  struct v4l2_input inp;
  struct v4l2_control ctrl;

  if (camera_fd == -1) {
    camera_fd = open("/dev/video0", O_RDWR | O_NONBLOCK, 0);
    if (camera_fd == -1) {
      HAL_LOGE("ERROR opening /dev/video0: %s", strerror(errno));
      return -1;
    }
    HAL_LOGD("cameraFd : %d opened", camera_fd);
    inp.index = 0;
    if (-1 == ioctl(camera_fd, VIDIOC_S_INPUT, &inp)) {
      HAL_LOGE("VIDIOC_S_INPUT error!");
      ::close(camera_fd);
      camera_fd = -1;
      return -1;
    }
  }

  ctrl.id = V4L2_CID_FLASH_LED_MODE;
  ctrl.value = enabled ? V4L2_FLASH_LED_MODE_TORCH : V4L2_FLASH_LED_MODE_NONE;
  retVal = ioctl(camera_fd, VIDIOC_S_CTRL, &ctrl);
  if (retVal < 0) {
    HAL_LOGE("setFlashMode failed, %s", strerror(errno));
  } else {
    HAL_LOGD("setFlashMode ok, %s",
             enabled ? "V4L2_FLASH_LED_MODE_TORCH"
                     : "V4L2_FLASH_LED_MODE_NONE");
  }
  return retVal;
}

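// updateStatus tracks preview/picture transitions and resets the stream
// and buffer status flags. A rough sketch of the transitions encoded
// below (inferred from this file only):
//   BEFORE_PICTURE: close the preview stream, open the take-picture
//                   stream, and reinitialize its buffers.
//   AFTER_PICTURE:  reopen the preview stream and reinitialize the
//                   preview and take-picture buffers.
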
int V4L2Camera::updateStatus(SituationMode mSituationMode) {
  mgSituationMode = mSituationMode;
  switch (mSituationMode) {
    case BEFORE_PREVIEW:
      break;
    case BEFORE_VIDEO:
      break;
    case BEFORE_PICTURE:
      mStream_status.pstream = CLOSE_STREAM;
      mStream_status.tstream = OPEN_STREAM;
      mBeffer_status.tbuffers = INIT_BUFFER;
      break;
    case AFTER_PICTURE:
      mStream_status.pstream = OPEN_STREAM;
      mBeffer_status.pbuffers = INIT_BUFFER;
      mBeffer_status.tbuffers = INIT_BUFFER;
      break;
    default:
      break;
  }
  return -1;
}

V4L2Camera::SituationMode V4L2Camera::getStatus() {
  return mgSituationMode;
}

V4L2Camera::RequestMode V4L2Camera::analysisRequest(
    std::shared_ptr<default_camera_hal::CaptureRequest> request) {
  if (request == nullptr) {
    if ((mVideoflag || isVideoByTemplate) &&
        mBeffer_status.vbuffers == PREPARE_BUFFER) {
      return RUN_VIDEO_MODE;
    } else if (mBeffer_status.pbuffers == PREPARE_BUFFER) {
      return RUN_PREVIEW_MODE;
    }
  } else {
    // If the request includes the tstream, return immediately.
    for (uint32_t i = 0; i < request->output_buffers.size(); i++) {
      if ((analysisStreamModes(request->output_buffers[i].stream) == TSTREAM) &&
          (request->tbuffer_has_been_used == false)) {
        return PICTURE_MODE;
      }
    }

    // If the request includes the vstream and we are in video mode,
    // return immediately.
    for (uint32_t i = 0; i < request->output_buffers.size(); i++) {
      if ((analysisStreamModes(request->output_buffers[i].stream) == VSTREAM) &&
          (mVideoflag || isVideoByTemplate)) {
        return VIDEO_MODE;
      }
    }

    // Handle the preview stream last.
    for (uint32_t i = 0; i < request->output_buffers.size(); i++) {
      if (analysisStreamModes(request->output_buffers[i].stream) == PSTREAM) {
        return PREVIEW_MODE;
      }
    }
  }
  return ERROR_MODE;
}

int V4L2Camera::analysisStreamNum(
    std::shared_ptr<default_camera_hal::CaptureRequest> request,
    RequestMode mRequestMode) {
  int revalue = -1;
  for (uint32_t i = 0; i < request->output_buffers.size(); i++) {
    HAL_LOGD("Get format %d!", request->output_buffers[i].stream->format);
    if (request->output_buffers[i].stream->format == HAL_PIXEL_FORMAT_BLOB) {
      if (mRequestMode == VIDEO_SNAP_SHOT_MODE) {
        revalue = i;
      }
    }

    if (request->output_buffers[i].stream->format ==
        HAL_PIXEL_FORMAT_YCBCR_420_888) {
      if (mRequestMode == PICTURE_MODE) {
        revalue = i;
      }
    }

    if (request->output_buffers[i].stream->format ==
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
      if ((mRequestMode == PREVIEW_MODE) &&
          (analysisStreamModes(request->output_buffers[i].stream) == PSTREAM)) {
        revalue = i;
      }
      if ((mRequestMode == VIDEO_MODE) &&
          (analysisStreamModes(request->output_buffers[i].stream) == VSTREAM)) {
        revalue = i;
      }
    }
  }
  HAL_LOGD("No.%d is picked.", revalue);
  return revalue;
}

V4L2Camera::StreamIedex V4L2Camera::analysisStreamModes(
    camera3_stream_t* stream) {
  HAL_LOGD("Get usage %d(%s), format %d(%s)!",
           stream->usage,
           getGrallocUsageString(stream->usage).c_str(),
           stream->format,
           getHalPixelFormatString(stream->format).c_str());
  StreamIedex mStreamIedex = ESTREAM;

  switch (stream->format) {
    case HAL_PIXEL_FORMAT_YCBCR_420_888: {
      HAL_LOGD("Detected the picture stream.");
      mStreamIedex = TSTREAM;
      break;
    }
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
      if ((stream->usage & GRALLOC_USAGE_HW_TEXTURE) ||
          (stream->usage &
           (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
        mStreamIedex = PSTREAM;
      } else if (stream->usage &
                 (GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_2D)) {
        mStreamIedex = VSTREAM;
      }
      break;
    }
    // Be compatible with Camera API 1.
    case HAL_PIXEL_FORMAT_BLOB: {
      if ((stream->usage & GRALLOC_USAGE_SW_READ_OFTEN) &&
          (isVideoByTemplate)) {
        mStreamIedex = VSTREAM;
      }
      if ((stream->usage & GRALLOC_USAGE_SW_READ_OFTEN) &&
          (stream->usage & GRALLOC_USAGE_SW_WRITE_OFTEN)) {
        mStreamIedex = VHSTREAM;
        HAL_LOGD("Set stream isVideoSnap flag to true!");
      }
      break;
    }
    default: {
      HAL_LOGE("Could not classify the stream!");
      break;
    }
  }
  return mStreamIedex;
}

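// Summary of the classification above (usage-flag meanings assumed from
// the usual gralloc conventions):
//   YCbCr_420_888                                   -> TSTREAM (still)
//   IMPLEMENTATION_DEFINED + HW_TEXTURE/HW_COMPOSER -> PSTREAM (preview)
//   IMPLEMENTATION_DEFINED + HW_VIDEO_ENCODER/HW_2D -> VSTREAM (record)
//   BLOB + SW read and write                        -> VHSTREAM (video snap)
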
// Returns the index of the largest element in |vec|.
int V4L2Camera::max(std::vector<uint32_t> vec) {
  auto result = std::minmax_element(vec.begin(), vec.end());
  return result.second - vec.begin();
}

// Returns the index of the smallest element in |vec|.
int V4L2Camera::min(std::vector<uint32_t> vec) {
  auto result = std::minmax_element(vec.begin(), vec.end());
  return result.first - vec.begin();
}

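// For example (a sketch, not a test from this codebase):
//   max({640 * 480, 1920 * 1080, 1280 * 720}) returns 1, the index of the
//   largest area, which is how findStreamModes() picks the main stream.
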
int V4L2Camera::maxWidth(camera3_stream_t** streams, int num) {
  int ret = -1;
  if (num == 0) {
    HAL_LOGE("Prepare num err!");
  } else if (num == 1) {
    ret = 0;
  } else if (num == 2) {
    if (streams[0]->width >= streams[1]->width) {
      ret = 0;
    } else {
      ret = 1;
    }
  } else if (num == 3) {
    std::vector<uint32_t> streamWidths{
        streams[0]->width,
        streams[1]->width,
        streams[2]->width
    };
    ret = max(streamWidths);
  }
  return ret;
}

int V4L2Camera::maxHeight(camera3_stream_t** streams, int num) {
  int ret = -1;
  if (num == 0) {
    HAL_LOGE("Prepare num err!");
  } else if (num == 1) {
    ret = 0;
  } else if (num == 2) {
    if (streams[0]->height >= streams[1]->height) {
      ret = 0;
    } else {
      ret = 1;
    }
  } else if (num == 3) {
    std::vector<uint32_t> streamHeight{
        streams[0]->height,
        streams[1]->height,
        streams[2]->height
    };
    ret = max(streamHeight);
  }
  return ret;
}

int V4L2Camera::fillStreamInfo(camera3_stream_t* stream) {
  int res = 0;
  // Doesn't matter what was requested, we always use dataspace V0_JFIF.
  // Note: according to camera3.h, this isn't allowed, but the camera
  // framework team claims it's underdocumented; the implementation lets the
  // HAL overwrite it. If this is changed, change the validation above.
  stream->data_space = HAL_DATASPACE_V0_JFIF;
  // Usage: currently using sw graphics.
  switch (stream->stream_type) {
    case CAMERA3_STREAM_INPUT:
      stream->usage |= GRALLOC_USAGE_SW_READ_OFTEN;
      break;
    case CAMERA3_STREAM_OUTPUT:
      // Set the usage to GRALLOC_USAGE_SW_WRITE_OFTEN for buffers
      // allocated with cache by gralloc.
      stream->usage |= GRALLOC_USAGE_SW_WRITE_OFTEN;
      break;
    case CAMERA3_STREAM_BIDIRECTIONAL:
      stream->usage |=
          (GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN);
      break;
    default:
      // Nothing to do.
      break;
  }
  // Add camera usage.
  stream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;

  switch (stream->format) {
    case HAL_PIXEL_FORMAT_BLOB: {
      stream->max_buffers = PICTURE_BUFFER_NUM;
      break;
    }
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
      stream->max_buffers = MAX_BUFFER_NUM;
      break;
    }
    case HAL_PIXEL_FORMAT_YCBCR_420_888: {
      stream->max_buffers = PICTURE_BUFFER_NUM;
      break;
    }
    default: {
      HAL_LOGE("Unrecognized stream format!");
      break;
    }
  }
  return res;
}

int V4L2Camera::findStreamModes(STREAM_SERIAL stream_serial,
    camera3_stream_configuration_t* stream_config, int* isBlob) {
  int mStreamIndex = -1;
  camera3_stream_t** streams = stream_config->streams;
  switch (stream_serial) {
    case MAIN_STREAM: {
      if (stream_config->num_streams == 1) {
        mStreamIndex = 0;
      } else if (stream_config->num_streams == 2) {
        std::vector<uint32_t> streamLists {
          streams[0]->width * streams[0]->height,
          streams[1]->width * streams[1]->height,
        };
        mStreamIndex = max(streamLists);
      } else if (stream_config->num_streams == 3) {
        std::vector<uint32_t> streamLists {
          streams[0]->width * streams[0]->height,
          streams[1]->width * streams[1]->height,
          streams[2]->width * streams[2]->height,
        };
        mStreamIndex = max(streamLists);
      } else {
        HAL_LOGE("Unsupported stream_config->num_streams:%d!",
                 stream_config->num_streams);
        break;
      }
      if (stream_config->streams[mStreamIndex]->format ==
          HAL_PIXEL_FORMAT_BLOB) {
        *isBlob = 1;
      } else {
        *isBlob = 0;
      }
      break;
    }
    case SUB_0_STREAM: {
      if (stream_config->num_streams <= 1) {
        HAL_LOGE("Unsupported stream_config->num_streams:%d!",
                 stream_config->num_streams);
        break;
      } else if (stream_config->num_streams == 2) {
        int maintmp = findStreamModes(MAIN_STREAM, stream_config, isBlob);
        if (maintmp == 1) {
          mStreamIndex = 0;
        } else {
          mStreamIndex = 1;
        }
      } else if (stream_config->num_streams == 3) {
        std::vector<uint32_t> streamLists{
          streams[0]->width * streams[0]->height,
          streams[1]->width * streams[1]->height,
          streams[2]->width * streams[2]->height,
        };

        uint32_t maxtmp = max(streamLists);
        uint32_t mintmp = min(streamLists);

        // The sub stream is the one that is neither the largest nor the
        // smallest of the three.
        for (uint32_t i = 0; i < stream_config->num_streams; i++) {
          if ((i != maxtmp) && (i != mintmp)) {
            mStreamIndex = i;
          }
        }
      } else {
        HAL_LOGE("Unsupported stream_config->num_streams:%d!",
                 stream_config->num_streams);
        break;
      }
      if (stream_config->streams[mStreamIndex]->format ==
          HAL_PIXEL_FORMAT_BLOB) {
        *isBlob = 1;
      } else {
        *isBlob = 0;
      }
      break;
    }
    default: {
      HAL_LOGE("Unrecognized stream serial!");
      break;
    }
  }
  return mStreamIndex;
}

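// ProbeFlashMode derives a flash command from the framework's AE mode,
// capture intent, and flash mode for one request. A rough sketch of the
// policy below (the 10/20/33 frame windows are taken from the code; their
// tuning rationale is an assumption, not documented here):
//   - A still capture with ALWAYS_FLASH or AUTO_FLASH arms the flash and
//     records the frame number; about 20 frames later it is switched off,
//     and the bookkeeping resets after 33 frames.
//   - TORCH requests turn the LED on immediately; preview requests with
//     FLASH_MODE_OFF report the READY/FIRED state back to the framework.
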
int V4L2Camera::ProbeFlashMode(CameraFlashModeE* camera_flash_mode,
    CameraMetadata* camera_meta_data,
    int* set_flash_flag,
    int frame_number) {
  int result = 0;
  uint8_t fwk_aeMode = 0;
  uint8_t capture_intent = 0;
  uint8_t fwk_flashMode = 0;
  if (camera_meta_data->exists(ANDROID_CONTROL_AE_MODE)) {
    fwk_aeMode =
        camera_meta_data->find(ANDROID_CONTROL_AE_MODE).data.u8[0];
  }
  if (camera_meta_data->exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
    capture_intent =
        camera_meta_data->find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
  }
  if (camera_meta_data->exists(ANDROID_FLASH_MODE)) {
    fwk_flashMode =
        camera_meta_data->find(ANDROID_FLASH_MODE).data.u8[0];
  }

  std::lock_guard<std::mutex> guard(flash_status_lock_);
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH) {
    if (!FlashStatusFlag) {
      MarkFrameNum = frame_number;
      FlashStatusFlag = 1;
      *camera_flash_mode = CAM_FLASH_MODE_ON;
      *set_flash_flag = 1;
    }
  }

  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH) {
    if (!FlashStatusFlag) {
      MarkFrameNum = frame_number;
      FlashStatusFlag = 1;
      *camera_flash_mode = CAM_FLASH_MODE_AUTO;
      *set_flash_flag = 1;
    }
  }

  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON &&
      fwk_flashMode == ANDROID_FLASH_MODE_TORCH) {
    if (curFlashModeE != CAM_FLASH_MODE_TORCH) {
      MarkFrameNum = 0;
      curFlashModeE = CAM_FLASH_MODE_TORCH;
      *camera_flash_mode = CAM_FLASH_MODE_TORCH;
      *set_flash_flag = 1;
    }
  }

  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD &&
      fwk_flashMode == ANDROID_FLASH_MODE_TORCH) {
    HAL_LOGV("CAM_FLASH_MODE_TORCH");
    MarkFrameNum = 0;
    *camera_flash_mode = CAM_FLASH_MODE_TORCH;
    *set_flash_flag = 1;
  }

  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD &&
      fwk_flashMode == ANDROID_FLASH_MODE_OFF && frame_number > 10) {
    HAL_LOGV("CAM_FLASH_MODE_VIDEO_OFF");
    MarkFrameNum = 0;
    *camera_flash_mode = CAM_FLASH_MODE_VIDEO_OFF;
    *set_flash_flag = 1;
  }

  if (MarkFrameNum != 0 && frame_number - MarkFrameNum == 20) {
    *camera_flash_mode = CAM_FLASH_MODE_OFF;
    *set_flash_flag = 1;
  }
  if (MarkFrameNum != 0 && frame_number - MarkFrameNum > 33) {
    FlashStatusFlag = 0;
    MarkFrameNum = 0;
  }
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH &&
      fwk_flashMode == ANDROID_FLASH_MODE_OFF) {
    HAL_LOGV("ON_ALWAYS_FLASH MODE_OFF frame_number:%d", frame_number);
    curFlashStateE = CAM_FLASH_STATE_READY;
  }
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH &&
      fwk_flashMode == ANDROID_FLASH_MODE_OFF) {
    HAL_LOGV("ON_AUTO_FLASH MODE_OFF frame_number:%d", frame_number);
    curFlashStateE = CAM_FLASH_STATE_READY;
    curFlashModeE = CAM_FLASH_MODE_AUTO;
  }
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON &&
      fwk_flashMode == ANDROID_FLASH_MODE_OFF) {
    HAL_LOGV("AE_MODE_ON FLASH_MODE_OFF frame_number:%d", frame_number);
    if (curFlashModeE == CAM_FLASH_MODE_TORCH) {
      *camera_flash_mode = CAM_FLASH_MODE_VIDEO_OFF;
      *set_flash_flag = 1;
    }
    curFlashStateE = CAM_FLASH_STATE_READY;
  }
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE &&
      fwk_flashMode == ANDROID_FLASH_MODE_OFF) {
    HAL_LOGV("REDEYE MODE_OFF frame_number:%d", frame_number);
    curFlashStateE = CAM_FLASH_STATE_READY;
  }
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON &&
      fwk_flashMode == ANDROID_FLASH_MODE_SINGLE) {
    HAL_LOGV("FIRED FLASH_MODE_SINGLE frame_number:%d", frame_number);
    curFlashStateE = CAM_FLASH_STATE_FIRED;
  }
  if (capture_intent == ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
      fwk_flashMode == ANDROID_FLASH_MODE_TORCH &&
      fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON) {
    HAL_LOGV("TORCH AE_MODE_ON frame_number:%d", frame_number);
    MarkFrameNum = 0;
    *camera_flash_mode = CAM_FLASH_MODE_TORCH;
    *set_flash_flag = 1;
    curFlashModeE = CAM_FLASH_MODE_TORCH;
    curFlashStateE = CAM_FLASH_STATE_FIRED;
  }

  return result;
}

void V4L2Camera::convertFromRegions(cam_area_t* roi,
    const CameraMetadata& frame_settings,
    uint32_t tag) {
  const int32_t* i32_data = frame_settings.find(tag).data.i32;
  int32_t x_min = i32_data[0];
  int32_t y_min = i32_data[1];
  int32_t x_max = i32_data[2];
  int32_t y_max = i32_data[3];

  roi->weight = i32_data[4];
  roi->rect.x_min = x_min;
  roi->rect.y_min = y_min;
  roi->rect.x_max = x_max;
  roi->rect.y_max = y_max;
}

// Clamps the metadata region stored at |tag| to the current crop rectangle.
void V4L2Camera::updateActiveArray(CameraMetadata* camera_meta_data,
    uint32_t tag) {
  int32_t* i32_data = camera_meta_data->find(tag).data.i32;
  int32_t x_min = i32_data[0];
  int32_t y_min = i32_data[1];
  int32_t x_max = i32_data[2];
  int32_t y_max = i32_data[3];
  int32_t weight = i32_data[4];

  x_min = std::max(camCropRect.left, x_min);
  y_min = std::max(camCropRect.top, y_min);
  x_max = x_min + std::min(x_max - x_min, camCropRect.width);
  y_max = y_min + std::min(y_max - y_min, camCropRect.height);

  int32_t ActiveRegions[5] = {x_min, y_min, x_max, y_max, weight};
  camera_meta_data->update(tag, ActiveRegions, 5);
}

int V4L2Camera::ProcessFocusMode(CameraMetadata* camera_meta_data,
    int frame_number) {
  int result = 0;
  uint8_t fwk_afMode = 0;
  uint8_t fwk_afTrigger = 0;
  CameraStream* focusStream = nullptr;
  uint32_t i = 0;

  std::lock_guard<std::mutex> guard(auto_focus_lock_);
  // Pick the first configured stream as the focus-control stream.
  for (i = 0; i < MAX_STREAM; i++) {
    if (mCameraStream[i] != nullptr) {
      focusStream = mCameraStream[i];
      break;
    }
  }
  if (focusStream == nullptr) {
    HAL_LOGE("can not find camera stream!");
    return -1;
  }

  if (camera_meta_data->exists(ANDROID_CONTROL_AF_MODE)) {
    camFocusImpl.focus_frame_num = frame_number;
    fwk_afMode =
        camera_meta_data->find(ANDROID_CONTROL_AF_MODE).data.u8[0];
    if (fwk_afMode == ANDROID_CONTROL_AF_MODE_OFF) {
      HAL_LOGV("ANDROID_CONTROL_AF_MODE_OFF frameNumber:%d", frame_number);
      camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_OFF;
    } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_AUTO) {
      HAL_LOGV("ANDROID_CONTROL_AF_MODE_AUTO frameNumber:%d", frame_number);
      camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_AUTO;
    } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_MACRO) {
      HAL_LOGV("ANDROID_CONTROL_AF_MODE_MACRO frameNumber:%d", frame_number);
      camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_MACRO;
    } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
      HAL_LOGV("ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO frameNumber:%d",
               frame_number);
      camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_CONTINUOUS_VIDEO;
    } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
      HAL_LOGV("ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE frameNumber:%d",
               frame_number);
      camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_CONTINUOUS_PICTURE;
    } else {
      HAL_LOGE("error ANDROID_CONTROL_AF_MODE frameNumber:%d", frame_number);
    }
  }

  if (camera_meta_data->exists(ANDROID_CONTROL_AF_TRIGGER)) {
    fwk_afTrigger =
        camera_meta_data->find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
    if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_IDLE) {
      HAL_LOGV("ANDROID_CONTROL_AF_TRIGGER_IDLE frameNumber:%d", frame_number);
      camFocusImpl.camera_focus_trigger = CAM_FOCUS_TRIGGER_IDLE;
    } else if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_START) {
      HAL_LOGV("ANDROID_CONTROL_AF_TRIGGER_START frameNumber:%d", frame_number);
      camFocusImpl.camera_focus_trigger = CAM_FOCUS_TRIGGER_START;
    } else if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
      HAL_LOGV("ANDROID_CONTROL_AF_TRIGGER_CANCEL frameNumber:%d",
               frame_number);
      camFocusImpl.camera_focus_trigger = CAM_FOCUS_TRIGGER_CANCLE;
    } else {
      HAL_LOGE("error ANDROID_CONTROL_AF_TRIGGER frameNumber:%d", frame_number);
    }
  }
  if (camera_meta_data->exists(ANDROID_SCALER_CROP_REGION)) {
    updateActiveArray(camera_meta_data, ANDROID_CONTROL_AF_REGIONS);
  }
  if (camera_meta_data->exists(ANDROID_CONTROL_AF_REGIONS)) {
    cam_area_t roi;
    memset(&roi, 0, sizeof(cam_area_t));
    int32_t* i32_data =
        camera_meta_data->find(ANDROID_CONTROL_AF_REGIONS).data.i32;
    roi.rect.x_min = i32_data[0];
    roi.rect.y_min = i32_data[1];
    roi.rect.x_max = i32_data[2];
    roi.rect.y_max = i32_data[3];
    roi.weight = i32_data[4];

    HAL_LOGV("roi.weight:%d,roi.rect.x_min:%d roi.rect.y_min:%d "
             "roi.rect.x_max:%d roi.rect.y_max:%d",
             roi.weight,
             roi.rect.x_min,
             roi.rect.y_min,
             roi.rect.x_max,
             roi.rect.y_max);

    if (roi.weight != 0) {
      cam_rect_t cam_regions;
      cam_regions.x_min = roi.rect.x_min;
      cam_regions.y_min = roi.rect.y_min;
      cam_regions.x_max = roi.rect.x_max;
      cam_regions.y_max = roi.rect.y_max;
      camAreaRoi.rect.x_min = roi.rect.x_min;
      camAreaRoi.rect.y_min = roi.rect.y_min;
      camAreaRoi.rect.x_max = roi.rect.x_max;
      camAreaRoi.rect.y_max = roi.rect.y_max;
      camAreaRoi.weight = roi.weight;
      focusStream->setIspFocusRegions(cam_regions);
    }
  }

  if (fwk_afMode == ANDROID_CONTROL_AF_MODE_AUTO &&
      fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_IDLE) {
    camFocusImpl.update_status = 1;
  }

  if (fwk_afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE &&
      frame_number == 0) {
    focusStream->setIspFocus(CAM_FOCUS_MODE_START_UP);
    camFocusImpl.focus_status = CAM_FOCUS_MODE_START_UP;
  } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE &&
             frame_number != 0 &&
             camFocusImpl.focus_status != CAM_FOCUS_MODE_CONTINUOUS_PICTURE) {
    focusStream->setIspFocus(CAM_FOCUS_MODE_CONTINUOUS_PICTURE);
    camFocusImpl.focus_status = CAM_FOCUS_MODE_CONTINUOUS_PICTURE;
  } else if ((fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_CANCEL &&
              camAreaRoi.weight != 0) ||
             (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_CANCEL &&
              frame_number < 2)) {
    focusStream->setIspFocus(CAM_FOCUS_MODE_STOP);
    camFocusImpl.focus_status = CAM_FOCUS_MODE_STOP;
    camFocusImpl.camera_focus_trigger = CAM_FOCUS_TRIGGER_CANCLE;
  } else if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_START &&
             fwk_afMode != ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE &&
             camFocusImpl.focus_status != CAM_FOCUS_MODE_START) {
    focusStream->setIspFocus(CAM_FOCUS_MODE_START);
    camFocusImpl.focus_status = CAM_FOCUS_MODE_START;
    camFocusImpl.focused_times = -1;
  } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_AUTO &&
             fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_IDLE &&
             camFocusImpl.focus_status == CAM_FOCUS_MODE_STOP) {
    focusStream->setIspFocus(CAM_FOCUS_MODE_START);
    camFocusImpl.focus_status = CAM_FOCUS_MODE_START;
    camFocusImpl.focused_times = -1;
  }

  return result;
}

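// ProcessCropRegion caches the framework's ANDROID_SCALER_CROP_REGION in
// camCropRect (later echoed back in sResultCallback) and forwards it to
// the ISP through the stream's setIspCrop().
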
int V4L2Camera::ProcessCropRegion(CameraMetadata* camera_meta_data,
    CameraStream* camera_stream) {
  std::unique_lock<std::mutex> lock(frameNumber_request_lock_);
  int result = 0;

  if (camera_meta_data->exists(ANDROID_SCALER_CROP_REGION)) {
    cam_crop_rect_t CameraCropRect;
    memset(&CameraCropRect, 0, sizeof(cam_crop_rect_t));
    int32_t* i32_data =
        camera_meta_data->find(ANDROID_SCALER_CROP_REGION).data.i32;
    camCropRect.left = i32_data[0];
    camCropRect.top = i32_data[1];
    camCropRect.width = i32_data[2];
    camCropRect.height = i32_data[3];
    HAL_LOGV("ANDROID_SCALER_CROP_REGION left:%d top:%d width:%d height:%d",
             camCropRect.left,
             camCropRect.top,
             camCropRect.width,
             camCropRect.height);
    CameraCropRect.left = camCropRect.left;
    CameraCropRect.top = camCropRect.top;
    CameraCropRect.width = camCropRect.width;
    CameraCropRect.height = camCropRect.height;

    result = camera_stream->setIspCrop(CameraCropRect);
    if (result) {
      HAL_LOGE("Fail to setIspCrop on mCameraStream!");
      return result;
    }
  }

  return result;
}

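// Request lifecycle (a summary of this file's flow, not an external spec):
// enqueueRequest() records the request in mMapFrameNumRequest and
// rfequest_queue_stream_, hands each output buffer to its CameraStream,
// and applies flash/zoom/focus settings; once the capture completes, the
// stream manager reports back through sResultCallback(), which attaches
// the sensor timestamp and result metadata and calls completeRequest().
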
int V4L2Camera::enqueueRequest(
    std::shared_ptr<default_camera_hal::CaptureRequest> request) {
  HAL_LOG_ENTER();
  int res;

  // Be compatible with Camera API 1.
  mRunStreamNum = request->output_buffers.size();
  uint32_t frameNumber;
  {
    std::unique_lock<std::mutex> lock(frameNumber_request_lock_);
    in_flight_buffer_count++;
    if (in_flight_buffer_count > MAX_BUFFER_NUM) {
      HAL_LOGW("in_flight_buffer_count:%d > %d, wait!",
               in_flight_buffer_count, MAX_BUFFER_NUM);
      tbuffers_in_flight_.wait(lock);
    }
    frameNumber = request->frame_number;
    mMapFrameNumRequest.emplace(frameNumber, request);
    rfequest_queue_stream_.push(request);
    mMetadata = &request->settings;
  }

  int setFlashFlag = 0;
  CameraFlashModeE CameraFlashMode = CAM_FLASH_MODE_MAX;
  if (mCameraConfig->supportFlashMode()) {
    ProbeFlashMode(&CameraFlashMode, mMetadata, &setFlashFlag, frameNumber);
  }
  for (uint32_t i = 0; i < request->output_buffers.size(); i++) {
    res = mStreamManager_.get()->markFrameNumber(frameNumber);
    if (res) {
      HAL_LOGE("mark FrameNumber failed.%d", i);
      return -ENODEV;
    }
  }
  for (uint32_t i = 0; i < request->output_buffers.size(); i++) {
    std::lock_guard<std::mutex> guard(request_queue_stream_lock_);
    const camera3_stream_buffer_t& output = request->output_buffers[i];
    mCameraStream[i] = reinterpret_cast<CameraStream*>(output.stream->priv);
    HAL_LOGV("i:%d request %d x %d, format:%d. frameNumber:%d", i,
             output.stream->width, output.stream->height,
             output.stream->format, frameNumber);
    res = mCameraStream[i]->request(output.buffer, frameNumber, mMetadata);
    if (res) {
      HAL_LOGE("Fail to request on mCameraStream");
    }

    if (mCameraConfig->supportZoom()) {
      ProcessCropRegion(mMetadata, mCameraStream[i]);
    }

    if (setFlashFlag) {
      HAL_LOGV("setIspFlash(CameraFlashMode:%d)", CameraFlashMode);
      res = mCameraStream[i]->setIspFlash(CameraFlashMode);
      if (res) {
        HAL_LOGE("Fail to setIspFlash on mCameraStream");
      }
    }
  }

  if (mCameraConfig->supportFocusMode()) {
    ProcessFocusMode(mMetadata, frameNumber);
  }

  {
    std::unique_lock<std::mutex> lock(frameNumber_request_lock_);
    res = metadata_->SetRequestSettings(*mMetadata);
    if (res) {
      HAL_LOGE("Failed to set settings.");
    }

    res = metadata_->FillResultMetadata(mMetadata);
    if (res) {
      HAL_LOGE("Failed to fill result metadata.");
    }
  }

  return 0;
}

std::shared_ptr<default_camera_hal::CaptureRequest>
    V4L2Camera::dequeueRequest() {
  HAL_LOG_ENTER();

  // Wait while:
  // 1. There is no request in any stream queue; and
  // 2. We are switching between preview and record (in either direction).
  //    When switching to record we are still in preview mode, so pbuffers
  //    stays PREPARE_BUFFER until the timeout, which means in_flight_
  //    holds fewer than 2 buffers; the record-to-preview case is symmetric
  //    with vbuffers.
  // Leave the wait loop as soon as a request appears in a queue we may
  // serve, or the mode switch completes.
  HAL_LOGD("The request queue will be popped; there remain %zu in prequest, "
           "%zu in trequest, %zu in vrequest, %zu in vhrequest now!",
           request_queue_pstream_.size(),
           request_queue_tstream_.size(),
           request_queue_vstream_.size(),
           request_queue_vhstream_.size());

  std::shared_ptr<default_camera_hal::CaptureRequest> request = nullptr;
  while (1) {
    std::unique_lock<std::mutex> lock(request_queue_stream_lock_);
    // Only pick one stream per iteration: take-picture requests first,
    // then video, then preview.
    if (!request_queue_tstream_.empty()) {
      request = request_queue_tstream_.front();
      request_queue_tstream_.pop();
      break;
    } else if (!request_queue_vstream_.empty() && mVideoflag) {
      request = request_queue_vstream_.front();
      request_queue_vstream_.pop();
      break;
    } else if (!request_queue_pstream_.empty() &&
               ((mVideoflag == false) || mRunStreamNum == 1)) {
      // The stream has only one cache in use, so also check the stream
      // number to allow preview while in video mode.
      request = request_queue_pstream_.front();
      request_queue_pstream_.pop();
      break;
    }
    HAL_LOGD("Wait for the requests_available_stream_ lock,"
             " in_flight_ buffer(s) size:%zu.",
             in_flight_.size());
    requests_available_stream_.wait(lock);
    HAL_LOGD("The lock has been notified.");
  }

  HAL_LOGD("Stream queue has been popped; there remain %zu in prequest, "
           "%zu in trequest, %zu in vrequest, %zu in vhrequest now!",
           request_queue_pstream_.size(),
           request_queue_tstream_.size(),
           request_queue_vstream_.size(),
           request_queue_vhstream_.size());
  return request;
}

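// ReadyForBuffer blocks until at least one buffer set (preview or video)
// is prepared and something is actually in flight; it is presumably
// notified from the buffer-queuing path (outside this file).
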
void V4L2Camera::ReadyForBuffer() {
  // Wait until something is available before looping again.
  std::unique_lock<std::mutex> lock(in_flight_lock_);
  while (!(((mBeffer_status.pbuffers == PREPARE_BUFFER) ||
            (mBeffer_status.vbuffers == PREPARE_BUFFER)) &&
           (in_flight_.size() > 0))) {
    HAL_LOGV("Wait for the buffers_in_flight_ lock, "
             "in_flight_ buffer(s) size:%zu.",
             in_flight_.size());
    buffers_in_flight_.wait(lock);
    HAL_LOGV("The lock has been notified.");
  }
}

bool V4L2Camera::validateDataspacesAndRotations(
    const camera3_stream_configuration_t* stream_config) {
  HAL_LOG_ENTER();
  for (uint32_t i = 0; i < stream_config->num_streams; ++i) {
    if (stream_config->streams[i]->rotation != CAMERA3_STREAM_ROTATION_0) {
      HAL_LOGV("Rotation %d for stream %d not supported",
               stream_config->streams[i]->rotation,
               i);
      return false;
    }
    // Accept all dataspaces, as they will just be overwritten below anyways.
  }
  return true;
}

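// sResultCallback completes the request matching |frameNumber|. The sensor
// timestamp is converted from the driver's timeval as
//   ns = tv_sec * 1e9 + tv_usec * 1e3,
// and res is set to 404, which reads as an internal sentinel telling
// completeRequest() that a timestamp was attached (an inference from this
// file; the constant is not documented here).
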
int V4L2Camera::sResultCallback(uint32_t frameNumber, struct timeval ts) {
  std::unique_lock<std::mutex> lock(frameNumber_request_lock_);
  int res = 0;
  int ret = 0;
  std::shared_ptr<default_camera_hal::CaptureRequest> request =
      rfequest_queue_stream_.front();
  int focus_status = -1;
  CameraStream* focus_camera_stream = nullptr;
  if (mCameraConfig->supportFocusMode()) {
    for (int i = 0; i < MAX_STREAM; i++) {
      if (mCameraStream[i] != nullptr) {
        focus_camera_stream = mCameraStream[i];
        break;
      }
    }
  }

  auto map_entry = mMapFrameNumRequest.find(frameNumber);
  if (map_entry == mMapFrameNumRequest.end()) {
    HAL_LOGE("No matching request for frameNumber:%d, something is wrong!",
             frameNumber);
    return -ENOMEM;
  } else {
    CameraMetadata* settings = &map_entry->second->settings;
    if (request->frame_number != frameNumber) {
      HAL_LOGE("The front request frameNumber:%d does not match the result"
               " frameNumber:%d; deferring its completion.",
               request->frame_number, frameNumber);
      wfequest_queue_stream_.push(map_entry->second);
    } else {
      // Fix cts case:
      // android.hardware.camera2.cts.StillCaptureTest#testJpegExif
      // We update mThumbnailSize and sort here ourselves, so that the
      // framework's own sorting of the metadata cannot change it.
      std::array<int32_t, 2> mThumbnailSize = {-1, -1};
      ret = SingleTagValue(settings,
                           ANDROID_JPEG_THUMBNAIL_SIZE,
                           &mThumbnailSize);
      settings->sort();
      if ((mThumbnailSize[0] == 0) && (mThumbnailSize[1] == 0)) {
        int mThumbnailSizet[2] = {0, 0};
        ret = settings->update(ANDROID_JPEG_THUMBNAIL_SIZE,
                               mThumbnailSizet,
                               2);
        if (ret) {
          HAL_LOGE("Failed to update metadata tag 0x%x",
                   ANDROID_JPEG_THUMBNAIL_SIZE);
        }
      }

      if (!is_merge_stream_flag) {
        // Convert the driver timeval to a nanosecond sensor timestamp.
        int64_t timestamp = 0;
        timestamp = ts.tv_sec * 1000000000ULL + ts.tv_usec * 1000;
        ret = settings->update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
        if (ret) {
          HAL_LOGE("Failed to update metadata tag 0x%x",
                   ANDROID_SENSOR_TIMESTAMP);
        }
        res = 404;
      }

      if (mCameraConfig->supportFlashMode()) {
        uint8_t fwk_flashState;
        uint8_t fwk_aeMode;
        if (curFlashStateE == CAM_FLASH_STATE_READY && device_id_ != 1) {
          fwk_flashState = ANDROID_FLASH_STATE_READY;
          ret = settings->update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_FLASH_STATE);
          }
        } else if (curFlashStateE == CAM_FLASH_STATE_FIRED &&
                   device_id_ != 1) {
          fwk_flashState = ANDROID_FLASH_STATE_FIRED;
          ret = settings->update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_FLASH_STATE);
          }
        }
        if (curFlashModeE == CAM_FLASH_MODE_AUTO && device_id_ != 1) {
          HAL_LOGV("frameNumber:%d", frameNumber);
          fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
          ret = settings->update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_CONTROL_AE_MODE);
          }
        }
      }

      if (mCameraConfig->supportFocusMode()) {
        uint8_t fwk_afStatus = 0;
        uint8_t fwk_afTrigger = 0;
        uint8_t fwk_afMode = 0;

        focus_status = focus_camera_stream->getFocusStatus();
        HAL_LOGD("focus_status:%d frameNumber:%d", focus_status, frameNumber);
        if (focus_status == 2 && camFocusImpl.focused_times < 2) {
          HAL_LOGV("ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED focused_times:%d",
                   camFocusImpl.focused_times);
          fwk_afStatus = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
          ret = settings->update(ANDROID_CONTROL_AF_STATE, &fwk_afStatus, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_CONTROL_AF_STATE);
          }
          camFocusImpl.focused_times++;
        }

        if (settings->exists(ANDROID_CONTROL_AF_TRIGGER)) {
          fwk_afTrigger =
              settings->find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
          if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_IDLE) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_TRIGGER_IDLE"
                     " frameNumber:%d", frameNumber);
          } else if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_START) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_TRIGGER_START"
                     " frameNumber:%d", frameNumber);
          } else if (fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_TRIGGER_CANCEL"
                     " frameNumber:%d", frameNumber);
          } else {
            HAL_LOGE("sResultCallback error ANDROID_CONTROL_AF_TRIGGER"
                     " frameNumber:%d", frameNumber);
          }
        }
        if (settings->exists(ANDROID_CONTROL_AF_MODE)) {
          fwk_afMode = settings->find(ANDROID_CONTROL_AF_MODE).data.u8[0];
          if (fwk_afMode == ANDROID_CONTROL_AF_MODE_OFF) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_MODE_OFF "
                     "frameNumber:%d", frameNumber);
          } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_AUTO) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_MODE_AUTO "
                     "frameNumber:%d", frameNumber);
          } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_MACRO) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_MODE_MACRO "
                     "frameNumber:%d", frameNumber);
          } else if (fwk_afMode == ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
            HAL_LOGV("sResultCallback ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO "
                     "frameNumber:%d", frameNumber);
          } else if (fwk_afMode ==
                     ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
            HAL_LOGV("sResultCallback "
                     "ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE "
                     "frameNumber:%d", frameNumber);
          } else {
            HAL_LOGE("sResultCallback error ANDROID_CONTROL_AF_MODE "
                     "frameNumber:%d", frameNumber);
          }
        }
        if (settings->exists(ANDROID_CONTROL_AF_REGIONS)) {
          cam_area_t camRoi;
          camRoi.rect.x_min =
              settings->find(ANDROID_CONTROL_AF_REGIONS).data.i32[0];
          camRoi.rect.y_min =
              settings->find(ANDROID_CONTROL_AF_REGIONS).data.i32[1];
          camRoi.rect.x_max =
              settings->find(ANDROID_CONTROL_AF_REGIONS).data.i32[2];
          camRoi.rect.y_max =
              settings->find(ANDROID_CONTROL_AF_REGIONS).data.i32[3];
          camRoi.weight =
              settings->find(ANDROID_CONTROL_AF_REGIONS).data.i32[4];
          HAL_LOGV("sResultCallback roi.weight:%d,roi.rect.x_min:%d "
                   "roi.rect.y_min:%d roi.rect.x_max:%d roi.rect.y_max:%d",
                   camAreaRoi.weight,
                   camAreaRoi.rect.x_min,
                   camAreaRoi.rect.y_min,
                   camAreaRoi.rect.x_max,
                   camAreaRoi.rect.y_max);
          HAL_LOGV("sResultCallback camRoi.weight:%d,camRoi.rect.x_min:%d "
                   "camRoi.rect.y_min:%d camRoi.rect.x_max:%d "
                   "camRoi.rect.y_max:%d",
                   camRoi.weight,
                   camRoi.rect.x_min,
                   camRoi.rect.y_min,
                   camRoi.rect.x_max,
                   camRoi.rect.y_max);
        }
        // The focus_status == 2 and == 4 cases had identical bodies, so
        // they are merged here.
        if ((focus_status == 2 || focus_status == 4) &&
            fwk_afTrigger == ANDROID_CONTROL_AF_TRIGGER_START &&
            fwk_afMode == ANDROID_CONTROL_AF_MODE_AUTO) {
          HAL_LOGV("update_status ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED "
                   "focused_times:%d",
                   camFocusImpl.focused_times);
          fwk_afStatus = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
          ret = settings->update(ANDROID_CONTROL_AF_STATE, &fwk_afStatus, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_CONTROL_AF_STATE);
          }
          camFocusImpl.status_flag = 1;
        }
        if ((focus_status == 2 || focus_status == 4) &&
            camFocusImpl.update_status == 1 &&
            camFocusImpl.status_flag == 1) {
          HAL_LOGV("update_status no CANCEL "
                   "ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED focused_times:%d",
                   camFocusImpl.focused_times);
          fwk_afStatus = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
          ret = settings->update(ANDROID_CONTROL_AF_STATE, &fwk_afStatus, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_CONTROL_AF_STATE);
          }
          camFocusImpl.update_status_times++;
        }
        if (focus_status == 1) {
          HAL_LOGV("ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN frameNumber:%d",
                   frameNumber);
          fwk_afStatus = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
          // Update the AF state tag (the original passed the state value
          // as the tag by mistake).
          ret = settings->update(ANDROID_CONTROL_AF_STATE, &fwk_afStatus, 1);
          if (ret) {
            HAL_LOGE("Failed to update metadata tag 0x%x",
                     ANDROID_CONTROL_AF_STATE);
          }
        }

        if (device_id_ != 1) {
          int32_t afRegions[5] = {
              camAreaRoi.rect.x_min,
              camAreaRoi.rect.y_min,
              camAreaRoi.rect.x_max,
              camAreaRoi.rect.y_max,
              camAreaRoi.weight};
          settings->update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
        }
      }

      if (mCameraConfig->supportZoom()) {
        if (settings->exists(ANDROID_SCALER_CROP_REGION)) {
          int32_t* i32_data =
              settings->find(ANDROID_SCALER_CROP_REGION).data.i32;
          int left = i32_data[0];
          int top = i32_data[1];
          int width = i32_data[2];
          int height = i32_data[3];

          HAL_LOGV("sResultCallback ANDROID_SCALER_CROP_REGION left:%d "
                   "top:%d width:%d height:%d frameNumber:%d",
                   left, top, width, height, frameNumber);
          HAL_LOGV("sResultCallback ANDROID_SCALER_CROP_REGION left:%d "
                   "top:%d width:%d height:%d frameNumber:%d",
                   camCropRect.left,
                   camCropRect.top,
                   camCropRect.width,
                   camCropRect.height,
                   frameNumber);

          int32_t cropRegions[4];
          cropRegions[0] = camCropRect.left;
          cropRegions[1] = camCropRect.top;
          cropRegions[2] = camCropRect.width;
          cropRegions[3] = camCropRect.height;
          settings->update(ANDROID_SCALER_CROP_REGION, cropRegions, 4);
        }
      }

      completeRequest(map_entry->second, res);
      rfequest_queue_stream_.pop();
      in_flight_buffer_count--;
      tbuffers_in_flight_.notify_one();

      // Drain any deferred requests whose turn has come.
      while (!wfequest_queue_stream_.empty()) {
        if (rfequest_queue_stream_.front()->frame_number ==
            wfequest_queue_stream_.front()->frame_number) {
          if (!is_merge_stream_flag) {
            int64_t timestamp_w = 0;
            timestamp_w = ts.tv_sec * 1000000000ULL + ts.tv_usec * 1000;
            ret = settings->update(ANDROID_SENSOR_TIMESTAMP,
                                   &timestamp_w, 1);
            if (ret) {
              HAL_LOGE("Failed to update metadata tag 0x%x",
                       ANDROID_SENSOR_TIMESTAMP);
            }
            res = 404;
          }

          completeRequest(wfequest_queue_stream_.front(), res);
          wfequest_queue_stream_.pop();
          rfequest_queue_stream_.pop();
          in_flight_buffer_count--;
          tbuffers_in_flight_.notify_one();
        } else {
          HAL_LOGE("The front frameNumber in rfequest_queue(%d) "
                   "!= the wfequest_queue(%d), so keep it.",
                   rfequest_queue_stream_.front()->frame_number,
                   wfequest_queue_stream_.front()->frame_number);
          break;
        }
      }
    }
    mMapFrameNumRequest.erase(frameNumber);
  }
  return res;
}

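// setupStreams maps the framework's stream list onto the HAL's fixed
// stream slots: the largest-area stream becomes MAIN_STREAM, the mid-size
// one SUB_0_STREAM, and a remaining stream is attached either as a blob
// stream or as a mirror of the sub stream (scaled if the sizes differ).
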
int V4L2Camera::setupStreams(camera3_stream_configuration_t* stream_config) {
  HAL_LOG_ENTER();
  int res = 0;
  // stream_config should have been validated; assume at least 1 stream.
  if (stream_config->num_streams < 1) {
    HAL_LOGE("Invalid stream configuration: at least 1 stream is required.");
    return -EINVAL;
  }
  // stream_config should have been validated; do not exceed MAX_STREAM_NUM.
  if (stream_config->num_streams > MAX_STREAM_NUM) {
    HAL_LOGE("Invalid stream configuration: more than the max of %d streams"
             " we support.", MAX_STREAM_NUM);
    return -EINVAL;
  }

  // Stop all streams.
  if (mStreamManager_ != nullptr) {
    HAL_LOGD("Stop stream.");
    for (int ss = 0; ss < MAX_STREAM; ss++) {
      mStreamManager_->stop((STREAM_SERIAL)ss);
    }
  }

  while (!wfequest_queue_stream_.empty()) {
    wfequest_queue_stream_.pop();
  }
  while (!rfequest_queue_stream_.empty()) {
    rfequest_queue_stream_.pop();
  }
  mMapFrameNumRequest.clear();
  mStreamManager_.reset();
  mStreamManager_ = StreamManager::NewStreamManager(device_, instance);

  if (mCameraConfig->supportFlashMode()) {
    std::lock_guard<std::mutex> guard(camera_fd_lock_);
    if (camera_fd != -1) {
      int ret_close = 0;
      ret_close = ::close(camera_fd);
      HAL_LOGD("mCameraFd:%d, ret_close: %d, errno:%s",
               camera_fd, ret_close, strerror(errno));
      camera_fd = -1;
    }
  }

  uint32_t numStreamsSet = 0;
  int retIndex = -1;
  int mainIndex = -1;
  int subIndex = -1;
  int thirdIndex = -1;

  for (int j = 0; j < MAX_STREAM; j++) {
    mStreamTracker[j] = false;
  }
  for (int j = 0; j < MAX_STREAM; j++) {
    mSourceStreamTracker[j] = false;
  }

  // Analyze the configuration and create the streams.
  for (uint32_t i = 0; i < stream_config->num_streams; ++i) {
    int isBlob = 0;
    retIndex = findStreamModes(MAIN_STREAM, stream_config, &isBlob);
    if ((retIndex >= 0) && (!mStreamTracker[MAIN_STREAM + isBlob])) {
      HAL_LOGD("Detect the main stream %d is format %d(%s), "
               "width %d, height %d, usage %d(%s), stream_type %d, "
               "data_space %d, num_streams:%d. isBlob %d",
               retIndex,
               stream_config->streams[retIndex]->format,
               getHalPixelFormatString(
                   stream_config->streams[retIndex]->format).c_str(),
               stream_config->streams[retIndex]->width,
               stream_config->streams[retIndex]->height,
               stream_config->streams[retIndex]->usage,
               getGrallocUsageString(
                   stream_config->streams[retIndex]->usage).c_str(),
               stream_config->streams[retIndex]->stream_type,
               stream_config->streams[retIndex]->data_space,
               stream_config->num_streams, isBlob);
      mainIndex = retIndex;

      stream_config->streams[retIndex]->priv =
          reinterpret_cast<void*>(mStreamManager_->createStream(
              MAIN_STREAM,
              stream_config->streams[retIndex]->width,
              stream_config->streams[retIndex]->height,
              stream_config->streams[retIndex]->format,
              stream_config->streams[retIndex]->usage,
              isBlob,
              merge_stream_status_));
      if (nullptr == stream_config->streams[retIndex]->priv) {
        HAL_LOGE("Failed to create the main stream!");
        return -EINVAL;
      }
      mSourceStreamTracker[retIndex] = true;
      mStreamTracker[MAIN_STREAM + isBlob] = true;
      numStreamsSet++;
      continue;
    }
    isBlob = 0;
    retIndex = findStreamModes(SUB_0_STREAM, stream_config, &isBlob);
    if ((retIndex >= 0) && (!mStreamTracker[SUB_0_STREAM + isBlob])) {
      HAL_LOGV("Detect the sub stream %d is format %d(%s), "
               "width %d, height %d, usage %d(%s), stream_type %d, "
               "data_space %d, num_streams:%d. isBlob %d",
               retIndex,
               stream_config->streams[retIndex]->format,
               getHalPixelFormatString(
                   stream_config->streams[retIndex]->format).c_str(),
               stream_config->streams[retIndex]->width,
               stream_config->streams[retIndex]->height,
               stream_config->streams[retIndex]->usage,
               getGrallocUsageString(
                   stream_config->streams[retIndex]->usage).c_str(),
               stream_config->streams[retIndex]->stream_type,
               stream_config->streams[retIndex]->data_space,
               stream_config->num_streams, isBlob);

      subIndex = retIndex;

      stream_config->streams[retIndex]->priv =
          reinterpret_cast<void*>(mStreamManager_->createStream(
              SUB_0_STREAM,
              stream_config->streams[retIndex]->width,
              stream_config->streams[retIndex]->height,
              stream_config->streams[retIndex]->format,
              stream_config->streams[retIndex]->usage,
              isBlob,
              merge_stream_status_));
      if (nullptr == stream_config->streams[retIndex]->priv) {
        HAL_LOGE("Failed to create the sub stream!");
        return -EINVAL;
      }
      mSourceStreamTracker[retIndex] = true;
      mStreamTracker[SUB_0_STREAM + isBlob] = true;
      numStreamsSet++;
      continue;
    }

    // For the third stream.
    isBlob = 0;
    // Detect the remaining source stream.
    for (uint32_t k = 0; k < stream_config->num_streams; k++) {
      if (mSourceStreamTracker[k] == false) {
        thirdIndex = k;
        if (stream_config->streams[k]->format == HAL_PIXEL_FORMAT_BLOB) {
          isBlob = 1;
        }
      }
    }

    if (subIndex < 0 || thirdIndex < 0) {
      // Without both a sub and a third stream there is nothing to link.
      continue;
    }
    camera3_stream_t* subIndexStream = stream_config->streams[subIndex];
    camera3_stream_t* thirdIndexStream = stream_config->streams[thirdIndex];

    // We cannot handle the blob stream when the input-to-output
    // scaling ratio is 5 or larger.
    if ((numStreamsSet != stream_config->num_streams) &&
        (((subIndexStream->width / thirdIndexStream->width) < 5) &&
         ((subIndexStream->height / thirdIndexStream->height) < 5))) {
      // For the third blob stream.
      for (int j = SUB_0_STREAM_BLOB; j > 0;) {
        HAL_LOGD("In j:%d circle!", j);
        if ((!mStreamTracker[j]) && isBlob) {
          HAL_LOGD("Find j+isBlob:%d false!", j);
          HAL_LOGD("Detect the third blob stream %d link to %d "
                   "stream is format %d(%s), width %d, height %d, "
                   "num_streams:%d.",
                   thirdIndex,
                   j,
                   stream_config->streams[thirdIndex]->format,
                   getHalPixelFormatString(
                       stream_config->streams[thirdIndex]->format).c_str(),
                   stream_config->streams[thirdIndex]->width,
                   stream_config->streams[thirdIndex]->height,
                   stream_config->num_streams);

          stream_config->streams[thirdIndex]->priv =
              reinterpret_cast<void*>(mStreamManager_->createStream(
                  (STREAM_SERIAL)(j - 1),
                  stream_config->streams[thirdIndex]->width,
                  stream_config->streams[thirdIndex]->height,
                  stream_config->streams[thirdIndex]->format,
                  stream_config->streams[thirdIndex]->usage,
                  isBlob,
                  merge_stream_status_));

          if (nullptr == stream_config->streams[thirdIndex]->priv) {
            HAL_LOGE("Failed to create the third stream!");
            return -EINVAL;
          }
          mSourceStreamTracker[thirdIndex] = true;
          mStreamTracker[j] = true;
          numStreamsSet++;
          break;
        }

        if (numStreamsSet == stream_config->num_streams) {
          break;
        }
        j = j - 2;
      }
    }

    // Find a mirror stream for the remaining output.
    if (numStreamsSet != stream_config->num_streams) {
      if (!mStreamTracker[SUB_0_MIRROR_STREAM]) {
        HAL_LOGD("Find SUB_0_MIRROR_STREAM:%d!", SUB_0_MIRROR_STREAM + isBlob);
        HAL_LOGD("Detect the third mirror stream %d link to %d "
                 "stream is format %d(%s), width %d, height %d, "
                 "num_streams:%d.",
                 thirdIndex,
                 SUB_0_MIRROR_STREAM + isBlob,
                 stream_config->streams[thirdIndex]->format,
                 getHalPixelFormatString(
                     stream_config->streams[thirdIndex]->format).c_str(),
                 stream_config->streams[thirdIndex]->width,
                 stream_config->streams[thirdIndex]->height,
                 stream_config->num_streams);

        stream_config->streams[thirdIndex]->priv =
            reinterpret_cast<void*>(mStreamManager_->createStream(
                (STREAM_SERIAL)SUB_0_MIRROR_STREAM,
                stream_config->streams[thirdIndex]->width,
                stream_config->streams[thirdIndex]->height,
                stream_config->streams[thirdIndex]->format,
                stream_config->streams[thirdIndex]->usage,
                isBlob,
                merge_stream_status_));
        if (nullptr == stream_config->streams[thirdIndex]->priv) {
          HAL_LOGE("Failed to create the third stream!");
          return -EINVAL;
        }

        if (subIndexStream->width != thirdIndexStream->width ||
            subIndexStream->height != thirdIndexStream->height) {
          res = (reinterpret_cast<CameraSubMirrorStream*>(
              thirdIndexStream->priv))->setScaleFlag();
          if (res) {
            HAL_LOGE("Failed setScaleFlag!");
          }
        }
        mSourceStreamTracker[thirdIndex] = true;
        mStreamTracker[SUB_0_MIRROR_STREAM + isBlob] = true;
        numStreamsSet++;
      }
    }
  }

  HAL_LOGD("Configure the stream manager.");
  if (mStreamManager_ != nullptr) {
    for (int ss = 0; ss < MAX_STREAM; ss++) {
      res = mStreamManager_->configurateManager((STREAM_SERIAL)ss);
      if (res) {
        HAL_LOGE("Failed to configure the stream manager.");
      }
    }
  }

  camFocusImpl.focus_frame_num = -1;
  camFocusImpl.focused_times = -1;
  camFocusImpl.update_status = -1;
  camFocusImpl.status_flag = -1;
  camFocusImpl.focus_status = -1;
  camFocusImpl.update_status_times = -1;
  camFocusImpl.camera_focus_mode = CAM_FOCUS_MODE_MAX;
  camFocusImpl.camera_focus_trigger = CAM_FOCUS_TRIGGER_MAX;
  memset(&camAreaRoi, 0, sizeof(cam_area_t));
  memset(&camCropRect, 0, sizeof(cam_crop_rect_t));

  HAL_LOGD("Start stream.");
  // Start all streams.
  if (mStreamManager_ != nullptr) {
    for (int ss = 0; ss < MAX_STREAM; ss++) {
      mStreamManager_->start((STREAM_SERIAL)ss);
    }
  }

  // Fill in some stream info:
  // stream->usage, stream->max_buffers, stream->data_space.
  for (uint32_t i = 0; i < stream_config->num_streams; ++i) {
    fillStreamInfo(stream_config->streams[i]);
  }
  is_merge_stream_flag = false;
  for (uint32_t i = 0; i < stream_config->num_streams; ++i) {
    HAL_LOGD("stream %d is format %d(%s), width %d, height %d, "
             "usage %d(%s), stream_type %d, data_space %d, num_streams:%d.",
             i,
             stream_config->streams[i]->format,
             getHalPixelFormatString(stream_config->streams[i]->format).c_str(),
             stream_config->streams[i]->width,
             stream_config->streams[i]->height,
             stream_config->streams[i]->usage,
             getGrallocUsageString(stream_config->streams[i]->usage).c_str(),
             stream_config->streams[i]->stream_type,
             stream_config->streams[i]->data_space,
             stream_config->num_streams);
    if (stream_config->streams[i]->width * stream_config->streams[i]->height >
        4000 * 3000) {
      is_merge_stream_flag = true;
    }
  }

  HAL_LOGI("setupStreams finished");
  return 0;
}

bool V4L2Camera::isValidRequestSettings(const CameraMetadata& settings) {
  if (!metadata_->IsValidRequest(settings)) {
    HAL_LOGE("Invalid request settings.");
    return false;
  }
  return true;
}

}  // namespace v4l2_camera_hal