/*
|
* Copyright (c) 2021 by Allwinnertech Co., Ltd.
|
*
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
* you may not use this file except in compliance with the License.
|
* You may obtain a copy of the License at
|
*
|
* http://www.apache.org/licenses/LICENSE-2.0
|
*
|
* Unless required by applicable law or agreed to in writing, software
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
* See the License for the specific language governing permissions and
|
* limitations under the License.
|
*/
|
|
#define LOG_TAG "CameraHALv3_V4L2Stream"
|
#include "v4l2_stream.h"
|
|
#include <android-base/unique_fd.h>
|
#include <cutils/properties.h>
|
#include <fcntl.h>
|
#include <string.h>
|
#include <sys/mman.h>
|
#include <sys/stat.h>
|
#include <sys/types.h>
|
#include <unistd.h>
|
#include <utils/Timers.h>
|
|
#include <algorithm>
|
#include <array>
|
#include <cstdlib>
|
#include <functional>
|
#include <iostream>
|
#include <limits>
|
#include <map>
|
#include <mutex>
|
#include <sstream>
|
#include <string>
|
#include <utility>
|
#include <vector>
|
|
#include "camera_config.h"
|
#include "linux/videodev2.h"
|
#include "stream_format.h"
|
#include "type_camera.h"
|
#include "v4l2_gralloc.h"
|
#include GPU_PUBLIC_INCLUDE
|
|
#define ISP_3A_PARAM_SIZE 81412
|
#define ISP_DEBUG_MSG_SIZE 20796
|
#define ISP_DEBUG_MAGIC_STR "ISPDEBUG"
|
|
#if DBG_SAVE_OUTPUT
|
static int eb_yuv_count = 0;
|
static int eb_jpeg_count = 0;
|
static int dq_yuv_count = 0;
|
const int output_counts = 10;
|
#endif
|
|
|
namespace v4l2_camera_hal {
|
|
// Factory for V4L2Stream instances. The caller takes ownership of the
// returned object.
V4L2Stream* V4L2Stream::NewV4L2Stream(
    const int id,
    const std::string device_path,
    std::shared_ptr<CCameraConfig> pCameraCfg) {
  V4L2Stream* stream = new V4L2Stream(id, device_path, pCameraCfg);
  return stream;
}
|
|
// Constructs a stream bound to one video device node. Classifies the node
// path into a stream role (MAIN/SUB_0, with front-facing nodes mapped onto
// the same roles), creates a self-pipe used to wake a blocked epoll wait,
// and pre-allocates the epoll event array. Opening the device itself is
// deferred to Connect().
V4L2Stream::V4L2Stream(const int id, const std::string device_path,
                       std::shared_ptr<CCameraConfig> pCameraCfg)
    : mCameraConfig(pCameraCfg),
      device_path_(std::move(device_path)),
      device_fd_(-1),          // not opened until Connect()
      disconnect(false),
      device_id_(id),
      has_StreamOn(false),
      mflush_buffers(false),
      isTakePicture(false),
      buffer_state_(BUFFER_UNINIT),
#ifdef USE_ISP
      mAWIspApi(NULL),         // created in Connect(), destroyed in Disconnect()
      mIspId(-1),
#endif
      connection_count_(0) {
  HAL_LOG_ENTER();
  int pipefd[2];
  int ret = -1;
  // Map the device node path to the stream role; back and front sensors use
  // different nodes but share the MAIN/SUB_0 classification.
  if (device_path_.compare(MAIN_STREAM_PATH) == 0) {
    device_ss_ = MAIN_STREAM;
  } else if (device_path_.compare(SUB_0_STREAM_PATH) == 0) {
    device_ss_ = SUB_0_STREAM;
  } else if (device_path_.compare(MAIN_FRONT_STREAM_PATH) == 0) {
    device_ss_ = MAIN_STREAM;
  } else if (device_path_.compare(SUB_0_FRONT_STREAM_PATH) == 0) {
    device_ss_ = SUB_0_STREAM;
  }

  memset(&jpeg_crop_rect, 0, sizeof(cam_crop_rect_t));
  // Self-pipe: Disconnect() writes to write_fd_ to wake a poll/epoll that is
  // blocked on read_fd_ alongside the device fd.
  ret = pipe(pipefd);
  if (ret == -1) {
    ALOGE("V4L2Stream create pipe failed");
    // NOTE(review): on pipe failure read_fd_/write_fd_ stay uninitialized —
    // confirm callers never use them in that case.
  } else {
    read_fd_ = pipefd[0];
    write_fd_ = pipefd[1];
  }
  // Two epoll slots: the device fd and the pipe read end.
  // NOTE(review): calloc result is unchecked, and no free() of pEvents is
  // visible in this chunk — confirm ownership/lifetime.
  pEvents = (epoll_event *)calloc(2, sizeof(epoll_event));
}
|
|
// Destroys the stream: closes both ends of the wake-up pipe and releases
// the epoll event array allocated in the constructor. The device fd itself
// is closed by Disconnect(), not here.
V4L2Stream::~V4L2Stream() {
  HAL_LOG_ENTER();
  std::unique_lock<std::mutex> lock(buffer_queue_lock_);
  HAL_LOGV("%s, device_ss_:%d.", device_path_.c_str(), device_ss_);
  close(read_fd_);
  close(write_fd_);
  // Fix: pEvents is calloc'd in the constructor and was never freed,
  // leaking one epoll_event array per stream instance.
  free(pEvents);
  pEvents = NULL;
}
|
|
// Opens the video node and selects the sensor input routed to this camera
// id. Reference-counted: if already connected, only bumps the count. The
// open is retried up to 5 times with a 200ms back-off (the node may still
// be releasing from a previous close). Returns 0 on success, -ENODEV if
// the node could not be opened.
int V4L2Stream::Connect() {
  HAL_LOG_ENTER();
  std::lock_guard<std::mutex> lock(connection_lock_);

  if (connected()) {
    HAL_LOGV("Camera stream %s is already connected.", device_path_.c_str());
    ++connection_count_;
    return 0;
  }
  HAL_LOGD("Camera stream will link to %s.", device_path_.c_str());
  int try_num = 5;
  int fd = -1;
  while (try_num--) {
    HAL_LOGD("try to link %s, the %d time.", device_path_.c_str(), 5 -try_num);
    // Open in nonblocking mode (DQBUF may return EAGAIN).
    fd = TEMP_FAILURE_RETRY(open(device_path_.c_str(), O_RDWR | O_NONBLOCK, 0));
    if (fd < 0) {
      HAL_LOGE("failed to open %s (%s)", device_path_.c_str(), strerror(errno));
      // Back off briefly before the next attempt.
      usleep(200*1000);
      continue;
    }
    break;
  }
  if (fd < 0) {
    HAL_LOGE("failed to open %s (%s)", device_path_.c_str(), strerror(errno));
    return -ENODEV;
  }

  device_fd_ = fd;
  ++connection_count_;

  HAL_LOGV("Detect camera stream %s, stream serial:%d.",
           device_path_.c_str(), device_ss_);

  // Select the sensor input for this logical camera; failure is logged but
  // deliberately not treated as fatal here.
  // NOTE(review): inp is only partially initialized (index field) — the
  // remaining fields are input for VIDIOC_ENUMINPUT, not S_INPUT, so this
  // is presumably fine; confirm against the driver.
  struct v4l2_input inp;
  inp.index = getSupportCameraId(device_id_);
  HAL_LOGV("%s ioctl %s", __func__, getV4l2IoctlString(VIDIOC_S_INPUT).c_str());
  if (TEMP_FAILURE_RETRY(ioctl(fd, VIDIOC_S_INPUT, &inp)) != 0) {
    HAL_LOGE("VIDIOC_S_INPUT on %d error: %s.", inp.index, strerror(errno));
  }

#ifdef USE_ISP
  // ISP interface lives for the duration of the connection.
  mAWIspApi = new android::AWIspApi();
#endif
  return 0;
}
|
|
void V4L2Stream::Disconnect() {
|
HAL_LOG_ENTER();
|
std::lock_guard<std::mutex> lock(connection_lock_);
|
|
if (connection_count_ == 0) {
|
// Not connected.
|
HAL_LOGE("Camera device %s is not connected, cannot disconnect.",
|
device_path_.c_str());
|
return;
|
}
|
|
--connection_count_;
|
if (connection_count_ > 0) {
|
HAL_LOGV("Disconnected from camera device %s. connections remain.",
|
device_path_.c_str());
|
return;
|
}
|
// wake up epoll
|
disconnect = true;
|
write(write_fd_, "w", 2);
|
int res = TEMP_FAILURE_RETRY(close(device_fd_));
|
HAL_LOGD("Close device path:%s, fd:%d, res: %s",
|
device_path_.c_str(), device_fd_, strerror(res));
|
if (res) {
|
HAL_LOGW("Disconnected from camera device %s. fd:%d encount err(%s).",
|
device_path_.c_str(), device_fd_, strerror(res));
|
}
|
// Delay for open after close success encount open device busy.
|
// TODO(zjw): optimize this, keep node open until close the camera hal.
|
// usleep(200*1000);
|
|
#ifdef USE_ISP
|
if (mAWIspApi != NULL) {
|
delete mAWIspApi;
|
mAWIspApi = NULL;
|
}
|
#endif
|
|
device_fd_ = -1;
|
format_.reset();
|
buffers_.clear();
|
// Closing the device releases all queued buffers back to the user.
|
}
|
|
// Helper function. Should be used instead of ioctl throughout this class.
// Serializes all ioctls on the device behind device_lock_ and rejects the
// call with -ENODEV if the stream is no longer connected. Returns the raw
// ioctl result otherwise (with EINTR retried).
template <typename T>
int V4L2Stream::IoctlLocked(int request, T data) {
  // Potentially called so many times logging entry is a bad idea.
  std::lock_guard<std::mutex> lock(device_lock_);
  if (!connected()) {
    HAL_LOGE("Stream %s not connected.", device_path_.c_str());
    return -ENODEV;
  }
  HAL_LOGV("Stream fd:%d.. request:%s",
           device_fd_, getV4l2IoctlString((request)).c_str());
  return TEMP_FAILURE_RETRY(ioctl(device_fd_, request, data));
}
|
|
// Starts streaming. Requires SetFormat() to have been called first.
// Applies a vertical flip when camera 0's routed sensor id differs from
// its logical id, issues STREAMON, and starts the ISP unit serving this
// sensor. Idempotent: a second call while streaming is a no-op.
// Returns 0 on success, -EINVAL without a format, -ENODEV on ioctl failure.
int V4L2Stream::StreamOn() {
  HAL_LOG_ENTER();

  if (!format_) {
    HAL_LOGE("Stream format must be set before turning on stream.");
    return -EINVAL;
  }

  if (has_StreamOn) {
    HAL_LOGV("Stream had been turned on.");
    return 0;
  }
#if DELAY_BETWEEN_ON_OFF
  // Record the on-time (ms) so StreamOff() can enforce a minimum interval.
  mTimeStampsFstreamon = systemTime() / 1000000;
#endif
  int mDevice_id = getSupportCameraId(device_id_);
  HAL_LOGD("id:%d mDevice_id:%d\n", device_id_, mDevice_id);
  if (device_id_ == 0 && device_id_ != mDevice_id) {
    // Logical camera 0 is backed by a different physical sensor: flip
    // vertically to compensate for its mounting orientation.
    // NOTE(review): this ioctl bypasses IoctlLocked() (no device_lock_, no
    // connected() check) — confirm this is intentional.
    struct v4l2_control ctrl;
    ctrl.id = V4L2_CID_VFLIP;
    ctrl.value = 1;
    HAL_LOGV("ioctl %s", getV4l2IoctlString(VIDIOC_S_CTRL).c_str());
    if (TEMP_FAILURE_RETRY(ioctl(device_fd_, VIDIOC_S_CTRL, &ctrl)) != 0) {
      HAL_LOGE("VIDIOC_S_CTRL error: %s. value:%d", strerror(errno),
               ctrl.value);
    }
  }

  int32_t type = format_->type();
  if (IoctlLocked(VIDIOC_STREAMON, &type) < 0) {
    HAL_LOGE("STREAMON fails: %s", strerror(errno));
    return -ENODEV;
  } else {
    buffer_state_ = BUFFER_UNINIT;
    has_StreamOn = true;
  }
#if DELAY_BETWEEN_ON_OFF
  HAL_LOGV("Stream turned on.");
  // Give the pipeline time to settle before buffers start flowing.
  usleep(100*1000);
  HAL_LOGV("Stream after turned on sleep for stream on prepare.");
#endif

#ifdef USE_ISP
  // Single-camera configurations always use ISP 0; otherwise ask the ISP
  // layer which unit serves this sensor.
  mIspId = 0;
  if (getSingleCameraId() < 0) {
    mIspId = mAWIspApi->awIspGetIspId(mDevice_id);
  }
  if (mIspId >= 0) {
    mAWIspApi->awIspStart(mIspId);
    HAL_LOGD("ISP turned on.");
  } else {
    HAL_LOGE("ISP turned on failed!");
  }
#endif

  return 0;
}
|
|
// Stops streaming: issues STREAMOFF (optionally after enforcing a minimum
// on->off interval), stops the ISP, marks all buffers as not in flight and
// unmaps every mmap'd buffer. A no-op if no format was ever set. Always
// returns 0.
int V4L2Stream::StreamOff() {
  HAL_LOG_ENTER();

  if (!format_) {
    // Can't have turned on the stream without format being set,
    // so nothing to turn off here.
    return 0;
  }
#if DELAY_BETWEEN_ON_OFF
  // TODO(zjw): Remove it.
  // Delay between vin stream on and off time that less than
  // DELAY_BETWEEN_STREAM for resource release completely.
  unsigned long mDeltaStream = systemTime() / 1000000 - mTimeStampsFstreamon;
  HAL_LOGD("mDeltaStream:%ld, mTimeStampsFstreamon:%ld,"
           " systemTime() / 1000000:%ld.",
           mDeltaStream, mTimeStampsFstreamon,
           systemTime() / 1000000);
  if (mDeltaStream < DELAY_BETWEEN_STREAM) {
    HAL_LOGD("mDeltaStream:%ld.", mDeltaStream);
    usleep((DELAY_BETWEEN_STREAM -mDeltaStream)*1000);
  }
#endif
  int32_t type = format_->type();
  int res = IoctlLocked(VIDIOC_STREAMOFF, &type);
  // NOTE(review): on failure the ioctl returns -1 with the cause in errno;
  // strerror(res) below therefore prints the wrong message — should be
  // strerror(errno). Confirm and fix.
  if (res) {
    HAL_LOGW("Stream turned off failed, err(%s).", strerror(res));
  }
  if (res < 0) {
    HAL_LOGE("STREAMOFF fails: %s", strerror(errno));
  }
  HAL_LOGD("After stream %d, ind:%d turned off.", device_id_, device_fd_);
#ifdef USE_ISP
  // NOTE(review): unlike StreamOn(), mAWIspApi is not null-checked here.
  mAWIspApi->awIspStop(mIspId);
  HAL_LOGV("Stream %d, ind:%d awIspStop.", device_id_, device_fd_);
#endif

  // Calling STREAMOFF releases all queued buffers back to the user.
  // int gralloc_res = gralloc_->unlockAllBuffers();
  // No buffers in flight.
  for (size_t i = 0; i < buffers_.size(); ++i) {
    buffers_[i] = false;
  }
  // munmap buffer.
  // NOTE(review): one shared mMapMem.length is used for every index — this
  // assumes all buffers have the same mapping size (see PrepareBuffer).
  for (size_t i = 0; i < buffers_.size(); i++) {
    HAL_LOGV("munmap index:%zu!", i);
    res = munmap(mMapMem.mem[i], mMapMem.length);
    if (res < 0) {
      HAL_LOGE("munmap failed");
    }
    mMapMem.mem[i] = NULL;
  }
  has_StreamOn = false;
  HAL_LOGV("Stream %d, ind:%d turned off.", device_id_, device_fd_);
  return 0;
}
|
|
int V4L2Stream::flush() {
|
HAL_LOG_ENTER();
|
mflush_buffers = true;
|
buffer_availabl_queue_.notify_one();
|
HAL_LOGV("Stream %d, ss:%d, ind:%d flush.",
|
device_id_, device_ss_, device_fd_);
|
return 0;
|
}
|
|
int V4L2Stream::QueryControl(uint32_t control_id,
|
v4l2_query_ext_ctrl* result) {
|
int res;
|
memset(result, 0, sizeof(*result));
|
if (extended_query_supported_) {
|
result->id = control_id;
|
res = IoctlLocked(VIDIOC_QUERY_EXT_CTRL, result);
|
// Assuming the operation was supported (not ENOTTY), no more to do.
|
if (errno != ENOTTY) {
|
if (res) {
|
HAL_LOGE("QUERY_EXT_CTRL fails: %s", strerror(errno));
|
return -ENODEV;
|
}
|
return 0;
|
}
|
}
|
|
// Extended control querying not supported, fall back to basic control query.
|
v4l2_queryctrl query;
|
query.id = control_id;
|
if (IoctlLocked(VIDIOC_QUERYCTRL, &query)) {
|
HAL_LOGE("QUERYCTRL fails: %s", strerror(errno));
|
return -ENODEV;
|
}
|
|
// Convert the basic result to the extended result.
|
result->id = query.id;
|
result->type = query.type;
|
memcpy(result->name, query.name, sizeof(query.name));
|
result->minimum = query.minimum;
|
if (query.type == V4L2_CTRL_TYPE_BITMASK) {
|
// According to the V4L2 documentation, when type is BITMASK,
|
// max and default should be interpreted as __u32. Practically,
|
// this means the conversion from 32 bit to 64 will pad with 0s not 1s.
|
result->maximum = static_cast<uint32_t>(query.maximum);
|
result->default_value = static_cast<uint32_t>(query.default_value);
|
} else {
|
result->maximum = query.maximum;
|
result->default_value = query.default_value;
|
}
|
result->step = static_cast<uint32_t>(query.step);
|
result->flags = query.flags;
|
result->elems = 1;
|
switch (result->type) {
|
case V4L2_CTRL_TYPE_INTEGER64:
|
result->elem_size = sizeof(int64_t);
|
break;
|
case V4L2_CTRL_TYPE_STRING:
|
result->elem_size = result->maximum + 1;
|
break;
|
default:
|
result->elem_size = sizeof(int32_t);
|
break;
|
}
|
|
return 0;
|
}
|
|
// Reads the current value of one control into *value.
// Returns 0 on success, -ENODEV on driver failure.
int V4L2Stream::GetControl(uint32_t control_id, int32_t* value) {
  // For extended controls (any control class other than "user"),
  // G_EXT_CTRL must be used instead of G_CTRL.
  if (V4L2_CTRL_ID2CLASS(control_id) != V4L2_CTRL_CLASS_USER) {
    v4l2_ext_control control;
    v4l2_ext_controls controls;
    memset(&control, 0, sizeof(control));
    memset(&controls, 0, sizeof(controls));

    // One-element extended-controls batch holding just this control.
    control.id = control_id;
    controls.ctrl_class = V4L2_CTRL_ID2CLASS(control_id);
    controls.count = 1;
    controls.controls = &control;

    if (IoctlLocked(VIDIOC_G_EXT_CTRLS, &controls) < 0) {
      HAL_LOGE("G_EXT_CTRLS fails: %s", strerror(errno));
      return -ENODEV;
    }
    *value = control.value;
  } else {
    v4l2_control control{control_id, 0};
    if (IoctlLocked(VIDIOC_G_CTRL, &control) < 0) {
      HAL_LOGE("G_CTRL fails: %s", strerror(errno));
      return -ENODEV;
    }
    *value = control.value;
  }
  return 0;
}
|
|
int V4L2Stream::SetTakePictureCtrl(enum v4l2_take_picture value) {
|
struct v4l2_control ctrl;
|
int ret = -1;
|
HAL_LOGV("%s value = %d", __func__, value);
|
ctrl.id = V4L2_CID_TAKE_PICTURE;
|
ctrl.value = value;
|
ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
|
if (ret < 0) {
|
HAL_LOGE("failed, %s", strerror(errno));
|
} else {
|
HAL_LOGV(" ok");
|
}
|
|
return ret;
|
}
|
|
int V4L2Stream::SetFlashMode(uint32_t mode) {
|
int ret = -1;
|
struct v4l2_control ctrl;
|
|
ctrl.id = V4L2_CID_FLASH_LED_MODE;
|
ctrl.value = mode;
|
HAL_LOGV("%s mode = %d", __func__, mode);
|
|
ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
|
HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
|
if (ret < 0) {
|
HAL_LOGE("%s failed, %s", __func__, strerror(errno));
|
} else {
|
HAL_LOGV("%s ok, %s", __func__, strerror(errno));
|
}
|
|
return ret;
|
}
|
|
int V4L2Stream::SetAutoFocusInit() {
|
int ret = -1;
|
struct v4l2_control ctrl;
|
ctrl.id = V4L2_CID_AUTO_FOCUS_INIT;
|
ctrl.value = 0;
|
|
ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
|
HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
|
if (ret < 0) {
|
HAL_LOGE("%s failed, %s", __func__, strerror(errno));
|
} else {
|
HAL_LOGV("%s ok, %s", __func__, strerror(errno));
|
}
|
|
return ret;
|
}
|
|
// Enables continuous autofocus (V4L2_CID_FOCUS_AUTO = 1) and then sets the
// autofocus range to af_range (V4L2_CID_AUTO_FOCUS_RANGE).
// NOTE(review): the return value reflects only the second ioctl; a failure
// of the first is logged but not reported to the caller.
int V4L2Stream::SetAutoFocusRange(int af_range) {
  int ret = -1;
  struct v4l2_control ctrl;

  ctrl.id = V4L2_CID_FOCUS_AUTO;
  ctrl.value = 1;

  ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
  HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
  if (ret < 0) {
    HAL_LOGE("id V4L2_CID_FOCUS_AUTO failed, %s", strerror(errno));
  } else {
    HAL_LOGV("id V4L2_CID_FOCUS_AUTO ok, %s", strerror(errno));
  }

  // Reuse the same control struct for the range setting.
  ctrl.id = V4L2_CID_AUTO_FOCUS_RANGE;
  ctrl.value = af_range;

  ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
  HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
  if (ret < 0) {
    HAL_LOGE("id V4L2_CID_AUTO_FOCUS_RANGE failed, %s", strerror(errno));
  } else {
    HAL_LOGV("id V4L2_CID_AUTO_FOCUS_RANGE ok, %s", strerror(errno));
  }

  return ret;
}
|
|
int V4L2Stream::SetAutoFocusStart() {
|
int ret = -1;
|
struct v4l2_control ctrl;
|
|
ctrl.id = V4L2_CID_AUTO_FOCUS_START;
|
|
ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
|
HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
|
if (ret < 0) {
|
HAL_LOGE("failed, %s", strerror(errno));
|
} else {
|
HAL_LOGV("ok, %s", strerror(errno));
|
}
|
return ret;
|
}
|
|
int V4L2Stream::SetAutoFocusStop() {
|
int ret = -1;
|
struct v4l2_control ctrl;
|
|
ctrl.id = V4L2_CID_AUTO_FOCUS_STOP;
|
|
ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
|
HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
|
if (ret < 0) {
|
HAL_LOGE("failed, %s", strerror(errno));
|
} else {
|
HAL_LOGV("ok, %s", strerror(errno));
|
}
|
|
return ret;
|
}
|
|
int V4L2Stream::Set3ALock(int lock) {
|
int ret = -1;
|
struct v4l2_control ctrl;
|
|
ctrl.id = V4L2_CID_3A_LOCK;
|
ctrl.value = lock;
|
|
ret = IoctlLocked(VIDIOC_S_CTRL, &ctrl);
|
HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
|
if (ret < 0) {
|
HAL_LOGE("failed, %s", strerror(errno));
|
} else {
|
HAL_LOGV("ok, %s", strerror(errno));
|
}
|
|
return ret;
|
}
|
|
int V4L2Stream::GetAutoFocusStatus() {
|
int ret = -1;
|
|
if (device_fd_ < 0) {
|
return 0xFF000000;
|
}
|
|
ret = mAWIspApi->awGetFocusStatus();
|
HAL_LOGD("ret:%d device_fd_:%d", ret, device_fd_);
|
|
return ret;
|
}
|
|
// Forwards an autofocus region rectangle to the ISP layer and returns its
// result code.
// NOTE(review): mAWIspApi is not null-checked here; callers must ensure
// the stream is connected (Connect() creates it under USE_ISP).
int V4L2Stream::SetAutoFocusRegions(cam_rect_t cam_regions) {
  int ret = -1;
  ret = mAWIspApi->awSetFocusRegions(cam_regions.x_min,
                                     cam_regions.y_min,
                                     cam_regions.x_max,
                                     cam_regions.y_max);

  HAL_LOGD("ret:%d device_fd_:%d,x1:%d y1:%d x2:%d y2:%d",
           ret,
           device_fd_,
           cam_regions.x_min,
           cam_regions.y_min,
           cam_regions.x_max,
           cam_regions.y_max);

  return ret;
}
|
|
|
// Caches the JPEG crop rectangle in this stream object. The rectangle is
// only stored here — nothing is sent to the driver. Always returns 0.
int V4L2Stream::SetJpegCropRect(cam_crop_rect_t cam_crop_rect) {
  jpeg_crop_rect.left = cam_crop_rect.left;
  jpeg_crop_rect.top = cam_crop_rect.top;
  jpeg_crop_rect.width = cam_crop_rect.width;
  jpeg_crop_rect.height = cam_crop_rect.height;
  return 0;
}
|
|
// Applies a crop rectangle on the device via VIDIOC_S_SELECTION.
// Returns the ioctl result.
int V4L2Stream::SetCropRect(cam_crop_rect_t cam_crop_rect) {
  int ret = -1;
  struct v4l2_selection s;
  // Fix: zero the struct — v4l2_selection's type/flags/reserved fields
  // were previously passed to the driver uninitialized.
  // NOTE(review): s.type remains 0 after this; it likely should be set to
  // the stream's buffer type (format_->type()) — confirm with the driver.
  memset(&s, 0, sizeof(s));

  s.target = V4L2_SEL_TGT_CROP;
  s.r.left = cam_crop_rect.left;
  s.r.top = cam_crop_rect.top;
  s.r.width = cam_crop_rect.width;
  s.r.height = cam_crop_rect.height;

  ret = IoctlLocked(VIDIOC_S_SELECTION, &s);
  HAL_LOGV("ret:%d device_fd_:%d left:%d top:%d width:%d height:%d",
           ret, device_fd_, s.r.left, s.r.top, s.r.width, s.r.height);
  if (ret < 0) {
    HAL_LOGE("failed, %s", strerror(errno));
  } else {
    HAL_LOGV("ok, %s", strerror(errno));
  }
  return ret;
}
|
|
// Sets one control to |desired|. Mirrors GetControl(): extended-class
// controls go through S_EXT_CTRLS, user-class controls through S_CTRL.
// On success the driver's clamped/actual value is written to *result when
// the caller provides it. Returns 0 on success, -ENODEV on failure.
int V4L2Stream::SetControl(uint32_t control_id,
                           int32_t desired,
                           int32_t* result) {
  int32_t result_value = 0;

  // TODO(b/29334616): When async, this may need to check if the stream
  // is on, and if so, lock it off while setting format. Need to look
  // into if V4L2 supports adjusting controls while the stream is on.

  // For extended controls (any control class other than "user"),
  // S_EXT_CTRL must be used instead of S_CTRL.
  if (V4L2_CTRL_ID2CLASS(control_id) != V4L2_CTRL_CLASS_USER) {
    v4l2_ext_control control;
    v4l2_ext_controls controls;
    memset(&control, 0, sizeof(control));
    memset(&controls, 0, sizeof(controls));

    // One-element extended-controls batch holding just this control.
    control.id = control_id;
    control.value = desired;
    controls.ctrl_class = V4L2_CTRL_ID2CLASS(control_id);
    controls.count = 1;
    controls.controls = &control;

    if (IoctlLocked(VIDIOC_S_EXT_CTRLS, &controls) < 0) {
      HAL_LOGE("S_EXT_CTRLS fails: %s", strerror(errno));
      return -ENODEV;
    }
    result_value = control.value;
  } else {
    v4l2_control control{control_id, desired};
    if (IoctlLocked(VIDIOC_S_CTRL, &control) < 0) {
      HAL_LOGE("S_CTRL fails: %s", strerror(errno));
      return -ENODEV;
    }
    result_value = control.value;
  }

  // If the caller wants to know the result, pass it back.
  if (result != nullptr) {
    *result = result_value;
  }
  return 0;
}
|
|
// Sets streaming parameters: a fixed 1/30s frame interval and the given
// capture mode. Returns 0 on success, -ENODEV on failure.
int V4L2Stream::SetParm(int mCapturemode) {
  HAL_LOG_ENTER();

  struct v4l2_streamparm params;
  memset(&params, 0, sizeof(params));

  // 1/30 second per frame (30 fps).
  params.parm.capture.timeperframe.numerator = 1;
  params.parm.capture.timeperframe.denominator = 30;
  params.parm.capture.reserved[0] = 0;
  params.type = V4L2_CAPTURE_TYPE;
  params.parm.capture.capturemode = mCapturemode;

  if (IoctlLocked(VIDIOC_S_PARM, &params) < 0) {
    HAL_LOGE("S_PARM fails: %s", strerror(errno));
    return -ENODEV;
  }

  return 0;
}
|
|
// Reports the pixel formats this stream supports. The list is fixed (not
// enumerated from the driver): three YUV 4:2:0 layouts plus JPEG, which is
// included for still capture. Always returns 0.
int V4L2Stream::GetFormats(std::set<uint32_t>* v4l2_formats) {
  HAL_LOG_ENTER();

  static const uint32_t kSupportedFormats[] = {
      V4L2_PIX_FMT_NV12,
      V4L2_PIX_FMT_YUV420,
      V4L2_PIX_FMT_NV21,
      V4L2_PIX_FMT_JPEG,  // Add the jpeg format for take picture.
  };
  for (uint32_t fmt : kSupportedFormats) {
    v4l2_formats->insert(fmt);
  }

  return 0;
}
|
|
int V4L2Stream::GetFormatFrameSizes(
|
uint32_t v4l2_format,
|
std::set<std::array<int32_t, 2>,
|
std::greater<std::array<int32_t, 2>>>* sizes) {
|
v4l2_frmsizeenum size_query;
|
memset(&size_query, 0, sizeof(size_query));
|
|
// Add the jpeg format for take picture.
|
if (v4l2_format == V4L2_PIX_FMT_JPEG) {
|
v4l2_format = V4L2_PIX_FMT_DEFAULT;
|
}
|
|
size_query.pixel_format = v4l2_format;
|
|
char * value;
|
value = mCameraConfig->supportPictureSizeValue();
|
|
std::string st1 = value;
|
int size_width = 0;
|
int size_height = 0;
|
std::string tmp;
|
std::vector<std::string> data;
|
std::stringstream input(st1);
|
|
while (getline(input, tmp, ',')) {
|
data.push_back(tmp);
|
}
|
for (auto s : data) {
|
sscanf(s.c_str(), "%dx%d", &size_width, &size_height);
|
sizes->insert({{{size_width, size_height}}});
|
}
|
|
return 0;
|
}
|
|
// Converts a v4l2_fract with units of seconds to an int64_t with units of ns.
inline int64_t FractToNs(const v4l2_fract& fract) {
  const int64_t kNsPerSec = 1000000000LL;
  return kNsPerSec * fract.numerator / fract.denominator;
}
|
|
int V4L2Stream::GetFormatFrameDurationRange(
|
uint32_t v4l2_format,
|
const std::array<int32_t, 2>& size,
|
std::array<int64_t, 2>* duration_range) {
|
// Potentially called so many times logging entry is a bad idea.
|
|
v4l2_frmivalenum duration_query;
|
memset(&duration_query, 0, sizeof(duration_query));
|
|
// Add the jpeg format for take picture.
|
if (v4l2_format == V4L2_PIX_FMT_JPEG) {
|
v4l2_format = V4L2_PIX_FMT_DEFAULT;
|
}
|
|
duration_query.pixel_format = v4l2_format;
|
duration_query.width = size[0];
|
duration_query.height = size[1];
|
|
if (IoctlLocked(VIDIOC_ENUM_FRAMEINTERVALS, &duration_query) < 0) {
|
HAL_LOGE("ENUM_FRAMEINTERVALS failed: %s", strerror(errno));
|
}
|
|
int64_t min = std::numeric_limits<int64_t>::max();
|
int64_t max = std::numeric_limits<int64_t>::min();
|
min = 33300000;
|
max = 100000000;
|
(*duration_range)[0] = min;
|
(*duration_range)[1] = max;
|
return 0;
|
}
|
|
int V4L2Stream::parse_pair(const char *str,
|
uint32_t *first,
|
uint32_t *second,
|
char delim) {
|
// Find the first integer.
|
char *end;
|
uint32_t w = static_cast<int>(strtol(str, &end, 10));
|
// If a delimeter does not immediately follow, give up.
|
if (*end != delim) {
|
HAL_LOGE("Cannot find delimeter (%c) in str=%s", delim, str);
|
return -1;
|
}
|
// Find the second integer, immediately after the delimeter.
|
uint32_t h = static_cast<int>(strtol(end+1, &end, 10));
|
*first = w;
|
*second = h;
|
|
return 0;
|
}
|
|
// Sets the capture format on the device. If the requested size equals the
// configured interpolation destination size, the driver is programmed with
// the interpolation *source* size and the destination size is restored in
// the cached format afterwards. Any format change re-requests buffers.
// *result_max_buffers is in/out: desired buffer count in, actual count out.
// Returns 0 on success, -ENODEV on ioctl failure, or the RequestBuffers
// error.
int V4L2Stream::SetFormat(const StreamFormat& desired_format,
                          uint32_t* result_max_buffers) {
  HAL_LOG_ENTER();

  if (format_ && desired_format == *format_) {
    HAL_LOGV("The desired format is as same as the format set last.");
    return 0;
  }

  // Not in the correct format, set the new one.
  if (format_) {
    // If we had an old format, first request 0 buffers to inform the device
    // we're no longer using any previously "allocated" buffers from the old
    // format. This seems like it shouldn't be necessary for USERPTR memory,
    // and/or should happen from turning the stream off, but the driver
    // complained. May be a driver issue, or may be intended behavior.
    int res = RequestBuffers(0);
    if (res) {
      return res;
    }
  }

  // Set the camera to the new format.
  v4l2_format new_format;
  desired_format.FillFormatRequest(&new_format);

  HAL_LOGD("%s type=%d pixelformat=%d(%s) width=%d height=%d",
           __func__,
           new_format.type,
           new_format.fmt.pix_mp.pixelformat,
           getV4l2PixelFormatString(new_format.fmt.pix_mp.pixelformat).c_str(),
           new_format.fmt.pix_mp.width,
           new_format.fmt.pix_mp.height);

  // If interpolation is configured and the caller asked for the
  // interpolation destination size, swap in the source size for the driver
  // and remember the requested size so it can be restored below.
  int setFormatFlag = 0;
  int cur_width = 0;
  int cur_height = 0;
  if (mCameraConfig->supportInterpolationSize()) {
    uint32_t interpolation_dst_width = 0;
    uint32_t interpolation_dst_height = 0;
    uint32_t interpolation_src_width = 0;
    uint32_t interpolation_src_height = 0;
    char * value;
    char * value1;
    value = mCameraConfig->supportInterpolationSizeValue();
    parse_pair(value, &interpolation_src_width, &interpolation_src_height, 'x');
    value1 = mCameraConfig->defaultInterpolationSizeValue();
    parse_pair(value1,
               &interpolation_dst_width, &interpolation_dst_height, 'x');

    if (interpolation_dst_width == new_format.fmt.pix_mp.width &&
        interpolation_dst_height == new_format.fmt.pix_mp.height) {
      cur_width = new_format.fmt.pix_mp.width;
      cur_height = new_format.fmt.pix_mp.height;
      new_format.fmt.pix_mp.width = interpolation_src_width;
      new_format.fmt.pix_mp.height = interpolation_src_height;
      setFormatFlag = 1;
    }
  }

  // TODO(b/29334616): When async, this will need to check if the stream
  // is on, and if so, lock it off while setting format.
  if (IoctlLocked(VIDIOC_S_FMT, &new_format) < 0) {
    HAL_LOGE("S_FMT failed: %s", strerror(errno));
    return -ENODEV;
  }

  // Read back what the driver actually accepted.
  if (IoctlLocked(VIDIOC_G_FMT, &new_format) < 0) {
    HAL_LOGE("G_FMT failed: %s", strerror(errno));
    return -ENODEV;
  }

  // Check that the driver actually set to the requested values.
  if (desired_format != StreamFormat(new_format)) {
    HAL_LOGE("Device doesn't support desired stream configuration.");
  }

  // Restore the interpolation destination size in the cached format.
  if (setFormatFlag && cur_width != 0 && cur_height != 0) {
    new_format.fmt.pix_mp.width = cur_width;
    new_format.fmt.pix_mp.height = cur_height;
  }
  // Keep track of our new format.
  format_.reset(new StreamFormat(new_format));

  // Format changed, request new buffers.
  int res = RequestBuffers(*result_max_buffers);
  if (res) {
    HAL_LOGE("Requesting buffers for new format failed.");
    return res;
  }
  *result_max_buffers = buffers_.size();
  HAL_LOGV("*result_max_buffers:%d.", *result_max_buffers);
  return 0;
}
|
|
int V4L2Stream::RequestBuffers(uint32_t num_requested) {
|
v4l2_requestbuffers req_buffers;
|
memset(&req_buffers, 0, sizeof(req_buffers));
|
req_buffers.type = format_->type();
|
req_buffers.memory = format_->memory();
|
req_buffers.count = num_requested;
|
|
int res = IoctlLocked(VIDIOC_REQBUFS, &req_buffers);
|
// Calling REQBUFS releases all queued buffers back to the user.
|
// int gralloc_res = gralloc_->unlockAllBuffers();
|
if (res < 0) {
|
HAL_LOGE("REQBUFS failed: %s", strerror(errno));
|
return -ENODEV;
|
}
|
|
// V4L2 will set req_buffers.count to a number of buffers it can handle.
|
if (num_requested > 0 && req_buffers.count < 1) {
|
HAL_LOGE("REQBUFS claims it can't handle any buffers.");
|
return -ENODEV;
|
}
|
|
{
|
std::lock_guard<std::mutex> guard(cmd_queue_lock_);
|
buffer_cnt_inflight_ = 0;
|
}
|
|
// refresh buffers_num_ queue.
|
while (!buffers_num_.empty()) {
|
buffers_num_.pop();
|
}
|
|
if (buffers_num_.empty()) {
|
for (size_t i = 0; i < req_buffers.count; ++i) {
|
buffers_num_.push(i);
|
HAL_LOGV("buffers_num_ push:%zu, size:%zu.", i, buffers_num_.size());
|
}
|
}
|
|
buffers_.resize(req_buffers.count, false);
|
HAL_LOGD("num_requested:%d,req_buffers.count:%d.",
|
num_requested, req_buffers.count);
|
return 0;
|
}
|
|
int V4L2Stream::queueBuffer(v4l2_buffer* pdevice_buffer) {
|
int res;
|
std::lock_guard<std::mutex> guard(cmd_queue_lock_);
|
res = IoctlLocked(VIDIOC_QBUF, pdevice_buffer);
|
if (res >= 0) {
|
buffer_cnt_inflight_++;
|
HAL_LOGV("After queue buffer csi driver has %d buffer(s) now.",
|
buffer_cnt_inflight_);
|
}
|
return res;
|
}
|
|
int V4L2Stream::dequeueBuffer(v4l2_buffer* pdevice_buffer) {
|
int res;
|
std::lock_guard<std::mutex> guard(cmd_queue_lock_);
|
res = IoctlLocked(VIDIOC_DQBUF, pdevice_buffer);
|
if (res >= 0) {
|
buffer_cnt_inflight_--;
|
HAL_LOGV("After dequeue buffer csi driver has %d buffer(s) now.",
|
buffer_cnt_inflight_);
|
}
|
return res;
|
}
|
|
int V4L2Stream::PrepareBuffer() {
|
if (!format_) {
|
HAL_LOGE("Stream format must be set before enqueuing buffers.");
|
return -ENODEV;
|
}
|
|
struct v4l2_buffer device_buffer;
|
int index = -1;
|
|
for (size_t i = 0; i < buffers_.size(); i++) {
|
std::lock_guard<std::mutex> guard(buffer_queue_lock_);
|
index = buffers_num_.front();
|
buffers_num_.pop();
|
HAL_LOGV("buffers_num_ pop:%d, size:%zu.", index, buffers_num_.size());
|
|
// Set up a v4l2 buffer struct.
|
memset(&device_buffer, 0, sizeof(device_buffer));
|
device_buffer.type = format_->type();
|
device_buffer.index = index;
|
device_buffer.memory = format_->memory();
|
device_buffer.length = format_->nplanes();
|
struct v4l2_plane planes[VIDEO_MAX_PLANES];
|
// TODO(zjw) support mutiplanar.
|
memset(planes, 0, VIDEO_MAX_PLANES * sizeof(struct v4l2_plane));
|
if (V4L2_CAPTURE_TYPE == device_buffer.type) {
|
device_buffer.m.planes = planes;
|
if (NULL == device_buffer.m.planes) {
|
HAL_LOGE("device_buffer.m.planes calloc failed!\n");
|
}
|
}
|
|
// Use QUERYBUF to ensure our buffer/device is in good shape,
|
// and fill out remaining fields.
|
if (IoctlLocked(VIDIOC_QUERYBUF, &device_buffer) < 0) {
|
HAL_LOGE("QUERYBUF fails: %s", strerror(errno));
|
return -ENODEV;
|
}
|
|
mMapMem.mem[i] = mmap(0,
|
device_buffer.m.planes[0].length,
|
PROT_READ | PROT_WRITE,
|
MAP_SHARED,
|
device_fd_,
|
device_buffer.m.planes[0].m.mem_offset);
|
mMapMem.length = device_buffer.m.planes[0].length;
|
if (mMapMem.mem[i] == MAP_FAILED) {
|
HAL_LOGE("Unable to map buffer (%s)", strerror(errno));
|
for (size_t j = 0; j < i; j++) {
|
munmap(buffers_addr[i], mMapMem.length);
|
}
|
return -1;
|
}
|
HAL_LOGD("index: %zu, fd: %d, mem: %lx, len: %d, offset: 0x%x",
|
i,
|
device_fd_,
|
(unsigned long)mMapMem.mem[i],
|
device_buffer.m.planes[0].
|
length,
|
device_buffer.m.offset);
|
|
if (queueBuffer(&device_buffer) < 0) {
|
HAL_LOGE("QBUF fails: %s", strerror(errno));
|
return -ENODEV;
|
}
|
|
if (mCameraConfig->supportInterpolationSize()) {
|
uint32_t interpolation_dst_width = 0;
|
uint32_t interpolation_dst_height = 0;
|
uint32_t interpolation_src_width = 0;
|
uint32_t interpolation_src_height = 0;
|
char * value;
|
char * value1;
|
value = mCameraConfig->supportInterpolationSizeValue();
|
parse_pair(value,
|
&interpolation_src_width, &interpolation_src_height, 'x');
|
value1 = mCameraConfig->defaultInterpolationSizeValue();
|
parse_pair(value1,
|
&interpolation_dst_width, &interpolation_dst_height, 'x');
|
if (interpolation_dst_width == format_->width() &&
|
interpolation_dst_height == format_->height()) {
|
memset(reinterpret_cast<void*>(mMapMem.mem[i]),
|
0x10,
|
interpolation_src_width * interpolation_src_height);
|
memset(reinterpret_cast<char *>(mMapMem.mem[i]) +
|
interpolation_src_width * interpolation_src_height,
|
0x80,
|
interpolation_src_width * interpolation_src_height / 2);
|
}
|
} else {
|
memset(reinterpret_cast<void*>(mMapMem.mem[i]),
|
0x10, format_->width() * format_->height());
|
memset(reinterpret_cast<char *>(mMapMem.mem[i]) +
|
format_->width() * format_->height(),
|
0x80, format_->width() * format_->height() / 2);
|
}
|
}
|
|
HAL_LOGD("Buffers had been prepared!");
|
return 0;
|
}
|
|
// Takes a free buffer index — blocking until one is available or flush()
// interrupts the wait — and queues the corresponding buffer back to the
// driver, marking it in flight. Returns 0 on success (including the
// flush-abort path), -ENODEV on failure.
int V4L2Stream::EnqueueBuffer() {
  if (!format_) {
    HAL_LOGE("Stream format must be set before enqueuing buffers.");
    return -ENODEV;
  }

  // Find a free buffer index. Could use some sort of persistent hinting
  // here to improve expected efficiency, but buffers_.size() is expected
  // to be low enough (<10 experimentally) that it's not worth it.
  int index = -1;
  {
    std::unique_lock<std::mutex> lock(buffer_queue_lock_);
    while (buffers_num_.empty()) {
      HAL_LOGV("buffers_num_ is empty now, wait for the queue to be filled.");
      // flush() sets mflush_buffers and signals the condvar; bail out
      // without queueing anything. The flag is consumed (reset) here.
      if (mflush_buffers) {
        mflush_buffers = false;
        return 0;
      }
      buffer_availabl_queue_.wait(lock);
      if (mflush_buffers) {
        mflush_buffers = false;
        return 0;
      }
    }
    index = buffers_num_.front();
    buffers_num_.pop();
    HAL_LOGV("buffers_num_ pop:%d, size:%zu.", index, buffers_num_.size());
  }

  if (index < 0) {
    // Note: The HAL should be tracking the number of buffers in flight
    // for each stream, and should never overflow the device.
    HAL_LOGE("Cannot enqueue buffer: stream is already full.");
    return -ENODEV;
  }

  // Set up a v4l2 buffer struct.
  v4l2_buffer device_buffer;
  memset(&device_buffer, 0, sizeof(device_buffer));
  device_buffer.type = format_->type();
  device_buffer.index = index;
  device_buffer.memory = format_->memory();
  device_buffer.length = format_->nplanes();
  // Multi-planar capture types need a plane array attached to the request.
  struct v4l2_plane planes[VIDEO_MAX_PLANES];
  memset(planes, 0, VIDEO_MAX_PLANES*sizeof(struct v4l2_plane));
  if (V4L2_CAPTURE_TYPE == device_buffer.type) {
    device_buffer.m.planes = planes;
    if (NULL == device_buffer.m.planes) {
      HAL_LOGE("device_buffer.m.planes calloc failed!\n");
    }
  }

  HAL_LOGV("mMapMem.mem[%d]:%p.", index, mMapMem.mem[index]);

  if (queueBuffer(&device_buffer) < 0) {
    HAL_LOGE("QBUF fails: %s", strerror(errno));
    return -ENODEV;
  }

  // Mark the buffer as in flight.
  std::lock_guard<std::mutex> guard(buffer_queue_lock_);
  buffers_[index] = true;

  return 0;
}
|
|
int V4L2Stream::DequeueBuffer(void** src_addr_, struct timeval* ts) {
|
if (!format_) {
|
HAL_LOGE(
|
"Format not set, so stream can't be on, "
|
"so no buffers available for dequeueing");
|
return -EAGAIN;
|
}
|
|
v4l2_buffer buffer;
|
memset(&buffer, 0, sizeof(buffer));
|
buffer.type = format_->type();
|
buffer.memory = format_->memory();
|
buffer.length = format_->nplanes();
|
struct v4l2_plane planes[VIDEO_MAX_PLANES];
|
memset(planes, 0, VIDEO_MAX_PLANES*sizeof(struct v4l2_plane));
|
if (V4L2_CAPTURE_TYPE == buffer.type) {
|
buffer.m.planes = planes;
|
if (NULL == buffer.m.planes) {
|
HAL_LOGE("device_buffer.m.planes calloc failed!\n");
|
}
|
}
|
|
int res = dequeueBuffer(&buffer);
|
if (res) {
|
if (errno == EAGAIN) {
|
// Expected failure.
|
return -EAGAIN;
|
} else {
|
// Unexpected failure.
|
HAL_LOGE("DQBUF fails: %s", strerror(errno));
|
return -ENODEV;
|
}
|
}
|
|
*ts = buffer.timestamp;
|
*src_addr_ = mMapMem.mem[buffer.index];
|
|
#if DBG_SAVE_OUTPUT
|
char yuv_path[100];
|
dq_yuv_count = dq_yuv_count % output_counts;
|
sprintf(yuv_path, "/data/camera/dq_yuv_%d.bin", dq_yuv_count++);
|
int copy_size = ALIGN_16B(format_->width())*ALIGN_16B(format_->height())*3/2;
|
saveBuffers(yuv_path, *src_addr_, copy_size, true);
|
#endif
|
|
// Mark the buffer as no longer in flight.
|
{
|
std::lock_guard<std::mutex> guard(buffer_queue_lock_);
|
buffers_[buffer.index] = false;
|
buffers_num_.push(buffer.index);
|
HAL_LOGV("buffers_num_ push:%d, size:%zu.",
|
buffer.index, buffers_num_.size());
|
buffer_availabl_queue_.notify_one();
|
HAL_LOGV("buffer.index:%d has been freed by csi driver, "
|
"and buffer_availabl_queue_ was notified!\n", buffer.index);
|
}
|
|
HAL_LOGV("mMapMem.mem[%d]:%p.", buffer.index, mMapMem.mem[buffer.index]);
|
return 0;
|
}
|
|
// Copies one captured YUV frame from the V4L2 mmap buffer (src_addr) into
// the gralloc-locked destination planes (dst_addr_ycbcr).
// For the main stream it first tries two hardware paths:
//   1. VE crop (AWCropYuv) when a valid crop rect smaller than the stream
//      size is set (digital zoom).
//   2. VE upscale (AWScalerYuv) when the stream runs at an interpolated
//      size larger than what the sensor actually delivers.
// If neither path applies (or the crop fails), falls back to a plain
// two-plane memcpy. Returns 0; -ENODEV if the stream format is unset.
int V4L2Stream::CopyYCbCrBuffer(android_ycbcr* dst_addr_ycbcr, void* src_addr) {
  if (!format_) {
    HAL_LOGE("Stream format must be set before enqueuing buffers.");
    return -ENODEV;
  }
  int res = 0;
  VencRect sCropInfo;
  VencIspBufferInfo pInBuffer, pOutBuffer;
  memset(&pInBuffer, 0, sizeof(pInBuffer));
  memset(&pOutBuffer, 0, sizeof(pOutBuffer));
  // Crop rect comes from the capture request (digital zoom region).
  sCropInfo.nLeft = jpeg_crop_rect.left;
  sCropInfo.nTop = jpeg_crop_rect.top;
  sCropInfo.nWidth = jpeg_crop_rect.width;
  sCropInfo.nHeight = jpeg_crop_rect.height;

  int width = format_->width();
  int height = format_->height();

  if (device_ss_ == MAIN_STREAM) {
    // Hardware crop path: only when the rect is non-empty and strictly
    // smaller than the full frame in at least one dimension.
    if ((jpeg_crop_rect.width < width || jpeg_crop_rect.height < height)
        && jpeg_crop_rect.width > 0
        && jpeg_crop_rect.height > 0) {
      pInBuffer.nWidth = width;
      pInBuffer.nHeight = height;
      pInBuffer.colorFormat = VENC_PIXEL_YUV420SP;
      pInBuffer.pAddrVirY = (unsigned char*)src_addr;

      // Output buffer dimensions are 16-byte aligned — presumably a VE
      // hardware alignment requirement; TODO confirm against VE docs.
      pOutBuffer.nWidth = ALIGN_16B(width);
      pOutBuffer.nHeight = ALIGN_16B(height);
      pOutBuffer.pAddrVirY = (unsigned char*)dst_addr_ycbcr->y;

      HAL_LOGV("nLeft:%d, nTop:%d,nWidth:%d. nHeight:%d",
          sCropInfo.nLeft, sCropInfo.nTop, sCropInfo.nWidth, sCropInfo.nHeight);
      // aw_ve_lock serializes access to the shared VE hardware encoder.
      std::lock_guard<std::mutex> guard(aw_ve_lock);
      res = AWCropYuv(&pInBuffer, &sCropInfo , &pOutBuffer);
      if (res < 0) {
        // Crop failed: log and fall through to the plain-copy path below.
        HAL_LOGE("AWCropYuv fail nLeft:%d, nTop:%d,nWidth:%d. nHeight:%d",
                 sCropInfo.nLeft,
                 sCropInfo.nTop,
                 sCropInfo.nWidth,
                 sCropInfo.nHeight);
      } else {
        return 0;
      }
    }

    // Interpolation path: the stream is configured at an upscaled
    // ("interpolated") size, but the sensor delivers the smaller source
    // size, so scale src -> dst with the VE scaler.
    if (mCameraConfig->supportInterpolationSize()) {
      uint32_t interpolation_dst_width = 0;
      uint32_t interpolation_dst_height = 0;
      uint32_t interpolation_src_width = 0;
      uint32_t interpolation_src_height = 0;
      char * value;
      char * value1;
      // Config values are "WxH" strings, e.g. "3264x2448".
      value = mCameraConfig->supportInterpolationSizeValue();
      parse_pair(value,
                 &interpolation_src_width, &interpolation_src_height, 'x');
      value1 = mCameraConfig->defaultInterpolationSizeValue();
      parse_pair(value1,
                 &interpolation_dst_width, &interpolation_dst_height, 'x');
      if (interpolation_dst_width == format_->width() &&
          interpolation_dst_height == format_->height()) {
        pInBuffer.nWidth = interpolation_src_width;
        pInBuffer.nHeight = interpolation_src_height;
        pInBuffer.colorFormat = VENC_PIXEL_YUV420SP;
        pInBuffer.pAddrVirY = (unsigned char*)src_addr;
        pOutBuffer.nWidth = ALIGN_16B(format_->width());
        pOutBuffer.nHeight = ALIGN_16B(format_->height());
        pOutBuffer.pAddrVirY = (unsigned char*)dst_addr_ycbcr->y;
        // NOTE(review): AWScalerYuv is called without aw_ve_lock here,
        // unlike AWCropYuv above — verify whether the scaler needs the
        // same serialization.
        AWScalerYuv(&pInBuffer, &pOutBuffer);
        return 0;
      }
    }
  }

  // Plain copy: Y plane, then the interleaved chroma plane assumed to be
  // contiguous at dst_addr_ycbcr->cr (semi-planar NV21-style layout —
  // TODO confirm the gralloc plane layout matches).
  memcpy(dst_addr_ycbcr->y, src_addr, width*height);
  memcpy(dst_addr_ycbcr->cr,
         reinterpret_cast<char*>(src_addr) + width*height, width*height/2);
  return 0;
}
|
|
int V4L2Stream::EncodeBuffer(void * dst_addr,
|
void * src_addr,
|
unsigned long mJpegBufferSizes,
|
JPEG_ENC_t jpeg_enc) {
|
isTakePicture = true;
|
unsigned long jpeg_buf = (unsigned long)dst_addr;
|
int bufSize = 0;
|
|
// Get buffer size.
|
HAL_LOGD("jpeg info:lock_buffer vaddr:%lu, buffer size:%lu.",
|
jpeg_buf, mJpegBufferSizes);
|
|
if (mCameraConfig->supportInterpolationSize()) {
|
uint32_t interpolation_dst_width = 0;
|
uint32_t interpolation_dst_height = 0;
|
uint32_t interpolation_src_width = 0;
|
uint32_t interpolation_src_height = 0;
|
char * value;
|
char * value1;
|
value = mCameraConfig->supportInterpolationSizeValue();
|
parse_pair(value, &interpolation_src_width, &interpolation_src_height, 'x');
|
value1 = mCameraConfig->defaultInterpolationSizeValue();
|
parse_pair(value1,
|
&interpolation_dst_width, &interpolation_dst_height, 'x');
|
if (interpolation_dst_width == format_->width() &&
|
interpolation_dst_height == format_->height()) {
|
jpeg_enc.src_w = interpolation_src_width;
|
jpeg_enc.src_h = interpolation_src_height;
|
}
|
}
|
|
if (jpeg_enc.src_w == 0) {
|
jpeg_enc.src_w = format_->width();
|
}
|
if (jpeg_enc.src_h == 0) {
|
jpeg_enc.src_h = format_->height();
|
}
|
|
if (jpeg_enc.rotate == 270 || jpeg_enc.rotate == 90) {
|
std::swap(jpeg_enc.pic_w, jpeg_enc.pic_h);
|
std::swap(jpeg_enc.crop_x, jpeg_enc.crop_y);
|
std::swap(jpeg_enc.crop_w, jpeg_enc.crop_h);
|
}
|
|
jpeg_enc.colorFormat = JPEG_COLOR_YUV420_NV21;
|
|
char mDateTime[64];
|
time_t t;
|
struct tm *tm_t;
|
time(&t);
|
tm_t = localtime(&t);
|
sprintf(mDateTime,
|
"%4d:%02d:%02d %02d:%02d:%02d",
|
tm_t->tm_year+1900, tm_t->tm_mon+1, tm_t->tm_mday,
|
tm_t->tm_hour, tm_t->tm_min, tm_t->tm_sec);
|
|
char property[PROPERTY_VALUE_MAX];
|
if (property_get("ro.product.manufacturer", property, "") > 0) {
|
strcpy(jpeg_enc.CameraMake, property);
|
}
|
if (property_get("ro.product.model", property, "") > 0) {
|
strcpy(jpeg_enc.CameraModel, property);
|
}
|
|
strcpy(jpeg_enc.DateTime, mDateTime);
|
HAL_LOGV("jpeg info:%s.", mDateTime);
|
|
jpeg_enc.whitebalance = 0;
|
jpeg_enc.focal_length = 3.04;
|
|
HAL_LOGV("src: %dx%d, pic: %dx%d, quality: %d, rotate: %d, Gps method: %s,"
|
"thumbW: %d, thumbH: %d, factor: %d, crop: [%d, %d, %d, %d]",
|
jpeg_enc.src_w,
|
jpeg_enc.src_h,
|
jpeg_enc.pic_w,
|
jpeg_enc.pic_h,
|
jpeg_enc.quality,
|
jpeg_enc.rotate,
|
jpeg_enc.gps_processing_method,
|
jpeg_enc.thumbWidth,
|
jpeg_enc.thumbHeight,
|
jpeg_enc.scale_factor,
|
jpeg_enc.crop_x,
|
jpeg_enc.crop_y,
|
jpeg_enc.crop_w,
|
jpeg_enc.crop_h);
|
|
|
JpegEncInfo sjpegInfo;
|
EXIFInfo exifInfo;
|
memset(&sjpegInfo, 0, sizeof(JpegEncInfo));
|
memset(&exifInfo, 0, sizeof(EXIFInfo));
|
|
sjpegInfo.sBaseInfo.nStride = jpeg_enc.src_w;
|
sjpegInfo.sBaseInfo.nInputWidth = jpeg_enc.src_w;
|
sjpegInfo.sBaseInfo.nInputHeight = jpeg_enc.src_h;
|
sjpegInfo.sBaseInfo.nDstWidth = jpeg_enc.pic_w;
|
sjpegInfo.sBaseInfo.nDstHeight = jpeg_enc.pic_h;
|
sjpegInfo.sBaseInfo.eInputFormat = VENC_PIXEL_YVU420SP;
|
if (mCameraConfig->supportInterpolationSize()) {
|
jpeg_enc.quality = 100;
|
sjpegInfo.quality = 100;
|
} else {
|
sjpegInfo.quality = jpeg_enc.quality;
|
}
|
|
exifInfo.Orientation = jpeg_enc.rotate;
|
|
if (jpeg_enc.crop_h != 0) {
|
sjpegInfo.nShareBufFd = jpeg_enc.crop_h;
|
jpeg_enc.crop_h = 0;
|
sjpegInfo.bNoUseAddrPhy = 0;
|
} else {
|
sjpegInfo.nShareBufFd = jpeg_enc.crop_h;
|
jpeg_enc.crop_h = 0;
|
sjpegInfo.bNoUseAddrPhy = 1;
|
}
|
|
HAL_LOGV("V4L2Stream::EncodeBuffer left:%d top:%d width:%d height:%d",
|
jpeg_crop_rect.left,
|
jpeg_crop_rect.top,
|
jpeg_crop_rect.width,
|
jpeg_crop_rect.height);
|
|
if (jpeg_enc.enable_crop != 0 &&
|
(jpeg_crop_rect.left != 0 || jpeg_crop_rect.top != 0)) {
|
sjpegInfo.bEnableCorp = 1;
|
sjpegInfo.sCropInfo.nWidth = jpeg_crop_rect.width;
|
sjpegInfo.sCropInfo.nHeight = jpeg_crop_rect.height;
|
sjpegInfo.sCropInfo.nLeft = jpeg_crop_rect.left;
|
sjpegInfo.sCropInfo.nTop = jpeg_crop_rect.top;
|
} else {
|
sjpegInfo.bEnableCorp = 0;
|
}
|
|
sjpegInfo.pAddrPhyY = (unsigned char *)src_addr;
|
sjpegInfo.pAddrPhyC = (unsigned char *)((unsigned long)src_addr +
|
jpeg_enc.src_w *jpeg_enc.src_h);
|
sjpegInfo.pAddrVirY = (unsigned char *)src_addr;
|
sjpegInfo.pAddrVirC = (unsigned char *)((unsigned long)src_addr +
|
jpeg_enc.src_w *jpeg_enc.src_h);
|
|
exifInfo.ThumbWidth = jpeg_enc.thumbWidth;
|
exifInfo.ThumbHeight = jpeg_enc.thumbHeight;
|
|
HAL_LOGD("src: %dx%d, pic: %dx%d, quality: %d, rotate: %d,"
|
"thumbW: %d, thumbH: %d, EnableCorp: %d, "
|
"crop: [%d, %d, %d, %d], share_fd:%d",
|
sjpegInfo.sBaseInfo.nInputWidth,
|
sjpegInfo.sBaseInfo.nInputHeight,
|
sjpegInfo.sBaseInfo.nDstWidth,
|
sjpegInfo.sBaseInfo.nDstHeight,
|
sjpegInfo.quality,
|
exifInfo.Orientation,
|
exifInfo.ThumbWidth,
|
exifInfo.ThumbHeight,
|
sjpegInfo.bEnableCorp,
|
sjpegInfo.sCropInfo.nLeft,
|
sjpegInfo.sCropInfo.nTop,
|
sjpegInfo.sCropInfo.nWidth,
|
sjpegInfo.sCropInfo.nHeight,
|
sjpegInfo.nShareBufFd);
|
|
strcpy(reinterpret_cast<char*>(exifInfo.CameraMake), jpeg_enc.CameraMake);
|
strcpy(reinterpret_cast<char*>(exifInfo.CameraModel), jpeg_enc.CameraModel);
|
strcpy(reinterpret_cast<char*>(exifInfo.DateTime), jpeg_enc.DateTime);
|
|
struct timeval tv;
|
gettimeofday(&tv, NULL);
|
char subSecTime1[8];
|
char subSecTime2[8];
|
char subSecTime3[8];
|
sprintf(subSecTime1, "%06ld", tv.tv_usec);
|
sprintf(subSecTime2, "%06ld", tv.tv_usec);
|
sprintf(subSecTime3, "%06ld", tv.tv_usec);
|
strcpy(reinterpret_cast<char*>(exifInfo.subSecTime), subSecTime1);
|
strcpy(reinterpret_cast<char*>(exifInfo.subSecTimeOrig), subSecTime2);
|
strcpy(reinterpret_cast<char*>(exifInfo.subSecTimeDig), subSecTime3);
|
|
if (0 != strlen(jpeg_enc.gps_processing_method)) {
|
strcpy(reinterpret_cast<char*>(exifInfo.gpsProcessingMethod),
|
jpeg_enc.gps_processing_method);
|
exifInfo.enableGpsInfo = 1;
|
exifInfo.gps_latitude = jpeg_enc.gps_latitude;
|
exifInfo.gps_longitude = jpeg_enc.gps_longitude;
|
exifInfo.gps_altitude = jpeg_enc.gps_altitude;
|
exifInfo.gps_timestamp = jpeg_enc.gps_timestamp;
|
} else {
|
exifInfo.enableGpsInfo = 0;
|
}
|
|
// TODO(aw): fix parameter for sensor
|
exifInfo.ExposureTime.num = 25;
|
exifInfo.ExposureTime.den = 100;
|
// eg:FNum=2.2, aperture = 220, --> num = 220,den = 100
|
exifInfo.FNumber.num = 200;
|
exifInfo.FNumber.den = 100;
|
exifInfo.ISOSpeed = 400;
|
|
exifInfo.ExposureBiasValue.num = 25;
|
exifInfo.ExposureBiasValue.den = 100;
|
|
exifInfo.MeteringMode = 0;
|
exifInfo.FlashUsed = 0;
|
|
exifInfo.FocalLength.num = 304;
|
exifInfo.FocalLength.den = 100;
|
|
exifInfo.DigitalZoomRatio.num = 0;
|
exifInfo.DigitalZoomRatio.den = 0;
|
|
exifInfo.WhiteBalance = 0;
|
exifInfo.ExposureMode = 0;
|
|
std::lock_guard<std::mutex> guard(aw_ve_lock);
|
|
#if DBG_SAVE_OUTPUT
|
char yuv_path[100];
|
eb_yuv_count = eb_yuv_count % output_counts;
|
sprintf(yuv_path, "/data/camera/eb_yuv_%d.bin", eb_yuv_count++);
|
int yuv_size = jpeg_enc.src_w * jpeg_enc.src_h*3/2;
|
saveBuffers(yuv_path, src_addr, yuv_size, true);
|
#endif
|
int ret = AWJpecEnc(&sjpegInfo,
|
&exifInfo,
|
reinterpret_cast<void *>(jpeg_buf),
|
&bufSize);
|
#if DBG_SAVE_OUTPUT
|
char jpeg_path[100];
|
eb_jpeg_count = eb_jpeg_count % output_counts;
|
sprintf(jpeg_path, "/data/camera/eb_jpg_%d.jpg", eb_jpeg_count++);
|
saveBuffers(jpeg_path, reinterpret_cast<void*>(jpeg_buf), bufSize, true);
|
#endif
|
|
if (ret < 0) {
|
HAL_LOGE("JpegEnc failed");
|
return false;
|
}
|
|
camera3_jpeg_3a_blob_t jpeg_3a_header;
|
jpeg_3a_header.jpeg_3a_header_id = CAMERA3_JPEG_3A_PARAM_BLOB_ID;
|
jpeg_3a_header.jpeg_3a_size = sizeof(camera3_jpeg_3a_blob_t) +
|
ISP_3A_PARAM_SIZE;
|
ALOGV("3a jpeg_3a_size = %d sizeof struct = %zu",
|
jpeg_3a_header.jpeg_3a_size, sizeof(camera3_jpeg_3a_blob_t));
|
strncpy(jpeg_3a_header.magic_str, ISP_DEBUG_MAGIC_STR, 8);
|
|
camera3_jpeg_isp_msg_blob_t jpeg_isp_msg_header;
|
jpeg_isp_msg_header.jpeg_isp_msg_header_id = CAMERA3_JPEG_ISP_MSG_BLOB_ID;
|
jpeg_isp_msg_header.jpeg_isp_msg_size =
|
sizeof(camera3_jpeg_isp_msg_blob_t) + ISP_DEBUG_MSG_SIZE;
|
ALOGV("isp jpeg_isp_size = %d sizeof struct = %zu",
|
jpeg_isp_msg_header.jpeg_isp_msg_size,
|
sizeof(camera3_jpeg_isp_msg_blob_t));
|
strncpy(jpeg_isp_msg_header.magic_str, ISP_DEBUG_MAGIC_STR, 8);
|
|
camera3_jpeg_blob_t jpegHeader;
|
jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
|
jpegHeader.jpeg_size =
|
bufSize + jpeg_3a_header.jpeg_3a_size +
|
jpeg_isp_msg_header.jpeg_isp_msg_size;
|
|
unsigned long jpeg_eof_offset =
|
(unsigned long)(mJpegBufferSizes - (unsigned long)sizeof(jpegHeader));
|
char *jpeg_eof = reinterpret_cast<char *>(jpeg_buf) + jpeg_eof_offset;
|
memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
|
|
char *jpeg_isp_3a_params = reinterpret_cast<char *>(jpeg_buf + bufSize);
|
memcpy(jpeg_isp_3a_params, &jpeg_3a_header, sizeof(jpeg_3a_header));
|
mAWIspApi->awIspGet3AParameters(reinterpret_cast<void*>(
|
jpeg_isp_3a_params + sizeof(jpeg_3a_header)));
|
|
char *jpeg_isp_debug_msg = reinterpret_cast<char *>(jpeg_buf + bufSize +
|
jpeg_3a_header.jpeg_3a_size);
|
memcpy(jpeg_isp_debug_msg, &jpeg_isp_msg_header, sizeof(jpeg_isp_msg_header));
|
mAWIspApi->awIspGetDebugMessage(reinterpret_cast<void*>(
|
jpeg_isp_debug_msg + sizeof(jpeg_isp_msg_header)));
|
|
return 0;
|
}
|
|
int V4L2Stream::WaitCameraReady() {
|
if (!format_) {
|
HAL_LOGV(
|
"Format not set, so stream can't be on, "
|
"so no buffers available for Ready");
|
return -EAGAIN;
|
}
|
|
int ret = -1;
|
int epollftd = epoll_create(2);
|
if (epollftd == -1) {
|
HAL_LOGE("create epoll failed");
|
return -1;
|
}
|
|
epoll_event event;
|
event.events = EPOLLIN | EPOLLET;
|
event.data.fd = device_fd_;
|
ret = epoll_ctl(epollftd, EPOLL_CTL_ADD, device_fd_, &event);
|
|
if (ret == -1) {
|
HAL_LOGE("add device fd failed");
|
close(epollftd);
|
return -1;
|
}
|
|
epoll_event wakeup;
|
wakeup.events = EPOLLIN | EPOLLET;
|
wakeup.data.fd = read_fd_;
|
epoll_ctl(epollftd, EPOLL_CTL_ADD, read_fd_, &event);
|
|
if (pEvents == NULL) {
|
pEvents = (epoll_event *)calloc(2, sizeof(epoll_event));
|
if (pEvents == NULL) {
|
close(epollftd);
|
return -1;
|
}
|
}
|
int nEventNum = epoll_wait(epollftd, pEvents, 2, 1000);
|
ret = -1;
|
for (int i = 0; i < nEventNum; i++) {
|
if (pEvents[i].data.fd == device_fd_) {
|
if (pEvents[i].events & EPOLLIN) {
|
ret = 0;
|
break;
|
}
|
} else if (pEvents[i].data.fd == read_fd_ && disconnect) {
|
char data[2];
|
read(read_fd_, data, 2);
|
ret = -1;
|
break;
|
}
|
}
|
|
close(epollftd);
|
return ret;
|
}
|
|
} // namespace v4l2_camera_hal
|