/*
|
* Copyright (c) 2021 by Allwinnertech Co., Ltd.
|
*
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
* you may not use this file except in compliance with the License.
|
* You may obtain a copy of the License at
|
*
|
* http://www.apache.org/licenses/LICENSE-2.0
|
*
|
* Unless required by applicable law or agreed to in writing, software
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
* See the License for the specific language governing permissions and
|
* limitations under the License.
|
*/
|
|
#define LOG_TAG "CameraHALv3_StreamManager"
|
#include "stream_manager.h"
|
|
#include <inttypes.h>
|
#include <fcntl.h>
|
#include <sys/stat.h>
|
#include <sys/types.h>
|
#include <sys/time.h>
|
#include <hardware/camera3.h>
|
#include <linux/videodev2.h>
|
|
#include <cstdlib>
|
#include <memory>
|
#include <utility>
|
|
#include "CameraMetadata.h"
|
|
namespace v4l2_camera_hal {
|
|
std::shared_ptr<StreamManager> StreamManager::NewStreamManager(
|
std::shared_ptr<V4L2Wrapper> device,
|
std::shared_ptr<V4L2Camera> camera) {
|
HAL_LOG_ENTER();
|
std::unique_ptr<V4L2Gralloc> gralloc(V4L2Gralloc::NewV4L2Gralloc());
|
if (!gralloc) {
|
HAL_LOGE("Failed to initialize gralloc helper.");
|
}
|
HAL_LOGD("device.use_count:%ld. camera.use_count:%ld.",
|
device.use_count(),
|
camera.use_count());
|
return std::shared_ptr<StreamManager>(new StreamManager(std::move(device),
|
std::move(gralloc), std::move(camera)));
|
}
|
|
// Takes ownership of the V4L2 device wrapper, gralloc helper and camera.
// Drop counters and the merge flag start at zero; they are updated as
// streams are created and buffers are dequeued.
StreamManager::StreamManager(std::shared_ptr<V4L2Wrapper> device,
                             std::unique_ptr<V4L2Gralloc> gralloc,
                             std::shared_ptr<V4L2Camera> camera)
    : device_(std::move(device)),
      gralloc_(std::move(gralloc)),
      camera_(std::move(camera)) {
  HAL_LOG_ENTER();
  mDrop_main_buffers = 0;
  mDrop_sub_buffers = 0;
  merge_stream_flag = 0;
  // Log the members, not the parameters: `device` and `camera` were
  // moved from in the init list, so their use_count() is always 0 here
  // (the original logged the moved-from locals).
  HAL_LOGD("device.use_count:%ld. camera.use_count:%ld.",
           device_.use_count(), camera_.use_count());
}
|
|
// Tears down every per-slot object, releasing in dependency order:
// device connections first, then the V4L2 streams, then the HAL-side
// camera streams. Reference counts are logged before and after so
// lifetime leaks show up in verbose logs.
StreamManager::~StreamManager() {
  HAL_LOG_ENTER();

  for (int idx = 0; idx < MAX_STREAM; ++idx) {
    HAL_LOGV("before reset null mStream[%d].use_count:%ld."
             "mCameraStream[%d].use_count:%ld.",
             idx, mStream[idx].use_count(),
             idx, mCameraStream[idx].use_count());
  }

  // Connections must go before the streams they back.
  for (int idx = 0; idx < MAX_STREAM; ++idx)
    mConnection[idx].reset();
  for (int idx = 0; idx < MAX_STREAM; ++idx)
    mStream[idx].reset();
  for (int idx = 0; idx < MAX_STREAM; ++idx)
    mCameraStream[idx].reset();

  for (int idx = 0; idx < MAX_STREAM; ++idx) {
    HAL_LOGV("mStream[%d].use_count:%ld. mCameraStream[%d].use_count:%ld",
             idx, mStream[idx].use_count(),
             idx, mCameraStream[idx].use_count());
  }

  HAL_LOGV("device.use_count:%ld. camera.use_count:%ld.",
           device_.use_count(), camera_.use_count());
}
|
|
// Creates (or rejects) the camera stream for serial |ss|.
//
// Blob (JPEG) streams are stored next to their YUV twin, so the slot
// index is `ss + isBlob`. Returns the new CameraStream on success,
// nullptr if the slot is already connected or any setup step fails.
//
// @param ss              stream serial (slot family)
// @param width/height    requested resolution
// @param format          HAL pixel format
// @param usage           gralloc usage flags
// @param isBlob          1 for a blob (JPEG) stream, 0 for YUV
// @param mergeStreamFlag forwarded to CameraStream::initialize()
CameraStream* StreamManager::createStream(STREAM_SERIAL ss,
                                          uint32_t width,
                                          uint32_t height,
                                          int format,
                                          uint32_t usage,
                                          int isBlob,
                                          bool mergeStreamFlag) {
  HAL_LOG_ENTER();
  mMapFrameNumRef.clear();

  const int index = ss + isBlob;  // blob streams live beside their YUV twin
  if (mConnection[index] != nullptr) {
    HAL_LOGD("Camera stream %d is already connected.", index);
    return nullptr;
  }

  // Very large resolutions (> 4000x3000) force the merged-stream path;
  // once raised the flag stays set for later streams. The original code
  // had an if/else here whose two branches made the identical call.
  if (width * height > 4000 * 3000) {
    merge_stream_flag = 1;
  }
  mConnection[index].reset(
      new V4L2Wrapper::Connection(device_, ss, merge_stream_flag));

  if (mConnection[index]->status()) {
    HAL_LOGE("Failed to connect to device: %d.", mConnection[index]->status());
    return nullptr;
  }

  mStream[index] = device_->getStream(ss);
  if (mStream[index] == nullptr) {
    HAL_LOGE("Failed to get Stream, we should connect first.");
    return nullptr;
  }

  // Mirror streams sit at/above MAIN_MIRROR_STREAM in the serial order.
  const int isThirdMirrorStream = (ss >= MAIN_MIRROR_STREAM) ? 1 : 0;
  mCameraStream[index].reset(CameraStream::NewCameraStream(
      mStream[index], isBlob, isThirdMirrorStream));
  if (mCameraStream[index] == nullptr) {
    HAL_LOGD("Failed to get mCameraStream ojb %d .", index);
    mConnection[index].reset();
    return nullptr;
  }

  CameraMetadata static_metadata;
  camera_->initStaticInfo(&static_metadata);
  mCameraStream[index]->mStaticMetadata.reset(
      new CameraMetadata(static_metadata));

  int res = mCameraStream[index]->setFormat(width, height, format, usage);
  if (res) {
    HAL_LOGW("Failed to setFormat, ojb %d .", index);
  }

  if (ss < SUB_0_STREAM_BLOB) {
    // A blob stream and its YUV twin share buffers: only initialize when
    // the partner slot (ss for a blob, ss + 1 for a YUV stream) has not
    // been created yet. This replaces two verbatim-duplicated branches.
    const int partner = isBlob ? static_cast<int>(ss) : ss + 1;
    if (mCameraStream[partner] == nullptr) {
      if (mCameraStream[index]->initialize(width, height, format, usage,
                                           mergeStreamFlag)) {
        HAL_LOGE("mCameraStream %d initialize failed.", ss);
        mConnection[index].reset();
        return nullptr;
      }
    } else {
      HAL_LOGD("mCameraStream %d has link to stream, do not need initialize.",
               index);
    }
  }

  HAL_LOGD("mCameraStream %d created, blob flag:%d.", ss, isBlob);
  return mCameraStream[index].get();
}
|
|
// Registers this object as the process-wide manager instance (first call
// wins) and hands that instance to stream |ss| if it exists.
// Returns the stream's result, or 0 when the stream is not created.
int StreamManager::configurateManager(STREAM_SERIAL ss) {
  HAL_LOG_ENTER();

  if (instance == nullptr) {
    instance = this;
  }

  if (mCameraStream[ss] == nullptr) {
    return 0;
  }
  return mCameraStream[ss]->configurateManager(instance);
}
|
|
// Starts stream |ss| and lazily spins up the worker-thread pair that
// services its family: an enqueue thread feeding buffers to the driver
// and a dequeue thread draining filled ones. MAIN_* serials share one
// pair, SUB_0_* serials another. Always returns 0; a missing stream is
// only logged.
int StreamManager::start(STREAM_SERIAL ss) {
  HAL_LOGV("Stream %d start.", ss);
  // Serializes start/stop against the frame-number bookkeeping paths.
  std::lock_guard<std::mutex> guard(frameNumber_lock_);
  if (mCameraStream[ss] != nullptr) {
    mCameraStream[ss]->start();
    switch (ss) {
      case MAIN_STREAM:
      case MAIN_STREAM_BLOB:
        if (msYUVmainEnqueue == nullptr) {
          // Create the YUV main-stream enqueue thread; it parks in
          // sYUVmainEnqueue() until signaled STARTED below.
          msYUVmainEnqueue = new StreamYUVMEQ(this);
          mYUVMEThreadState = STREAM_STATE_NULL;
          msYUVmainEnqueue->startThread();
          HAL_LOGD("msYUVmainEnqueue thread was created.");
        }

        if (msYUVmainDequeue == nullptr) {
          // Create the YUV main-stream dequeue thread.
          msYUVmainDequeue = new StreamYUVMDQ(this);
          mYUVMDThreadState = STREAM_STATE_NULL;
          msYUVmainDequeue->startThread();
          HAL_LOGD("msYUVmainDequeue thread was created.");
        }
        {
          std::unique_lock<std::mutex> lock(msYUVmainDequeueMutex);
          // Signal the dequeue thread first so it is draining before the
          // enqueue thread starts queueing buffers.
          if (mYUVMDThreadState == STREAM_STATE_NULL) {
            mYUVMDThreadState = STREAM_STATE_STARTED;
            msYUVmainDequeueCond.notify_one();
          }
        }
        {
          std::unique_lock<std::mutex> lock(msYUVmainEnqueueMutex);
          if (mYUVMEThreadState == STREAM_STATE_NULL) {
            mYUVMEThreadState = STREAM_STATE_STARTED;
            msYUVmainEnqueueCond.notify_one();
          }
        }
        break;
      case SUB_0_STREAM:
      case SUB_0_STREAM_BLOB:
        // Create the YUV sub-stream enqueue thread.
        if (msYUVsubEnqueue == nullptr) {
          msYUVsubEnqueue = new StreamYUVSEQ(this);
          mYUVSEThreadState = STREAM_STATE_NULL;
          msYUVsubEnqueue->startThread();
          HAL_LOGD("msYUVsubEnqueue thread was created.");
        }

        // Create the YUV sub-stream dequeue thread.
        if (msYUVsubDequeue == nullptr) {
          msYUVsubDequeue = new StreamYUVSDQ(this);
          mYUVSDThreadState = STREAM_STATE_NULL;
          msYUVsubDequeue->startThread();
          HAL_LOGD("msYUVsubDequeue thread was created.");
        }
        {
          std::unique_lock<std::mutex> lock(msYUVsubDequeueMutex);
          // Same ordering as the main pair: dequeue before enqueue.
          if (mYUVSDThreadState == STREAM_STATE_NULL) {
            mYUVSDThreadState = STREAM_STATE_STARTED;
            msYUVsubDequeueCond.notify_one();
          }
        }
        {
          std::unique_lock<std::mutex> lock(msYUVsubEnqueueMutex);
          if (mYUVSEThreadState == STREAM_STATE_NULL) {
            mYUVSEThreadState = STREAM_STATE_STARTED;
            msYUVsubEnqueueCond.notify_one();
          }
        }
        break;
      default:
        break;
    }
  } else {
    HAL_LOGV("mCameraStream %d has not live.", ss);
  }

  return 0;
}
|
|
// Stops stream |ss|: shuts down the worker-thread pair that services the
// MAIN_* or SUB_0_* family, flushing in-flight buffers after each thread
// stops, then stops the underlying camera stream. Always returns 0.
int StreamManager::stop(STREAM_SERIAL ss) {
  HAL_LOG_ENTER();
  std::lock_guard<std::mutex> guard(frameNumber_lock_);
  if (mCameraStream[ss] != nullptr) {
    switch (ss) {
      case MAIN_STREAM:
      case MAIN_STREAM_BLOB:
        // Stop the main-stream worker threads.
        if (msYUVmainEnqueue != NULL) {
          msYUVmainEnqueue->stopThread();
          mCameraStream[ss]->flush();
          // clear() followed by `= 0` both drop the reference —
          // presumably an Android sp<>; the second assignment looks
          // redundant but harmless. TODO confirm against the header.
          msYUVmainEnqueue.clear();
          msYUVmainEnqueue = 0;
          HAL_LOGD("msYUVmainEnqueue %d stoped.", ss);
        }
        if (msYUVmainDequeue != NULL) {
          msYUVmainDequeue->stopThread();
          mCameraStream[ss]->flush();
          msYUVmainDequeue.clear();
          msYUVmainDequeue = 0;
          HAL_LOGD("msYUVmainDequeue %d stoped.", ss);
        }
        // Reset states so a later start() re-signals the new threads.
        mYUVMDThreadState = STREAM_STATE_NULL;
        mYUVMEThreadState = STREAM_STATE_NULL;
        break;
      case SUB_0_STREAM:
      case SUB_0_STREAM_BLOB:
        // Stop the sub-stream worker threads.
        if (msYUVsubEnqueue != NULL) {
          msYUVsubEnqueue->stopThread();
          mCameraStream[ss]->flush();
          msYUVsubEnqueue.clear();
          msYUVsubEnqueue = 0;
          HAL_LOGD("msYUVsubEnqueue %d stoped.", ss);
        }
        if (msYUVsubDequeue != NULL) {
          msYUVsubDequeue->stopThread();
          mCameraStream[ss]->flush();
          msYUVsubDequeue.clear();
          msYUVsubDequeue = 0;
          HAL_LOGD("msYUVsubDequeue %d stoped.", ss);
        }
        mYUVSDThreadState = STREAM_STATE_NULL;
        mYUVSEThreadState = STREAM_STATE_NULL;
        break;
      default:
        break;
    }
    mCameraStream[ss]->stop();
  }

  if (mCameraStream[ss] == nullptr) {
    HAL_LOGD("mCameraStream %d has not live.", ss);
  }
  HAL_LOGD("mCameraStream %d stoped.", ss);
  return 0;
}
|
|
// Request hook: currently a no-op placeholder — per-frame bookkeeping
// happens in markFrameNumber()/resultCallback(). Always returns 0.
int StreamManager::request(uint32_t /*frameNumber*/) {
  HAL_LOG_ENTER();

  return 0;
}
|
|
int StreamManager::markFrameNumber(uint32_t frameNumber) {
|
HAL_LOG_ENTER();
|
std::lock_guard<std::mutex> guard(frameNumber_lock_);
|
int res = 0;
|
auto map_entry = mMapFrameNumRef.find(frameNumber);
|
if (map_entry == mMapFrameNumRef.end()) {
|
HAL_LOGV("No matching refcnt for frameNumber:%d, initialize!", frameNumber);
|
mMapFrameNumRef.emplace(frameNumber, 1);
|
} else {
|
int refcnt = map_entry->second;
|
if (refcnt < 1) {
|
HAL_LOGE("Refcnt:%d for frameNumber:%d erased!", refcnt, frameNumber);
|
mMapFrameNumRef.erase(frameNumber);
|
return -ENODEV;
|
}
|
mMapFrameNumRef.erase(frameNumber);
|
refcnt++;
|
HAL_LOGD("Refcnt:%d for frameNumber:%d emplaced!", refcnt, frameNumber);
|
mMapFrameNumRef.emplace(frameNumber, refcnt);
|
}
|
return res;
|
}
|
|
int StreamManager::resultCallback(uint32_t frameNumber, struct timeval ts) {
|
HAL_LOG_ENTER();
|
|
std::lock_guard<std::mutex> guard(frameNumber_lock_);
|
int res = 0;
|
auto map_entry = mMapFrameNumRef.find(frameNumber);
|
if (map_entry == mMapFrameNumRef.end()) {
|
HAL_LOGE("No matching refcnt for frameNumber:%d, something wrong!",
|
frameNumber);
|
return -ENOMEM;
|
} else {
|
int refcnt = map_entry->second;
|
refcnt = refcnt -1;
|
HAL_LOGV("Encount call back frameNumber:%d, refcnt:%d!",
|
frameNumber, refcnt);
|
if (refcnt == 0) {
|
HAL_LOGV("Call back frameNumber:%d!", frameNumber);
|
mMapFrameNumRef.erase(frameNumber);
|
camera_->sResultCallback(frameNumber, ts);
|
return res;
|
}
|
mMapFrameNumRef.erase(frameNumber);
|
mMapFrameNumRef.emplace(frameNumber, refcnt);
|
}
|
return res;
|
}
|
|
bool StreamManager::sYUVmainEnqueue() {
|
HAL_LOG_ENTER();
|
{
|
std::unique_lock<std::mutex> lock(msYUVmainEnqueueMutex);
|
while (STREAM_STATE_STARTED != mYUVMEThreadState) {
|
msYUVmainEnqueueCond.wait(lock);
|
}
|
}
|
int res = -1;
|
if (mCameraStream[MAIN_STREAM] != nullptr) {
|
res = mCameraStream[MAIN_STREAM]->enqueueBuffer();
|
if (res) {
|
HAL_LOGE("Device EnqueueBuffer failed.");
|
}
|
}
|
if (mCameraStream[MAIN_STREAM_BLOB] != nullptr) {
|
res = mCameraStream[MAIN_STREAM_BLOB]->enqueueBuffer();
|
if (res) {
|
HAL_LOGE("Device EnqueueBuffer failed.");
|
}
|
}
|
return true;
|
}
|
|
// Worker body for the main-stream dequeue thread. Parks until start()
// signals STARTED, pulls one filled frame from the driver, drops the
// first DROP_BUFFERS_NUM frames, then fans the frame out to every live
// consumer (YUV copy, JPEG encode, and their mirror twins), completing
// each consumer's request via resultCallback(). Always returns true so
// the thread loop keeps running.
bool StreamManager::sYUVmainDequeue() {
  HAL_LOG_ENTER();
  {
    std::unique_lock<std::mutex> lock(msYUVmainDequeueMutex);
    while (STREAM_STATE_STARTED != mYUVMDThreadState) {
      msYUVmainDequeueCond.wait(lock);
    }
  }

  int res = -1;
  void* src_addr = nullptr;
  struct timeval stream_timestamp;
  // Dequeue from whichever of the YUV/blob pair is live. If both exist,
  // the blob dequeue overwrites src_addr — presumably they share the
  // same driver buffers; TODO confirm against V4L2 stream setup.
  if (mCameraStream[MAIN_STREAM] != nullptr) {
    res = mCameraStream[MAIN_STREAM]->dequeueBuffer(&src_addr,
                                                    &stream_timestamp);
    if (res) {
      HAL_LOGE("Device main stream dequeueBuffer failed, src_addr:%p.",
               src_addr);
      // No buffer at all: nothing to process this iteration.
      if (src_addr == nullptr) {
        return true;
      }
    }
  }
  if (mCameraStream[MAIN_STREAM_BLOB] != nullptr) {
    res = mCameraStream[MAIN_STREAM_BLOB]->dequeueBuffer(&src_addr,
                                                         &stream_timestamp);
    if (res) {
      HAL_LOGE("Device main blob stream dequeueBuffer failed, src_addr:%p.",
               src_addr);
      if (src_addr == nullptr) {
        return true;
      }
    }
  }

  // Discard the first DROP_BUFFERS_NUM frames (sensor settling).
  if (mDrop_main_buffers <= DROP_BUFFERS_NUM) {
    mDrop_main_buffers++;
    HAL_LOGD("mDrop_main_buffers:%d, DequeueBuffer %p.",
             mDrop_main_buffers, src_addr);
    return true;
  }

  HAL_LOGV("Device DequeueBuffer %p.", src_addr);
  // Inter-frame latency logging, in milliseconds.
  if (gtimemain > 0) {
    int64_t currentTime = systemTime() / 1000000;
    int64_t deltaTime = currentTime - gtimemain;
    gtimemain = currentTime;
    HAL_LOGV("Device deltaTime %" PRId64 ".", deltaTime);
  } else {
    gtimemain = systemTime() / 1000000;
  }

  void * dst_addr = nullptr;
  buffer_handle_t * buffer = nullptr;
  uint32_t frameNumber = 0;
  android_ycbcr dst_addr_ycbcr;

  // Fan-out: for each live consumer, grab its pending request buffer,
  // copy/encode the frame into it under a gralloc lock, and complete the
  // request on success. getBuffer() failing means no pending request.
  if (mCameraStream[MAIN_STREAM] != nullptr) {
    res = mCameraStream[MAIN_STREAM]->getBuffer(&buffer, &frameNumber);
    if (!res) {
      gralloc_->lock_handle_ycbcr(buffer, &dst_addr_ycbcr);
      if (mCameraStream[MAIN_STREAM]->copy_ycbcr_buffer(&dst_addr_ycbcr,
                                                        src_addr)) {
        gralloc_->unlock_handle(buffer);
        HAL_LOGE("Device mian stream copybuffer failed.");
      } else {
        gralloc_->unlock_handle(buffer);
        resultCallback(frameNumber, stream_timestamp);
      }
    }
  }
  if (mCameraStream[MAIN_STREAM_BLOB] != nullptr) {
    res = mCameraStream[MAIN_STREAM_BLOB]->getBuffer(&buffer, &frameNumber);
    if (!res) {
      unsigned long mJpegBufferSizes = 0;
      gralloc_->lock_handle(buffer, &dst_addr, &mJpegBufferSizes);
      // JPEG-encode the frame directly into the gralloc buffer.
      if (mCameraStream[MAIN_STREAM_BLOB]->encodebuffer(dst_addr,
                                                        src_addr,
                                                        mJpegBufferSizes)) {
        gralloc_->unlock_handle(buffer);
        HAL_LOGE("Device main blob stream copybuffer failed.");
      } else {
        gralloc_->unlock_handle(buffer);
        resultCallback(frameNumber, stream_timestamp);
      }
    }
  }
  if (mCameraStream[MAIN_MIRROR_STREAM] != nullptr) {
    res = mCameraStream[MAIN_MIRROR_STREAM]->getBuffer(&buffer, &frameNumber);
    if (!res) {
      gralloc_->lock_handle_ycbcr(buffer, &dst_addr_ycbcr);
      if (mCameraStream[MAIN_MIRROR_STREAM]->copy_ycbcr_buffer(&dst_addr_ycbcr,
                                                               src_addr)) {
        gralloc_->unlock_handle(buffer);
        HAL_LOGE("Device main mirror tream opybuffer failed.");
      } else {
        gralloc_->unlock_handle(buffer);
        resultCallback(frameNumber, stream_timestamp);
      }
    }
  }
  if (mCameraStream[MAIN_MIRROR_STREAM_BLOB] != nullptr) {
    res = mCameraStream[MAIN_MIRROR_STREAM_BLOB]->getBuffer(&buffer,
                                                            &frameNumber);
    if (!res) {
      unsigned long mJpegBufferSizes = 0;
      gralloc_->lock_handle(buffer, &dst_addr, &mJpegBufferSizes);
      if (mCameraStream[MAIN_MIRROR_STREAM_BLOB]->encodebuffer(
              dst_addr,
              src_addr,
              mJpegBufferSizes)) {
        gralloc_->unlock_handle(buffer);
        HAL_LOGE("Device main mirror blob stream copybuffer failed.");
      } else {
        gralloc_->unlock_handle(buffer);
        resultCallback(frameNumber, stream_timestamp);
      }
    }
  }

  return true;
}
|
|
bool StreamManager::sYUVsubEnqueue() {
|
HAL_LOG_ENTER();
|
{
|
std::unique_lock<std::mutex> lock(msYUVsubEnqueueMutex);
|
while (STREAM_STATE_STARTED != mYUVSEThreadState) {
|
msYUVsubEnqueueCond.wait(lock);
|
}
|
}
|
|
int res = -1;
|
if (mCameraStream[SUB_0_STREAM] != nullptr) {
|
res = mCameraStream[SUB_0_STREAM]->enqueueBuffer();
|
if (res) {
|
HAL_LOGE("Device EnqueueBuffer failed.");
|
}
|
}
|
if (mCameraStream[SUB_0_STREAM_BLOB] != nullptr) {
|
res = mCameraStream[SUB_0_STREAM_BLOB]->enqueueBuffer();
|
if (res) {
|
HAL_LOGE("Device EnqueueBuffer failed.");
|
}
|
}
|
return true;
|
}
|
|
bool StreamManager::sYUVsubDequeue() {
|
HAL_LOG_ENTER();
|
{
|
std::unique_lock<std::mutex> lock(msYUVsubDequeueMutex);
|
while (STREAM_STATE_STARTED != mYUVSDThreadState) {
|
msYUVsubDequeueCond.wait(lock);
|
}
|
}
|
|
int res = -1;
|
void* src_addr = nullptr;
|
struct timeval stream_timestamp;
|
if (mCameraStream[SUB_0_STREAM] != nullptr) {
|
res = mCameraStream[SUB_0_STREAM]->dequeueBuffer(&src_addr,
|
&stream_timestamp);
|
if (res) {
|
HAL_LOGE("Device sub stream dequeueBuffer failed, src_addr:%p.",
|
src_addr);
|
if (src_addr == nullptr) {
|
return true;
|
}
|
}
|
}
|
if (mCameraStream[SUB_0_STREAM_BLOB] != nullptr) {
|
res = mCameraStream[SUB_0_STREAM_BLOB]->dequeueBuffer(&src_addr,
|
&stream_timestamp);
|
if (res) {
|
HAL_LOGE("Device sub blob stream dequeueBuffer failed, src_addr:%p.",
|
src_addr);
|
if (src_addr == nullptr) {
|
return true;
|
}
|
}
|
}
|
if (mDrop_sub_buffers <= DROP_BUFFERS_NUM) {
|
mDrop_sub_buffers++;
|
HAL_LOGD("mDrop_sub_buffers:%d, DequeueBuffer %p.",
|
mDrop_sub_buffers, src_addr);
|
return true;
|
}
|
|
HAL_LOGV("Device DequeueBuffer %p.", src_addr);
|
|
if (gtimesub > 0) {
|
int64_t currentTime = systemTime() / 1000000;
|
int64_t deltaTime = currentTime - gtimesub;
|
HAL_LOGV("Device deltaTime %" PRId64 ".", deltaTime);
|
gtimesub = currentTime;
|
} else {
|
gtimesub = systemTime() / 1000000;
|
}
|
|
void* dst_addr = nullptr;
|
buffer_handle_t* buffer = nullptr;
|
uint32_t frameNumber = 0;
|
android_ycbcr dst_addr_ycbcr;
|
if (mCameraStream[SUB_0_STREAM] != nullptr) {
|
res = mCameraStream[SUB_0_STREAM]->getBuffer(&buffer, &frameNumber);
|
if (!res) {
|
gralloc_->lock_handle_ycbcr(buffer, &dst_addr_ycbcr);
|
if (mCameraStream[SUB_0_STREAM]->copy_ycbcr_buffer(&dst_addr_ycbcr,
|
src_addr)) {
|
gralloc_->unlock_handle(buffer);
|
HAL_LOGE("Device sub stream copybuffer failed.");
|
} else {
|
gralloc_->unlock_handle(buffer);
|
resultCallback(frameNumber, stream_timestamp);
|
}
|
}
|
}
|
if (mCameraStream[SUB_0_STREAM_BLOB] != nullptr) {
|
if (mCameraStream[SUB_0_STREAM_BLOB] != nullptr)
|
res = mCameraStream[SUB_0_STREAM_BLOB]->getBuffer(&buffer, &frameNumber);
|
if (!res) {
|
unsigned long mJpegBufferSizes = 0;
|
gralloc_->lock_handle(buffer, &dst_addr, &mJpegBufferSizes);
|
if (mCameraStream[SUB_0_STREAM_BLOB]->encodebuffer(dst_addr,
|
src_addr,
|
mJpegBufferSizes)) {
|
gralloc_->unlock_handle(buffer);
|
HAL_LOGE("Device copybuffer failed.");
|
} else {
|
gralloc_->unlock_handle(buffer);
|
resultCallback(frameNumber, stream_timestamp);
|
}
|
}
|
}
|
if (mCameraStream[SUB_0_MIRROR_STREAM] != nullptr) {
|
res = mCameraStream[SUB_0_MIRROR_STREAM]->getBuffer(&buffer, &frameNumber);
|
if (!res) {
|
gralloc_->lock_handle_ycbcr(buffer, &dst_addr_ycbcr);
|
if (mCameraStream[SUB_0_MIRROR_STREAM]->copy_ycbcr_buffer(&dst_addr_ycbcr,
|
src_addr)) {
|
gralloc_->unlock_handle(buffer);
|
HAL_LOGE("Device sub mirror stream copybuffer failed.");
|
} else {
|
gralloc_->unlock_handle(buffer);
|
resultCallback(frameNumber, stream_timestamp);
|
}
|
}
|
}
|
if (mCameraStream[SUB_0_MIRROR_STREAM_BLOB] != nullptr) {
|
if (mCameraStream[SUB_0_MIRROR_STREAM_BLOB] != nullptr)
|
res = mCameraStream[SUB_0_MIRROR_STREAM_BLOB]->getBuffer(&buffer,
|
&frameNumber);
|
if (!res) {
|
unsigned long mJpegBufferSizes = 0;
|
gralloc_->lock_handle(buffer, &dst_addr, &mJpegBufferSizes);
|
if (mCameraStream[SUB_0_MIRROR_STREAM_BLOB]->encodebuffer(
|
dst_addr,
|
src_addr,
|
mJpegBufferSizes)) {
|
gralloc_->unlock_handle(buffer);
|
HAL_LOGE("Device sub mirror blob stream copybuffer failed.");
|
} else {
|
gralloc_->unlock_handle(buffer);
|
resultCallback(frameNumber, stream_timestamp);
|
}
|
}
|
}
|
return true;
|
}
|
|
}
|