1.前言
我們?cè)谧鰁is算法集成的時(shí)候,preview流或者video流需要經(jīng)過算法的異步處理,在hal1框架上實(shí)現(xiàn)就稍微有些難度,hal3或者camX相對(duì)要容易一些指孤。
如何去優(yōu)雅的實(shí)現(xiàn)異步回調(diào)數(shù)據(jù)呢?
- 異步：前面幾幀，比如5幀video數(shù)據(jù)需要先給算法處理，當(dāng)?shù)?幀數(shù)據(jù)過來時(shí)，算法處理好第1幀返回給系統(tǒng)，以此類推。
- 同步：一幀數(shù)據(jù)過來，算法處理完，直接返回給系統(tǒng)。
2.獲取video數(shù)據(jù)的接口
hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
// Stream callback for one video frame delivered by the mm-camera stream thread.
// super_frame: superbuf holding the video buffer(s); ownership is taken here and
//              it is freed at the bottom of this routine (see note on the free()).
// stream:      the QCameraStream the frame came from.
// userdata:    opaque pointer, actually the QCamera2HardwareInterface instance.
void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
QCameraStream *stream,
void *userdata)
{
//1. Recover the QCamera2HardwareInterface pointer from the opaque userdata
QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
//2. Grab the video frame (first buffer of the superbuf)
mm_camera_buf_def_t *frame = super_frame->bufs[0];
//3. Per-frame processing: timestamp, debug dump, wrap into camera memory
if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
if (pme->mParameters.getVideoBatchSize() == 0) {// always taken here: video batching is disabled in this configuration
//Compute the frame timestamp in nanoseconds from the buffer's timespec
timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
videoMemObj = (QCameraVideoMemory *)frame->mem_info;
video_mem = NULL;
// Normal path: wrap the buffer via the stream's own memory object
if (NULL != videoMemObj && !(pme->m_bNeedVideoCb)) {
video_mem = videoMemObj->getMemory(frame->buf_idx,
(pme->mStoreMetaDataInFrame > 0)? true : false);
triggerTCB = TRUE;
LOGH("Video frame TimeStamp : %lld batch = 0 idx = %d",
timeStamp, frame->frame_idx);
}
// Alternate path: use the fb memory object instead -- presumably for an
// internal video-callback use case; TODO confirm m_bNeedVideoCb semantics
if (pme->m_bNeedVideoCb) {
video_mem = pme->videoMemFb->getMemory(frame->buf_idx,
(pme->mStoreMetaDataInFrame > 0)? true : false);
triggerTCB = TRUE;
}
}else{
···
}
}else{
···
}
//4. Hand the wrapped buffer to the video encoder via the timestamp data callback
if ((NULL != video_mem) && (triggerTCB == TRUE)) {
if ((pme->mDataCbTimestamp != NULL) &&
pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
qcamera_callback_argm_t cbArg;
memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
cbArg.data = video_mem;
// For VT usecase, ISP uses AVtimer not CLOCK_BOOTTIME as time source.
// So do not change video timestamp.
if (!pme->mParameters.isAVTimerEnabled()) {
// Convert Boottime from camera to Monotime for video if needed.
// Otherwise, mBootToMonoTimestampOffset value will be 0.
timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
}
LOGD("Final video buffer TimeStamp : %lld ", timeStamp);
cbArg.timestamp = timeStamp;
int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
if (rc != NO_ERROR) {
// Notify failed: return the buffer to the stream so it is not leaked
LOGE("fail sending data notify");
stream->bufDone(frame->buf_idx);
}
}
}
// The superbuf wrapper must be freed here; when the face-beautification
// (algorithm) path is enabled, ownership moved elsewhere -- TODO confirm
// the algorithm path frees it after processing.
if (!pme->mParameters.isVideoFaceBeautification()) {
free(super_frame);
}
}
函數(shù)作用:處理視頻流中的視頻幀
視頻幀將被發(fā)送到視頻編碼器。一旦視頻編碼器完成了視頻幀的處理，它將調(diào)用另一個(gè)API（release_recording_frame）來返回幀，需要注意釋放super_frame。
3.異步回調(diào)的實(shí)現(xiàn)
想要優(yōu)雅地實(shí)現(xiàn)異步回調(diào)，那就需要利用C++11的新特性（std::function與lambda表達(dá)式），簡(jiǎn)直好用到哭！
這裡我們編寫一個(gè)簡(jiǎn)單的demo，完成異步回調(diào)的實(shí)現(xiàn)。
3.1 編寫Demo類
3.1.1 DemoCamera.h
#ifndef DEMOCAMERA_H_
#define DEMOCAMERA_H_
#include <functional>
#include <queue>
#include <pthread.h>
#include "QCamera2HWI.h"
#include "QCameraStream.h"
// Demo namespace for the HAL1 async-callback experiment
namespace democamera {
namespace hal1 {
// Bundles everything needed to deliver one video frame back to the
// framework later: the raw superbuf, its stream, the HWI instance,
// and the callback that performs the actual delivery.
struct VideoData{
mm_camera_super_buf_t *super_frame;
qcamera::QCameraStream *stream;
qcamera::QCamera2HardwareInterface *pme;
std::function<void(int)> cbHandler;
};
// Buffers video frames for an (out-of-line) algorithm and replays the
// stored callbacks once enough frames have accumulated, or when
// recording stops. NOTE(review): members are public and unsynchronized;
// callers must access them from a single thread (or add locking).
class DemoCamera{
public:
explicit DemoCamera(int size);// size: number of frames the algorithm needs buffered
virtual ~DemoCamera();
// Queue one frame (with its delivery callback) and run the algorithm step.
void processVideo(
mm_camera_super_buf_t *super_frame,
qcamera::QCameraStream *stream,
qcamera::QCamera2HardwareInterface *pme,
std::function<void(int)> cbHandler);
// Deliver buffered frames back to the framework when conditions are met.
void callBack(void);
public:
int videoFrame_Size;// number of frames the algorithm needs before output starts
std::queue<VideoData> videoDataQ;// FIFO of frames awaiting delivery
bool isStartVideo;// recording has started
bool isStopVideo;// recording has stopped (flush everything)
};
}
}
#endif
3.1.2 DemoCamera.cpp
#define LOG_TAG "DemoCamera"
#include "DemoCamera.h"
#include <utility>
extern "C" {
#include "mm_camera_dbg.h"
}
using namespace qcamera;
namespace democamera {
namespace hal1 {
// Constructs the demo helper.
// size: how many video frames the algorithm must buffer before the first
//       frame is released back to the framework.
DemoCamera::DemoCamera(int size)
    : videoFrame_Size(size), // frames required by the algorithm
      isStartVideo(false),   // recording not started yet
      isStopVideo(false)     // recording not stopped yet
{
    LOGE("videoFrame_Size =%d ",videoFrame_Size );
}
// Destructor: reset state (resource release would go here).
DemoCamera::~DemoCamera()
{
// BUG FIX: the original referenced an undeclared 'video_Size' (compile
// error); the member is actually videoFrame_Size. The log text is fixed
// to match the real member name.
videoFrame_Size = 0;// release/reset resources here
LOGE("videoFrame_Size=%d ",videoFrame_Size);
}
// Queues one video frame together with the callback that will later hand it
// to the framework, then runs one algorithm/delivery step.
// super_frame: frame superbuf (ownership stays with the queued entry until
//              cbHandler runs and frees it).
// stream/pme:  context pointers forwarded for the eventual delivery.
// cbHandler:   closure that performs the actual data callback; invoked with 0.
void DemoCamera::processVideo(mm_camera_super_buf_t *super_frame,QCameraStream *stream,
QCamera2HardwareInterface *pme,std::function<void(int)> cbHandler)
{
VideoData d;
d.super_frame = super_frame;
d.stream = stream;
d.pme = pme;
// Move instead of copy: copying a std::function may heap-allocate, and the
// by-value parameter is dead after this point.
d.cbHandler = std::move(cbHandler);
videoDataQ.push(std::move(d));// buffer the frame for the algorithm
// xxx: the real algorithm would process the frame here (synchronous in this demo)
// Deliver any frames that are now ready.
callBack();
}
// Replays the stored delivery callbacks. Two cases:
//  - steady state: recording active and the queue has reached the frame count
//    the algorithm needs -> release exactly one (the oldest) frame;
//  - stop: flush every buffered frame back to the framework.
void DemoCamera::callBack()
{
if(!videoDataQ.empty())
{
// Cast fixes the signed/unsigned comparison (int vs std::size_t) that
// previously only compiled because -Werror was dropped.
if(isStartVideo && (static_cast<size_t>(videoFrame_Size) == videoDataQ.size())){
// Reference, not copy: copying VideoData copies its std::function.
VideoData &d = videoDataQ.front();
mm_camera_buf_def_t *frame = d.super_frame->bufs[0];
// BUG FIX: size() is size_t; printing it with %d is undefined -- cast to int.
LOGE("zcf_cb:call cbHandler!videoDataQ.size = %d frame->idx=%d ",(int)videoDataQ.size(),frame->frame_idx);
d.cbHandler(0);// hand the frame back to the system
videoDataQ.pop();// safe: the handler has already run
}
// Recording stopped: drain the queue so no frame is stranded.
if(isStopVideo){
LOGE("zcf_cb:錄像停止 吐出所有數(shù)據(jù) ");
while(!videoDataQ.empty()){
VideoData &d = videoDataQ.front();
mm_camera_buf_def_t *frame = d.super_frame->bufs[0];
// BUG FIX: garbled characters removed from the log literal; %d arg cast as above.
LOGE("zcf_cb:call cbHandler!videoDataQ.size = %d frame->idx=%d ",(int)videoDataQ.size(),frame->frame_idx);
d.cbHandler(0);
videoDataQ.pop();
}
}
}
}
}
}
3.2 DemoCamera加入編譯
hardware/qcom/camera/QCamera2/Android.mk
-LOCAL_CFLAGS := -Wall -Wextra -Werror
+LOCAL_CFLAGS := -Wall -Wextra  # 去掉 -Werror，防止「定義的變量未使用」等告警被當(dāng)作編譯錯(cuò)誤
LOCAL_SRC_FILES += \
util/QCameraExtZoomTranslator.cpp \
util/QCameraPprocManager.cpp \
util/QCameraBokeh.cpp \
util/QCameraClearSight.cpp \
+ HAL/DemoCamera.cpp
3.3 初始化 DemoCamera
hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.h
#include "QCameraTrace.h"
//前置聲明DemoCamera類
+namespace democamera {
+namespace hal1 {
+ class DemoCamera;
+}
+}
namespace qcamera {
···
private:
//使用智能指針 防止內(nèi)存泄漏
+ std::unique_ptr<democamera::hal1::DemoCamera> mDemoCamera;
camera_device_t mCameraDevice;
uint32_t mCameraId;
mm_camera_vtbl_t *mCameraHandle;
hardware/qcom/camera/QCamera2/HAL/QCamera2HWI.cpp
#include "QCameraTrace.h"
#include "QCameraDisplay.h"
+#include "DemoCamera.h"
int QCamera2HardwareInterface::start_recording(struct camera_device *device)
{
+ hw->mDemoCamera->isStartVideo = 1;
+ hw->mDemoCamera->isStopVideo = 0;
}
int QCamera2HardwareInterface::stop_recording(struct camera_device *device)
{
+ hw->mDemoCamera->isStartVideo = 0;
+ hw->mDemoCamera->isStopVideo = 1;
}
int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
//初始化智能指針mDemoCamera
+ mDemoCamera.reset(new democamera::hal1::DemoCamera(5));//這里設(shè)置算法需要的幀數(shù)為5
// Init params in the background
// 1. It's safe to queue init job, even if alloc job is not yet complete.
// It will be queued to the same thread, so the alloc is guaranteed to
// finish first.
// 2. However, it is not safe to begin param init until after camera is
// open. That is why we wait until after camera open completes to schedule
// this task.
memset(&args, 0, sizeof(args));
mParamInitJob = queueDeferredWork(CMD_DEF_PARAM_INIT, args);
}
3.4 調(diào)用算法處理Video流
hardware/qcom/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
#include <stdlib.h>
+#include <functional>
···
+#include "DemoCamera.h"
void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
QCameraStream *stream,
void *userdata)
{
···
//1.拿到 QCamera2HardwareInterface指針
QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
//2.拿到視頻幀
mm_camera_buf_def_t *frame = super_frame->bufs[0];
···
//打印幀信息
++ LOGE("zcf_c:Stream(%d), Timestamp: %ld %ld frame->idx=%d frame->buf_type=%d,getVideoBatchSize=%d",
frame->stream_id,
frame->ts.tv_sec,
frame->ts.tv_nsec,
frame->frame_idx,
frame->buf_type,
pme->mParameters.getVideoBatchSize());
//3.視頻幀的一些處理
if (frame->buf_type == CAM_STREAM_BUF_TYPE_MPLANE) {
if (pme->mParameters.getVideoBatchSize() == 0) {
//這里使用lamda表達(dá)式封裝callBackHandler,然后賦值給包裝器std::function<void(int)>
++ std::function<void(int)> callBackHandler = [=](int result)mutable{
timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL
+ frame->ts.tv_nsec;
pme->dumpFrameToFile(stream, frame, QCAMERA_DUMP_FRM_VIDEO);
videoMemObj = (QCameraVideoMemory *)frame->mem_info;
video_mem = NULL;
if (NULL != videoMemObj && !(pme->m_bNeedVideoCb)) {
video_mem = videoMemObj->getMemory(frame->buf_idx,
(pme->mStoreMetaDataInFrame > 0)? true : false);
triggerTCB = TRUE;
LOGH("Video frame TimeStamp : %lld batch = 0 idx = %d",
timeStamp, frame->frame_idx);
}
if (pme->m_bNeedVideoCb) {
video_mem = pme->videoMemFb->getMemory(frame->buf_idx,
(pme->mStoreMetaDataInFrame > 0)? true : false);
triggerTCB = TRUE;
}
//4.把數(shù)據(jù)傳遞給 視頻編碼器 處理补鼻,這個(gè)直接封裝在callBackHandler里面
if (!result) {
/**
Code is copied from bottom of the outer function (QCamera2HardwareInterface::video_stream_cb_routine)
so the code will be executed here in this callback instead
*/
LOGE("zcf_cf: result =%d frame->idx=%d video_mem=%p triggerTCB=%d",
result,frame->frame_idx,video_mem,triggerTCB);
if ((NULL != video_mem) && (triggerTCB == TRUE)) {
if ((pme->mDataCbTimestamp != NULL) && pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
qcamera_callback_argm_t cbArg;
memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
cbArg.data = video_mem;
// For VT usecase, ISP uses AVtimer not CLOCK_BOOTTIME as time source.
// So do not change video timestamp.
if (!pme->mParameters.isAVTimerEnabled()) {
// Convert Boottime from camera to Monotime for video if needed.
// Otherwise, mBootToMonoTimestampOffset value will be 0.
timeStamp = timeStamp - pme->mBootToMonoTimestampOffset;
}
LOGE("zcf_cf: Final video buffer TimeStamp : %lld frame->idx=%d", timeStamp,frame->frame_idx);
cbArg.timestamp = timeStamp;
int32_t rc = pme->m_cbNotifier.notifyCallback(cbArg);
if (rc != NO_ERROR) {
LOGE("fail sending data notify");
stream->bufDone(frame->buf_idx);
}
}
}
}
free(super_frame);//釋放資源
};
pme->mDemoCamera->processVideo(super_frame,stream,pme,callBackHandler);
return;//這里return掉 不去調(diào)用步驟4 把數(shù)據(jù)傳遞給 視頻編碼器 處理
}
}
3.5 編譯運(yùn)行
-
mmm hardware/qcom/camera/QCamera2/
在out/···/vendor/lib/下會(huì)生成camera.msm8937.so - adb push camera.msm8937.so vendor/lib
- 重啟驗(yàn)證 或者殺掉camera provider
log如下:
從log中可以看到，前面累計(jì)的5幀數(shù)據(jù)都沒有直接回調(diào)給系統(tǒng)，而是等算法處理後，第3幀數(shù)據(jù)才回調(diào)給系統(tǒng)。
3.6 問題:最后幾幀沒有正確回調(diào)
//步驟4:把數(shù)據(jù)傳遞給 視頻編碼器 處理
if ((NULL != video_mem) && (triggerTCB == TRUE)) {
if ((pme->mDataCbTimestamp != NULL) && pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
···
}
}
添加log 重新編譯分析:
最后幾幀數(shù)據(jù)
pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) =0
因此沒有正確回調(diào)
解決辦法
最簡(jiǎn)單的辦法就是去掉判斷pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0
可以看到log正常回調(diào)了。
當(dāng)然這種解法不一定是最好的，畢竟修改了系統(tǒng)原有的邏輯。
那麼還有更好的解決方案嗎？