【嵌牛導讀】:安卓上OPENCV是經過剪切的,部分功能如VideoCapture不能使用。本文使用首先將FFMPEG封裝呈.so庫,然后使用JNI調用C語言實現了視頻的解碼處理。
【嵌牛鼻子】:FFMPEG android JNI 視頻解碼
【嵌牛提問】:安卓上使用FFMPEG進行視頻解碼
【嵌牛正文】:
JNI調用NATIVE的FFMPEG
Android的opencv不支持videocapture解碼視頻,是因為Android上的opencv沒有集成ffmpeg這個解碼的庫。所以需要自己把ffmpeg的源碼打包成.so文件,在Android中使用JNI調用.so庫,然后在c++中讀取手機中的視頻解碼以后轉為一個一個的MAT矩陣,以方便后期處理。下面是步驟
一. 將ffmpeg源碼編譯成.so庫。我使用的是別人編譯好的庫。具體編譯方法看這個網頁。https://www.2cto.com/kf/201804/739639.html
二.? 編寫NATIVE函數
1.新建一個名為FFmpegDecode的類,寫兩個native函數
2.使用Android studio上的終端。轉到app\build\intermediates\classes\debug文件夾下
3.生成java的native函數所對應的c++函數頭文件
4.然后在app\build\intermediates\classes\debug目錄下找到生成的.h頭文件
三.安卓上配置NDK。
1修改工程下的gradle.properties
修改app下的build.gradle
// Disable the plugin's built-in ndk-build by emptying jni.srcDirs, and load
// the prebuilt .so files from libs/jniLibs instead.
sourceSets.main.jni.srcDirs = []
sourceSets.main.jniLibs.srcDirs = ['src/main/libs', 'src/main/jniLibs']

// Compile the JNI sources with an explicit ndk-build invocation.
// BUGFIX: in the original, the 'task ndkBuild(...)' declaration was fused onto
// the same line as a '//' comment, so the whole task was commented out.
task ndkBuild(type: Exec, description: 'Compile JNI source with NDK') {
    Properties properties = new Properties()
    properties.load(project.rootProject.file('local.properties').newDataInputStream())
    def ndkDir = properties.getProperty('ndk.dir')
    if (org.apache.tools.ant.taskdefs.condition.Os.isFamily(org.apache.tools.ant.taskdefs.condition.Os.FAMILY_WINDOWS)) {
        commandLine "$ndkDir/ndk-build.cmd", '-C', file('src/main/jni').absolutePath
    } else {
        commandLine "$ndkDir/ndk-build", '-C', file('src/main/jni').absolutePath
    }
}

// Every Java compilation depends on the native build having run first.
tasks.withType(JavaCompile) {
    compileTask -> compileTask.dependsOn ndkBuild
}

// Remove the NDK binaries when the project is cleaned.
task ndkClean(type: Exec, description: 'Clean NDK Binaries') {
    Properties properties = new Properties()
    properties.load(project.rootProject.file('local.properties').newDataInputStream())
    def ndkDir = properties.getProperty('ndk.dir')
    if (org.apache.tools.ant.taskdefs.condition.Os.isFamily(org.apache.tools.ant.taskdefs.condition.Os.FAMILY_WINDOWS)) {
        commandLine "$ndkDir/ndk-build.cmd", 'clean', '-C', file('src/main/jni').absolutePath
    } else {
        commandLine "$ndkDir/ndk-build", 'clean', '-C', file('src/main/jni').absolutePath
    }
}

defaultConfig {
    multiDexEnabled true
}

clean.dependsOn 'ndkClean'
2修改工程下的local.properties
3.在app/src/main下新建jni文件夾。在jni文件夾下新建兩個文件Android.mk和Application.mk。
4.將剛才生成的頭文件復制到jni文件夾下。同時新建對應的.cpp文件。
5.在jni文件夾下面新建一個include文件夾,把ffmpeg的源碼拷貝到該文件夾下
6.將之前編譯好的.so文件放到jni文件夾下面
7.編寫Android.mk文件
LOCAL_PATH := $(call my-dir)

# ---------- Prebuilt FFmpeg shared libraries ----------
include $(CLEAR_VARS)
LOCAL_MODULE := avcodec
LOCAL_SRC_FILES := libavcodec-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avdevice
LOCAL_SRC_FILES := libavdevice-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avfilter
LOCAL_SRC_FILES := libavfilter-5.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avformat
LOCAL_SRC_FILES := libavformat-56.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := avutil
LOCAL_SRC_FILES := libavutil-54.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := postproc
LOCAL_SRC_FILES := libpostproc-53.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swresample
LOCAL_SRC_FILES := libswresample-1.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := swscale
LOCAL_SRC_FILES := libswscale-3.so
include $(PREBUILT_SHARED_LIBRARY)

# ---------- Prebuilt libyuv and OpenCV ----------
include $(CLEAR_VARS)
LOCAL_MODULE := yuv
LOCAL_SRC_FILES := libyuv.so
include $(PREBUILT_SHARED_LIBRARY)

include $(CLEAR_VARS)
LOCAL_MODULE := opencv_java
LOCAL_SRC_FILES := libopencv_java.so
include $(PREBUILT_SHARED_LIBRARY)

# ---------- The JNI decoder module itself ----------
include $(CLEAR_VARS)
#OPENCV_CAMERA_MODULES:=on
#OPENCV_INSTALL_MODULES:=off
# BUGFIX: use forward slashes — backslash separators break ndk-build includes.
include ../../../../native/jni/OpenCV.mk
LOCAL_MODULE := ffmdecode
LOCAL_SRC_FILES := com_tinymonster_ffmpegstudy1_FFmpegDecode.cpp
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/ffmpeg
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/libyuv
LOCAL_LDLIBS += -llog -ldl
# BUGFIX: 'postproc' and 'swresample' were fused into the single token
# 'postprocswresample', so neither library would be linked.
LOCAL_SHARED_LIBRARIES := avcodec avdevice avfilter avformat avutil postproc swresample swscale yuv
include $(BUILD_SHARED_LIBRARY)
8.編寫Application.mk文件
# Link the static GNU STL into the .so (no separate libgnustl_shared needed).
APP_STL := gnustl_static
# Enable RTTI and C++ exceptions for the C++ (OpenCV/cv::Mat) code.
APP_CPPFLAGS := -frtti -fexceptions
# Build for 32-bit ARM only — must match the ABI of the prebuilt FFmpeg .so files.
APP_ABI := armeabi
9.將opencv的native庫放到工程目錄下。
10.點擊屏幕右側的gradle中的ndkbuild。
11.然后會看到jni下面生成了libs和obj兩個文件夾。這兩個文件夾下面生成的是對應的.so庫
12.編寫c代碼,讀取手機視頻,解碼視頻,并轉為OPENCV的MAT。
// Logcat helpers (tag "ccj").
#define LOGI(FORMAT,...) __android_log_print(ANDROID_LOG_INFO,"ccj",FORMAT,##__VA_ARGS__);
#define LOGE(FORMAT,...) __android_log_print(ANDROID_LOG_ERROR,"ccj",FORMAT,##__VA_ARGS__);

using namespace cv;

/**
 * Decode the video file passed from Java and convert every decoded frame to a
 * BGR buffer that is copied into a cv::Mat (OpenCV stores pixels as BGR).
 *
 * @param env    JNI environment pointer.
 * @param obj    Calling Java class (static native method).
 * @param input_ Path of the video file on the device.
 * @return 0 on success; 1 = no video stream found, 2 = decoder not found,
 *         3 = decoder could not be opened, 4 = decode error.
 */
JNIEXPORT jint JNICALL Java_com_tinymonster_ffmpegstudy1_FFmpegDecode_DecodeFile
  (JNIEnv* env, jclass obj, jstring input_) {
    LOGE("%s", "1");
    const char* filename = env->GetStringUTFChars(input_, 0);

    AVCodec* pCodec;             // decoder
    AVCodecContext* pCodecCtx;   // decoder context of the video stream
    AVFrame* pAvFrame;           // holds one decoded (raw) frame
    AVFormatContext* pFormatCtx; // container / stream information

    av_register_all(); // register all available demuxers and codecs
    pFormatCtx = avformat_alloc_context();
    if (avformat_open_input(&pFormatCtx, filename, NULL, NULL) != 0) { // probe the file header
        LOGE("%s", "Can't find the stream!");
    }
    // BUGFIX: the UTF chars were never released — do it as soon as the
    // filename is no longer needed so no return path leaks it.
    env->ReleaseStringUTFChars(input_, filename);

    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) { // read stream info
        LOGE("%s", "Can't find the stream information !");
    }

    // Find the first video stream and remember its index.
    int videoindex = -1;
    for (int i = 0; i < pFormatCtx->nb_streams; ++i) {
        if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i; // BUGFIX: original assigned the undeclared name 'I'
            break;
        }
    }
    if (videoindex == -1) {
        LOGE("%s", "Don't find a video stream !");
        return 1;
    }

    pCodecCtx = pFormatCtx->streams[videoindex]->codec;  // codec context of that stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);  // find a matching decoder
    if (pCodec == NULL) {
        LOGE("%s", "Can't find the decoder !");
        return 2;
    }
    if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {    // open the decoder
        LOGE("%s", "Can't open the decoder !");
        return 3;
    }

    pAvFrame = avcodec_alloc_frame();           // decoded frame
    AVFrame* pFrameBGR = avcodec_alloc_frame(); // frame converted to BGR (OpenCV layout)
    int size = avpicture_get_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height);
    uint8_t* out_buffer = (uint8_t*)av_malloc(size);
    avpicture_fill((AVPicture*)pFrameBGR, out_buffer, AV_PIX_FMT_BGR24,
                   pCodecCtx->width, pCodecCtx->height);

    AVPacket* packet = (AVPacket*)malloc(sizeof(AVPacket));
    LOGI("視頻的文件格式:%s", pFormatCtx->iformat->name);
    LOGI("視頻時長:%d", (pFormatCtx->duration) / 1000000);
    LOGI("視頻的寬高:%d,%d", pCodecCtx->width, pCodecCtx->height);
    LOGI("解碼器的名稱:%s", pCodec->name);

    // Scaler converting the decoder's native pixel format to BGR24.
    struct SwsContext* img_convert_ctx;
    img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                     pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_BGR24,
                                     SWS_BICUBIC, NULL, NULL, NULL);

    // Destination OpenCV matrix: 8-bit, 3 channels (BGR).
    cv::Mat pCvMat;
    pCvMat.create(cv::Size(pCodecCtx->width, pCodecCtx->height), CV_8UC3);

    int ret;
    int got_picture;
    int frame_count = 0;

    // Read packets and decode every frame of the video stream.
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == videoindex) {
            ret = avcodec_decode_video2(pCodecCtx, pAvFrame, &got_picture, packet);
            if (ret < 0) {
                // BUGFIX: was printf(), which is invisible on Android; use LOGE
                // like every other error path, and free the packet before leaving.
                LOGE("%s", "Decode Error.(解碼錯誤)");
                av_free_packet(packet);
                free(packet);
                return 4;
            }
            LOGI("解碼第%d幀", frame_count);
            if (got_picture) {
                // Convert the decoded frame (typically YUV) to BGR.
                sws_scale(img_convert_ctx, (const uint8_t* const*)pAvFrame->data,
                          pAvFrame->linesize, 0, pCodecCtx->height,
                          pFrameBGR->data, pFrameBGR->linesize);
                memcpy(pCvMat.data, out_buffer, size); // copy BGR pixels into the Mat
                frame_count++;
                LOGI("解碼第%d幀", frame_count);
            }
        }
        av_free_packet(packet);
    }

    // Release everything in reverse order of allocation.
    free(packet); // BUGFIX: the malloc'ed AVPacket struct itself was leaked
    av_free(out_buffer);
    av_free(pFrameBGR);
    av_free(pAvFrame);
    avcodec_close(pCodecCtx);
    avformat_close_input(&pFormatCtx);
    sws_freeContext(img_convert_ctx);
    return 0; // BUGFIX: original had the typo 'retur 0;'
}
13.這樣就可以在JAVA代碼中調用C++代碼處理視頻了。