FFmpeg hardware decoding essentially means not using FFmpeg's own decoders, but looking up a hardware decoder from the system instead; on Android this means invoking the system codec middleware MediaCodec through JNI. There are plenty of articles online about FFmpeg hardware decoding, and the official FFmpeg demos show it in detail. But almost all of this public material targets playback of video files (AVCC format); very little covers hardware decoding of AnnexB video streams.
As everyone knows, before hardware decoding with MediaCodec on Android, the stream parameters must first be configured through MediaFormat, and only then can the decoder be opened correctly. In FFmpeg, when hardware-decoding from a video file, FFmpeg can automatically read from the file header the extradata containing the SPS, PPS and other stream information, and use it to set AVCodecContext->extradata and AVCodecContext->extradata_size, so that avcodec_open2 can open the hardware decoder correctly. What AVCodecContext->extradata stores is the SPS, PPS, width, height and so on, i.e. the same information a MediaFormat carries. With an AnnexB stream, however, there is no file header from which to read the stream parameters and fill extradata, so avcodec_open2 fails; it is the equivalent of using MediaCodec in Java without configuring it through MediaFormat first.
1. Hardware decoding
An AnnexB video stream has no file header, but the NALUs carrying the SPS and PPS information recur in the stream itself (typically before each IDR frame). So before opening the decoder, we can pull one frame's AVPacket with av_read_frame, extract the extradata from that AVPacket, and fill it into the AVCodecContext, which lets avcodec_open2 succeed. FFmpeg's extract_extradata bitstream filter does the extraction:
int extract_extradata(AVCodecContext *pCodecCtx, AVPacket *packet, uint8_t **extradata_dest, int *extradata_size_dest)
{
    const AVBitStreamFilter *bsf;
    int ret;
    if ((bsf = av_bsf_get_by_name("extract_extradata")) == NULL) {
        LOGD("failed to get extract_extradata bsf\n");
        return 0;
    }
    AVBSFContext *bsf_context;
    if ((ret = av_bsf_alloc(bsf, &bsf_context)) < 0) {
        LOGD("failed to alloc bsf context\n");
        return 0;
    }
    if ((ret = avcodec_parameters_from_context(bsf_context->par_in, pCodecCtx)) < 0) {
        LOGD("failed to copy parameters from context\n");
        av_bsf_free(&bsf_context);
        return 0;
    }
    if ((ret = av_bsf_init(bsf_context)) < 0) {
        LOGD("failed to init bsf context\n");
        av_bsf_free(&bsf_context);
        return 0;
    }
    AVPacket *packet_ref = av_packet_alloc();
    if (av_packet_ref(packet_ref, packet) < 0) {
        LOGD("failed to ref packet\n");
        av_packet_free(&packet_ref);
        av_bsf_free(&bsf_context);
        return 0;
    }
    // av_bsf_send_packet takes ownership of the reference and resets packet_ref
    if ((ret = av_bsf_send_packet(bsf_context, packet_ref)) < 0) {
        LOGD("failed to send packet to bsf\n");
        av_packet_free(&packet_ref);
        av_bsf_free(&bsf_context);
        return 0;
    }
    int done = 0;
    while (ret >= 0 && !done) {
        int extradata_size; // note: FFmpeg 5.x changed this argument to size_t
        uint8_t *extradata;
        ret = av_bsf_receive_packet(bsf_context, packet_ref);
        if (ret < 0) {
            if (ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
                LOGD("bsf error, not eagain or eof\n");
                av_packet_free(&packet_ref);
                av_bsf_free(&bsf_context);
                return 0;
            }
            continue;
        }
        extradata = av_packet_get_side_data(packet_ref, AV_PKT_DATA_NEW_EXTRADATA, &extradata_size);
        if (extradata) {
            LOGD("got extradata, %d size!\n", extradata_size);
            done = 1;
            *extradata_dest = (uint8_t *) av_mallocz(extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
            memcpy(*extradata_dest, extradata, extradata_size);
            *extradata_size_dest = extradata_size;
        }
        av_packet_unref(packet_ref);
    }
    av_packet_free(&packet_ref);
    av_bsf_free(&bsf_context);
    return done;
}
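For context, here is how the function above might be wired in before opening the decoder. This is a minimal sketch, assuming an already-created AVFormatContext fmt_ctx, a video stream index video_index, a decoder context codec_ctx, and the h264_mediacodec decoder (these names are illustrative, not from the original code):
// Minimal sketch: recover extradata from the first video packet of an
// AnnexB stream, attach it to the codec context, then open the decoder.
// fmt_ctx / video_index / codec_ctx are assumed to already exist.
AVPacket *pkt = av_packet_alloc();
uint8_t *extradata = NULL;
int extradata_size = 0;
while (av_read_frame(fmt_ctx, pkt) >= 0) {
    int is_video = (pkt->stream_index == video_index);
    if (is_video && extract_extradata(codec_ctx, pkt, &extradata, &extradata_size)) {
        // FFmpeg frees extradata together with the codec context
        codec_ctx->extradata = extradata;
        codec_ctx->extradata_size = extradata_size;
        av_packet_unref(pkt);
        break;
    }
    av_packet_unref(pkt);
}
av_packet_free(&pkt);
if (avcodec_open2(codec_ctx, avcodec_find_decoder_by_name("h264_mediacodec"), NULL) < 0) {
    LOGD("avcodec_open2 failed\n");
}
Note that the packet consumed here is the stream's first frame; a real player would keep it around and feed it to the decoder after opening, otherwise that frame is lost.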
2. JPEG snapshots
If the frame is software-decoded YUV420P, then when encoding it to JPEG with the mjpeg encoder you can directly declare the input format as AV_PIX_FMT_YUVJ420P. The reason it is YUVJ420P rather than YUV420P is that YUVJ420P has a color_range of JPEG (full range), which is what the JPEG encoder requires; see https://stackoverflow.com/a/33939577. The catch is that on most Android devices, as well as on NVIDIA GPUs, the YUV produced by the hardware decoder is not YUV420P but NV12, and the difference between NV12 and YUVJ420P is more than just color_range: if you force AV_PIX_FMT_YUVJ420P as the input format anyway, the encoder crashes. So before encoding the JPEG, the frame must first be converted to AV_PIX_FMT_YUVJ420P with sws_scale.
AVFrame *pFrameYUVJ420;
if (pix_fmt != AV_PIX_FMT_YUVJ420P) {
    pFrameYUVJ420 = av_frame_alloc();
    if (pFrameYUVJ420 == NULL) {
        LOGD("Could not allocate video frame: pFrameYUVJ420.");
        return -1;
    }
    // Determine the required buffer size and allocate the buffer;
    // this buffer holds the YUVJ420P data that will be encoded
    int numBytes = av_image_get_buffer_size(AV_PIX_FMT_YUVJ420P, pFrame->width, pFrame->height, 1);
    uint8_t *buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
    av_image_fill_arrays(pFrameYUVJ420->data, pFrameYUVJ420->linesize, buffer,
                         AV_PIX_FMT_YUVJ420P, pFrame->width, pFrame->height, 1);
    // The decoded frame is not YUVJ420P, so convert it before encoding
    struct SwsContext *sws_ctx = sws_getContext(pFrame->width, pFrame->height, pix_fmt,
                                                pFrame->width, pFrame->height, AV_PIX_FMT_YUVJ420P,
                                                SWS_BILINEAR, NULL, NULL, NULL);
    // Do the conversion
    sws_scale(sws_ctx, (uint8_t const *const *) pFrame->data,
              pFrame->linesize, 0, pFrame->height,
              pFrameYUVJ420->data, pFrameYUVJ420->linesize);
    sws_freeContext(sws_ctx);
    pFrameYUVJ420->format = AV_PIX_FMT_YUVJ420P;
    pFrameYUVJ420->width = pFrame->width;
    pFrameYUVJ420->height = pFrame->height;
    av_frame_free(&pFrame);
} else {
    pFrameYUVJ420 = pFrame;
}
3. OpenGL rendering
Software decoding yields YUV420P while hardware decoding yields NV12, and the two interleave their plane data differently, so the number of textures to create differs, and so does the fragment shader: YUV420P needs 3 texture planes, NV12 only needs 2. Since a video player generally has to support both hardware and software decoding, the shader here handles multiple formats.
Vertex shader and fragment shader:
//vertex shader GLSL
#define GET_STR(x) #x
static const char *vertexShader = GET_STR(
        attribute vec4 aPosition; //vertex coordinate
        attribute vec2 aTexCoord; //input texture coordinate
        varying vec2 vTexCoord;   //texture coordinate passed to the fragment shader
        void main() {
            vTexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y);
            gl_Position = aPosition;
        }
);
//fragment shader
static const char *fragYUV420P = GET_STR(
        precision mediump float;    //precision
        varying vec2 vTexCoord;     //coordinate passed from the vertex shader
        uniform sampler2D yTexture; //input texture (single-channel luma)
        uniform sampler2D uTexture;
        uniform sampler2D vTexture;
        uniform int u_ImgType; // 1:RGBA, 2:NV21, 3:NV12, 4:I420
        void main() {
            if (u_ImgType == 1) { //RGBA
                gl_FragColor = texture2D(yTexture, vTexCoord);
            } else if (u_ImgType == 2) { //NV21: VU interleaved in the second plane
                vec3 yuv;
                vec3 rgb;
                yuv.r = texture2D(yTexture, vTexCoord).r;
                yuv.g = texture2D(uTexture, vTexCoord).a - 0.5;
                yuv.b = texture2D(uTexture, vTexCoord).r - 0.5;
                rgb = mat3(1.0, 1.0, 1.0,
                           0.0, -0.39465, 2.03211,
                           1.13983, -0.58060, 0.0) * yuv;
                //output pixel color
                gl_FragColor = vec4(rgb, 1.0);
            } else if (u_ImgType == 3) { //NV12: UV interleaved in the second plane
                vec3 yuv;
                vec3 rgb;
                yuv.r = texture2D(yTexture, vTexCoord).r;
                yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
                yuv.b = texture2D(uTexture, vTexCoord).a - 0.5;
                rgb = mat3(1.0, 1.0, 1.0,
                           0.0, -0.39465, 2.03211,
                           1.13983, -0.58060, 0.0) * yuv;
                //output pixel color
                gl_FragColor = vec4(rgb, 1.0);
            } else if (u_ImgType == 4) { //I420: three separate planes
                vec3 yuv;
                vec3 rgb;
                yuv.r = texture2D(yTexture, vTexCoord).r;
                yuv.g = texture2D(uTexture, vTexCoord).r - 0.5;
                yuv.b = texture2D(vTexture, vTexCoord).r - 0.5;
                rgb = mat3(1.0, 1.0, 1.0,
                           0.0, -0.39465, 2.03211,
                           1.13983, -0.58060, 0.0) * yuv;
                //output pixel color
                gl_FragColor = vec4(rgb, 1.0);
            } else {
                gl_FragColor = vec4(1.0);
            }
        }
);
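For these branches to work at draw time, each sampler uniform has to be told its texture unit and u_ImgType has to be set to match the frame format. A minimal sketch, where program stands for the linked shader program (an assumed name; the uniform names come from the shader above):
// Sketch: bind sampler uniforms to texture units 0/1/2 and select the
// branch in the fragment shader (values per the shader comment:
// 1:RGBA, 2:NV21, 3:NV12, 4:I420).
glUseProgram(program);
glUniform1i(glGetUniformLocation(program, "yTexture"), 0);
glUniform1i(glGetUniformLocation(program, "uTexture"), 1);
glUniform1i(glGetUniformLocation(program, "vTexture"), 2);
glUniform1i(glGetUniformLocation(program, "u_ImgType"), 3); // e.g. NV12 from MediaCodec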
Uploading the texture data:
switch (pCodecCtx->pix_fmt) {
    case AV_PIX_FMT_RGBA: {
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texts[0]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, pFrame->data[0]);
    }
        break;
    case AV_PIX_FMT_NV21:
    case AV_PIX_FMT_NV12: {
        //activate texture unit 0 and bind the created GL texture to it
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texts[0]);
        //replace the texture contents (Y plane)
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, pFrame->data[0]);
        //update the interleaved UV plane
        glActiveTexture(GL_TEXTURE0 + 1);
        glBindTexture(GL_TEXTURE_2D, texts[1]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, pFrame->data[1]);
    }
        break;
    case AV_PIX_FMT_YUV420P: {
        //activate texture unit 0 and bind the created GL texture to it
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, texts[0]);
        //replace the texture contents (Y plane)
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, pFrame->data[0]);
        //texture unit 1: U plane
        glActiveTexture(GL_TEXTURE0 + 1);
        glBindTexture(GL_TEXTURE_2D, texts[1]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, pFrame->data[1]);
        //texture unit 2: V plane
        glActiveTexture(GL_TEXTURE0 + 2);
        glBindTexture(GL_TEXTURE_2D, texts[2]);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, pFrame->data[2]);
    }
        break;
    default:
        break;
}
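One thing the snippet above relies on: glTexSubImage2D only overwrites existing storage, so each texture must have been allocated once beforehand with a matching internal format. A minimal sketch of that one-time setup for the NV12/NV21 case (texts is the same texture-name array):
// Sketch: one-time allocation of the texture storage that the
// glTexSubImage2D calls above update each frame: a full-size luma
// plane plus a half-size interleaved chroma plane.
glGenTextures(2, texts);
glBindTexture(GL_TEXTURE_2D, texts[0]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
glBindTexture(GL_TEXTURE_2D, texts[1]);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE_ALPHA, width / 2, height / 2, 0,
             GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, NULL);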
4. Muxing to MP4
If the resulting MP4 won't play, or only VLC can play it, and phones, PCs and Macs all fail to generate a thumbnail for the file, the MP4 file header is probably wrong. The key is to set extradata and extradata_size on the output stream's AVStream->codecpar, and to set AV_CODEC_FLAG_GLOBAL_HEADER. With software decoding, FFmpeg fills in extradata and extradata_size on the AVCodecContext automatically, so you can read them straight from the AVCodecContext and copy them to AVStream->codecpar. With hardware decoding, you need to first extract extradata and extradata_size from an AVPacket as in section 1, then copy them to AVStream->codecpar.
AVStream *in_stream = ifmt_ctx_v->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
videoindex_v = i;
if (!out_stream) {
    LOGD("Failed allocating output stream");
    ret = AVERROR_UNKNOWN;
    goto end;
}
videoindex_out = out_stream->index;
// Copy the codec parameters from the input stream
ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
if (ret < 0) {
    LOGD("Failed to copy codec parameters from input to output stream");
    goto end;
}
// extradata is needed to write a proper file header
out_stream->codecpar->extradata = (uint8_t *) av_mallocz(pCodecCtx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
memcpy(out_stream->codecpar->extradata, pCodecCtx->extradata, pCodecCtx->extradata_size);
out_stream->codecpar->extradata_size = pCodecCtx->extradata_size;
LOGD("got extradata, %d size!\n", out_stream->codecpar->extradata_size);
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
    out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    LOGD("AV_CODEC_FLAG_GLOBAL_HEADER");
}
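In the hardware-decoding case pCodecCtx->extradata is empty, so the copy above would instead use the buffer recovered in section 1. A minimal sketch, where first_packet stands for a packet previously read from the stream (the name is illustrative):
// Sketch: hardware-decode path, reusing the extradata recovered by
// extract_extradata() from section 1 instead of pCodecCtx->extradata.
uint8_t *hw_extradata = NULL;
int hw_extradata_size = 0;
if (extract_extradata(pCodecCtx, first_packet, &hw_extradata, &hw_extradata_size)) {
    out_stream->codecpar->extradata      = hw_extradata; // freed together with the stream
    out_stream->codecpar->extradata_size = hw_extradata_size;
}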
References:
https://blog.csdn.net/yue_huang/article/details/75126155
https://blog.csdn.net/special00/article/details/82533768
https://github.com/bmegli/hardware-video-decoder/issues/5#issuecomment-469857880
https://github.com/bmegli/hardware-video-decoder/blob/2b9bf0f053/hvd.c
https://github.com/githubhaohao/AudioVideo/blob/main/app/src/main/cpp/player/render/video/VideoGLRender.cpp
https://stackoverflow.com/a/33939577
https://blog.csdn.net/Kennethdroid/article/details/108737936
http://www.reibang.com/p/65d926ba1f1c/
https://qincji.gitee.io/2021/02/01/afplayer/03_mediacodec/index.html