I. Overview

1. Opening ijkplayer, you can see its main directory structure:

tool     - scripts that initialize the project
config   - configuration files used to build ffmpeg
extra    - sources of the dependencies needed to build ijkplayer, e.g. ffmpeg and openssl
ijkmedia - the core code
    ijkplayer - data download and decoding for the player
    ijksdl    - audio/video rendering
ios      - upper-layer API wrappers and platform-specific code for iOS
android  - upper-layer API wrappers and platform-specific code for Android

In the concrete implementation, the differences between iOS and Android show up mainly in hardware video decoding and in audio/video rendering. The underlying components are compared below:

Platform   HardwareCodec   VideoRender            AudioOutput
iOS        VideoToolBox    OpenGL ES              AudioQueue
Android    MediaCodec      OpenGL ES/MediaCodec   OpenSL ES/AudioTrack
2. The IJK modules

Initialization: creates the IjkMediaPlayer player object, the video render object SDL_Vout, and the platform-specific IJKFF_Pipeline object.
Core: reads audio/video data, decodes it, renders it, and keeps audio and video in sync.
Event handling: during playback, the completion of certain actions or state changes (prepare finished, rendering started, etc.) must be reported to the outside as events so the upper layer can run its own business logic.
II. Initialization Flow

ijkmp_android_create performs three main actions:
1. create the IjkMediaPlayer object;
2. create the video render object SDL_Vout;
3. create the platform-specific IJKFF_Pipeline object, which covers video decoding and audio output.
IjkMediaPlayer *ijkmp_android_create(int (*msg_loop)(void*))   // ijkmp_android_create(message_loop)
    // (2.1) Create the FFPlayer object via ffp_create and install the message-handling function
    ijkmp_create(msg_loop)
        IjkMediaPlayer *mp = (IjkMediaPlayer *) mallocz(sizeof(IjkMediaPlayer));
        mp->ffplayer = ffp_create();
        mp->msg_loop = msg_loop;
    // (2.2) Create the video render object SDL_Vout
    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
        return SDL_VoutAndroid_CreateForANativeWindow();
            SDL_Vout *vout = SDL_Vout_CreateInternal(sizeof(SDL_Vout_Opaque));
            // Initialize the function pointers
            vout->opaque_class    = &g_nativewindow_class;
            vout->create_overlay  = func_create_overlay;
            vout->free_l          = func_free_l;
            vout->display_overlay = func_display_overlay;
    // (2.3) Create the platform-specific IJKFF_Pipeline object, covering video decoding and audio output
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
        IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
        // Initialize the function pointer
        pipeline->func_open_video_decoder = func_open_video_decoder;
At this point the ijkplayer initialization flow is complete; in short, it creates the player object and finishes the preparation work for audio/video decoding and rendering.
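For orientation, the Java-layer API ultimately drives this same native object. A minimal sketch of the equivalent native-level call sequence (the URL is a placeholder; return-value checks omitted):

IjkMediaPlayer *mp = ijkmp_android_create(message_loop);   // player + vout + pipeline
ijkmp_set_data_source(mp, "http://example.com/test.mp4");  // remember the URL
ijkmp_prepare_async(mp);   // open streams, start the read/decode threads
ijkmp_start(mp);           // begin playback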
The native entry points are exposed to the Java layer through the g_methods JNI table (ijkplayer_jni.c):
static JNINativeMethod g_methods[] = {
    {
        "_setDataSource",
        "(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V",
        (void *) IjkMediaPlayer_setDataSourceAndHeaders
    },
    { "_setDataSourceFd",       "(I)V",     (void *) IjkMediaPlayer_setDataSourceFd },
    { "_setDataSource",         "(Ltv/danmaku/ijk/media/player/misc/IMediaDataSource;)V", (void *) IjkMediaPlayer_setDataSourceCallback },
    { "_setAndroidIOCallback",  "(Ltv/danmaku/ijk/media/player/misc/IAndroidIO;)V", (void *) IjkMediaPlayer_setAndroidIOCallback },
    { "_setVideoSurface",       "(Landroid/view/Surface;)V", (void *) IjkMediaPlayer_setVideoSurface },
    { "_prepareAsync",          "()V",      (void *) IjkMediaPlayer_prepareAsync },  // analyzed in Part III below
    { "_start",                 "()V",      (void *) IjkMediaPlayer_start },
    { "_stop",                  "()V",      (void *) IjkMediaPlayer_stop },
    { "seekTo",                 "(J)V",     (void *) IjkMediaPlayer_seekTo },
    { "_pause",                 "()V",      (void *) IjkMediaPlayer_pause },
    { "isPlaying",              "()Z",      (void *) IjkMediaPlayer_isPlaying },
    { "getCurrentPosition",     "()J",      (void *) IjkMediaPlayer_getCurrentPosition },
    { "getDuration",            "()J",      (void *) IjkMediaPlayer_getDuration },
    { "_release",               "()V",      (void *) IjkMediaPlayer_release },
    { "_reset",                 "()V",      (void *) IjkMediaPlayer_reset },
    { "setVolume",              "(FF)V",    (void *) IjkMediaPlayer_setVolume },
    { "getAudioSessionId",      "()I",      (void *) IjkMediaPlayer_getAudioSessionId },
    { "native_init",            "()V",      (void *) IjkMediaPlayer_native_init },
    { "native_setup",           "(Ljava/lang/Object;)V", (void *) IjkMediaPlayer_native_setup },
    { "native_finalize",        "()V",      (void *) IjkMediaPlayer_native_finalize },
    { "_setOption",             "(ILjava/lang/String;Ljava/lang/String;)V", (void *) IjkMediaPlayer_setOption },
    { "_setOption",             "(ILjava/lang/String;J)V", (void *) IjkMediaPlayer_setOptionLong },
    { "_getColorFormatName",    "(I)Ljava/lang/String;", (void *) IjkMediaPlayer_getColorFormatName },
    { "_getVideoCodecInfo",     "()Ljava/lang/String;",  (void *) IjkMediaPlayer_getVideoCodecInfo },
    { "_getAudioCodecInfo",     "()Ljava/lang/String;",  (void *) IjkMediaPlayer_getAudioCodecInfo },
    { "_getMediaMeta",          "()Landroid/os/Bundle;", (void *) IjkMediaPlayer_getMediaMeta },
    { "_setLoopCount",          "(I)V",     (void *) IjkMediaPlayer_setLoopCount },
    { "_getLoopCount",          "()I",      (void *) IjkMediaPlayer_getLoopCount },
    { "_getPropertyFloat",      "(IF)F",    (void *) ijkMediaPlayer_getPropertyFloat },
    { "_setPropertyFloat",      "(IF)V",    (void *) ijkMediaPlayer_setPropertyFloat },
    { "_getPropertyLong",       "(IJ)J",    (void *) ijkMediaPlayer_getPropertyLong },
    { "_setPropertyLong",       "(IJ)V",    (void *) ijkMediaPlayer_setPropertyLong },
    { "_setStreamSelected",     "(IZ)V",    (void *) ijkMediaPlayer_setStreamSelected },
    { "native_profileBegin",    "(Ljava/lang/String;)V", (void *) IjkMediaPlayer_native_profileBegin },
    { "native_profileEnd",      "()V",      (void *) IjkMediaPlayer_native_profileEnd },
    { "native_setLogLevel",     "(I)V",     (void *) IjkMediaPlayer_native_setLogLevel },
    { "_setFrameAtTime",        "(Ljava/lang/String;JJII)V", (void *) IjkMediaPlayer_setFrameAtTime },
};
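Registration of this table is what binds the Java methods to their native implementations; a simplified sketch of that step (the real JNI_OnLoad in ijkplayer_jni.c also initializes locks and caches class/method references):

#include <jni.h>

JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
    JNIEnv *env = NULL;
    if ((*vm)->GetEnv(vm, (void **)&env, JNI_VERSION_1_4) != JNI_OK)
        return -1;

    // Bind g_methods to tv.danmaku.ijk.media.player.IjkMediaPlayer
    jclass clazz = (*env)->FindClass(env, "tv/danmaku/ijk/media/player/IjkMediaPlayer");
    (*env)->RegisterNatives(env, clazz, g_methods,
                            sizeof(g_methods) / sizeof(g_methods[0]));
    return JNI_VERSION_1_4;
}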
三稽荧、核心代碼剖析
//ijkplayer實際上是基于ffplay.c實現(xiàn)的,
//本章節(jié)將以該文件為主線工腋,從數(shù)據(jù)接收(數(shù)據(jù)讀纫陶伞)、音視頻解碼擅腰、音視頻渲染及同步這三大方面進行講解蟋恬,要求讀者有基本的ffmpeg知識。
IjkMediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
    int ijkmp_prepare_async(IjkMediaPlayer *mp)
        static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
            // When playback starts, launch the message thread
            mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
                int ret = mp->msg_loop(arg);
            // Entry function that starts the player: it sets the player options, opens the
            // audio output, and, most importantly, calls stream_open.
            int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
                // Audio output: on Android ijkplayer outputs audio via OpenSL ES or AudioTrack;
                // on iOS it uses AudioQueue.
                ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
                    return pipeline->func_open_audio_output(pipeline, ffp);
                        if (ffp->opensles) {
                            aout = SDL_AoutAndroid_CreateForOpenSLES();
                        } else {
                            // Default path: create the SDL_Aout object backed by AudioTrack.
                            // Back in ffplay.c, if the file to be played contains audio, audio_open
                            // is also called from stream_component_open when the audio decoder is
                            // opened, which opens the audio output device.
                            aout = SDL_AoutAndroid_CreateForAudioTrack();
                        }
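Both factory functions return an SDL_Aout whose function pointers route to the chosen backend (OpenSL ES or AudioTrack). An abridged view of the structure, with field names as in ijksdl_aout.h (several members omitted):

typedef struct SDL_Aout {
    SDL_mutex       *mutex;
    SDL_Class       *opaque_class;
    SDL_Aout_Opaque *opaque;   // backend-private state (AudioTrack / OpenSL ES handles)

    void (*free_l)(SDL_Aout *aout);
    int  (*open_audio)(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained);
    void (*pause_audio)(SDL_Aout *aout, int pause_on);
    void (*flush_audio)(SDL_Aout *aout);
    void (*close_audio)(SDL_Aout *aout);
    // ...more control entries omitted...
} SDL_Aout;

SDL_AoutOpenAudio(), seen later in audio_open, simply forwards to aout->open_audio.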
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)

As the code shows, stream_open mainly does the following:
(1) creates videoq/audioq, which hold the compressed (pre-decode) video/audio data
(2) creates pictq/sampq, which hold the decoded video/audio data
(3) creates the video render thread video_refresh_thread
(4) creates the data-reading thread read_thread

if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
    goto fail;
if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
    goto fail;
if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
    goto fail;
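frame_queue_init ties each decoded-frame ring buffer to the packet queue that feeds it. An abridged view of the structure and initializer, following ffplay.c (details trimmed):

typedef struct FrameQueue {
    Frame        queue[FRAME_QUEUE_SIZE]; // ring buffer of decoded frames
    int          rindex;                  // read index: next frame to display
    int          windex;                  // write index: next slot to fill
    int          size;                    // frames currently stored
    int          max_size;
    int          keep_last;               // keep the last displayed frame readable
    int          rindex_shown;
    SDL_mutex   *mutex;
    SDL_cond    *cond;
    PacketQueue *pktq;                    // the packet queue feeding this frame queue
} FrameQueue;

static int frame_queue_init(FrameQueue *f, PacketQueue *pktq, int max_size, int keep_last)
{
    memset(f, 0, sizeof(FrameQueue));
    if (!(f->mutex = SDL_CreateMutex()) || !(f->cond = SDL_CreateCond()))
        return AVERROR(ENOMEM);
    f->pktq      = pktq;
    f->max_size  = FFMIN(max_size, FRAME_QUEUE_SIZE);
    f->keep_last = !!keep_last;
    for (int i = 0; i < f->max_size; i++)
        if (!(f->queue[i].frame = av_frame_alloc()))
            return AVERROR(ENOMEM);
    return 0;
}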
// The video pipeline:
// 1. decoder_decode_frame takes one packet from the pre-decode video queue,
// 2. feeds it to the decoder; ffplay_video_thread pushes the decoded frame into pictq,
// 3. video_image_display2 then fetches the most recent decoded frame from pictq
// 4. and hands it to SDL, which renders it via OpenGL.
Fast startup: first, ijkplayer synchronizes video to audio by default, and video_refresh_thread is where that synchronization happens; if the first two video frames are rendered immediately instead of being synchronized, the time to first frame drops considerably. Second, set probesize: left unset, avformat_find_stream_info can take a long time. Around 1 KB is a reasonable value for audio-only streams and around 64 KB for audio+video; going further, you can supply the decoding parameters yourself and skip avformat_find_stream_info altogether. Finally, the refresh timing of those first two frames can be improved: instead of pacing them with sleep, use a semaphore so rendering is signaled as soon as decoding finishes. With these changes the first frame can usually be shown within 500 ms.
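As a rough illustration of the probesize/analyzeduration tuning above, the values can be set through the option API before calling prepare (a sketch; the exact values should be validated against your own streams, and the option API shown is ijkmp_set_option from ijkplayer.c):

// Illustrative fast-start tuning, applied before ijkmp_prepare_async()
ijkmp_set_option(mp, IJKMP_OPT_CATEGORY_FORMAT, "probesize",       "65536"); // ~64 KB for audio+video
ijkmp_set_option(mp, IJKMP_OPT_CATEGORY_FORMAT, "analyzeduration", "1");     // keep stream probing minimal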
is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");
// Audio/video synchronization
There are several choices for the reference clock:
- the video timestamps as the reference clock source
- the audio timestamps as the reference clock source
- an external clock as the reference clock source
Since people are more sensitive to audio than to video, the audio clock is preferred as the master clock whenever an audio stream is present.
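This preference is implemented by ffplay's clock-selection helpers (abridged):

static int get_master_sync_type(VideoState *is)
{
    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER)
        return is->video_st ? AV_SYNC_VIDEO_MASTER : AV_SYNC_AUDIO_MASTER;
    else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER)
        return is->audio_st ? AV_SYNC_AUDIO_MASTER : AV_SYNC_EXTERNAL_CLOCK;
    else
        return AV_SYNC_EXTERNAL_CLOCK;
}

static double get_master_clock(VideoState *is)
{
    switch (get_master_sync_type(is)) {
    case AV_SYNC_VIDEO_MASTER: return get_clock(&is->vidclk);
    case AV_SYNC_AUDIO_MASTER: return get_clock(&is->audclk);
    default:                   return get_clock(&is->extclk);
    }
}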
video_refresh(ffp, &remaining_time);
// (3.1.2) lastvp is the previous frame and vp is the current frame; pictq is a circular (ring) queue of frames.
// 1. frame_queue_peek_last returns the frame currently on screen (the last one shown)
// 2. frame_queue_peek returns the frame that should be displayed now
// 3. frame_queue_peek_next returns the frame after that
// 4. frame_queue_next advances the read index once a frame has been consumed
// 5. Frame *frame_queue_peek_writable(FrameQueue *f) returns the Frame slot in the queue to be filled
// 6. frame_queue_push commits a frame into the queue after frame_queue_peek_writable (windex++)
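The peek helpers are one-liners over the ring buffer (from ffplay.c, abridged; rindex_shown is 0 until the first frame has been displayed):

static Frame *frame_queue_peek_last(FrameQueue *f)  // frame currently on screen
{
    return &f->queue[f->rindex];
}

static Frame *frame_queue_peek(FrameQueue *f)       // frame to display now
{
    return &f->queue[(f->rindex + f->rindex_shown) % f->max_size];
}

static Frame *frame_queue_peek_next(FrameQueue *f)  // the frame after that
{
    return &f->queue[(f->rindex + f->rindex_shown + 1) % f->max_size];
}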
lastvp = frame_queue_peek_last(&is->pictq);
vp = frame_queue_peek(&is->pictq);
// last_duration is the display duration of the previous frame, derived from the pts
// of the current and previous frames; compute_target_delay then derives how long to
// wait before displaying the current frame.
In compute_target_delay, if the current master clock source is not video, the difference between the video clock and the master clock is computed:
- if the current video frame lags the master clock, the wait before the next frame is shortened;
- if the video frame is ahead and its display duration exceeds the refresh threshold, the next frame's display time is the lead plus the previous frame's display duration;
- if the video frame is ahead but the previous frame's display duration is below the threshold, the delay is doubled.
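The three cases above map directly onto the branches of compute_target_delay (abridged from ff_ffplay.c):

static double compute_target_delay(FFPlayer *ffp, double delay, VideoState *is)
{
    double sync_threshold, diff = 0;

    if (get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER) {
        // distance between the video clock and the master (audio) clock
        diff = get_clock(&is->vidclk) - get_master_clock(is);

        sync_threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
        if (!isnan(diff) && fabs(diff) < is->max_frame_duration) {
            if (diff <= -sync_threshold)                  // video lags: shorten the wait
                delay = FFMAX(0, delay + diff);
            else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD)
                delay = delay + diff;                     // video leads, long frame: add the lead
            else if (diff >= sync_threshold)
                delay = 2 * delay;                        // video leads, short frame: double the delay
        }
    }
    return delay;
}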
last_duration = vp_duration(is, lastvp, vp);
delay = compute_target_delay(ffp, last_duration, is);
// frame_timer is effectively the time at which the previous frame was displayed,
// so frame_timer + delay is the time at which the current frame should be displayed.
// (3.1.2) If the system time has not yet reached the current frame's display time,
// jump straight to display; is->force_refresh is 0 at this point, so the current frame
// is not shown, and video_refresh_thread sleeps until its next iteration.
time = av_gettime_relative() / 1000000.0;
if (isnan(is->frame_timer) || time < is->frame_timer)
    is->frame_timer = time;
if (time < is->frame_timer + delay) {
    *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
    goto display;
}
// (3.1.3) If the current frame's display time has already passed and its distance from
// the current system time exceeds AV_SYNC_THRESHOLD_MAX, reset the frame's display
// time to the system time; this re-bases frame_timer for the frames that follow.
// Then, if more frames are buffered and the current time is already past the current
// frame's display window, drop the current frame and try to show the next one.
is->frame_timer += delay;
if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
    is->frame_timer = time;
SDL_LockMutex(is->pictq.mutex);
if (!isnan(vp->pts))
    update_video_pts(is, vp->pts, vp->pos, vp->serial);
SDL_UnlockMutex(is->pictq.mutex);
if (frame_queue_nb_remaining(&is->pictq) > 1) {
    // peek at the next frame to display in the ring queue
    Frame *nextvp = frame_queue_peek_next(&is->pictq);
    duration = vp_duration(is, vp, nextvp);
    if (!is->step && (ffp->framedrop > 0 || (ffp->framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) && time > is->frame_timer + duration) {
        frame_queue_next(&is->pictq);
        goto retry;
    }
}
// (3.1.4) Finally, render the image
video_display2(ffp);
    video_image_display2(ffp);
        // data flow: is->pictq -> vp->bmp -> overlay
        // (3.1.4.1) read the frame to display from pictq (f->rindex)
        vp = frame_queue_peek_last(&is->pictq);
        // (3.1.4.2) draw the frame
        SDL_VoutDisplayYUVOverlay(ffp->vout, vp->bmp);
            return vout->display_overlay(vout, overlay);
            // display_overlay was assigned in SDL_VoutAndroid_CreateForANativeWindow()
            // (ijksdl_vout_android_nativewindow.c):
            vout->display_overlay = func_display_overlay;
                // draw the image with OpenGL
                int retval = func_display_overlay_l(vout, overlay);
                    return IJK_EGL_display(opaque->egl, native_window, overlay);
                    // (or) return SDL_Android_NativeWindow_display_l(native_window, overlay);
// read_thread() calls the following functions:
1. avformat_open_input(): open the media.
2. avformat_find_stream_info(): obtain the stream information.
3. av_dump_format(): print the media information to the console.
4. stream_component_open(): open the video/audio/subtitle decoders and their threads.
5. refresh_thread(): the video refresh thread.
6. av_read_frame(): read one frame of compressed data (one AVPacket).
7. packet_queue_put(): put the packet into the matching PacketQueue by type (video/audio/subtitle).
is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read"); // create the data-reading thread
    ic = avformat_alloc_context(); // (4.1) allocate the top-level input (demuxer) context
    ic->interrupt_callback.callback = decode_interrupt_cb; // (4.2) install the interrupt callback so blocking I/O can be aborted on error or exit
    ic->interrupt_callback.opaque = is;
    // (4.3) open the input: mainly probes the protocol type and, for network sources, establishes the connection
    err = avformat_open_input(&ic, is->filename, is->iformat, &ffp->format_opts);
    // (4.4) probe the media: yields the container format, audio/video codec parameters, etc.
    err = avformat_find_stream_info(ic, opts);
    // (4.5) open the ffmpeg video/audio decoders; this opens the matching decoder and creates its decoding thread
    int stream_component_open(FFPlayer *ffp, int stream_index)
        // stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
        // ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);
        // stream_component_open(ffp, st_index[AVMEDIA_TYPE_SUBTITLE]);
        codec = avcodec_find_decoder(avctx->codec_id); // find a decoder for the stream's codec ID
        if ((ret = avcodec_open2(avctx, codec, &opts)) < 0) { // open the codec context with that decoder
            goto fail;
        }
switch (avctx->codec_type) {
case AVMEDIA_TYPE_AUDIO:
    // (4.5.1) Open the audio decoder and create the audio decoding thread
    ret = audio_open(is, channel_layout, nb_channels, sample_rate, &is->audio_tgt)
        // (4.5.1.1) Configure the audio output parameters in an SDL_AudioSpec
        SDL_AudioSpec wanted_spec;
        wanted_spec.format = AUDIO_S16SYS;
        wanted_spec.silence = 0;
        wanted_spec.samples = FFMAX(SDL_AUDIO_MIN_BUFFER_SIZE, 2 << av_log2(wanted_spec.freq / SDL_AoutGetAudioPerSecondCallBacks(ffp->aout)));
        // (4.5.1.3) AudioTrack (Android) / AudioQueue (iOS) pulls PCM data through this callback while playing
        wanted_spec.callback = sdl_audio_callback;
        // (4.5.1.2) Open the audio device
        while (SDL_AoutOpenAudio(ffp->aout, &wanted_spec, &spec) < 0) {}
            return aout->open_audio(aout, desired, obtained);
    ret = decoder_start(&is->auddec, audio_thread, ffp, "ff_audio_dec") // the audio decoding thread
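A simplified sketch of that pull model, based on ffplay's sdl_audio_callback (buffer bookkeeping trimmed): the backend repeatedly requests len bytes of PCM, and the callback refills its staging buffer from the decoded-sample queue as needed.

static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
{
    FFPlayer   *ffp = opaque;
    VideoState *is  = ffp->is;

    while (len > 0) {
        // staging buffer drained: decode/resample the next frame from sampq
        if (is->audio_buf_index >= is->audio_buf_size) {
            int audio_size = audio_decode_frame(ffp);
            if (audio_size < 0) {            // error or underrun: emit silence
                is->audio_buf      = NULL;
                is->audio_buf_size = SDL_AUDIO_MIN_BUFFER_SIZE;
            } else {
                is->audio_buf_size = audio_size;
            }
            is->audio_buf_index = 0;
        }
        int len1 = is->audio_buf_size - is->audio_buf_index;
        if (len1 > len)
            len1 = len;
        if (is->audio_buf)
            memcpy(stream, is->audio_buf + is->audio_buf_index, len1);
        else
            memset(stream, 0, len1);
        len    -= len1;
        stream += len1;
        is->audio_buf_index += len1;
    }
}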
case AVMEDIA_TYPE_VIDEO:
    // (4.5.2) Create the IJKFF_Pipenode
    ffp->node_vdec = ffpipeline_open_video_decoder(ffp->pipeline, ffp);
        return pipeline->func_open_video_decoder(pipeline, ffp); // ffpipeline_android.c
        // For hardware decoding, iOS uses VideoToolbox and Android uses MediaCodec.
        // Audio decoding in ijkplayer is software-only; hardware audio decoding is not supported.
        // Whether hardware decoding is enabled depends on the mediacodec options; software decoding is the default.
        if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2) {
            ALOGD("laixh1 ffpipenode_create_video_decoder_from_android_mediacodec()\n");
            node = ffpipenode_create_video_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);
        }
        if (!node) { // if the MediaCodec path returned NULL, fall back to software decoding
            ALOGD("laixh2 ffpipenode_create_video_decoder_from_ffplay()\n");
            node = ffpipenode_create_video_decoder_from_ffplay(ffp);
        }
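For reference, the mediacodec flags checked above are plain player options, so hardware decoding is opted into before prepare; a sketch (option name as defined in ff_ffplay_options.h):

// Enable MediaCodec H.264 decoding; software decoding remains the fallback
// whenever the MediaCodec pipenode cannot be created.
ijkmp_set_option_int(mp, IJKMP_OPT_CATEGORY_PLAYER, "mediacodec", 1);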
    // (4.5.3) The video decoding thread.
    // Whether for video or audio, the basic decoding flow is the same: take a frame's worth
    // of data from the pre-decode buffer, decode it, and put the result into the decoded-data buffer.
    ret = decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec")
        if (ffp->node_vdec)
        {
            ret = ffpipenode_run_sync(ffp->node_vdec);
            // func_run_sync depends on the soft/hard decoding configured before playback:
            // software decoding (the default): the implementation in ffpipenode_ffplay_vdec.c;
            // hardware decoding: the implementation in ffpipenode_android_mediacodec_vdec.c;
            return node->func_run_sync(node);
                return ffp_video_thread(opaque->ffp);
                    return ffplay_video_thread(ffp); // ff_ffplay.c
for (;;) {
    // (4.5.3.1) take one frame's worth of data from the pre-decode video queue and feed it to the decoder
    ret = get_video_frame(ffp, frame);
        got_picture = decoder_decode_frame(ffp, &is->viddec, frame, NULL)
            AVPacket pkt;
            for (;;) {
                // (4.5.3.1.1) take a packet from the pre-decode video queue
                do {
                    packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished) < 0)
                        while (1) {
                            // fetch one packet of compressed data (an AVPacket)
                            int new_packet = packet_queue_get(q, pkt, 0, serial);
                        }
                } while (d->queue->serial != d->pkt_serial);
                // (4.5.3.1.2) hand the stream data to the decoder
ffmpeg's older decoding APIs were avcodec_decode_video2() and avcodec_decode_audio4();
they have been superseded by avcodec_send_packet() / avcodec_receive_frame():
// 1) to decode, call avcodec_send_packet() to give the decoder raw compressed data in an AVPacket;
// 2) to encode, call avcodec_send_frame() to give the encoder an AVFrame containing uncompressed audio or video.
avcodec_send_packet and avcodec_receive_frame are not necessarily called one-to-one:
one audio AVPacket may, for instance, hold a full second of audio, so a single
avcodec_send_packet may need some 25 avcodec_receive_frame calls to retrieve all of the decoded audio.
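The general pattern of the new API is: feed one packet, then drain frames until the decoder asks for more input. A standalone sketch (not ijkplayer's exact loop; ffplay's own code, shown next, inverts the order and tries to receive before sending):

static int decode_packet(AVCodecContext *avctx, const AVPacket *pkt, AVFrame *frame)
{
    int ret = avcodec_send_packet(avctx, pkt);   // feed compressed data (NULL pkt flushes)
    if (ret < 0)
        return ret;

    for (;;) {
        ret = avcodec_receive_frame(avctx, frame);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                            // needs more input / fully drained
        if (ret < 0)
            return ret;                          // real decoding error
        // ...consume the frame here...
        av_frame_unref(frame);
    }
}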
do {
    ret = avcodec_receive_frame(d->avctx, frame);
} while (ret != AVERROR(EAGAIN));
avcodec_send_packet(d->avctx, &pkt);
    decode_receive_frame_internal(avctx, avci->buffer_frame);
av_packet_unref(&pkt);
}
// (4.5.3.2) MAX_RETRY_CONVERT_IMAGE defaults to 3 (the author experimented with lowering it
// to 1 for faster startup, but measured no improvement)
while (retry_convert_image <= MAX_RETRY_CONVERT_IMAGE) {
    ret = convert_image(ffp, frame, (int64_t)pts, frame->width, frame->height);
    // FFmpeg's sws_scale can do all of the following in one call:
    // 1. color-space conversion; 2. resolution scaling; 3. pre/post image filtering.
    // Its three core functions: sws_getContext() initializes a SwsContext,
    // sws_scale() processes the image data, and sws_freeContext() releases the SwsContext.
    // Initialize a SwsContext (convert src_frame->format to AV_PIX_FMT_RGB24)
    img_info->frame_img_convert_ctx = sws_getContext(width, height, src_frame->format, dst_width, dst_height, AV_PIX_FMT_RGB24, SWS_BICUBIC, NULL, NULL, NULL)
    // Process the image data.
    // struct SwsContext *c: the context returned by sws_getContext above;
    // const uint8_t *const srcSlice[], const int srcStride[]: the input image (per-plane data pointers and per-plane line sizes);
    // uint8_t *const dst[], const int dstStride[]: the output image (per-plane data pointers and per-plane line sizes)
    ret = sws_scale(img_info->frame_img_convert_ctx, (const uint8_t * const *) src_frame->data, src_frame->linesize, 0, src_frame->height, dst_frame->data, dst_frame->linesize);
    // Note this path encodes rather than decodes: convert_image compresses the scaled frame
    // into an AVPacket and writes it out to a file, which is how frame snapshots are produced.
    // avctx: the encoder's AVCodecContext; avpkt: the encoded output AVPacket;
    // frame: the input AVFrame to encode; got_packet_ptr: set to 1 when a packet was produced.
    // A return value of 0 means the encode succeeded.
    ret = avcodec_encode_video2(AVCodecContext *avctx, AVPacket *avpkt, const AVFrame *frame, int *got_packet_ptr);
    if (ret >= 0 && got_packet > 0) {
        fd = open(file_path, O_RDWR | O_TRUNC | O_CREAT, 0600);
        write(fd, avpkt.data, avpkt.size);
    }
    // Release the SwsContext
    sws_freeContext()
}
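A self-contained sketch of the sws_getContext / sws_scale / sws_freeContext trio used above, converting an arbitrary decoded frame to RGB24 (error handling trimmed; the helper name is illustrative):

#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>

static int frame_to_rgb24(const AVFrame *src, uint8_t *dst_data[4], int dst_linesize[4])
{
    struct SwsContext *ctx = sws_getContext(
        src->width, src->height, (enum AVPixelFormat)src->format,  // input geometry/format
        src->width, src->height, AV_PIX_FMT_RGB24,                 // output geometry/format
        SWS_BICUBIC, NULL, NULL, NULL);
    if (!ctx)
        return -1;

    if (av_image_alloc(dst_data, dst_linesize, src->width, src->height,
                       AV_PIX_FMT_RGB24, 1) < 0) {
        sws_freeContext(ctx);
        return -1;
    }
    sws_scale(ctx, (const uint8_t * const *)src->data, src->linesize,
              0, src->height, dst_data, dst_linesize);
    sws_freeContext(ctx);
    return 0;  // caller releases dst_data[0] with av_freep()
}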
// (4.5.3.3) the decoded frame is pushed into pictq by ffplay_video_thread
ret = queue_picture(ffp, frame, pts, duration, frame->pkt_pos, is->viddec.pkt_serial);
    // returns the Frame slot in the frame_queue to be filled:
    // Frame *frame_queue_peek_writable(FrameQueue *f)
    vp = frame_queue_peek_writable(&is->pictq)
    // commit the frame into the queue after frame_queue_peek_writable (windex++)
    frame_queue_push(&is->pictq);
}
}
// Repeating steps (4.6) and (4.7) keeps producing data to be played.
// (4.6) read media data; what comes out is demuxed, pre-decode data
ret = av_read_frame(ic, pkt);
// (4.7) route audio/video/subtitle packets into their respective queues
if (pkt->stream_index == is->audio_stream && pkt_in_play_range) {
    packet_queue_put(&is->audioq, pkt);
} else if (pkt->stream_index == is->video_stream && pkt_in_play_range
           && !(is->video_st && (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC))) {
    packet_queue_put(&is->videoq, pkt);
} else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) {
    packet_queue_put(&is->subtitleq, pkt);
} else {
    av_packet_unref(pkt);
}
IV. Event Handling

During playback, the completion of certain actions or state changes, such as prepare finishing or rendering starting, must be reported to the outside as events so the upper layer can run its own business logic.
ijkplayer supports a fairly large set of events, defined in ijkplayer/ijkmedia/ijkplayer/ff_ffmsg.h:

#define FFP_MSG_FLUSH                       0
#define FFP_MSG_ERROR                       100
#define FFP_MSG_PREPARED                    200
#define FFP_MSG_COMPLETED                   300
#define FFP_MSG_VIDEO_SIZE_CHANGED          400
#define FFP_MSG_SAR_CHANGED                 401
#define FFP_MSG_VIDEO_RENDERING_START       402
#define FFP_MSG_AUDIO_RENDERING_START       403
#define FFP_MSG_VIDEO_ROTATION_CHANGED      404
#define FFP_MSG_BUFFERING_START             500
#define FFP_MSG_BUFFERING_END               501
#define FFP_MSG_BUFFERING_UPDATE            502
#define FFP_MSG_BUFFERING_BYTES_UPDATE      503
#define FFP_MSG_BUFFERING_TIME_UPDATE       504
#define FFP_MSG_SEEK_COMPLETE               600
#define FFP_MSG_PLAYBACK_STATE_CHANGED      700
#define FFP_MSG_TIMED_TEXT                  800
#define FFP_MSG_VIDEO_DECODER_OPEN          10001

The MEDIA_INFO constants posted to the upper layer:

MEDIA_INFO_UNKNOWN                = 1
MEDIA_INFO_VIDEO_RENDERING_START  = 3      // time the first video frame is rendered
MEDIA_INFO_VIDEO_ROTATION_CHANGED = 10001
MEDIA_INFO_AUDIO_RENDERING_START  = 10002
MEDIA_INFO_AUDIO_DECODED_START    = 10003
MEDIA_INFO_VIDEO_DECODED_START    = 10004  // time the first video frame is decoded
MEDIA_INFO_OPEN_INPUT             = 10005  // time avformat_open_input completes
MEDIA_INFO_FIND_STREAM_INFO       = 10006  // time avformat_find_stream_info completes
MEDIA_INFO_COMPONENT_OPEN         = 10007  // time the I/O components are opened
MEDIA_INFO_VIDEO_FIRSTPKT_GOT              // time the first video packet is obtained
// ijkmp_android_create(message_loop)
IjkMediaPlayer *ijkmp_android_create(int (*msg_loop)(void*))
    ijkmp_create(msg_loop)
        // (4.1) message reporting is initialized here
        mp->msg_loop = msg_loop;
    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
        return SDL_VoutAndroid_CreateForANativeWindow();
            // initialize the function pointers
            vout->opaque_class    = &g_nativewindow_class;
            vout->create_overlay  = func_create_overlay;
            vout->free_l          = func_free_l;
            vout->display_overlay = func_display_overlay;
IjkMediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
    int ijkmp_prepare_async(IjkMediaPlayer *mp)
        static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
            // (4.2) when playback starts, launch the message thread
            mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
                int ret = mp->msg_loop(arg);
                    message_loop
                        message_loop_n
while (1) {
    // (4.4) messages are read here and reported to the app layer as notifications
    int retval = ijkmp_get_msg(mp, &msg, 1);
    switch (msg.what) {
    case FFP_MSG_VIDEO_RENDERING_START:
        MPTRACE("FFP_MSG_VIDEO_RENDERING_START:\n");
        post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0);
        break;
    case FFP_MSG_OPEN_INPUT:
        MPTRACE("FFP_MSG_OPEN_INPUT:\n");
        post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_OPEN_INPUT, 0);
        break;
    case FFP_MSG_FIND_STREAM_INFO:
        MPTRACE("FFP_MSG_FIND_STREAM_INFO:\n");
        post_event(env, weak_thiz, MEDIA_INFO, MEDIA_INFO_FIND_STREAM_INFO, 0);
        break;
    }
}
// (4.3) ffplay.c reports PREPARED completion (and similar events) like this:
ffp_notify_msg1(ffp, FFP_MSG_PREPARED);
ffp_notify_msg1(ffp, FFP_MSG_VIDEO_RENDERING_START);
    // the event and its arguments are wrapped into an AVMessage object
    msg_queue_put_simple3(&ffp->msg_queue, what, 0, 0);
        msg_init_msg(&msg);
        msg_queue_put(q, &msg);
            // the message object is put into the message queue
            ret = msg_queue_put_private(q, msg);
預(yù)加載接口
??public void doPreload(String url, int tsindex){
???//stop
???_doPreload(url, tsindex);
??}
//預(yù)緩存清除接口
??public void deleteCache() {
???_deleteCache();
??}