// ijkmedia/ijkplayer/ff_ffplay_def.h
// Partial quote of the player context: the four "polymorphic" members
// discussed in this article. Each is an abstract interface struct whose
// function pointers are filled in by a concrete implementation.
typedef struct FFPlayer {
    //...
    SDL_Aout *aout;                     // audio output device abstraction
    SDL_Vout *vout;                     // video output device abstraction
    struct IJKFF_Pipeline *pipeline;    // decoder/output factory ("pipeline")
    struct IJKFF_Pipenode *node_vdec;   // video decoder node created by the pipeline
    //...
} FFPlayer;
这篇文章的内容：
分析 FFPlayer 结构体中的 SDL_Aout、SDL_Vout、IJKFF_Pipeline 和 IJKFF_Pipenode 的作用。
此外，这四个结构体，我认为是为了实现多态，详见 IBM 教程《技巧：用 C 语言实现程序的多态性》。
而这里面还用到了 SDL_Vout_Opaque 这类带 opaque 字样的结构体和指针，这是为了实现封装和对外隐藏细节。
不得不说，ijkplayer 用 C 语言也很好地体现了面向对象的思想。
SDL_Vout
1. 結(jié)構(gòu)體
1.1 SDL_Vout
// ijkmedia/ijksdl/ijksdl_vout.h
// Video-output "interface": a concrete backend (e.g. the Android
// ANativeWindow one) fills in the function pointers below.
struct SDL_Vout {
    SDL_mutex *mutex;               // serializes overlay creation/display
    SDL_Class *opaque_class;        // runtime "class" tag (only a name)
    SDL_Vout_Opaque *opaque;        // backend-private state, hidden from callers
    // create an overlay (SDL_VoutOverlay) sized for one decoded picture
    SDL_VoutOverlay *(*create_overlay)(int width, int height, int frame_format, SDL_Vout *vout);
    // destroy this vout
    void (*free_l)(SDL_Vout *vout);
    // render one overlay to the display
    int (*display_overlay)(SDL_Vout *vout, SDL_VoutOverlay *overlay);
    // pixel format used for overlays
    Uint32 overlay_format;
};
1.2 SDL_Vout_Opaque
opaque 类似于 Java 中的内部类，用来向调用者屏蔽该类的内部逻辑。
看下这个 SDL_Vout_Opaque 的定义：
用 typedef 定义了一个抽象类型，那么它的实现在哪里？
// ijkmedia/ijksdl/ijksdl_vout.h
//这里引用的是 ffmpeg 后缀的这个头文件，因此用的是软件（软解）的方式去创建 SDL_VoutOverlay_Opaque
#include "ffmpeg/ijksdl_inc_ffmpeg.h"
//...
typedef struct SDL_Vout_Opaque SDL_Vout_Opaque;
//...
他的實(shí)現(xiàn)在兩處定義了贤惯,分別在硬解和軟解的時(shí)候使用
// Hardware-decode variant of the overlay's private state.
// ijkmedia/ijksdl/android/ijksdl_vout_overlay_android_mediacodec.c
typedef struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;
    SDL_Vout *vout;
    SDL_AMediaCodec *acodec;                    // MediaCodec wrapper
    SDL_AMediaCodecBufferProxy *buffer_proxy;   // handle to a MediaCodec output buffer
    Uint16 pitches[AV_NUM_DATA_POINTERS];       // per-plane row sizes
    Uint8 *pixels[AV_NUM_DATA_POINTERS];        // per-plane pixel pointers
} SDL_VoutOverlay_Opaque;
// Software-decode variant of the overlay's private state.
// ijkmedia/ijksdl/ffmpeg/ijksdl_vout_overlay_ffmpeg.c
struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;
    AVFrame *managed_frame;                 // frame owned by this overlay
    AVBufferRef *frame_buffer;              // backing buffer for managed_frame
    int planes;
    AVFrame *linked_frame;                  // frame referenced from the decoder
    Uint16 pitches[AV_NUM_DATA_POINTERS];   // per-plane row sizes
    Uint8 *pixels[AV_NUM_DATA_POINTERS];    // per-plane pixel pointers
    int no_neon_warned;
    struct SwsContext *img_convert_ctx;     // swscale context for format conversion
    int sws_flags;
};
而实际上硬解的那个是不会使用的，为什么？因为定义 SDL_VoutOverlay_Opaque 时引用的头文件是 ffmpeg/ijksdl_inc_ffmpeg.h。
1.3 SDL_Class
暂时也不清楚这个是做什么的，只保存了一个字符串而已。
// Minimal runtime "class" descriptor: just a name used to tag which
// concrete implementation filled in an interface struct.
typedef struct SDL_Class {
    const char *name;
} SDL_Class;
1.4 SDL_VoutOverlay
// One displayable picture ("overlay"): geometry/format description plus
// the operations a concrete overlay implementation provides.
struct SDL_VoutOverlay {
    int w;                  /**< Read-only */
    int h;                  /**< Read-only */
    Uint32 format;          /**< Read-only */
    int planes;             /**< Read-only */
    Uint16 *pitches;        /**< in bytes, Read-only */
    Uint8 **pixels;         /**< Read-write */
    int is_private;
    int sar_num;            // sample aspect ratio numerator
    int sar_den;            // sample aspect ratio denominator
    SDL_Class *opaque_class;            // implementation tag
    SDL_VoutOverlay_Opaque *opaque;     // implementation-private state
    void (*free_l)(SDL_VoutOverlay *overlay);
    int (*lock)(SDL_VoutOverlay *overlay);
    int (*unlock)(SDL_VoutOverlay *overlay);
    void (*unref)(SDL_VoutOverlay *overlay);
    // copy/reference one decoded AVFrame into this overlay
    int (*func_fill_frame)(SDL_VoutOverlay *overlay, const AVFrame *frame);
};
1.5 SDL_VoutOverlay_Opaque
#include "ffmpeg/ijksdl_inc_ffmpeg.h"
typedef struct SDL_VoutOverlay_Opaque SDL_VoutOverlay_Opaque;
軟解:
// ijkmedia/ijksdl/ffmpeg/ijksdl_vout_overlay_ffmpeg.c
// Software-decode (FFmpeg) implementation of the overlay's private state.
struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;
    AVFrame *managed_frame;                 // frame owned by this overlay
    AVBufferRef *frame_buffer;              // backing buffer for managed_frame
    int planes;
    AVFrame *linked_frame;                  // frame referenced from the decoder
    Uint16 pitches[AV_NUM_DATA_POINTERS];
    Uint8 *pixels[AV_NUM_DATA_POINTERS];
    int no_neon_warned;
    struct SwsContext *img_convert_ctx;     // swscale context for conversion
    int sws_flags;
};
硬解:
// Hardware-decode (MediaCodec) implementation of the overlay's private state.
typedef struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;
    SDL_Vout *vout;
    SDL_AMediaCodec *acodec;                    // MediaCodec wrapper
    SDL_AMediaCodecBufferProxy *buffer_proxy;   // MediaCodec output buffer handle
    Uint16 pitches[AV_NUM_DATA_POINTERS];
    Uint8 *pixels[AV_NUM_DATA_POINTERS];
} SDL_VoutOverlay_Opaque;
同样的，这里用的是软解。
2. 初始化
3. 使用
// Create the Android player: allocates the core player, then wires up the
// video output device and the pipeline. On any failure, drops the ref and
// returns NULL (goto-based cleanup).
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    // create the IjkMediaPlayer core
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;
    // create the video output device; overlays are later created per-frame,
    // via MediaCodec (hw) or FFmpeg (sw) depending on the frame format
    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;
    // create the pipeline (decoder/audio-output factory)
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;
    // hand the vout to the pipeline (stored as a weak reference)
    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);
    return mp;
fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}
mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
// Thin alias: the "Surface" vout is implemented on top of ANativeWindow.
SDL_Vout *SDL_VoutAndroid_CreateForAndroidSurface()
{
    return SDL_VoutAndroid_CreateForANativeWindow();
}
// Construct the ANativeWindow-backed SDL_Vout: allocates the base struct,
// initializes its private state (overlay arrays, EGL), and binds the
// concrete function-pointer implementations.
SDL_Vout *SDL_VoutAndroid_CreateForANativeWindow()
{
    // allocate SDL_Vout plus its opaque block
    SDL_Vout *vout = SDL_Vout_CreateInternal(sizeof(SDL_Vout_Opaque));
    if (!vout)
        return NULL;
    SDL_Vout_Opaque *opaque = vout->opaque;
    opaque->native_window = NULL;
    if (ISDL_Array__init(&opaque->overlay_manager, 32))
        goto fail;
    if (ISDL_Array__init(&opaque->overlay_pool, 32))
        goto fail;
    // create the EGL context wrapper
    opaque->egl = IJK_EGL_create();
    if (!opaque->egl)
        goto fail;
    // bind the vout "methods" to this backend's implementations
    vout->opaque_class = &g_nativewindow_class;
    vout->create_overlay = func_create_overlay;
    vout->free_l = func_free_l;
    vout->display_overlay = func_display_overlay;
    return vout;
fail:
    func_free_l(vout);
    return NULL;
}
// Shared constructor: zero-allocates the SDL_Vout, its opaque block of the
// requested size, and the mutex. Frees everything already acquired on
// failure and returns NULL.
inline static SDL_Vout *SDL_Vout_CreateInternal(size_t opaque_size)
{
    // allocate the vout itself (zeroed)
    SDL_Vout *vout = (SDL_Vout*) calloc(1, sizeof(SDL_Vout));
    if (!vout)
        return NULL;
    // allocate the backend-specific opaque block (zeroed)
    vout->opaque = calloc(1, opaque_size);
    if (!vout->opaque) {
        free(vout);
        return NULL;
    }
    // create the mutex guarding overlay operations
    vout->mutex = SDL_CreateMutex();
    if (vout->mutex == NULL) {
        free(vout->opaque);
        free(vout);
        return NULL;
    }
    return vout;
}
接下来一一看下 vout 的 3 个函数在这里被赋的值：
vout->create_overlay = func_create_overlay;
vout->free_l = func_free_l;
vout->display_overlay = func_display_overlay;
// Locked wrapper: serializes overlay creation through vout->mutex and
// delegates to the "_l" (already-locked) worker.
static SDL_VoutOverlay *func_create_overlay(int width, int height, int frame_format, SDL_Vout *vout)
{
    SDL_LockMutex(vout->mutex);
    // create the SDL_VoutOverlay under the lock
    SDL_VoutOverlay *overlay = func_create_overlay_l(width, height, frame_format, vout);
    SDL_UnlockMutex(vout->mutex);
    return overlay;
}
// Choose the overlay implementation per frame format: MediaCodec frames
// get the hardware overlay, everything else the FFmpeg (software) one.
static SDL_VoutOverlay *func_create_overlay_l(int width, int height, int frame_format, SDL_Vout *vout)
{
    switch (frame_format) {
    case IJK_AV_PIX_FMT__ANDROID_MEDIACODEC:
        // hardware path: frames are MediaCodec output buffers
        return SDL_VoutAMediaCodec_CreateOverlay(width, height, vout);
    default:
        // software path: plain pixel buffers filled by FFmpeg
        return SDL_VoutFFmpeg_CreateOverlay(width, height, frame_format, vout);
    }
}
SDL_Aout
1. 結(jié)構(gòu)體
typedef struct SDL_Aout SDL_Aout;
// Audio-output "interface": concrete backends (AudioTrack, OpenSL ES)
// fill in the function pointers below.
struct SDL_Aout {
    SDL_mutex *mutex;
    double minimal_latency_seconds;
    SDL_Class *opaque_class;        // implementation tag
    SDL_Aout_Opaque *opaque;        // backend-private state
    void (*free_l)(SDL_Aout *vout); // NOTE(review): parameter named "vout" in the original source
    int (*open_audio)(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained);
    void (*pause_audio)(SDL_Aout *aout, int pause_on);
    void (*flush_audio)(SDL_Aout *aout);
    void (*set_volume)(SDL_Aout *aout, float left, float right);
    void (*close_audio)(SDL_Aout *aout);
    double (*func_get_latency_seconds)(SDL_Aout *aout);
    void (*func_set_default_latency_seconds)(SDL_Aout *aout, double latency);
    // optional
    void (*func_set_playback_rate)(SDL_Aout *aout, float playbackRate);
    void (*func_set_playback_volume)(SDL_Aout *aout, float playbackVolume);
    int (*func_get_audio_persecond_callbacks)(SDL_Aout *aout);
    // Android only
    int (*func_get_audio_session_id)(SDL_Aout *aout);
};
2. 初始化
在 ffp_prepare_async_l() 函数中：
// Asynchronous "prepare": sanitizes options, lazily creates the audio
// output (first call only), then opens the stream. Returns 0 on success,
// -1 or EIJK_OUT_OF_MEMORY on failure.
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    assert(!ffp->is);
    assert(file_name);
    // for rtmp/rtsp, remove the "timeout" option (different semantics there)
    if (av_stristart(file_name, "rtmp", NULL) ||
        av_stristart(file_name, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }
    /* there is a length limit in avformat */
    if (strlen(file_name) + 1 > 1024) {
        av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
        if (avio_find_protocol_name("ijklongurl:")) {
            // smuggle the overlong url through an option, open "ijklongurl:"
            av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
            file_name = "ijklongurl:";
        }
    }
    // print version info
    av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
    ffp_show_version_str(ffp, "ijkplayer", ijk_version_info());
    ffp_show_version_str(ffp, "FFmpeg", av_version_info());
    ffp_show_version_int(ffp, "libavutil", avutil_version());
    ffp_show_version_int(ffp, "libavcodec", avcodec_version());
    ffp_show_version_int(ffp, "libavformat", avformat_version());
    ffp_show_version_int(ffp, "libswscale", swscale_version());
    ffp_show_version_int(ffp, "libswresample", swresample_version());
    av_log(NULL, AV_LOG_INFO, "===== options =====\n");
    ffp_show_dict(ffp, "player-opts", ffp->player_opts);
    ffp_show_dict(ffp, "format-opts", ffp->format_opts);
    ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
    ffp_show_dict(ffp, "sws-opts   ", ffp->sws_dict);
    ffp_show_dict(ffp, "swr-opts   ", ffp->swr_opts);
    av_log(NULL, AV_LOG_INFO, "===================\n");
    // apply player options
    av_opt_set_dict(ffp, &ffp->player_opts);
    // if ffp->aout is still NULL, open the audio output device now; nothing
    // assigned it earlier, so the first call always takes this branch
    if (!ffp->aout) {
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }
#if CONFIG_AVFILTER
    if (ffp->vfilter0) {
        GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
        ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
    }
#endif
    VideoState *is = stream_open(ffp, file_name, NULL);
    if (!is) {
        av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
        return EIJK_OUT_OF_MEMORY;
    }
    ffp->is = is;
    ffp->input_filename = av_strdup(file_name);
    return 0;
}
即這句:
//如果 ffplayer->aout == NULL，那么就打开音频输出设备。前面的初始化代码没有为它赋值过，所以第一次调用肯定会进入该分支。
if (!ffp->aout) {
ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
if (!ffp->aout)
return -1;
}
// Polymorphic dispatch: the pipeline decides which audio backend to create.
SDL_Aout *ffpipeline_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    // delegate to the pipeline's bound implementation
    return pipeline->func_open_audio_output(pipeline, ffp);
}
这个地方要使用 IJKFF_Pipeline 的方法，而 IJKFF_Pipeline 是在创建播放器的时候创建的。
// Android implementation of "open audio output": picks OpenSL ES only when
// the ffp->opensles option is set, otherwise AudioTrack; then applies the
// pipeline's stored stereo volume.
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    SDL_Aout *aout = NULL;
    if (ffp->opensles) {
        aout = SDL_AoutAndroid_CreateForOpenSLES();
    } else {
        // default path: Android AudioTrack-backed Aout
        aout = SDL_AoutAndroid_CreateForAudioTrack();
    }
    if (aout)
        SDL_AoutSetStereoVolume(aout, pipeline->opaque->left_volume, pipeline->opaque->right_volume);
    return aout;
}
// Construct the AudioTrack-backed SDL_Aout: allocates base + opaque state,
// creates the wakeup condition/mutex for the playback thread, and binds
// this backend's function-pointer implementations.
SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack()
{
    SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
    if (!aout)
        return NULL;
    SDL_Aout_Opaque *opaque = aout->opaque;
    opaque->wakeup_cond = SDL_CreateCond();     // wakes the audio thread out of pause
    opaque->wakeup_mutex = SDL_CreateMutex();
    opaque->speed = 1.0f;                       // normal playback rate
    aout->opaque_class = &g_audiotrack_class;
    aout->free_l = aout_free_l;
    aout->open_audio = aout_open_audio;
    aout->pause_audio = aout_pause_audio;
    aout->flush_audio = aout_flush_audio;
    aout->set_volume = aout_set_volume;
    aout->close_audio = aout_close_audio;
    aout->func_get_audio_session_id = aout_get_audio_session_id;
    aout->func_set_playback_rate = func_set_playback_rate;
    return aout;
}
那么这里看一下这个 aout->open_audio 函数：
// JNI-attaching wrapper: ensures the current thread has a JNIEnv, then
// delegates to the "_n" (native, env-holding) worker.
static int aout_open_audio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    // SDL_Aout_Opaque *opaque = aout->opaque;
    JNIEnv *env = NULL;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("aout_open_audio: AttachCurrentThread: failed");
        return -1;
    }
    return aout_open_audio_n(env, aout, desired, obtained);
}
// Open the AudioTrack device: creates the Java AudioTrack from the desired
// spec, sizes/allocates the staging buffer, reports the obtained spec, and
// spawns the audio output thread. Returns 0 on success, -1 on failure
// (freeing the AudioTrack on each error path).
static int aout_open_audio_n(JNIEnv *env, SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    assert(desired);
    SDL_Aout_Opaque *opaque = aout->opaque;
    opaque->spec = *desired;
    // construct the Java-side AudioTrack from the SDL audio spec
    opaque->atrack = SDL_Android_AudioTrack_new_from_sdl_spec(env, desired);
    if (!opaque->atrack) {
        ALOGE("aout_open_audio_n: failed to new AudioTrcak()");
        return -1;
    }
    // AudioTrack.getMinBufferSize() determines the staging buffer size
    opaque->buffer_size = SDL_Android_AudioTrack_get_min_buffer_size(opaque->atrack);
    if (opaque->buffer_size <= 0) {
        ALOGE("aout_open_audio_n: failed to getMinBufferSize()");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }
    opaque->buffer = malloc(opaque->buffer_size);
    if (!opaque->buffer) {
        ALOGE("aout_open_audio_n: failed to allocate buffer");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }
    if (obtained) {
        // report back the format/channels the device actually uses
        SDL_Android_AudioTrack_get_target_spec(opaque->atrack, obtained);
        SDLTRACE("audio target format fmt:0x%x, channel:0x%x", (int)obtained->format, (int)obtained->channels);
    }
    opaque->audio_session_id = SDL_Android_AudioTrack_getAudioSessionId(env, opaque->atrack);
    ALOGI("audio_session_id = %d\n", opaque->audio_session_id);
    opaque->pause_on = 1;           // start paused; play is requested later
    opaque->abort_request = 0;
    // spawn the audio output thread (runs aout_thread)
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_android");
    if (!opaque->audio_tid) {
        ALOGE("aout_open_audio_n: failed to create audio thread");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }
    return 0;
}
那么這里看下這個音頻輸出線程做了什么:
// Audio thread entry: attaches the thread to the JVM, then runs the
// env-holding worker loop.
static int aout_thread(void *arg)
{
    SDL_Aout *aout = arg;
    // SDL_Aout_Opaque *opaque = aout->opaque;
    JNIEnv *env = NULL;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("aout_thread: SDL_AndroidJni_SetupEnv: failed");
        return -1;
    }
    return aout_thread_n(env, aout);
}
// Audio output loop: repeatedly pulls PCM from the player callback into the
// staging buffer and writes it to the Java AudioTrack, honoring pause /
// flush / volume / speed requests posted by other threads under
// wakeup_mutex. Exits (and frees the AudioTrack) when abort_request is set.
static int aout_thread_n(JNIEnv *env, SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    SDL_Android_AudioTrack *atrack = opaque->atrack;
    SDL_AudioCallback audio_cblk = opaque->spec.callback;   // pulls decoded PCM
    void *userdata = opaque->spec.userdata;
    uint8_t *buffer = opaque->buffer;
    int copy_size = 256;    // bytes requested from the callback per iteration
    assert(atrack);
    assert(buffer);
    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);
    if (!opaque->abort_request && !opaque->pause_on)
        SDL_Android_AudioTrack_play(env, atrack);
    // loop until an abort is requested
    while (!opaque->abort_request) {
        SDL_LockMutex(opaque->wakeup_mutex);
        if (!opaque->abort_request && opaque->pause_on) {
            // paused: stop the track and wait (1s timeout) for a wakeup
            SDL_Android_AudioTrack_pause(env, atrack);
            while (!opaque->abort_request && opaque->pause_on) {
                SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
            }
            if (!opaque->abort_request && !opaque->pause_on) {
                if (opaque->need_flush) {
                    opaque->need_flush = 0;
                    // drop buffered samples before resuming
                    SDL_Android_AudioTrack_flush(env, atrack);
                }
                // resume playback
                SDL_Android_AudioTrack_play(env, atrack);
            }
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        }
        if (opaque->need_set_volume) {
            opaque->need_set_volume = 0;
            SDL_Android_AudioTrack_set_volume(env, atrack, opaque->left_volume, opaque->right_volume);
        }
        if (opaque->speed_changed) {
            opaque->speed_changed = 0;
            SDL_Android_AudioTrack_setSpeed(env, atrack, opaque->speed);
        }
        SDL_UnlockMutex(opaque->wakeup_mutex);
        // pull copy_size bytes of PCM from the player into buffer
        audio_cblk(userdata, buffer, copy_size);
        // NOTE(review): the next two need_flush checks duplicate each other in
        // the original source; the first also assigns `false` to an int flag.
        if (opaque->need_flush) {
            SDL_Android_AudioTrack_flush(env, atrack);
            opaque->need_flush = false;
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        } else {
            // push the PCM to the Java AudioTrack
            int written = SDL_Android_AudioTrack_write(env, atrack, buffer, copy_size);
            if (written != copy_size) {
                ALOGW("AudioTrack: not all data copied %d/%d", (int)written, (int)copy_size);
            }
        }
        // TODO: 1 if callback return -1 or 0
    }
    SDL_Android_AudioTrack_free(env, atrack);
    return 0;
}
這里看下這個播放是在干嘛:
// Start the Java AudioTrack via JNI (exception swallowed by __catchAll).
void SDL_Android_AudioTrack_play(JNIEnv *env, SDL_Android_AudioTrack *atrack)
{
    SDLTRACE("%s", __func__);
    J4AC_AudioTrack__play__catchAll(env, atrack->thiz);
}
// ijkmedia/ijkj4a/j4a/class/android/media/AudioTrack.h
#define J4AC_AudioTrack__play__catchAll J4AC_android_media_AudioTrack__play__catchAll
// Call AudioTrack.play() and clear any pending Java exception so it cannot
// propagate into subsequent JNI calls.
void J4AC_android_media_AudioTrack__play__catchAll(JNIEnv *env, jobject thiz)
{
    J4AC_android_media_AudioTrack__play(env, thiz);
    J4A_ExceptionCheck__catchAll(env);
}
// Raw JNI bridge: invoke the cached AudioTrack.play() method id on the
// Java object, i.e. C calling into Java.
void J4AC_android_media_AudioTrack__play(JNIEnv *env, jobject thiz)
{
    (*env)->CallVoidMethod(env, thiz, class_J4AC_android_media_AudioTrack.method_play);
}
而這個class_J4AC_android_media_AudioTrack.method_play
是:
// ijkmedia/ijkj4a/j4a/class/android/media/AudioTrack.c
// (excerpt) Resolve and cache the AudioTrack.play() method id at load time;
// signature "()V" = no arguments, void return.
int J4A_loadClass__J4AC_android_media_AudioTrack(JNIEnv *env)
{
    //...
    class_id = class_J4AC_android_media_AudioTrack.id;
    name     = "play";
    sign     = "()V";
    class_J4AC_android_media_AudioTrack.method_play = J4A_GetMethodID__catchAll(env, class_id, name, sign);
    //...
}
那么的確是調(diào)用的java層的AudioTrack#play()
3. 使用
// (excerpt from stream_component_open) audio branch: open the device,
// initialize the decoder, then start the audio decode thread.
switch (avctx->codec_type) {
case AVMEDIA_TYPE_AUDIO:
    // audio_open eventually reaches AudioTrack.java#play()
    if ((ret = audio_open(ffp, channel_layout, nb_channels, sample_rate, &is->audio_tgt)) < 0)
    // initialize the decoder state
    decoder_init(&is->auddec, avctx, &is->audioq, is->continue_read_thread);
    // start the decoder: spawns the audio_thread worker
    if ((ret = decoder_start(&is->auddec, audio_thread, ffp, "ff_audio_dec")) < 0)
}
IJKFF_Pipeline
1. 結(jié)構(gòu)體
1.1 IJKFF_Pipeline
typedef struct IJKFF_Pipeline IJKFF_Pipeline;
// Decoder/output factory "interface": a platform pipeline implementation
// binds these function pointers (Android binds the ffpipeline_android ones).
struct IJKFF_Pipeline {
    SDL_Class *opaque_class;            // implementation tag
    IJKFF_Pipeline_Opaque *opaque;      // implementation-private state
    // destroy the pipeline
    void            (*func_destroy)             (IJKFF_Pipeline *pipeline);
    // open a video decoder node (hw or sw)
    IJKFF_Pipenode *(*func_open_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    // open the audio output device
    SDL_Aout       *(*func_open_audio_output)   (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    // pre-initialize the video decoder (hw path only)
    IJKFF_Pipenode *(*func_init_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    // configure an already-created video decoder node
    int             (*func_config_video_decoder)(IJKFF_Pipeline *pipeline, FFPlayer *ffp);
};
1.2 IJKFF_Pipeline_Opaque
// Android pipeline private state.
typedef struct IJKFF_Pipeline_Opaque {
    FFPlayer      *ffp;                     // back-pointer to the owning player
    SDL_mutex     *surface_mutex;           // guards jsurface access
    jobject        jsurface;                // JNI global ref to the Android Surface
    volatile bool  is_surface_need_reconfigure;
    bool         (*mediacodec_select_callback)(void *opaque, ijkmp_mediacodecinfo_context *mcc);
    void          *mediacodec_select_callback_opaque;
    SDL_Vout      *weak_vout;               // not owned; set via ffpipeline_set_vout
    float          left_volume;
    float          right_volume;
} IJKFF_Pipeline_Opaque;
2. 初始化
// (excerpt) The pipeline is created once, when the player itself is created.
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    //...
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    //...
}
// Construct the Android pipeline: allocates the struct + opaque block,
// initializes default volumes and the surface mutex, and binds the five
// pipeline "methods". Frees everything on failure.
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");
    // allocate pipeline plus its opaque block
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return pipeline;
    // initialize the private state
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp             = ffp;
    opaque->surface_mutex   = SDL_CreateMutex();
    opaque->left_volume     = 1.0f;
    opaque->right_volume    = 1.0f;
    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        goto fail;
    }
    // bind each pipeline function to the Android implementation
    pipeline->func_destroy              = func_destroy;
    pipeline->func_open_video_decoder   = func_open_video_decoder;
    pipeline->func_open_audio_output    = func_open_audio_output;
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    pipeline->func_config_video_decoder = func_config_video_decoder;
    return pipeline;
fail:
    ffpipeline_free_p(&pipeline);
    return NULL;
}
一個一個來看一下pipeline中的函數(shù)的作用:
2.1 func_destroy()
// Tear down the Android pipeline's private state: destroy the surface
// mutex, then (if a JNIEnv is available) drop the global ref to jsurface.
static void func_destroy(IJKFF_Pipeline *pipeline)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    JNIEnv *env = NULL;
    SDL_DestroyMutexP(&opaque->surface_mutex);
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("amediacodec-pipeline:destroy: SetupThreadEnv failed\n");
        goto fail;
    }
    // release the JNI global reference held on the Surface
    SDL_JNI_DeleteGlobalRefP(env, &opaque->jsurface);
fail:
    return;
}
// Delete the JNI global reference held in *obj_ptr and null the pointer.
// Safe to call with NULL or an already-cleared pointer.
void SDL_JNI_DeleteGlobalRefP(JNIEnv *env, jobject *obj_ptr)
{
    if (!obj_ptr || !*obj_ptr)
        return;
    // JNI call: release the global reference
    (*env)->DeleteGlobalRef(env, *obj_ptr);
    *obj_ptr = NULL;
}
2.2 func_open_video_decoder()
// Open the video decoder node: try MediaCodec (hardware) when any of the
// mediacodec options is enabled, and fall back to the ffplay (FFmpeg
// software) decoder when hardware creation is disabled or fails.
static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode *node = NULL;
    if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)
        // hardware decoder node (MediaCodec)
        node = ffpipenode_create_video_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);
    if (!node) {
        // software fallback: FFmpeg decoder via the ffplay path
        node = ffpipenode_create_video_decoder_from_ffplay(ffp);
    }
    return node;
}
这里很有意思，创建解码器，而返回的对象是 IJKFF_Pipenode，这是否说明 IJKFF_Pipenode 就是一个解码器的抽象？
2.3 func_open_audio_output
// Open the audio output: OpenSL ES only when the ffp->opensles option is
// set, otherwise the default AudioTrack backend; then apply the pipeline's
// stored stereo volume.
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    SDL_Aout *aout = NULL;
    if (ffp->opensles) {
        aout = SDL_AoutAndroid_CreateForOpenSLES();
    } else {
        // default path: Android AudioTrack-backed Aout
        aout = SDL_AoutAndroid_CreateForAudioTrack();
    }
    if (aout)
        SDL_AoutSetStereoVolume(aout, pipeline->opaque->left_volume, pipeline->opaque->right_volume);
    return aout;
}
// Allocate and wire up the AudioTrack-backed SDL_Aout (struct + opaque
// state + the backend's function-pointer implementations).
SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack()
{
    // allocate the SDL_Aout together with its opaque block
    SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
    if (!aout)
        return NULL;
    SDL_Aout_Opaque *opaque = aout->opaque;
    opaque->wakeup_cond  = SDL_CreateCond();    // wakes the audio thread out of pause
    opaque->wakeup_mutex = SDL_CreateMutex();
    opaque->speed        = 1.0f;                // normal playback rate
    aout->opaque_class = &g_audiotrack_class;
    aout->free_l       = aout_free_l;
    aout->open_audio   = aout_open_audio;
    aout->pause_audio  = aout_pause_audio;
    aout->flush_audio  = aout_flush_audio;
    aout->set_volume   = aout_set_volume;
    aout->close_audio  = aout_close_audio;
    aout->func_get_audio_session_id = aout_get_audio_session_id;
    aout->func_set_playback_rate    = func_set_playback_rate;
    return aout;
}
2.4 func_init_video_decoder
// Pre-initialize the video decoder: only the hardware (MediaCodec) path
// needs an init step; the FFmpeg software path returns NULL (no-op).
static IJKFF_Pipenode *func_init_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode *node = NULL;
    if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)
        // hardware decode requires an extra init; software does not
        node = ffpipenode_init_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);
    return node;
}
2.5 func_config_video_decoder
// Configure an already-created (MediaCodec) video decoder node. Returns the
// configuration result, or 0 when there is no video decoder node yet.
static int func_config_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    // Fix: the original read `int ret = NULL;` — NULL is a null *pointer*
    // constant, not an integer zero; initialize the int with 0 instead.
    int ret = 0;
    if (ffp->node_vdec) {
        ret = ffpipenode_config_from_android_mediacodec(ffp, pipeline, opaque->weak_vout, ffp->node_vdec);
    }
    return ret;
}
3. 使用
视频解码线程开始之前，用 ffpipeline_open_video_decoder 创建一个解码器。
// (excerpt) video branch of stream_component_open: initialize decoder
// state, create the decoder node through the pipeline, then start the
// video decode thread.
static int stream_component_open(FFPlayer *ffp, int stream_index)
{
    //...
    case AVMEDIA_TYPE_VIDEO:
        // initialize the decoder state
        decoder_init(&is->viddec, avctx, &is->videoq, is->continue_read_thread);
        ffp->node_vdec = ffpipeline_open_video_decoder(ffp->pipeline, ffp);
        // start the decoder: spawns the video_thread worker
        if ((ret = decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec")) < 0)
            goto out;
    //...
}
IJKFF_Pipenode
这个称为管道节点的结构体包含的 func_run_sync() 函数是用来运行解码线程的。因此该结构体对底层 ffmpeg 来说，也是底层 ffmpeg 的一层抽象。
1. 結(jié)構(gòu)體
typedef struct IJKFF_Pipenode IJKFF_Pipenode;
// Decoder-node "interface": a concrete node (MediaCodec or ffplay/FFmpeg)
// binds these function pointers.
struct IJKFF_Pipenode {
    SDL_mutex *mutex;
    void *opaque;                               // implementation-private state
    void (*func_destroy) (IJKFF_Pipenode *node);
    int  (*func_run_sync)(IJKFF_Pipenode *node);    // runs the decode loop on the calling thread
    int  (*func_flush)   (IJKFF_Pipenode *node);    // optional
};
2. 初始化
// Construct the software (ffplay/FFmpeg) video decoder node: allocates the
// node + opaque state, binds its functions, and records the codec name and
// decoder type in the player's stats.
IJKFF_Pipenode *ffpipenode_create_video_decoder_from_ffplay(FFPlayer *ffp)
{
    // allocate the IJKFF_Pipenode plus its opaque block
    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    opaque->ffp = ffp;
    // bind this node's functions
    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;
    ffp_set_video_codec_info(ffp, AVCODEC_MODULE_NAME, avcodec_get_name(ffp->is->viddec.avctx->codec_id));
    ffp->stat.vdec_type = FFP_PROPV_DECODER_AVCODEC;
    return node;
}
// The software node owns no extra resources, so destroy is a no-op.
static void func_destroy(IJKFF_Pipenode *node)
{
    // do nothing
}
// Software node's decode loop: delegates to the ffplay video thread.
static int func_run_sync(IJKFF_Pipenode *node)
{
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    return ffp_video_thread(opaque->ffp);
}
// Thin public wrapper around the internal ffplay video thread loop.
int ffp_video_thread(FFPlayer *ffp)
{
    return ffplay_video_thread(ffp);
}
// Software video decode loop: pulls decoded AVFrames, and either (a) in
// get_frame_mode converts selected frames to images at target timestamps,
// or (b) computes pts/duration and queues each frame into the picture
// (render) queue. Runs until error/EOF; frees the frame on exit.
static int ffplay_video_thread(void *arg)
{
    FFPlayer *ffp = arg;
    VideoState *is = ffp->is;
    AVFrame *frame = av_frame_alloc();
    double pts;
    double duration;
    int ret;
    AVRational tb = is->video_st->time_base;            // stream time base for pts conversion
    AVRational frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    int64_t dst_pts = -1;           // next target timestamp in image-grab mode
    int64_t last_dst_pts = -1;
    int retry_convert_image = 0;
    int convert_frame_count = 0;
    ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, ffp_get_video_rotate_degrees(ffp));
    if (!frame) {
        return AVERROR(ENOMEM);
    }
    for (;;) {
        // fetch the next decoded AVFrame
        ret = get_video_frame(ffp, frame);
        if (ret < 0)
            goto the_end;
        if (!ret)
            continue;
        if (ffp->get_frame_mode) {
            // image-grab mode: convert frames at evenly spaced timestamps
            if (!ffp->get_img_info || ffp->get_img_info->count <= 0) {
                av_frame_unref(frame);
                continue;
            }
            last_dst_pts = dst_pts;
            if (dst_pts < 0) {
                dst_pts = ffp->get_img_info->start_time;
            } else {
                // advance to the next evenly spaced grab timestamp
                dst_pts += (ffp->get_img_info->end_time - ffp->get_img_info->start_time) / (ffp->get_img_info->num - 1);
            }
            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            pts = pts * 1000;       // seconds -> milliseconds
            if (pts >= dst_pts) {
                // convert with bounded retries
                while (retry_convert_image <= MAX_RETRY_CONVERT_IMAGE) {
                    ret = convert_image(ffp, frame, (int64_t)pts, frame->width, frame->height);
                    if (!ret) {
                        convert_frame_count++;
                        break;
                    }
                    retry_convert_image++;
                    av_log(NULL, AV_LOG_ERROR, "convert image error retry_convert_image = %d\n", retry_convert_image);
                }
                retry_convert_image = 0;
                if (ret || ffp->get_img_info->count <= 0) {
                    if (ret) {
                        av_log(NULL, AV_LOG_ERROR, "convert image abort ret = %d\n", ret);
                        ffp_notify_msg3(ffp, FFP_MSG_GET_IMG_STATE, 0, ret);
                    } else {
                        av_log(NULL, AV_LOG_INFO, "convert image complete convert_frame_count = %d\n", convert_frame_count);
                    }
                    goto the_end;
                }
            } else {
                // frame precedes the target timestamp: keep the old target
                dst_pts = last_dst_pts;
            }
            av_frame_unref(frame);
            continue;
        }
        // normal playback: derive duration from frame rate, pts from time base
        duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
        pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
        // enqueue the frame into the picture (render) queue
        ret = queue_picture(ffp, frame, pts, duration, frame->pkt_pos, is->viddec.pkt_serial);
        av_frame_unref(frame);
        if (ret < 0)
            goto the_end;
    }
the_end:
    av_log(NULL, AV_LOG_INFO, "convert image convert_frame_count = %d\n", convert_frame_count);
    av_frame_free(&frame);
    return 0;
}
3. 使用
主要就是它的 func_run_sync() 方法被用来解码视频帧并入队渲染队列。这个操作发生在：
// Video thread entry: runs the pipeline node's synchronous decode loop
// (hardware or software, depending on how node_vdec was created).
static int video_thread(void *arg)
{
    FFPlayer *ffp = (FFPlayer *)arg;
    int ret = 0;
    // only decode when a decoder node exists
    if (ffp->node_vdec) {
        // run the node's decode loop on this thread
        ret = ffpipenode_run_sync(ffp->node_vdec);
    }
    return ret;
}
那么我們來看一下解碼的時(shí)候他的邏輯:
// ijkmedia/ijkplayer/pipeline/ffpipenode_ffplay_vdec.c
// Software node's decode loop: delegates to the ffplay video thread.
static int func_run_sync(IJKFF_Pipenode *node)
{
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    return ffp_video_thread(opaque->ffp);
}
// ijkmedia/ijkplayer/ff_ffplay.c
// Thin public wrapper around the internal ffplay video thread loop.
int ffp_video_thread(FFPlayer *ffp)
{
    return ffplay_video_thread(ffp);
}