Understanding ijkplayer (6): Vout, Aout, and FFPipeline

// ijkmedia/ijkplayer/ff_ffplay_def.h
typedef struct FFPlayer {
    //...
    SDL_Aout *aout;
    SDL_Vout *vout;
    struct IJKFF_Pipeline *pipeline;
    struct IJKFF_Pipenode *node_vdec;
    //...
} FFPlayer;

What this article covers:

An analysis of the roles that SDL_Aout, SDL_Vout, IJKFF_Pipeline, and IJKFF_Pipenode play in the FFPlayer struct.

In my view, these four structs exist to implement polymorphism; see the IBM tutorial "技巧:用 C 語言實(shí)現(xiàn)程序的多態(tài)性" (Tip: Implementing program polymorphism in the C language).

The code also makes heavy use of "opaque" structs and pointers such as SDL_Vout_Opaque, which exist to encapsulate state and hide implementation details from callers.

It must be said that ijkplayer embodies object-oriented thinking very well, even in plain C.
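
To make the pattern concrete, here is a minimal, self-contained sketch of "polymorphism" in C (illustrative code, not taken from ijkplayer): a base struct holds function pointers, and each concrete implementation fills them in, which is exactly the shape SDL_Vout and SDL_Aout take below.

#include <stdio.h>
#include <stdlib.h>

typedef struct Output Output;
struct Output {
    void *opaque;                  // per-implementation private state
    void (*display)(Output *self); // "virtual" method
    void (*free_l)(Output *self);
};

static void ffmpeg_display(Output *self)     { (void)self; printf("software render\n"); }
static void mediacodec_display(Output *self) { (void)self; printf("hardware render\n"); }

static void output_free_l(Output *self)
{
    free(self->opaque);
    free(self);
}

// "Constructor" that picks the concrete implementation at runtime
static Output *output_create(int use_hw)
{
    Output *out = calloc(1, sizeof(Output));
    if (!out)
        return NULL;
    out->display = use_hw ? mediacodec_display : ffmpeg_display;
    out->free_l  = output_free_l;
    return out;
}

int main(void)
{
    Output *out = output_create(0);
    if (!out)
        return 1;
    out->display(out); // dispatches like a virtual call
    out->free_l(out);
    return 0;
}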

SDL_Vout

1. Structs

1.1 SDL_Vout
//  ijkmedia/ijksdl/ijksdl_vout.h
struct SDL_Vout {
    SDL_mutex *mutex;

    SDL_Class       *opaque_class;
    SDL_Vout_Opaque *opaque;
    // Create an overlay, i.e. an SDL_VoutOverlay
    SDL_VoutOverlay *(*create_overlay)(int width, int height, int frame_format, SDL_Vout *vout);
    // Free the vout
    void (*free_l)(SDL_Vout *vout);
    // Display an overlay
    int (*display_overlay)(SDL_Vout *vout, SDL_VoutOverlay *overlay);
    // Overlay pixel format
    Uint32 overlay_format;
};
1.2 SDL_Vout_Opaque

The opaque struct plays a role like a private inner class in Java: it hides the type's internal logic from callers.

Let's look at the definition of SDL_Vout_Opaque:

The typedef only declares an abstraction (an incomplete type), so where is its implementation?

//  ijkmedia/ijksdl/ijksdl_vout.h

// The ffmpeg-suffixed helper header is included here; it provides the FFmpeg types these interfaces use
#include "ffmpeg/ijksdl_inc_ffmpeg.h"

//...
typedef struct SDL_Vout_Opaque SDL_Vout_Opaque;
//...

The complete definition appears in two places, one used for hardware decoding and one for software decoding. (Strictly speaking, the two definitions below are of the closely related SDL_VoutOverlay_Opaque; SDL_Vout_Opaque follows the same pattern, with its Android definition living alongside SDL_VoutAndroid_CreateForANativeWindow() further down.)

//  Hardware decoding
//  ijkmedia/ijksdl/android/ijksdl_vout_overlay_android_mediacodec.c
typedef struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;

    SDL_Vout                   *vout;
    SDL_AMediaCodec            *acodec;

    SDL_AMediaCodecBufferProxy *buffer_proxy;

    Uint16 pitches[AV_NUM_DATA_POINTERS];
    Uint8 *pixels[AV_NUM_DATA_POINTERS];
} SDL_VoutOverlay_Opaque;
// Software decoding
//  ijkmedia/ijksdl/ffmpeg/ijksdl_vout_overlay_ffmpeg.c
struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;

    AVFrame *managed_frame;
    AVBufferRef *frame_buffer;
    int planes;

    AVFrame *linked_frame;

    Uint16 pitches[AV_NUM_DATA_POINTERS];
    Uint8 *pixels[AV_NUM_DATA_POINTERS];

    int no_neon_warned;

    struct SwsContext *img_convert_ctx;
    int sws_flags;
};

So which of the two definitions is in effect? It depends on which translation unit creates and uses the overlay: the header only carries the opaque forward declaration, and each .c file dereferences its own complete definition. The MediaCodec flavor is used on the hardware-decode path and the FFmpeg flavor on the software-decode path.
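
A minimal sketch of how that works across translation units (illustrative file names, not ijkplayer code): the header exposes only an incomplete type, and each .c file supplies its own full definition that nothing else can see.

/* widget.h: callers see only an incomplete (opaque) type */
typedef struct Widget_Opaque Widget_Opaque;

/* widget_soft.c: one complete definition, private to this file */
struct Widget_Opaque {
    int sws_flags;      /* software-path state */
};

/* widget_hard.c: a different complete definition, private to this file */
struct Widget_Opaque {
    void *codec_handle; /* hardware-path state */
};

/* Only the .c file that defines the struct ever dereferences it, so the two
   layouts never clash; everyone else just passes the opaque pointer around. */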

1.3 SDL_Class

It's not yet clear what this is for; it only stores a name string.

typedef struct SDL_Class {
    const char *name;
} SDL_Class;
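
One plausible reading (my assumption; the header does not say): opaque_class acts as a lightweight runtime type tag, letting code that holds only a generic pointer test which concrete implementation it has. A hypothetical check in that style, reusing the g_nativewindow_class instance that appears below:

static int vout_is_native_window(SDL_Vout *vout)
{
    // Comparing the class pointer (or its name string) identifies the concrete "subclass"
    return vout && vout->opaque_class == &g_nativewindow_class;
}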
1.4 SDL_VoutOverlay
struct SDL_VoutOverlay {
    int w; /**< Read-only */
    int h; /**< Read-only */
    Uint32 format; /**< Read-only */
    int planes; /**< Read-only */
    Uint16 *pitches; /**< in bytes, Read-only */
    Uint8 **pixels; /**< Read-write */

    int is_private;

    int sar_num;
    int sar_den;

    SDL_Class               *opaque_class;
    SDL_VoutOverlay_Opaque  *opaque;

    void    (*free_l)(SDL_VoutOverlay *overlay);
    int     (*lock)(SDL_VoutOverlay *overlay);
    int     (*unlock)(SDL_VoutOverlay *overlay);
    void    (*unref)(SDL_VoutOverlay *overlay);

    int     (*func_fill_frame)(SDL_VoutOverlay *overlay, const AVFrame *frame);
};
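
Putting the function pointers together, a consumer would drive an overlay roughly like this (a sketch of the calling pattern only, with error handling omitted; vout and the decoded frame are assumed to exist):

SDL_VoutOverlay *overlay = vout->create_overlay(frame->width, frame->height,
                                                frame->format, vout);
overlay->lock(overlay);                   // protect pixels/pitches
overlay->func_fill_frame(overlay, frame); // copy or attach the decoded AVFrame
overlay->unlock(overlay);

vout->display_overlay(vout, overlay);     // hand it to the display path
overlay->free_l(overlay);                 // release when done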
1.5 SDL_VoutOverlay_Opaque
#include "ffmpeg/ijksdl_inc_ffmpeg.h"

typedef struct SDL_VoutOverlay_Opaque SDL_VoutOverlay_Opaque;

Software decoding:

//  ijkmedia/ijksdl/ffmpeg/ijksdl_vout_overlay_ffmpeg.c
struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;

    AVFrame *managed_frame;
    AVBufferRef *frame_buffer;
    int planes;

    AVFrame *linked_frame;

    Uint16 pitches[AV_NUM_DATA_POINTERS];
    Uint8 *pixels[AV_NUM_DATA_POINTERS];

    int no_neon_warned;

    struct SwsContext *img_convert_ctx;
    int sws_flags;
};

Hardware decoding:

typedef struct SDL_VoutOverlay_Opaque {
    SDL_mutex *mutex;

    SDL_Vout                   *vout;
    SDL_AMediaCodec            *acodec;

    SDL_AMediaCodecBufferProxy *buffer_proxy;

    Uint16 pitches[AV_NUM_DATA_POINTERS];
    Uint8 *pixels[AV_NUM_DATA_POINTERS];
} SDL_VoutOverlay_Opaque;

The same pattern applies here: each decode path uses the definition from its own translation unit.

2. Initialization and usage

The vout is created in ijkmp_android_create() when the Android player is constructed:

IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    // Create the IjkMediaPlayer
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;
    // Create the video output device; overlays are later created with
    // MediaCodec for hardware decoding or FFmpeg for software decoding
    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;
    // Create the pipeline
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;
    // Hand the newly created vout to ffplayer->pipeline
    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);

    return mp;

fail:
    ijkmp_dec_ref_p(&mp);
    return NULL;
}

mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();

SDL_Vout *SDL_VoutAndroid_CreateForAndroidSurface()
{
    return SDL_VoutAndroid_CreateForANativeWindow();
}
SDL_Vout *SDL_VoutAndroid_CreateForANativeWindow()
{
    // Create the SDL_Vout
    SDL_Vout *vout = SDL_Vout_CreateInternal(sizeof(SDL_Vout_Opaque));
    if (!vout)
        return NULL;

    SDL_Vout_Opaque *opaque = vout->opaque;
    opaque->native_window = NULL;
    if (ISDL_Array__init(&opaque->overlay_manager, 32))
        goto fail;
    if (ISDL_Array__init(&opaque->overlay_pool, 32))
        goto fail;
    // Create the EGL wrapper
    opaque->egl = IJK_EGL_create();
    if (!opaque->egl)
        goto fail;
    // Wire up vout's function pointers
    vout->opaque_class    = &g_nativewindow_class;
    vout->create_overlay  = func_create_overlay;
    vout->free_l          = func_free_l;
    vout->display_overlay = func_display_overlay;

    return vout;
fail:
    func_free_l(vout);
    return NULL;
}
inline static SDL_Vout *SDL_Vout_CreateInternal(size_t opaque_size)
{
    // Allocate the vout
    SDL_Vout *vout = (SDL_Vout*) calloc(1, sizeof(SDL_Vout));
    if (!vout)
        return NULL;
    // Allocate the opaque struct
    vout->opaque = calloc(1, opaque_size);
    if (!vout->opaque) {
        free(vout);
        return NULL;
    }
    // Create the mutex
    vout->mutex = SDL_CreateMutex();
    if (vout->mutex == NULL) {
        free(vout->opaque);
        free(vout);
        return NULL;
    }

    return vout;
}

Next, let's look at the functions assigned to vout's three function pointers:

vout->create_overlay  = func_create_overlay;
vout->free_l          = func_free_l;
vout->display_overlay = func_display_overlay;
static SDL_VoutOverlay *func_create_overlay(int width, int height, int frame_format, SDL_Vout *vout)
{
    SDL_LockMutex(vout->mutex);
    // Create the SDL_VoutOverlay while holding the lock
    SDL_VoutOverlay *overlay = func_create_overlay_l(width, height, frame_format, vout);
    SDL_UnlockMutex(vout->mutex);
    return overlay;
}
static SDL_VoutOverlay *func_create_overlay_l(int width, int height, int frame_format, SDL_Vout *vout)
{
    switch (frame_format) {
    case IJK_AV_PIX_FMT__ANDROID_MEDIACODEC:
        // If the frame format is IJK_AV_PIX_FMT__ANDROID_MEDIACODEC, create a hardware (MediaCodec) overlay
        return SDL_VoutAMediaCodec_CreateOverlay(width, height, vout);
    default:
        // Otherwise create a software (FFmpeg) overlay
        return SDL_VoutFFmpeg_CreateOverlay(width, height, frame_format, vout);
    }
}
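
The display side is dispatched the same way: the render loop goes through a thin wrapper in ijksdl_vout.c that forwards to whichever display_overlay was installed above (quoted from memory, so treat the details as approximate):

//  ijkmedia/ijksdl/ijksdl_vout.c (approximate)
int SDL_VoutDisplayYUVOverlay(SDL_Vout *vout, SDL_VoutOverlay *overlay)
{
    if (vout && overlay && vout->display_overlay)
        return vout->display_overlay(vout, overlay);
    return -1;
}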

SDL_Aout

1. Struct

typedef struct SDL_Aout SDL_Aout;
struct SDL_Aout {
    SDL_mutex *mutex;
    double     minimal_latency_seconds;

    SDL_Class       *opaque_class;
    SDL_Aout_Opaque *opaque;
    void (*free_l)(SDL_Aout *vout);
    int (*open_audio)(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained);
    void (*pause_audio)(SDL_Aout *aout, int pause_on);
    void (*flush_audio)(SDL_Aout *aout);
    void (*set_volume)(SDL_Aout *aout, float left, float right);
    void (*close_audio)(SDL_Aout *aout);

    double (*func_get_latency_seconds)(SDL_Aout *aout);
    void   (*func_set_default_latency_seconds)(SDL_Aout *aout, double latency);

    // optional
    void   (*func_set_playback_rate)(SDL_Aout *aout, float playbackRate);
    void   (*func_set_playback_volume)(SDL_Aout *aout, float playbackVolume);
    int    (*func_get_audio_persecond_callbacks)(SDL_Aout *aout);

    // Android only
    int    (*func_get_audio_session_id)(SDL_Aout *aout);
};
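
As with SDL_Vout, callers never touch these function pointers directly; ijksdl_aout.c exposes thin wrappers that forward through them. An approximate sketch of two of those wrappers:

//  ijkmedia/ijksdl/ijksdl_aout.c (approximate)
int SDL_AoutOpenAudio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    if (aout && desired && aout->open_audio)
        return aout->open_audio(aout, desired, obtained);
    return -1;
}

void SDL_AoutPauseAudio(SDL_Aout *aout, int pause_on)
{
    if (aout && aout->pause_audio)
        aout->pause_audio(aout, pause_on);
}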

2. Initialization

In the ffp_prepare_async_l() function:

int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    assert(!ffp->is);
    assert(file_name);
    // For the rtmp and rtsp protocols, remove the 'timeout' option
    if (av_stristart(file_name, "rtmp", NULL) ||
        av_stristart(file_name, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }

    /* there is a length limit in avformat */
    if (strlen(file_name) + 1 > 1024) {
        av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
        if (avio_find_protocol_name("ijklongurl:")) {
            av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
            file_name = "ijklongurl:";
        }
    }
    // Print version info
    av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
    ffp_show_version_str(ffp, "ijkplayer",      ijk_version_info());
    ffp_show_version_str(ffp, "FFmpeg",         av_version_info());
    ffp_show_version_int(ffp, "libavutil",      avutil_version());
    ffp_show_version_int(ffp, "libavcodec",     avcodec_version());
    ffp_show_version_int(ffp, "libavformat",    avformat_version());
    ffp_show_version_int(ffp, "libswscale",     swscale_version());
    ffp_show_version_int(ffp, "libswresample",  swresample_version());
    av_log(NULL, AV_LOG_INFO, "===== options =====\n");
    ffp_show_dict(ffp, "player-opts", ffp->player_opts);
    ffp_show_dict(ffp, "format-opts", ffp->format_opts);
    ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
    ffp_show_dict(ffp, "sws-opts   ", ffp->sws_dict);
    ffp_show_dict(ffp, "swr-opts   ", ffp->swr_opts);
    av_log(NULL, AV_LOG_INFO, "===================\n");
    // Apply player options
    av_opt_set_dict(ffp, &ffp->player_opts);
    // If ffp->aout is NULL, open the audio output device. Nothing in the earlier
    // initialization assigns it, so on the first call this branch is always taken.
    if (!ffp->aout) {
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }

#if CONFIG_AVFILTER
    if (ffp->vfilter0) {
        GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
        ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
    }
#endif

    VideoState *is = stream_open(ffp, file_name, NULL);
    if (!is) {
        av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
        return EIJK_OUT_OF_MEMORY;
    }

    ffp->is = is;
    ffp->input_filename = av_strdup(file_name);
    return 0;
}

That is, this part:

// If ffp->aout is NULL, open the audio output device; nothing earlier assigns it, so the first call always takes this branch
    if (!ffp->aout) {
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }
SDL_Aout *ffpipeline_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    // Delegate to the pipeline's method
    return pipeline->func_open_audio_output(pipeline, ffp);
}

This goes through an IJKFF_Pipeline method; the IJKFF_Pipeline itself was created back when the player was created.

static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    SDL_Aout *aout = NULL;
    if (ffp->opensles) {
        aout = SDL_AoutAndroid_CreateForOpenSLES();
    } else {
        // OpenSL ES is rarely used; by default the aout is created with Android's AudioTrack
        aout = SDL_AoutAndroid_CreateForAudioTrack();
    }
    if (aout)
        SDL_AoutSetStereoVolume(aout, pipeline->opaque->left_volume, pipeline->opaque->right_volume);
    return aout;
}
SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack()
{
    SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
    if (!aout)
        return NULL;

    SDL_Aout_Opaque *opaque = aout->opaque;
    opaque->wakeup_cond  = SDL_CreateCond();
    opaque->wakeup_mutex = SDL_CreateMutex();
    opaque->speed        = 1.0f;

    aout->opaque_class = &g_audiotrack_class;
    aout->free_l       = aout_free_l;
    aout->open_audio   = aout_open_audio;
    aout->pause_audio  = aout_pause_audio;
    aout->flush_audio  = aout_flush_audio;
    aout->set_volume   = aout_set_volume;
    aout->close_audio  = aout_close_audio;
    aout->func_get_audio_session_id = aout_get_audio_session_id;
    aout->func_set_playback_rate    = func_set_playback_rate;

    return aout;
}

Now let's take a look at the aout->open_audio function:

static int aout_open_audio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    // SDL_Aout_Opaque *opaque = aout->opaque;
    JNIEnv *env = NULL;
    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("aout_open_audio: AttachCurrentThread: failed");
        return -1;
    }

    return aout_open_audio_n(env, aout, desired, obtained);
}
static int aout_open_audio_n(JNIEnv *env, SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
    assert(desired);
    SDL_Aout_Opaque *opaque = aout->opaque;

    opaque->spec = *desired;
    opaque->atrack = SDL_Android_AudioTrack_new_from_sdl_spec(env, desired);
    if (!opaque->atrack) {
        ALOGE("aout_open_audio_n: failed to new AudioTrcak()");
        return -1;
    }

    opaque->buffer_size = SDL_Android_AudioTrack_get_min_buffer_size(opaque->atrack);
    if (opaque->buffer_size <= 0) {
        ALOGE("aout_open_audio_n: failed to getMinBufferSize()");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    opaque->buffer = malloc(opaque->buffer_size);
    if (!opaque->buffer) {
        ALOGE("aout_open_audio_n: failed to allocate buffer");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    if (obtained) {
        SDL_Android_AudioTrack_get_target_spec(opaque->atrack, obtained);
        SDLTRACE("audio target format fmt:0x%x, channel:0x%x", (int)obtained->format, (int)obtained->channels);
    }

    opaque->audio_session_id = SDL_Android_AudioTrack_getAudioSessionId(env, opaque->atrack);
    ALOGI("audio_session_id = %d\n", opaque->audio_session_id);

    opaque->pause_on = 1;
    opaque->abort_request = 0;
    // Create the audio output thread
    opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_android");
    if (!opaque->audio_tid) {
        ALOGE("aout_open_audio_n: failed to create audio thread");
        SDL_Android_AudioTrack_free(env, opaque->atrack);
        opaque->atrack = NULL;
        return -1;
    }

    return 0;
}

Now let's see what this audio output thread does:

static int aout_thread(void *arg)
{
    SDL_Aout *aout = arg;
    // SDL_Aout_Opaque *opaque = aout->opaque;
    JNIEnv *env = NULL;

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("aout_thread: SDL_AndroidJni_SetupEnv: failed");
        return -1;
    }

    return aout_thread_n(env, aout);
}
static int aout_thread_n(JNIEnv *env, SDL_Aout *aout)
{
    SDL_Aout_Opaque *opaque = aout->opaque;
    SDL_Android_AudioTrack *atrack = opaque->atrack;
    SDL_AudioCallback audio_cblk = opaque->spec.callback;
    void *userdata = opaque->spec.userdata;
    uint8_t *buffer = opaque->buffer;
    int copy_size = 256;

    assert(atrack);
    assert(buffer);

    SDL_SetThreadPriority(SDL_THREAD_PRIORITY_HIGH);

    if (!opaque->abort_request && !opaque->pause_on)
        SDL_Android_AudioTrack_play(env, atrack);
    // Loop until an abort is requested
    while (!opaque->abort_request) {
        SDL_LockMutex(opaque->wakeup_mutex);
        if (!opaque->abort_request && opaque->pause_on) {
            // Pause playback
            SDL_Android_AudioTrack_pause(env, atrack);
            while (!opaque->abort_request && opaque->pause_on) {
                SDL_CondWaitTimeout(opaque->wakeup_cond, opaque->wakeup_mutex, 1000);
            }
            if (!opaque->abort_request && !opaque->pause_on) {
                if (opaque->need_flush) {
                    opaque->need_flush = 0;
                    //flush
                    SDL_Android_AudioTrack_flush(env, atrack);
                }
                // Resume playback
                SDL_Android_AudioTrack_play(env, atrack);
            }
        }
        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        }
        if (opaque->need_set_volume) {
            opaque->need_set_volume = 0;
            SDL_Android_AudioTrack_set_volume(env, atrack, opaque->left_volume, opaque->right_volume);
        }
        if (opaque->speed_changed) {
            opaque->speed_changed = 0;
            SDL_Android_AudioTrack_setSpeed(env, atrack, opaque->speed);
        }
        SDL_UnlockMutex(opaque->wakeup_mutex);

        audio_cblk(userdata, buffer, copy_size);
        if (opaque->need_flush) {
            SDL_Android_AudioTrack_flush(env, atrack);
            opaque->need_flush = false;
        }

        if (opaque->need_flush) {
            opaque->need_flush = 0;
            SDL_Android_AudioTrack_flush(env, atrack);
        } else {
            int written = SDL_Android_AudioTrack_write(env, atrack, buffer, copy_size);
            if (written != copy_size) {
                ALOGW("AudioTrack: not all data copied %d/%d", (int)written, (int)copy_size);
            }
        }

        // TODO: 1 if callback return -1 or 0
    }

    SDL_Android_AudioTrack_free(env, atrack);
    return 0;
}

Let's see what the play call does:

void SDL_Android_AudioTrack_play(JNIEnv *env, SDL_Android_AudioTrack *atrack)
{
    SDLTRACE("%s", __func__);
    J4AC_AudioTrack__play__catchAll(env, atrack->thiz);
}
// ijkmedia/ijkj4a/j4a/class/android/media/AudioTrack.h
#define J4AC_AudioTrack__play__catchAll J4AC_android_media_AudioTrack__play__catchAll
void J4AC_android_media_AudioTrack__play__catchAll(JNIEnv *env, jobject thiz)
{
    J4AC_android_media_AudioTrack__play(env, thiz);
    J4A_ExceptionCheck__catchAll(env);
}
void J4AC_android_media_AudioTrack__play(JNIEnv *env, jobject thiz)
{
    // A JNI call: C invoking the Java method AudioTrack#play()
    (*env)->CallVoidMethod(env, thiz, class_J4AC_android_media_AudioTrack.method_play);
}

And class_J4AC_android_media_AudioTrack.method_play is looked up here:

//  ijkmedia/ijkj4a/j4a/class/android/media/AudioTrack.c
int J4A_loadClass__J4AC_android_media_AudioTrack(JNIEnv *env)
{
    //...
    class_id = class_J4AC_android_media_AudioTrack.id;
    name     = "play";
    sign     = "()V";
    class_J4AC_android_media_AudioTrack.method_play = J4A_GetMethodID__catchAll(env, class_id, name, sign);
    //...
}

So it is indeed calling AudioTrack#play() in the Java layer.

3. Usage

//  in stream_component_open(), ff_ffplay.c (excerpt)
switch (avctx->codec_type) {
case AVMEDIA_TYPE_AUDIO:
    // audio_open() eventually reaches AudioTrack.java#play()
    if ((ret = audio_open(ffp, channel_layout, nb_channels, sample_rate, &is->audio_tgt)) < 0)
        goto fail;
    //...
    // Initialize the decoder
    decoder_init(&is->auddec, avctx, &is->audioq, is->continue_read_thread);
    // Start the decoder, spawning the audio_thread
    if ((ret = decoder_start(&is->auddec, audio_thread, ffp, "ff_audio_dec")) < 0)
        goto out;
    //...
}
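
For context, audio_open() in ff_ffplay.c is where the SDL_AudioSpec callback is installed before the aout is opened; that callback is what aout_thread_n() above invokes as audio_cblk to pull decoded samples into its buffer. A simplified sketch of the relevant lines:

//  ff_ffplay.c (simplified sketch)
static int audio_open(FFPlayer *ffp, int64_t wanted_channel_layout, int wanted_nb_channels,
                      int wanted_sample_rate, struct AudioParams *audio_hw_params)
{
    SDL_AudioSpec wanted_spec, spec;
    //...
    wanted_spec.callback = sdl_audio_callback; // fills `buffer` on each pass of aout_thread_n()
    wanted_spec.userdata = ffp;
    if (SDL_AoutOpenAudio(ffp->aout, &wanted_spec, &spec) < 0) // lands in aout_open_audio()
        return -1;
    //...
    return spec.size;
}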

IJKFF_Pipeline

1. Structs

1.1 IJKFF_Pipeline
typedef struct IJKFF_Pipeline IJKFF_Pipeline;
struct IJKFF_Pipeline {
    SDL_Class             *opaque_class;
    IJKFF_Pipeline_Opaque *opaque;
    // Destroy the pipeline
    void            (*func_destroy)             (IJKFF_Pipeline *pipeline);
    // Open the video decoder
    IJKFF_Pipenode *(*func_open_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    // Open the audio output (note: an output, not a decoder)
    SDL_Aout       *(*func_open_audio_output)   (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    // Initialize the video decoder
    IJKFF_Pipenode *(*func_init_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
    // Configure the video decoder
    int           (*func_config_video_decoder)  (IJKFF_Pipeline *pipeline, FFPlayer *ffp);
};
1.2 IJKFF_Pipeline_Opaque
typedef struct IJKFF_Pipeline_Opaque {
    FFPlayer      *ffp;
    SDL_mutex     *surface_mutex;
    jobject        jsurface;
    volatile bool  is_surface_need_reconfigure;

    bool         (*mediacodec_select_callback)(void *opaque, ijkmp_mediacodecinfo_context *mcc);
    void          *mediacodec_select_callback_opaque;

    SDL_Vout      *weak_vout;

    float          left_volume;
    float          right_volume;
} IJKFF_Pipeline_Opaque;
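
The weak_vout field here is what ffpipeline_set_vout() fills in back in ijkmp_android_create(); it is "weak" in the sense that the pipeline borrows the vout rather than owning it. Roughly (an approximate sketch; the real signature may differ slightly):

//  ijkmedia/ijkplayer/pipeline/ffpipeline_android.c (approximate)
int ffpipeline_set_vout(IJKFF_Pipeline *pipeline, SDL_Vout *vout)
{
    if (!pipeline || !pipeline->opaque)
        return -1;
    pipeline->opaque->weak_vout = vout; // borrowed reference, not owned
    return 0;
}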

2. Initialization

IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
        //...
    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    //...
}
IJKFF_Pipeline *ffpipeline_create_from_android(FFPlayer *ffp)
{
    ALOGD("ffpipeline_create_from_android()\n");
    // Allocate memory
    IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
    if (!pipeline)
        return pipeline;
    // Initialize the opaque struct
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    opaque->ffp                   = ffp;
    opaque->surface_mutex         = SDL_CreateMutex();
    opaque->left_volume           = 1.0f;
    opaque->right_volume          = 1.0f;
    if (!opaque->surface_mutex) {
        ALOGE("ffpipeline-android:create SDL_CreateMutex failed\n");
        goto fail;
    }
    // Wire up each of the pipeline's function pointers
    pipeline->func_destroy              = func_destroy;
    pipeline->func_open_video_decoder   = func_open_video_decoder;
    pipeline->func_open_audio_output    = func_open_audio_output;
    pipeline->func_init_video_decoder   = func_init_video_decoder;
    pipeline->func_config_video_decoder = func_config_video_decoder;

    return pipeline;
fail:
    ffpipeline_free_p(&pipeline);
    return NULL;
}

Let's look at the pipeline's functions one by one:

2.1 func_destroy()
static void func_destroy(IJKFF_Pipeline *pipeline)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    JNIEnv *env = NULL;

    SDL_DestroyMutexP(&opaque->surface_mutex);

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("amediacodec-pipeline:destroy: SetupThreadEnv failed\n");
        goto fail;
    }
    // Delete the global reference to IJKFF_Pipeline_Opaque.jsurface
    SDL_JNI_DeleteGlobalRefP(env, &opaque->jsurface);
fail:
    return;
}
// Using env, delete the JNI global reference held in *obj_ptr
void SDL_JNI_DeleteGlobalRefP(JNIEnv *env, jobject *obj_ptr)
{
    if (!obj_ptr || !*obj_ptr)
        return;
    // JNI call that deletes the global reference
    (*env)->DeleteGlobalRef(env, *obj_ptr);
    *obj_ptr = NULL;
}
2.2 func_open_video_decoder()
static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode        *node = NULL;

    if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)
        // Create a hardware (MediaCodec) decoder
        node = ffpipenode_create_video_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);
    if (!node) {
        // Fall back to the ffplay decoder, i.e. FFmpeg software decoding
        node = ffpipenode_create_video_decoder_from_ffplay(ffp);
    }

    return node;
}

This is interesting: we open a decoder, and the returned object is an IJKFF_Pipenode. Doesn't that suggest an IJKFF_Pipenode is precisely an abstraction over a decoder?
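
For completeness, the call that reaches func_open_video_decoder() is itself just a one-line dispatch wrapper (approximate sketch):

//  ijkmedia/ijkplayer/ff_ffpipeline.c (approximate)
IJKFF_Pipenode *ffpipeline_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    return pipeline->func_open_video_decoder(pipeline, ffp);
}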

2.3 func_open_audio_output
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    SDL_Aout *aout = NULL;
    if (ffp->opensles) {
        aout = SDL_AoutAndroid_CreateForOpenSLES();
    } else {
        // OpenSL ES is rarely used; by default the aout is created with Android's AudioTrack
        aout = SDL_AoutAndroid_CreateForAudioTrack();
    }
    if (aout)
        SDL_AoutSetStereoVolume(aout, pipeline->opaque->left_volume, pipeline->opaque->right_volume);
    return aout;
}
SDL_Aout *SDL_AoutAndroid_CreateForAudioTrack()
{
    // The SDL_Aout struct is allocated and set up here
    SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
    if (!aout)
        return NULL;

    SDL_Aout_Opaque *opaque = aout->opaque;
    opaque->wakeup_cond  = SDL_CreateCond();
    opaque->wakeup_mutex = SDL_CreateMutex();
    opaque->speed        = 1.0f;

    aout->opaque_class = &g_audiotrack_class;
    aout->free_l       = aout_free_l;
    aout->open_audio   = aout_open_audio;
    aout->pause_audio  = aout_pause_audio;
    aout->flush_audio  = aout_flush_audio;
    aout->set_volume   = aout_set_volume;
    aout->close_audio  = aout_close_audio;
    aout->func_get_audio_session_id = aout_get_audio_session_id;
    aout->func_set_playback_rate    = func_set_playback_rate;

    return aout;
}
2.4 func_init_video_decoder
static IJKFF_Pipenode *func_init_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    IJKFF_Pipenode        *node = NULL;

    if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)
        // Hardware (MediaCodec) decoding needs this extra initialization step; FFmpeg software decoding does not
        node = ffpipenode_init_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);

    return node;
}
2.5 func_config_video_decoder
static int func_config_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;
    int                       ret = NULL;

    if (ffp->node_vdec) {
        ret = ffpipenode_config_from_android_mediacodec(ffp, pipeline, opaque->weak_vout, ffp->node_vdec);
    }

    return ret;
}

3. Usage

Before the video decoding thread starts, ffpipeline_open_video_decoder() is used to create a decoder:

static int stream_component_open(FFPlayer *ffp, int stream_index)
{
    //...
    case AVMEDIA_TYPE_VIDEO:
        // Initialize the decoder
        decoder_init(&is->viddec, avctx, &is->videoq, is->continue_read_thread);
        ffp->node_vdec = ffpipeline_open_video_decoder(ffp->pipeline, ffp);
        // Start the decoder, spawning the video_thread
        if ((ret = decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec")) < 0)
            goto out;
    //...
}

IJKFF_Pipenode

This struct is called a pipeline node. Its func_run_sync() function is what runs the decoding loop, so the struct acts as yet another layer of abstraction over the underlying decoder, whether that is FFmpeg or MediaCodec.

1. Struct

typedef struct IJKFF_Pipenode IJKFF_Pipenode;
struct IJKFF_Pipenode {
    SDL_mutex *mutex;
    void *opaque;

    void (*func_destroy) (IJKFF_Pipenode *node);
    int  (*func_run_sync)(IJKFF_Pipenode *node);
    int  (*func_flush)   (IJKFF_Pipenode *node); // optional
};

2. Initialization

IJKFF_Pipenode *ffpipenode_create_video_decoder_from_ffplay(FFPlayer *ffp)
{
    // Allocate the IJKFF_Pipenode
    IJKFF_Pipenode *node = ffpipenode_alloc(sizeof(IJKFF_Pipenode_Opaque));
    if (!node)
        return node;

    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    opaque->ffp         = ffp;
    // Wire up the node's function pointers
    node->func_destroy  = func_destroy;
    node->func_run_sync = func_run_sync;

    ffp_set_video_codec_info(ffp, AVCODEC_MODULE_NAME, avcodec_get_name(ffp->is->viddec.avctx->codec_id));
    ffp->stat.vdec_type = FFP_PROPV_DECODER_AVCODEC;
    return node;
}
static void func_destroy(IJKFF_Pipenode *node)
{
    // do nothing
}
static int func_run_sync(IJKFF_Pipenode *node)
{
    IJKFF_Pipenode_Opaque *opaque = node->opaque;

    return ffp_video_thread(opaque->ffp);
}
int ffp_video_thread(FFPlayer *ffp)
{
    return ffplay_video_thread(ffp);
}
static int ffplay_video_thread(void *arg)
{
    FFPlayer *ffp = arg;
    VideoState *is = ffp->is;
    AVFrame *frame = av_frame_alloc();
    double pts;
    double duration;
    int ret;
    AVRational tb = is->video_st->time_base;
    AVRational frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    int64_t dst_pts = -1;
    int64_t last_dst_pts = -1;
    int retry_convert_image = 0;
    int convert_frame_count = 0;

    ffp_notify_msg2(ffp, FFP_MSG_VIDEO_ROTATION_CHANGED, ffp_get_video_rotate_degrees(ffp));

    if (!frame) {
        return AVERROR(ENOMEM);
    }

    for (;;) {
        // Fetch a decoded AVFrame
        ret = get_video_frame(ffp, frame);
        if (ret < 0)
            goto the_end;
        if (!ret)
            continue;

        if (ffp->get_frame_mode) {
            if (!ffp->get_img_info || ffp->get_img_info->count <= 0) {
                av_frame_unref(frame);
                continue;
            }

            last_dst_pts = dst_pts;

            if (dst_pts < 0) {
                dst_pts = ffp->get_img_info->start_time;
            } else {
                dst_pts += (ffp->get_img_info->end_time - ffp->get_img_info->start_time) / (ffp->get_img_info->num - 1);
            }

            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            pts = pts * 1000;
            if (pts >= dst_pts) {
                while (retry_convert_image <= MAX_RETRY_CONVERT_IMAGE) {
                    ret = convert_image(ffp, frame, (int64_t)pts, frame->width, frame->height);
                    if (!ret) {
                        convert_frame_count++;
                        break;
                    }
                    retry_convert_image++;
                    av_log(NULL, AV_LOG_ERROR, "convert image error retry_convert_image = %d\n", retry_convert_image);
                }

                retry_convert_image = 0;
                if (ret || ffp->get_img_info->count <= 0) {
                    if (ret) {
                        av_log(NULL, AV_LOG_ERROR, "convert image abort ret = %d\n", ret);
                        ffp_notify_msg3(ffp, FFP_MSG_GET_IMG_STATE, 0, ret);
                    } else {
                        av_log(NULL, AV_LOG_INFO, "convert image complete convert_frame_count = %d\n", convert_frame_count);
                    }
                    goto the_end;
                }
            } else {
                dst_pts = last_dst_pts;
            }
            av_frame_unref(frame);
            continue;
        }
        duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
        pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
        // Push the frame into the picture queue, i.e. the render queue
        ret = queue_picture(ffp, frame, pts, duration, frame->pkt_pos, is->viddec.pkt_serial);
        av_frame_unref(frame);

        if (ret < 0)
            goto the_end;
    }
 the_end:
    av_log(NULL, AV_LOG_INFO, "convert image convert_frame_count = %d\n", convert_frame_count);
    av_frame_free(&frame);
    return 0;
}

3. Usage

Mainly, its func_run_sync() method is what decodes video frames and enqueues them to the render queue. That happens in:

static int video_thread(void *arg)
{
    FFPlayer *ffp = (FFPlayer *)arg;
    int       ret = 0;
    // If node_vdec is not NULL
    if (ffp->node_vdec) {
        // Run the decode loop
        ret = ffpipenode_run_sync(ffp->node_vdec);
    }
    return ret;
}
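
ffpipenode_run_sync() is, once again, a one-line dispatch through the node's function pointer (approximate sketch):

//  ijkmedia/ijkplayer/ff_ffpipenode.c (approximate)
int ffpipenode_run_sync(IJKFF_Pipenode *node)
{
    return node->func_run_sync(node);
}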

Now let's look at its logic when decoding:

//  ijkmedia/ijkplayer/pipeline/ffpipenode_ffplay_vdec.c
static int func_run_sync(IJKFF_Pipenode *node)
{
    IJKFF_Pipenode_Opaque *opaque = node->opaque;

    return ffp_video_thread(opaque->ffp);
}
//  ijkmedia/ijkplayer/ff_ffplay.c
int ffp_video_thread(FFPlayer *ffp)
{
    return ffplay_video_thread(ffp);
}