Android Studio 2.2+: OpenSL ES + FFmpeg audio decoding and playback

Android FFmpeg audio playback

This article describes how to implement audio decoding and playback on Android using OpenSL ES and FFmpeg.

Introduction to OpenSL ES

The Android NDK ships with a platform-specific implementation of OpenSL ES. It provides a set of low-level audio interfaces that let developers implement audio playback, recording, and related features entirely in native code. Compared with AudioTrack, OpenSL ES offers lower latency, higher performance, and several other advantages.

Preparation

  1. Set up an Android Studio NDK development environment
  2. Compile the FFmpeg libraries and integrate them into the Android Studio project (a sketch of how the prebuilt libraries can be imported is given after the CMake snippet below)
  3. Add the required permissions to the project's AndroidManifest
<!-- RECORD_AUDIO is needed to create an audio recorder -->
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<!-- MODIFY_AUDIO_SETTINGS is needed to use audio effects such as environmental reverb -->
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<!-- INTERNET is needed to use a URI-based audio player, depending on the URI -->
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
  4. Add the OpenSL ES library to the link step in CMakeLists.txt
target_link_libraries(native-lib
log
android
OpenSLES
avcodec-57_lib
avformat-57_lib
avutil-55_lib
swresample-2_lib
swscale-4_lib)
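
The article does not show how the compiled FFmpeg libraries from step 2 are wired into the build. A minimal sketch of one common approach, assuming the prebuilt .so files are copied into a jniLibs/${ANDROID_ABI} folder and the headers into an ffmpeg/include folder (both paths and the .so file names are illustrative, not taken from the article), is to declare them as imported libraries in the same CMakeLists.txt so target_link_libraries can resolve them:

# sketch only: paths and library file names are assumptions, adjust to your FFmpeg build
set(ffmpeg_dir ${CMAKE_SOURCE_DIR}/../jniLibs/${ANDROID_ABI})
include_directories(${CMAKE_SOURCE_DIR}/../ffmpeg/include)
add_library(avcodec-57_lib SHARED IMPORTED)
set_target_properties(avcodec-57_lib PROPERTIES IMPORTED_LOCATION ${ffmpeg_dir}/libavcodec-57.so)
add_library(avformat-57_lib SHARED IMPORTED)
set_target_properties(avformat-57_lib PROPERTIES IMPORTED_LOCATION ${ffmpeg_dir}/libavformat-57.so)
add_library(avutil-55_lib SHARED IMPORTED)
set_target_properties(avutil-55_lib PROPERTIES IMPORTED_LOCATION ${ffmpeg_dir}/libavutil-55.so)
add_library(swresample-2_lib SHARED IMPORTED)
set_target_properties(swresample-2_lib PROPERTIES IMPORTED_LOCATION ${ffmpeg_dir}/libswresample-2.so)
add_library(swscale-4_lib SHARED IMPORTED)
set_target_properties(swscale-4_lib PROPERTIES IMPORTED_LOCATION ${ffmpeg_dir}/libswscale-4.so)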

FFmpeg audio decoding

  1. Include the FFmpeg headers in native-lib (a sketch of the logging macro the snippets below rely on follows this block)
extern "C"{
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libswresample/swresample.h"
#include "libavutil/opt.h"
};
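
The native code below also uses a LOGD logging macro and assert, neither of which the article shows, and play() calls getPCM() before the point where it is defined. A minimal sketch of these missing pieces, assuming the NDK's android/log.h and a placeholder log tag:

#include <android/log.h>
#include <assert.h>
#include <pthread.h>

#define LOG_TAG "audiotest"   // placeholder tag, not taken from the article
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)

int getPCM();   // defined further down in this article; play() calls it first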
  2. Declare global variables
static void *nextBuffer;
static int nextSize;
static AVPacket *packet;
static AVFrame *pFrame;
static AVCodecContext *pCodecCtx;
static SwrContext *swr;
static AVFormatContext *pFormatCtx;
static int audioindex;
uint8_t *outputBuffer;
  3. Initialize FFmpeg, open the audio file, and create the decoder and the output buffer
extern "C"
int Java_cn_jx_audiotest_MainActivity_play(JNIEnv* env, jclass clazz, jstring url) {
    int i;    
    AVCodec *pCodec;
    char input_str[500]={0};
    //read the input audio file path
    sprintf(input_str, "%s", env->GetStringUTFChars(url, NULL));
    //initialize FFmpeg
    av_register_all();
    //allocate an AVFormatContext
    pFormatCtx = avformat_alloc_context();
    //open the file
    if(avformat_open_input(&pFormatCtx,input_str,NULL,NULL)!=0) {
        LOGD("Couldn't open input stream.\n");
        return -1;    
    }    
    //retrieve the stream information of the file
    if(avformat_find_stream_info(pFormatCtx,NULL)<0) {
        LOGD("Couldn't find stream information.\n");
        return -1;    
    }
    //find the audio stream among the streams
    audioindex = -1;
    for(i=0; i<pFormatCtx->nb_streams; i++) {
        if (pFormatCtx->streams[i]->codecpar->codec_type==AVMEDIA_TYPE_AUDIO) {
            audioindex = i;
            break;
        }
    }
    if(audioindex == -1){
        LOGD("Couldn't find an audio stream.\n");
        return -1;
    }
    //get the decoder context of the audio stream (streams[i]->codec is deprecated, but matches the FFmpeg build used here)
    pCodecCtx=pFormatCtx->streams[audioindex]->codec;
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    assert(pCodec != NULL);
    //allocate a frame to hold the decoded raw data
    pFrame = av_frame_alloc();
    packet=(AVPacket *)av_malloc(sizeof(AVPacket));
    //open the decoder
    if(avcodec_open2(pCodecCtx, pCodec,NULL)<0){
        LOGD("Couldn't open codec.\n");
        return -1;
    }
    //set up the sample format conversion
    swr = swr_alloc();
    av_opt_set_int(swr, "in_channel_layout",  pCodecCtx->channel_layout, 0);
    av_opt_set_int(swr, "out_channel_layout", pCodecCtx->channel_layout,  0);
    av_opt_set_int(swr, "in_sample_rate",     pCodecCtx->sample_rate, 0);
    av_opt_set_int(swr, "out_sample_rate",    pCodecCtx->sample_rate, 0);
    av_opt_set_sample_fmt(swr, "in_sample_fmt",  pCodecCtx->sample_fmt, 0);
    av_opt_set_sample_fmt(swr, "out_sample_fmt", AV_SAMPLE_FMT_S16,  0);
    swr_init(swr);
    //allocate the output buffer
    int outputBufferSize = 8196;
    outputBuffer = (uint8_t *) malloc(sizeof(uint8_t) * outputBufferSize);
    //decode one frame of the audio file
    getPCM();
    //enqueue the decoded buffer for playback with OpenSL ES
    SLresult result;
    if (nextSize > 0) {
        result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
        if (SL_RESULT_SUCCESS != result) {
            pthread_mutex_unlock(&audioEngineLock);
            return -1;
        }
    }
    return 0;
}
  4. Decode the audio file
int getPCM(){
    while(av_read_frame(pFormatCtx, packet)>=0) {
        if (packet->stream_index == audioindex) {
            int ret = avcodec_send_packet(pCodecCtx, packet);
            av_packet_unref(packet);  //the packet is no longer needed once it has been sent
            if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF)
                return -1;

            ret = avcodec_receive_frame(pCodecCtx, pFrame);
            if (ret < 0 && ret != AVERROR_EOF)
                return -1;
            //work out the output size for the different sample formats (see "Pitfalls" below)
            if (pCodecCtx->sample_fmt == AV_SAMPLE_FMT_S16P) {
                //planar S16: total size across all channels
                nextSize = av_samples_get_buffer_size(pFrame->linesize, pCodecCtx->channels,pCodecCtx->frame_size,pCodecCtx->sample_fmt, 1);
            }else {
                //other formats (e.g. FLTP): use the per-plane linesize as the S16 output size
                av_samples_get_buffer_size(&nextSize, pCodecCtx->channels,pCodecCtx->frame_size,pCodecCtx->sample_fmt, 1);
            }
            //convert to interleaved 16-bit PCM; the output count is given in samples per channel
            swr_convert(swr, &outputBuffer, pFrame->nb_samples,
                        (uint8_t const **) (pFrame->extended_data),
                        pFrame->nb_samples);
            nextBuffer = outputBuffer;
            return 0;
        }
        av_packet_unref(packet);  //not the audio stream, discard and keep reading
    }
    return -1;  //end of file, nothing left to decode
}

OpenSL ES audio playback

  1. Define the relevant global variables
// engine interfaces
static SLObjectItf engineObject = NULL;
static SLEngineItf engineEngine;
static SLObjectItf outputMixObject = NULL;
static SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
static SLEffectSendItf bqPlayerEffectSend;
static SLMuteSoloItf bqPlayerMuteSolo;
static SLVolumeItf bqPlayerVolume;
static SLmilliHertz bqPlayerSampleRate = 0;
static jint   bqPlayerBufSize = 0;
static short *resampleBuf = NULL;
static pthread_mutex_t  audioEngineLock = PTHREAD_MUTEX_INITIALIZER;
// aux effect on the output mix, used by the buffer queue player
static const SLEnvironmentalReverbSettings reverbSettings =
        SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;
  2. Create the engine (a cleanup sketch follows the code below)
// create the engine and output mix objects
extern "C"
void
Java_cn_jx_audiotest_MainActivity_createEngine(JNIEnv* env, jclass clazz) {
    SLresult result;
    // create engine
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // realize the engine
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // get the engine interface, which is needed in order to create other objects
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // create output mix, with environmental reverb specified as a non-required interface
    const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean req[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // realize the output mix
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // get the environmental reverb interface
    // this could fail if the environmental reverb effect is not available,
    // either because the feature is not present, excessive CPU load, or
    // the required MODIFY_AUDIO_SETTINGS permission was not requested and granted
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
                                              &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        (void)result;
    }
    // ignore unsuccessful result codes for environmental reverb, as it is optional for this example
}
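
The article never tears the engine down. For completeness, a minimal cleanup sketch (not part of the original code) that destroys the objects from these steps in reverse creation order once playback is finished:

// not in the original article: release the OpenSL ES objects when playback is done
void destroyEngine() {
    if (bqPlayerObject != NULL) {          // the player created in the next step
        (*bqPlayerObject)->Destroy(bqPlayerObject);
        bqPlayerObject = NULL;
        bqPlayerBufferQueue = NULL;
        bqPlayerPlay = NULL;
    }
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
        outputMixEnvironmentalReverb = NULL;
    }
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }
}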
  3. Create the BufferQueueAudioPlayer. Note the bqPlayerCallback registered in the code: it is invoked every time a buffer has been consumed, and inside it we decode one more frame and hand it to the BufferQueueAudioPlayer.
// create buffer queue audio player
extern "C"
void
Java_cn_jx_audiotest_MainActivity_createBufferQueueAudioPlayer(JNIEnv* env, jclass clazz, jint sampleRate, jint bufSize)
{
    SLresult result;
    if (sampleRate >= 0 && bufSize >= 0 ) {
        bqPlayerSampleRate = sampleRate * 1000;
        /*
         * device native buffer size is another factor to minimize audio latency, not used in this
         * sample: we only play one giant buffer here
         */
        bqPlayerBufSize = bufSize;
    }
    // configure audio source
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_8,
                                   SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
                                   SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};
    /*
     * Enable Fast Audio when possible:  once we set the same rate to be the native, fast audio path
     * will be triggered
     */
    if(bqPlayerSampleRate) {
        format_pcm.samplesPerSec = bqPlayerSampleRate;       //sample rate in milliHertz
    }
    SLDataSource audioSrc = {&loc_bufq, &format_pcm};

    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};
    /*
     * create audio player:
     *     fast audio does not support when SL_IID_EFFECTSEND is required, skip it
     *     for fast audio case
     */
    const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_EFFECTSEND,
            /*SL_IID_MUTESOLO,*/};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
            /*SL_BOOLEAN_TRUE,*/ };
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
                                                bqPlayerSampleRate? 2 : 3, ids, req);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // realize the player
    result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // get the play interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // get the buffer queue interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
                                             &bqPlayerBufferQueue);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // register callback on the buffer queue
    result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // get the effect send interface
    bqPlayerEffectSend = NULL;
    if( 0 == bqPlayerSampleRate) {
        result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND,
                                                 &bqPlayerEffectSend);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
#if 0   // mute/solo is not supported for sources that are known to be mono, as this is
    // get the mute/solo interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_MUTESOLO, &bqPlayerMuteSolo);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
#endif
    // get the volume interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    // set the player's state to playing
    result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
}
  4. Implement bqPlayerCallback (a sketch of the releaseResampleBuf helper it calls follows the code)
// this callback handler is called every time a buffer finishes playing
void
bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    assert(bq == bqPlayerBufferQueue);
    assert(NULL == context);
    // for streaming playback, replace this test by logic to find and fill the next buffer
    getPCM();
    if ( NULL != nextBuffer && 0 != nextSize) {
        SLresult result;
        // enqueue another buffer
        result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
        // the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
        // which for this code example would indicate a programming error
        if (SL_RESULT_SUCCESS != result) {
            pthread_mutex_unlock(&audioEngineLock);
        }
        (void)result;
    } else {
        releaseResampleBuf();
        pthread_mutex_unlock(&audioEngineLock);
    }
}
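
releaseResampleBuf() comes from the NDK native-audio sample that this callback is based on and is not shown in the article. A minimal version, assuming the resampleBuf global declared earlier is the only thing it needs to free (declare or define it above the callback):

// frees the resample buffer declared with the other globals; safe to call even if it was never allocated
void releaseResampleBuf(void) {
    if (resampleBuf) {
        free(resampleBuf);
        resampleBuf = NULL;
    }
}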

Calling from the Java layer

We define the JNI interfaces in the Java layer and call them in order to start playing the audio file.

  1. Load the .so library and declare the JNI interfaces
static {
    System.loadLibrary("native-lib");
}
public static native void createEngine();
public static native void createBufferQueueAudioPlayer(int sampleRate, int samplesPerBuf);
public native int play(String url);
  2. Call the interfaces in order to create the engine and start playback (a note on the runtime storage permission follows the code)
createEngine();
int sampleRate = 0;
int bufSize = 0;
/*
 * retrieve fast audio path sample rate and buf size; if we have it, we pass to native
 * side to create a player with fast audio enabled [ fast audio == low latency audio ];
 * IF we do not have a fast audio path, we pass 0 for sampleRate, which will force native
 * side to pick up the 8Khz sample rate.
 */
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
    AudioManager myAudioMgr = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    String nativeParam = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
    sampleRate = Integer.parseInt(nativeParam);
    nativeParam = myAudioMgr.getProperty(AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
    bufSize = Integer.parseInt(nativeParam);
}
createBufferQueueAudioPlayer(sampleRate, bufSize);
//get the file path
String folderurl = Environment.getExternalStorageDirectory().getPath();
String inputurl = folderurl+"/background.mp3";
play(inputurl);
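
On Android 6.0 and above READ_EXTERNAL_STORAGE is a dangerous permission, so the manifest entry alone is not enough to read background.mp3 from external storage. A minimal sketch of a runtime request to run before calling play() (the request code 1 is just a placeholder):

if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
        && checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE)
           != PackageManager.PERMISSION_GRANTED) {
    // placeholder request code; handle the user's answer in onRequestPermissionsResult()
    requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE}, 1);
    return;
}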

Pitfalls

  1. Different audio/video files decode to different sample formats: some MP3 files come out as AV_SAMPLE_FMT_S16P, while some MP4 files come out as AV_SAMPLE_FMT_FLTP, and the size of outputBuffer has to be computed differently for the two. If this is not handled properly the playback is full of noise; see the decoding part of getPCM above for how it is handled here, and the more general sizing sketch below. (If a format has been missed, corrections are welcome.)
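
A more general alternative, which is not what the code above does, is to size the buffer from the converted output rather than from the decoder's sample format: let swr_convert report how many samples it actually produced and compute the S16 byte size from that. A sketch using the same globals:

//sketch: derive nextSize from the resampled S16 output instead of the input sample format
int out_samples = swr_convert(swr, &outputBuffer, pFrame->nb_samples,
                              (uint8_t const **) pFrame->extended_data,
                              pFrame->nb_samples);
if (out_samples > 0) {
    nextSize = av_samples_get_buffer_size(NULL, pCodecCtx->channels,
                                          out_samples, AV_SAMPLE_FMT_S16, 1);
    nextBuffer = outputBuffer;
}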

References

  1. 最簡單的基於FFMPEG+SDL的音頻播放器 (the simplest FFmpeg + SDL based audio player)
  2. NDK sample (native-audio)
  3. AAC to PCM produced a lot of noise