視頻播放
準(zhǔn)備工作
1. 首先，定義一個(gè)播放控件PlayerView
/**
 * Video render target backed by a TextureView. Hands its SurfaceTexture to
 * the native layer and receives the decoded video size back via
 * {@link #onNativeGetVideoSize(int, int)}.
 */
public class PlayerView extends TextureView implements TextureView.SurfaceTextureListener {

    public PlayerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        setSurfaceTextureListener(this);
    }

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        LogUtils.e("onSurfaceTextureAvailable:width=" + width + ",height=" + height);
        // Pass the drawable surface down to native code.
        setSurface(new Surface(surfaceTexture), width, height);
    }

    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {
    }

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
    }

    /**
     * Called by native code (NDK) with the decoded video dimensions; applies
     * an aspect-ratio-preserving "fit" transform so one dimension fills the view.
     *
     * @param videoWidth  decoded frame width in pixels
     * @param videoHeight decoded frame height in pixels
     */
    public void onNativeGetVideoSize(final int videoWidth, final int videoHeight) {
        LogUtils.e("onNativeGetVideoSize:videoWidth=" + videoWidth + ",videoHeight=" + videoHeight);
        // FIX: this is invoked from the native decode thread, but setTransform
        // must run on the UI thread — post the work instead of running inline.
        post(new Runnable() {
            @Override
            public void run() {
                int width = getWidth();
                int height = getHeight();
                // FIX: guard against division by zero (view not laid out yet,
                // or stream with missing size metadata).
                if (width == 0 || height == 0 || videoWidth == 0 || videoHeight == 0) {
                    return;
                }
                float scaleX = videoWidth * 1.0f / width;
                float scaleY = videoHeight * 1.0f / height;
                // Ensure either width or height fills the view.
                float maxScale = Math.max(scaleX, scaleY);
                scaleX /= maxScale;
                scaleY /= maxScale;
                Matrix matrix = new Matrix();
                // FIX: float pivot; integer width / 2 truncated on odd sizes.
                matrix.setScale(scaleX, scaleY, width / 2f, height / 2f);
                setTransform(matrix);
            }
        });
    }

    private native void setSurface(Surface surface, int width, int height);

    public native void play(String path);
}
2. 編寫頁(yè)面布局activity_main
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">

    <com.levylin.ffmpegdemo.PlayerView
        android:id="@+id/playerView"
        android:layout_width="match_parent"
        android:layout_height="150dp" />

    <LinearLayout
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:orientation="horizontal">

        <!-- FIX: removed android:onClick="playVideo". MainActivity.playVideo()
             takes no (View) parameter, so an XML-driven click would throw;
             the click listener is attached programmatically in onCreate. -->
        <Button
            android:id="@+id/play_btn"
            android:layout_width="match_parent"
            android:layout_height="wrap_content"
            android:text="播放" />
    </LinearLayout>
</LinearLayout>
3.編寫主界面MainActivity
/**
 * Demo entry point: wires the play button to the native-backed player view.
 */
class MainActivity : AppCompatActivity() {

    // Test RTMP live-stream address handed to the native player.
    val URL = "rtmp://live.hkstv.hk.lxdns.com/live/hks"

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)
        play_btn.setOnClickListener { playVideo() }
    }

    // Kick off playback of the stream URL in the player view.
    fun playVideo() = playerView.play(URL)

    companion object {
        init {
            // Load the JNI library before any native method can be called.
            System.loadLibrary("native-lib")
        }
    }
}
4.定義權(quán)限
<!-- Storage permissions for reading/writing local media files. -->
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<!-- INTERNET is required to pull the RTMP network stream. -->
<uses-permission android:name="android.permission.INTERNET" />
5.CMakeLists.txt修改
將
add_library( native-lib
SHARED
src/main/cpp/native-lib.cpp )
改為
# Glob every .cpp under src/main/cpp so newly added sources are picked up
# without editing CMakeLists.txt again.
file(GLOB my_source src/main/cpp/*.cpp)
add_library( native-lib
SHARED
${my_source} )
主要是為了方便后續(xù)新增別的cpp文件,不需要手動(dòng)再去修改CMakeLists.txt
C++核心代碼
1.定義一個(gè)my-log.h
#ifndef FFMPEGDEMO_MY_LOG_H
#define FFMPEGDEMO_MY_LOG_H
#include <android/log.h>
#define TAG "LEVY"
/* Error-level logcat macro, printf-style: LOGE("width=%d", w). */
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,TAG,__VA_ARGS__)
#endif //FFMPEGDEMO_MY_LOG_H
2.定義一個(gè)FFmpegVideo的h文件和C++文件
FFmpegVideo.h
#ifndef FFMPEGDEMO_FFMPEGVIDEO_H
#define FFMPEGDEMO_FFMPEGVIDEO_H
#include "my-log.h"
#include <queue>
#include <unistd.h>
#include <pthread.h>
extern "C" {
#include "libswscale/swscale.h"
#include "libavcodec/avcodec.h"
#include "libavformat/avformat.h"
};
using namespace std;
// Video playback pipeline: a producer thread put()s demuxed AVPackets into
// video_queue; the internal playback thread get()s them, decodes, and invokes
// the render callback registered with setPlayCall().
class FFmpegVideo {
public:
FFmpegVideo();
~FFmpegVideo();
/**
 * Pops one packet from the queue (waits on cond while the queue is empty).
 * @param packet destination; filled via av_packet_ref
 * @return 1 on success, 0 if cloning the packet failed
 */
int get(AVPacket *packet);
/**
 * Clones a packet onto the heap and pushes it into the queue.
 * @param packet source packet (its payload is released by this call)
 * @return 1 on success, 0 if cloning failed
 */
int put(AVPacket *packet);
/**
 * Starts the playback (decode) thread.
 */
void play();
/**
 * Stops playback.
 */
void stop();
/**
 * Sets the decoder context used by the playback thread.
 * @param avCodecContext
 */
void setAVCodecPacket(AVCodecContext *avCodecContext);
/**
 * Registers the per-frame render callback.
 * @param call invoked with each decoded (RGBA-converted) frame
 */
void setPlayCall(void(*call)(AVFrame *frame));
public:
int isPlay;//playing flag (1 = playing)
int index;//video stream index within the format context
queue<AVPacket *> video_queue;//packet queue (heap-allocated clones)
pthread_t tid;//playback thread id
AVCodecContext *avCodecContext;//decoder context
pthread_mutex_t mutex;//mutex guarding video_queue
pthread_cond_t cond;//signaled when a packet is queued
};
#endif //FFMPEGDEMO_FFMPEGVIDEO_H
FFmpegVideo.cpp
#include "FFmpegVideo.h"
/**
 * Render callback invoked with each decoded RGBA frame
 * (installed from the JNI layer via FFmpegVideo::setPlayCall).
 * @param frame
 */
static void (*video_call)(AVFrame *frame);
/**
* 視頻播放線程
* @param data
* @return
*/
void *playVideo(void *data) {
LOGE("播放視頻線程");
FFmpegVideo *video = (FFmpegVideo *) data;
AVCodecContext *pContext = video->avCodecContext;
//像素格式
AVPixelFormat pixelFormat = AV_PIX_FMT_RGBA;
SwsContext *swsContext = sws_getContext(pContext->width,
pContext->height,
pContext->pix_fmt,
pContext->width,
pContext->height,
pixelFormat,
SWS_BICUBIC,
NULL,
NULL,
NULL);
LOGE("獲取swsContext完成");
//要畫在window上的frame
AVFrame *rgb_frame = av_frame_alloc();
uint8_t *out_buffer = (uint8_t *) av_malloc(
(size_t) avpicture_get_size(pixelFormat, pContext->width, pContext->height));
avpicture_fill((AVPicture *) rgb_frame, out_buffer, pixelFormat, pContext->width,
pContext->height);
LOGE("設(shè)置rgb_frame完成");
int got_frame;
AVFrame *frame = av_frame_alloc();
AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
av_init_packet(packet);
while (video->isPlay) {
video->get(packet);
avcodec_decode_video2(pContext, frame, &got_frame, packet);
if (!got_frame) {
continue;
}
sws_scale(swsContext, (const uint8_t *const *) frame->data, frame->linesize, 0,
frame->height, rgb_frame->data, rgb_frame->linesize);
video_call(rgb_frame);
usleep(16 * 1000);//這邊先暫定時(shí)間是16毫秒
}
}
/**
 * Initializes the queue synchronization primitives and member state.
 */
FFmpegVideo::FFmpegVideo() {
    // FIX: members were left uninitialized — isPlay/index/avCodecContext
    // held garbage until play()/setAVCodecPacket() happened to run.
    isPlay = 0;
    index = -1;
    avCodecContext = NULL;
    pthread_mutex_init(&mutex, NULL);// queue mutex
    pthread_cond_init(&cond, NULL);// queue "not empty" condition
}
/**
 * Releases the synchronization primitives created in the constructor.
 */
FFmpegVideo::~FFmpegVideo() {
    // FIX: the mutex and condition variable were never destroyed.
    pthread_mutex_destroy(&mutex);
    pthread_cond_destroy(&cond);
}
/**
 * Pops one packet from the queue, blocking while it is empty and playback
 * is active.
 * @param packet destination, filled via av_packet_ref
 * @return 1 when a packet was delivered, 0 on stop or clone failure
 */
int FFmpegVideo::get(AVPacket *packet) {
    LOGE("獲取視頻包");
    pthread_mutex_lock(&mutex);
    // FIX: use a while loop — the original single `if` returned with a
    // stale packet on spurious wakeups and after stop().
    while (isPlay && video_queue.empty()) {
        LOGE("列表為空");
        pthread_cond_wait(&cond, &mutex);
    }
    if (video_queue.empty()) {
        // Stopped with nothing queued.
        pthread_mutex_unlock(&mutex);
        return 0;
    }
    AVPacket *packet1 = video_queue.front();
    video_queue.pop();
    int ret = av_packet_ref(packet, packet1);
    av_free_packet(packet1);
    // FIX: the struct itself was malloc'd in put() and leaked.
    free(packet1);
    // FIX: the original returned on clone failure WITHOUT unlocking the
    // mutex, deadlocking the producer forever.
    pthread_mutex_unlock(&mutex);
    if (ret < 0) {
        LOGE("獲取包.....克隆失敗");
        return 0;
    }
    return 1;
}
/**
 * Clones `packet` onto the heap, queues the clone and wakes a consumer
 * blocked in get(). Note: the caller's packet payload is released here
 * via av_free_packet(packet) — the source must not be used afterwards.
 * @param packet source packet
 * @return 1 on success, 0 if cloning failed
 */
int FFmpegVideo::put(AVPacket *packet) {
LOGE("插入視頻包");
AVPacket *packet1 = (AVPacket *) malloc(sizeof(AVPacket));
if (av_copy_packet(packet1, packet) < 0) {
LOGE("克隆失敗");
// NOTE(review): packet1 leaks on this path — consider free(packet1).
return 0;
}
pthread_mutex_lock(&mutex);
video_queue.push(packet1);
av_free_packet(packet);// release the source packet's buffers
pthread_cond_signal(&cond);// wake a consumer waiting in get()
pthread_mutex_unlock(&mutex);
return 1;
}
/**
 * Marks playback active and spawns the decode thread (playVideo),
 * storing its id in tid.
 * NOTE(review): pthread_create's return value is ignored — on failure
 * isPlay stays 1 with no thread running.
 */
void FFmpegVideo::play() {
isPlay = 1;
pthread_create(&tid, NULL, playVideo, this);
}
void FFmpegVideo::stop() {
isPlay = 0;
}
// Stores the decoder context the playback thread will read from.
void FFmpegVideo::setAVCodecPacket(AVCodecContext *avCodecContext) {
this->avCodecContext = avCodecContext;
}
// Registers the file-scope render callback invoked per decoded frame.
void FFmpegVideo::setPlayCall(void (*call)(AVFrame *)) {
video_call = call;
}
3.編寫jni實(shí)現(xiàn)方法
#include <jni.h>
#include <cstdlib>
#include <cstring>
#include <string>
#include "FFmpegVideo.h"
#include <android/native_window.h>
#include <android/native_window_jni.h>
pthread_t main_tid;// demux (read-frame) thread id
int isPlaying;// demux loop flag (1 = running)
ANativeWindow *window;// render target obtained from the Java Surface
const char *path;// media path/URL passed down from Java
FFmpegVideo *video;// video pipeline instance
jobject jobj;// global ref to the PlayerView instance
JavaVM *jvm;// cached in JNI_OnLoad for cross-thread env attachment
/**
 * Render callback: copies one decoded RGBA frame into the ANativeWindow
 * buffer, row by row (window stride and frame linesize usually differ).
 * @param frame RGBA frame produced by sws_scale
 */
void call_video_play(AVFrame *frame) {
    if (!window) {
        LOGE("window is null");
        return;
    }
    ANativeWindow_Buffer buffer;
    if (ANativeWindow_lock(window, &buffer, NULL) < 0) {
        LOGE("window 鎖住失敗");
        return;
    }
    uint8_t *dst = (uint8_t *) buffer.bits;
    int dstStride = buffer.stride * 4;// stride is in pixels, 4 bytes each
    uint8_t *src = frame->data[0];
    int srcStride = frame->linesize[0];
    // FIX: copy the smaller of the two strides — the original copied
    // srcStride bytes into each dst row, overflowing the window buffer
    // whenever srcStride > dstStride (linesize padding, size mismatch).
    int copyBytes = srcStride < dstStride ? srcStride : dstStride;
    // FIX: clamp row count to the window buffer height as well.
    int rows = video->avCodecContext->height < buffer.height
               ? video->avCodecContext->height : buffer.height;
    for (int i = 0; i < rows; ++i) {
        memcpy(dst + i * dstStride, src + i * srcStride, (size_t) copyBytes);
    }
    ANativeWindow_unlockAndPost(window);
}
void *proccess(void *data) {
av_register_all();//使用ffmpeg必須要注冊(cè)
avformat_network_init();//如果播放網(wǎng)絡(luò)視頻戈鲁,需要注冊(cè)
AVFormatContext *formatContext = avformat_alloc_context();
if (avformat_open_input(&formatContext, path, NULL, NULL) < 0) {
LOGE("打開視頻失敗");
}
LOGE("打開視頻成功");
if (avformat_find_stream_info(formatContext, NULL) < 0) {
LOGE("尋找流信息失敗");
}
LOGE("尋找流信息成功");
for (int i = 0; i < formatContext->nb_streams; ++i) {
AVStream *stream = formatContext->streams[i];
AVCodecContext *codecContext = stream->codec;
//獲取解碼器
AVCodec *codec = avcodec_find_decoder(codecContext->codec_id);
if (avcodec_open2(codecContext, codec, NULL) < 0) {
LOGE("打開解碼器失敗");
continue;
}
if (codecContext->codec_type == AVMEDIA_TYPE_VIDEO) {
video->index = i;
video->setAVCodecPacket(codecContext);
int width = codecContext->width;
int height = codecContext->height;
LOGE("視頻:寬=%d,高寬=%d", width, height);
JNIEnv *env;
jvm->AttachCurrentThread(&env, 0);
LOGE("獲取env");
jclass clazz = env->GetObjectClass(jobj);
LOGE("Native found Java jobj Class :%d", clazz ? 1 : 0);
jmethodID mid = env->GetMethodID(clazz, "onNativeGetVideoSize", "(II)V");
if (env && jobj && mid) {
LOGE("給JAVA中設(shè)置寬高");
env->CallVoidMethod(jobj, mid, width, height);
}
ANativeWindow_setBuffersGeometry(window, width, height,
WINDOW_FORMAT_RGBA_8888);
}
}
LOGE("開始播放");
video->play();
AVPacket *packet = (AVPacket *) av_malloc(sizeof(AVPacket));
while (isPlaying) {
if (av_read_frame(formatContext, packet) < 0) {
LOGE("讀取幀失敗");
av_packet_unref(packet);
continue;
}
if (video && video->isPlay && video->index == packet->stream_index) {
video->put(packet);
}
av_packet_unref(packet);
}
isPlaying = 0;
if (video && video->isPlay) {
video->stop();
}
av_free_packet(packet);
avformat_free_context(formatContext);
pthread_exit(0);
}
/**
 * Library load hook: caches the JavaVM pointer for later cross-thread
 * attachment and reports the JNI version this library requires.
 */
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM *vm, void *reserved) {
    jvm = vm;
    JNIEnv *env = NULL;
    if (jvm) {
        LOGE("jvm init success");
    }
    // Report failure (-1) unless a JNI 1.4 environment is available.
    return (vm->GetEnv((void **) &env, JNI_VERSION_1_4) == JNI_OK)
           ? JNI_VERSION_1_4
           : -1;
}
extern "C"
JNIEXPORT jstring JNICALL
Java_com_levylin_ffmpegdemo_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    // Leftover sample method from the Android Studio NDK template.
    std::string hello = "Hello from C++";
    const char *chars = hello.c_str();
    return env->NewStringUTF(chars);
}
/**
 * Receives the drawable Surface from Java, acquires the corresponding
 * ANativeWindow, and caches a global ref to the PlayerView instance.
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_levylin_ffmpegdemo_PlayerView_setSurface(JNIEnv *env, jobject instance, jobject surface,
                                                  jint width, jint height) {
    // FIX: the original `if (!window)` kept the first window forever, so a
    // recreated SurfaceTexture (view detach/attach) left rendering pointed
    // at a dead surface. Release the old window and take the new one.
    if (window) {
        ANativeWindow_release(window);
        window = NULL;
    }
    window = ANativeWindow_fromSurface(env, surface);
    if (!jobj) {
        jobj = env->NewGlobalRef(instance);
    }
}
/**
 * Starts playback: records the stream path, creates the video pipeline,
 * and launches the demux thread (proccess).
 */
extern "C"
JNIEXPORT void JNICALL
Java_com_levylin_ffmpegdemo_PlayerView_play(JNIEnv *env, jobject instance, jstring path_) {
    const char *utf = env->GetStringUTFChars(path_, 0);
    // FIX: the original called ReleaseStringUTFChars right after
    // pthread_create while the demux thread was still reading `path` —
    // a use-after-free race. Take a private heap copy first.
    size_t len = strlen(utf) + 1;
    char *copy = (char *) malloc(len);
    memcpy(copy, utf, len);
    env->ReleaseStringUTFChars(path_, utf);
    path = copy;
    video = new FFmpegVideo;
    video->setPlayCall(call_video_play);
    isPlaying = 1;
    pthread_create(&main_tid, NULL, proccess, NULL);
}