流程:ffmpeg讀取視頻幀的yuv-> jni層創(chuàng)建Bitmap,拿到bitmap表示像素?cái)?shù)據(jù)的指針->將YUV轉(zhuǎn)換到bitmap的像素?cái)?shù)據(jù)中(ARGB_8888)
一. ffmpeg讀取視頻幀的yuv
這里只處理格式為yuv420p的視頻幀
初始化AVFormatContext
const char *cstr = videoPath.c_str();
LOGD("inputFmtContext = %p", iFmtContext);
// Open the demuxing (container-level) context for the input file.
int ret = avformat_open_input(&iFmtContext, cstr, nullptr, nullptr);
if (ret != 0) {
LOGE("avformat_open_input file %s failed,%s", cstr, av_err2str(ret));
return;
}
// NOTE(review): message text is misleading — this point is reached right after
// avformat_open_input succeeds, not after av_find_best_stream.
LOGI("av_find_best_stream file %s success", cstr);
// NOTE(review): return value of avformat_find_stream_info is not checked.
avformat_find_stream_info(iFmtContext, nullptr);
// Find the index of the video stream inside iFmtContext's internal stream array.
int videoIndex = av_find_best_stream(iFmtContext, AVMEDIA_TYPE_VIDEO, -1, -1, nullptr, -1);
if (videoIndex < 0) {
LOGE("av_find_best_stream file %s failed,%d", cstr, videoIndex);
return;
}
videoStream = iFmtContext->streams[videoIndex];
// NOTE(review): AVStream::duration is int64_t; "%lu" is the wrong format
// specifier on 32-bit ABIs — should be "%" PRId64.
LOGD("video stream index = %d,duration = %lu,real duration = %f", videoIndex,
videoStream->duration, videoStream->duration * timeBaseToDuration(videoStream->time_base));
打開(kāi)解碼器,并獲取YUV
if (!iCodecContext) {
// Look up a decoder matching the stream's codec id.
AVCodec *avCodec = avcodec_find_decoder(videoStream->codecpar->codec_id);
if (!avCodec) {
LOGW("getFrameAt avcodec_find_decoder failed");
return nullptr;
}
LOGD2(LOG_TAG, "codec name:%s", avCodec->name);
iCodecContext = avcodec_alloc_context3(avCodec);
if (!iCodecContext) {
LOGW("getFrameAt avcodec_alloc_context3 failed");
return nullptr;
}
// Copy the decoding parameters (width/height/extradata, ...) from the AVStream.
int err = avcodec_parameters_to_context(iCodecContext, videoStream->codecpar);
if (err < 0) {
LOGW("getFrameAt avcodec_parameters_to_context failed,err:%s", av_err2str(err));
return nullptr;
}
err = avcodec_open2(iCodecContext, avCodec, nullptr);
if (err < 0) {
LOGW("getFrameAt avcodec_open2 failed,err:%s", av_err2str(err));
return nullptr;
}
}
LOGI("codec init success!!!");
// Holds compressed (not yet decoded) data read from the container.
AVPacket *packet = av_packet_alloc();
// Holds one decoded (raw YUV) frame.
AVFrame *frame = av_frame_alloc();
int64_t frameNum = 0;
int length = 0;  // NOTE(review): never used below — dead variable.
int read = 0;
// Seek to the requested timestamp (converted from milliseconds into the
// stream's time_base units); BACKWARD lands on the previous keyframe.
int seek = av_seek_frame(iFmtContext, videoStream->index,
timeMills / 1000 / timeBaseToDuration(videoStream->time_base),
AVSEEK_FLAG_BACKWARD);
if (seek < 0) {
LOGW("seek failed,code:%d", seek);
goto end;
}
while (!(read = av_read_frame(iFmtContext, packet))) {
LOGD2(LOG_TAG, "packet index:%d", packet->stream_index);
if (packet->stream_index == videoStream->index) {
//LOGD("read frame:%" PRId64 ,frameNum);
// Feed the compressed packet into the decoder.
int code = avcodec_send_packet(iCodecContext, packet);
if (code != 0) {
LOGW("avcodec_send_packet failed");
av_packet_unref(packet);
break;
}
frameNum++;
int ret = 0;
int num = 0;
// Pull a decoded frame back out of the decoder.
// NOTE(review): the `continue` on EAGAIN skips av_packet_unref(packet),
// leaking the packet's buffer each iteration. Also, avcodec_receive_frame
// should normally be called in a loop until it returns EAGAIN — one send
// can yield several frames.
if ((ret = avcodec_receive_frame(iCodecContext, frame)) == AVERROR(EAGAIN)) {
LOGD("avcodec_receive_frame ret:%d,", ret);
continue;
}
if (!ret) {
num++;  // NOTE(review): num is reset per packet, so this is always 1 here.
LOGD("single codec return:%d,ret:%d", num, ret);
LOGD("frame width: %d,height: %d", frame->width, frame->height);
// writeSingleFrame2File(frame);
// yuv4202RGB(frame);
// At this point `frame` holds one decoded YUV video frame.
yuv420ToRgb(frame, rgb);
}
if (ret < 0) {
LOGW("avcodec_receive_frame err:%d,%s", ret, av_err2str(ret));
}
av_packet_unref(packet);
break;
}
}
LOGD("frame num:%" PRId64 ",frame read:%" PRId64 ",read %d", videoStream->nb_frames, frameNum,
read);
end:
av_packet_free(&packet);
av_frame_free(&frame);
二. jni層創(chuàng)建Bitmap,拿到bitmap表示像素?cái)?shù)據(jù)的指針
1. native創(chuàng)建一個(gè)bitmap
/**
 * Creates an android.graphics.Bitmap with ARGB_8888 config via JNI reflection.
 *
 * Calls the Java side: Bitmap.createBitmap(width, height, Bitmap.Config.valueOf("ARGB_8888")).
 * Every JNI call that can return nullptr is checked (the original dereferenced
 * bitmapConfigClass without a check), and intermediate local references are
 * released so the function can be called once per frame without overflowing
 * the JNI local-reference table.
 *
 * @param env    JNI environment of the current thread
 * @param width  bitmap width in pixels
 * @param height bitmap height in pixels
 * @return local reference to the new Bitmap, or nullptr on failure
 */
static jobject createBitmap(JNIEnv *env, int width, int height) {
    jclass bitmapCls = env->FindClass("android/graphics/Bitmap");
    if (!bitmapCls) {
        LOGW("bitmapCls failed");
        return nullptr;
    }
    jmethodID createBitmapFunction = env->GetStaticMethodID(bitmapCls, "createBitmap",
            "(IILandroid/graphics/Bitmap$Config;)Landroid/graphics/Bitmap;");
    if (!createBitmapFunction) {
        LOGW("createBitmapFunction failed");
        return nullptr;
    }
    jstring configName = env->NewStringUTF("ARGB_8888");
    if (!configName) {
        // NewStringUTF returns nullptr on allocation failure.
        LOGW("configName failed");
        return nullptr;
    }
    jclass bitmapConfigClass = env->FindClass("android/graphics/Bitmap$Config");
    if (!bitmapConfigClass) {
        // BUGFIX: original passed a potentially-null class to GetStaticMethodID.
        LOGW("bitmapConfigClass failed");
        env->DeleteLocalRef(configName);
        return nullptr;
    }
    jmethodID valueOfBitmapConfigFunction = env->GetStaticMethodID(
            bitmapConfigClass, "valueOf",
            "(Ljava/lang/String;)Landroid/graphics/Bitmap$Config;");
    if (!valueOfBitmapConfigFunction) {
        LOGW("valueOfBitmapConfigFunction failed");
        env->DeleteLocalRef(configName);
        return nullptr;
    }
    LOGI("valueOfBitmapConfigFunction success");
    jobject bitmapConfig = env->CallStaticObjectMethod(bitmapConfigClass,
            valueOfBitmapConfigFunction, configName);
    jobject bitmap = nullptr;
    if (bitmapConfig) {
        bitmap = env->CallStaticObjectMethod(bitmapCls,
                createBitmapFunction,
                width,
                height, bitmapConfig);
        // Release the config reference; only the bitmap is handed back.
        env->DeleteLocalRef(bitmapConfig);
    } else {
        LOGW("bitmapConfig failed");
    }
    env->DeleteLocalRef(configName);
    return bitmap;
}
2. 拿到bitmap表示像素?cái)?shù)據(jù)的指針
需要添加內(nèi)置本地庫(kù): jnigraphics
jobject bitmap = createBitmap(env, width, height);
// NOTE(review): createBitmap can return nullptr — should be checked before
// passing the bitmap to AndroidBitmap_lockPixels.
int ret;
uint8_t *rgbData = nullptr;
// After AndroidBitmap_lockPixels succeeds, rgbData points at the bitmap's
// pixel storage; the pixels stay pinned until AndroidBitmap_unlockPixels.
if ((ret = AndroidBitmap_lockPixels(env, bitmap, (void**)&rgbData)) < 0) {
LOGW("AndroidBitmap_lockPixels() failed ! error=%d", ret);
return nullptr;
}
LOGD("AndroidBitmap_lockPixels ret=%d", ret);
// Decode the frame at time_mills and write RGBA directly into the bitmap.
// NOTE(review): this assumes the bitmap row stride is exactly width*4 —
// query AndroidBitmapInfo.stride to be safe.
reader->getFrameAt(time_mills,&rgbData);
LOGD("getFrameAt end");
//TODO
AndroidBitmap_unlockPixels(env, bitmap);
// Return the filled bitmap to the Java layer.
return bitmap;
三 將YUV數(shù)據(jù)轉(zhuǎn)換到bitmap的像素?cái)?shù)據(jù)中
/*
 * Converts one decoded yuv420p AVFrame into packed RGBA (4 bytes per pixel),
 * writing into the caller-supplied buffer *rgb. The alpha byte is forced to
 * 0xff because Android's ARGB_8888 bitmaps need an opaque alpha channel.
 *
 * linesize[i] is the byte stride of plane i and, due to alignment padding,
 * is usually wider than the image itself (e.g. 864 bytes for an 854-px-wide
 * frame), so rows are addressed via linesize, not width. The U and V planes
 * are subsampled 2x2, hence the /2 on both row and column.
 *
 * NOTE(review): assumes the destination row stride is exactly width*4 —
 * TODO confirm against the bitmap's actual stride.
 */
static void yuv420ToRgb(AVFrame *frame, uint8_t **rgb) {
    const int width = frame->width;
    const int height = frame->height;
    const int channels = 4;
    uint8_t *dst = *rgb;
    for (int row = 0; row < height; row++) {
        const uint8_t *yRow = frame->data[0] + row * frame->linesize[0];
        const uint8_t *uRow = frame->data[1] + (row / 2) * frame->linesize[1];
        const uint8_t *vRow = frame->data[2] + (row / 2) * frame->linesize[2];
        uint8_t *out = dst + row * width * channels;
        for (int col = 0; col < width; col++) {
            uint8_t Y = yRow[col];
            uint8_t U = uRow[col / 2];
            uint8_t V = vRow[col / 2];
            /* Standard YUV -> RGB conversion; results can fall outside
             * 0..255, so compute in int and clamp before narrowing. */
            int r = Y + 1.402 * (V - 128);
            int g = Y - 0.34413 * (U - 128) - 0.71414 * (V - 128);
            int b = Y + 1.772 * (U - 128);
            if (r < 0) r = 0; else if (r > 255) r = 255;
            if (g < 0) g = 0; else if (g > 255) g = 255;
            if (b < 0) b = 0; else if (b > 255) b = 255;
            out[0] = (uint8_t) r;
            out[1] = (uint8_t) g;
            out[2] = (uint8_t) b;
            out[3] = 0xff; /* opaque alpha */
            out += channels;
        }
    }
}