FFMpeg用來(lái)打開(kāi)和解析媒體文件、提取視頻流，QOpenGLWidget用來(lái)播放YUV，YUV在著色器中轉(zhuǎn)換為RGB，從而實(shí)現(xiàn)播放。
1.視頻文件的播放
視頻數(shù)據(jù)提取在一個(gè)獨(dú)立的線(xiàn)程之中，提供三個(gè)事件：
(1)文件打開(kāi)；
(2)文件讀取到第一幀，返回視頻文件尺寸；
(3)讀取到完整一幀，每幀包含三個(gè)顏色通道YUV。
FFMpeg直接使用比較麻煩，此處不直接用，而是用FFMpeg的一個(gè)文件讀取封裝庫(kù) ffms2。
qffmpegreader.h
#ifndef QFFMPEGREADER_H
#define QFFMPEGREADER_H
#include <atomic>
#include <memory>

#include <QByteArray>
#include <QMetaType>
#include <QObject>
#include <QPoint>
#include <QThread>

#include <ffms2/ffms.h>
struct FFFrame;
typedef std::shared_ptr<FFFrame> FFFramePtr;
struct FFFrame
{
FFFrame()
{
}
//YUV
QByteArray TextureY;
QByteArray TextureU;
QByteArray TextureV;
//創(chuàng)建一個(gè)數(shù)據(jù)幀
static FFFramePtr MakeFrame()
{
FFFramePtr FFrame = std::make_shared<FFFrame>();
return FFrame;
}
};
class QFFMpegReader : public QThread
{
Q_OBJECT
public:
explicit QFFMpegReader(QObject *parent = nullptr);
void Open(const QString& Url);
void Close();
// QThread interface
protected:
void run();
private:
bool OpenImpl(FFMS_VideoSource* &videosource, int& num_frames);
signals:
void OnOpen();
void OnUpdate(const QPoint VideoSize);
void OnFrame(FFFramePtr Frame);
private:
QString Url_;
bool Running_;
QPoint DisplaySize_;
};
#endif // QFFMPEGREADER_H
qffmpegreader.cpp
#include "qffmpegreader.h"
#include <QDateTime>
#include <QDebug>
/// Constructs the reader. The parent is now forwarded to QThread so that Qt
/// object ownership works (the original accepted 'parent' and discarded it).
QFFMpegReader::QFFMpegReader(QObject *parent) : QThread(parent), Running_(false)
{
    // Register the shared-frame type so it can travel through queued
    // (cross-thread) signal connections.
    qRegisterMetaType<FFFramePtr>("FFFramePtr");
}
void QFFMpegReader::Open(const QString& Url)
{
Url_ = Url;
Running_ = true;
start();
}
// Asks the worker loop in run() to exit (it polls Running_ once per frame)
// and blocks until the thread has fully finished. Safe to call when the
// thread is not running: wait() returns immediately in that case.
void QFFMpegReader::Close()
{
Running_ = false;
wait();
}
/// Indexes the media file, creates the video source, and configures yuv420p
/// output at the encoded frame size.
///
/// On success: 'videosource' owns the open source (caller must destroy it),
/// 'num_frames' holds the frame count, OnOpen/OnUpdate have been emitted.
/// On failure: returns false with videosource left null / destroyed.
bool QFFMpegReader::OpenImpl(FFMS_VideoSource* &videosource, int& num_frames)
{
    // https://github.com/FFMS/ffms2/blob/master/doc/ffms2-api.md
    // ffms2 requires FFMS_Init before any other API call; the original never
    // called it. Run it exactly once for the process.
    static const bool FFMSInitialized = [] { FFMS_Init(0, 0); return true; }();
    (void)FFMSInitialized;

    char errmsg[1024];
    FFMS_ErrorInfo errinfo;
    errinfo.Buffer = errmsg;
    errinfo.BufferSize = sizeof(errmsg);
    errinfo.ErrorType = FFMS_ERROR_SUCCESS;
    errinfo.SubType = FFMS_ERROR_SUCCESS;

    /* Index the source file. Note that this example does not index any audio tracks. */
    FFMS_Indexer* indexer = FFMS_CreateIndexer(Url_.toStdString().c_str(), &errinfo);
    if (indexer == nullptr) {
        qDebug("FFMS_CreateIndexer failed: %s", errmsg);
        return false;
    }
    // FFMS_DoIndexing2 destroys the indexer regardless of success or failure,
    // so the original's FFMS_CancelIndexing call on the failure path was a
    // double-destroy and has been removed.
    FFMS_Index* index = FFMS_DoIndexing2(indexer, FFMS_IEH_ABORT, &errinfo);
    if (index == nullptr) {
        qDebug("FFMS_DoIndexing2 failed: %s", errmsg);
        return false;
    }
    // Locate the first video track.
    int trackno = FFMS_GetFirstTrackOfType(index, FFMS_TYPE_VIDEO, &errinfo);
    if (trackno < 0) {
        qDebug("no video track found: %s", errmsg);
        FFMS_DestroyIndex(index);
        return false;
    }
    // Create the video source (threads = 1, normal seeking).
    videosource = FFMS_CreateVideoSource(Url_.toStdString().c_str(), trackno, index, 1, FFMS_SEEK_NORMAL, &errinfo);
    if (videosource == nullptr) {
        qDebug("FFMS_CreateVideoSource failed: %s", errmsg);
        FFMS_DestroyIndex(index);
        return false;
    }
    // The index is no longer needed once the source exists.
    FFMS_DestroyIndex(index);

    emit OnOpen();

    // Query stream properties.
    const FFMS_VideoProperties* videoprops = FFMS_GetVideoProperties(videosource);
    num_frames = videoprops->NumFrames;

    // Decode the first frame to learn the encoded dimensions.
    const FFMS_Frame* propframe = FFMS_GetFrame(videosource, 0, &errinfo);
    if (propframe == nullptr) {   // added: the original dereferenced unchecked
        qDebug("failed to decode first frame: %s", errmsg);
        FFMS_DestroyVideoSource(videosource);
        videosource = nullptr;
        return false;
    }
    DisplaySize_ = QPoint(propframe->EncodedWidth, propframe->EncodedHeight);
    emit OnUpdate(DisplaySize_);

    // Request yuv420p output at the encoded size with bicubic resizing.
    int pixfmts[2];
    pixfmts[0] = FFMS_GetPixFmt("yuv420p");
    pixfmts[1] = -1;
    if (FFMS_SetOutputFormatV2(videosource, pixfmts, DisplaySize_.x(), DisplaySize_.y(), FFMS_RESIZER_BICUBIC, &errinfo))
    {
        // Fixed leak: the original returned false here without destroying the
        // just-created video source (the caller bails out on false).
        qDebug("FFMS_SetOutputFormatV2 failed: %s", errmsg);
        FFMS_DestroyVideoSource(videosource);
        videosource = nullptr;
        return false;
    }
    return true;
}
void QFFMpegReader::run()
{
char errmsg[1024];
FFMS_ErrorInfo errinfo;
errinfo.Buffer = errmsg;
errinfo.BufferSize = sizeof(errmsg);
errinfo.ErrorType = FFMS_ERROR_SUCCESS;
errinfo.SubType = FFMS_ERROR_SUCCESS;
//打開(kāi)文件
FFMS_VideoSource* VideoSource = nullptr;
int NumFrames = 0;
if (!OpenImpl(VideoSource, NumFrames))
{
qDebug("Open File Failed [%s]", Url_.toStdString().c_str());
return;
}
//逐幀處理
FFMS_Track* VideoTrack = FFMS_GetTrackFromVideo(VideoSource);
const FFMS_TrackTimeBase* TrackTimeBase = FFMS_GetTimeBase(VideoTrack);
//初始化數(shù)據(jù)
QDateTime StartTime = QDateTime::currentDateTime();
//初始PTS并不是零狸眼,所以需要減去第一幀的PTS
int64_t StartPTS = 0;
int FrameNum = 0;
while (Running_ && (FrameNum < NumFrames))
{
//取幀
const FFMS_Frame* Frame = FFMS_GetFrame(VideoSource, FrameNum, &errinfo);
const FFMS_FrameInfo* FrameInfo = FFMS_GetFrameInfo(VideoTrack, FrameNum);
if (Frame)
{
//記錄第一幀的PTS
if (FrameNum == 0)
{
StartPTS = FrameInfo->PTS;
}
//拷貝幀
FFFramePtr FFrame = FFFrame::MakeFrame();
FFrame->TextureY.setRawData((const char *)Frame->Data[0], Frame->Linesize[0] * Frame->ScaledHeight);
FFrame->TextureU.setRawData((const char *)Frame->Data[1], Frame->Linesize[1] * (Frame->ScaledHeight / 2));
FFrame->TextureV.setRawData((const char *)Frame->Data[2], Frame->Linesize[2] * (Frame->ScaledHeight / 2));
//計(jì)算時(shí)間差,逐毫秒等待岁钓,如果一次等待,則需要等待很久
while (Running_)
{
int64_t PTS = (int64_t)(((FrameInfo->PTS - StartPTS) * TrackTimeBase->Num) / (double)TrackTimeBase->Den);
int64_t CurPTS = StartTime.msecsTo(QDateTime::currentDateTime());
if (CurPTS < PTS)
{
std::this_thread::sleep_for(std::chrono::milliseconds(1));
}
else
{
break;
}
}
//顯示幀
OnFrame(FFrame);
}
//計(jì)算下一幀索引
FrameNum++;
//如果單文件循環(huán)屡限,則重置起始時(shí)間
if (FrameNum >= NumFrames)
{
StartTime = QDateTime::currentDateTime();
FrameNum = 0;
}
}
//關(guān)閉文件
if (VideoSource)
{
FFMS_DestroyVideoSource(VideoSource);
VideoSource = nullptr;
}
}
2.視頻數(shù)據(jù)的渲染
視頻數(shù)據(jù)是YUV。當(dāng)然這裡可以先轉(zhuǎn)換為RGBA/BGRA再傳遞到OpenGL中渲染，但是有兩個(gè)明顯的缺點(diǎn)。
(1)RGBA占用更大的顯卡帶寬和顯存，YUV420與RGBA相比是3/8的差別；
(2)YUV轉(zhuǎn)換RGBA會(huì)占用大量的CPU時(shí)間，CPU並不擅長(zhǎng)做這種轉(zhuǎn)換，而GPU就很擅長(zhǎng)。
視頻讀取類(lèi)QFFMpegReader的事件是在異步線(xiàn)程中觸發(fā)的，綁定的時(shí)候需要設(shè)置連接類(lèi)型為Qt::ConnectionType::QueuedConnection。
根據(jù)讀取類(lèi)的事件設(shè)計(jì)，可以做以下處理。
2.1文件打開(kāi)
根據(jù)需要可以做一些初始化的工作
2.2文件讀取到第一幀，返回視頻文件尺寸
根據(jù)視頻的尺寸創(chuàng)建三個(gè)紋理
2.3讀取到完整一幀，每幀包含三個(gè)顏色通道YUV
用於更新三個(gè)紋理的數(shù)據(jù)
qyuvwidegt.h
#ifndef QYUVWIDEGT_H
#define QYUVWIDEGT_H
#include <QOpenGLWidget>
#include <QOpenGLTexture>
#include <QOpenGLShader>
#include <QOpenGLVertexArrayObject>
#include <QOpenGLBuffer>
#include <QOpenGLShaderProgram>
#include <QOpenGLFunctions_4_5_Core>
#include "qffmpegreader.h"
class QYUVWidegt : public QOpenGLWidget, protected QOpenGLFunctions_4_5_Core
{
Q_OBJECT
public:
explicit QYUVWidegt(QWidget *parent = nullptr);
virtual ~QYUVWidegt();
signals:
// QOpenGLWidget interface
protected:
void initializeGL();
void resizeGL(int w, int h);
void paintGL();
private:
QOpenGLTexture* createTexture(const QPoint VideoSize);
private:
QFFMpegReader reader;
QOpenGLShaderProgram program;
QOpenGLBuffer VBO, EBO;
QOpenGLVertexArrayObject VAO;
QOpenGLTexture* texture_y = nullptr;
QOpenGLTexture* texture_u = nullptr;
QOpenGLTexture* texture_v = nullptr;
};
#endif // QYUVWIDEGT_H
qyuvwidegt.cpp
#include "qyuvwidegt.h"
#include <QDir>
/// Wires the reader's cross-thread signals to GUI-thread slots.
/// Fixes over the original:
///  * texture creation / upload runs outside paintGL(), so the GL context
///    must be made current explicitly (makeCurrent/doneCurrent);
///  * uniformLocation returns int (-1 on failure), not GLuint — use the
///    name-based setUniformValue overload instead;
///  * re-opening a file no longer leaks the previous textures;
///  * OnFrame guards against frames arriving before OnUpdate.
QYUVWidegt::QYUVWidegt(QWidget *parent) : QOpenGLWidget(parent),
    VBO(QOpenGLBuffer::Type::VertexBuffer),
    EBO(QOpenGLBuffer::Type::IndexBuffer)
{
    connect(&reader, &QFFMpegReader::OnOpen, this,
            [&]{
        qDebug("OnOpen");
    }, Qt::ConnectionType::QueuedConnection);

    connect(&reader, &QFFMpegReader::OnUpdate, this, [&](const QPoint VideoSize){
        makeCurrent();
        // Drop textures from any previous file before creating new ones.
        delete texture_y;
        delete texture_u;
        delete texture_v;
        texture_y = createTexture(VideoSize);
        texture_u = createTexture(VideoSize / 2);   // 4:2:0 chroma planes
        texture_v = createTexture(VideoSize / 2);
        // Bind the plane samplers to texture units 0/1/2 once.
        program.bind();
        program.setUniformValue("tex_y", 0);
        program.setUniformValue("tex_u", 1);
        program.setUniformValue("tex_v", 2);
        program.release();
        doneCurrent();
        qDebug("OnUpdate");
    }, Qt::ConnectionType::QueuedConnection);

    connect(&reader, &QFFMpegReader::OnFrame, this, [&](FFFramePtr Frame){
        if (!texture_y || !texture_u || !texture_v)
        {
            return;   // OnUpdate has not been delivered yet
        }
        makeCurrent();
        texture_y->setData(0, 0, QOpenGLTexture::PixelFormat::Red, QOpenGLTexture::PixelType::UInt8, Frame->TextureY.constData());
        texture_u->setData(0, 0, QOpenGLTexture::PixelFormat::Red, QOpenGLTexture::PixelType::UInt8, Frame->TextureU.constData());
        texture_v->setData(0, 0, QOpenGLTexture::PixelFormat::Red, QOpenGLTexture::PixelType::UInt8, Frame->TextureV.constData());
        doneCurrent();
        update();   // schedule a repaint with the new planes
    }, Qt::ConnectionType::QueuedConnection);
}
/// Stops the reader thread, then releases all GL resources with the context
/// current. Fixes over the original: VBO/EBO were destroyed twice; the
/// heap-allocated textures were destroy()ed but never deleted (leak); and
/// the texture pointers were dereferenced without a null check, crashing if
/// no file had ever been opened (deleting nullptr is safe).
QYUVWidegt::~QYUVWidegt()
{
    reader.Close();
    makeCurrent();
    delete texture_y;   // QOpenGLTexture's destructor calls destroy()
    texture_y = nullptr;
    delete texture_u;
    texture_u = nullptr;
    delete texture_v;
    texture_v = nullptr;
    VAO.destroy();
    VBO.destroy();
    EBO.destroy();
    doneCurrent();
}
/// Creates a single-channel (R8) 2D texture holding one YUV plane.
/// Fixes over the original: the minification filter was LinearMipMapLinear
/// but no mipmap chain was ever allocated or generated, risking an
/// incomplete texture; storage allocation now follows the conventional
/// create -> setFormat -> setSize -> allocateStorage order, and edge
/// clamping avoids wrap-around sampling at the borders.
QOpenGLTexture* QYUVWidegt::createTexture(const QPoint VideoSize)
{
    QOpenGLTexture* texture = new QOpenGLTexture(QOpenGLTexture::Target::Target2D);
    texture->create();
    texture->setFormat(QOpenGLTexture::TextureFormat::R8_UNorm);
    texture->setSize(VideoSize.x(), VideoSize.y());
    texture->setMipLevels(1);   // video frames: no mip chain is generated
    texture->allocateStorage();
    texture->setMinMagFilters(QOpenGLTexture::Linear, QOpenGLTexture::Linear);
    texture->setWrapMode(QOpenGLTexture::ClampToEdge);
    return texture;
}
void QYUVWidegt::initializeGL()
{
initializeOpenGLFunctions();
glDisable(GL_DEPTH_TEST);
QDir CurrentPath = QDir(R"(D:\work\OpenGL\QtWidget\Base)");
if(!program.addShaderFromSourceFile(QOpenGLShader::Vertex, CurrentPath.absoluteFilePath(R"(YUV.vert)")) ||
!program.addShaderFromSourceFile(QOpenGLShader::Fragment, CurrentPath.absoluteFilePath(R"(YUV.frag)")))
{
return;
}
program.link();
float vertices[] = {
// positions // colors // texture coords
1.0f, 1.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, // top right
1.0f, -1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, // bottom right
-1.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, // bottom left
-1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f // top left
};
unsigned int indices[] = {
0, 1, 3, // first triangle
1, 2, 3 // second triangle
};
//幾何
QOpenGLVertexArrayObject::Binder vaoBind(&VAO);
VBO.create();
VBO.bind();
VBO.allocate(vertices, sizeof(vertices));
EBO.create();
EBO.bind();
EBO.allocate(indices, sizeof(indices));
int vertex = program.attributeLocation("vertex");
program.setAttributeBuffer(vertex, GL_FLOAT, 0, 3, sizeof(GLfloat) * 8);
program.enableAttributeArray(vertex);
int color = program.attributeLocation("color");
program.setAttributeBuffer(color, GL_FLOAT, sizeof(GLfloat) * 3, 3, sizeof(GLfloat) * 8);
program.enableAttributeArray(color);
int uv = program.attributeLocation("uv");
program.setAttributeBuffer(uv, GL_FLOAT, sizeof(GLfloat) * 6, 2, sizeof(GLfloat) * 8);
program.enableAttributeArray(uv);
VBO.release();
reader.Open(R"(D:\work\OpenGL\QtWidget\Media\123.mp4)");
}
// Keep the GL viewport in sync with the widget size so the quad always
// covers the full client area.
void QYUVWidegt::resizeGL(int w, int h)
{
    glViewport(0, 0, w, h);
}
// Clears the framebuffer and, once all three plane textures exist, draws the
// textured fullscreen quad (two triangles via the element buffer).
void QYUVWidegt::paintGL()
{
    glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
    glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);//GL_LINE GL_FILL

    // Nothing to draw until OnUpdate has created the YUV plane textures.
    const bool planesReady = texture_y && texture_u && texture_v;
    if (!planesReady)
    {
        return;
    }

    // Bind one plane per texture unit, matching tex_y/tex_u/tex_v uniforms.
    glActiveTexture(GL_TEXTURE0);
    texture_y->bind();
    glActiveTexture(GL_TEXTURE1);
    texture_u->bind();
    glActiveTexture(GL_TEXTURE2);
    texture_v->bind();

    QOpenGLVertexArrayObject::Binder quadBinder(&VAO);
    program.bind();
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
    program.release();

    texture_y->release();
    texture_u->release();
    texture_v->release();
}
3.著色器代碼
YUV.vert
#version 450 core
// Pass-through vertex shader for the fullscreen quad: forwards the
// per-vertex color and texture coordinate to the fragment stage unchanged.
layout(location=0) in vec3 vertex;
layout(location=1) in vec3 color;
layout(location=2) in vec2 uv;
out vec3 VertexColor;
out vec2 VertexUV;
void main(void)
{
// Positions are already in clip space (the quad spans [-1,1] in x/y).
gl_Position =vec4(vertex, 1.0);
VertexColor = color;
VertexUV = uv;
}
YUV.frag
#version 450 core
out vec4 FragColor;
in vec3 VertexColor;
in vec2 VertexUV;
// One sampler per YUV plane (texture units 0/1/2), each a single-channel
// R8 texture, so only the .r component carries data.
uniform sampler2D tex_y;
uniform sampler2D tex_u;
uniform sampler2D tex_v;
void main(void)
{
vec3 yuv;
vec3 rgb;
// Sample luma as-is; chroma is stored biased by +0.5 (U/V are signed).
yuv.x = texture(tex_y, VertexUV).r;
yuv.y = texture(tex_u, VertexUV).r - 0.5;
yuv.z = texture(tex_v, VertexUV).r - 0.5;
// YUV -> RGB conversion (BT.601 coefficients). GLSL mat3 constructors are
// COLUMN-major, so the three triples below are the matrix columns:
//   r = y                 + 1.13983 * v
//   g = y - 0.39465 * u   - 0.58060 * v
//   b = y + 2.03211 * u
rgb = mat3( 1, 1, 1,
0, -0.39465, 2.03211,
1.13983, -0.58060, 0) * yuv;
//FragColor = vec4(rgb * VertexColor, 1) ;
FragColor = vec4(rgb, 1) ;
}