網(wǎng)上搜索的大部分資料都是OpenGL ES渲染視頻的，OpenGL渲染yuv數(shù)據(jù)的資料比較難找，因此編輯本文mark下；
結(jié)合網(wǎng)上搜索的資料，實(shí)現(xiàn)了在MacOS App開發(fā)中，將接收到的yuv420視頻數(shù)據(jù)渲染到視圖上；
本文并非原創(chuàng)，只是在其他作者的基礎(chǔ)上修修改改，實(shí)現(xiàn)了在MacOS App開發(fā)中使用OpenGL渲染yuv420視頻數(shù)據(jù)；
參考資料:
1.利用Qt + OpenGL 渲染 YUV數(shù)據(jù),播放視頻 mac版
2.AVCapture之4——NSOpenGLView
3.最簡(jiǎn)單的視音頻播放示例6：OpenGL播放YUV420P（通過Texture，使用Shader）
1.感謝第一篇資料作者jake2012，對(duì)于不同版本OpenGL的shader編寫差異所做的有價(jià)值的分享；
2.蘋果有個(gè)Demo中有一個(gè)VideoCIView類，這個(gè)類基本實(shí)現(xiàn)了將一個(gè)CIImage繪制到NSOpenGLView中；
代碼部分
頂點(diǎn)著色器--Shader3.vs:
#version 410
// Pass-through vertex shader: positions arrive already in clip space and the
// texture coordinate is forwarded to the fragment stage unchanged.
in vec4 vertexIn;   // xy = clip-space position (vec2 attribute; zw take GL defaults)
in vec2 textureIn;  // texture coordinate for the Y/U/V samplers
out vec2 textureOut;
void main(void)
{
gl_Position = vertexIn;
textureOut = textureIn;
}
片段著色器--Shader3.frag:
#version 410
// Samples one single-channel texture per YUV plane and converts to RGB.
in vec2 textureOut;
out vec4 fragColor;
uniform sampler2D tex_y; // full-resolution luma plane
uniform sampler2D tex_u; // quarter-resolution chroma planes
uniform sampler2D tex_v;
void main(void)
{
vec3 yuv;
vec3 rgb;
yuv.x = texture(tex_y, textureOut).r;
// Chroma is stored biased by 0.5 (i.e. 128/255); recenter around zero.
yuv.y = texture(tex_u, textureOut).r - 0.5;
yuv.z = texture(tex_v, textureOut).r - 0.5;
// GLSL mat3 is COLUMN-major: the three argument triples are the Y, U and V
// coefficient columns, so r = y + 1.28033*v, g = y - 0.21482*u - 0.38059*v,
// b = y + 2.12798*u. NOTE(review): verify these coefficients match the
// source material's color space (they resemble an HDTV/BT.709-style matrix).
rgb = mat3( 1, 1, 1,
0, -0.21482, 2.12798,
1.28033, -0.38059, 0) * yuv;
fragColor = vec4(rgb, 1);
}
OpenGLRenderer.h
#import <Foundation/Foundation.h>
#include "glUtil.h"
#import <AVFoundation/AVFoundation.h>
/// Renders YUV420 (I420) video frames with OpenGL using a three-texture
/// (Y/U/V plane) shader pipeline. Must be used with a current GL context.
@interface OpenGLRenderer : NSObject
/// Name of the framebuffer object to render into (0 = window-system FBO on macOS).
@property (nonatomic) GLuint defaultFBOName;
/// Initializer. NOTE(review): the argument is currently unused; callers pass 0.
- (instancetype)initWithDefaultFBO:(id)asbs;
/// Updates the GL viewport and the cached view size used for aspect-ratio scaling.
- (void) resizeWithWidth:(GLuint)width AndHeight:(GLuint)height;
/// Display-link draw hook; currently a no-op (frames are drawn as they arrive).
- (void) render;
- (void) dealloc;
/// Renders one CoreVideo pixel buffer (assumed contiguous I420 layout).
- (void)setImage:(CVImageBufferRef)pixelBuffer;
/// Renders one raw I420 frame: width*height Y bytes followed by the
/// quarter-size U plane and then the quarter-size V plane.
- (void)presentYUVData:(NSData*)yuvdata width:(GLuint)width height:(GLuint)height;
@end
OpenGLRenderer.mm
#import "OpenGLRenderer.h"
#include "glUtil.h"
#include "imageUtil.h"
#include "sourceUtil.h"
#ifndef NULL
#define NULL 0
#endif
@interface OpenGLRenderer ()
{
GLuint m_program;          // linked GLSL program (Shader3.vs + Shader3.frag)
GLuint m_vertexBuffer;     // NOTE(review): despite the name this holds the VAO (see -initializeGL)
GLuint textureUniformY;    // uniform locations for tex_y / tex_u / tex_v
GLuint textureUniformU;
GLuint textureUniformV;
GLuint vertexBuffer;       // VBO with the interleaved quad: (pos.xy, uv) per vertex
GLuint vertextAttribute;   // attribute locations for vertexIn / textureIn
GLuint textureAttribute;
GLuint id_y;               // one GL_RED texture per plane
GLuint id_u;
GLuint id_v;
int m_nVideoW;             // incoming frame dimensions (pixels)
int m_nVideoH;
int m_nViewW;              // current viewport dimensions (pixels)
int m_nViewH;
unsigned char* m_pBufYuv420p;  // borrowed pointer to the current frame; not owned
unsigned char* m_pBuffer;      // unused in this chunk
}
@end
@implementation OpenGLRenderer
/// Renders one CoreVideo frame. The buffer is locked for the duration of the
/// draw so its base address stays valid.
- (void)setImage:(CVImageBufferRef)pixelBuffer {
    // Lock read-only: we never write pixels, and a read-only lock avoids the
    // cache write-back a read/write lock would incur on unlock. The flag must
    // match between lock and unlock.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    {
        GLuint width = (GLuint)CVPixelBufferGetWidth(pixelBuffer);
        GLuint height = (GLuint)CVPixelBufferGetHeight(pixelBuffer);
        // Keep the viewport/aspect math in sync with the frame size.
        [self resizeWithWidth:width AndHeight:height];
        // NOTE(review): CVPixelBufferGetBaseAddress only yields a contiguous
        // I420 layout for non-planar (or contiguous planar) buffers; for true
        // planar buffers the per-plane accessors would be needed — confirm
        // the pixel format delivered by the capture pipeline.
        m_pBufYuv420p = (unsigned char*)CVPixelBufferGetBaseAddress(pixelBuffer);
        [self rendeYUVData:m_pBufYuv420p];
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}
/// Renders one raw I420 frame (Y plane, then U, then V).
/// The renderer borrows the NSData's bytes only for the duration of this
/// call; no copy is made.
- (void)presentYUVData:(NSData*)yuvdata width:(GLuint)width height:(GLuint)height {
    @synchronized (self) {
        // Record the frame dimensions used by the vertex scaling math.
        m_nVideoW = width;
        m_nVideoH = height;
        // Borrowed pointer — valid only while yuvdata is alive.
        m_pBufYuv420p = NULL;
        m_pBufYuv420p = (unsigned char*)[yuvdata bytes];
        [self rendeYUVData:m_pBufYuv420p];
    }
}
/// Initializer. The FBO argument is currently ignored (callers pass 0, the
/// default framebuffer on macOS). Requires a current GL context.
- (instancetype)initWithDefaultFBO:(id)asbs {
    if((self = [super init])) {
        // iLog is a project logging macro; identifies the GL renderer/version.
        iLog(@"Render: %s; Version:%s", glGetString(GL_RENDERER), glGetString(GL_VERSION));
        // Compile/link the shaders and build the VAO/VBO and plane textures.
        [self initializeGL];
        // Start from a black frame.
        [self clearRenderBuffer];
    }
    return self;
}
/// Updates the GL viewport and caches the view size used for aspect-ratio
/// scaling in -rendeYUVData:. A zero dimension falls back to two thirds of
/// the screen size (iScreenWidth/iScreenHeight are project macros).
- (void)resizeWithWidth:(GLuint)width AndHeight:(GLuint)height {
    glViewport(0, 0, width, height);
    m_nViewW = width;
    m_nViewH = height;
    if (m_nViewW==0) {
        m_nViewW = 2*iScreenWidth/3;
    }
    if (m_nViewH==0) {
        m_nViewH = 2*iScreenHeight/3;
    }
    // Show black until the next frame arrives at the new size.
    [self clearRenderBuffer];
}
/// Display-link draw hook; intentionally empty — frames are drawn as they
/// are pushed via -setImage: / -presentYUVData:width:height: instead.
- (void) render {
}
/// No explicit cleanup: frame pointers are borrowed, not owned.
/// NOTE(review): the GL textures/buffers/program created in -initializeGL are
/// never deleted — fine if this renderer lives as long as its GL context,
/// otherwise a resource leak.
- (void) dealloc {
}
/// Clears the current framebuffer's color and depth buffers to opaque black.
-(void)clearRenderBuffer {
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT );
}
#pragma mark Render frame
/// Uploads one I420 frame (full-size Y plane, then quarter-size U and V
/// planes) to the three GL_RED textures and draws the textured quad.
/// Relies on GL state set up in -initializeGL (bound VAO/VBO, current program).
-(void)rendeYUVData:(unsigned char*)yuv420data {
    [self clearRenderBuffer];
    // Scale the quad so the video keeps its aspect ratio inside the view
    // (letterbox/pillarbox): the smaller of the two view/video ratios wins.
    float x,y;
    float wRatio = (float)m_nViewW/m_nVideoW;
    float hRatio = (float)m_nViewH/m_nVideoH;
    float minRatio = wRatio<hRatio ? wRatio : hRatio;
    y = m_nVideoH * minRatio/m_nViewH;
    x = m_nVideoW * minRatio/m_nViewW;
    // Interleaved per vertex: clip-space position (x,y) then texture coord (u,v).
    float vertexPoints[] ={
        -x, -y, 0.0f, 1.0f,
         x, -y, 1.0f, 1.0f,
        -x,  y, 0.0f, 0.0f,
         x,  y, 1.0f, 0.0f,
    };
    // The VBO created in -initializeGL is still bound to GL_ARRAY_BUFFER.
    glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(float), vertexPoints, GL_STATIC_DRAW);
    // The default unpack alignment is 4; a plane whose row width is not a
    // multiple of 4 (the U/V planes are width/2) would otherwise be read with
    // phantom per-row padding, skewing the image or reading past the buffer.
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    // Y plane: full resolution, texture unit 0.
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, id_y);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW, m_nVideoH, 0, GL_RED, GL_UNSIGNED_BYTE, yuv420data);
    glUniform1i(textureUniformY, 0);
    // U plane: quarter resolution, starts after the Y plane.
    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_2D, id_u);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW / 2, m_nVideoH / 2, 0, GL_RED, GL_UNSIGNED_BYTE, (char*)yuv420data + m_nVideoW*m_nVideoH);
    glUniform1i(textureUniformU, 1);
    // V plane: quarter resolution, starts after Y + U (= 5/4 of the Y size).
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, id_v);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, m_nVideoW / 2, m_nVideoH / 2, 0, GL_RED, GL_UNSIGNED_BYTE, (char*)yuv420data + m_nVideoW*m_nVideoH * 5 / 4);
    glUniform1i(textureUniformV, 2);
    // Draw the two-triangle quad.
    glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 );
    //glCheckError();
}
#pragma mark init methdos
/// One-time GL setup: builds the shader program, the VAO/VBO for the quad,
/// and the three single-channel plane textures. Must run with a current
/// context; the bindings it leaves active are relied on by -rendeYUVData:.
-(void)initializeGL {
    // Compile, link and activate the shader program (exits on failure).
    [self prepareShaderProgram];
    // Sampler uniforms for the three planes (valid only after linking).
    textureUniformY = glGetUniformLocation(m_program, "tex_y");
    textureUniformU = glGetUniformLocation(m_program, "tex_u");
    textureUniformV = glGetUniformLocation(m_program, "tex_v");
    // Full-screen quad, interleaved (vec2 position, vec2 texcoord) per vertex.
    float vertexPoints[] ={
        -1.0f, -1.0f, 0.0f, 1.0f,
        1.0f, -1.0f, 1.0f, 1.0f,
        -1.0f, 1.0f, 0.0f, 0.0f,
        1.0f, 1.0f, 1.0f, 0.0f,
    };
    // NOTE(review): m_vertexBuffer actually holds the VAO name here.
    glGenVertexArrays(1, &m_vertexBuffer);
    glBindVertexArray(m_vertexBuffer);
    glGenBuffers(1, &vertexBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glBufferData(GL_ARRAY_BUFFER, 4 * 4 * sizeof(float), vertexPoints, GL_STATIC_DRAW);
    // Wire the interleaved attributes: stride 4 floats, texcoord offset 2 floats.
    vertextAttribute = glGetAttribLocation(m_program, "vertexIn");
    textureAttribute = glGetAttribLocation(m_program, "textureIn");
    glEnableVertexAttribArray(vertextAttribute);
    glVertexAttribPointer(vertextAttribute, 2, GL_FLOAT, GL_FALSE, sizeof(float)*4, (const GLvoid *)0);
    glEnableVertexAttribArray(textureAttribute);
    glVertexAttribPointer(textureAttribute, 2, GL_FLOAT, GL_FALSE, sizeof(float)*4, (const GLvoid *)(sizeof(float)*2));
    // Three plane textures: linear filtering, clamped edges (no storage yet —
    // glTexImage2D is issued per frame in -rendeYUVData:).
    glGenTextures(1, &id_y);
    glBindTexture(GL_TEXTURE_2D, id_y);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glGenTextures(1, &id_u);
    glBindTexture(GL_TEXTURE_2D, id_u);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glGenTextures(1, &id_v);
    glBindTexture(GL_TEXTURE_2D, id_v);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
/// Loads Shader3.vs / Shader3.frag from the main bundle, links them into
/// m_program and makes the program current. Exits the process on link failure.
-(void)prepareShaderProgram {
    NSString* vertFile = [[NSBundle mainBundle] pathForResource:@"Shader3" ofType:@"vs"];
    NSString* fragFile = [[NSBundle mainBundle] pathForResource:@"Shader3" ofType:@"frag"];
    // Create the program and attach the compiled shaders.
    m_program = [self loadShaders:vertFile frag:fragFile];
    // Bind the fragment output BEFORE linking so "fragColor" feeds color number 0.
    glBindFragDataLocation(m_program, 0, "fragColor");
    glLinkProgram(m_program);
    GLint linked;
    glGetProgramiv(m_program, GL_LINK_STATUS, &linked );
    if ( !linked ) {
        NSLog(@"Shader program failed to link");
        GLint logSize;
        glGetProgramiv(m_program, GL_INFO_LOG_LENGTH, &logSize);
        char* logMsg = new char[logSize];  // C++ allocation — file is .mm
        glGetProgramInfoLog(m_program, logSize, NULL, logMsg );
        NSLog(@"Link Error: %s", logMsg);
        delete [] logMsg;
        exit( EXIT_FAILURE );
    }
    // Leave the program active; later glUniform1i calls rely on this.
    glUseProgram(m_program);
}
/// Creates a program object and attaches a vertex and a fragment shader
/// compiled from the given file paths. The program is NOT linked here —
/// -prepareShaderProgram binds the fragment output and links afterwards.
/// Returns the new program object's name.
- (GLuint)loadShaders:(NSString *)vert frag:(NSString *)frag {
    GLuint verShader, fragShader;
    // GL object names are unsigned; storing one in GLint risked
    // misinterpreting large names.
    GLuint program = glCreateProgram();
    [self compileShader:&verShader type:GL_VERTEX_SHADER file:vert];
    [self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:frag];
    glAttachShader(program, verShader);
    glAttachShader(program, fragShader);
    // Flag the shaders for deletion; GL frees them once they are no longer
    // attached to any program.
    glDeleteShader(verShader);
    glDeleteShader(fragShader);
    return program;
}
/// Compiles one shader stage from a source file.
/// @param shader [out] receives the new shader object's name.
/// @param type   GL_VERTEX_SHADER or GL_FRAGMENT_SHADER.
/// @param file   Path to the GLSL source file.
/// Logs and exits on an unreadable file or a compile error (matching the
/// link-failure policy in -prepareShaderProgram).
- (void)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file {
    NSError* readError = nil;
    NSString* content = [NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:&readError];
    if (content == nil) {
        // Previously a missing/unreadable file yielded a NULL source pointer,
        // which is undefined behaviour when handed to glShaderSource.
        NSLog(@"Failed to read shader file %@: %@", file, readError);
        exit(EXIT_FAILURE);
    }
    const GLchar* source = (GLchar *)[content UTF8String];
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);
    GLint compiled;
    glGetShaderiv( *shader, GL_COMPILE_STATUS, &compiled );
    if ( !compiled ) {
        GLint logSize;
        glGetShaderiv( *shader, GL_INFO_LOG_LENGTH, &logSize );
        char* logMsg = new char[logSize];  // C++ allocation — file is .mm
        glGetShaderInfoLog( *shader, logSize, NULL, logMsg );
        NSLog(@"Shader compile log:%s\n", logMsg);
        delete [] logMsg;
        exit(EXIT_FAILURE);
    }
}
@end
VideoGLView.h
#import <Cocoa/Cocoa.h>
#import <QuartzCore/CVDisplayLink.h>
/// NSOpenGLView subclass that drives an OpenGLRenderer from a CVDisplayLink.
@interface VideoGLView : NSOpenGLView {
    CVDisplayLinkRef displayLink; // vsync timer; started in -prepareOpenGL
}
/// Renders one CoreVideo frame (marshalled to the main thread).
- (void)setImage:(CVImageBufferRef)img;
/// Renders one raw I420 frame. NOTE(review): width/height are CGFloat here
/// but GLuint on OpenGLRenderer — the values are truncated when forwarded.
-(void)presentYUVData:(NSData*)yuvdata width:(CGFloat)width height:(CGFloat)height;
@end
VideoGLView.m
#import "VideoGLView.h"
#import "OpenGLRenderer.h"
//#define SUPPORT_RETINA_RESOLUTION 1
@interface VideoGLView()
{
OpenGLRenderer* _renderer; // created in -initGL once the GL context is current
}
@end
@implementation VideoGLView
/// Programmatic-creation path.
/// NOTE(review): invoking -awakeFromNib by hand is unconventional — it reuses
/// the nib path's pixel-format/context setup for views created in code.
/// Confirm the view is never both nib-loaded and init'ed, or the setup runs twice.
-(instancetype)init {
    if (self=[super init]) {
        [self awakeFromNib];
    }
    return self;
}
/// Display-link tick: draws one frame on the main thread.
- (CVReturn) getFrameForTime:(const CVTimeStamp*)outputTime
{
    // There is no autorelease pool when this method is called
    // because it will be called from a background thread.
    // It's important to create one or app can leak objects.
    @autoreleasepool {
        // NOTE(review): dispatch_sync onto the main queue from the
        // display-link thread can deadlock if the main thread is itself
        // blocking on this thread (e.g. during CVDisplayLinkStop) — verify
        // the teardown ordering.
        dispatch_sync(dispatch_get_main_queue(), ^{
            [self drawView];
        });
    }
    return kCVReturnSuccess;
}
// CVDisplayLink output callback: runs on the display-link thread and hands
// each vsync tick to the VideoGLView stored in the context pointer.
static CVReturn MyDisplayLinkCallback(CVDisplayLinkRef displayLink,
                                      const CVTimeStamp* now,
                                      const CVTimeStamp* outputTime,
                                      CVOptionFlags flagsIn,
                                      CVOptionFlags* flagsOut,
                                      void* displayLinkContext)
{
    // The context was registered with __bridge (no ownership transfer) in
    // -prepareOpenGL, so bridge it back the same way.
    VideoGLView* view = (__bridge VideoGLView*)displayLinkContext;
    return [view getFrameForTime:outputTime];
}
/// Builds the pixel format and GL context for this view. Also invoked
/// manually from -init for programmatic creation.
- (void) awakeFromNib
{
    // Lifecycle overrides should call through to super.
    [super awakeFromNib];
    NSOpenGLPixelFormatAttribute attrs[] =
    {
        NSOpenGLPFADoubleBuffer,
        NSOpenGLPFADepthSize, 24,
        // Must specify the 3.2 Core Profile to use OpenGL 3.2
#if ESSENTIAL_GL_PRACTICES_SUPPORT_GL3
        NSOpenGLPFAOpenGLProfile,
        NSOpenGLProfileVersion3_2Core,
#endif
        0
    };
    NSOpenGLPixelFormat *pf = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
    if (!pf)
    {
        // Bail out instead of building an NSOpenGLContext from a nil format.
        NSLog(@"No OpenGL pixel format");
        return;
    }
    NSOpenGLContext* context = [[NSOpenGLContext alloc] initWithFormat:pf shareContext:nil];
#if ESSENTIAL_GL_PRACTICES_SUPPORT_GL3 && defined(DEBUG)
    // When we're using a CoreProfile context, crash if we call a legacy OpenGL function
    // This will make it much more obvious where and when such a function call is made so
    // that we can remove such calls.
    // Without this we'd simply get GL_INVALID_OPERATION error for calling legacy functions
    // but it would be more difficult to see where that function was called.
    CGLEnable([context CGLContextObj], kCGLCECrashOnRemovedFunctions);
#endif
    [self setPixelFormat:pf];
    [self setOpenGLContext:context];
#if SUPPORT_RETINA_RESOLUTION
    // Opt-In to Retina resolution
    [self setWantsBestResolutionOpenGLSurface:YES];
#endif // SUPPORT_RETINA_RESOLUTION
}
/// Called once the GL context is ready: builds the renderer and starts the
/// display link that drives per-vsync drawing.
- (void) prepareOpenGL
{
    [super prepareOpenGL];
    // Make all the OpenGL calls to setup rendering
    // and build the necessary rendering objects
    [self initGL];
    // Create a display link capable of being used with all active displays
    CVDisplayLinkCreateWithActiveCGDisplays(&displayLink);
    // Set the renderer output callback function
    CVDisplayLinkSetOutputCallback(displayLink, &MyDisplayLinkCallback, (__bridge void*)self);
    // Set the display link for the current renderer
    CGLContextObj cglContext = [[self openGLContext] CGLContextObj];
    CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj];
    CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext(displayLink, cglContext, cglPixelFormat);
    // Activate the display link
    CVDisplayLinkStart(displayLink);
    // Register to be notified when the window closes so we can stop the displaylink
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(windowWillClose:)
                                                 name:NSWindowWillCloseNotification
                                               object:[self window]];
}
/// Stops the vsync callbacks when this view's window closes.
- (void) windowWillClose:(NSNotification*)notification
{
    // Stop the display link when the window is closing because default
    // OpenGL render buffers will be destroyed. If display link continues to
    // fire without renderbuffers, OpenGL draw calls will set errors.
    CVDisplayLinkStop(displayLink);
}
/// Context-dependent setup: makes the context current, enables vsync and
/// creates the OpenGLRenderer.
- (void) initGL
{
    // The reshape function may have changed the thread to which our OpenGL
    // context is attached before prepareOpenGL and initGL are called. So call
    // makeCurrentContext to ensure that our OpenGL context current to this
    // thread (i.e. makeCurrentContext directs all OpenGL calls on this thread
    // to [self openGLContext])
    [[self openGLContext] makeCurrentContext];
    // Synchronize buffer swaps with vertical refresh rate
    GLint swapInt = 1;
    [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
    // Init our renderer. Use 0 for the defaultFBO which is appropriate for
    // OSX (but not iOS since iOS apps must create their own FBO)
    _renderer = [[OpenGLRenderer alloc] initWithDefaultFBO:0];
}
/// View-resize hook: forwards the new size (in pixels) to the renderer under
/// the context lock.
- (void)reshape
{
    [super reshape];
    // We draw on a secondary thread through the display link. However, when
    // resizing the view, -drawRect is called on the main thread.
    // Add a mutex around to avoid the threads accessing the context
    // simultaneously when resizing.
    CGLLockContext([[self openGLContext] CGLContextObj]);
    // Get the view size in Points
    NSRect viewRectPoints = [self bounds];
#if SUPPORT_RETINA_RESOLUTION
    // Rendering at retina resolutions will reduce aliasing, but at the potential
    // cost of framerate and battery life due to the GPU needing to render more
    // pixels.
    // Any calculations the renderer does which use pixel dimensions, must be
    // in "retina" space. [NSView convertRectToBacking] converts point sizes
    // to pixel sizes. Thus the renderer gets the size in pixels, not points,
    // so that it can set its viewport and perform any other pixel based
    // calculations appropriately.
    // viewRectPixels will be larger than viewRectPoints for retina displays.
    // viewRectPixels will be the same as viewRectPoints for non-retina displays
    NSRect viewRectPixels = [self convertRectToBacking:viewRectPoints];
#else //if !SUPPORT_RETINA_RESOLUTION
    // App will typically render faster and use less power rendering at
    // non-retina resolutions since the GPU needs to render less pixels.
    // There is the cost of more aliasing, but it will be no-worse than
    // on a Mac without a retina display.
    // Points:Pixels is always 1:1 when not supporting retina resolutions
    NSRect viewRectPixels = viewRectPoints;
#endif // !SUPPORT_RETINA_RESOLUTION
    // Set the new dimensions in our renderer
    [_renderer resizeWithWidth:viewRectPixels.size.width
                     AndHeight:viewRectPixels.size.height];
    CGLUnlockContext([[self openGLContext] CGLContextObj]);
}
- (void)renewGState
{
    // Called whenever graphics state updated (such as window resize)
    // OpenGL rendering is not synchronous with other rendering on the OSX.
    // Therefore, call disableScreenUpdatesUntilFlush so the window server
    // doesn't render non-OpenGL content in the window asynchronously from
    // OpenGL content, which could cause flickering. (non-OpenGL content
    // includes the title bar and drawing done by the app with other APIs)
    [[self window] disableScreenUpdatesUntilFlush];
    [super renewGState];
}
- (void) drawRect: (NSRect) theRect
{
    // Called during resize operations.
    // Avoid flickering during resize by drawing immediately.
    [self drawView];
}
/// Makes the context current, renders, and swaps buffers under the context lock.
- (void) drawView
{
    [[self openGLContext] makeCurrentContext];
    // We draw on a secondary thread through the display link
    // When resizing the view, -reshape is called automatically on the main
    // thread. Add a mutex around to avoid the threads accessing the context
    // simultaneously when resizing
    CGLLockContext([[self openGLContext] CGLContextObj]);
    // NOTE(review): -render is currently a no-op in OpenGLRenderer; actual
    // frame uploads happen in setImage:/presentYUVData:.
    [_renderer render];
    CGLFlushDrawable([[self openGLContext] CGLContextObj]);
    CGLUnlockContext([[self openGLContext] CGLContextObj]);
}
- (void) dealloc
{
    // Stop the display link BEFORE releasing anything in the view,
    // otherwise the display link thread may call into the view and crash
    // when it encounters something that has been released.
    CVDisplayLinkStop(displayLink);
    CVDisplayLinkRelease(displayLink);
    // Balance the NSWindowWillCloseNotification registration made in
    // -prepareOpenGL so the center never messages a deallocated observer.
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}
/// Forwards one CoreVideo frame to the renderer on the main thread.
- (void)setImage:(CVImageBufferRef)img {
    // dispatch_sync onto the main queue from the main thread deadlocks;
    // run inline in that case and only hop queues from background threads.
    if ([NSThread isMainThread]) {
        [_renderer setImage:img];
    } else {
        dispatch_sync(dispatch_get_main_queue(), ^{
            [_renderer setImage:img];
        });
    }
}
/// Forwards a raw I420 frame to the renderer (CGFloat dimensions are
/// truncated to the renderer's GLuint parameters).
/// NOTE(review): unlike -setImage:, this is not marshalled onto the main
/// thread and takes no context lock — confirm callers invoke it from the
/// thread that owns the GL context.
-(void)presentYUVData:(NSData*)yuvdata width:(CGFloat)width height:(CGFloat)height {
    [_renderer presentYUVData:yuvdata width:width height:height];
}
@end