1. AVCaptureVideoPreviewLayer -> camera preview layer
#import <AVFoundation/AVFoundation.h>
// Assumes self.avSession is an AVCaptureSession that has already been initialized
self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.avSession];
[self.previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.previewLayer setFrame:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height)];
// insertSublayer:above: takes a CALayer, not an index; insert at index 0 instead
[self.view.layer insertSublayer:self.previewLayer atIndex:0];
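A minimal sketch of the session setup the snippet assumes (error handling elided; defaultDeviceWithMediaType: is the simplest device lookup):
self.avSession = [[AVCaptureSession alloc] init];
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input && [self.avSession canAddInput:input]) {
    [self.avSession addInput:input];
}
[self.avSession startRunning];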
2. AVSampleBufferDisplayLayer -> a layer that displays CMSampleBufferRef video output
// Frames are submitted via - (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)addAVSampleBufferDisplayLayer {
    AVSampleBufferDisplayLayer *layer = [[AVSampleBufferDisplayLayer alloc] init];
    layer.frame = CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height);
    [layer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    self.sampleDisplayLayer = layer;
    [self.view.layer addSublayer:layer];
}
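The layer can also pull frames on demand. A sketch of that pattern, where nextSampleBuffer is a hypothetical source returning a retained CMSampleBufferRef (or NULL when no frame is ready):
[self.sampleDisplayLayer requestMediaDataWhenReadyOnQueue:dispatch_get_main_queue() usingBlock:^{
    while (self.sampleDisplayLayer.isReadyForMoreMediaData) {
        CMSampleBufferRef sampleBuffer = [self nextSampleBuffer]; // hypothetical frame source
        if (!sampleBuffer) { break; }
        [self.sampleDisplayLayer enqueueSampleBuffer:sampleBuffer];
        CFRelease(sampleBuffer);
    }
}];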
3. Converting between CMSampleBufferRef and CVPixelBufferRef
CMSampleBufferRef -> CVPixelBufferRef
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
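Note that CMSampleBufferGetImageBuffer does not transfer ownership: the returned pixel buffer is only valid while the sample buffer is retained. To read the raw bytes, lock the base address first (a minimal sketch; planar formats use the per-plane accessors instead):
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer);
// ... read width x height pixels from baseAddress ...
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);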
CVPixelBufferRef -> CMSampleBufferRef
- (CMSampleBufferRef)getCMSampleBufferFromPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    if (!pixelBuffer) {
        return NULL;
    }
    @synchronized(self) {
        // Keep one retain on the incoming buffer; the previous frame's buffer
        // is released only when it is replaced here
        if (_previousPixelBuffer) {
            CVPixelBufferRelease(_previousPixelBuffer);
        }
        _previousPixelBuffer = CVPixelBufferRetain(pixelBuffer);
    }
    // No specific timing information is attached
    CMSampleTimingInfo timing = {kCMTimeInvalid, kCMTimeInvalid, kCMTimeInvalid};
    // Derive the video format description from the pixel buffer
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus result = CMVideoFormatDescriptionCreateForImageBuffer(NULL, _previousPixelBuffer, &videoInfo);
    NSParameterAssert(result == 0 && videoInfo != NULL);
    CMSampleBufferRef sampleBuffer = NULL;
    result = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, _previousPixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    NSParameterAssert(result == 0 && sampleBuffer != NULL);
    // Do not release _previousPixelBuffer here: its single retain is balanced in
    // the @synchronized block above, and releasing it again would over-release
    if (videoInfo) {
        CFRelease(videoInfo);
    }
    // Mark the frame for immediate display, since no timestamps were set above
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, YES);
    CFMutableDictionaryRef dict = (CFMutableDictionaryRef)CFArrayGetValueAtIndex(attachments, 0);
    CFDictionarySetValue(dict, kCMSampleAttachmentKey_DisplayImmediately, kCFBooleanTrue);
    return CFAutorelease(sampleBuffer);
}
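Typical use of the conversion, feeding a decoded frame back to the display layer from section 2 (decodedPixelBuffer is a placeholder name; the returned sample buffer is autoreleased, so no CFRelease is needed):
CMSampleBufferRef sampleBuffer = [self getCMSampleBufferFromPixelBuffer:decodedPixelBuffer];
if (sampleBuffer && self.sampleDisplayLayer.isReadyForMoreMediaData) {
    [self.sampleDisplayLayer enqueueSampleBuffer:sampleBuffer];
}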
4. Before encoding, the input is a CMSampleBufferRef; after decoding, the output is a CVPixelBufferRef
// AVCaptureVideoDataOutputSampleBufferDelegate / AVCaptureAudioDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Captured frames arrive here as CMSampleBufferRef; pass them on to the encoder
}
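From inside that callback a frame is typically handed to VideoToolbox for encoding. A sketch of the hand-off, assuming a VTCompressionSessionRef ivar _compressionSession configured elsewhere (requires #import <VideoToolbox/VideoToolbox.h>):
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime duration = CMSampleBufferGetDuration(sampleBuffer);
// Submit the raw frame; the compressed output arrives in the session's output callback
VTCompressionSessionEncodeFrame(_compressionSession, imageBuffer, pts, duration, NULL, NULL, NULL);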
// VideoToolbox decompression output callback (a VTDecompressionOutputCallback)
static void didDecompress(void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
    // Hand the decoded frame back through sourceFrameRefCon; the caller owns the retain
    CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
    *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);
    VideoH264Decoder *decoder = (__bridge VideoH264Decoder *)decompressionOutputRefCon;
    if ([decoder.delegate respondsToSelector:@selector(decoder:didDecodingFrame:)]) {
        [decoder.delegate decoder:decoder didDecodingFrame:pixelBuffer];
    }
}
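The callback above is registered when the decompression session is created. A sketch, assuming _formatDescription is a CMVideoFormatDescriptionRef built from the stream's SPS/PPS elsewhere:
VTDecompressionOutputCallbackRecord callbackRecord;
callbackRecord.decompressionOutputCallback = didDecompress;
callbackRecord.decompressionOutputRefCon = (__bridge void *)self;
VTDecompressionSessionRef session = NULL;
OSStatus status = VTDecompressionSessionCreate(kCFAllocatorDefault, _formatDescription, NULL, NULL, &callbackRecord, &session);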
5. Generating textures with CVOpenGLESTextureCacheCreateTextureFromImage
CVOpenGLESTextureCacheCreateTextureFromImage -> CVImageBufferRef -> Texture
CVOpenGLESTextureCacheCreateTextureFromImage -> CVPixelBufferRef -> Texture
CVOpenGLESTextureCacheCreateTextureFromImage -> CMSampleBufferRef -> CVPixelBufferRef -> Texture
CVImageBufferRef and CVPixelBufferRef are equivalent (CVPixelBufferRef is a typedef of CVImageBufferRef)
CVPixelBufferRef -> Texture -> rendered with OpenGL ES or Metal
CMSampleBufferRef -> Texture -> rendered with OpenGL ES or Metal
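A sketch of the OpenGL ES path (assumes a current EAGLContext eaglContext, a BGRA pixel buffer, and #import <OpenGLES/ES2/glext.h> for GL_BGRA; error handling elided):
CVOpenGLESTextureCacheRef textureCache = NULL;
CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, eaglContext, NULL, &textureCache);
CVOpenGLESTextureRef texture = NULL;
CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                            textureCache,
                                                            pixelBuffer,   // CVPixelBufferRef / CVImageBufferRef
                                                            NULL,
                                                            GL_TEXTURE_2D,
                                                            GL_RGBA,
                                                            (GLsizei)CVPixelBufferGetWidth(pixelBuffer),
                                                            (GLsizei)CVPixelBufferGetHeight(pixelBuffer),
                                                            GL_BGRA,
                                                            GL_UNSIGNED_BYTE,
                                                            0,             // plane index (0 for BGRA)
                                                            &texture);
if (err == kCVReturnSuccess) {
    glBindTexture(CVOpenGLESTextureGetTarget(texture), CVOpenGLESTextureGetName(texture));
    // ... draw, then CFRelease(texture) when done ...
}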
// A CPU-side alternative: CVPixelBufferRef -> CIImage -> CGImage / UIImage
CIImage *ciImage = [CIImage imageWithCVImageBuffer:pixelBuffer];
CIContext *context = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [context createCGImage:ciImage fromRect:[ciImage extent]];
UIImage *uiImage = [[UIImage alloc] initWithCIImage:ciImage];
UIImage *uiImage2 = [[UIImage alloc] initWithCGImage:cgImage];
CGImageRelease(cgImage); // createCGImage follows the Create rule; release when done
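For Metal rendering, the texture-cache approach mirrors the OpenGL ES sketch above (device is an assumed id<MTLDevice>; the pixel buffer is BGRA; requires #import <Metal/Metal.h> and <CoreVideo/CVMetalTextureCache.h>):
CVMetalTextureCacheRef metalTextureCache = NULL;
CVMetalTextureCacheCreate(kCFAllocatorDefault, NULL, device, NULL, &metalTextureCache);
CVMetalTextureRef metalTexture = NULL;
CVReturn err = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         metalTextureCache,
                                                         pixelBuffer,
                                                         NULL,
                                                         MTLPixelFormatBGRA8Unorm,
                                                         CVPixelBufferGetWidth(pixelBuffer),
                                                         CVPixelBufferGetHeight(pixelBuffer),
                                                         0,
                                                         &metalTexture);
if (err == kCVReturnSuccess) {
    id<MTLTexture> texture = CVMetalTextureGetTexture(metalTexture);
    // ... encode draw calls with texture, then CFRelease(metalTexture) ...
}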