Preface
The AVFoundation framework is a very important framework in iOS: all software and hardware control related to audio and video lives in this framework. This article mainly covers media capture and video acquisition on iOS.
Media Capture Workflow
Quick Overview
- AVCaptureSession: the media capture session (covering both audio and video). It is responsible for delivering the captured audio/video data to its outputs, and a single AVCaptureSession can have multiple inputs and outputs. To capture video or audio, a client instantiates an AVCaptureSession and adds the appropriate inputs (e.g. AVCaptureDeviceInput) and outputs to it.
- AVCaptureInput and AVCaptureDevice: the objects that manage input data from a device. An AVCaptureDeviceInput is created from an AVCaptureDevice and then added to the AVCaptureSession, which manages it.
- AVCaptureOutput: the object that manages the data output by the session.
- AVCaptureVideoPreviewLayer and AVSampleBufferDisplayLayer: camera preview layers, both subclasses of CALayer. The former requires an AVCaptureSession to be created; the latter can be created directly and displays frames by enqueuing CMSampleBufferRef objects.
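To show how these classes fit together before the full listing, here is a minimal wiring sketch (illustrative only: error handling, permission checks, and threading are omitted, and the detailed version appears in the next section):

// Minimal wiring sketch: session -> device input -> data output -> preview layer.
AVCaptureSession *session = [[AVCaptureSession alloc] init];
AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
if (input && [session canAddInput:input]) {
    [session addInput:input];
}
AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
if ([session canAddOutput:output]) {
    [session addOutput:output];
}
AVCaptureVideoPreviewLayer *preview = [AVCaptureVideoPreviewLayer layerWithSession:session];
// Add `preview` to a view's layer and call [session startRunning] to begin capture.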
Sample Code
- (void)configureCamera {
    /// Parameter setup
    // Back camera by default
    AVCaptureDevicePosition position = AVCaptureDevicePositionBack;
    // Frame rate
    int frameRate = 25;
    // Pixel format
    OSType videoFormat = kCVPixelFormatType_32BGRA;
    // Resolution height
    int resolutionHeight = 720;
    /// Create the AVCaptureSession object
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    /// Set the resolution
    session.sessionPreset = AVCaptureSessionPreset1280x720;
    /// Get the camera device
    AVCaptureDevice *captureDevice;
    // The discovery session is already filtered by `position` (back camera by default)
    AVCaptureDeviceDiscoverySession *deviceDiscoverySession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera] mediaType:AVMediaTypeVideo position:position];
    NSArray *devices = deviceDiscoverySession.devices;
    for (AVCaptureDevice *device in devices) {
        // Pick the device whose position matches the requested one
        if (device.position == position) {
            captureDevice = device;
            break;
        }
    }
    /// Configure the frame rate and resolution height
    BOOL isSuccess = NO;
    for (AVCaptureDeviceFormat *vFormat in [captureDevice formats]) {
        CMFormatDescriptionRef description = vFormat.formatDescription;
        float maxRate = ((AVFrameRateRange *)[vFormat.videoSupportedFrameRateRanges objectAtIndex:0]).maxFrameRate;
        if (maxRate >= frameRate && CMFormatDescriptionGetMediaSubType(description) == videoFormat) {
            // Compare the resolution supported by this format with the requested resolution
            CMVideoDimensions dims = CMVideoFormatDescriptionGetDimensions(description);
            if (dims.height == resolutionHeight && dims.width == [self.class getResolutionWidthByHeight:resolutionHeight]) {
                [session beginConfiguration];
                if ([captureDevice lockForConfiguration:NULL]) {
                    captureDevice.activeFormat = vFormat;
                    [captureDevice setActiveVideoMinFrameDuration:CMTimeMake(1, frameRate)];
                    [captureDevice setActiveVideoMaxFrameDuration:CMTimeMake(1, frameRate)];
                    [captureDevice unlockForConfiguration];
                    isSuccess = YES;
                } else {
                    NSLog(@"%s: lockForConfiguration failed", __func__);
                }
                [session commitConfiguration];
                if (isSuccess) {
                    break;
                }
            }
        }
    }
    NSError *error;
    // Add the input
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (input == nil || error != nil) {
        NSLog(@"Failed to configure the device input: %@", error.localizedDescription);
        return;
    }
    if ([session canAddInput:input]) {
        [session addInput:input];
    }
    // Add the output
    AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    if ([session canAddOutput:videoDataOutput]) {
        [session addOutput:videoDataOutput];
    }
    videoDataOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:videoFormat]
                                                                forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    // When this property is YES, frames that arrive while the dispatch queue is still blocked in
    // captureOutput:didOutputSampleBuffer:fromConnection: are dropped immediately. When it is NO,
    // the delegate gets more time to process old frames before new ones are dropped, but the app's
    // memory usage may grow significantly. The default is YES.
    videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    // Create a queue to receive the data on
    dispatch_queue_t videoQueue = dispatch_queue_create("video_receive_queue", NULL);
    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
    // Create the preview layer
    AVCaptureVideoPreviewLayer *videoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:session];
    videoPreviewLayer.backgroundColor = [[UIColor blackColor] CGColor];
    CGRect frame = [videoPreviewLayer bounds];
    NSLog(@"previewViewLayer = %@", NSStringFromCGRect(frame));
    // Set the size and the fill mode
    [videoPreviewLayer setFrame:[UIScreen mainScreen].bounds];
    [videoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspect];
    if ([[videoPreviewLayer connection] isVideoOrientationSupported]) {
        [videoPreviewLayer.connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
    } else {
        NSLog(@"Video orientation is not supported");
    }
    // Add the layer to whichever view should show the preview
    UIView *showView = [[UIView alloc] init];
    [showView.layer insertSublayer:videoPreviewLayer atIndex:0];
}
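Note that the method above only builds and configures the session; nothing is captured until -startRunning is called. Below is a minimal sketch, assuming the configured session is kept in a property such as self.session (the original method keeps it in a local variable, so in practice you would store it somewhere first). Since startRunning blocks until capture actually starts, it is usually dispatched off the main thread:

// Assumption: self.session holds the AVCaptureSession configured above.
// startRunning blocks the calling thread until the session is running,
// so call it on a background queue rather than the main thread.
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
    [self.session startRunning];
});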
// Capture callback; if you need to encode H.264/H.265, do it here.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    /*
    // An alternative way to display frames. In a real app, create the layer and view once
    // and only call enqueueSampleBuffer: here, rather than rebuilding them for every frame.
    AVSampleBufferDisplayLayer *previewLayer = [AVSampleBufferDisplayLayer layer];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [previewLayer enqueueSampleBuffer:sampleBuffer];
    // Add the layer to whichever view should show the preview
    UIView *showView = [[UIView alloc] init];
    [showView.layer insertSublayer:previewLayer atIndex:0];
    */
}
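If you do want to encode (H.264/H.265) or otherwise process frames in this callback, the usual first step is to pull the CVPixelBufferRef out of the sample buffer. A minimal sketch of that step follows (the BGRA layout matches the kCVPixelFormatType_32BGRA setting chosen earlier; the encoder itself is out of scope here):

CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == NULL) {
    return;
}
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
void *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer); // raw BGRA bytes
// ... hand the pixel buffer to an encoder (for example VideoToolbox) here ...
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
NSLog(@"captured frame %zux%zu at %p", width, height, baseAddress);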
/// Landscape/portrait orientation should be taken into account here; it is not handled yet.
+ (int)getResolutionWidthByHeight:(int)height {
    switch (height) {
        case 2160:
            return 3840;
        case 1080:
            return 1920;
        case 720:
            return 1280;
        case 480:
            return 640;
        default:
            return -1;
    }
}
Tips: before configuring capture, remember to request camera permission. If the permission has not been granted you need to handle that case yourself; it is omitted here.
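For reference, here is a minimal sketch of that permission check using AVCaptureDevice's authorization API (the completion handler runs on an arbitrary queue, so dispatch back to the main queue before touching UI):

switch ([AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo]) {
    case AVAuthorizationStatusAuthorized:
        // Already authorized: safe to configure the capture session.
        break;
    case AVAuthorizationStatusNotDetermined:
        // First request: NSCameraUsageDescription must be present in Info.plist.
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            // Configure the session here if granted is YES.
        }];
        break;
    default:
        // Denied or restricted: ask the user to enable camera access in Settings.
        break;
}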
A demo link will be shared once it has been tidied up.
If anything else is unclear, leave a comment and I will reply when I see it.
If you found this useful, please give it a like. Reposting is welcome; please include a link to the original article.