First, AVFoundation (#import <AVFoundation/AVFoundation.h>) is used to capture the live camera image shown on the phone screen.
Main code:
@interface ViewController ()<AVCaptureVideoDataOutputSampleBufferDelegate>
// Hardware device (the camera)
@property (nonatomic, strong) AVCaptureDevice *device;
// Input stream
@property (nonatomic, strong) AVCaptureDeviceInput *input;
// Coordinates the data flowing from input to output
@property (nonatomic, strong) AVCaptureSession *session;
// Preview layer
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
// Output stream
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
@end
- (void)viewDidLoad {
[super viewDidLoad];
[self.view.layer addSublayer:self.previewLayer];
[self.session startRunning];
}
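Note that on iOS 10 and later the app must also declare NSCameraUsageDescription in its Info.plist, and it is safer to request camera access before starting the session. A minimal sketch (placing it in viewDidLoad is an assumption, not part of the original code):
// Request camera permission, then start the session back on the main queue
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
if (granted) {
dispatch_async(dispatch_get_main_queue(), ^{
[self.session startRunning];
});
}
}];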
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
// AVCaptureVideoDataOutput delivers the live frames; this delegate callback fires once per captured frame, typically 30 times per second or more
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
// Set the video orientation, otherwise the captured image comes out rotated
[connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
// Grab the frame as a UIImage (feed it to the face-detection code below)
UIImage *largeImage = [self imageFromSampleBuffer:sampleBuffer];
}
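Because this callback fires at the camera frame rate and the delegate queue above is the main queue, running face detection synchronously here will stall the UI. One way to keep up is to let at most one detection pass run at a time on a background queue; a sketch meant to live inside the callback body, where detectFacesInImage: is a hypothetical helper wrapping the CoreImage code at the end of this post:
// Throttling sketch: allow only one detection pass in flight at a time
static BOOL detecting = NO;
if (!detecting) {
detecting = YES;
dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
[self detectFacesInImage:largeImage]; // hypothetical helper, see the CoreImage code below
detecting = NO;
});
}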
// Convert a CMSampleBufferRef to a UIImage
-(UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer{
// Get the Core Video image buffer holding the media data of this CMSampleBuffer
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
// Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get the base address of the pixel buffer
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
// Get the number of bytes per row of the pixel buffer
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
// Get the width and height of the pixel buffer
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
// Create a device-dependent RGB color space
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
// Create a bitmap graphics context from the sample buffer data
CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
// Create a Quartz image from the pixel data in the bitmap context
CGImageRef quartzImage = CGBitmapContextCreateImage(context);
// Unlock the pixel buffer
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
// Release the context and the color space
CGContextRelease(context);
CGColorSpaceRelease(colorSpace);
// Create a UIImage from the Quartz image
UIImage *image = [UIImage imageWithCGImage:quartzImage];
// Release the Quartz image
CGImageRelease(quartzImage);
return image;
}
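Incidentally, if the frame is only needed for the CIDetector below, the whole CGBitmapContext round trip can be skipped; a shorter sketch that builds a CIImage straight from the pixel buffer:
// Cheaper alternative when a CIImage is all the detector needs
-(CIImage *)ciImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer{
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
return [CIImage imageWithCVPixelBuffer:imageBuffer];
}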
-(AVCaptureDeviceInput *)input{
if (_input == nil) {
_input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:nil];
}
return _input;
}
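The device property is used by the input getter above but is never initialized anywhere in the original code; a minimal lazy getter, assuming the default (back) camera is what is wanted:
-(AVCaptureDevice *)device{
if (_device == nil) {
// The default video device is the back camera on most iPhones
_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}
return _device;
}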
-(AVCaptureVideoDataOutput *)videoDataOutput{
if (_videoDataOutput == nil) {
_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
[_videoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
// Set the pixel format; otherwise CGBitmapContextCreate fails when converting the CMSampleBufferRef to a UIImage
[_videoDataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
}
return _videoDataOutput;
}
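If the handler cannot keep up, it is usually better to drop late frames than to queue them; one optional line (not in the original) worth adding inside this getter:
// Drop frames that arrive while a previous frame is still being processed
_videoDataOutput.alwaysDiscardsLateVideoFrames = YES;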
-(AVCaptureSession *)session{
if (_session == nil) {
_session = [[AVCaptureSession alloc] init];
if ([_session canAddInput:self.input]) {
[_session addInput:self.input];
}
if ([_session canAddOutput:self.videoDataOutput]) {
[_session addOutput:self.videoDataOutput];
}
}
return _session;
}
-(AVCaptureVideoPreviewLayer *)previewLayer{
if (_previewLayer == nil) {
_previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
_previewLayer.frame = self.view.layer.bounds;
}
return _previewLayer;
}
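By default the preview layer letterboxes the video inside its bounds; if the preview should fill the screen, the layer's gravity can be set inside the getter above (an optional addition):
// Fill the layer, cropping the video edges instead of letterboxing
_previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;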
Face detection itself mainly uses the CoreImage framework (#import <CoreImage/CoreImage.h>); simple code follows:
// Detection accuracy: choose between CIDetectorAccuracyHigh (more processing, more accurate) and CIDetectorAccuracyLow (less processing); CIDetectorAccuracyHigh is used here for better accuracy
NSDictionary *opts = [NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy];
// Convert the UIImage into a CIImage
CIImage *faceImage = [CIImage imageWithCGImage:image.CGImage];
CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:opts];
// Detect the faces; the orientation option takes an EXIF orientation value (5 here)
NSArray *features = [faceDetector featuresInImage:faceImage options:[NSDictionary dictionaryWithObject:@(5) forKey:CIDetectorImageOrientation]];
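Each element of features is a CIFaceFeature carrying the face bounds and eye/mouth positions. Core Image uses a bottom-left origin, so the bounds need flipping before they can be drawn in UIKit coordinates; a sketch under that assumption:
// Walk the detected faces, flipping bounds into UIKit's top-left coordinate space
for (CIFaceFeature *faceFeature in features) {
CGRect faceRect = faceFeature.bounds;
faceRect.origin.y = faceImage.extent.size.height - CGRectGetMaxY(faceRect);
NSLog(@"face at %@", NSStringFromCGRect(faceRect));
if (faceFeature.hasLeftEyePosition) {
NSLog(@"left eye at %@", NSStringFromCGPoint(faceFeature.leftEyePosition));
}
}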