iOS人臉識(shí)別、自定義相機(jī)慧邮、圖像掃描癌压、系統(tǒng)自帶二維碼識(shí)別
基于 OpenCV 的人臉識(shí)別
視頻捕獲
用到的類衙荐,主要有這些:
// Hardware device (camera)
@property (nonatomic, strong) AVCaptureDevice *device;
// Input stream wrapping the device
@property (nonatomic, strong) AVCaptureDeviceInput *input;
// Session coordinating data flow between inputs and outputs
@property (nonatomic, strong) AVCaptureSession *session;
// Live preview layer
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer;
// Output streams
@property (nonatomic, strong) AVCaptureStillImageOutput *stillImageOutput; // captures still photos
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput; // raw video frames: live images and recording
@property (nonatomic, strong) AVCaptureMetadataOutput *metadataOutput; // QR-code scanning and face detection
1.首先我們需要在手機(jī)上把圖像顯示出來(lái)
1.1 獲取硬件設(shè)備
/// Lazily returns the default video capture device, configured once on
/// first access with automatic flash, white balance, focus and exposure —
/// each mode is applied only when the hardware reports support for it.
/// Device properties may only be changed between lockForConfiguration:
/// and unlockForConfiguration.
- (AVCaptureDevice *)device {
    if (_device != nil) {
        return _device;
    }
    _device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (![_device lockForConfiguration:nil]) {
        // Could not acquire the configuration lock; return the raw device.
        return _device;
    }
    // Automatic flash
    if ([_device isFlashModeSupported:AVCaptureFlashModeAuto]) {
        _device.flashMode = AVCaptureFlashModeAuto;
    }
    // Continuous automatic white balance
    if ([_device isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
        _device.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
    }
    // Continuous autofocus
    if ([_device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
        _device.focusMode = AVCaptureFocusModeContinuousAutoFocus;
    }
    // Continuous automatic exposure
    if ([_device isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
        _device.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
    }
    [_device unlockForConfiguration];
    return _device;
}
device
有很多屬性可以調(diào)整(注意調(diào)整device屬性的時(shí)候需要上鎖, 調(diào)整完再解鎖):
閃光燈
typedef NS_ENUM(NSInteger, AVCaptureFlashMode) {
AVCaptureFlashModeOff = 0,
AVCaptureFlashModeOn = 1,
AVCaptureFlashModeAuto = 2
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
前后置攝像頭
typedef NS_ENUM(NSInteger, AVCaptureDevicePosition) {
AVCaptureDevicePositionUnspecified = 0,
AVCaptureDevicePositionBack = 1,
AVCaptureDevicePositionFront = 2
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
手電筒
typedef NS_ENUM(NSInteger, AVCaptureTorchMode) {
AVCaptureTorchModeOff = 0,
AVCaptureTorchModeOn = 1,
AVCaptureTorchModeAuto = 2,
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
對(duì)焦
typedef NS_ENUM(NSInteger, AVCaptureFocusMode) {
AVCaptureFocusModeLocked = 0,
AVCaptureFocusModeAutoFocus = 1,
AVCaptureFocusModeContinuousAutoFocus = 2,
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
曝光
typedef NS_ENUM(NSInteger, AVCaptureExposureMode) {
AVCaptureExposureModeLocked = 0,
AVCaptureExposureModeAutoExpose = 1,
AVCaptureExposureModeContinuousAutoExposure = 2,
AVCaptureExposureModeCustom NS_ENUM_AVAILABLE_IOS(8_0) = 3,
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
白平衡
typedef NS_ENUM(NSInteger, AVCaptureWhiteBalanceMode) {
AVCaptureWhiteBalanceModeLocked = 0,
AVCaptureWhiteBalanceModeAutoWhiteBalance = 1,
AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance = 2,
} NS_AVAILABLE(10_7, 4_0) __TVOS_PROHIBITED;
1.2 獲取硬件的輸入流
/// Lazily creates the capture input for the default device.
/// Creating the input is what triggers the system camera-permission alert.
- (AVCaptureDeviceInput *)input {
    if (_input == nil) {
        NSError *error = nil;
        _input = [[AVCaptureDeviceInput alloc] initWithDevice:self.device error:&error];
        if (_input == nil) {
            // Surface the failure (e.g. permission denied, no camera)
            // instead of silently swallowing it with error:nil.
            NSLog(@"Failed to create AVCaptureDeviceInput: %@", error);
        }
    }
    return _input;
}
創(chuàng)建輸入流的時(shí)候希柿,會(huì)彈出alert向用戶獲取相機(jī)權(quán)限
1.3 需要一個(gè)用來(lái)協(xié)調(diào)輸入和輸出數(shù)據(jù)的會(huì)話诊沪,然后把input添加到會(huì)話中
/// Lazily builds the capture session and wires in the device input.
- (AVCaptureSession *)session {
    if (_session != nil) {
        return _session;
    }
    AVCaptureSession *newSession = [[AVCaptureSession alloc] init];
    if ([newSession canAddInput:self.input]) {
        [newSession addInput:self.input];
    }
    _session = newSession;
    return _session;
}
1.4 然后我們需要一個(gè)預(yù)覽圖像的層
/// Lazily creates the preview layer bound to the capture session,
/// sized to fill this controller's view.
-(AVCaptureVideoPreviewLayer *)previewLayer{
if (_previewLayer == nil) {
_previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
_previewLayer.frame = self.view.layer.bounds;
}
return _previewLayer;
}
1.5 最后把previewLayer添加到self.view.layer上
[self.view.layer addSublayer:self.previewLayer];
1.6 找個(gè)合適的位置,讓session運(yùn)行起來(lái)曾撤,比如viewWillAppear
// Start the capture pipeline when the view is about to appear.
// NOTE(review): -startRunning is a blocking call; Apple recommends
// invoking it off the main queue for a smoother UI — confirm for your use.
-(void)viewWillAppear:(BOOL)animated{
[super viewWillAppear:animated];
[self.session startRunning];
}
2.搞一個(gè)按鈕用來(lái)控制手電筒
#pragma mark - 手電筒
#pragma mark - Torch
/// Toggles the torch on/off, tracking state via the button's selected flag.
- (void)openTorch:(UIButton *)button {
    button.selected = !button.selected;
    // Only devices with both a torch and a flash (i.e. the back camera)
    // qualify. The original NSClassFromString(@"AVCaptureDevice") check was
    // pointless — the class is linked directly by this file.
    if (![self.device hasTorch] || ![self.device hasFlash]) {
        return;
    }
    // Device configuration must happen inside the lock; the original
    // ignored the lock result, which can leave the device unconfigured.
    if (![self.device lockForConfiguration:nil]) {
        NSLog(@"Unable to lock device for torch configuration");
        return;
    }
    self.device.torchMode = button.selected ? AVCaptureTorchModeOn : AVCaptureTorchModeOff;
    [self.device unlockForConfiguration];
}
3.再搞一個(gè)按鈕來(lái)切換前后置攝像頭
#pragma mark - 切換前后攝像頭
#pragma mark - Switch between front and back cameras
/// Swaps the session's video input between the front and back cameras.
- (void)switchCamera {
    NSUInteger cameraCount = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count];
    if (cameraCount <= 1) {
        return; // Nothing to switch to.
    }
    // Pick the opposite position from the current input's device.
    AVCaptureDevicePosition currentPosition = [[self.input device] position];
    AVCaptureDevicePosition targetPosition =
        (currentPosition == AVCaptureDevicePositionFront) ? AVCaptureDevicePositionBack
                                                          : AVCaptureDevicePositionFront;
    AVCaptureDevice *newCamera = [self cameraWithPosition:targetPosition];
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newCamera error:nil];
    if (newInput == nil) {
        return;
    }
    // Swap inputs inside a configuration transaction; restore the old
    // input if the session rejects the new one.
    [self.session beginConfiguration];
    [self.session removeInput:self.input];
    if ([self.session canAddInput:newInput]) {
        [self.session addInput:newInput];
        self.input = newInput;
    } else {
        [self.session addInput:self.input];
    }
    [self.session commitConfiguration];
}
/// Returns the first video capture device at the given position, or nil
/// when no such camera exists.
- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    for (AVCaptureDevice *candidate in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (candidate.position == position) {
            return candidate;
        }
    }
    return nil;
}
4.使用AVCaptureStillImageOutput捕獲靜態(tài)圖片
4.1 創(chuàng)建一個(gè)AVCaptureStillImageOutput對(duì)象
/// Lazy still-image output used to grab single photos from the session.
- (AVCaptureStillImageOutput *)stillImageOutput {
    if (_stillImageOutput != nil) {
        return _stillImageOutput;
    }
    _stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    return _stillImageOutput;
}
4.2將stillImageOutput添加到session中
if ([_session canAddOutput:self.stillImageOutput]) {
[_session addOutput:self.stillImageOutput];
}
4.3 搞個(gè)拍照按鈕端姚,截取靜態(tài)圖片
//AVCaptureStillImageOutput截取靜態(tài)圖片，會(huì)有快門聲
// Captures a single still frame (plays the shutter sound) and hands the
// resulting UIImage to -saveImageToPhotoAlbum: (defined elsewhere in this file).
-(void)screenshot{
// The video connection links the still-image output to the camera input.
AVCaptureConnection * videoConnection = [self.stillImageOutput connectionWithMediaType:AVMediaTypeVideo];
if (!videoConnection) {
NSLog(@"take photo failed!");
return;
}
[self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer == NULL) {
return;
}
// Convert the raw sample buffer to JPEG data, then to a UIImage.
NSData * imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
UIImage *image = [UIImage imageWithData:imageData];
[self saveImageToPhotoAlbum:image];
}];
}
5.使用AVCaptureVideoOutput實(shí)時(shí)獲取預(yù)覽圖像挤悉,這也是樓主的項(xiàng)目需求所在
5.1 創(chuàng)建AVCaptureVideoOutput對(duì)象
/// Lazy video-data output delivering raw BGRA frames for live processing.
- (AVCaptureVideoDataOutput *)videoDataOutput {
    if (_videoDataOutput == nil) {
        _videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
        // BGRA matches what CGBitmapContextCreate consumes in
        // -imageFromSampleBuffer: (little-endian, alpha-first).
        _videoDataOutput.videoSettings =
            @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
        // Drop frames that arrive while the delegate is still busy;
        // without this, slow processing on the main queue backs up the
        // capture pipeline and stalls the preview.
        _videoDataOutput.alwaysDiscardsLateVideoFrames = YES;
        [_videoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    }
    return _videoDataOutput;
}
5.2 將videoDataOutput添加session中
if ([_session canAddOutput:self.videoDataOutput]) {
[_session addOutput:self.videoDataOutput];
}
5.3 遵守AVCaptureVideoDataOutputSampleBufferDelegate協(xié)議渐裸,并實(shí)現(xiàn)它的代理方法
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
//AVCaptureVideoDataOutput獲取實(shí)時(shí)圖像,這個(gè)代理方法的回調(diào)頻率很快,幾乎與手機(jī)屏幕的刷新頻率一樣快
// AVCaptureVideoDataOutput delivers one callback per captured frame —
// roughly at the capture frame rate, on the queue passed to
// setSampleBufferDelegate:queue: (the main queue in this demo).
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
// largeImage is an instance variable declared elsewhere in this file.
largeImage = [self imageFromSampleBuffer:sampleBuffer];
}
5.4 實(shí)現(xiàn)imageFromSampleBuffer:方法，將CMSampleBufferRef轉(zhuǎn)為UIImage
//CMSampleBufferRef轉(zhuǎn)UIImage
/// Converts a CMSampleBufferRef into a UIImage.
/// Assumes the buffer is kCVPixelFormatType_32BGRA, as configured on the
/// video data output's videoSettings.
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Core Video image buffer backing this sample's media data.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == NULL) {
        // Not a video sample (or no backing buffer): nothing to convert.
        // The original dereferenced this unconditionally and could crash.
        return nil;
    }
    // The base address must be locked before the CPU may read the pixels.
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Device-dependent RGB color space + bitmap context over the BGRA pixels
    // (little-endian byte order, premultiplied alpha first = BGRA layout).
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Snapshot the context's pixel data into a Quartz image.
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer and release Quartz resources.
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // Wrap the CGImage in a UIImage, then balance the create above.
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    CGImageRelease(quartzImage);
    return image;
}
5.5 解決圖片顛倒
#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
//AVCaptureVideoDataOutput獲取實(shí)時(shí)圖像尚氛,這個(gè)代理方法的回調(diào)頻率很快,幾乎與手機(jī)屏幕的刷新頻率一樣快
// Same per-frame callback as in 5.3, but it first forces the connection's
// video orientation to portrait so the generated UIImage is not rotated.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
// Normalise the orientation to fix the upside-down/rotated image.
[connection setVideoOrientation:AVCaptureVideoOrientationPortrait];
largeImage = [self imageFromSampleBuffer:sampleBuffer];
}
6.設(shè)定攝像頭的尺寸
[_session setSessionPreset:AVCaptureSessionPreset1920x1080];
7.使用AVCaptureMetadataOutput識(shí)別二維碼
7.1 創(chuàng)建AVCaptureMetadataOutput對(duì)象
/// Lazy metadata output used for QR-code scanning and face detection.
- (AVCaptureMetadataOutput *)metadataOutput {
    if (_metadataOutput == nil) {
        _metadataOutput = [[AVCaptureMetadataOutput alloc] init];
        [_metadataOutput setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];
        // Scan area. rectOfInterest is in NORMALIZED metadata coordinates
        // (origin top-left, components in 0..1, rotated relative to the
        // view) — assigning self.view.bounds here, as the original did,
        // is a bug. (0,0,1,1) scans the full frame; to restrict scanning
        // to a view rect, convert it with the preview layer's
        // -metadataOutputRectOfInterestForRect:.
        _metadataOutput.rectOfInterest = CGRectMake(0, 0, 1, 1);
    }
    return _metadataOutput;
}
7.2 將metadataOutput添加到session中洞渤,并且設(shè)置掃描類型
// Fixed typo: the original began with "f (" which does not compile.
if ([_session canAddOutput:self.metadataOutput]) {
    [_session addOutput:self.metadataOutput];
    // Scan formats — metadataObjectTypes may only be set AFTER the output
    // has been added to the session; before that the types are unavailable.
    self.metadataOutput.metadataObjectTypes = @[
        AVMetadataObjectTypeQRCode,
        AVMetadataObjectTypeEAN13Code,
        AVMetadataObjectTypeEAN8Code,
        AVMetadataObjectTypeCode128Code
    ];
}
7.3 遵守AVCaptureMetadataOutputObjectsDelegate協(xié)議阅嘶,并實(shí)現(xiàn)其代理方法
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
// Called when metadata (here: barcodes/QR codes) is detected in the stream.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count > 0) {
        // Stop scanning as soon as the first code is found.
        [self.session stopRunning];
        AVMetadataMachineReadableCodeObject *metadataObject = metadataObjects.firstObject;
        // Guard the cast: other metadata types (e.g. faces) can also land
        // in this callback and do not respond to -stringValue.
        if ([metadataObject isKindOfClass:[AVMetadataMachineReadableCodeObject class]]) {
            NSLog(@"二維碼內(nèi)容 : %@",metadataObject.stringValue);
        }
    }
}
8.關(guān)于人臉識(shí)別
人臉識(shí)別也是基于AVCaptureMetadataOutput實(shí)現(xiàn)的,跟二維碼識(shí)別的區(qū)別在于您宪,掃描類型:
self.metadataOutput.metadataObjectTypes = @[AVMetadataObjectTypeFace];
#pragma mark - AVCaptureMetadataOutputObjectsDelegate
// Face-detection callback: converts the detected face's bounds from
// normalized metadata coordinates into preview-layer coordinates and
// positions the outline view over the face.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    if (metadataObjects.count > 0) {
        // BUG FIX: the original referenced an undeclared `metadataObject`;
        // take the first detected object from the delivered array.
        AVMetadataObject *metadataObject = metadataObjects.firstObject;
        if ([metadataObject isKindOfClass:[AVMetadataFaceObject class]]) {
            // Transform from metadata space to the preview layer's space.
            AVMetadataObject *faceData = [self.previewLayer transformedMetadataObjectForMetadataObject:metadataObject];
            NSLog(@"%@",NSStringFromCGRect(faceData.bounds));
            self.faceOutlineView.frame = faceData.bounds;
        }
    }
}
//
// ViewController.m
// VideoRecoderDemo
//
// Created by Damon on 16/8/29.
// Copyright ? 2016年 damonvvong. All rights reserved.
//
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <CoreImage/CoreImage.h>
@interface ViewController ()<AVCaptureFileOutputRecordingDelegate>
@property (nonatomic, strong) AVCaptureSession *captureSession; /**< Capture session */
@property (nonatomic, weak) AVCaptureDeviceInput *captureVideoInput; /**< Video capture INPUT (original comment mislabeled it as an output); weak — the session retains it */
@property (nonatomic, strong) AVCaptureMovieFileOutput *captureMovieFileOutput; /**< Movie-file output for recording */
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer; /**< Camera preview layer */
@end
@implementation ViewController
/// Builds the capture pipeline: session -> inputs -> movie output ->
/// preview layer, then starts the session.
- (void)viewDidLoad {
    [super viewDidLoad];
    // Capture session with a high-quality preset.
    self.captureSession = ({
        AVCaptureSession *session = [[AVCaptureSession alloc] init];
        if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) {
            [session setSessionPreset:AVCaptureSessionPresetHigh];
        }
        session;
    });
    NSError *error = nil;
    [self setupSessionInputs:&error];
    // Movie-file output used for recording.
    self.captureMovieFileOutput = ({
        AVCaptureMovieFileOutput *output = [[AVCaptureMovieFileOutput alloc] init];
        // BUG FIX: add the output to the session FIRST.
        // -connectionWithMediaType: returns nil until the output belongs to
        // a session, so the original's stabilization setup (done before
        // addOutput:) was a silent no-op.
        if ([self.captureSession canAddOutput:output]) {
            [self.captureSession addOutput:output];
        }
        AVCaptureConnection *captureConnection = [output connectionWithMediaType:AVMediaTypeVideo];
        if ([captureConnection isVideoStabilizationSupported]) {
            captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto;
        }
        output;
    });
    // Preview layer showing the live camera feed.
    self.captureVideoPreviewLayer = ({
        AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.captureSession];
        previewLayer.frame = CGRectMake(0, 0, [UIScreen mainScreen].bounds.size.width, [UIScreen mainScreen].bounds.size.height);
        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; // fill the screen, cropping as needed
        [self.view.layer addSublayer:previewLayer];
        self.view.layer.masksToBounds = YES;
        previewLayer;
    });
    [self.captureSession startRunning];
}
/// 初始化 捕捉輸入
/// Configures the session's inputs (camera + microphone).
/// @param error Out-parameter populated by AVCaptureDeviceInput on failure.
/// @return YES when both inputs were created and added to the session.
- (BOOL)setupSessionInputs:(NSError **)error {
    // Camera input.
    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:({
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }) error:error];
    if (!videoInput) { return NO; }
    if ([self.captureSession canAddInput:videoInput]) {
        [self.captureSession addInput:videoInput];
        // FIX: keep the reference — the captureVideoInput property was
        // declared in the class extension but never assigned.
        self.captureVideoInput = videoInput;
    } else {
        return NO;
    }
    // Microphone input.
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:({
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    }) error:error];
    if (!audioInput) { return NO; }
    if ([self.captureSession canAddInput:audioInput]) {
        [self.captureSession addInput:audioInput];
    } else {
        return NO;
    }
    return YES;
}
/// Tap anywhere to toggle recording: starts writing to a temp file,
/// or stops the recording in progress.
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    // Forward to super so default touch handling is preserved
    // (the original omitted this lifecycle-override call).
    [super touchesBegan:touches withEvent:event];
    if ([self.captureMovieFileOutput isRecording]) {
        [self.captureMovieFileOutput stopRecording]; // stop recording
        return;
    }
    // Match the recording orientation to what the preview layer shows.
    AVCaptureConnection *captureConnection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    captureConnection.videoOrientation = [self.captureVideoPreviewLayer connection].videoOrientation;
    [self.captureMovieFileOutput startRecordingToOutputFileURL:({
        // Recording destination in the temp directory; remove any stale
        // file first, otherwise the movie-file output fails to start.
        NSURL *url = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temp.mov"]];
        if ([[NSFileManager defaultManager] fileExistsAtPath:url.path]) {
            [[NSFileManager defaultManager] removeItemAtURL:url error:nil];
        }
        url;
    }) recordingDelegate:self];
}
// AVCaptureFileOutputRecordingDelegate: recording finished (second tap,
// or an error stopped it) — kick off compression of the captured movie.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error{
[self videoCompression];
}
/// Re-encodes temp.mov at medium quality into tempLow.mov in the temp dir.
- (void)videoCompression {
    NSLog(@"begin");
    NSURL *tempurl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"temp.mov"]];
    // Load the freshly recorded asset.
    AVAsset *asset = [AVAsset assetWithURL:tempurl];
    // Export session performing the transcode.
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
    // BUG FIX: AVAssetExportSession fails when the destination file already
    // exists, so remove any leftover from a previous run first.
    NSURL *outputURL = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"tempLow.mov"]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputURL.path]) {
        [[NSFileManager defaultManager] removeItemAtURL:outputURL error:nil];
    }
    session.outputURL = outputURL;
    // Use the framework constant rather than the raw UTI string
    // @"com.apple.quicktime-movie".
    session.outputFileType = AVFileTypeQuickTimeMovie;
    // Export asynchronously and report failures instead of logging
    // "end" unconditionally.
    [session exportAsynchronouslyWithCompletionHandler:^{
        if (session.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"end");
        } else {
            NSLog(@"export failed: %@", session.error);
        }
    }];
}
@end