前言
- 本文主要使用iOS平臺下各方案對靜態(tài)圖片的人臉識別進行性能及耗時統(tǒng)計,先附上測試結(jié)果,最后有各實現(xiàn)方案的代碼。
CoreImage方案:
測試數(shù)據(jù):
test_1080x1920.JPG
檢測參數(shù)設(shè)置:
性能消耗情況:
CIDetectorAccuracyLow (低精度):
CIDetectorAccuracyHigh(高精度):
測試數(shù)據(jù):
test_3024x3024.JPG
檢測參數(shù)設(shè)置:
性能消耗情況:
CIDetectorAccuracyLow (低精度):
CIDetectorAccuracyHigh(高精度):
OpenCV方案:
測試數(shù)據(jù):
test_1080x1920.JPG
檢測參數(shù)設(shè)置:
注釋:
- scaleFactor為每一個圖像尺度中的尺度參數(shù),默認值為1.1。scaleFactor參數(shù)可以決定兩個不同大小的窗口掃描之間有多大的跳躍,這個參數(shù)設(shè)置得大,則意味著計算會變快,但如果窗口錯過了某個大小的人臉,則可能丟失物體。
- minNeighbors參數(shù)為每一個級聯(lián)矩形應(yīng)該保留的鄰近個數(shù),默認為3。minNeighbors控制著誤檢測,默認值為3表明至少有3次重疊檢測,才認為人臉確實存在。
- cvSize()指示尋找人臉的最小區(qū)域。設(shè)置這個參數(shù)過大,會以丟失小物體為代價減少計算量。
scaleFactor:1.1
minNeighbors:3
cvSize():Size(30, 30)
性能消耗情況:
人臉檢測器(Haar_1):
[圖片上傳中...(image.png-52e482-1546677776256-0)]
人臉檢測器(快速Haar):
檢測參數(shù)設(shè)置:
scaleFactor:1.1
minNeighbors:1
cvSize():Size(100, 100)
性能消耗情況:
人臉檢測器(Haar_1):
[圖片上傳中...(image.png-813064-1546677870655-0)]
人臉檢測器(快速Haar):
Vision方案:
系統(tǒng)要求:iOS11及以上
測試數(shù)據(jù):
test_1080x1920.JPG
檢測參數(shù)設(shè)置:
性能消耗情況:
測試數(shù)據(jù):
test_3024x3024.JPG
檢測參數(shù)設(shè)置:
性能消耗情況:
AVFoundation方案:
支持視頻拍攝實時預(yù)覽時進行人臉識別
通過一個特定的AVCaptureOutput類型的AVCaptureMetadataOutput可以實現(xiàn)人臉檢測功能。支持硬件加速以及同時對10個人臉進行實時檢測。
- PS:
1.所有方案都已提前初始化好檢測器,單純統(tǒng)計從處理UIImage到輸出檢測結(jié)果的耗時;
綜述:
CoreImage框架:
支持靜態(tài)圖像人臉識別檢測;
處理過程占CPU較低;高精度:50+%,低精度:約100%
低精度時識別時間只占幾十毫秒級別,但是橫向人臉會檢測失敗(橫向照片可以通過照片的方向信息,先將照片方向矯正)
高精度識別時間100多ms級別,橫向人臉檢測成功
后續(xù)擴展:可做濾鏡,人臉特征檢測(檢測處理圖片的特性,如用來檢測圖片中人臉的眼睛、嘴巴等)。
OpenCV框架 :
支持靜態(tài)圖像人臉識別檢測;
跨平臺;
處理過程占CPU很高;400+%~500+%
耗時跟參數(shù)的設(shè)置、識別精度有很大關(guān)系
但是識別參數(shù)可定制化程度高;
橫向檢測失敗(可能姿勢不對);
Vision框架:
iOS11起支持;
支持多種圖片類型:
CIImage、NSURL、NSData、CGImageRef、CVPixelBufferRef(視頻幀取出的圖像格式);
處理過程占CPU較低;50+%
耗時和CoreImage的高精度識別差不多,100多毫秒級別
橫向檢測成功
后續(xù)擴展:
人臉特征點、人臉檢測、二維碼識別、文字檢測與識別、目標跟蹤(臉部、矩形和通用模板)、矩形檢測等。
- 出于識別過程CPU占用率以及識別速度等方面考慮,對相冊的人臉識別篩選需求,建議iOS11以上系統(tǒng)采用Vision方案,iOS11以下的系統(tǒng)采用CoreImage的高精度識別方案。由于沒有足夠的人臉數(shù)據(jù),暫時無法進行識別準確率測試。
附錄
- KRFaceDetectTool.h //人臉識別工具類
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <CoreImage/CoreImage.h>
NS_ASSUME_NONNULL_BEGIN
/// Selects which face-detection backend a KRFaceDetectTool instance uses.
typedef NS_ENUM(NSInteger, KRFaceDetectResolutionType) {
KRFaceDetectResolutionTypeCoreImage,//static-image face detection (CoreImage)
KRFaceDetectResolutionTypeOpenCV,
KRFaceDetectResolutionTypeVision,
KRFaceDetectResolutionTypeAVFoundation//real-time (video) face detection
};
/// Face-detection utility wrapping the CoreImage, OpenCV, Vision and
/// AVFoundation approaches behind a single interface.
@interface KRFaceDetectTool : NSObject
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
/// Designated initializer; prepares the detector for the chosen backend.
- (instancetype)initWithType:(KRFaceDetectResolutionType)type;
/// Runs the benchmark comparison across backends and logs the timings.
- (void)testForDetectFace;
/// Returns YES when at least one face is detected in `image`.
- (BOOL)detectFaceWithImage:(UIImage*)image;
@end
NS_ASSUME_NONNULL_END
- KRFaceDetectTool.mm
#import "KRFaceDetectTool.h"
#import <CoreImage/CIDetector.h>
#import "KRCVFaceDetectTool.h"
#import <Vision/Vision.h>
#import "KRGCDExtension.h"
/// Private state for KRFaceDetectTool.
@interface KRFaceDetectTool ()
// Backend chosen at init time; drives the switch in -detectFaceWithImage:.
@property (nonatomic, assign) KRFaceDetectResolutionType type;
// CoreImage face detector (created lazily for the CoreImage backend).
@property (nonatomic, strong) CIDetector *ciDetector;
// OpenCV Haar-cascade wrapper (OpenCV backend).
@property (nonatomic, strong) KRCVFaceDetectTool *cvDetector;
// Reused Vision face-rectangles request (Vision backend).
@property (nonatomic, strong) VNImageBasedRequest *visionFaceDetectRequest;
@end
@implementation KRFaceDetectTool

#pragma mark - Lifecycle

- (instancetype)init {
    NSAssert(NO, @"Please use the given initial method !");
    return nil;
}

+ (instancetype)new {
    NSAssert(NO, @"Please use the given initial method !");
    return nil;
}

/// Designated initializer. Eagerly creates the detector for `type` so later
/// calls to -detectFaceWithImage: measure pure detection time.
- (instancetype)initWithType:(KRFaceDetectResolutionType)type
{
    self = [super init];
    if (self) {
        _type = type;
        [self prepareToDetectWithType:type];
    }
    return self;
}

// NOTE(review): the previous -dealloc only set strong properties to nil,
// which ARC already does automatically, so it has been removed.

#pragma mark - Setup

/// Lazily creates the backend-specific detector object for `type`.
- (void)prepareToDetectWithType:(KRFaceDetectResolutionType)type {
    switch (type) {
        case KRFaceDetectResolutionTypeCoreImage: {
            if (!self.ciDetector) {
                // Low accuracy: fastest CoreImage option; known to miss rotated faces.
                NSDictionary *options = @{CIDetectorAccuracy: CIDetectorAccuracyLow};
                self.ciDetector = [CIDetector detectorOfType:CIDetectorTypeFace context:nil options:options];
            }
            break;
        }
        case KRFaceDetectResolutionTypeVision: {
            if (!self.visionFaceDetectRequest) {
                // The handler only logs; callers read request.results directly
                // after the synchronous -performRequests:error: returns.
                void (^completionHandler)(VNRequest *, NSError *_Nullable) =
                    ^(VNRequest *request, NSError *_Nullable error) {
                        if (request.results.count > 0) {
                            NSLog(@"KRFaceDetectTool: has face!");
                        } else {
                            NSLog(@"KRFaceDetectTool: no face!");
                        }
                    };
                self.visionFaceDetectRequest =
                    [[VNDetectFaceRectanglesRequest alloc] initWithCompletionHandler:completionHandler];
            }
            break;
        }
        case KRFaceDetectResolutionTypeOpenCV: {
            if (!self.cvDetector) {
                self.cvDetector = [[KRCVFaceDetectTool alloc] initWithType:KRCVFaceXMLTypeHaarcascadeFrontalfaceAlt];
            }
            break;
        }
        case KRFaceDetectResolutionTypeAVFoundation:
            // Real-time detection is configured on an AVCaptureSession via
            // AVCaptureMetadataOutput; nothing to prepare for static images.
            break;
        default:
            break;
    }
}

#pragma mark - Detection

/// Synchronously detects faces in `image` with the backend chosen at init.
/// @return YES when at least one face is found.
/// Fix: the Vision and OpenCV branches previously discarded their results
/// and always fell through to `return NO`.
- (BOOL)detectFaceWithImage:(UIImage *)image {
    switch (self.type) {
        case KRFaceDetectResolutionTypeCoreImage: {
            if (image.CGImage == NULL) {
                return NO; // CIImage needs a backing CGImage
            }
            CIImage *ciImage = [[CIImage alloc] initWithCGImage:image.CGImage];
            return [self.ciDetector featuresInImage:ciImage].count > 0;
        }
        case KRFaceDetectResolutionTypeVision: {
            if (image.CGImage == NULL) {
                return NO;
            }
            VNImageRequestHandler *handler =
                [[VNImageRequestHandler alloc] initWithCGImage:image.CGImage options:@{}];
            NSError *error = nil;
            // -performRequests:error: runs synchronously, so the request's
            // results are populated as soon as it returns.
            if (![handler performRequests:@[self.visionFaceDetectRequest] error:&error]) {
                NSLog(@"KRFaceDetectTool: vision request failed: %@", error);
                return NO;
            }
            return self.visionFaceDetectRequest.results.count > 0;
        }
        case KRFaceDetectResolutionTypeOpenCV:
            return [self.cvDetector detectFaceWithImage:image];
        case KRFaceDetectResolutionTypeAVFoundation:
            // AVFoundation path targets live capture, not static images.
            return NO;
        default:
            return NO;
    }
}

#pragma mark - Benchmark

/// Benchmarks the CoreImage / OpenCV / Vision backends against the bundled
/// test image and logs the average per-run cost (via kr_dispatch_benchmark).
- (void)testForDetectFace {
    [self prepareToDetectWithType:KRFaceDetectResolutionTypeCoreImage];
    [self prepareToDetectWithType:KRFaceDetectResolutionTypeOpenCV];
    [self prepareToDetectWithType:KRFaceDetectResolutionTypeVision];
    [self prepareToDetectWithType:KRFaceDetectResolutionTypeAVFoundation];
    UIImage *testImage = [UIImage imageNamed:@"test_1080x1920.JPG"];
    // Reserved for the 3024x3024 benchmark runs (see article tables).
    UIImage *testImage2 = [UIImage imageNamed:@"test_3024x3024.JPG"];
    // testImage2 = [testImage2 imageByRotateLeft90];
    void (^coreImageHighAccuracyBlock)(void) = ^{
        @autoreleasepool {
            CIImage *ciImage = [[CIImage alloc] initWithCGImage:testImage.CGImage];
            NSArray *features = [self.ciDetector featuresInImage:ciImage];
            if (features.count) {
                NSLog(@"KRFaceDetectTool: has face!");
            } else {
                NSLog(@"KRFaceDetectTool: no face!");
            }
        }
    };
    void (^cvDetectBlock)(void) = ^{
        @autoreleasepool {
            BOOL result = [self.cvDetector detectFaceWithImage:testImage];
            NSLog(@"KRFaceDetectTool: %@", result ? @"has face!" : @"no face!");
        }
    };
    void (^visionDetectBlock)(void) = ^{
        VNImageRequestHandler *visionRequestHandler =
            [[VNImageRequestHandler alloc] initWithCGImage:testImage.CGImage options:@{}];
        [visionRequestHandler performRequests:@[self.visionFaceDetectRequest] error:nil];
    };
    int64_t result = kr_dispatch_benchmark(100, coreImageHighAccuracyBlock);
    NSLog(@"KRFaceDetectTool:xxx coreImageHighAccuracyBlock cost time:%lld", result);
    int64_t result2 = kr_dispatch_benchmark(100, cvDetectBlock);
    NSLog(@"KRFaceDetectTool:xxx cvDetectBlock cost time:%lld", result2);
    int64_t result3 = kr_dispatch_benchmark(100, visionDetectBlock);
    NSLog(@"KRFaceDetectTool:xxx visionDetectBlock cost time:%lld", result3);
}

@end
- KRCVFaceDetectTool.h //基于opencv的人臉識別工具
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
//OpenCV
#import <opencv2/opencv.hpp>
#import <opencv2/imgproc/types_c.h>
#import <opencv2/imgcodecs/ios.h>
NS_ASSUME_NONNULL_BEGIN
/// Identifies which bundled OpenCV Haar-cascade XML file to load.
typedef NS_ENUM(NSInteger, KRCVFaceXMLType) {
KRCVFaceXMLTypeHaarcascadeFrontalfaceAlt,//maps to the corresponding OpenCV face-detection XML file
KRCVFaceXMLTypeHaarcascadeFrontalfaceAlt2
};
/// OpenCV-based face-detection helper (implemented in ObjC++, .mm).
@interface KRCVFaceDetectTool : NSObject
{
// C++ ivar holding the loaded Haar cascade classifier.
cv::CascadeClassifier faceDetector;
}
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype)new NS_UNAVAILABLE;
/// Designated initializer; loads the cascade XML for `type` from the bundle.
- (instancetype)initWithType:(KRCVFaceXMLType)type;
/// Returns YES when at least one face is detected in `image`.
- (BOOL)detectFaceWithImage:(UIImage*)image;
@end
NS_ASSUME_NONNULL_END
- KRCVFaceDetectTool.mm
#import "KRCVFaceDetectTool.h"
@implementation KRCVFaceDetectTool

/// Designated initializer: loads the Haar cascade matching `type`.
- (instancetype)initWithType:(KRCVFaceXMLType)type
{
    self = [super init];
    if (self) {
        [self prepareForDetectInOpenCV:type];
    }
    return self;
}

// NOTE(review): the empty -dealloc override was removed; the C++ ivar's
// destructor runs automatically in ObjC++ under ARC.

/// Loads the cascade XML for `type` from the main bundle into faceDetector.
- (void)prepareForDetectInOpenCV:(KRCVFaceXMLType)type {
    NSString *resourceName = nil;
    switch (type) {
        case KRCVFaceXMLTypeHaarcascadeFrontalfaceAlt:
            resourceName = @"haarcascade_frontalface_alt";
            break;
        case KRCVFaceXMLTypeHaarcascadeFrontalfaceAlt2:
            resourceName = @"haarcascade_frontalface_alt2";
            break;
        default:
            break;
    }
    if (resourceName.length == 0) {
        return;
    }
    NSString *cascadePath = [[NSBundle mainBundle] pathForResource:resourceName
                                                            ofType:@"xml"];
    // Fix: report load failures instead of silently keeping an empty
    // classifier that can never detect anything.
    if (cascadePath.length == 0 || !faceDetector.load([cascadePath UTF8String])) {
        NSLog(@"KRCVFaceDetectTool: failed to load cascade %@", resourceName);
    }
}

/// Synchronously runs Haar-cascade detection on `image`.
/// @return YES when at least one face rectangle is found.
- (BOOL)detectFaceWithImage:(UIImage *)image {
    if (faceDetector.empty()) {
        return NO; // cascade never loaded — see prepareForDetectInOpenCV:
    }
    cv::Mat cvImage;
    UIImageToMat(image, cvImage);
    if (cvImage.empty()) {
        return NO;
    }
    // Convert to grayscale. UIImageToMat typically yields a 4-channel RGBA
    // mat; CV_BGR2GRAY on a 4-channel mat throws, so select the conversion
    // code from the actual channel count.
    cv::Mat gray;
    switch (cvImage.channels()) {
        case 4:
            cvtColor(cvImage, gray, CV_RGBA2GRAY);
            break;
        case 3:
            cvtColor(cvImage, gray, CV_RGB2GRAY);
            break;
        default:
            gray = cvImage; // already single-channel
            break;
    }
    // Haar detection: scaleFactor 1.1, minNeighbors 1, minimum face 100x100.
    std::vector<cv::Rect> faces;
    faceDetector.detectMultiScale(gray,
                                  faces,
                                  1.1,
                                  1,
                                  0 | CV_HAAR_SCALE_IMAGE,
                                  cv::Size(100, 100));
    return !faces.empty();
}

@end
- benchmark方法
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
// C-linkage declaration so the benchmark helper is callable from both
// ObjC (.m) and ObjC++ (.mm) translation units.
#ifdef __cplusplus
extern "C"
#endif
extern int64_t kr_dispatch_benchmark(size_t count, void (^block)(void));
NS_ASSUME_NONNULL_END
#import "KRGCDExtension.h"
extern uint64_t dispatch_benchmark(size_t count, void (^block)(void));
/// Thin wrapper over libdispatch's private dispatch_benchmark().
/// @param count Number of iterations to average over.
/// @param block The work to measure.
/// @return Average cost per run as reported by dispatch_benchmark
///         (nanoseconds — TODO confirm unit against libdispatch source),
///         or 0 when there is nothing to run.
int64_t kr_dispatch_benchmark(size_t count, void (^block)(void)) {
    // Fix: dispatch_benchmark crashes on a NULL block, and its uint64_t
    // result was implicitly converted; guard and cast explicitly.
    if (block == nil || count == 0) {
        return 0;
    }
    return (int64_t)dispatch_benchmark(count, block);
}