Using the CoreImage Framework in iOS Development (Part 2)

II. Implementing Face Detection with CoreImage

Face detection is currently a very popular image-processing technique. CoreImage provides built-in interfaces for detecting faces and extracting facial features such as the eyes and mouth. Below we build a simple demo that detects facial features in real time.

First, create a view that serves as the camera scanning view:

The .h file:

//FaceView.h
@interface FaceView : UIView
@end

The .m file:

//
//  FaceView.m
//  CoreImageDemo
//
//  Created by jaki on 2018/12/22.
//  Copyright ? 2018 jaki. All rights reserved.
//

#import "FaceView.h"
#import <AVFoundation/AVFoundation.h>
#import "FaceHandle.h"
//Label of the dispatch queue used for sample-buffer processing
#define FACE_SCAN_QUEUE "FACE_SCAN_QUEUE"

@interface FaceView()<AVCaptureVideoDataOutputSampleBufferDelegate>

@property(nonatomic,strong)AVCaptureSession *captureSession;

@property(nonatomic,strong)AVCaptureDeviceInput * captureInput;

@property(nonatomic,strong)AVCaptureVideoDataOutput * captureOutput;

@property(nonatomic,strong)AVCaptureVideoPreviewLayer * videoLayer;

@property(nonatomic,strong)dispatch_queue_t queue;

@property(nonatomic,assign)BOOL hasHandle;

@property(nonatomic,strong)UIView * faceView;

@end

@implementation FaceView
#pragma mark - Override
-(instancetype)init{
    self = [super init];
    if (self) {
        [self install];
    }
    return self;
}

-(instancetype)initWithFrame:(CGRect)frame{
    self = [super initWithFrame:frame];
    if (self) {
        [self install];
    }
    return self;
}
-(void)layoutSubviews{
    [super layoutSubviews];
    self.videoLayer.frame = self.bounds;
}

#pragma mark - InnerFunc
-(void)install{
    if (![UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypeCamera]) {
        NSLog(@"Camera is not available");
        return;
    }
    //Only the current authorization status is checked here; request access beforehand if needed
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status != AVAuthorizationStatusAuthorized) {
        NSLog(@"Camera permission is required");
        return;
    }
    self.queue = dispatch_queue_create(FACE_SCAN_QUEUE, NULL);
    self.videoLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession];
    self.videoLayer.frame = CGRectZero;
    self.videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    [self.layer addSublayer:self.videoLayer];
    [self addSubview:self.faceView];
    self.faceView.frame = CGRectMake(0, 0, self.frame.size.width, self.frame.size.height);
    //Start the session after the preview layer and the overlay view have been set up
    [self.captureSession startRunning];
}
//Draw rectangles around the detected facial features
-(void)renderReactWithInfo:(NSDictionary *)info{
    for (UIView * v in self.faceView.subviews) {
        [v removeFromSuperview];
    }
    NSArray * faceArray = info[FACE_HANDLE_INFO_FACE_ARRAY];
    for (int i = 0;i < faceArray.count; i++) {
        NSDictionary * face = faceArray[i];
        NSValue * faceValue = face[FACE_HANDLE_INFO_FACE_FRAME];
        if (faceValue) {
            CGRect faceR = [faceValue CGRectValue];
            UIView * faceView = [[UIView alloc]initWithFrame:faceR];
            faceView.backgroundColor = [UIColor clearColor];
            faceView.layer.borderColor = [UIColor redColor].CGColor;
            faceView.layer.borderWidth = 2;
            [self.faceView addSubview:faceView];
        }
        NSValue * leftEye = face[FACE_HANDLE_INFO_FACE_LEFT_EYE_FRAME];
        if (leftEye) {
            CGRect leftEyeR = [leftEye CGRectValue];
            UIView * eye = [[UIView alloc]initWithFrame:leftEyeR];
            eye.backgroundColor = [UIColor clearColor];
            eye.layer.borderColor = [UIColor greenColor].CGColor;
            eye.layer.borderWidth = 2;
            [self.faceView addSubview:eye];
        }
        NSValue * rightEye = face[FACE_HANDLE_INFO_FACE_RIGHT_EYE_FRAME];
        if (rightEye) {
            CGRect rightEyeR = [rightEye CGRectValue];
            UIView * eye = [[UIView alloc]initWithFrame:rightEyeR];
            eye.backgroundColor = [UIColor clearColor];
            eye.layer.borderColor = [UIColor greenColor].CGColor;
            eye.layer.borderWidth = 2;
            [self.faceView addSubview:eye];
        }
        NSValue * mouth = face[FACE_HANDLE_INFO_FACE_MOUTH_FRAME];
        if (mouth) {
            CGRect mouthR = [mouth CGRectValue];
            UIView * mouth = [[UIView alloc]initWithFrame:mouthR];
            mouth.backgroundColor = [UIColor clearColor];
            mouth.layer.borderColor = [UIColor orangeColor].CGColor;
            mouth.layer.borderWidth = 2;
            [self.faceView addSubview:mouth];
        }
    }
}


#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate
//Called for every captured video frame
-(void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    if (self.hasHandle) {
        return;
    }
    self.hasHandle = YES;
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer,0);
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,width, height, 8, bytesPerRow, colorSpace,kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
    CGImageRelease(newImage);
    //FaceHandle is the core utility class that performs the face detection
    [[FaceHandle sharedInstance] handleImage:image viewSize:self.frame.size completed:^(BOOL success, NSDictionary *info) {
        self.hasHandle  = NO;
        [self renderReactWithInfo:info];
    }];
    
    CVPixelBufferUnlockBaseAddress(imageBuffer,0);
}



#pragma mark - setter and getter

-(AVCaptureSession *)captureSession{
    if (!_captureSession) {
        _captureSession = [[AVCaptureSession alloc]init];
        [_captureSession addInput:self.captureInput];
        [_captureSession addOutput:self.captureOutput];
    }
    return _captureSession;
}

-(AVCaptureDeviceInput *)captureInput{
    if (!_captureInput) {
        _captureInput = [AVCaptureDeviceInput deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] error:nil];
    }
    return _captureInput;
}

-(AVCaptureVideoDataOutput *)captureOutput{
    if (!_captureOutput) {
        _captureOutput = [[AVCaptureVideoDataOutput alloc]init];
        _captureOutput.alwaysDiscardsLateVideoFrames = YES;
        [_captureOutput setSampleBufferDelegate:self queue:self.queue];
        _captureOutput.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)};
    }
    return _captureOutput;
}


-(UIView *)faceView{
    if (!_faceView) {
        _faceView = [[UIView alloc]init];
        _faceView.backgroundColor = [UIColor clearColor];
    }
    return _faceView;
}



@end
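
To try the view out, it can be hosted from any view controller. The sketch below makes a few assumptions: the controller name ViewController is a placeholder, NSCameraUsageDescription is present in Info.plist, and camera access has already been granted (install only checks the current authorization status, so a real app would normally call +[AVCaptureDevice requestAccessForMediaType:completionHandler:] first).

//ViewController.m (hypothetical host controller)
#import "ViewController.h"
#import "FaceView.h"

@interface ViewController ()
@property(nonatomic,strong)FaceView * faceView;
@end

@implementation ViewController
-(void)viewDidLoad{
    [super viewDidLoad];
    //Create the scanning view so that it fills the controller's view
    self.faceView = [[FaceView alloc]initWithFrame:self.view.bounds];
    [self.view addSubview:self.faceView];
}
@end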

Run the project on a real device and the live camera feed will be rendered on screen. Next, implement the core face-detection code:

Create a FaceHandle class that inherits from NSObject:

The .h file:

extern const NSString * FACE_HANDLE_INFO_FACE_ARRAY;

extern const NSString * FACE_HANDLE_INFO_FACE_FRAME;

extern const NSString * FACE_HANDLE_INFO_FACE_LEFT_EYE_FRAME;

extern const NSString * FACE_HANDLE_INFO_FACE_RIGHT_EYE_FRAME;

extern const NSString * FACE_HANDLE_INFO_FACE_MOUTH_FRAME;

extern const NSString * FACE_HANDLE_INFO_ERROR;
@interface FaceHandle : NSObject
+(instancetype)sharedInstance;


-(void)handleImage:(UIImage *)image viewSize:(CGSize )viewSize completed:(void(^)(BOOL  success,NSDictionary * info))completion;
@end

The .m file:

#import "FaceHandle.h"
#define FACE_HANDLE_DISPATCH_QUEUE "FACE_HANDLE_DISPATCH_QUEUE"
const NSString * FACE_HANDLE_INFO_FACE_FRAME = @"FACE_HANDLE_INFO_FACE_FRAME";

const NSString * FACE_HANDLE_INFO_FACE_LEFT_EYE_FRAME = @"FACE_HANDLE_INFO_FACE_LEFT_EYE_FRAME";

const NSString * FACE_HANDLE_INFO_FACE_RIGHT_EYE_FRAME = @"FACE_HANDLE_INFO_FACE_RIGHT_EYE_FRAME";

const NSString * FACE_HANDLE_INFO_FACE_MOUTH_FRAME = @"FACE_HANDLE_INFO_FACE_MOUTH_FRAME";

const NSString * FACE_HANDLE_INFO_ERROR = @"FACE_HANDLE_INFO_ERROR";

const NSString * FACE_HANDLE_INFO_FACE_ARRAY = @"FACE_HANDLE_INFO_FACE_ARRAY";
@interface FaceHandle()

@property(nonatomic,strong)dispatch_queue_t workingQueue;

@end

@implementation FaceHandle

+(instancetype)sharedInstance{
    static dispatch_once_t onceToken;
    static FaceHandle * sharedInstance = nil;
    dispatch_once(&onceToken, ^{
        sharedInstance = [[FaceHandle alloc] init];
    });
    return sharedInstance;
}

#pragma mark - Override
-(instancetype)init{
    self = [super init];
    if (self) {
        self.workingQueue = dispatch_queue_create(FACE_HANDLE_DISPATCH_QUEUE, NULL);
    }
    return self;
}


#pragma mark - InnerFunc
-(void)handleImage:(UIImage *)image viewSize:(CGSize )viewSize completed:(void (^)(BOOL , NSDictionary *))completion{
    if (!image) {
        if (completion) {
            completion(NO,@{FACE_HANDLE_INFO_ERROR:@"Failed to capture the image"});
        }
        return;
    }
    dispatch_async(self.workingQueue, ^{
        UIImage * newImage = [self strectImage:image withSize:viewSize];
        if (newImage) {
            NSArray * faceArray = [self analyseFaceImage:newImage];
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES,@{FACE_HANDLE_INFO_FACE_ARRAY:faceArray});
                });
            }
        }else{
            if (completion) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(NO,@{FACE_HANDLE_INFO_ERROR:@"Failed to analyse the image"});
                });
            }
        }
    });
}

//Stretch the captured image to the size of the preview view
-(UIImage *)strectImage:(UIImage *)img withSize:(CGSize)size{
    UIGraphicsBeginImageContext(size);
    CGRect thumbnailRect = CGRectZero;
    thumbnailRect.origin = CGPointMake(0, 0);
    thumbnailRect.size.width  = size.width;
    thumbnailRect.size.height = size.height;
    [img drawInRect:thumbnailRect];
    UIImage * newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    
    if (newImage) {
        return  newImage;
    }
    return nil;
}

-(NSArray *)analyseFaceImage:(UIImage *)image{
    NSMutableArray * dataArray = [NSMutableArray array];
    CIImage * cImage = [CIImage imageWithCGImage:image.CGImage];
    NSDictionary* opts = [NSDictionary dictionaryWithObject:
                          CIDetectorAccuracyHigh forKey:CIDetectorAccuracy];
    //Create the detector that performs the analysis
    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil options:opts];
    //Extract the array of face features
    NSArray* features = [detector featuresInImage:cImage];
    CGSize inputImageSize = [cImage extent].size;
    CGAffineTransform  transform = CGAffineTransformIdentity;
    transform = CGAffineTransformScale(transform, 1, -1);
    transform = CGAffineTransformTranslate(transform, 0, -inputImageSize.height);
    
    for (CIFaceFeature *faceFeature in features){
        NSMutableDictionary * faceDic = [NSMutableDictionary dictionary];
        CGRect faceViewBounds = CGRectApplyAffineTransform(faceFeature.bounds, transform);
        [faceDic setValue:[NSValue valueWithCGRect:faceViewBounds] forKey:(NSString *)FACE_HANDLE_INFO_FACE_FRAME];
        CGFloat faceWidth = faceFeature.bounds.size.width;
        if(faceFeature.hasLeftEyePosition){
            CGPoint faceViewLeftPoint = CGPointApplyAffineTransform(faceFeature.leftEyePosition, transform);
            CGRect leftEyeBounds = CGRectMake(faceViewLeftPoint.x-faceWidth*0.1, faceViewLeftPoint.y-faceWidth*0.1, faceWidth*0.2, faceWidth*0.2);
            [faceDic setValue:[NSValue valueWithCGRect:leftEyeBounds] forKey:(NSString *)FACE_HANDLE_INFO_FACE_LEFT_EYE_FRAME];
        }
        
        if(faceFeature.hasRightEyePosition){
            //The point of the right eye
            CGPoint faceViewRightPoint = CGPointApplyAffineTransform(faceFeature.rightEyePosition, transform);
            CGRect rightEyeBounds = CGRectMake(faceViewRightPoint.x-faceWidth*0.1, faceViewRightPoint.y-faceWidth*0.1, faceWidth*0.2, faceWidth*0.2);
            [faceDic setValue:[NSValue valueWithCGRect:rightEyeBounds] forKey:(NSString *)FACE_HANDLE_INFO_FACE_RIGHT_EYE_FRAME];
        }
        
        if(faceFeature.hasMouthPosition){
            //The point of the mouth
            CGPoint faceViewMouthPoint = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);
            CGRect mouthBounds = CGRectMake(faceViewMouthPoint.x-faceWidth*0.2, faceViewMouthPoint.y-faceWidth*0.2, faceWidth*0.4, faceWidth*0.4);
            [faceDic setValue:[NSValue valueWithCGRect:mouthBounds] forKey:(NSString *)FACE_HANDLE_INFO_FACE_MOUTH_FRAME];
        }
        [dataArray addObject:faceDic];
    }
    return [dataArray copy];
}
@end

Open Baidu, search for a few face photos, and run them through the detector; as the screenshot below shows, the detection rate is quite high:

[Screenshot: detected faces with feature rectangles overlaid]

III. Other Image Detection Features in Core Image

Besides face detection, CIDetector also supports detecting QR codes, rectangles, and text regions.

Rectangle detection locates rectangular boundaries within an image. The core code is as follows:

-(NSArray *)analyseRectImage:(UIImage *)image{
    NSMutableArray * dataArray = [NSMutableArray array];
    CIImage * cImage = [CIImage imageWithCGImage:image.CGImage];
    NSDictionary* opts = [NSDictionary dictionaryWithObject:
                          CIDetectorAccuracyHigh forKey:CIDetectorAccuracy];
    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeRectangle
                                              context:nil options:opts];
    NSArray* features = [detector featuresInImage:cImage];
    CGSize inputImageSize = [cImage extent].size;
    CGAffineTransform  transform = CGAffineTransformIdentity;
    transform = CGAffineTransformScale(transform, 1, -1);
    transform = CGAffineTransformTranslate(transform, 0, -inputImageSize.height);
    
    for (CIRectangleFeature *feature in features){
        NSLog(@"%lu",features.count);
        NSMutableDictionary * dic = [NSMutableDictionary dictionary];
        CGRect viewBounds = CGRectApplyAffineTransform(feature.bounds, transform);
        [dic setValue:[NSValue valueWithCGRect:viewBounds] forKey:@"rectBounds"];
        CGPoint topLeft = CGPointApplyAffineTransform(feature.topLeft, transform);
        [dic setValue:[NSValue valueWithCGPoint:topLeft] forKey:@"topLeft"];
        CGPoint topRight = CGPointApplyAffineTransform(feature.topRight, transform);
        [dic setValue:[NSValue valueWithCGPoint:topRight] forKey:@"topRight"];
        CGPoint bottomLeft = CGPointApplyAffineTransform(feature.bottomLeft, transform);
        [dic setValue:[NSValue valueWithCGPoint:bottomLeft] forKey:@"bottomLeft"];
        CGPoint bottomRight = CGPointApplyAffineTransform(feature.bottomRight, transform);
        [dic setValue:[NSValue valueWithCGPoint:bottomRight] forKey:@"bottomRight"];
        [dataArray addObject:dic];
    }
    return [dataArray copy];
}

The result looks like this:

[Screenshot: detected rectangle region]

QR code scanning can not only locate the QR code within an image but also decode its content. The core code is as follows:

-(NSArray *)analyseQRImage:(UIImage *)image{
    NSMutableArray * dataArray = [NSMutableArray array];
    CIImage * cImage = [CIImage imageWithCGImage:image.CGImage];
    NSDictionary* opts = [NSDictionary dictionaryWithObject:
                          CIDetectorAccuracyHigh forKey:CIDetectorAccuracy];
    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeQRCode
                                              context:nil options:opts];
    NSArray* features = [detector featuresInImage:cImage];
    CGSize inputImageSize = [cImage extent].size;
    CGAffineTransform  transform = CGAffineTransformIdentity;
    transform = CGAffineTransformScale(transform, 1, -1);
    transform = CGAffineTransformTranslate(transform, 0, -inputImageSize.height);
    
    for (CIQRCodeFeature *feature in features){
        NSMutableDictionary * dic = [NSMutableDictionary dictionary];
        CGRect viewBounds = CGRectApplyAffineTransform(feature.bounds, transform);
        [dic setValue:[NSValue valueWithCGRect:viewBounds] forKey:@"rectBounds"];
        CGPoint topLeft = CGPointApplyAffineTransform(feature.topLeft, transform);
        [dic setValue:[NSValue valueWithCGPoint:topLeft] forKey:@"topLeft"];
        CGPoint topRight = CGPointApplyAffineTransform(feature.topRight, transform);
        [dic setValue:[NSValue valueWithCGPoint:topRight] forKey:@"topRight"];
        CGPoint bottomLeft = CGPointApplyAffineTransform(feature.bottomLeft, transform);
        [dic setValue:[NSValue valueWithCGPoint:bottomLeft] forKey:@"bottomLeft"];
        CGPoint bottomRight = CGPointApplyAffineTransform(feature.bottomRight, transform);
        [dic setValue:[NSValue valueWithCGPoint:bottomRight] forKey:@"bottomRight"];
        [dic setValue:feature.messageString forKey:@"content"];
        [dataArray addObject:dic];
    }
    return [dataArray copy];
}
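
As a quick usage sketch (the host object, the call site, and the asset name qr_sample are assumptions), the method above could be driven from a still image like this:

//Hypothetical call site: decode a QR code from a bundled image
UIImage * qrImage = [UIImage imageNamed:@"qr_sample"];
NSArray * results = [self analyseQRImage:qrImage];
for (NSDictionary * item in results) {
    NSLog(@"QR content: %@", item[@"content"]);
    NSLog(@"QR bounds: %@", item[@"rectBounds"]);
}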

Core Image also supports detecting text regions. The core code is as follows:

-(NSArray *)analyseTextImage:(UIImage *)image{
    NSMutableArray * dataArray = [NSMutableArray array];
    CIImage * cImage = [CIImage imageWithCGImage:image.CGImage];
    NSDictionary* opts = [NSDictionary dictionaryWithObject:
                          CIDetectorAccuracyHigh forKey:CIDetectorAccuracy];
    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeText
                                              context:nil options:opts];
    NSArray* features = [detector featuresInImage:cImage options:@{CIDetectorReturnSubFeatures:@YES}];
    CGSize inputImageSize = [cImage extent].size;
    CGAffineTransform  transform = CGAffineTransformIdentity;
    transform = CGAffineTransformScale(transform, 1, -1);
    transform = CGAffineTransformTranslate(transform, 0, -inputImageSize.height);
    
    for (CITextFeature *feature in features){
        NSLog(@"%@",feature.subFeatures);
        NSMutableDictionary * dic = [NSMutableDictionary dictionary];
        CGRect viewBounds = CGRectApplyAffineTransform(feature.bounds, transform);
        [dic setValue:[NSValue valueWithCGRect:viewBounds] forKey:@"rectBounds"];
        CGPoint topLeft = CGPointApplyAffineTransform(feature.topLeft, transform);
        [dic setValue:[NSValue valueWithCGPoint:topLeft] forKey:@"topLeft"];
        CGPoint topRight = CGPointApplyAffineTransform(feature.topRight, transform);
        [dic setValue:[NSValue valueWithCGPoint:topRight] forKey:@"topRight"];
        CGPoint bottomLeft = CGPointApplyAffineTransform(feature.bottomLeft, transform);
        [dic setValue:[NSValue valueWithCGPoint:bottomLeft] forKey:@"bottomLeft"];
        CGPoint bottomRight = CGPointApplyAffineTransform(feature.bottomRight, transform);
        [dic setValue:[NSValue valueWithCGPoint:bottomRight] forKey:@"bottomRight"];
        
        [dataArray addObject:dic];
    }
    return [dataArray copy];
}

The result looks like this:

[Screenshot: detected text regions]

IV. Core Classes in CoreImage

1. The CIColor class

CIColor is the class Core Image uses to describe colors.

//Create a CIColor from a CGColor
+ (instancetype)colorWithCGColor:(CGColorRef)c;
//Convenience constructors
+ (instancetype)colorWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b alpha:(CGFloat)a;
+ (instancetype)colorWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b;
+ (nullable instancetype)colorWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b alpha:(CGFloat)a colorSpace:(CGColorSpaceRef)colorSpace;
+ (nullable instancetype)colorWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b colorSpace:(CGColorSpaceRef)colorSpace;
- (instancetype)initWithCGColor:(CGColorRef)c;
//Create a CIColor from a string representation
+ (instancetype)colorWithString:(NSString *)representation;
- (instancetype)initWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b alpha:(CGFloat)a;
- (instancetype)initWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b;
- (nullable instancetype)initWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b alpha:(CGFloat)a colorSpace:(CGColorSpaceRef)colorSpace;
- (nullable instancetype)initWithRed:(CGFloat)r green:(CGFloat)g blue:(CGFloat)b colorSpace:(CGColorSpaceRef)colorSpace;
//Number of color components
@property (readonly) size_t numberOfComponents;
//The color components
@property (readonly) const CGFloat *components;
//Alpha value
@property (readonly) CGFloat alpha;
//Color space
@property (readonly) CGColorSpaceRef colorSpace;
//Red, green and blue components
@property (readonly) CGFloat red;
@property (readonly) CGFloat green;
@property (readonly) CGFloat blue;
//Convenience color constants
@property (class, strong, readonly) CIColor *blackColor  ;
@property (class, strong, readonly) CIColor *whiteColor  ;
@property (class, strong, readonly) CIColor *grayColor   ;
@property (class, strong, readonly) CIColor *redColor    ;
@property (class, strong, readonly) CIColor *greenColor  ;
@property (class, strong, readonly) CIColor *blueColor   ;
@property (class, strong, readonly) CIColor *cyanColor   ;
@property (class, strong, readonly) CIColor *magentaColor ;
@property (class, strong, readonly) CIColor *yellowColor  ;
@property (class, strong, readonly) CIColor *clearColor;
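
A short sketch of creating and inspecting a CIColor (the color values are arbitrary):

//Create a CIColor and read back its components
CIColor * color = [CIColor colorWithRed:0.2 green:0.4 blue:0.6 alpha:1.0];
NSLog(@"number of components: %zu", color.numberOfComponents); //4 for RGBA
NSLog(@"r=%f g=%f b=%f a=%f", color.red, color.green, color.blue, color.alpha);
//A CIColor can also be built from a UIColor's CGColor
CIColor * orange = [CIColor colorWithCGColor:[UIColor orangeColor].CGColor];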

2. The CIImage class

CIImage is the most central class in Core Image; it describes an image object.

//Create a new CIImage instance
+ (CIImage *)imageWithCGImage:(CGImageRef)image;
//Create a new CIImage instance with an options dictionary
/*
Keys that can appear in the options dictionary:
kCIImageColorSpace                     Color space, a CGColorSpaceRef
kCIImageNearestSampling                BOOL, whether to use nearest-neighbour sampling
kCIImageProperties                     Dictionary of image properties
kCIImageApplyOrientationProperty       BOOL, whether to apply the orientation property
kCIImageTextureTarget                  NSNumber, the OpenGL texture target constant
kCIImageTextureFormat                  NSNumber, the OpenGL texture format
kCIImageAuxiliaryDepth                 BOOL, whether to return the auxiliary depth image
kCIImageAuxiliaryDisparity             BOOL, whether to return the auxiliary disparity image
kCIImageAuxiliaryPortraitEffectsMatte  BOOL, whether to return the portrait effects matte
*/
+ (CIImage *)imageWithCGImage:(CGImageRef)image
                      options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from a CGLayer
+ (CIImage *)imageWithCGLayer:(CGLayerRef)layer NS_DEPRECATED_MAC(10_4,10_11);
+ (CIImage *)imageWithCGLayer:(CGLayerRef)layer
                      options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from bitmap data
+ (CIImage *)imageWithBitmapData:(NSData *)data
                     bytesPerRow:(size_t)bytesPerRow
                            size:(CGSize)size
                          format:(CIFormat)format
                      colorSpace:(nullable CGColorSpaceRef)colorSpace;
//Create a CIImage from an OpenGL or Metal texture
+ (CIImage *)imageWithTexture:(unsigned int)name
                         size:(CGSize)size
                      flipped:(BOOL)flipped
                   colorSpace:(nullable CGColorSpaceRef)colorSpace;
+ (CIImage *)imageWithTexture:(unsigned int)name
                         size:(CGSize)size
                      flipped:(BOOL)flipped
                      options:(nullable NSDictionary<CIImageOption, id> *)options;
+ (nullable CIImage *)imageWithMTLTexture:(id<MTLTexture>)texture
                                  options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from a URL
+ (nullable CIImage *)imageWithContentsOfURL:(NSURL *)url;
+ (nullable CIImage *)imageWithContentsOfURL:(NSURL *)url
                                     options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from NSData
+ (nullable CIImage *)imageWithData:(NSData *)data;
+ (nullable CIImage *)imageWithData:(NSData *)data
                            options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from a CVImageBufferRef
+ (CIImage *)imageWithCVImageBuffer:(CVImageBufferRef)imageBuffer;
+ (CIImage *)imageWithCVImageBuffer:(CVImageBufferRef)imageBuffer
                            options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from a CVPixelBufferRef
+ (CIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer;
+ (CIImage *)imageWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer
                            options:(nullable NSDictionary<CIImageOption, id> *)options;
//Create a CIImage from a solid color
+ (CIImage *)imageWithColor:(CIColor *)color;
//Create an empty CIImage
+ (CIImage *)emptyImage;
//Initializers
- (instancetype)initWithCGImage:(CGImageRef)image;
- (instancetype)initWithCGImage:(CGImageRef)image
                        options:(nullable NSDictionary<CIImageOption, id> *)options;
- (instancetype)initWithCGLayer:(CGLayerRef)layer;
- (instancetype)initWithCGLayer:(CGLayerRef)layer
                        options:(nullable NSDictionary<CIImageOption, id> *)options;
- (instancetype)initWithBitmapData:(NSData *)data
                       bytesPerRow:(size_t)bytesPerRow
                              size:(CGSize)size
                            format:(CIFormat)format
                        colorSpace:(nullable CGColorSpaceRef)colorSpace;
- (instancetype)initWithTexture:(unsigned int)name
                           size:(CGSize)size
                        flipped:(BOOL)flipped
                     colorSpace:(nullable CGColorSpaceRef)colorSpace;
- (instancetype)initWithTexture:(unsigned int)name
                           size:(CGSize)size
                        flipped:(BOOL)flipped
                        options:(nullable NSDictionary<CIImageOption, id> *)options;
- (nullable instancetype)initWithMTLTexture:(id<MTLTexture>)texture
                                    options:(nullable NSDictionary<CIImageOption, id> *)options;
- (nullable instancetype)initWithContentsOfURL:(NSURL *)url;
- (nullable instancetype)initWithContentsOfURL:(NSURL *)url
                                       options:(nullable NSDictionary<CIImageOption, id> *)options;
- (instancetype)initWithCVImageBuffer:(CVImageBufferRef)imageBuffer;
- (instancetype)initWithCVImageBuffer:(CVImageBufferRef)imageBuffer
                              options:(nullable NSDictionary<CIImageOption, id> *)options;
- (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer
                              options:(nullable NSDictionary<CIImageOption, id> *)options;
- (instancetype)initWithColor:(CIColor *)color;
//Apply a transform or orientation and return the resulting CIImage
- (CIImage *)imageByApplyingTransform:(CGAffineTransform)matrix;
- (CIImage *)imageByApplyingOrientation:(int)orientation;
- (CIImage *)imageByApplyingCGOrientation:(CGImagePropertyOrientation)orientation;
//Get the affine transform for a given orientation
- (CGAffineTransform)imageTransformForOrientation:(int)orientation;
- (CGAffineTransform)imageTransformForCGOrientation:(CGImagePropertyOrientation)orientation;
//Composite this image over a destination image
- (CIImage *)imageByCompositingOverImage:(CIImage *)dest;
//Crop to a rectangle
- (CIImage *)imageByCroppingToRect:(CGRect)rect;
//Extend the edge pixels of the image to infinity
- (CIImage *)imageByClampingToExtent;
//Clamp to the given rect and return a new image
- (CIImage *)imageByClampingToRect:(CGRect)rect;
//Apply a named filter and return the filtered image
- (CIImage *)imageByApplyingFilter:(NSString *)filterName
               withInputParameters:(nullable NSDictionary<NSString *,id> *)params;
- (CIImage *)imageByApplyingFilter:(NSString *)filterName;
//The image extent (bounding rectangle)
@property (NS_NONATOMIC_IOSONLY, readonly) CGRect extent;
//The image properties dictionary
@property (atomic, readonly) NSDictionary<NSString *,id> *properties;
//The URL the image was created from, if any
@property (atomic, readonly, nullable) NSURL *url;
//Color space
@property (atomic, readonly, nullable) CGColorSpaceRef colorSpace;
//The CGImage the image was created from, if any
@property (nonatomic, readonly, nullable) CGImageRef CGImage;
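
A minimal sketch that chains a few of these APIs together; CISepiaTone and kCIInputIntensityKey are standard Core Image filter constants, while the asset name photo is a placeholder:

//Build a CIImage, apply a filter and crop the result
UIImage * source = [UIImage imageNamed:@"photo"];
CIImage * input = [CIImage imageWithCGImage:source.CGImage];
CIImage * sepia = [input imageByApplyingFilter:@"CISepiaTone"
                           withInputParameters:@{kCIInputIntensityKey:@0.8}];
CIImage * cropped = [sepia imageByCroppingToRect:CGRectMake(0, 0, 200, 200)];
NSLog(@"extent: %@", NSStringFromCGRect(cropped.extent));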

3. The CIContext class

CIContext is the Core Image context object. It is used to render images and to convert them into image objects of other frameworks.

//Create a CIContext from a CGContextRef
/*
Keys that can appear in the options dictionary:
kCIContextOutputColorSpace       The output color space
kCIContextWorkingColorSpace      The working color space
kCIContextWorkingFormat          The working pixel format of the buffers
kCIContextHighQualityDownsample  BOOL
kCIContextOutputPremultiplied    Whether the output alpha is premultiplied
kCIContextCacheIntermediates     BOOL
kCIContextUseSoftwareRenderer    Whether to use the software renderer
kCIContextPriorityRequestLow     Whether to render at low priority
*/
+ (CIContext *)contextWithCGContext:(CGContextRef)cgctx
                            options:(nullable NSDictionary<CIContextOption, id> *)options;
//Create a context object
+ (CIContext *)contextWithOptions:(nullable NSDictionary<CIContextOption, id> *)options;
+ (CIContext *)context;
- (instancetype)initWithOptions:(nullable NSDictionary<CIContextOption, id> *)options;
//Create a CIContext that renders with the given Metal device
+ (CIContext *)contextWithMTLDevice:(id<MTLDevice>)device;
+ (CIContext *)contextWithMTLDevice:(id<MTLDevice>)device
                            options:(nullable NSDictionary<CIContextOption, id> *)options;
//The working color space
@property (nullable, nonatomic, readonly) CGColorSpaceRef workingColorSpace;
//The working pixel format
@property (nonatomic, readonly) CIFormat workingFormat;
//Draw a CIImage
- (void)drawImage:(CIImage *)image
          atPoint:(CGPoint)atPoint
         fromRect:(CGRect)fromRect;
- (void)drawImage:(CIImage *)image
           inRect:(CGRect)inRect
         fromRect:(CGRect)fromRect;
//Create a CGImageRef from a CIImage
- (nullable CGImageRef)createCGImage:(CIImage *)image
                            fromRect:(CGRect)fromRect;
- (nullable CGImageRef)createCGImage:(CIImage *)image
                            fromRect:(CGRect)fromRect
                              format:(CIFormat)format
                          colorSpace:(nullable CGColorSpaceRef)colorSpace;
//Create a CGLayer
- (nullable CGLayerRef)createCGLayerWithSize:(CGSize)size
                                        info:(nullable CFDictionaryRef)info;
//Render the image into bitmap data
- (void)render:(CIImage *)image
      toBitmap:(void *)data
      rowBytes:(ptrdiff_t)rowBytes
        bounds:(CGRect)bounds
        format:(CIFormat)format;
//Render the image into a pixel buffer
- (void)render:(CIImage *)image 
toCVPixelBuffer:(CVPixelBufferRef)buffer
    colorSpace:(nullable CGColorSpaceRef)colorSpace;
- (void)render:(CIImage *)image
toCVPixelBuffer:(CVPixelBufferRef)buffer
        bounds:(CGRect)bounds
    colorSpace:(nullable CGColorSpaceRef)colorSpace;
//Render the image into a Metal texture
- (void)render:(CIImage *)image
  toMTLTexture:(id<MTLTexture>)texture
 commandBuffer:(nullable id<MTLCommandBuffer>)commandBuffer
        bounds:(CGRect)bounds
    colorSpace:(CGColorSpaceRef)colorSpace;
//Clear internal caches
- (void)clearCaches;
//Maximum input image size
- (CGSize)inputImageMaximumSize;
//Maximum output image size
- (CGSize)outputImageMaximumSize;
//Encode a CIImage as TIFF data
- (nullable NSData*) TIFFRepresentationOfImage:(CIImage*)image
                                        format:(CIFormat)format
                                    colorSpace:(CGColorSpaceRef)colorSpace
                                       options:(NSDictionary<CIImageRepresentationOption, id>*)options;
//Encode a CIImage as JPEG data
- (nullable NSData*) JPEGRepresentationOfImage:(CIImage*)image
                                    colorSpace:(CGColorSpaceRef)colorSpace
                                       options:(NSDictionary<CIImageRepresentationOption, id>*)options;
//Encode a CIImage as HEIF data
- (nullable NSData*) HEIFRepresentationOfImage:(CIImage*)image
                                        format:(CIFormat)format
                                    colorSpace:(CGColorSpaceRef)colorSpace
                                       options:(NSDictionary<CIImageRepresentationOption, id>*)options;
//Encode a CIImage as PNG data
- (nullable NSData*) PNGRepresentationOfImage:(CIImage*)image
                                       format:(CIFormat)format
                                   colorSpace:(CGColorSpaceRef)colorSpace
                                      options:(NSDictionary<CIImageRepresentationOption, id>*)options;
//Write a CIImage to a TIFF file
- (BOOL) writeTIFFRepresentationOfImage:(CIImage*)image
                                  toURL:(NSURL*)url
                                 format:(CIFormat)format
                             colorSpace:(CGColorSpaceRef)colorSpace 
                                options:(NSDictionary<CIImageRepresentationOption, id>*)options
                                  error:(NSError **)errorPtr;
//Write a CIImage to a PNG file
- (BOOL) writePNGRepresentationOfImage:(CIImage*)image
                                 toURL:(NSURL*)url
                                format:(CIFormat)format
                            colorSpace:(CGColorSpaceRef)colorSpace
                               options:(NSDictionary<CIImageRepresentationOption, id>*)options
                                 error:(NSError **)errorPtr;
//Write a CIImage to a JPEG file
- (BOOL) writeJPEGRepresentationOfImage:(CIImage*)image
                                  toURL:(NSURL*)url
                             colorSpace:(CGColorSpaceRef)colorSpace
                                options:(NSDictionary<CIImageRepresentationOption, id>*)options
                                  error:(NSError **)errorPtr;
//Write a CIImage to a HEIF file
- (BOOL) writeHEIFRepresentationOfImage:(CIImage*)image
                                  toURL:(NSURL*)url
                                 format:(CIFormat)format
                             colorSpace:(CGColorSpaceRef)colorSpace
                                options:(NSDictionary<CIImageRepresentationOption, id>*)options
                                  error:(NSError **)errorPtr;
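
The typical rendering path is to turn a CIImage back into a CGImage through a CIContext. A minimal sketch, where cropped is assumed to be the CIImage produced in the previous sketch:

//Render a CIImage into a CGImage and wrap it in a UIImage
CIContext * context = [CIContext contextWithOptions:nil];
CGImageRef cgImage = [context createCGImage:cropped fromRect:cropped.extent];
UIImage * rendered = [UIImage imageWithCGImage:cgImage];
CGImageRelease(cgImage); //createCGImage: follows the Create rule, so release the CGImageRef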

4. The CIDetector class

CIDetector was demonstrated earlier. It is a very powerful class in the Core Image framework that can perform sophisticated image detection. Its interface is as follows:

//Create a CIDetector instance
/*
type specifies the kind of detection:
CIDetectorTypeFace       Face detection
CIDetectorTypeRectangle  Rectangle detection
CIDetectorTypeText       Text region detection
CIDetectorTypeQRCode     QR code detection

options is an optional configuration dictionary with the following keys:
CIDetectorAccuracy           Detection accuracy: CIDetectorAccuracyLow or CIDetectorAccuracyHigh
CIDetectorTracking           Whether to track features across frames
CIDetectorMinFeatureSize     Minimum feature size, between 0 and 1, relative to the image
CIDetectorMaxFeatureCount    Maximum number of features to return
CIDetectorImageOrientation   Image orientation
CIDetectorEyeBlink           BOOL, whether to detect the eye-blink expression
CIDetectorSmile              BOOL, whether to detect the smile expression
CIDetectorFocalLength        Focal length
CIDetectorAspectRatio        Expected aspect ratio of detected rectangles
CIDetectorReturnSubFeatures  Whether to return sub-features
*/
+ (nullable CIDetector *)detectorOfType:(NSString*)type
                                context:(nullable CIContext *)context
                                options:(nullable NSDictionary<NSString *,id> *)options;
//Analyse an image and return the array of detected features
- (NSArray<CIFeature *> *)featuresInImage:(CIImage *)image;
- (NSArray<CIFeature *> *)featuresInImage:(CIImage *)image
                                  options:(nullable NSDictionary<NSString *,id> *)options;
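
A sketch of creating a face detector with a few of the options above; ciImage is assumed to be an existing CIImage. Note that the expression keys CIDetectorSmile and CIDetectorEyeBlink are passed to featuresInImage:options: rather than to the detector constructor:

//Face detector that also reports smiles and closed eyes
NSDictionary * options = @{CIDetectorAccuracy:CIDetectorAccuracyHigh,
                           CIDetectorTracking:@YES,
                           CIDetectorMinFeatureSize:@0.1};
CIDetector * faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                               context:nil
                                               options:options];
NSArray * features = [faceDetector featuresInImage:ciImage
                                           options:@{CIDetectorSmile:@YES,
                                                     CIDetectorEyeBlink:@YES}];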

5. CIFeature and its subclasses

CIFeature and its subclasses define the data model for detected features.

@interface CIFeature : NSObject {}
//Feature type, one of:
/*
CIFeatureTypeFace
CIFeatureTypeRectangle
CIFeatureTypeQRCode
CIFeatureTypeText
*/
@property (readonly, retain) NSString *type;
//The bounds of the feature within the image
@property (readonly, assign) CGRect bounds;
@end

//Face feature object
@interface CIFaceFeature : CIFeature
//Bounds of the face
@property (readonly, assign) CGRect bounds;
//Whether a left eye was detected
@property (readonly, assign) BOOL hasLeftEyePosition;
//Position of the left eye
@property (readonly, assign) CGPoint leftEyePosition;
//Whether a right eye was detected
@property (readonly, assign) BOOL hasRightEyePosition;
//Position of the right eye
@property (readonly, assign) CGPoint rightEyePosition;
//Whether a mouth was detected
@property (readonly, assign) BOOL hasMouthPosition;
//Position of the mouth
@property (readonly, assign) CGPoint mouthPosition;
//Whether a tracking ID is available
@property (readonly, assign) BOOL hasTrackingID;
//Tracking ID of the face across frames
@property (readonly, assign) int trackingID;
//Whether a tracking frame count is available
@property (readonly, assign) BOOL hasTrackingFrameCount;
//Number of frames the face has been tracked for
@property (readonly, assign) int trackingFrameCount;
//Whether a face rotation angle is available
@property (readonly, assign) BOOL hasFaceAngle;
//Rotation angle of the face
@property (readonly, assign) float faceAngle;
//Whether the face is smiling
@property (readonly, assign) BOOL hasSmile;
//Whether the left eye is closed
@property (readonly, assign) BOOL leftEyeClosed;
//Whether the right eye is closed
@property (readonly, assign) BOOL rightEyeClosed;

@end

//Rectangle feature object
@interface CIRectangleFeature : CIFeature
//Bounds and corner points of the rectangle
@property (readonly) CGRect bounds;
@property (readonly) CGPoint topLeft;
@property (readonly) CGPoint topRight;
@property (readonly) CGPoint bottomLeft;
@property (readonly) CGPoint bottomRight;

@end

//QR code feature object
@interface CIQRCodeFeature : CIFeature
//Bounds and corner points of the QR code
@property (readonly) CGRect bounds;
@property (readonly) CGPoint topLeft;
@property (readonly) CGPoint topRight;
@property (readonly) CGPoint bottomLeft;
@property (readonly) CGPoint bottomRight;
//The decoded message string
@property (nullable, readonly) NSString* messageString;
//Descriptor containing the raw QR code data
@property (nullable, readonly) CIQRCodeDescriptor *symbolDescriptor NS_AVAILABLE(10_13, 11_0);

@end

//Text feature object
@interface CITextFeature : CIFeature
//Bounds and corner points of the text region
@property (readonly) CGRect bounds;
@property (readonly) CGPoint topLeft;
@property (readonly) CGPoint topRight;
@property (readonly) CGPoint bottomLeft;
@property (readonly) CGPoint bottomRight;
//Sub-features (for example, individual text regions)
@property (nullable, readonly) NSArray *subFeatures;


@end
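
Continuing the detector sketch from the previous section, the returned CIFaceFeature objects expose the expression flags directly (features is assumed to be the array returned by featuresInImage:options:):

//Inspect expression information on each detected face
for (CIFaceFeature * face in features) {
    NSLog(@"face bounds: %@", NSStringFromCGRect(face.bounds));
    if (face.hasSmile) {
        NSLog(@"the face is smiling");
    }
    if (face.leftEyeClosed || face.rightEyeClosed) {
        NSLog(@"at least one eye is closed");
    }
}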