EasyAR iOS SDK features:
Match images in the camera scene, and overlay images, 3D models, or videos on top of a successfully matched image.
What each class in the EasyAR iOS SDK does:
http://m.blog.csdn.net/article/details?id=49615303
Header file | Description |
---|---|
base.hpp | The base class for most EasyAR classes. |
augmenter.hpp | Augmenter is a renderer: it takes frames from the tracker and renders the camera image as the background of the AR scene. |
barcode.hpp | BarCodeScanner implements QR code scanning and recognition. |
camera.hpp | CameraDevice implements a camera device. CameraCalibration stores the camera calibration data. |
frame.hpp | Frame stores the tracking data, including the current camera image, the tracked targets, and other information. |
image.hpp | Image stores image data and represents an image in memory. It exposes the raw data as a byte array and provides accessors for width/height/stride and other properties. |
imagetarget.hpp | ImageTarget represents a planar image target that can be tracked by an ImageTracker. |
imagetracker.hpp | ImageTracker implements target detection and tracking. |
matrix.hpp | Matrix represents an m x n matrix. |
player.hpp | VideoPlayer is the video playback class. EasyAR supports ordinary videos, transparent videos, and streaming media. Video content is rendered onto the texture passed to SetRenderTexture. |
storage.hpp | StorageType indicates where an image, JSON file, video, or other file is stored. It specifies the root directory, and all related APIs accept paths relative to that root. |
target.hpp | Target is the base class for everything in EasyAR that can be tracked by an ImageTracker or other algorithms. A target's properties only become valid after it has been loaded. |
utility.hpp | My own understanding: the SDK initialization class. |
Rough EasyAR workflow on iOS:
With the environment configured:
- Register the SDK (a minimal sketch follows this list);
- Initialize the OpenGL drawing context;
- Initialize the Augmenter renderer;
- Initialize the CameraDevice and attach the renderer to the camera;
- Load the image(s) to be matched;
- Open the camera, render the camera image via OpenGL, and start matching;
- Matching and tracking succeed.
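The registration step is sketched below, assuming the EasyAR::initialize entry point that utility.hpp exposes in this SDK version. The kEasyARKey constant is a placeholder, not part of the original sample, and the app delegate file must be Objective-C++ (.mm) so the C++ header can be included:
// AppDelegate.mm
#import "AppDelegate.h"
#include "easyar/utility.hpp"

static const char *kEasyARKey = "<your EasyAR app key>"; // placeholder, replace with your own key

@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    // Register the SDK once, before any CameraDevice/ImageTracker objects are created.
    bool ok = EasyAR::initialize(kEasyARKey);
    NSLog(@"EasyAR initialize = %d", ok);
    return YES;
}
@end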
EasyAR iOS implementation (CameraView.m):
//
// CameraView.m
// EasyAR
//
// Created by lv on 2016/12/27.
// Copyright © 2016 Albert. All rights reserved.
//
#import "CameraView.h"
#include "easyar/camera.hpp"
#import <easyar/matrix.hpp>
#import <easyar/augmenter.hpp>
#import <easyar/imagetarget.hpp>
#import <easyar/utility.hpp>
#import <easyar/frame.hpp>
#import <easyar/imagetracker.hpp>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#include <iostream>
#import <QuartzCore/QuartzCore.h>
#import "renderer.hpp"
#import "SDWebImageManager.h"
@interface CameraView ()
{
EasyAR::CameraDevice _cameraDevice; // camera device
EasyAR::ImageTarget _tar;
EasyAR::Augmenter _augmenter; // renderer
EasyAR::Vec2I view_size;
EasyAR::Vec4I _viewport;
EasyAR::samples::Renderer _renderer;
EasyAR::ImageTracker _tracker;
CALayer *_imageLayer;
BOOL _portrait; // whether the device is in portrait orientation
}
@property(nonnull, strong) CADisplayLink *displayLink; // display link driving the render loop
@property(nonatomic, strong) CAEAGLLayer * eaglLayer;
@property(nonatomic, strong) EAGLContext *context; // OpenGL ES context
@property(nonatomic) GLuint colorRenderBuffer;
@end
@implementation CameraView
+ (Class)layerClass
{
return [CAEAGLLayer class];
}
- (instancetype)initWithFrame:(CGRect)frame
{
self = [super initWithFrame:frame];
if (self) {
[self initAll];
}
return self;
}
- (void)initAll {
_portrait = NO;
_augmenter = EasyAR::Augmenter(); // create the renderer
[self setupGL];
_renderer.init();
[self initCamera];
}
- (void)resize:(CGRect)frame orientation:(UIInterfaceOrientation)orientation
{
BOOL isPortrait = NO;
switch (orientation)
{
case UIInterfaceOrientationPortrait:
case UIInterfaceOrientationPortraitUpsideDown:
isPortrait = YES;
break;
case UIInterfaceOrientationLandscapeLeft:
case UIInterfaceOrientationLandscapeRight:
isPortrait = NO;
break;
default:
break;
}
[self setPortrait:isPortrait];
[self resizeGL:frame.size.width height:frame.size.height];
}
- (void)setOrientation:(UIInterfaceOrientation)orientation
{
switch (orientation)
{
case UIInterfaceOrientationPortrait:
EasyAR::setRotationIOS(270);
break;
case UIInterfaceOrientationPortraitUpsideDown:
EasyAR::setRotationIOS(90);
break;
case UIInterfaceOrientationLandscapeLeft:
EasyAR::setRotationIOS(180);
break;
case UIInterfaceOrientationLandscapeRight:
EasyAR::setRotationIOS(0);
break;
default:
break;
}
}
-(void)setPortrait:(BOOL)portrait
{
_portrait = portrait;
}
-(void)resizeGL:(float )width height:(float)height
{
EasyAR::Vec2I size = EasyAR::Vec2I(1, 1);
if(_cameraDevice.isOpened())
size = _cameraDevice.size();
if (size[0] == 0 || size[1] == 0)
return;
if(_portrait)
std::swap(size[0], size[1]);
float scaleRatio = std::max((float)width / (float)size[0], (float)height / (float)size[1]);
EasyAR::Vec2I viewport_size = EasyAR::Vec2I((int)(size[0] * scaleRatio), (int)(size[1] * scaleRatio));
_viewport = EasyAR::Vec4I(0, height - viewport_size[1], viewport_size[0], viewport_size[1]);
}
// start scanning
-(void)start
{
BOOL isOpen = [self cameraStart];
NSLog(@"打開相機 = %zd",isOpen);
}
// called on every display refresh while scanning
- (void)displayLinkCallback:(CADisplayLink*)displayLink
{
[self render];
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
[_context presentRenderbuffer:GL_RENDERBUFFER];
}
// set up the OpenGL ES drawing layer
- (void)setupGL
{
_eaglLayer = (CAEAGLLayer*) self.layer;
_eaglLayer.opaque = YES;
EAGLRenderingAPI api = kEAGLRenderingAPIOpenGLES2;
_context = [[EAGLContext alloc] initWithAPI:api];
if (!_context)
NSLog(@"Failed to initialize OpenGLES 2.0 context");
if (![EAGLContext setCurrentContext:_context])
NSLog(@"Failed to set current OpenGL context");
GLuint frameBuffer;
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
glGenRenderbuffers(1, &_colorRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, _colorRenderBuffer);
[_context renderbufferStorage:GL_RENDERBUFFER fromDrawable:_eaglLayer];
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorRenderBuffer);
int width, height;
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &width);
glGetRenderbufferParameteriv(GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &height);
GLuint depthRenderBuffer;
glGenRenderbuffers(1, &depthRenderBuffer);
glBindRenderbuffer(GL_RENDERBUFFER, depthRenderBuffer);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderBuffer);
}
// Core part: take the camera image from the augmenter, draw it to the screen, and check the matching/tracking results
-(void)render
{
glClearColor(0.f, 0.f, 0.f, 1.f);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
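// Ask the augmenter for the latest frame; it carries the current camera image and the tracking results.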
EasyAR::Frame frame = _augmenter.newFrame();
if(view_size[0] > 0){
int width = view_size[0];
int height = view_size[1];
EasyAR::Vec2I size = EasyAR::Vec2I(1, 1);
if (_cameraDevice && _cameraDevice.isOpened())
size = _cameraDevice.size();
if(_portrait)
std::swap(size[0], size[1]);
float scaleRatio = std::max((float)width / (float)size[0], (float)height / (float)size[1]);
EasyAR::Vec2I viewport_size = EasyAR::Vec2I((int)(size[0] * scaleRatio), (int)(size[1] * scaleRatio));
if(_portrait){
_viewport = EasyAR::Vec4I(0, height - viewport_size[1], viewport_size[0], viewport_size[1]);
}
else
{
NSLog(@"_portrait==>%dx==>%dy==>%dz==>%d",_portrait,(width - height),viewport_size[0],viewport_size[1]);
_viewport = EasyAR::Vec4I(0, width - height, viewport_size[0], viewport_size[1]);
}
if(_cameraDevice && _cameraDevice.isOpened())
view_size[0] = -1;
}
_augmenter.setViewPort(_viewport);
_augmenter.drawVideoBackground();
glViewport(_viewport[0], _viewport[1], _viewport[2], _viewport[3]); // glViewport sets the view region: the part of the drawable that OpenGL renders into
for (int i = 0; i < frame.targets().size(); ++i) {
EasyAR::AugmentedTarget::Status status = frame.targets()[i].status();
// the image target has been matched and is being tracked
if(status == EasyAR::AugmentedTarget::kTargetStatusTracked) {
// load the 3D model (rendering path left commented out here)
// EasyAR::Matrix44F projectionMatrix = EasyAR:: getProjectionGL(_cameraDevice.cameraCalibration(), 0.2f, 500.f);
// EasyAR::Matrix44F cameraview = getPoseGL(frame.targets()[i].pose());
// EasyAR::ImageTarget target = frame.targets()[i].target().cast_dynamic<EasyAR::ImageTarget>();
// _renderer.render(projectionMatrix, cameraview, target.size());
// NSLog(@"status===%zd",status);
// if (![self.layer.sublayers containsObject:_imageLayer])
// {
// _imageLayer = (CALayer *)[self initBonus:[UIImage imageNamed:@"Bonus"]];
// [_imageLayer removeFromSuperlayer];
// [self.layer addSublayer:_imageLayer];
// [self bonusAnimation];
// }
if (_matchBlock) _matchBlock(status); // notify the host that this target is being tracked
}
}
}
- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event{
CGPoint point = [[touches anyObject] locationInView:self];
// convert the touch into the image layer's coordinate space and dismiss the layer if it was hit
CGPoint imageLayerPoint = [_imageLayer convertPoint:point fromLayer:self.layer];
if ([_imageLayer containsPoint:imageLayerPoint]) {
[_imageLayer removeFromSuperlayer];
}
}
-(id)initBonus:(UIImage *)image
{
CALayer * imageLayer = [CALayer layer];
imageLayer.frame = CGRectMake((SIZE.width-128)/2, (SIZE.height-192)/2, 128, 192);
imageLayer.cornerRadius = 10.0;
imageLayer.masksToBounds = YES;
imageLayer.contents = (id)image.CGImage;
return (id)imageLayer;
}
-(void)bonusAnimation
{
CAKeyframeAnimation *theAnimation = [CAKeyframeAnimation animation];
// CATransform3DMakeRotation(angle, x, y, z): the first argument is the rotation angle; the last three form the axis vector to rotate around, anchored at the view's center.
theAnimation.values = [NSArray arrayWithObjects:
[NSValue valueWithCATransform3D:CATransform3DMakeRotation(0, 0, 0.5, 0)],
[NSValue valueWithCATransform3D:CATransform3DMakeRotation(3.13, 0, 0.5, 0)],
[NSValue valueWithCATransform3D:CATransform3DMakeRotation(6.26, 0, 0.5, 0)],
nil];
theAnimation.cumulative = YES;
// time per keyframe = total duration / (values.count - 1)
// i.e. the interval / frequency
theAnimation.duration = 1;
// repeat count
theAnimation.repeatCount = 0;
// no bounce-back: do not remove the animation when it finishes
theAnimation.removedOnCompletion = NO;
// keep the final state of the animation
theAnimation.fillMode = kCAFillModeForwards;
theAnimation.delegate = self;
_imageLayer.zPosition = 50;
[_imageLayer addAnimation:theAnimation forKey:@"transform"];
}
#pragma mark Initialize the camera
-(BOOL)initCamera
{
BOOL status = YES;
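// Open the camera, then attach it to both the augmenter (which renders its image) and the tracker (which detects targets in it).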
status = _cameraDevice.open();
_cameraDevice.setSize( EasyAR::Vec2I(1280, 720));
_cameraDevice.setFocusMode(EasyAR::CameraDevice::kFocusModeContinousauto);
status = _augmenter.attachCamera(_cameraDevice);
status = _tracker.attachCamera(_cameraDevice);
// [self loadImage:@"namecard.jpg"];
NSString *savePath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/test.jpg"];
// NSString *savePath = @"$(SRCROOT)/Documents/test.jpg";
NSLog(@"savePath= %@",savePath);
// [self downLoadImg:strURL savePath:savePath imgDownFinish:^(id image) {
[self loadImage:savePath];
// }];
// [self loadImagePath];
return status;
}
-(BOOL)cameraStart
{
BOOL status = YES;
status = _cameraDevice.start();
status = _tracker.start();
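// Drive -displayLinkCallback: (and therefore -render) once per screen refresh.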
self.displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
[self.displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
return status;
}
#pragma mark Load the target image
-(void)loadImage:(NSString *)path
{
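// EasyAR describes targets with a small JSON string: "image" is the file path and "name" is the target's identifier.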
NSString *str = [NSString stringWithFormat:@"{\"images\":[{\"image\":\"%@\",\"name\":\"%@\"}]}", path, @"test"];
NSLog(@"strJson=%@",str);
BOOL readImage = _tar.load([str UTF8String], EasyAR::kStorageAbsolute | EasyAR::kStorageJson);
_tracker.loadTarget(_tar, nil);
NSLog(@"image loaded = %d", readImage);
}
-(void)loadImagePath
{
NSString *jstr = [NSString stringWithFormat:@"{\"images\":[{\"image\":\"%@\",\"name\":\"%@\"}]}",@"namecard.jpg",@"namecard"];
NSLog(@"jstr==>%@",jstr);
BOOL readImage = _tar.load([jstr UTF8String], EasyAR::kStorageAssets | EasyAR::kStorageJson);
NSLog(@"讀取圖片 = %zd",readImage);
_tracker.loadTarget(_tar,0);
}
-(void)downLoadImg:(NSString *)strURL savePath:(NSString *)savePath imgDownFinish:(imgDownFinish)finish
{
[[SDWebImageManager sharedManager] downloadImageWithURL:[NSURL URLWithString:strURL] options:0
progress:^(NSInteger receivedSize, NSInteger expectedSize)
{
// handle download progress
} completed:^(UIImage *image, NSError *error, SDImageCacheType cacheType, BOOL finished, NSURL *imageURL) {
if (error)
{
NSLog(@"error is %@",error);
if (finish) {
finish(nil);
}
return; // bail out so the completion is not invoked a second time below
}
if (image)
{
NSArray *arrType = [strURL componentsSeparatedByString:@"."];
NSData *data = nil;
if ([[arrType lastObject] isEqualToString:@"png"]||[[arrType lastObject] isEqualToString:@"PNG"])
{
data = UIImageJPEGRepresentation(image,1);
}
if ([[arrType lastObject] isEqualToString:@"jpg"]||[[arrType lastObject] isEqualToString:@"JPG"])
{
data = UIImagePNGRepresentation(image);
}
if (data) {
if([data writeToFile:savePath atomically:YES])
{
NSLog(@"保存成功");
}
if (finish) {
finish(image);
}
}
else
{
if (finish) {
finish(nil);
}
}
}
}];
}
-(void)dealloc
{
}
-(void)clear
{
_tracker.stop();
_cameraDevice.stop();
_cameraDevice.close();
_cameraDevice.clear();
_augmenter.clear();
}
@end
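For completeness, here is a hedged usage sketch from a host view controller. It assumes CameraView.h declares the -initWithFrame:, -resize:orientation:, -start, and -clear methods implemented above, plus a matchBlock property backing the _matchBlock ivar; the property name and its int-compatible parameter type are assumptions, not part of the original listing:
// ViewController.mm (use .mm in case CameraView.h pulls in EasyAR C++ types)
#import <UIKit/UIKit.h>
#import "CameraView.h"

@interface ViewController : UIViewController
@end

@implementation ViewController
{
    CameraView *_cameraView;
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    _cameraView = [[CameraView alloc] initWithFrame:self.view.bounds];
    // Assumed property name for the _matchBlock ivar invoked in -render.
    _cameraView.matchBlock = ^(int status) {
        NSLog(@"target tracked, status = %d", status);
    };
    [self.view addSubview:_cameraView];
}

- (void)viewDidAppear:(BOOL)animated
{
    [super viewDidAppear:animated];
    [_cameraView resize:self.view.bounds
            orientation:[UIApplication sharedApplication].statusBarOrientation];
    [_cameraView start]; // opens the camera and starts the render loop
}

- (void)viewWillDisappear:(BOOL)animated
{
    [super viewWillDisappear:animated];
    [_cameraView clear]; // stop tracking and release the camera
}
@end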