方法一:
只能錄制iOS頁(yè)面,unity頁(yè)面為黑屏
方法二:
類(lèi)似于方法一
方法三:
Unity3D研究院之IOS截屏 話筒錄音 錄制截屏視頻保存沙盒(另類(lèi)實(shí)現(xiàn)方法)
OC的代碼截圖只有UI部分,U3D截圖只有3D部分。為了解決這個(gè)問(wèn)題截屏?xí)r我們需要把這兩張圖片合成為一張全新的圖片。這里再說(shuō)一下用蘋(píng)果私有API截圖是可以同時(shí)將UI部分U3D部分保存為一張圖片,不過(guò)有可能APPStore不能審核通過(guò),所以大家還是老老實(shí)實(shí)用合并的方法來(lái)做。
unity腳本,如圖所示.添加
屏幕快照 2017-10-18 上午8.56.38.png
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.UI;
// Bridges Unity UI buttons to the native screen-recording implementations:
// ShareREC on Android, extern "C" functions (StartV/StopV/SharaV) on iOS.
public class NewBehaviourScript : MonoBehaviour {
    Button _start;
    Button _stop;
    Button _share;

    // Native iOS entry points implemented in the Xcode project (extern "C").
    // NOTE: "SharaV" matches the native symbol name; the typo is kept on both
    // sides so the P/Invoke binding stays consistent.
    [DllImport("__Internal")]
    public static extern void StopV();
    [DllImport("__Internal")]
    public static extern void StartV();
    [DllImport("__Internal")]
    public static extern void SharaV();
    [DllImport("__Internal")]
    private static extern void unityToIOS (string str);

    // Number of U3D screenshots taken so far; used as the file-name index.
    int count = 0;

    // Wire the three scene buttons to their handlers.
    void Start () {
        _start = this.transform.Find("Start").GetComponent<Button>();
        _stop = this.transform.Find("Stop").GetComponent<Button>();
        _share = this.transform.Find("Share").GetComponent<Button>();
        _start.onClick.AddListener(StartVideo);
        _stop.onClick.AddListener(StopVideo);
        _share.onClick.AddListener(ShareVideo);
    }

    private void StartVideo()
    {
#if UNITY_ANDROID
        ShareREC.StartRecorder();
#elif UNITY_IOS
        StartV();
#endif
    }

    private void StopVideo()
    {
#if UNITY_ANDROID
        ShareREC.StopRecorder();
#elif UNITY_IOS
        StopV();
#endif
    }

    private void ShareVideo()
    {
#if UNITY_ANDROID
        ShareREC.ShowShare();
#elif UNITY_IOS
        // BUG FIX: the original called StartV() here, which (re)started
        // recording instead of sharing. Call the native share entry point.
        SharaV();
#endif
    }

    void OnGUI()
    {
        // Demo button: call into native iOS code, passing a string payload.
        if (GUI.Button (new Rect (300, 300, 300, 100), "跳轉(zhuǎn)到IOS界面")) {
            unityToIOS ("Hello IOS");
        }
    }

    void Update () {
    }

    // Invoked from Objective-C via UnitySendMessage("Canvas","StartScreenshot",...):
    // captures the 3D view to the sandbox as "<n>u3d.JPG" so the native side
    // can merge it with its own UIKit screenshot of the same tick.
    void StartScreenshot(string str)
    {
        Application.CaptureScreenshot(count +"u3d.JPG");
        count++;
    }
}
然后我們將這個(gè)Unity3D工程導(dǎo)出成IOS的項(xiàng)目。Unity會(huì)生成對(duì)應(yīng)的XCODE工程。我們寫(xiě)一個(gè)全新的ViewController覆蓋在U3D生成的OPGL viewController之上,用于寫(xiě)UI高級(jí)控件,接著打開(kāi)UnityAppController.mm文件。
//--------------------下面的MyViewController就是我們新寫(xiě)的Contoller----------------
YourViewController * myView = [[YourViewController alloc] init];
[UnityGetGLViewController().view addSubview:myView.view];
//--------------------上面的MyViewController就是我們新寫(xiě)的Contoller----------------
[UnityGetGLViewController().view bringSubviewToFront:myView.view];
MyViewController.h
//
// MyViewController.h
// Unity-iPhone
//
// Created by guopenglai on 2017/10/17.
//
#import <UIKit/UIKit.h>
#import "Foundation/Foundation.h"
#import "AVFoundation/AVFoundation.h"
#import "MediaPlayer/MediaPlayer.h"
// View controller layered on top of Unity's GL view controller; hosts the
// native record/screenshot UI (buttons, timer label, loading overlay).
@interface MyViewController : UIViewController
@end
MyViewController.m
#import "MyViewController.h"
// Private state. The AVCapture sample-buffer delegate conformances are
// declared but no capture session is set up in this file — presumably
// leftovers from an earlier approach; TODO confirm before removing.
@interface MyViewController ()<AVAudioRecorderDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>
{
// Fires every 0.1 s while capturing; drives heartBeat:.
NSTimer *_timer;
// Frames captured so far; doubles as the screenshot file-name index.
int _count;
// On-screen frame counter label.
UILabel * _labe;
// Microphone recorder writing Documents/sound.caf.
AVAudioRecorder * _recorder;
// "Loading" overlay shown while the movie is assembled.
UITextView *_sharedLoadingTextView;
UIActivityIndicatorView* _sharedActivityView;
}
@end
@implementation MyViewController
// Standard memory-warning hook; nothing is cached here to release.
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
/*
#pragma mark - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
// Get the new view controller using [segue destinationViewController].
// Pass the selected object to the new view controller.
}
*/
// Designated-initializer passthrough; no custom setup is needed here.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    if ((self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil]) != nil) {
        // No additional initialization required.
    }
    return self;
}
// Builds the native capture UI (start/stop buttons, counter label), takes
// one initial window snapshot into the photo album, and prepares the
// audio recorder so recording can start immediately on the first tap.
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor redColor];

    // Snapshot the key window once. renderInContext: captures UIKit layers
    // only — the Unity GL content does not appear in this image.
    UIWindow *screenWindow = [[UIApplication sharedApplication] keyWindow];
    UIGraphicsBeginImageContext(screenWindow.frame.size);
    [screenWindow.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    UIImageWriteToSavedPhotosAlbum(viewImage, nil, nil, nil);

    // FIX: the original `#if !TARGET_IPHONE_SIMULATOR` had identical code in
    // both branches, so the conditional was dead; a single assignment keeps
    // the view transparent so the Unity view underneath stays visible.
    self.view.backgroundColor = [UIColor clearColor];

    UIButton * start = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [start setFrame:CGRectMake(0, 100, 200, 30)];
    [start setTitle:@"開(kāi)始截屏" forState:UIControlStateNormal];
    [start addTarget:self action:@selector(startPress) forControlEvents:UIControlEventTouchDown];

    UIButton * end = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    [end setFrame:CGRectMake(0, 150, 200, 30)];
    [end setTitle:@"結(jié)束截屏(開(kāi)始錄制視頻)" forState:UIControlStateNormal];
    [end addTarget:self action:@selector(endPress) forControlEvents:UIControlEventTouchUpInside];

    [self.view addSubview:start];
    [self.view addSubview:end];

    // Frame counter, updated by heartBeat: on every timer tick.
    _labe = [[UILabel alloc]initWithFrame:CGRectMake(30, 200, 300, 30)];
    _labe.text = [NSString stringWithFormat:@"%@%d",@"雨松MOMO開(kāi)始計(jì)時(shí):=== ",_count];
    [self.view addSubview:_labe];

    // Configure the AVAudioSession/AVAudioRecorder ahead of time.
    [self prepareToRecord];
}
// Shows a modal "loading" overlay: a dark rounded text box containing
// `info`, plus a large white activity indicator, both centered on the view.
// Paired with removeLoading.
-(void)addLoading:(NSString*) info
{
    // Dark rounded text box showing the caller-supplied message.
    _sharedLoadingTextView = [[UITextView alloc] initWithFrame:CGRectMake(0, 0, 130, 130)];
    [_sharedLoadingTextView setBackgroundColor:[UIColor blackColor]];
    [_sharedLoadingTextView setText:info];
    [_sharedLoadingTextView setTextColor:[UIColor whiteColor]];
    // FIX: UITextAlignmentCenter is the deprecated pre-iOS 6 constant; use
    // NSTextAlignmentCenter (same value). The original also set alignment
    // twice — once is enough.
    [_sharedLoadingTextView setTextAlignment:NSTextAlignmentCenter];
    [_sharedLoadingTextView setFont:[UIFont systemFontOfSize:15]];
    _sharedLoadingTextView.alpha = 0.8f;
    _sharedLoadingTextView.center = self.view.center;
    _sharedLoadingTextView.layer.cornerRadius = 10;
    _sharedLoadingTextView.layer.masksToBounds = YES;

    // Large white spinner (the original created a Gray one and immediately
    // restyled it to WhiteLarge; create it with the final style directly).
    _sharedActivityView = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge];
    _sharedActivityView.frame = CGRectMake(0,0, 320, 480);
    _sharedActivityView.center = self.view.center;
    [_sharedActivityView startAnimating];

    [self.view addSubview:_sharedLoadingTextView];
    [self.view addSubview:_sharedActivityView];
}
// Tears down the "loading" overlay created by addLoading:.
-(void)removeLoading
{
    [_sharedActivityView removeFromSuperview];
    [_sharedLoadingTextView removeFromSuperview];
}
// "Start" button handler: resets the frame counter, starts a 10 Hz timer
// that grabs one combined screenshot per tick (heartBeat:), and begins
// microphone recording.
-(void)startPress
{
    // FIX: a second press used to create a duplicate repeating timer — the
    // old one leaked (NSTimer retains its target) and kept firing, doubling
    // the capture rate. Invalidate any existing timer first.
    [_timer invalidate];
    _count = 0;
    _timer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                              target:self
                                            selector:@selector(heartBeat:)
                                            userInfo:nil
                                             repeats:YES];
    // Capture microphone audio alongside the screenshots.
    [_recorder record];
}
// "Stop" button handler: stops the screenshot timer and kicks off movie
// assembly on a background thread. The loading overlay is added here,
// while still on the main thread.
-(void)endPress
{
NSLog(@"開(kāi)始制作視頻");
if(_timer != nil)
{
[_timer invalidate];
_timer = nil;
[self addLoading:@"開(kāi)始制作視頻"];
// Heavy image/video work happens off the main thread; see startThreadMainMethod.
[NSThread detachNewThreadSelector:@selector(startThreadMainMethod) toTarget:self withObject:nil];
}
}
// Background-thread entry point (spawned by endPress). For each captured
// tick, loads the UIKit screenshot ("<i>.JPG") and the Unity screenshot
// ("<i>u3d.JPG") from the Documents sandbox, merges them, encodes the
// merged frames into Documents/veido.MP4, then removes the loading overlay.
-(void)startThreadMainMethod
{
    NSMutableArray *frames = [[NSMutableArray alloc] init];
    NSString *docs = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    for (int i = 0; i < _count; i++)
    {
        // Both files for tick i were written to the sandbox: the UIKit layer
        // by heartBeat:, the GL layer by the Unity StartScreenshot callback.
        NSString *uikitPath = [NSString stringWithFormat:@"%@/%d%@",docs,i,@".JPG"];
        NSString *unityPath = [NSString stringWithFormat:@"%@/%d%@",docs,i,@"u3d.JPG"];
        UIImage *unityImg = [UIImage imageWithData:[NSData dataWithContentsOfFile:unityPath]];
        UIImage *uikitImg = [UIImage imageWithData:[NSData dataWithContentsOfFile:unityPath == nil ? nil : uikitPath]];
        // Unity frame first (background), UIKit frame drawn on top.
        [frames addObject:[self MergerImage : unityImg : uikitImg]];
    }
    NSString *moviePath = [NSString stringWithFormat:@"%@/%@%@",docs,@"veido",@".MP4"];
    [_recorder stop];
    [self writeImages:frames ToMovieAtPath:moviePath withSize: CGSizeMake(320, 480) inDuration:_count*0.1 byFPS:10];
    // FIX: removeLoading manipulates UIKit views and must run on the main
    // thread; the original called it directly from this background thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        [self removeLoading];
    });
    NSLog(@"recorder successfully");
}
// Timer callback (every 0.1 s while recording): updates the counter label,
// snapshots the UIKit layers of the key window to Documents/<count>.JPG,
// and asks Unity (via UnitySendMessage) to save its own GL screenshot for
// the same tick. The two files are merged later in startThreadMainMethod.
- (void) heartBeat: (NSTimer*) timer
{
_labe.text = [NSString stringWithFormat:@"%@%d",@"雨松MOMO開(kāi)始計(jì)時(shí):=== ",_count];
// The private API below (UIGetScreenImage) captures UIKit *and* Unity GL
// content in one shot, but may cause App Store rejection — kept for reference.
//extern CGImageRef UIGetScreenImage();
//UIImage *image = [UIImage imageWithCGImage:UIGetScreenImage()];
//UIImageWriteToSavedPhotosAlbum(image,nil,nil,nil);
// Safer approach used instead: renderInContext:, which captures UIKit
// layers only (the Unity GL content is missing from this image).
UIWindow *screenWindow = [[UIApplication sharedApplication]keyWindow];
UIGraphicsBeginImageContext(screenWindow.frame.size);
[screenWindow.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
NSData *data;
// Prefer PNG; fall back to max-quality JPEG if PNG encoding fails.
// NOTE(review): the file is named ".JPG" even when it contains PNG bytes;
// UIImage decodes by content, not extension, so the merge step still works.
if (UIImagePNGRepresentation(image) == nil)
{
data = UIImageJPEGRepresentation(image, 1);
}
else
{
data = UIImagePNGRepresentation(image);
}
NSFileManager *fileManager = [NSFileManager defaultManager];
NSString * Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
[fileManager createDirectoryAtPath:Path withIntermediateDirectories:YES attributes:nil error:nil];
Path = [NSString stringWithFormat:@"%@/%d%@",Path,_count,@".JPG"];
[fileManager createFileAtPath:Path contents:data attributes:nil];
// Tell the Unity script (bound to the "Canvas" object) to capture its frame.
UnitySendMessage("Canvas","StartScreenshot","");
// Note: the receiver name must match where the Unity script is attached.
_count++;
}
// Composites the second image over the first into a single 320x480 image.
// Used to combine the Unity GL screenshot with the UIKit screenshot, since
// each capture path misses the other's content.
-(UIImage*) MergerImage:(UIImage*) firstImg:(UIImage*) secondImg
{
    UIGraphicsBeginImageContext(CGSizeMake(320, 480));
    CGRect bottomRect = CGRectMake(0, 0, firstImg.size.width, firstImg.size.height);
    CGRect topRect = CGRectMake(0, 0, secondImg.size.width, secondImg.size.height);
    [firstImg drawInRect:bottomRect];
    [secondImg drawInRect:topRect];
    UIImage *merged = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return merged;
}
// Deprecated lifecycle hook (no longer called since iOS 6); nothing to release.
- (void)viewDidUnload
{
[super viewDidUnload];
// Release any retained subviews of the main view.
}
// Portrait only: the capture pipeline hard-codes a 320x480 frame size.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    if (interfaceOrientation == UIInterfaceOrientationPortrait) {
        return YES;
    }
    return NO;
}
// Renders `image` into a newly created 32ARGB CVPixelBuffer of the given
// size, for use with AVAssetWriterInputPixelBufferAdaptor.
// OWNERSHIP: returns a +1 retained buffer (from CVPixelBufferCreate); the
// caller is responsible for calling CVPixelBufferRelease.
- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image andSize:(CGSize) size
{
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
[NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
nil];
CVPixelBufferRef pxbuffer = NULL;
// Allocation failure asserts rather than returning NULL.
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width,
size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
&pxbuffer);
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
CVPixelBufferLockBaseAddress(pxbuffer, 0);
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
NSParameterAssert(pxdata != NULL);
// Draw directly into the buffer's backing memory via a bitmap context.
// NOTE(review): assumes the buffer's bytesPerRow equals 4*size.width;
// CVPixelBufferGetBytesPerRow may differ due to alignment — TODO confirm.
CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(pxdata, size.width,
size.height, 8, 4*size.width, rgbColorSpace,
kCGImageAlphaNoneSkipFirst);
NSParameterAssert(context);
CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
// Drawn at the image's own size, not `size`; mismatched sizes crop/underfill.
CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
CGImageGetHeight(image)), image);
CGColorSpaceRelease(rgbColorSpace);
CGContextRelease(context);
CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
return pxbuffer;
}
// Encodes `imagesArray` (UIImage frames, in order) into an H.264 QuickTime
// movie at `path`, spreading `duration` seconds across the frames at the
// given `fps` timescale, then muxes in the recorded audio track
// (CompileFilesToMakeMovie).
- (void) writeImages:(NSArray *)imagesArray ToMovieAtPath:(NSString *) path withSize:(CGSize) size
inDuration:(float)duration byFPS:(int32_t)fps
{
    // FIX: with zero frames the original divided by zero computing the
    // per-frame duration; bail out early instead.
    NSUInteger imagesCount = [imagesArray count];
    if (imagesCount == 0) {
        NSLog(@"writeImages: no frames to encode");
        return;
    }

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                            assetWriterInputWithMediaType:AVMediaTypeVideo
                                            outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    // Start the writing session at time zero.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    int frameCount = 0;
    float averageTime = duration/imagesCount;
    int averageFrame = (int)(averageTime * fps);
    for(UIImage * img in imagesArray)
    {
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[img CGImage] andSize:size];
        BOOL append_ok = NO;
        int j = 0;
        // Retry up to 30 times while the writer input is not ready.
        while (!append_ok && j < 30)
        {
            if (adaptor.assetWriterInput.readyForMoreMediaData)
            {
                printf("appending %d attemp %d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount,(int32_t) fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d,kRecordingFPS:%d,frameSeconds:%f",frameCount,fps,frameSeconds);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if(buffer)
                    [NSThread sleepForTimeInterval:0.05];
            }
            else
            {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d\n", frameCount, j);
        }
        // FIX: pixelBufferFromCGImage:andSize: returns a +1 retained buffer;
        // the original never released it, leaking one CVPixelBuffer per frame.
        if (buffer) {
            CVPixelBufferRelease(buffer);
        }
        frameCount = frameCount + averageFrame;
    }

    // Finish the session (synchronous finishWriting kept deliberately: the
    // audio mux below must not start before the movie file is complete).
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting];
    NSLog(@"finishWriting");
    // Mux the freshly written silent video with the recorded audio track.
    [self CompileFilesToMakeMovie];
}
// Configures the shared audio session for play-and-record and creates the
// AVAudioRecorder that captures microphone input to Documents/sound.caf
// (44.1 kHz, 16-bit, 2-channel little-endian linear PCM).
- (void) prepareToRecord
{
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    NSError *err = nil;
    [audioSession setCategory :AVAudioSessionCategoryPlayAndRecord error:&err];
    if(err){
        // FIX: %ld/(long) — -[NSError code] is NSInteger, not int.
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        return;
    }
    // FIX: the original cleared err *after* calling setActive:error:, so any
    // activation failure was silently ignored. Clear first, then check.
    err = nil;
    [audioSession setActive:YES error:&err];
    if(err){
        NSLog(@"audioSession: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        return;
    }

    // Uncompressed PCM keeps the later AVComposition mux simple.
    NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
    [recordSetting setValue :[NSNumber numberWithInt:kAudioFormatLinearPCM] forKey:AVFormatIDKey];
    [recordSetting setValue:[NSNumber numberWithFloat:44100.0] forKey:AVSampleRateKey];
    [recordSetting setValue:[NSNumber numberWithInt: 2] forKey:AVNumberOfChannelsKey];
    [recordSetting setValue :[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
    [recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
    [recordSetting setValue :[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];

    // Record into the Documents sandbox so CompileFilesToMakeMovie finds it.
    NSString * recorderFilePath = [NSString stringWithFormat:@"%@/%@.caf", [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"], @"sound"];
    NSURL *url = [NSURL fileURLWithPath:recorderFilePath];
    err = nil;
    _recorder = [[ AVAudioRecorder alloc] initWithURL:url settings:recordSetting error:&err];
    if(!_recorder){
        NSLog(@"recorder: %@ %ld %@", [err domain], (long)[err code], [[err userInfo] description]);
        UIAlertView *alert =
        [[UIAlertView alloc] initWithTitle: @"Warning"
                                   message: [err localizedDescription]
                                  delegate: nil
                         cancelButtonTitle:@"OK"
                         otherButtonTitles:nil];
        [alert show];
        return;
    }

    [_recorder setDelegate:self];
    [_recorder prepareToRecord];
    _recorder.meteringEnabled = YES;

    // Warn when no recording hardware is available (e.g. some simulators).
    // (inputAvailable replaces the deprecated inputIsAvailable; same value.)
    BOOL audioHWAvailable = audioSession.inputAvailable;
    if (! audioHWAvailable) {
        UIAlertView *cantRecordAlert =
        [[UIAlertView alloc] initWithTitle: @"Warning"
                                   message: @"Audio input hardware not available"
                                  delegate: nil
                         cancelButtonTitle:@"OK"
                         otherButtonTitles:nil];
        [cantRecordAlert show];
        return;
    }
}
// AVAudioRecorderDelegate: called when recording finishes; success alert
// is intentionally disabled (kept as reference).
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *) aRecorder successfully:(BOOL)flag
{
// NSLog(@"recorder successfully");
// UIAlertView *recorderSuccessful = [[UIAlertView alloc] initWithTitle:@"" message:@"視頻錄制成功"
// delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
// [recorderSuccessful show];
}
// AVAudioRecorderDelegate: called on an encoding error; the error alert is
// intentionally disabled (kept as reference).
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)arecorder error:(NSError *)error
{
// UIAlertView *recorderFailed = [[UIAlertView alloc] initWithTitle:@"" message:@"發(fā)生錯(cuò)誤"
// delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
// [recorderFailed show];
// [recorderFailed release];
}
// Muxes the silent screenshot video (Documents/veido.mp4) with the recorded
// microphone audio (Documents/sound.caf) into Documents/outputVeido.mov via
// an AVMutableComposition + async export session.
-(void)CompileFilesToMakeMovie
{
    AVMutableComposition* mixComposition = [AVMutableComposition composition];
    NSString* docs = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];

    NSURL* audio_inputFileUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%@", docs, @"sound.caf"]];
    NSURL* video_inputFileUrl = [NSURL fileURLWithPath:[NSString stringWithFormat:@"%@/%@", docs, @"veido.mp4"]];
    NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@", docs, @"outputVeido.mov"];
    NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath];

    // The export session will not overwrite; delete any previous output.
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    CMTime nextClipStartTime = kCMTimeZero;
    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:video_inputFileUrl options:nil];
    AVURLAsset* audioAsset = [[AVURLAsset alloc]initWithURL:audio_inputFileUrl options:nil];
    // FIX: the original called objectAtIndex:0 unconditionally, which throws
    // NSRangeException when either source file is missing or has no track.
    AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack* audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (videoTrack == nil || audioTrack == nil) {
        NSLog(@"CompileFilesToMakeMovie: source video or audio track missing");
        return;
    }

    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero,videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:videoTrack atTime:nextClipStartTime error:nil];

    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:audioTrack atTime:nextClipStartTime error:nil];

    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    // FIX: use the framework constant instead of the raw UTI string
    // @"com.apple.quicktime-movie" (same value, typo-proof).
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    [_assetExport exportAsynchronouslyWithCompletionHandler:
     ^(void ) {
         // Completion intentionally empty; the result is read from the sandbox.
     }
     ];
}
@end
卡的一筆
方法四:
使用replaykit
#import <ReplayKit/ReplayKit.h> 記得Linked Frameworks and Libraries中導(dǎo)入
// C-linkage entry points called from the Unity C# script via [DllImport("__Internal")].
// Method 4: screen recording through ReplayKit (no private API, App Store safe).
extern "C"
{
// Starts a ReplayKit recording (with microphone) if the device supports it.
void StartV()
{
NSLog(@"開(kāi)始");
// If recording has not started yet, check whether the system supports it.
if ([RPScreenRecorder sharedRecorder].available) {
NSLog(@"OK");
// Supported: start recording the replay, including microphone audio.
[[RPScreenRecorder sharedRecorder] startRecordingWithMicrophoneEnabled:YES handler:^(NSError * _Nullable error) {
NSLog(@"%@", error);
// Handle errors here, e.g. recording could not start due to user permissions.
}];
} else {
NSLog(@"錄制回放功能不可用");
}
}
// Stops the ReplayKit recording and presents the preview controller, where
// the user can save the video to the photo library, discard it, or share it.
void StopV()
{
NSLog(@"結(jié)束");
[[RPScreenRecorder sharedRecorder] stopRecordingWithHandler:^(RPPreviewViewController * _Nullable previewViewController, NSError * _Nullable error) {
if (error) {
NSLog(@"%@", error);
// Handle errors here, e.g. recording stopped because the disk is full.
}
if (previewViewController) {
// Hand the preview controller to the app delegate and present it.
previewViewController.previewControllerDelegate = GetAppController();
[GetAppController().window.rootViewController presentViewController:previewViewController animated:YES completion:nil];
}
}];
}
// Asks Unity for a fresh GL screenshot, then saves the previously written
// "<frameCount>u3d.JPG" from the Documents sandbox to the photo album.
// NOTE(review): UnitySendMessage is asynchronous, so the file read below
// presumably picks up the *previous* frame's screenshot — TODO confirm.
void SharaV()
{
// Tell the U3D side to capture its frame.
UnitySendMessage("Canvas","StartScreenshot","");
NSString * Path = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
NSString * _pathFirst = [NSString stringWithFormat:@"%@/%ld%@",Path,GetAppController().frameCount,@"u3d.JPG"];
// We only have a path; load the file into an NSData object.
NSData *data0=[NSData dataWithContentsOfFile:_pathFirst];
// Decode the image data.
UIImage *img0=[UIImage imageWithData:data0];
UIImageWriteToSavedPhotosAlbum(img0, GetAppController(), @selector(image:didFinishSavingWithError:contextInfo:), NULL);
}
// Receives a string from the Unity side; currently a no-op stub.
void unityToIOS(char* str)
{
}
}
// UIImageWriteToSavedPhotosAlbum completion callback (selector target is
// GetAppController(); this method belongs in that class's @implementation).
// NOTE(review): `msg` is computed but never displayed or logged — dead store;
// presumably an alert/log was intended here.
- (void)image: (UIImage *) image didFinishSavingWithError: (NSError *) error contextInfo: (void *) contextInfo
{
NSString *msg = nil ;
if(error != NULL){
msg = @"保存圖片失敗" ;
}else{
msg = @"保存圖片成功" ;
}
}