Let's start with merging — specifically, merging the recorded video with the recorded audio. We first used the shareAEC SDK to record the Unity view, but it conflicted with EasyAR when facing a tracked object and the screen went black. We ended up using Everyplay, the screen recorder recommended by Unity, but it cannot capture Unity's own audio, so we record audio ourselves while the screen recording runs. The key recording code is below.
- (void)setAudioSession {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    // Use the play-and-record category so the file can be played back after recording
    // (AVAudioSessionCategoryAmbient is the other candidate here)
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];
    // The lines below keep the loudspeaker playing while recording,
    // so the model's sound stays audible and gets captured by the mic
    UInt32 doChangeDefaultRoute = 1;
    // (kAudioSessionProperty_OverrideAudioRoute is the one-shot variant)
    AudioSessionSetProperty(kAudioSessionProperty_OverrideCategoryDefaultToSpeaker,
                            sizeof(doChangeDefaultRoute),
                            &doChangeDefaultRoute);
}
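AudioSessionSetProperty has been deprecated since iOS 7; on current SDKs the same speaker override can be expressed through AVAudioSession alone. A minimal sketch of the equivalent setup (the method name is mine):

- (void)setAudioSessionModern {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    NSError *error = nil;
    // Play-and-record, with output forced to the loudspeaker instead of the ear receiver
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord
                  withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker
                        error:&error];
    if (error) {
        NSLog(@"audio session error: %@", error.localizedDescription);
        return;
    }
    [audioSession setActive:YES error:nil];
}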
/**
 * Recorder settings
 *
 * @return the recording settings dictionary
 */
- (NSDictionary *)getAudioSetting
{
    NSMutableDictionary *dic = [NSMutableDictionary dictionary];
    [dic setObject:@(kAudioFormatLinearPCM) forKey:AVFormatIDKey]; // recording format
    [dic setObject:@(44100) forKey:AVSampleRateKey];               // sample rate; 8000 is enough for voice only
    [dic setObject:@(2) forKey:AVNumberOfChannelsKey];             // channels: 1 = mono, 2 = stereo
    [dic setObject:@(16) forKey:AVLinearPCMBitDepthKey];           // bits per sample: 8, 16, 24 or 32
    [dic setObject:@(NO) forKey:AVLinearPCMIsFloatKey];            // float samples would require a 32-bit depth
    [dic setObject:@(128000) forKey:AVEncoderBitRateKey];
    // AVEncoderAudioQualityKey : @(AVAudioQualityMax)
    return dic;
}
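As a sanity check on file size: at these settings the raw PCM stream is 44100 samples/s × 2 channels × 2 bytes ≈ 176 KB/s, roughly 10 MB per minute of recording. The merge step below re-encodes everything through an export preset, so this uncompressed file is only a temporary artifact.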
/**
 * Recording save path
 *
 * @return the storage path
 */
- (NSString *)getSavePath
{
    NSString *url = [[UtinityHelper tmpPath] stringByAppendingPathComponent:[NSString stringWithFormat:@"aa.%@", MusicType]];
    return url;
}
- (AVAudioRecorder *)audioRecorder
{
    if (!_audioRecorder) {
        NSError *error = nil;
        // getSavePath returns a file system path, so build a file URL
        // (URLWithString: would produce an unusable URL here)
        _audioRecorder = [[AVAudioRecorder alloc] initWithURL:[NSURL fileURLWithPath:[self getSavePath]]
                                                     settings:[self getAudioSetting]
                                                        error:&error];
        _audioRecorder.delegate = self;
        // _audioRecorder.meteringEnabled = YES; // enable metering to read levels (dB) while recording
        if (![_audioRecorder prepareToRecord]) {
            SL_Log(@"failed to start the recorder");
        }
        [self setAudioSession];
        if (error) {
            NSLog(@"error creating the recorder: %@", error.localizedDescription);
            return nil;
        }
    }
    return _audioRecorder;
}
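For context, this is roughly how the recorder is driven around the Everyplay session. The two trigger methods are placeholders for whatever your project uses to start and stop screen recording; only the AVAudioRecorder calls are real API:

// Hypothetical trigger points — wire these to your Everyplay start/stop events.
- (void)screenRecordingDidStart {
    [self.audioRecorder record];  // start capturing the mic (which hears the routed speaker audio)
}

- (void)screenRecordingDidStop {
    [self.audioRecorder stop];    // finalize the file at [self getSavePath]
    [self merge];                 // then mux it with the Everyplay video
}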
// Mux the recorded audio with the Everyplay video
- (void)merge
{
    NSFileManager *manager = [NSFileManager defaultManager];
    // NSString *documents = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    // Audio source
    NSString *strMusicPath = [self getSavePath];
    if (!strMusicPath) {
        // no audio file
        return;
    }
    NSURL *audioInputUrl = [NSURL fileURLWithPath:strMusicPath];
    // Video source: Everyplay drops its recordings under tmp/Everyplay/session
    NSString *strVideoPath = [[UtinityHelper tmpPath] stringByAppendingPathComponent:@"Everyplay/session"];
    NSArray *pathList = [manager subpathsAtPath:strVideoPath];
    if (pathList.count <= 0) {
        NSLog(@"video not found");
        [SVProgressHUD showErrorWithStatus:@"screen recording failed"];
        return;
    } else {
        strVideoPath = [strVideoPath stringByAppendingPathComponent:pathList.firstObject];
        NSArray *pathList1 = [manager subpathsAtPath:strVideoPath];
        if (pathList1.count <= 0) {
            return;
        } else {
            for (NSString *strPath in pathList1) {
                if ([strPath.pathExtension isEqualToString:@"mp4"]) {
                    strVideoPath = [strVideoPath stringByAppendingPathComponent:strPath];
                    break;
                }
            }
        }
    }
    // strVideoPath = [[NSBundle mainBundle] pathForResource:@"screen-001" ofType:@"mp4"];
    NSURL *videoInputUrl = [NSURL fileURLWithPath:strVideoPath];
    // Final output path
    NSString *videoPath = [[UtinityHelper docmentPath] stringByAppendingPathComponent:VideoFolder];
    [UtinityHelper creatFolderWithPath:videoPath];
    NSString *strVideoName = [NSString stringWithFormat:@"recoder_%@.mp4", [UtinityHelper switchStrDateWithDate:[NSDate date] format:@"yyyyMMddHHmmss"]];
    NSString *outPutFilePath = [videoPath stringByAppendingPathComponent:strVideoName];
    if ([UtinityHelper isHasPath:outPutFilePath]) {
        [UtinityHelper removeFileWithPath:outPutFilePath];
    }
    // Output URL for the merged file
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];
    // Insertion point on the composition timeline
    CMTime nextClipStartTime = kCMTimeZero;
    // Mutable audio/video composition
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Video asset
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    // Full video time range
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    // Video track (kCMPersistentTrackID_Invalid = 0 lets the composition pick an ID)
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source video track
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Insert the source track into the mutable track
    [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClipStartTime error:nil];
    // The merged video comes out rotated 180°, so pre-rotate the track by 180°
    CGAffineTransform preferredTransform = videoTrack.preferredTransform;
    // CGAffineTransform trans = CGAffineTransformTranslate(preferredTransform, 0.0, -videoTrack.naturalSize.height);
    CGAffineTransform transNew = CGAffineTransformRotate(preferredTransform, M_PI);
    videoTrack.preferredTransform = transNew;
    // Audio asset
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    // The video is the shorter clip here, so its duration is used directly;
    // for a general solution compare the two durations first
    CMTimeRange audioTimeRange = videoTimeRange;
    // Audio track
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source audio track
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Insert it into the composition
    [audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
    // Export session
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    // Output type
    // assetExport.outputFileType = AVFileType3GPP;
    assetExport.outputFileType = AVFileTypeMPEG4;
    // Output URL
    assetExport.outputURL = outputFileUrl;
    // Optimize for streaming
    assetExport.shouldOptimizeForNetworkUse = YES;
    // Run the export
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        SL_Log(@"export finished");
        // hop back to the main thread for any UI work
        dispatch_async(dispatch_get_main_queue(), ^{
        });
    }];
}
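The completion handler above only logs; the export can fail (missing track, full disk, interrupted session), so in practice it is worth branching on the session status. A minimal sketch using the same assetExport and outputFileUrl:

[assetExport exportAsynchronouslyWithCompletionHandler:^{
    switch (assetExport.status) {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"export finished: %@", outputFileUrl);
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"export failed: %@", assetExport.error);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"export cancelled");
            break;
        default:
            break;
    }
}];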
Next, some code around video rotation.
- (void)mergeAndExportVideosAtFileURLs:(NSArray *)fileURLArray
{
    // NSLog(@"the getVideoCount is %lu", (unsigned long)[self getVideoCount]);
    // if (self.getVideoCount != fileURLArray.count) {
    //     NSLog(@"guaranteed crash -------");
    // }
    NSLog(@"the fileURLArray is %@", fileURLArray);
    if (fileURLArray.count <= 0) {
#warning merging was started before the short clips had been generated
        NSLog(@"fatal error: no clips to merge!");
        return;
    } else {
        for (NSURL *fileURL in fileURLArray) {
            NSString *path = fileURL.path;
            if ([[NSFileManager defaultManager] fileExistsAtPath:path])
            {
                // JFLog(DBGUI, @"mergeAndExportVideosAtFileURLs theVideoPath is %@", path);
                NSUInteger size;
                NSDictionary *attr = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:nil];
                size = [attr[NSFileSize] unsignedIntegerValue];
                // JFLog(DBGUI, @"mergeAndExportVideosAtFileURLs fileSize is %lu", size / (1024 * 1024));
            }
        }
    }
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSError *error = nil;
        CGSize renderSize = CGSizeMake(0, 0);
        NSMutableArray *layerInstructionArray = [[NSMutableArray alloc] init];
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
        CMTime totalDuration = kCMTimeZero;
        // Collect the video tracks first; this pass also determines renderSize
        NSMutableArray *assetTrackArray = [[NSMutableArray alloc] init];
        NSMutableArray *assetAudioTrackArray = [NSMutableArray arrayWithCapacity:0];
        NSMutableArray *assetArray = [[NSMutableArray alloc] init];
        for (NSURL *fileURL in fileURLArray) {
            AVAsset *asset = [AVAsset assetWithURL:fileURL];
            // AVURLAsset *asset = [AVURLAsset URLAssetWithURL:fileURL options:nil];
            // NSString *tracksKey = @"tracks";
            if (!asset) {
                continue;
            }
            [assetArray addObject:asset];
            AVAssetTrack *assetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
            [assetTrackArray addObject:assetTrack];
            // AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
            // [assetAudioTrackArray addObject:assetAudioTrack];
            // width/height are swapped here: naturalSize is the pre-rotation size
            renderSize.width = MAX(renderSize.width, assetTrack.naturalSize.height);
            renderSize.height = MAX(renderSize.height, assetTrack.naturalSize.width);
        }
        NSLog(@"the assetAudioTrackArray is %@", assetAudioTrackArray);
        CGFloat renderW = MIN(renderSize.width, renderSize.height);
        NSLog(@"the renderW is %f", renderW);
        NSLog(@"the assetArray count is %lu", (unsigned long)[assetArray count]);
        NSLog(@"the assetTrackArray count is %lu", (unsigned long)[assetTrackArray count]);
        for (int i = 0; i < [assetArray count] && i < [assetTrackArray count]; i++) {
            AVAsset *asset = [assetArray objectAtIndex:i];
            AVAssetTrack *assetTrack = [assetTrackArray objectAtIndex:i];
#warning a guard is needed here
            // [asset tracksWithMediaType:AVMediaTypeAudio] can come back empty — the clip may carry no audio
            NSArray *arr = [asset tracksWithMediaType:AVMediaTypeAudio];
            // JFLog(DBGUI, @"the audioTrackArr is %@", arr);
            if (arr.count <= 0) {
                NSLog(@"this clip has no audio track!");
            } else {
                AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
                [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[arr objectAtIndex:0] atTime:totalDuration error:nil];
            }
            AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
            NSLog(@"the asset duration is %lld s", asset.duration.value / asset.duration.timescale);
            [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                ofTrack:assetTrack
                                 atTime:totalDuration
                                  error:&error];
            // fix orientation issue
            AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
            totalDuration = CMTimeAdd(totalDuration, asset.duration);
            CGFloat rate;
            NSLog(@"the renderW is %f", renderW);
            NSLog(@"assetTrack.naturalSize.width is %f", assetTrack.naturalSize.width);
            NSLog(@"assetTrack.naturalSize.height is %f", assetTrack.naturalSize.height);
            rate = renderW / MIN(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
            NSLog(@"the rate is %f", rate);
            NSLog(@"preferredTransform = (a=%f, b=%f, c=%f, d=%f, tx=%f, ty=%f)",
                  assetTrack.preferredTransform.a, assetTrack.preferredTransform.b,
                  assetTrack.preferredTransform.c, assetTrack.preferredTransform.d,
                  assetTrack.preferredTransform.tx, assetTrack.preferredTransform.ty);
            CGAffineTransform translateToCenter;
            CGAffineTransform mixedTransform;
            // AVMutableVideoComposition *waterMarkVideoComposition = [AVMutableVideoComposition videoComposition];
            // waterMarkVideoComposition.frameDuration = CMTimeMake(1, 30);
            // degressFromVideoFileWithURL: expects an NSURL, not a track;
            // the track-based helper (see end of section) keeps the call type-correct
            int degrees = (int)[self degressFromVideoTrack:assetTrack];
            // degrees = 180;
            if (degrees == 0) {
                // already upright, nothing to do
            } else {
                if (degrees == 90) {
                    // rotated 90° clockwise
                    NSLog(@"video rotated 90°, home button on the left");
                    translateToCenter = CGAffineTransformMakeTranslation(assetTrack.naturalSize.height, 0.0);
                    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2);
                    // videoTrack.renderSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
                } else if (degrees == 180) {
                    // rotated 180° clockwise
                    NSLog(@"video rotated 180°, home button on top");
                    translateToCenter = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
                    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI);
                    // waterMarkVideoComposition.renderSize = CGSizeMake(assetTrack.naturalSize.width, assetTrack.naturalSize.height);
                } else if (degrees == 270) {
                    // rotated 270° clockwise
                    NSLog(@"video rotated 270°, home button on the right");
                    translateToCenter = CGAffineTransformMakeTranslation(0.0, assetTrack.naturalSize.width);
                    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2 * 3.0);
                    // waterMarkVideoComposition.renderSize = CGSizeMake(assetTrack.naturalSize.height, assetTrack.naturalSize.width);
                }
            }
            CGAffineTransform preferredTransform = assetTrack.preferredTransform;
            CGAffineTransform trans = CGAffineTransformTranslate(preferredTransform, 0.0, -assetTrack.naturalSize.height);
            CGAffineTransform transNew = CGAffineTransformRotate(preferredTransform, M_PI_2 * 3);
            transNew = CGAffineTransformTranslate(transNew, 0, -(assetTrack.naturalSize.width - assetTrack.naturalSize.height) / 2.0); // shift so the middle of the frame is kept
            transNew = CGAffineTransformConcat(trans, transNew);
            transNew = CGAffineTransformScale(transNew, rate, rate); // scale so front/back camera clips come out the same size
            // An earlier attempt built the transform directly from preferredTransform, kept for reference:
            // layerTransform = (a = 0, b = 1, c = -1, d = 0, tx = 1080, ty = 0)
            // CGAffineTransform layerTransform = CGAffineTransformMake(assetTrack.preferredTransform.a, assetTrack.preferredTransform.b, assetTrack.preferredTransform.c, assetTrack.preferredTransform.d, assetTrack.naturalSize.height * rate, assetTrack.preferredTransform.ty * rate);
            // [layerInstruction setTransform:layerTransform atTime:kCMTimeZero];
            // [layerInstruction setOpacity:0.0 atTime:totalDuration];
            [layerInstruction setTransform:transNew atTime:kCMTimeZero];
            // [layerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
            // [layerInstruction setTransform:assetTrack.preferredTransform atTime:kCMTimeZero];
            [layerInstructionArray addObject:layerInstruction];
        }
        // save path
        NSURL *mergeFileURL = [NSURL fileURLWithPath:[[self class] getVideoMergeFilePathString]];
        // export
        AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, totalDuration);
        mainInstruction.layerInstructions = layerInstructionArray;
        AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
        mainCompositionInst.instructions = @[mainInstruction];
        mainCompositionInst.frameDuration = CMTimeMake(1, 30);
        // mainCompositionInst.frameDuration = CMTimeMake(1, 24);
        NSLog(@"the renderSize is %@", NSStringFromCGSize(CGSizeMake(renderW, renderW)));
        mainCompositionInst.renderSize = CGSizeMake(renderW, renderW); // square output
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
        exporter.videoComposition = mainCompositionInst;
        exporter.outputURL = mergeFileURL;
        exporter.outputFileType = AVFileTypeMPEG4;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            if ([exporter status] != AVAssetExportSessionStatusCompleted) {
                NSLog(@"the status is %ld", (long)[exporter status]);
                NSLog(@"the outPutPath is %@", [exporter.outputURL absoluteString]);
                NSLog(@"the error is %@", [exporter error]);
            }
            NSString *path = mergeFileURL.path;
            NSLog(@"theVideoPath is %@", path);
            NSLog(@"outputFileURL is %@", mergeFileURL);
            if ([[NSFileManager defaultManager] fileExistsAtPath:path])
            {
                NSUInteger size;
                NSDictionary *attr = [[NSFileManager defaultManager] attributesOfItemAtPath:path error:nil];
                size = [attr[NSFileSize] unsignedIntegerValue];
                NSLog(@"didFinishRecordingToOutputFileAtURL fileSize is %lu MB", size / (1024 * 1024));
            }
            dispatch_async(dispatch_get_main_queue(), ^{
                // MSPreViewController *VC = [MSPreViewController new];
                // VC.videoURL = mergeFileURL;
                // [self.navigationController pushViewController:VC animated:YES];
            });
            // dispatch_async(dispatch_get_main_queue(), ^{
            //     if ([_delegate respondsToSelector:@selector(videoRecorder:didFinishMergingVideosToOutPutFileAtURL:)]) {
            //         [_delegate videoRecorder:self didFinishMergingVideosToOutPutFileAtURL:mergeFileURL];
            //     }
            // });
        }];
    });
}
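A typical call site, assuming the clip URLs were collected while recording (the paths here are purely illustrative):

// Hypothetical usage: merge the clips recorded so far, in order.
NSArray *clipURLs = @[[NSURL fileURLWithPath:@"/tmp/clip-0.mp4"],
                      [NSURL fileURLWithPath:@"/tmp/clip-1.mp4"]];
[self mergeAndExportVideosAtFileURLs:clipURLs];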
- (NSUInteger)degressFromVideoFileWithURL:(NSURL *)url
{
    NSUInteger degress = 0;
    AVAsset *asset = [AVAsset assetWithURL:url];
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            // Portrait
            degress = 90;
        } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            // PortraitUpsideDown
            degress = 270;
        } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            // LandscapeRight
            degress = 0;
        } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            // LandscapeLeft
            degress = 180;
        }
    }
    return degress;
}
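The merge loop above already holds an AVAssetTrack, while degressFromVideoFileWithURL: takes an NSURL — the original code passed the track into it anyway. The track-based helper below is an addition of mine with the same orientation checks; it is what the loop now calls:

// Added helper: same orientation test as above, but it takes the track
// directly, so the asset does not have to be reloaded from its URL.
- (NSUInteger)degressFromVideoTrack:(AVAssetTrack *)videoTrack
{
    CGAffineTransform t = videoTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        return 90;   // Portrait
    } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        return 270;  // PortraitUpsideDown
    } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        return 180;  // LandscapeLeft
    }
    return 0;        // LandscapeRight / identity
}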
- (UIImage *)extractImageFromVideoFileWithUrl:(NSURL *)url
{
    NSDictionary *opts = @{AVURLAssetPreferPreciseDurationAndTimingKey : @(NO)};
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:url options:opts];
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    // apply the track's preferred orientation
    gen.appliesPreferredTrackTransform = YES;
    CMTime time = CMTimeMakeWithSeconds(1, 60);
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    if (error)
    {
        SL_Log(@"error %@", error);
        return nil;
    }
    UIImage *thumb = [[UIImage alloc] initWithCGImage:image];
    CGImageRelease(image);
    return thumb;
}
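And a typical use of it, grabbing a cover image for the merged file (mergeFileURL as above):

// Hypothetical usage: thumbnail the merged video for a preview screen.
UIImage *cover = [self extractImageFromVideoFileWithUrl:mergeFileURL];
if (cover) {
    // e.g. show it in a UIImageView, or save a JPEG next to the video
}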