解決視頻轉(zhuǎn)webp方案
分為兩步:1. 視頻剪輯後按幀取圖;2. 將圖片數(shù)組編碼為webp
#pragma mark 裁剪視頻
/// Trims a video asset to the range [startTime, endTime] and exports it to a file.
/// @param videoAsset The source asset to trim.
/// @param startTime  Trim start, in seconds.
/// @param endTime    Trim end, in seconds.
/// @param completion Invoked with the output file URL on success, or a non-nil
///                   error plus a failure state otherwise.
+ (void)cutVideoAndExportVideoWithVideoAsset:(AVAsset *)videoAsset startTime:(CGFloat)startTime endTime:(CGFloat)endTime completion:(void (^)(NSURL *outputPath, NSError *error, ST_VideoState state))completion
{
NSError *error;
// 1. AVMutableComposition owns the track the trimmed media is copied into.
AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
// 2. Build the trim range (start position + length), in the asset's own timescale.
CMTime startT = CMTimeMakeWithSeconds(startTime, videoAsset.duration.timescale);
CMTime videoDuration = CMTimeMakeWithSeconds(endTime - startTime, videoAsset.duration.timescale);
CMTimeRange timeRange = CMTimeRangeMake(startT, videoDuration);
// Guard: inserting a nil source track would raise NSInvalidArgumentException.
AVAssetTrack *sourceVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
if (!sourceVideoTrack)
{
    if (completion)
    {
        completion(nil, [NSError ST_PhotoSDKVideoActionDescription:@"No video track found in asset"], ST_ExportSessionStatusFailed);
    }
    return;
}
// 3. Video track of the composition; source material is inserted into it.
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
/* timeRange: span to copy; ofTrack: source track; atTime: insertion point in the composition. */
BOOL inserted = [videoTrack insertTimeRange:timeRange
                                    ofTrack:sourceVideoTrack
                                     atTime:kCMTimeZero
                                      error:&error];
if (!inserted)
{
    // The original ignored this error and went on to export a broken composition.
    if (completion)
    {
        completion(nil, error, ST_ExportSessionStatusFailed);
    }
    return;
}
// 4. Bake rotation metadata into a video composition, then export.
AVMutableVideoComposition *videoComposition = [self fixedCompositionWithAsset:videoAsset];
[self _getExportVideoWithAvAssset:mixComposition videoComposition:videoComposition audioMix:nil timeRange:timeRange completion:completion cut:YES];
}
/// Builds a video composition that bakes the source track's rotation metadata
/// into the rendered frames so the exported video plays upright.
/// @param videoAsset The asset whose orientation should be corrected.
/// @return A composition whose renderSize/transform match the detected rotation.
///         renderSize stays zero for 0° (callers use this to skip applying it).
+ (AVMutableVideoComposition *)fixedCompositionWithAsset:(AVAsset *)videoAsset
{
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps render rate.
// Rotation of the recorded video in degrees (0 / 90 / 180 / 270).
int degrees = [self degressFromVideoFileWithAsset:videoAsset];
// Guard: the original indexed [tracks objectAtIndex:0] and crashed on assets
// with no video track; firstObject is nil-safe.
AVAssetTrack *videoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
if (!videoTrack)
{
    return videoComposition; // renderSize stays zero, so callers won't apply it.
}
// One instruction describes every track's state for a given time range.
AVMutableVideoCompositionInstruction *rotateInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
rotateInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, [videoAsset duration]);
// Layer instruction: the per-track transform inside that time range.
AVMutableVideoCompositionLayerInstruction *rotateLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
CGSize naturalSize = videoTrack.naturalSize;
CGAffineTransform translateToCenter;
CGAffineTransform mixedTransform;
if (degrees == 90) // UIImageOrientationRight
{
    // Rotate 90° clockwise: shift right by the height, rotate, swap render dimensions.
    translateToCenter = CGAffineTransformMakeTranslation(naturalSize.height, 0.0);
    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2);
    videoComposition.renderSize = CGSizeMake(naturalSize.height, naturalSize.width);
    [rotateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
}
else if (degrees == 180) // UIImageOrientationDown
{
    // Rotate 180° clockwise: shift by both dimensions, rotate; dimensions unchanged.
    translateToCenter = CGAffineTransformMakeTranslation(naturalSize.width, naturalSize.height);
    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI);
    videoComposition.renderSize = CGSizeMake(naturalSize.width, naturalSize.height);
    [rotateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
}
else if (degrees == 270) // UIImageOrientationLeft
{
    // Rotate 270° clockwise: shift down by the width, rotate, swap render dimensions.
    translateToCenter = CGAffineTransformMakeTranslation(0.0, naturalSize.width);
    mixedTransform = CGAffineTransformRotate(translateToCenter, M_PI_2 * 3.0);
    videoComposition.renderSize = CGSizeMake(naturalSize.height, naturalSize.width);
    [rotateLayerInstruction setTransform:mixedTransform atTime:kCMTimeZero];
}
// degrees == 0: no transform, renderSize stays zero on purpose.
rotateInstruction.layerInstructions = @[rotateLayerInstruction];
videoComposition.instructions = @[rotateInstruction]; // Attach the orientation fix.
return videoComposition;
}
/// Configures an AVAssetExportSession for the given asset and exports it asynchronously.
/// @param videoAsset       Asset (usually an already-trimmed AVMutableComposition) to export.
/// @param videoComposition Optional orientation fix; applied only when its renderSize is non-zero.
/// @param audioMix         Optional audio mix (currently unused by callers).
/// @param timeRange        Range to export; applied only when isCut == NO, because a
///                         direct (non-composition) export otherwise always starts at 0.
/// @param completion       Invoked on the main queue for every status change:
///                         progress states pass a descriptive NSError, success passes the URL.
/// @param isCut            YES when the asset was already trimmed via a composition,
///                         so the time range must not be applied a second time.
+ (void)_getExportVideoWithAvAssset:(AVAsset *)videoAsset videoComposition:(AVVideoComposition *)videoComposition audioMix:(AVAudioMix *)audioMix timeRange:(CMTimeRange)timeRange completion:(void (^)(NSURL *outputPath, NSError *error, ST_VideoState state))completion cut:(BOOL)isCut
{
NSURL *outputURL = [self _getExportVideoPathForType:@"mp4"]; // Destination file path.
NSArray *compatiblePresets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
if (![compatiblePresets containsObject:AVAssetExportPresetHighestQuality])
{
    // The original silently returned here, so the caller's completion never fired.
    if (completion)
    {
        NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportPresetHighestQuality is not supported for this asset"];
        completion(nil, error, ST_ExportSessionStatusFailed);
    }
    return;
}
AVAssetExportSession *exportSession = [[AVAssetExportSession alloc]
    initWithAsset:videoAsset
       presetName:AVAssetExportPresetHighestQuality]; // AVAssetExportPresetPassthrough may return unprocessed video.
if (!isCut)
{
    exportSession.timeRange = timeRange; // Direct export always starts at 0 unless a range is set here.
}
if (videoComposition.renderSize.width)
{
    // renderSize of zero means "no rotation" — applying an empty composition makes the export fail.
    exportSession.videoComposition = videoComposition;
}
exportSession.outputURL = outputURL;
exportSession.shouldOptimizeForNetworkUse = YES; // Allow streaming before the file fully downloads.
exportSession.outputFileType = AVFileTypeQuickTimeMovie;
NSArray *supportedTypeArray = exportSession.supportedFileTypes; // Formats this session can write.
if ([supportedTypeArray containsObject:AVFileTypeMPEG4]) // Prefer MP4 when available.
{
    exportSession.outputFileType = AVFileTypeMPEG4;
}
else if (supportedTypeArray.count == 0)
{
    NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"視頻類型暫不支持導出"];
    if (completion)
    {
        completion(nil, error, ST_ExportSessionStatusFailed);
    }
    return;
}
else
{
    exportSession.outputFileType = [supportedTypeArray objectAtIndex:0]; // Fall back to the first supported type.
}
// Export asynchronously; every status change is forwarded to the caller on the main queue.
[exportSession exportAsynchronouslyWithCompletionHandler:^(void) {
    dispatch_async(dispatch_get_main_queue(), ^{
        switch (exportSession.status)
        {
            case AVAssetExportSessionStatusUnknown:
            {
                NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportSessionStatusUnknown"];
                if (completion)
                {
                    completion(nil, error, ST_ExportSessionStatusUnknown);
                }
                break;
            }
            case AVAssetExportSessionStatusWaiting:
            {
                // Progress state, not a failure — the error only carries the status name.
                NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportSessionStatusWaiting"];
                if (completion)
                {
                    completion(nil, error, ST_ExportSessionStatusWaiting);
                }
                break;
            }
            case AVAssetExportSessionStatusExporting:
            {
                // Progress state, not a failure — the error only carries the status name.
                NSError *error = [NSError ST_PhotoSDKVideoActionDescription:@"AVAssetExportSessionStatusExporting"];
                if (completion)
                {
                    completion(nil, error, ST_ExportSessionStatusExporting);
                }
                break;
            }
            case AVAssetExportSessionStatusCompleted:
            {
                if (completion)
                {
                    completion(outputURL, nil, ST_ExportSessionStatusCompleted);
                }
                break;
            }
            case AVAssetExportSessionStatusFailed:
            {
                NSError *error = [NSError ST_PhotoSDKVideoActionDescription:[NSString stringWithFormat:@"導出失敗:%@", exportSession.error]];
                if (completion)
                {
                    completion(nil, error, ST_ExportSessionStatusFailed);
                }
                break;
            }
            default:
                break;
        }
    });
}];
}
// Frame extraction + WebP assembly. Sampling below caps the output at ~20 frames
// to keep the generated .webp file small.
/// Converts the video at videoUrl into an animated WebP file on a background queue.
/// @param videoUrl      File URL of the source video.
/// @param progressBlock Called with progress in [0, 1]; may be invoked from the background queue.
/// @param pathBlock     Called with the output file path, or nil when the video has no usable track.
-(void)saveToWebpByVideoPath:(NSURL *)videoUrl andProgressBlock:(void (^)(float))progressBlock andPathBlock:(void (^)(NSString *))pathBlock{
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSString *filePath = [self createWebpFilePath];
        YYImageEncoder *encoder = [[YYImageEncoder alloc] initWithType:YYImageTypeWebP];
        encoder.loopCount = 0;   // 0 == loop forever.
        encoder.quality = 0.4;   // Lossy quality 0.4 to keep file size down (comment previously said 0.5).
        encoder.lossless = NO;   // Lossy compression mode.
        AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
        AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
        generator.appliesPreferredTrackTransform = YES;      // Honor rotation metadata.
        generator.requestedTimeToleranceAfter = kCMTimeZero; // Exact-frame snapshots.
        generator.requestedTimeToleranceBefore = kCMTimeZero;
        generator.maximumSize = CGSizeMake(256, 256);        // Cap decode size at 256x256 (comment previously said 512).
        generator.apertureMode = AVAssetImageGeneratorApertureModeCleanAperture;
        NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
        AVAssetTrack *videoTrack = tracks.firstObject;
        float frameRate = [videoTrack nominalFrameRate]; // nil track messages to 0, caught below.
        float duration = CMTimeGetSeconds([videoTrack timeRange].duration);
        if (!videoTrack || frameRate <= 0) {
            // The original silently skipped both callbacks here, leaving the caller hanging.
            if (progressBlock) progressBlock(1);
            if (pathBlock) pathBlock(nil);
            return;
        }
        int frameCount = (int)(frameRate * duration); // Total source frames = fps * seconds.
        int maxCount = 20;                            // Hard cap on frames written to the WebP.
        int minTime = frameCount / maxCount;          // Over the cap: keep every minTime-th frame.
        // BUG FIX: the original computed 1.0/frameRate*(minTime/2) with INTEGER division,
        // yielding a 0-second frame duration whenever minTime <= 1. Clamp and use floats.
        float frameDuration = (1.0 / frameRate) * (MAX(minTime, 1) / 2.0);
        int lastValidFrameIndex = -1; // Index of the last frame successfully added.
        int count = 0;                // Number of frames added so far.
        for (int i = 0; i < frameCount; i++) {
            // Subsample: when over the cap, skip all but every minTime-th frame.
            if (frameCount > 20 && i % minTime != 0) {
                continue;
            }
            CMTime time = CMTimeMakeWithSeconds(i / frameRate, asset.duration.timescale);
            NSError *error = nil;
            CGImageRef image = [generator copyCGImageAtTime:time actualTime:nil error:&error];
            if (image) { // Skip frames the generator could not produce.
                UIImage *img = [UIImage imageWithCGImage:image];
                img = [self resizeImage:img toSize:CGSizeMake(512, 512)];
                // Re-compress each frame to roughly 10 KB before adding it.
                NSData *data = [GIF2MP4 compressImageQualityWithImage:img toByte:10 * 1024];
                [encoder addImageWithData:data duration:frameDuration];
                lastValidFrameIndex = i;
                CGImageRelease(image);
            }
            count++;
            if (progressBlock) progressBlock(count * 0.04);
            if (count >= 19) {
                break;
            }
        }
        // If the last added frame wasn't the final source frame, append it once more
        // so the animation doesn't cut off abruptly.
        if (lastValidFrameIndex >= 0 && lastValidFrameIndex < frameCount - 1) {
            CMTime time = CMTimeMakeWithSeconds(lastValidFrameIndex / frameRate, asset.duration.timescale);
            CGImageRef image = [generator copyCGImageAtTime:time actualTime:nil error:nil];
            if (image) {
                UIImage *img = [UIImage imageWithCGImage:image];
                img = [self resizeImage:img toSize:CGSizeMake(512, 512)];
                NSData *data = [GIF2MP4 compressImageQualityWithImage:img toByte:10 * 1024];
                [encoder addImageWithData:data duration:frameDuration];
                CGImageRelease(image);
            }
            count += 1;
            // BUG FIX: SVProgressHUD is UI and must run on the main thread; the
            // original called it directly from this background queue.
            dispatch_async(dispatch_get_main_queue(), ^{
                [SVProgressHUD showProgress:count * 0.025 status:@"loadIng"];
            });
            if (progressBlock) progressBlock(count * 0.04);
        }
        // Single encode pass; the original also called [encoder encode] just to log
        // the length, doubling the (expensive) encoding work.
        [encoder encodeToFile:filePath];
        if (progressBlock) progressBlock(1);
        if (pathBlock) pathBlock(filePath);
    });
}
記錄一下,備忘