看過《東成西就》的小伙伴,都知道周伯通為了給師兄報仇,用了三花聚頂,不慎走火入魔,讓時間倒流.于是,神奇的一幕發(fā)生了,歐陽鋒想殺洪七公就是殺不死,段王爺的小便卻總是解不完......看到這一幕,我就忍不住想搞一個視頻逆轉的方法.
其實,視頻逆向播放的原理很簡單.用通俗的話說,視頻本質上是一張張圖片連續(xù)播放出來的.所以我們只需要把組成視頻的圖片逆向排列一下,再播放就OK了.
但是,視頻因為是由很多圖片組成的,所以如果一下子把所有圖片加載到內存中,然后再重排,這樣1分鐘的視頻,就能占到500M,顯然不是理想的.
<b>于是想到一種解決內存的方法:</b>
1,我們把視頻進行分割成很多1秒的小視頻,把這么多視頻暫時放在沙盒里.
2,然后逐一的把1秒的視頻進行逆向排列,然后得到很多1秒的逆向播放的視頻.
3,最后,我們再把1秒的逆向的視頻組合起來,完畢.
所以,只要會了分割視頻,逆向視頻,合并視頻,就可以解決問題了.親測,這樣不管視頻的長短內存可以穩(wěn)定在80M左右.但是長視頻處理的時間肯定很長.
<b>1,遞歸分割視頻</b>
<pre>
/// Recursively splits the video at `assetPath` into ~1-second segments,
/// exporting each segment to the sandbox directory `kVideoPath`.
/// Segment file paths are accumulated in `self.paths`; `self.videoNum`
/// numbers the output files. When `startPoint` reaches the end of the
/// asset, `complete` is invoked with the collected paths.
///
/// @param assetPath  Path of the source video in the sandbox.
/// @param startPoint Time at which the next segment begins (pass kCMTimeZero initially).
/// @param complete   Called once with `self.paths` when splitting finishes. May be nil.
- (void)trimWithAssetPath:(NSString *)assetPath
               startPoint:(CMTime)startPoint
                 complete:(CompletePaths)complete {
    // Load the asset and pick out its video/audio tracks (either may be absent).
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:assetPath]];
    AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];

    CMTime assetTime = [asset duration];
    // Base case: startPoint has reached (or passed) the end of the asset.
    // FIX: compare with CMTimeCompare instead of testing
    // CMTimeGetSeconds(...) == 0, which is a fragile floating-point equality.
    if (CMTimeCompare(startPoint, assetTime) >= 0) {
        if (complete) {
            complete(self.paths);
        }
        return;
    }

    // Each segment is 1 second long; clamp the final segment to whatever
    // time remains so we never read past the end of the asset.
    CMTime intervalTime = CMTimeMake(assetTime.timescale, assetTime.timescale);
    CMTime endTime = CMTimeAdd(startPoint, intervalTime);
    if (CMTimeCompare(endTime, assetTime) > 0) {
        intervalTime = CMTimeSubtract(assetTime, startPoint);
        endTime = assetTime;
    }

    // Build an empty composition and copy the [startPoint, startPoint+interval)
    // range of each available track into it.
    AVMutableComposition *mutableComposition = [AVMutableComposition composition];
    NSError *error = nil;
    if (assetVideoTrack) {
        AVMutableCompositionTrack *compositionVideoTrack =
            [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(startPoint, intervalTime)
                                       ofTrack:assetVideoTrack
                                        atTime:kCMTimeZero
                                         error:&error];
    }
    // FIX: guard the audio insertion — the original passed a possibly-nil
    // track to insertTimeRange:ofTrack:, which fails for silent videos.
    if (assetAudioTrack) {
        AVMutableCompositionTrack *compositionAudioTrack =
            [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                            preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(startPoint, intervalTime)
                                       ofTrack:assetAudioTrack
                                        atTime:kCMTimeZero
                                         error:&error];
    }

    // Destination path for this segment, numbered by self.videoNum.
    NSString *outPath = [kVideoPath stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"%d.mp4", self.videoNum]];
    self.videoNum++;
    NSURL *mergeFileURL = [NSURL fileURLWithPath:outPath];

    // Export the 1-second composition to the sandbox, then recurse.
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mutableComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // FIX: only record the path when the export actually succeeded;
        // the original appended unconditionally.
        if (exporter.status == AVAssetExportSessionStatusCompleted) {
            [self.paths addObject:outPath];
        } else {
            NSLog(@"segment export failed: %@", exporter.error);
        }
        // Recurse to split the next segment.
        [self trimWithAssetPath:assetPath startPoint:endTime complete:complete];
    }];
}
</pre>
<b>2.0,遞歸逆向處理視頻</b>
<pre>
/// Recursively reverses each segment listed in `self.paths`.
/// `self.videoNum` is the index of the segment currently being processed;
/// once it reaches `self.paths.count`, every segment has been reversed.
/// On each step the forward-playing segment file is deleted and its entry
/// in `self.paths` is replaced with the reversed segment's path.
///
/// @param complete Called once with `self.paths` when all segments are reversed. May be nil.
- (void)reversePathsComplete:(CompletePaths)complete {
    // Base case: every segment has been reversed.
    if (self.videoNum == self.paths.count) {
        // FIX: nil-check the block before invoking it (the trim method
        // already guards its callback; this one did not).
        if (complete) {
            complete(self.paths);
        }
        return;
    }
    NSString *path = self.paths[self.videoNum];
    AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:path]];
    NSString *reverseName = [NSString stringWithFormat:@"reverse%d.mp4", self.videoNum];
    NSString *reversedPath = [kVideoPath stringByAppendingPathComponent:reverseName];
    NSURL *outputUrl = [NSURL fileURLWithPath:reversedPath];
    // Perform the actual frame-order reversal for this one segment.
    [self assetByReversingAsset:asset outputURL:outputUrl complete:^(AVAsset *reversedAsset) {
        NSError *error = nil;
        // The forward segment is no longer needed once it has been reversed.
        [self.filemanager removeItemAtPath:self.paths[self.videoNum] error:&error];
        // Swap the stored path so self.paths now points at the reversed clip.
        [self.paths replaceObjectAtIndex:self.videoNum withObject:reversedPath];
        self.videoNum++;
        // Recurse to reverse the next segment.
        [self reversePathsComplete:complete];
    }];
}
</pre>
<b>2.1,遞歸逆向處理具體實現</b>
<pre>
/// Reverses the frame order of `asset`'s video track and writes the result
/// to `outputURL` as an MP4, then invokes `complete` with the new asset.
/// Only the video track is processed — audio is not carried over.
/// NOTE(review): all frames are buffered in memory, which is why callers
/// feed this 1-second segments rather than the whole video.
///
/// @param asset     Source asset whose video track will be reversed.
/// @param outputURL Sandbox file URL for the reversed MP4.
/// @param complete  Called with an asset for the written file. May be nil.
- (void)assetByReversingAsset:(AVAsset *)asset
                    outputURL:(NSURL *)outputURL
                     complete:(void (^)(AVAsset *asset))complete {
    NSError *error = nil;
    // AVAssetReader pulls decoded samples out of the source asset.
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] lastObject];
    // FIX: the original declarations of readerOutputSettings and
    // readerOutput were missing the `*` and did not compile.
    NSDictionary *readerOutputSettings =
        @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
    AVAssetReaderTrackOutput *readerOutput =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                   outputSettings:readerOutputSettings];
    [reader addOutput:readerOutput];
    [reader startReading];

    // Collect every decoded frame. NSArray retains each buffer, so the
    // CFRelease balances the +1 from copyNextSampleBuffer.
    NSMutableArray *samples = [[NSMutableArray alloc] init];
    CMSampleBufferRef sample;
    while ((sample = [readerOutput copyNextSampleBuffer])) {
        [samples addObject:(__bridge id)sample];
        CFRelease(sample);
    }
    // FIX: guard against a track that yielded no frames — samples[0]
    // below would throw an out-of-bounds exception.
    if (samples.count == 0) {
        if (complete) {
            complete(asset);
        }
        return;
    }

    // AVAssetWriter re-encodes the frames into the sandbox file.
    AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:outputURL
                                                      fileType:AVFileTypeMPEG4
                                                         error:&error];
    NSDictionary *videoCompressionProps =
        @{AVVideoAverageBitRateKey : @(videoTrack.estimatedDataRate)};
    NSDictionary *writerOutputSettings = @{
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : @((int)videoTrack.naturalSize.width),
        AVVideoHeightKey : @((int)videoTrack.naturalSize.height),
        AVVideoCompressionPropertiesKey : videoCompressionProps,
    };
    AVAssetWriterInput *writerInput =
        [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                       outputSettings:writerOutputSettings
                                     sourceFormatHint:(__bridge CMFormatDescriptionRef)
                                                          [videoTrack.formatDescriptions lastObject]];
    [writerInput setExpectsMediaDataInRealTime:NO];
    // The adaptor lets us append raw pixel buffers to the writer input.
    AVAssetWriterInputPixelBufferAdaptor *pixelBufferAdaptor =
        [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:writerInput
                                                   sourcePixelBufferAttributes:nil];
    [writer addInput:writerInput];
    [writer startWriting];
    [writer startSessionAtSourceTime:
                CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[0])];

    // Append frames from the TAIL of the array while reusing the timing of
    // the frames from the FRONT — this is what reverses playback.
    for (NSInteger i = 0; i < samples.count; i++) {
        CMTime presentationTime =
            CMSampleBufferGetPresentationTimeStamp((__bridge CMSampleBufferRef)samples[i]);
        CVPixelBufferRef imageBufferRef =
            CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)samples[samples.count - i - 1]);
        // Wait until the writer can take more data.
        // NOTE(review): sleeping in a loop is crude; a pull-model pump via
        // requestMediaDataWhenReadyOnQueue:usingBlock: would be preferable.
        while (!writerInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.01];
        }
        [pixelBufferAdaptor appendPixelBuffer:imageBufferRef
                         withPresentationTime:presentationTime];
    }
    [writer finishWritingWithCompletionHandler:^{
        // FIX: nil-check the callback before invoking it.
        if (complete) {
            complete([AVAsset assetWithURL:outputURL]);
        }
    }];
}
</pre>
<b>3,合并視頻</b>
<pre>
/// Concatenates the clips at `paths` into a single video at `outputPath`.
/// Each clip is inserted at time zero — i.e. PREPENDED — so passing the
/// segments in forward order yields a fully reversed final video without
/// reversing the array first. Each segment file is deleted as it is consumed.
///
/// @param paths      Sandbox paths of the (already reversed) segments.
/// @param outputPath Sandbox path for the merged result.
/// @param completed  Called on the main queue when the export finishes. May be nil.
- (void)mergeVideosWithPaths:(NSArray *)paths
                  outputPath:(NSString *)outputPath
                   completed:(void (^)(void))completed {
    if (!paths.count) return;
    // FIX: the original listing was missing `*` on outputPath and
    // mixComposition, and referenced an undeclared `error` variable.
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    videoTrack.preferredTransform = CGAffineTransformIdentity;
    for (int i = 0; i < paths.count; i++) {
        // Load each segment and grab its video track.
        AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:paths[i]]];
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        NSError *errorVideo = nil;
        // Insert at kCMTimeZero to prepend this clip ahead of the ones
        // already in the composition.
        BOOL inserted = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                            ofTrack:assetVideoTrack
                                             atTime:kCMTimeZero
                                              error:&errorVideo];
        if (!inserted) {
            NSLog(@"insertTimeRange failed for %@: %@", paths[i], errorVideo);
        }
        // Delete each segment as soon as it has been stitched in.
        NSError *removeError = nil;
        [self.filemanager removeItemAtPath:paths[i] error:&removeError];
    }
    // Export the stitched composition to the final output path.
    NSURL *mergeFileURL = [NSURL fileURLWithPath:outputPath];
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // FIX: nil-check the callback before invoking it.
            if (completed) {
                completed();
            }
        });
    }];
}
</pre>
<a >Demo github地址</a>.如果您有更好的方法,也請不吝賜教.