Implementing Reverse Video Playback with AVFoundation
I. References:
http://www.andyhin.com/post/5/reverse-video-avfoundation
II. Candidate implementation approaches
1. Use AVPlayer's reverse playback during preview
Set the AVPlayer's rate to -1.
First check that the AVPlayerItem's canPlayReverse property is YES.
Since our preview pipeline is built on GPUImage and does not use AVPlayer, we did not investigate this approach further (a minimal sketch follows this list).
2. Use AVMutableComposition to flip every frame's position
Fast, and no temporary file is generated.
Every frame must be a key frame, otherwise playback stutters badly and drops frames.
The time range of a single inserted frame cannot be controlled precisely (see the second sketch after this list).
3. Modify GPUImageMovie to read CVPixelBuffers in reverse order (not viable)
Based on AVPlayerItemVideoOutput's copyPixelBufferForItemTime:itemTimeForDisplay: method, we tried copying pixel buffers in reverse.
Experiments showed the method does not appear to support reverse reading: for the first few seconds of reversed requests it returns pixel buffers in forward order, and after that it always returns NULL. Reading the method's documentation confirmed this.
4. Use AVAssetReader and AVAssetWriter to read every CMSampleBuffer and write the frames back in reverse order (viable)
Playback is very smooth.
However, it requires a temporary file and takes longer to process.
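For completeness, here is a minimal sketch of approach 1. It assumes asset is an AVAsset whose playability keys have already been loaded; this is not the path we took, since preview runs on GPUImage:

// Minimal sketch of approach 1 (hedged: "asset" is an assumption,
// an already-loaded AVAsset).
AVPlayerItem *item = [AVPlayerItem playerItemWithAsset:asset];
AVPlayer *player = [AVPlayer playerWithPlayerItem:item];
// In real code, observe the item's status and only query canPlayReverse
// once the item is ready to play.
if (item.canPlayReverse) {
    // Reverse playback proceeds from the current position toward zero,
    // so seek to the end first.
    [player seekToTime:item.duration completionHandler:^(BOOL finished) {
        if (finished) {
            player.rate = -1.0; // play backwards at 1x
        }
    }];
}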
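Approach 2 would look roughly like the sketch below, assuming sourceTrack is the asset's video track, duration its duration, and fps a constant integer frame rate (all placeholders). Its weakness is exactly what is noted above: each one-frame range is only as accurate as the container's sample tables, and any non-key frame forces a decode from the previous key frame:

AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableCompositionTrack *reversedTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                             preferredTrackID:kCMPersistentTrackID_Invalid];
CMTime frameDuration = CMTimeMake(1, fps);
// Walk the source backwards one frame at a time, appending each
// one-frame range at the end of the composition track.
for (CMTime t = CMTimeSubtract(duration, frameDuration);
     CMTIME_COMPARE_INLINE(t, >=, kCMTimeZero);
     t = CMTimeSubtract(t, frameDuration)) {
    [reversedTrack insertTimeRange:CMTimeRangeMake(t, frameDuration)
                           ofTrack:sourceTrack
                            atTime:kCMTimeInvalid // append at the current end
                             error:nil];
}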
III. Implementing the AVAssetReader/AVAssetWriter reverse-video approach
AHSVVideoReverse.h
//
// AHSVVideoReverse.h
// AHVideoSDKFramework
//
// Created by 李田迎 on 2019/8/13.
// Copyright © 2019 Autohome. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface AHSVVideoReverse : NSObject
@property (nonatomic, copy) NSDictionary *videoSettings; //!< Video settings for the writer input (defaults provided)
@property (nonatomic, strong) NSDictionary *readerOutputSettings; //!< Output format for samples read from the video track
/**
Generate a reversed video from the original asset
@param origAsset The asset to be reversed
@param outputPath File path where the reversed video is written
@param handler Completion block
*/
- (void)reverseVideo:(AVAsset *)origAsset
outputPath:(NSString *)outputPath
completeBlock:(void (^)(NSError *error))handler;
@end
NS_ASSUME_NONNULL_END
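A hypothetical call site for the class (videoURL and the output file name are placeholders); note that the completion block is invoked from the writer's background queue, not necessarily the main thread:

AVAsset *asset = [AVAsset assetWithURL:videoURL];
NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"reversed.mov"];
AHSVVideoReverse *reverser = [[AHSVVideoReverse alloc] init];
[reverser reverseVideo:asset outputPath:outputPath completeBlock:^(NSError *error) {
    if (error) {
        NSLog(@"Reverse failed: %@", error);
    } else {
        NSLog(@"Reversed video written to %@", outputPath);
    }
}];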
AHSVVideoReverse.m
//
// AHSVVideoReverse.m
// AHVideoSDKFramework
//
// Created by 田迎 on 2019/8/13.
// Copyright © 2019. All rights reserved.
//
#import "AHSVVideoReverse.h"
#import "AVAsset+Addition.h"
#import "AHVideoRecordCustomConfig.h"
#define kClipMaxContainCount 10 // Maximum number of frames buffered per clip segment
@interface AHSVVideoReverse ()
@property (nonatomic, strong) AVAsset *origAsset; //!< Original asset
@property (nonatomic, strong) AVAssetReader *assetReader; //!< Asset reader
@property (nonatomic, strong) AVAssetWriter *assetWriter; //!< Media file writer
@property (nonatomic, strong) AVAssetWriterInput *videoWriterInput; //!< Video input that samples are appended to
@property (nonatomic, strong) AVAssetReaderTrackOutput *videoTrackOutput; //!< Video track output
//The adaptor maintains a CVPixelBufferPool for efficient writes and can append a CVPixelBuffer together with an explicit timestamp
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
@property (nonatomic, strong) NSURL *outputURL; //!< Output file URL
@property (nonatomic, strong) dispatch_queue_t inputQueue; //!< Serial queue for the media writing work
@property (nonatomic, assign) CGSize targetSize; //!< Size of the exported video
@property (nonatomic, assign) float fps; //!< Frame rate
@property (nonatomic, copy) void (^completionHandler)(NSError *); //!< Completion block
//Internal state
@property (nonatomic, strong) NSMutableArray *sampleTimeArray; //!< Presentation timestamps of all samples, in forward order
@property (nonatomic, strong) NSMutableArray *clipTimeRangeArray; //!< Time ranges of the clip segments
@end
@implementation AHSVVideoReverse
#pragma mark -
#pragma mark LifeCycle Method
- (instancetype)init {
if (self = [super init]) {
}
return self;
}
- (void)dealloc {
}
#pragma mark -
#pragma mark Public Method
- (void)reverseVideo:(AVAsset *)origAsset
outputPath:(NSString *)outputPath
completeBlock:(void (^)(NSError *error))handler {
self.completionHandler = handler;
if (!origAsset) {
NSError *error = [NSError errorWithDomain:@"com.avvideo.videoReverse" code:-100 userInfo:@{@"msg": @"origAsset must not be nil!"}];
if (self.completionHandler) {
self.completionHandler(error);
}
return;
}
if (!origAsset.videoTrack) {
NSError *error = [NSError errorWithDomain:@"com.avvideo.videoReverse" code:-101 userInfo:@{@"msg": @"origAsset contains no video track!"}];
if (self.completionHandler) {
self.completionHandler(error);
}
return;
}
if (outputPath.length == 0) {
NSError *error = [NSError errorWithDomain:@"com.avvideo.videoReverse" code:-102 userInfo:@{@"msg": @"outputPath must not be empty!"}];
if (self.completionHandler) {
self.completionHandler(error);
}
return;
}
self.outputURL = [NSURL fileURLWithPath:outputPath];
//Remove any existing file at the output path
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
[[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
}
self.origAsset = origAsset;
WEAKSELF;
[self.origAsset loadValuesAsynchronouslyForKeys:@[@"duration", @"tracks"] completionHandler:^{
dispatch_async(weakSelf.inputQueue, ^{
[weakSelf startReverseProcess];
});
}];
}
#pragma mark -
#pragma mark Private Method
- (void)startReverseProcess {
[self cancelReverse];
self.targetSize = self.origAsset.videoTrackSize;
self.fps = self.origAsset.videoTrack.nominalFrameRate;
//1. Build the per-frame timestamp array and the clip segment array
[self generateSampleTimesArray];
//2. Process all segments: read forward, write in reverse
[self processReadReverseWriter];
}
//Build the per-frame timestamp array (used to stamp each reversed CVPixelBuffer with an exact presentation time) and the clip segment array
- (void)generateSampleTimesArray {
if ([self.assetReader canAddOutput:self.videoTrackOutput]) {
[self.assetReader addOutput:self.videoTrackOutput];
}
[self.assetReader startReading];
CMSampleBufferRef sample;
NSUInteger processIndex = 0;
CMTime startTime = kCMTimeZero;
CMTime endTime = kCMTimeZero;
CMTime presentationTime = kCMTimeZero;
while((sample = [self.videoTrackOutput copyNextSampleBuffer])) {
presentationTime = CMSampleBufferGetPresentationTimeStamp(sample);
NSValue *presentationValue = [NSValue valueWithCMTime:presentationTime];
[self.sampleTimeArray addObject:presentationValue];
CFRelease(sample);
sample = NULL;
if (processIndex == 0) {
startTime = presentationTime;
processIndex ++;
} else if (processIndex == kClipMaxContainCount-1) {
endTime = presentationTime;
CMTimeRange timeRange = CMTimeRangeMake(startTime, CMTimeSubtract(endTime, startTime));
NSValue *timeRangeValue = [NSValue valueWithCMTimeRange:timeRange];
[self.clipTimeRangeArray addObject:timeRangeValue];
processIndex = 0;
startTime = kCMTimeZero;
endTime = kCMTimeZero;
} else {
processIndex ++;
}
}
//Handle a trailing partial clip with fewer than kClipMaxContainCount frames
//(checking processIndex rather than startTime also covers assets whose first PTS is exactly zero)
if (processIndex > 0 && CMTIME_COMPARE_INLINE(kCMTimeZero, ==, endTime)) {
endTime = presentationTime;
//Special-case a trailing clip that contains only a single frame
if (CMTIME_COMPARE_INLINE(endTime, ==, startTime) &&
processIndex == 1) {
startTime = CMTimeSubtract(startTime, CMTimeMake(1, (int32_t)roundf(self.fps)));
}
CMTimeRange timeRange = CMTimeRangeMake(startTime, CMTimeSubtract(endTime, startTime));
NSValue *timeRangeValue = [NSValue valueWithCMTimeRange:timeRange];
[self.clipTimeRangeArray addObject:timeRangeValue];
}
}
- (void)processReadReverseWriter {
CMSampleBufferRef sampleBuffer;
//1. Defensive: drain any samples left over from the first (timestamp) pass
while((sampleBuffer = [self.videoTrackOutput copyNextSampleBuffer])) {
CFRelease(sampleBuffer);
}
//2. Add the writer input to the asset writer and start the read/write session
if ([self.assetWriter canAddInput:self.videoWriterInput]) {
[self.assetWriter addInput:self.videoWriterInput];
}
//Touch the lazy getter so the adaptor is created before startWriting (adaptors cannot be attached once writing has started)
[self videoPixelBufferAdaptor];
BOOL success = [self.assetWriter startWriting];
if (!success) {
NSLog(@"self.assetWriter error = %@", self.assetWriter.error);
}
[self.assetWriter startSessionAtSourceTime:kCMTimeZero];
NSUInteger clipCount = self.clipTimeRangeArray.count;
//Index of the frame currently being written; walks sampleTimeArray in forward order
NSUInteger frameIndex = 0;
for (NSInteger i=clipCount-1; i>=0; i--) {
NSValue *clipTimeRangeValue = [self.clipTimeRangeArray objectAtIndex:i];
[self.videoTrackOutput resetForReadingTimeRanges:@[clipTimeRangeValue]];
//Read all frames of this clip into a temporary buffer
NSMutableArray *tempSampleArray = [[NSMutableArray alloc] init];
while((sampleBuffer = [self.videoTrackOutput copyNextSampleBuffer])) {
[tempSampleArray addObject:(__bridge id)sampleBuffer];
CFRelease(sampleBuffer);
}
//Append this clip's frames to the writer in reverse order, reusing the forward timestamp sequence
for (NSInteger j=0; j<tempSampleArray.count; j++) {
//Defensive bounds check
if (frameIndex >= self.sampleTimeArray.count) {
continue;
}
NSValue *timeValue = [self.sampleTimeArray objectAtIndex:frameIndex];
CMTime frameTime = [timeValue CMTimeValue];
// CMTimeShow(frameTime);
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer((__bridge CMSampleBufferRef)tempSampleArray[tempSampleArray.count - j - 1]);
// Append the frame to the output; bail out if the writer has failed
BOOL appendSuccess = NO;
while (!appendSuccess && self.assetWriter.status == AVAssetWriterStatusWriting) {
if (self.videoPixelBufferAdaptor.assetWriterInput.readyForMoreMediaData) {
appendSuccess = [self.videoPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:frameTime];
if (!appendSuccess) {
NSLog(@"appendPixelBuffer error at time: %lld", frameTime.value);
}
} else {
// Writer input not ready yet; back off briefly
[NSThread sleepForTimeInterval:0.05];
}
}
frameIndex ++;
}
}
[self.videoWriterInput markAsFinished];
WEAKSELF;
[self.assetWriter finishWritingWithCompletionHandler:^{
if (weakSelf.completionHandler) {
//Pass the writer's error if finishing failed; nil on success
weakSelf.completionHandler(weakSelf.assetWriter.error);
}
}];
}
- (void)cancelReverse {
if (!_inputQueue) {
return;
}
if (_assetReader && _assetReader.status == AVAssetReaderStatusReading) {
[self.assetReader cancelReading];
}
_assetReader = nil;
if (_assetWriter && _assetWriter.status == AVAssetWriterStatusWriting) {
[self.assetWriter cancelWriting];
}
_assetWriter = nil;
_videoTrackOutput = nil;
_videoWriterInput = nil;
_videoPixelBufferAdaptor = nil;
_clipTimeRangeArray = nil;
_sampleTimeArray = nil;
}
#pragma mark -
#pragma mark Get Method
- (AVAssetReader *)assetReader {
if (!_assetReader) {
NSError *error;
_assetReader = [[AVAssetReader alloc] initWithAsset:self.origAsset error:&error];
if (error) {
NSLog(@"assetReader 創(chuàng)建失敗!! %@", error);
}
}
return _assetReader;
}
- (AVAssetWriter *)assetWriter {
if (!_assetWriter) {
NSError *writerError;
_assetWriter = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:AVFileTypeQuickTimeMovie error:&writerError];
_assetWriter.shouldOptimizeForNetworkUse = YES;
if (writerError) {
NSLog(@"assetWriter 創(chuàng)建失敗 %@", writerError);
}
}
return _assetWriter;
}
- (AVAssetReaderTrackOutput *)videoTrackOutput {
if (!_videoTrackOutput) {
_videoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:self.origAsset.videoTrack outputSettings:self.readerOutputSettings];
//Allow non-sequential reads; resetForReadingTimeRanges: is only usable when this is YES
_videoTrackOutput.supportsRandomAccess = YES;
//The CVPixelBuffer contents of each sample are never modified, so no defensive copy is needed
_videoTrackOutput.alwaysCopiesSampleData = NO;
}
return _videoTrackOutput;
}
- (AVAssetWriterInput *)videoWriterInput {
if (!_videoWriterInput) {
_videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
_videoWriterInput.expectsMediaDataInRealTime = NO;
[_videoWriterInput setTransform:self.origAsset.videoTrack.preferredTransform];
}
return _videoWriterInput;
}
- (AVAssetWriterInputPixelBufferAdaptor *)videoPixelBufferAdaptor {
if (!_videoPixelBufferAdaptor) {
NSDictionary *pixelBufferAttributes = @{
(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
(id)kCVPixelBufferWidthKey: @(self.targetSize.width),
(id)kCVPixelBufferHeightKey: @(self.targetSize.height),
@"IOSurfaceOpenGLESTextureCompatibility": @YES,
@"IOSurfaceOpenGLESFBOCompatibility": @YES,
};
_videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoWriterInput sourcePixelBufferAttributes:pixelBufferAttributes];
}
return _videoPixelBufferAdaptor;
}
- (NSDictionary *)readerOutputSettings {
if (!_readerOutputSettings) {
//Decode to 4:2:0 bi-planar video-range (NV12) pixel buffers
_readerOutputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
}
return _readerOutputSettings;
}
- (dispatch_queue_t)inputQueue {
if (!_inputQueue) {
_inputQueue = dispatch_queue_create("com.ahvideo.reverseInputQueue", DISPATCH_QUEUE_SERIAL);
}
return _inputQueue;
}
- (NSDictionary *)videoSettings {
if (!_videoSettings) {
_videoSettings = @{AVVideoCodecKey: AVVideoCodecH264,
AVVideoWidthKey: @(self.targetSize.width),
AVVideoHeightKey: @(self.targetSize.height),
AVVideoCompressionPropertiesKey: @{
AVVideoAverageBitRateKey: @(kDefaultVideoBitRate * 1000),
AVVideoExpectedSourceFrameRateKey : @(kDefaultVideoFrameRate),
AVVideoMaxKeyFrameIntervalKey : @(kDefaultVideoKeyFrameInterval),
AVVideoProfileLevelKey: kDefaultVideoProfileLevel
},
};
}
return _videoSettings;
}
- (NSMutableArray *)sampleTimeArray {
if (!_sampleTimeArray) {
_sampleTimeArray = [[NSMutableArray alloc] initWithCapacity:100];
}
return _sampleTimeArray;
}
- (NSMutableArray *)clipTimeRangeArray {
if (!_clipTimeRangeArray) {
_clipTimeRangeArray = [[NSMutableArray alloc] initWithCapacity:20];
}
return _clipTimeRangeArray;
}
#pragma mark -
#pragma mark Set Method
@end
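To summarize the design: the first pass records every frame's presentation timestamp in forward order and slices the video into segments of at most kClipMaxContainCount (10) frames; the second pass walks the segments from last to first with resetForReadingTimeRanges:, decodes each segment forward (as the decoder requires), reverses its frames in memory, and stamps them with the original forward timestamp sequence. Peak memory is therefore bounded by roughly ten decoded frames rather than the whole video, at the cost of re-seeking the reader once per segment.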