iOS 視頻壓縮 SDAVAssetExportSession,可以自定義壓縮大小

最近項(xiàng)目中需要用到視頻壓縮的功能,發(fā)現(xiàn)系統(tǒng)的壓縮方法不太好用,達(dá)不到質(zhì)量大小與清晰度相匹配的需求。所以在 GitHub 上找到了這位大神寫的 SDAVAssetExportSession,先分享給有需要的朋友們。
SDAVAssetExportSession.h文件

 #import <Foundation/Foundation.h>
 #import <AVFoundation/AVFoundation.h>

@protocol SDAVAssetExportSessionDelegate;

/// Drop-in replacement for AVAssetExportSession that encodes through an
/// AVAssetReader/AVAssetWriter pair, allowing full control over the video
/// and audio compression settings (bitrate, resolution, codec, ...).
@interface SDAVAssetExportSession : NSObject

/// Optional delegate that gets a chance to render each video frame into the
/// output buffer before it is written.
@property (nonatomic, weak) id<SDAVAssetExportSessionDelegate> delegate;

/// The source asset being exported (set via the initializer).
@property (nonatomic, strong, readonly) AVAsset *asset;
/// Custom video composition; when nil a default pass-through composition is built.
@property (nonatomic, copy) AVVideoComposition *videoComposition;

/// Optional audio mix applied while reading the audio tracks.
@property (nonatomic, copy) AVAudioMix *audioMix;

/// Container type for the output file, e.g. AVFileTypeMPEG4. Required.
@property (nonatomic, copy) NSString *outputFileType;

/// Destination file URL. Required; export fails immediately when unset.
@property (nonatomic, copy) NSURL *outputURL;
/// Settings handed to the AVAssetReader video output (decompression side).
@property (nonatomic, copy) NSDictionary *videoInputSettings;
/// Settings handed to the AVAssetWriter video input (compression side),
/// e.g. AVVideoCodecKey / AVVideoWidthKey / AVVideoHeightKey.
@property (nonatomic, copy) NSDictionary *videoSettings;
/// Settings handed to the AVAssetWriter audio input (compression side).
@property (nonatomic, copy) NSDictionary *audioSettings;
/// Portion of the asset to export; defaults to the full duration.
@property (nonatomic, assign) CMTimeRange timeRange;
/// Forwarded to AVAssetWriter.shouldOptimizeForNetworkUse.
@property (nonatomic, assign) BOOL shouldOptimizeForNetworkUse;

/// Metadata items written into the output file.
@property (nonatomic, copy) NSArray *metadata;
/// First error encountered, or the writer's/reader's error. Nil on success.
@property (nonatomic, strong, readonly) NSError *error;
/// Export progress in [0, 1], driven by video sample presentation times.
@property (nonatomic, assign, readonly) float progress;
/// Mirrors AVAssetExportSession's status values, derived from the writer.
@property (nonatomic, assign, readonly) AVAssetExportSessionStatus status;

// NOTE(review): these should return `instancetype` rather than `id` per
// modern convention; left as-is to stay in sync with the implementation.
+ (id)exportSessionWithAsset:(AVAsset *)asset;

- (id)initWithAsset:(AVAsset *)asset;
/// Starts the export; `handler` is invoked on completion, failure or cancel.
/// Check `status`/`error` inside the handler.
- (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler;
/// Cancels an in-flight export asynchronously on the encoding queue.
- (void)cancelExport;

@end


@protocol SDAVAssetExportSessionDelegate <NSObject>
/// Called once per video frame during export. Implementations should render
/// `pixelBuffer` (the decoded source frame) into `renderBuffer`, which is the
/// buffer that will actually be appended to the output file.
- (void)exportSession:(SDAVAssetExportSession *)exportSession renderFrame:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime toBuffer:(CVPixelBufferRef)renderBuffer;

@end

//////////////////////////////////////////////////////////////////////////////
SDAVAssetExportSession.m文件

#import "SDAVAssetExportSession.h"
// Private state for the reader -> writer transcode pipeline.
@interface SDAVAssetExportSession ()

// Readwrite override of the publicly readonly progress property.
@property (nonatomic, assign, readwrite) float progress;

// Reading side.
@property (nonatomic, strong) AVAssetReader *reader;
@property (nonatomic, strong) AVAssetReaderVideoCompositionOutput *videoOutput;
@property (nonatomic, strong) AVAssetReaderAudioMixOutput *audioOutput;

// Writing side.
@property (nonatomic, strong) AVAssetWriter *writer;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoPixelBufferAdaptor;
@property (nonatomic, strong) AVAssetWriterInput *audioInput;

// Serial queue on which all sample encoding (and cancellation) runs.
@property (nonatomic, strong) dispatch_queue_t inputQueue;

// FIX: block properties must be `copy` (was `strong`) so a stack block passed
// by the caller is guaranteed to be copied to the heap; `copy` is also the
// documented convention for block properties under ARC.
@property (nonatomic, copy) void (^completionHandler)(void);

@end

@implementation SDAVAssetExportSession
 {
NSError *_error;
NSTimeInterval duration;
CMTime lastSamplePresentationTime;
  }

 + (id)exportSessionWithAsset:(AVAsset *)asset
 {
     // Convenience factory wrapping the designated initializer.
     return [[SDAVAssetExportSession alloc] initWithAsset:asset];
 }

 - (id)initWithAsset:(AVAsset *)asset
 {
     // Designated initializer: keep a reference to the source asset and
     // default the export range to the asset's entire duration.
     self = [super init];
     if (self)
     {
         _asset = asset;
         _timeRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);
     }
     return self;
 }

// Kicks off the export: builds the reader/writer pipeline, then pumps video
// and audio samples on a private serial queue until both tracks complete.
// `handler` is stored and fired once from -complete (success, failure or cancel).
- (void)exportAsynchronouslyWithCompletionHandler:(void (^)(void))handler
{
NSParameterAssert(handler != nil);
// NOTE(review): -cancelExport dispatches asynchronously onto inputQueue, so a
// previous in-flight export may still be tearing down while this one starts.
[self cancelExport];
self.completionHandler = handler;

// Fail fast when no destination was configured.
if (!self.outputURL)
{
    _error = [NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorExportFailed userInfo:@
    {
        NSLocalizedDescriptionKey: @"Output URL not set"
    }];
    handler();
    return;
}

// NOTE(review): Cocoa convention is to test the returned object for nil
// rather than the NSError out-parameter; this checks the error pointer.
NSError *readerError;
self.reader = [AVAssetReader.alloc initWithAsset:self.asset error:&readerError];
if (readerError)
{
    _error = readerError;
    handler();
    return;
}

NSError *writerError;
self.writer = [AVAssetWriter assetWriterWithURL:self.outputURL fileType:self.outputFileType error:&writerError];
if (writerError)
{
    _error = writerError;
    handler();
    return;
}

self.reader.timeRange = self.timeRange;
self.writer.shouldOptimizeForNetworkUse = self.shouldOptimizeForNetworkUse;
self.writer.metadata = self.metadata;

NSArray *videoTracks = [self.asset tracksWithMediaType:AVMediaTypeVideo];


// Cache the export duration (seconds) for progress computation: the trimmed
// range when one was set, otherwise the whole asset.
if (CMTIME_IS_VALID(self.timeRange.duration) && !CMTIME_IS_POSITIVE_INFINITY(self.timeRange.duration))
{
    duration = CMTimeGetSeconds(self.timeRange.duration);
}
else
{
    duration = CMTimeGetSeconds(self.asset.duration);
}
//
// Video output
//
if (videoTracks.count > 0) {
    self.videoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:videoTracks videoSettings:self.videoInputSettings];
    // Avoid an extra copy of every decoded sample; buffers are consumed serially.
    self.videoOutput.alwaysCopiesSampleData = NO;
    if (self.videoComposition)
    {
        self.videoOutput.videoComposition = self.videoComposition;
    }
    else
    {
        // No caller-supplied composition: build a pass-through one that applies
        // the track's preferred transform and scales to videoSettings' size.
        self.videoOutput.videoComposition = [self buildDefaultVideoComposition];
    }
    if ([self.reader canAddOutput:self.videoOutput])
    {
        [self.reader addOutput:self.videoOutput];
    }

    //
    // Video input
    //
    self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
    // Offline transcode: let the writer throttle via requestMediaDataWhenReady.
    self.videoInput.expectsMediaDataInRealTime = NO;
    if ([self.writer canAddInput:self.videoInput])
    {
        [self.writer addInput:self.videoInput];
    }
    // Pixel-buffer pool attributes for the delegate rendering path
    // (BGRA buffers sized to the composition's render size).
    NSDictionary *pixelBufferAttributes = @
    {
        (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
        (id)kCVPixelBufferWidthKey: @(self.videoOutput.videoComposition.renderSize.width),
        (id)kCVPixelBufferHeightKey: @(self.videoOutput.videoComposition.renderSize.height),
        @"IOSurfaceOpenGLESTextureCompatibility": @YES,
        @"IOSurfaceOpenGLESFBOCompatibility": @YES,
    };
    self.videoPixelBufferAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:self.videoInput sourcePixelBufferAttributes:pixelBufferAttributes];
}

//
//Audio output
//
NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
if (audioTracks.count > 0) {
  self.audioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
  self.audioOutput.alwaysCopiesSampleData = NO;
  self.audioOutput.audioMix = self.audioMix;
  if ([self.reader canAddOutput:self.audioOutput])
  {
      [self.reader addOutput:self.audioOutput];
  }
} else {
    // Just in case this gets reused
    self.audioOutput = nil;
}

//
// Audio input
//
if (self.audioOutput) {
    self.audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
    self.audioInput.expectsMediaDataInRealTime = NO;
    if ([self.writer canAddInput:self.audioInput])
    {
        [self.writer addInput:self.audioInput];
    }
}

// Start both ends of the pipeline; the writer session begins at the trim start
// so output timestamps are rebased to zero.
[self.writer startWriting];
[self.reader startReading];
[self.writer startSessionAtSourceTime:self.timeRange.start];

// Video and audio are pumped independently on the same serial queue;
// whichever finishes last triggers -finish. The flags are guarded by
// @synchronized because the two completion blocks can interleave.
__block BOOL videoCompleted = NO;
__block BOOL audioCompleted = NO;
__weak typeof(self) wself = self;
self.inputQueue = dispatch_queue_create("VideoEncoderInputQueue", DISPATCH_QUEUE_SERIAL);
if (videoTracks.count > 0) {
    [self.videoInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
    {
        // encodeReadySamples... returns NO when this track is done (or errored).
        if (![wself encodeReadySamplesFromOutput:wself.videoOutput toInput:wself.videoInput])
        {
            @synchronized(wself)
            {
                videoCompleted = YES;
                if (audioCompleted)
                {
                    [wself finish];
                }
            }
        }
    }];
}
else {
    videoCompleted = YES;
}

if (!self.audioOutput) {
    audioCompleted = YES;
} else {
    [self.audioInput requestMediaDataWhenReadyOnQueue:self.inputQueue usingBlock:^
     {
         if (![wself encodeReadySamplesFromOutput:wself.audioOutput toInput:wself.audioInput])
         {
             @synchronized(wself)
             {
                 audioCompleted = YES;
                 if (videoCompleted)
                 {
                     [wself finish];
                 }
             }
         }
     }];
   }
  }

 // Pumps samples from `output` to `input` while the writer input can accept
 // more data. Returns YES when the input is merely momentarily full (caller's
 // requestMediaDataWhenReady block will fire again), NO when the track is
 // finished or an error occurred (caller then checks completion/finish).
 - (BOOL)encodeReadySamplesFromOutput:(AVAssetReaderOutput *)output toInput:(AVAssetWriterInput *)input
  {
while (input.isReadyForMoreMediaData)
{
    CMSampleBufferRef sampleBuffer = [output copyNextSampleBuffer];
    if (sampleBuffer)
    {
        BOOL handled = NO;
        BOOL error = NO;

        // If either side of the pipeline left its running state, stop and
        // report an error; the sample is dropped (handled) below.
        if (self.reader.status != AVAssetReaderStatusReading || self.writer.status != AVAssetWriterStatusWriting)
        {
            handled = YES;
            error = YES;
        }
        
        // Video samples drive progress and, optionally, delegate rendering.
        if (!handled && self.videoOutput == output)
        {
            // update the video progress
            // Rebase the timestamp against the trim start so progress spans [0, 1].
            lastSamplePresentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            lastSamplePresentationTime = CMTimeSubtract(lastSamplePresentationTime, self.timeRange.start);
            self.progress = duration == 0 ? 1 : CMTimeGetSeconds(lastSamplePresentationTime) / duration;

            if ([self.delegate respondsToSelector:@selector(exportSession:renderFrame:withPresentationTime:toBuffer:)])
            {
                // Delegate path: hand the decoded frame to the delegate, which
                // renders into a pool buffer that we append via the adaptor.
                // NOTE(review): the CVPixelBufferPoolCreatePixelBuffer result
                // code is not checked; renderBuffer could be NULL here.
                CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
                CVPixelBufferRef renderBuffer = NULL;
                CVPixelBufferPoolCreatePixelBuffer(NULL, self.videoPixelBufferAdaptor.pixelBufferPool, &renderBuffer);
                [self.delegate exportSession:self renderFrame:pixelBuffer withPresentationTime:lastSamplePresentationTime toBuffer:renderBuffer];
                if (![self.videoPixelBufferAdaptor appendPixelBuffer:renderBuffer withPresentationTime:lastSamplePresentationTime])
                {
                    error = YES;
                }
                CVPixelBufferRelease(renderBuffer);
                handled = YES;
            }
        }
        // Default path (audio, or video without a rendering delegate):
        // append the sample buffer directly.
        if (!handled && ![input appendSampleBuffer:sampleBuffer])
        {
            error = YES;
        }
        // copyNextSampleBuffer follows the Create/Copy rule: we own the buffer.
        CFRelease(sampleBuffer);

        if (error)
        {
            return NO;
        }
    }
    else
    {
        // No more samples: this track is complete.
        [input markAsFinished];
        return NO;
    }
}

return YES;
  }

  // Builds a pass-through composition that applies the source track's
  // preferred transform and aspect-fits it into the size requested by
  // videoSettings (AVVideoWidthKey/AVVideoHeightKey).
  - (AVMutableVideoComposition *)buildDefaultVideoComposition
  {
      AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
      // firstObject is nil-safe; in this file the method is only called when
      // the asset has at least one video track.
      AVAssetTrack *videoTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

      // Frame rate resolution order: explicit rate in videoSettings, then the
      // source track's nominal rate, then a default of 30 fps (the nominal
      // rate is typically 0 when the asset is an AVComposition).
      float trackFrameRate = 0;
      NSDictionary *videoCompressionProperties = self.videoSettings[AVVideoCompressionPropertiesKey];
      NSNumber *frameRate = videoCompressionProperties[AVVideoAverageNonDroppableFrameRateKey];
      if (frameRate)
      {
          trackFrameRate = frameRate.floatValue;
      }
      if (trackFrameRate == 0)
      {
          // FIX: previously the track's nominal rate was only consulted when
          // videoSettings was nil, so settings without a frame-rate key always
          // fell through to the 30 fps default.
          trackFrameRate = videoTrack.nominalFrameRate;
      }
      if (trackFrameRate == 0)
      {
          trackFrameRate = 30;
      }

      // FIX: round to the nearest integer timescale instead of the implicit
      // float->int32_t truncation (which turned 29.97 fps into 29).
      videoComposition.frameDuration = CMTimeMake(1, (int32_t)roundf(trackFrameRate));

      CGSize targetSize = CGSizeMake([self.videoSettings[AVVideoWidthKey] floatValue],
                                     [self.videoSettings[AVVideoHeightKey] floatValue]);
      CGSize naturalSize = [videoTrack naturalSize];
      CGAffineTransform transform = videoTrack.preferredTransform;
      // Workaround radar 31928389 (bogus -560 translation on some assets),
      // see https://github.com/rs/SDAVAssetExportSession/pull/70 for more info
      if (transform.ty == -560) {
          transform.ty = 0;
      }
      if (transform.tx == -560) {
          transform.tx = 0;
      }

      // A ±90° preferred transform means the displayed frame is portrait:
      // swap the natural dimensions so the render size matches.
      CGFloat videoAngleInDegree = atan2(transform.b, transform.a) * 180 / M_PI;
      if (videoAngleInDegree == 90 || videoAngleInDegree == -90) {
          CGFloat width = naturalSize.width;
          naturalSize.width = naturalSize.height;
          naturalSize.height = width;
      }
      videoComposition.renderSize = naturalSize;

      // Center inside: uniformly scale to fit targetSize while preserving
      // aspect ratio, then center within the render rectangle. (The adaptor's
      // coordinate space is the render size scaled by x/y ratios, hence the
      // divisions below.)
      {
          float xratio = targetSize.width / naturalSize.width;
          float yratio = targetSize.height / naturalSize.height;
          float ratio = MIN(xratio, yratio);

          float postWidth = naturalSize.width * ratio;
          float postHeight = naturalSize.height * ratio;
          float transx = (targetSize.width - postWidth) / 2;
          float transy = (targetSize.height - postHeight) / 2;

          CGAffineTransform matrix = CGAffineTransformMakeTranslation(transx / xratio, transy / yratio);
          matrix = CGAffineTransformScale(matrix, ratio / xratio, ratio / yratio);
          transform = CGAffineTransformConcat(transform, matrix);
      }

      // Make a "pass through video track" video composition: one instruction
      // covering the whole asset, one layer applying the computed transform.
      AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
      passThroughInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);

      AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
      [passThroughLayer setTransform:transform atTime:kCMTimeZero];

      passThroughInstruction.layerInstructions = @[passThroughLayer];
      videoComposition.instructions = @[passThroughInstruction];

      return videoComposition;
  }

 // Called once both the video and audio pumps report completion. Decides how
 // to close out the writer based on the pipeline's final state.
 - (void)finish
 {
     // A cancelled session is fully handled by -cancelExport; do nothing here
     // so we never touch the writer after cancellation.
     if (self.reader.status == AVAssetReaderStatusCancelled || self.writer.status == AVAssetWriterStatusCancelled)
     {
         return;
     }

     // Writer already failed: just surface completion (error comes from -error).
     if (self.writer.status == AVAssetWriterStatusFailed)
     {
         [self complete];
         return;
     }

     // Reader failed: abort the writer, then surface completion.
     if (self.reader.status == AVAssetReaderStatusFailed)
     {
         [self.writer cancelWriting];
         [self complete];
         return;
     }

     // Normal path: finalize the output file asynchronously.
     [self.writer finishWritingWithCompletionHandler:^
     {
         [self complete];
     }];
 }

   // Final step of every export: clean up a partial file on failure/cancel
   // and fire the caller's completion handler exactly once.
   - (void)complete
   {
       AVAssetWriterStatus writerStatus = self.writer.status;
       if (writerStatus == AVAssetWriterStatusFailed || writerStatus == AVAssetWriterStatusCancelled)
       {
           // Remove the partially written output so a retry starts clean.
           [[NSFileManager defaultManager] removeItemAtURL:self.outputURL error:nil];
       }

       void (^handler)(void) = self.completionHandler;
       if (handler)
       {
           handler();
           // Break the retained reference so the block (and anything it
           // captures) is released after the single invocation.
           self.completionHandler = nil;
       }
   }

 // A locally recorded error (e.g. missing output URL) takes precedence;
 // otherwise surface whichever of the writer/reader failed, if any.
 - (NSError *)error
 {
     return _error ?: (self.writer.error ?: self.reader.error);
 }

// Maps the writer's status onto AVAssetExportSessionStatus so callers can
// poll this session exactly like a stock AVAssetExportSession.
- (AVAssetExportSessionStatus)status
{
    switch (self.writer.status)
    {
        case AVAssetWriterStatusWriting:
            return AVAssetExportSessionStatusExporting;
        case AVAssetWriterStatusFailed:
            return AVAssetExportSessionStatusFailed;
        case AVAssetWriterStatusCompleted:
            return AVAssetExportSessionStatusCompleted;
        case AVAssetWriterStatusCancelled:
            return AVAssetExportSessionStatusCancelled;
        case AVAssetWriterStatusUnknown:
        default:
            return AVAssetExportSessionStatusUnknown;
    }
}

   // Cancels an in-flight export. The actual teardown hops onto the encoding
   // queue so it is serialized against any sample-pumping block in flight.
   - (void)cancelExport
   {
       if (!self.inputQueue)
       {
           // No export was ever started; nothing to cancel.
           return;
       }
       dispatch_async(self.inputQueue, ^
       {
           [self.writer cancelWriting];
           [self.reader cancelReading];
           [self complete];
           [self reset];
       });
   }

   // Drops every piece of per-export state so the session can be reused.
   // All assignments are independent, so order is irrelevant.
   - (void)reset
   {
       self.progress = 0;
       _error = nil;
       self.inputQueue = nil;
       self.completionHandler = nil;
       self.videoPixelBufferAdaptor = nil;
       self.videoInput = nil;
       self.audioInput = nil;
       self.videoOutput = nil;
       self.audioOutput = nil;
       self.reader = nil;
       self.writer = nil;
   }

   @end

使用方法:

        PHVideoRequestOptions *options = [[PHVideoRequestOptions alloc] init];
        options.version = PHVideoRequestOptionsVersionOriginal;
        [[PHImageManager defaultManager] requestAVAssetForVideo:asset options:options resultHandler:^(AVAsset *asset, AVAudioMix *audioMix, NSDictionary *info) {

       NSURL *outputUrl = [NSURL fileURLWithPath:[[NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, true) lastObject] stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4",movName]]];   //輸出路徑
   SDAVAssetExportSession *encoder =    [SDAVAssetExportSession.alloc initWithAsset:asset];
                encoder.outputFileType = AVFileTypeMPEG4;
                encoder.outputURL = outputUrl;
                //視頻設(shè)置
                encoder.videoSettings = @
                {
                AVVideoCodecKey: AVVideoCodecH264,
                AVVideoWidthKey: @720,
                AVVideoHeightKey: @1280,
                AVVideoCompressionPropertiesKey: @
                    {
                    AVVideoAverageBitRateKey: @6000000,
                    AVVideoProfileLevelKey: AVVideoProfileLevelH264High40,
                    },
                };
                //音頻設(shè)置
                encoder.audioSettings = @
                {
                AVFormatIDKey: @(kAudioFormatMPEG4AAC),
                AVNumberOfChannelsKey: @2,
                AVSampleRateKey: @44100,
                AVEncoderBitRateKey: @128000,
                };
                  __weak typeof(self) weakSelf  = self;
                [encoder exportAsynchronouslyWithCompletionHandler:^
                 {
                     if (encoder.status == AVAssetExportSessionStatusCompleted)
                     {
                         NSData *outputData = [NSData dataWithContentsOfURL:encoder.outputURL]; //壓縮后的視頻
                         [weakSelf saveAtta:outputData withName:movName toPath:USER_Chat_Folder_Path];//保存
                       
                     }
                     else if (encoder.status == AVAssetExportSessionStatusCancelled)
                     {
                         NSLog(@"Video export cancelled");
                     }
                     else
                     {
                         NSLog(@"Video export failed with error: %@ (%ld)", encoder.error.localizedDescription, (long)encoder.error.code);
                     }
                 }];
            }}];

原文地址:https://github.com/rs/SDAVAssetExportSession

喜歡的朋友點(diǎn)個贊吧!謝謝!!!

最后編輯于
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
  • 序言:七十年代末惹挟,一起剝皮案震驚了整個濱河市,隨后出現(xiàn)的幾起案子缝驳,更是在濱河造成了極大的恐慌连锯,老刑警劉巖,帶你破解...
    沈念sama閱讀 217,406評論 6 503
  • 序言:濱河連續(xù)發(fā)生了三起死亡事件用狱,死亡現(xiàn)場離奇詭異运怖,居然都是意外死亡,警方通過查閱死者的電腦和手機(jī)齿拂,發(fā)現(xiàn)死者居然都...
    沈念sama閱讀 92,732評論 3 393
  • 文/潘曉璐 我一進(jìn)店門驳规,熙熙樓的掌柜王于貴愁眉苦臉地迎上來,“玉大人署海,你說我怎么就攤上這事吗购。” “怎么了砸狞?”我有些...
    開封第一講書人閱讀 163,711評論 0 353
  • 文/不壞的土叔 我叫張陵捻勉,是天一觀的道長。 經(jīng)常有香客問我刀森,道長踱启,這世上最難降的妖魔是什么? 我笑而不...
    開封第一講書人閱讀 58,380評論 1 293
  • 正文 為了忘掉前任研底,我火速辦了婚禮埠偿,結(jié)果婚禮上,老公的妹妹穿的比我還像新娘榜晦。我一直安慰自己冠蒋,他們只是感情好,可當(dāng)我...
    茶點(diǎn)故事閱讀 67,432評論 6 392
  • 文/花漫 我一把揭開白布乾胶。 她就那樣靜靜地躺著抖剿,像睡著了一般朽寞。 火紅的嫁衣襯著肌膚如雪。 梳的紋絲不亂的頭發(fā)上斩郎,一...
    開封第一講書人閱讀 51,301評論 1 301
  • 那天脑融,我揣著相機(jī)與錄音,去河邊找鬼缩宜。 笑死肘迎,一個胖子當(dāng)著我的面吹牛,可吹牛的內(nèi)容都是我干的锻煌。 我是一名探鬼主播膜宋,決...
    沈念sama閱讀 40,145評論 3 418
  • 文/蒼蘭香墨 我猛地睜開眼,長吁一口氣:“原來是場噩夢啊……” “哼炼幔!你這毒婦竟也來了?” 一聲冷哼從身側(cè)響起史简,我...
    開封第一講書人閱讀 39,008評論 0 276
  • 序言:老撾萬榮一對情侶失蹤乃秀,失蹤者是張志新(化名)和其女友劉穎,沒想到半個月后圆兵,有當(dāng)?shù)厝嗽跇淞掷锇l(fā)現(xiàn)了一具尸體跺讯,經(jīng)...
    沈念sama閱讀 45,443評論 1 314
  • 正文 獨(dú)居荒郊野嶺守林人離奇死亡,尸身上長有42處帶血的膿包…… 初始之章·張勛 以下內(nèi)容為張勛視角 年9月15日...
    茶點(diǎn)故事閱讀 37,649評論 3 334
  • 正文 我和宋清朗相戀三年殉农,在試婚紗的時(shí)候發(fā)現(xiàn)自己被綠了刀脏。 大學(xué)時(shí)的朋友給我發(fā)了我未婚夫和他白月光在一起吃飯的照片。...
    茶點(diǎn)故事閱讀 39,795評論 1 347
  • 序言:一個原本活蹦亂跳的男人離奇死亡超凳,死狀恐怖愈污,靈堂內(nèi)的尸體忽然破棺而出扰才,到底是詐尸還是另有隱情苏遥,我是刑警寧澤,帶...
    沈念sama閱讀 35,501評論 5 345
  • 正文 年R本政府宣布脓魏,位于F島的核電站创夜,受9級特大地震影響杭跪,放射性物質(zhì)發(fā)生泄漏。R本人自食惡果不足惜驰吓,卻給世界環(huán)境...
    茶點(diǎn)故事閱讀 41,119評論 3 328
  • 文/蒙蒙 一涧尿、第九天 我趴在偏房一處隱蔽的房頂上張望。 院中可真熱鬧檬贰,春花似錦姑廉、人聲如沸。這莊子的主人今日做“春日...
    開封第一講書人閱讀 31,731評論 0 22
  • 文/蒼蘭香墨 我抬頭看了看天上的太陽瞬内。三九已至,卻和暖如春限书,著一層夾襖步出監(jiān)牢的瞬間虫蝶,已是汗流浹背。 一陣腳步聲響...
    開封第一講書人閱讀 32,865評論 1 269
  • 我被黑心中介騙來泰國打工倦西, 沒想到剛下飛機(jī)就差點(diǎn)兒被人妖公主榨干…… 1. 我叫王不留能真,地道東北人。 一個月前我還...
    沈念sama閱讀 47,899評論 2 370
  • 正文 我出身青樓扰柠,卻偏偏與公主長得像粉铐,于是被迫代替她去往敵國和親。 傳聞我的和親對象是個殘疾皇子卤档,可洞房花燭夜當(dāng)晚...
    茶點(diǎn)故事閱讀 44,724評論 2 354

推薦閱讀更多精彩內(nèi)容