A recent requirement called for compressing a user-selected video before uploading it, with the compression driven by custom parameters. This article records the approach I put together for compressing video with custom compression settings.
- (void)compressVideo:(NSURL *)videoUrl
    withVideoSettings:(NSDictionary *)videoSettings
        audioSettings:(NSDictionary *)audioSettings
             fileType:(AVFileType)fileType
             complete:(void (^)(NSURL * _Nullable, NSError * _Nullable))complete {
    NSURL *outputUrl = [NSURL fileURLWithPath:[self buildFilePath]];
    AVAsset *asset = [AVAsset assetWithURL:videoUrl];
    AVAssetReader *reader = [AVAssetReader assetReaderWithAsset:asset error:nil];
    AVAssetWriter *writer = [AVAssetWriter assetWriterWithURL:outputUrl fileType:fileType error:nil];
    // video part
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetReaderTrackOutput *videoOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:[self configVideoOutput]];
    AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    if ([reader canAddOutput:videoOutput]) {
        [reader addOutput:videoOutput];
    }
    if ([writer canAddInput:videoInput]) {
        [writer addInput:videoInput];
    }
    // audio part
    AVAssetTrack *audioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    AVAssetReaderTrackOutput *audioOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:[self configAudioOutput]];
    AVAssetWriterInput *audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    if ([reader canAddOutput:audioOutput]) {
        [reader addOutput:audioOutput];
    }
    if ([writer canAddInput:audioInput]) {
        [writer addInput:audioInput];
    }
    // Start reading and writing
    [reader startReading];
    [writer startWriting];
    [writer startSessionAtSourceTime:kCMTimeZero];
    // Create a serial queue for writing video
    dispatch_queue_t videoQueue = dispatch_queue_create("Video Queue", DISPATCH_QUEUE_SERIAL);
    // Create a serial queue for writing audio
    dispatch_queue_t audioQueue = dispatch_queue_create("Audio Queue", DISPATCH_QUEUE_SERIAL);
    // Create a dispatch group to wait for both tracks to finish
    dispatch_group_t group = dispatch_group_create();
    // Enter the group for the video track
    dispatch_group_enter(group);
    // Invoked on videoQueue whenever the input is ready for more data
    [videoInput requestMediaDataWhenReadyOnQueue:videoQueue usingBlock:^{
        BOOL completedOrFailed = NO;
        while ([videoInput isReadyForMoreMediaData] && !completedOrFailed) {
            CMSampleBufferRef sampleBuffer = [videoOutput copyNextSampleBuffer];
            if (sampleBuffer != NULL) {
                BOOL success = [videoInput appendSampleBuffer:sampleBuffer];
                DLog(@"===%@===", sampleBuffer);
                CFRelease(sampleBuffer);
                completedOrFailed = !success;
            }
            else {
                completedOrFailed = YES;
            }
        }
        if (completedOrFailed) {
            [videoInput markAsFinished];
            dispatch_group_leave(group);
        }
    }];
    // Enter the group for the audio track
    dispatch_group_enter(group);
    // Invoked on audioQueue whenever the input is ready for more data
    [audioInput requestMediaDataWhenReadyOnQueue:audioQueue usingBlock:^{
        BOOL completedOrFailed = NO;
        while ([audioInput isReadyForMoreMediaData] && !completedOrFailed) {
            CMSampleBufferRef sampleBuffer = [audioOutput copyNextSampleBuffer];
            if (sampleBuffer != NULL) {
                BOOL success = [audioInput appendSampleBuffer:sampleBuffer];
                DLog(@"===%@===", sampleBuffer);
                CFRelease(sampleBuffer);
                completedOrFailed = !success;
            }
            else {
                completedOrFailed = YES;
            }
        }
        if (completedOrFailed) {
            [audioInput markAsFinished];
            dispatch_group_leave(group);
        }
    }];
    // Both tracks have finished: wrap up the compression
    dispatch_group_notify(group, dispatch_get_main_queue(), ^{
        if ([reader status] == AVAssetReaderStatusReading) {
            [reader cancelReading];
        }
        switch (writer.status) {
            case AVAssetWriterStatusWriting: {
                DLog(@"Video compression finished");
                [writer finishWritingWithCompletionHandler:^{
                    // Consider dispatching the callback back to the main thread asynchronously
                    if (complete) {
                        complete(outputUrl, nil);
                    }
                }];
            }
                break;
            case AVAssetWriterStatusCancelled:
                DLog(@"Compression cancelled");
                break;
            case AVAssetWriterStatusFailed:
                DLog(@"===error:%@===", writer.error);
                if (complete) {
                    complete(nil, writer.error);
                }
                break;
            case AVAssetWriterStatusCompleted: {
                DLog(@"Video compression finished");
                [writer finishWritingWithCompletionHandler:^{
                    // Consider dispatching the callback back to the main thread asynchronously
                    if (complete) {
                        complete(outputUrl, nil);
                    }
                }];
            }
                break;
            default:
                break;
        }
    });
}
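The buildFilePath helper used for the output URL is not shown above. A minimal sketch, assuming the compressed file is written to a uniquely named .mp4 under NSTemporaryDirectory (both the location and the extension are assumptions, adjust them to your needs):

/** Hypothetical buildFilePath: a unique .mp4 in the temporary directory */
- (NSString *)buildFilePath {
    // A unique file name avoids AVAssetWriter failing because the output file already exists
    NSString *fileName = [NSString stringWithFormat:@"%@.mp4", [[NSUUID UUID] UUIDString]];
    return [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];
}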
/** Decompression settings for the video track (reader output) */
- (NSDictionary *)configVideoOutput {
    // Decode to uncompressed 4:2:2 pixel buffers so the writer input can re-encode them
    NSDictionary *videoOutputSetting = @{
        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_422YpCbCr8],
        (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey : [NSDictionary dictionary]
    };
    return videoOutputSetting;
}
/** Decompression settings for the audio track (reader output) */
- (NSDictionary *)configAudioOutput {
    // AVAssetReaderTrackOutput only accepts uncompressed (Linear PCM) audio settings
    NSDictionary *audioOutputSetting = @{
        AVFormatIDKey : @(kAudioFormatLinearPCM)
    };
    return audioOutputSetting;
}
/// Specify key parameters such as bitrate, profile, and frame rate for the compressed audio/video; these values can be changed as required
- (NSDictionary *)performanceVideoSettings {
    NSDictionary *compressionProperties = @{
        AVVideoAverageBitRateKey : @(409600), // bitrate: 400 Kbps
        AVVideoExpectedSourceFrameRateKey : @24, // frame rate
        AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel
    };
    NSString *videoCodec;
    if (@available(iOS 11.0, *)) {
        videoCodec = AVVideoCodecTypeH264;
    } else {
        videoCodec = AVVideoCodecH264;
    }
    NSDictionary *videoCompressSettings = @{
        AVVideoCodecKey : videoCodec,
        AVVideoWidthKey : @640,
        AVVideoHeightKey : @360,
        AVVideoCompressionPropertiesKey : compressionProperties,
        AVVideoScalingModeKey : AVVideoScalingModeResizeAspectFill
    };
    return videoCompressSettings;
}
- (NSDictionary *)performanceAudioSettings {
    AudioChannelLayout stereoChannelLayout = {
        .mChannelLayoutTag = kAudioChannelLayoutTag_Stereo,
        .mChannelBitmap = 0,
        .mNumberChannelDescriptions = 0
    };
    NSData *channelLayoutAsData = [NSData dataWithBytes:&stereoChannelLayout length:offsetof(AudioChannelLayout, mChannelDescriptions)];
    NSDictionary *audioCompressSettings = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVEncoderBitRateKey : @(49152), // bitrate: 48 Kbps
        AVSampleRateKey : @44100, // sample rate
        AVChannelLayoutKey : channelLayoutAsData,
        AVNumberOfChannelsKey : @(2) // number of channels
    };
    return audioCompressSettings;
}
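Putting it together, a call site might look like the sketch below, where sourceVideoUrl stands for the file URL of the video the user picked and AVFileTypeMPEG4 is just one reasonable container choice:

// sourceVideoUrl: file URL of the selected video (assumed to come from your picker)
[self compressVideo:sourceVideoUrl
  withVideoSettings:[self performanceVideoSettings]
      audioSettings:[self performanceAudioSettings]
           fileType:AVFileTypeMPEG4
           complete:^(NSURL * _Nullable outputUrl, NSError * _Nullable error) {
    if (error) {
        DLog(@"===compress failed:%@===", error);
        return;
    }
    // outputUrl points at the compressed file, ready to upload
    DLog(@"===compressed file:%@===", outputUrl);
}];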