The diagram above illustrates this composition process; the code is below:
```
- (void)mergeAndExportVideos:(NSArray *)videosPathArray withOutPath:(NSString *)outpath {
    if (videosPathArray.count == 0) {
        return;
    }
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTime totalDuration = kCMTimeZero;
    for (int i = 0; i < videosPathArray.count; i++) {
        AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:videosPathArray[i]]];

        // Get the audio track of the AVAsset.
        NSError *erroraudio = nil;
        AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        // Append the asset's audio to the composition's audio track.
        BOOL ba = [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                      ofTrack:assetAudioTrack
                                       atTime:totalDuration
                                        error:&erroraudio];
        NSLog(@"erroraudio:%@ %d", erroraudio, ba);

        // Get the video track and append it in the same way.
        NSError *errorVideo = nil;
        AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
        BOOL bl = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                      ofTrack:assetVideoTrack
                                       atTime:totalDuration
                                        error:&errorVideo];
        NSLog(@"errorVideo:%@ %d", errorVideo, bl);

        totalDuration = CMTimeAdd(totalDuration, asset.duration);
    }
    NSLog(@"%@", NSHomeDirectory());
    NSURL *mergeFileURL = [NSURL fileURLWithPath:outpath];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPreset640x480];
    exporter.outputURL = mergeFileURL;
    exporter.outputFileType = AVFileTypeMPEG4;
    exporter.shouldOptimizeForNetworkUse = YES;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"exporter error: %@", exporter.error);
    }];
}
```
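For reference, a call to the merge method might look like the minimal sketch below. The two clip paths and the output path are made-up placeholders, not part of the original code.

```
NSString *docs = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSArray *clips = @[[docs stringByAppendingPathComponent:@"clip1.mp4"],    // hypothetical local clips
                   [docs stringByAppendingPathComponent:@"clip2.mp4"]];
NSString *outPath = [docs stringByAppendingPathComponent:@"merged.mp4"];  // hypothetical output path

// Remove any stale file first; AVAssetExportSession fails if the output file already exists.
[[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];
[self mergeAndExportVideos:clips withOutPath:outPath];
```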
Here is a closer look at the two AVFoundation methods used above.
```
- (AVMutableCompositionTrack *)addMutableTrackWithMediaType:(NSString *)mediaType preferredTrackID:(CMPersistentTrackID)preferredTrackID;
```
Parameters:
mediaType: the media type to add, AVMediaTypeVideo for video or AVMediaTypeAudio for audio.
preferredTrackID: may be set freely, but kCMPersistentTrackID_Invalid (that is, 0) is recommended so the framework assigns a track ID itself.
Insert audio or video into a composition track:
```
- (BOOL)insertTimeRange:(CMTimeRange)timeRange ofTrack:(AVAssetTrack *)track atTime:(CMTime)startTime error:(NSError * __nullable * __nullable)outError;
```
Parameters:
timeRange: the time range of the source video/audio to insert.
track: the source video/audio track to insert from.
startTime: the point in the composition track at which the inserted range begins.
A combined sketch of the two calls follows.
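To make the two calls concrete, here is a minimal, self-contained sketch that appends the audio of one asset to a new composition. The file name `sample.mp4` is a made-up placeholder.

```
#import <AVFoundation/AVFoundation.h>

AVMutableComposition *composition = [AVMutableComposition composition];
// 1. Create an empty audio track; kCMPersistentTrackID_Invalid lets AVFoundation assign the ID.
AVMutableCompositionTrack *compAudioTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                             preferredTrackID:kCMPersistentTrackID_Invalid];

// 2. Copy the whole audio track of a source asset into it, starting at time zero.
AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:@"sample.mp4"]]; // placeholder path
AVAssetTrack *sourceAudio = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
NSError *error = nil;
BOOL ok = [compAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                  ofTrack:sourceAudio
                                   atTime:kCMTimeZero
                                    error:&error];
NSLog(@"insert succeeded: %d, error: %@", ok, error);
```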
The next method exports (and, when needed, re-encodes) a single video asset, correcting its orientation first:
```
- (void)startExportVideoWithVideoAsset:(AVURLAsset *)videoAsset completion:(void (^)(NSString *outputPath))completion {
    // Find compatible presets for the video asset.
    // NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
    // Begin to compress the video.
    // Here we simply compress to a low resolution if it is supported.
    // If you need to upload to a server that doesn't support streaming uploads,
    // you can compress to an even lower resolution, or keep a higher one.
    // if ([presets containsObject:AVAssetExportPreset640x480]) {

    // Newly added: detect the rotation angle of the source video.
    int degress = [self degressFromVideoFileWithAsset:videoAsset];
    AVAssetExportSession *session = nil;
    if (degress != 0) {
        AVMutableComposition *composition = [AVMutableComposition composition];
        AVAssetTrack *sourceAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
        AVMutableCompositionTrack *compositionAudioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                    preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                                       ofTrack:sourceAudioTrack
                                        atTime:kCMTimeZero
                                         error:nil];
        session = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
        // Correct the video orientation.
        BOOL isPortrait_ = [self isVideoPortrait:videoAsset];
        session.videoComposition = [self getVideoComposition:videoAsset composition:composition withisPortrait:isPortrait_];
    } else {
        session = [[AVAssetExportSession alloc] initWithAsset:videoAsset presetName:AVAssetExportPresetPassthrough];
    }

    // Build a timestamped output path.
    NSDateFormatter *formater = [[NSDateFormatter alloc] init];
    [formater setDateFormat:@"yyyy-MM-dd-HH:mm:ss"];
    NSString *outputPath = [NSHomeDirectory() stringByAppendingFormat:@"/tmp/output-%@.mp4", [formater stringFromDate:[NSDate date]]];
    session.outputURL = [NSURL fileURLWithPath:outputPath];

    // Optimize for network use.
    session.shouldOptimizeForNetworkUse = YES;

    NSArray *supportedTypeArray = session.supportedFileTypes;
    if ([supportedTypeArray containsObject:AVFileTypeMPEG4]) {
        session.outputFileType = AVFileTypeMPEG4;
    } else if (supportedTypeArray.count == 0) {
        NSLog(@"No supported file types; this video cannot be exported");
        return;
    } else {
        session.outputFileType = [supportedTypeArray objectAtIndex:0];
    }

    // Make sure the tmp directory exists.
    if (![[NSFileManager defaultManager] fileExistsAtPath:[NSHomeDirectory() stringByAppendingFormat:@"/tmp"]]) {
        [[NSFileManager defaultManager] createDirectoryAtPath:[NSHomeDirectory() stringByAppendingFormat:@"/tmp"]
                                  withIntermediateDirectories:YES
                                                   attributes:nil
                                                        error:nil];
    }

    // Begin to export the video to the output path asynchronously.
    [session exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (session.status) {
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown"); break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"AVAssetExportSessionStatusWaiting"); break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionStatusExporting"); break;
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"AVAssetExportSessionStatusCompleted");
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) {
                        completion(outputPath);
                    }
                });
            } break;
            case AVAssetExportSessionStatusFailed: {
                NSLog(@"AVAssetExportSessionStatusFailed: %@", session.error);
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) {
                        completion(nil);
                    }
                });
            } break;
            default: break;
        }
    }];
    // }
}
- (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset composition:(AVMutableComposition *)composition withisPortrait:(BOOL)isPortrait {
    BOOL isPortrait_ = isPortrait;
    // Add the source video track to the composition.
    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                 preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoTrack atTime:kCMTimeZero error:nil];

    // Apply the track's preferredTransform so exported frames are rotated correctly.
    AVMutableVideoCompositionLayerInstruction *layerInst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTrack];
    CGAffineTransform transform = videoTrack.preferredTransform;
    [layerInst setTransform:transform atTime:kCMTimeZero];

    AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
    inst.layerInstructions = [NSArray arrayWithObject:layerInst];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = [NSArray arrayWithObject:inst];

    // For portrait video, swap width and height of the render size.
    CGSize videoSize = videoTrack.naturalSize;
    if (isPortrait_) {
        NSLog(@"video is portrait");
        videoSize = CGSizeMake(videoSize.height, videoSize.width);
    }
    videoComposition.renderSize = videoSize;
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderScale = 1.0;
    return videoComposition;
}
- (BOOL)isVideoPortrait:(AVAsset *)asset {
    BOOL isPortrait = FALSE;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        // Portrait
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // PortraitUpsideDown
        if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            isPortrait = YES;
        }
        // LandscapeRight
        if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            isPortrait = FALSE;
        }
        // LandscapeLeft
        if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            isPortrait = FALSE;
        }
    }
    return isPortrait;
}
/// Get the rotation angle of the video (in degrees), derived from its preferredTransform.
- (int)degressFromVideoFileWithAsset:(AVAsset *)asset {
    int degress = 0;
    NSArray *tracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([tracks count] > 0) {
        AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
        CGAffineTransform t = videoTrack.preferredTransform;
        if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
            // Portrait
            degress = 90;
        } else if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
            // PortraitUpsideDown
            degress = 270;
        } else if (t.a == 1.0 && t.b == 0 && t.c == 0 && t.d == 1.0) {
            // LandscapeRight
            degress = 0;
        } else if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
            // LandscapeLeft
            degress = 180;
        }
    }
    return degress;
}
```
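For completeness, here is a rough sketch of how the export method might be invoked; the source path is a made-up placeholder and error handling is kept minimal.

```
NSURL *sourceURL = [NSURL fileURLWithPath:@"/path/to/source.mov"]; // placeholder path
AVURLAsset *asset = [AVURLAsset assetWithURL:sourceURL];
[self startExportVideoWithVideoAsset:asset completion:^(NSString *outputPath) {
    if (outputPath) {
        // Export succeeded; outputPath points to the MP4 written under <home>/tmp.
        NSLog(@"exported to %@", outputPath);
    } else {
        NSLog(@"export failed");
    }
}];
```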