前言
從本文開始逐漸學(xué)習(xí)iOS自帶的多媒體處理框架,例如AVFoundation,VideoToolbox,CoreMedia,CoreVideo實(shí)現(xiàn)多媒體的處理,并且將實(shí)現(xiàn)方式以及效果和ffmpeg的方式做對(duì)比
給一個(gè)視頻添加音樂,將多段音視頻文件合并為一個(gè)文件是很常見的需求,AVFoundation就提供了這樣的接口。
本文的目的:
為一段視頻添加背景音樂
音視頻合并相關(guān)流程
上圖介紹了AVFoundation框架中關(guān)于合并音視頻文件的相關(guān)的對(duì)象關(guān)系圖,可以看到使用AVFoundation合并音視頻還是相對(duì)比較簡(jiǎn)單的。
相關(guān)對(duì)象及函數(shù)介紹
1麦到、AVURLAsset
容器對(duì)象,代表了要操作的容器。封裝,解封裝,音視頻播放,以及音視頻合并等等操作的基礎(chǔ)都涉及到這個(gè)對(duì)象。
2、AVAssetTrack
音視頻軌道對(duì)象,代表了文件中的一路音頻流或者一路視頻流,它作為每一個(gè)要被合并的音頻或者視頻流被添加到組合對(duì)象中最終進(jìn)行合并
3、AVMutableCompositionTrack
組合軌道對(duì)象,它作為音視頻合并的基礎(chǔ),通過它添加要合并的音頻流或者視頻流,分為兩種類型:音頻組合軌道對(duì)象和視頻組合軌道對(duì)象,音頻組合軌道對(duì)象只能添加音頻流,視頻組合軌道對(duì)象只能添加視頻流
4、AVMutableComposition
組合對(duì)象,通過它構(gòu)建組合軌道對(duì)象
5、AVAssetExportSession
執(zhí)行合并操作并導(dǎo)出為文件對(duì)象,該對(duì)象內(nèi)部應(yīng)該是封裝了合并多個(gè)音頻流或者視頻流的操作和封裝操作
實(shí)現(xiàn)代碼
#import <Foundation/Foundation.h>
@interface AVMYComposition : NSObject
/** Merges one video file and two audio files into a single movie file.
 *
 * Assumptions (from the original author):
 * 1. The video is longer than either audio clip alone, but may be shorter
 *    than the two audio clips combined.
 * 2. The video duration is the reference: if the combined audio duration
 *    exceeds it, the tail of the second audio clip is trimmed.
 *
 * NOTE(review): the implementation waits on a semaphore until the export
 * finishes, so this call blocks the calling thread — do not invoke it on
 * the main thread.
 *
 * @param audioUrl  URL of the first audio file (placed at the start).
 * @param audioUrl2 URL of the second audio file (appended after the first).
 * @param videoUrl  URL of the source video file.
 * @param dsturl    Destination URL for the exported QuickTime movie.
 */
- (void)startMerge:(NSURL*)audioUrl audio2:(NSURL*)audioUrl2 videoUrl:(NSURL*)videoUrl dst:(NSURL*)dsturl;
@end
#import "AVMYComposition.h"
#import <AVFoundation/AVFoundation.h>

@implementation AVMYComposition
{
    // Signalled from the export completion handler so startMerge can block
    // until the asynchronous export finishes.
    dispatch_semaphore_t processSemaphore;
}

/** Merges one video track and two consecutive audio clips into dsturl.
 *
 * The two audio clips are laid back-to-back on a single audio composition
 * track; if their combined duration exceeds the video duration, the second
 * clip is trimmed so the audio never outlasts the video.
 *
 * Blocks the calling thread: asset duration/track reads are synchronous,
 * and the method waits on a semaphore for the export to complete.
 */
- (void)startMerge:(NSURL*)audioUrl1 audio2:(NSURL*)audioUrl2 videoUrl:(NSURL*)videoUrl dst:(NSURL*)dsturl
{
    processSemaphore = dispatch_semaphore_create(0);

    /** AVMutableComposition is an AVAsset subclass acting as an editing
     * container: each source track to be merged is inserted into one of its
     * AVMutableCompositionTrack objects (an AVAssetTrack subclass), and the
     * whole composition is then exported as a single file.
     */
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Composition track receiving the video. kCMPersistentTrackID_Invalid
    // lets the system generate the track ID.
    AVMutableCompositionTrack *videoCompositionTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoUrl options:nil];
    // Synchronous duration read; blocks the current thread.
    CMTime videoDuration = videoAsset.duration;
    // firstObject is nil-safe, unlike objectAtIndex:0 which throws on an
    // asset that contains no video track.
    AVAssetTrack *videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (videoTrack == nil) {
        NSLog(@"no video track in %@", videoUrl);
        return;
    }
    NSError *error = nil;
    // Check the BOOL return value, not the error pointer — the error object
    // is only guaranteed meaningful when the method reports failure.
    if (![videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoDuration)
                                        ofTrack:videoTrack
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"video insert error %@",error);
        return;
    }

    // Single audio composition track shared by both audio clips.
    AVMutableCompositionTrack *audioComTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVURLAsset *audioAsset1 = [AVURLAsset assetWithURL:audioUrl1];
    // Synchronous duration read; blocks the current thread.
    CMTime duration1 = audioAsset1.duration;
    AVAssetTrack *audioTrack1 = [[audioAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack1 == nil) {
        NSLog(@"no audio track in %@", audioUrl1);
        return;
    }
    // First clip starts at time zero on the composition timeline.
    if (![audioComTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, duration1)
                                ofTrack:audioTrack1
                                 atTime:kCMTimeZero
                                  error:&error]) {
        NSLog(@"audio track %@",error);
        return;
    }

    AVURLAsset *audioAsset2 = [AVURLAsset assetWithURL:audioUrl2];
    // Synchronous duration read; blocks the current thread.
    CMTime duration2 = audioAsset2.duration;
    CMTime newDuration2 = duration2;
    // Trim the second clip whenever audio1 + audio2 would outlast the video.
    // (The original code also required duration1 < duration2, which skipped
    // the trim in some cases and let the audio run past the video; exact
    // CMTime arithmetic replaces the lossy float-seconds comparison.)
    if (CMTimeCompare(CMTimeAdd(duration1, duration2), videoDuration) > 0) {
        newDuration2 = CMTimeSubtract(videoDuration, duration1);
        if (CMTimeCompare(newDuration2, kCMTimeZero) < 0) {
            // First clip already fills (or exceeds) the video; drop clip 2.
            newDuration2 = kCMTimeZero;
        }
    }
    NSLog(@" tt %f tt %f",CMTimeGetSeconds(duration1),CMTimeGetSeconds(newDuration2));
    AVAssetTrack *audioTrack2 = [[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (audioTrack2 == nil) {
        NSLog(@"no audio track in %@", audioUrl2);
        return;
    }
    /** Parameters:
     * timeRange — the portion of the source track copied into the
     *             composition track;
     * atTime    — where on the composition timeline it is inserted,
     *             here immediately after the first audio clip.
     */
    if (![audioComTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, newDuration2)
                                ofTrack:audioTrack2
                                 atTime:duration1
                                  error:&error]) {
        // Original code never checked this insert's result.
        NSLog(@"audio2 insert error %@", error);
        return;
    }

    // AVAssetExportSession will not overwrite an existing file.
    if ([[NSFileManager defaultManager] fileExistsAtPath:dsturl.path]) {
        [[NSFileManager defaultManager] removeItemAtURL:dsturl error:nil];
    }

    // Export session: internally performs the merge/mux and writes the file.
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    if (exportSession == nil) {
        NSLog(@"failed to create export session");
        return;
    }
    exportSession.outputURL = dsturl;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        // Report failure/cancellation instead of logging success blindly.
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"over");
        } else {
            NSLog(@"export failed (%ld): %@", (long)exportSession.status, exportSession.error);
        }
        dispatch_semaphore_signal(self->processSemaphore);
    }];
    dispatch_semaphore_wait(processSemaphore, DISPATCH_TIME_FOREVER);
    NSLog(@"結(jié)束了");
}
@end
遇到問題
項(xiàng)目地址
https://github.com/nldzsz/ffmpeg-demo
位于AVFoundation目錄下文件AVMYComposition.h/AVMYComposition.m中