# Preface
A recent project of mine needed audio/video merging, but parts of the tutorials online were always rather vague, so after finishing my own research I decided to write it up as a blog post and share it here so we can all learn and improve together.
# The Code
In the storyboard, drag in a button and an image view. For a nicer effect, you can set the image view's background color to black.
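If you prefer not to use a storyboard, the same setup can be built in code. Here is a minimal sketch (the frames are arbitrary assumptions; the property and action names match the code below), e.g. in viewDidLoad:

// Create the image view that will host the player layer
UIImageView *imageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 100, self.view.bounds.size.width, 300)];
imageView.backgroundColor = [UIColor blackColor];
[self.view addSubview:imageView];
self.imageView = imageView; // safe even with a weak property, because the view hierarchy retains it
// Create the button and wire it to mergeAction:
UIButton *mergeButton = [UIButton buttonWithType:UIButtonTypeSystem];
mergeButton.frame = CGRectMake(20, 420, 120, 44);
[mergeButton setTitle:@"合成" forState:UIControlStateNormal];
[mergeButton addTarget:self action:@selector(mergeAction:) forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:mergeButton];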
Then add the following code to ViewController:
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "MBProgressHUD+MJ.h"
@interface ViewController ()
/** Image view used for playback */
@property (weak, nonatomic) IBOutlet UIImageView *imageView;
@end
@implementation ViewController
- (void)viewDidLoad {
[super viewDidLoad];
}
- (IBAction)mergeAction:(UIButton *)sender {
[self merge];
}
// Merge the audio with the video
- (void)merge{
// Show an MBProgressHUD while processing
[MBProgressHUD showMessage:@"正在處理中"];
// Documents directory path
NSString *documents = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
// Audio source
NSURL *audioInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"五環(huán)之歌" ofType:@"mp3"]];
// Video source
NSURL *videoInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"myPlayer" ofType:@"mp4"]];
// Final output path for the merged file
NSString *outPutFilePath = [documents stringByAppendingPathComponent:@"merge.mp4"];
// The export session fails if a file already exists at the output path, so clear out any leftover from a previous run
[[NSFileManager defaultManager] removeItemAtPath:outPutFilePath error:nil];
// Output URL
NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];
// Start time for the inserted clips
CMTime nextClipStartTime = kCMTimeZero;
// Create a mutable audio/video composition
AVMutableComposition *composition = [AVMutableComposition composition];
// Video asset
AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
// Time range covering the whole video
CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
// Video composition track (kCMPersistentTrackID_Invalid = 0, which lets the system assign a track ID)
AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// Source video track
AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
// Insert the source video track into the mutable composition track
[videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClipStartTime error:nil];
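// Side note (not in the original code): if the source clip was shot with a device camera,
// its orientation is stored in the track's preferredTransform; copying it across keeps the
// merged video from coming out rotated, e.g.
//   videoTrack.preferredTransform = videoAssetTrack.preferredTransform;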
// Audio asset
AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
// The video here is shorter, so its duration is simply reused for the audio range; if you automate this you need to compare the durations yourself (see the sketch below)
CMTimeRange audioTimeRange = videoTimeRange;
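// A minimal sketch of that comparison (not in the original code): clamp to the shorter of the
// two assets so the insert below never runs past the end of either track, e.g.
//   CMTime shorterDuration = CMTimeMinimum(videoAsset.duration, audioAsset.duration);
//   CMTimeRange audioTimeRange = CMTimeRangeMake(kCMTimeZero, shorterDuration);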
// Audio composition track
AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
// Source audio track
AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
// Insert the source audio track into the composition
[audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
// Create an export session
AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
// Output container type (AVFileTypeMPEG4 so it matches the .mp4 extension of the output path)
assetExport.outputFileType = AVFileTypeMPEG4;
// Output URL
assetExport.outputURL = outputFileUrl;
// Optimize the file for playback over the network
assetExport.shouldOptimizeForNetworkUse = YES;
// Export asynchronously; the completion handler is called when the export finishes (or fails)
[assetExport exportAsynchronouslyWithCompletionHandler:^{
// Back to the main thread
dispatch_async(dispatch_get_main_queue(), ^{
if (assetExport.status == AVAssetExportSessionStatusCompleted) {
// Play the merged file
[self playWithUrl:outputFileUrl];
} else {
// The export did not complete; hide the HUD and log the error
[MBProgressHUD hideHUD];
NSLog(@"export failed: %@", assetExport.error);
}
});
}];
}
/** Playback */
- (void)playWithUrl:(NSURL *)url{
// Build a player item from the merged file's URL
AVPlayerItem *playerItem = [AVPlayerItem playerItemWithURL:url];
// Player
AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];
// Player layer
AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
// Use bounds rather than frame: the layer is added as a sublayer of the image view's layer, so its frame is in the image view's own coordinate space
playerLayer.frame = self.imageView.bounds;
// Video scaling mode
playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;
// Add the player layer on top of the image view's layer
[self.imageView.layer addSublayer:playerLayer];
// Hide the processing HUD and show the success message
[MBProgressHUD hideHUD];
[MBProgressHUD showSuccess:@"合成完成"];
// Start playback
[player play];
}
@end
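A side note on the export step: both the preset and the output file type have to be supported for the composition being exported. If the export keeps failing, a quick way to see what is actually available is something like this (a sketch only, not part of the original code; it reuses the composition variable from the merge method):

// List the presets that can be used with this composition
NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:composition];
NSLog(@"compatible presets: %@", presets);
// List the container types the chosen session can actually write
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
NSLog(@"supported file types: %@", session.supportedFileTypes);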
MBProgressHUD is a third-party HUD library. If you do not care about the progress HUD, you can delete its header import and the HUD calls, shown again here:
// HUD shown while processing
[MBProgressHUD showMessage:@"正在處理中"];
// Hide the HUD and show the success message
[MBProgressHUD hideHUD];
[MBProgressHUD showSuccess:@"合成完成"];
# Result
The result was captured as a GIF, so please just picture the Uber video playing with 五環(huán)之歌 over it (I think they actually go together pretty well).