The principle behind watermarking a video is to re-render every frame of it. We can build this feature with either of two frameworks, GPUImage or AVFoundation. Below, I'll walk through both approaches in detail.
AVFoundation
Watermarking a video with AVFoundation takes the following steps:
1. Obtain the video and audio assets
2. Create an AVMutableComposition object
3. Add the video asset to the AVMutableComposition, setting its time range and insertion point
4. Add the audio asset to the AVMutableComposition, setting its time range and insertion point
5. Add any extra audio asset to the AVMutableComposition, setting its time range, insertion point, and mix mode (see the sketch after the method below)
6. Export the video, again using AVAssetExportSession
/**
 Watermark a video with AVFoundation and export it

 @param videoPathURL the video URL
 @param img the watermark image
 @param coverImg the cover (second watermark) image
 @param text the text watermark
 */
- (void)saveAVideoPath:(NSURL *)videoPathURL withWaterImg:(UIImage *)img coverImg:(UIImage *)coverImg text:(NSString *)text {
    if (!videoPathURL) return;
    // 1. Create an AVAsset instance; it carries all of the video's information. videoPathURL is the input video's path.
    NSDictionary *opts = [NSDictionary dictionaryWithObject:@(YES) forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:videoPathURL options:opts]; // load the video media file
    CMTime startTime = kCMTimeZero;
    // CMTimeRangeMake takes a start time and a duration, so with startTime at zero, endTime is really the duration
    CMTime endTime = videoAsset.duration;
    // 2. Create an AVMutableComposition instance
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
    // 3. Video track. A composition is made of tracks (audio tracks, video tracks, ...) into which pieces of media are inserted.
    AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                        preferredTrackID:kCMPersistentTrackID_Invalid];
    // Insert the source video track into the mutable track; trimming could be done here via the time range
    [videoTrack insertTimeRange:CMTimeRangeMake(startTime, endTime)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:kCMTimeZero error:nil];
    // Only wire up audio if the asset actually has an audio track
    if ([[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
        // Audio source
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:videoPathURL options:opts];
        // Audio track
        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        // Insert the source audio track
        [audioTrack insertTimeRange:CMTimeRangeMake(startTime, endTime) ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject] atTime:kCMTimeZero error:nil];
    }
    // 3.1 AVMutableVideoCompositionInstruction: one stretch of the composition's timeline; scaling, rotation, etc. are applied through it
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(startTime, endTime);
    // 3.2 AVMutableVideoCompositionLayerInstruction: the per-track instruction within that time range
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    // Detect portrait video from the preferred transform: a 90° rotation (UIImageOrientationRight)
    // or a -90° rotation (UIImageOrientationLeft) means width and height must be swapped later
    BOOL isVideoAssetPortrait_ = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        isVideoAssetPortrait_ = YES;
    }
    // Apply the source track's transform so the output keeps the correct orientation
    [videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
    // 3.3 Attach the layer instruction
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];
    // AVMutableVideoComposition manages all video tracks and decides the final render size; cropping would happen here
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    CGSize naturalSize;
    if (isVideoAssetPortrait_) {
        // Portrait: swap the stored landscape dimensions
        naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    } else {
        naturalSize = videoAssetTrack.naturalSize;
    }
    float renderWidth = naturalSize.width;
    float renderHeight = naturalSize.height;
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps
    // Generate the watermark layers
    [self createWaterWithComposition:mainCompositionInst waterImg:img coverImg:coverImg text:text size:CGSizeMake(renderWidth, renderHeight)];
    NSString *pathToMovie = [self getVideoSaveFilePathString:@".MOV" addPathArray:NO];
    unlink([pathToMovie UTF8String]); // remove any stale file at the output path
    NSURL *compressionFileURL = [NSURL fileURLWithPath:pathToMovie];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exporter.outputURL = compressionFileURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            switch (exporter.status) {
                case AVAssetExportSessionStatusFailed: {
                    _hud.hidden = YES;
                    NSLog(@"Export Status: Failed: %@", exporter.error.localizedDescription);
                    break;
                }
                case AVAssetExportSessionStatusCancelled: {
                    NSLog(@"Export Status: Cancelled");
                    break;
                }
                case AVAssetExportSessionStatusCompleted: {
                    _hud.hidden = YES;
                    // Save the finished video to the photo album
                    [self savePhotosAlbum:compressionFileURL];
                    break;
                }
                case AVAssetExportSessionStatusUnknown: {
                    NSLog(@"Export Status: Unknown");
                    break;
                }
                case AVAssetExportSessionStatusExporting: {
                    NSLog(@"Export Status: Exporting");
                    break;
                }
                case AVAssetExportSessionStatusWaiting: {
                    NSLog(@"Export Status: Waiting");
                    break;
                }
            }
        });
    }];
}
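The method above covers steps 1–4 and 6. Step 5, layering an extra audio resource over the original with a mix mode, is not shown; a minimal sketch of it using AVMutableAudioMix follows, meant to slot in just before the export call. Here bgmURL (the extra audio file) and the 0.5 volume are placeholders, not values from the project:

// Step 5 sketch: mix an extra audio track into the composition (bgmURL is hypothetical)
AVURLAsset *bgmAsset = [AVURLAsset URLAssetWithURL:bgmURL options:opts];
AVMutableCompositionTrack *bgmTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                  preferredTrackID:kCMPersistentTrackID_Invalid];
[bgmTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, endTime)
                  ofTrack:[[bgmAsset tracksWithMediaType:AVMediaTypeAudio] firstObject]
                   atTime:kCMTimeZero
                    error:nil];
// The "mix mode" is expressed as an audio mix with per-track volume settings
AVMutableAudioMixInputParameters *bgmParams = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:bgmTrack];
[bgmParams setVolume:0.5 atTime:kCMTimeZero]; // duck the added track under the original audio
AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
audioMix.inputParameters = @[bgmParams];
exporter.audioMix = audioMix; // attach to the AVAssetExportSession before exporting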
Note: you must check that the asset actually contains audio with [[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0, or problems will follow. The framework defines an option named AVURLAssetPreferPreciseDurationAndTimingKey; passing @YES for it ensures that accurate duration and timing information can be computed when the asset's properties are loaded through the AVAsynchronousKeyValueLoading protocol. The option adds some overhead to loading, but it guarantees the asset ends up in a state fit for editing.
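For context, this is roughly what that asynchronous loading looks like; the chosen keys and the log line are illustrative, not from the project:

NSDictionary *opts = @{AVURLAssetPreferPreciseDurationAndTimingKey: @YES};
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:videoPathURL options:opts];
[asset loadValuesAsynchronouslyForKeys:@[@"duration", @"tracks"] completionHandler:^{
    NSError *error = nil;
    if ([asset statusOfValueForKey:@"duration" error:&error] == AVKeyValueStatusLoaded) {
        // Duration and timing are accurate here thanks to the precise-timing option
        NSLog(@"duration: %.2fs", CMTimeGetSeconds(asset.duration));
    }
}];

With that noted, the watermark-generation code is as follows: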
- (void)createWaterWithComposition:(AVMutableVideoComposition *)composition waterImg:(UIImage *)img coverImg:(UIImage *)coverImg text:(NSString *)text size:(CGSize)size {
    // Text watermark
    UIFont *font = [UIFont systemFontOfSize:30.0];
    CATextLayer *subtitle1Text = [[CATextLayer alloc] init];
    [subtitle1Text setFontSize:30];
    [subtitle1Text setString:text];
    [subtitle1Text setAlignmentMode:kCAAlignmentCenter];
    [subtitle1Text setForegroundColor:[[UIColor whiteColor] CGColor]];
    subtitle1Text.masksToBounds = YES;
    subtitle1Text.cornerRadius = 23.0f;
    [subtitle1Text setBackgroundColor:[UIColor colorWithRed:0 green:0 blue:0 alpha:0.5].CGColor];
    CGSize textSize = [text sizeWithAttributes:[NSDictionary dictionaryWithObjectsAndKeys:font, NSFontAttributeName, nil]];
    [subtitle1Text setFrame:CGRectMake(50, 100, textSize.width + 20, textSize.height + 10)];
    // Image watermark
    CALayer *imgLayer = [CALayer layer];
    imgLayer.contents = (id)img.CGImage;
    imgLayer.bounds = CGRectMake(0, 0, 210, 50);
    imgLayer.position = CGPointMake(size.width / 2.0, size.height / 2.0);
    // Second image watermark (the cover)
    CALayer *coverImgLayer = [CALayer layer];
    coverImgLayer.contents = (id)coverImg.CGImage;
    coverImgLayer.bounds = CGRectMake(50, 200, 210, 50);
    coverImgLayer.position = CGPointMake(size.width / 4.0, size.height / 4.0);
    // The overlay that holds the watermarks
    CALayer *overlayLayer = [CALayer layer];
    [overlayLayer addSublayer:subtitle1Text];
    [overlayLayer addSublayer:imgLayer];
    overlayLayer.frame = CGRectMake(0, 0, size.width, size.height);
    [overlayLayer setMasksToBounds:YES];
    // parentLayer holds the video layer plus everything composited on top of it
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, size.width, size.height);
    videoLayer.frame = CGRectMake(0, 0, size.width, size.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:overlayLayer];
    [parentLayer addSublayer:coverImgLayer];
    // Fade the cover out so it disappears after 5 seconds
    CABasicAnimation *anima = [CABasicAnimation animationWithKeyPath:@"opacity"];
    anima.fromValue = [NSNumber numberWithFloat:1.0f];
    anima.toValue = [NSNumber numberWithFloat:0.0f];
    anima.repeatCount = 0;
    anima.duration = 5.0f;
    [anima setRemovedOnCompletion:NO];
    [anima setFillMode:kCAFillModeForwards];
    // In video compositions a beginTime of 0 means "now", so AVCoreAnimationBeginTimeAtZero must be used instead
    anima.beginTime = AVCoreAnimationBeginTimeAtZero;
    [coverImgLayer addAnimation:anima forKey:@"opacityAnimation"];
    composition.animationTool = [AVVideoCompositionCoreAnimationTool
                                 videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
}
Using AVFoundation together with Core Animation to give video watermarks animated effects is a topic I'll return to in a later post; that's it for today.
GPUImage
GPUImage re-renders the video through GPUImageUIElement and GPUImageMovieWriter, regenerating it with a blend filter layered on top; GPUImageDissolveBlendFilter works well for this. The approach amounts to re-recording the video and stamping the watermark on as it goes, so compatibility is good: practically every supported video format can be handled. Beware of videos without sound, though. If the source has no audio track, wiring up audio capture for the new video fails with "Assertion failure in -[GPUImageMovieWriter createDataFBO]", so here as well you should first check [[videoAsset tracksWithMediaType:AVMediaTypeAudio] count] > 0. Overall, GPUImage only offers a roundabout way of loading a watermark: the principle is simply an extra blend filter whose texture is the watermark image, blended over the video; it provides no deeper editing features. Without further ado, the code:
/**
 Add watermarks with GPUImage

 @param videoPathURL the video path
 @param img the watermark image
 @param coverImg the second watermark image
 @param text the text watermark
 */
- (void)saveVideoPath:(NSURL *)videoPathURL WithWaterImg:(UIImage *)img WithCoverImage:(UIImage *)coverImg WithText:(NSString *)text {
    if (!videoPathURL) return;
    // The blend filter that composites the UI element over the video
    GPUImageOutput<GPUImageInput> *filter = [[GPUImageNormalBlendFilter alloc] init];
    // A dissolve filter works too:
    // filter = [[GPUImageDissolveBlendFilter alloc] init];
    // [(GPUImageDissolveBlendFilter *)filter setMix:0.0f];
    // As does an alpha blend filter:
    // filter = [[GPUImageAlphaBlendFilter alloc] init];
    // [(GPUImageAlphaBlendFilter *)filter setMix:1.0f];
    AVAsset *asset = [AVAsset assetWithURL:videoPathURL];
    // asset.naturalSize is deprecated; read the size from the video track instead
    CGSize size = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject].naturalSize;
    _movieFile = [[GPUImageMovie alloc] initWithAsset:asset];
    _movieFile.playAtActualSpeed = NO;
    // Text watermark
    UILabel *label = [[UILabel alloc] init];
    label.text = text;
    label.font = [UIFont systemFontOfSize:30];
    label.textColor = [UIColor whiteColor];
    [label setTextAlignment:NSTextAlignmentCenter];
    [label sizeToFit];
    label.layer.masksToBounds = YES;
    label.layer.cornerRadius = 18.0f;
    [label setBackgroundColor:[UIColor colorWithRed:0 green:0 blue:0 alpha:0.5]];
    [label setFrame:CGRectMake(50, 100, label.frame.size.width + 20, label.frame.size.height)];
    // Image watermark
    UIImageView *coverImageView1 = [[UIImageView alloc] initWithImage:img];
    [coverImageView1 setFrame:CGRectMake(0, 100, 210, 50)];
    // Second image watermark
    UIImageView *coverImageView2 = [[UIImageView alloc] initWithImage:coverImg];
    [coverImageView2 setFrame:CGRectMake(270, 100, 210, 50)];
    // The view that gets rasterized into the watermark texture
    UIView *subView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, size.width, size.height)];
    subView.backgroundColor = [UIColor clearColor];
    [subView addSubview:coverImageView1];
    [subView addSubview:coverImageView2];
    [subView addSubview:label];
    GPUImageUIElement *element = [[GPUImageUIElement alloc] initWithView:subView];
    NSString *pathToMovie = [self getVideoSaveFilePathString:@".MOV" addPathArray:NO];
    unlink([pathToMovie UTF8String]); // remove any stale file at the output path
    NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
    // Note the hard-coded 720x1280 output size
    self.movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(720.0, 1280.0)];
    // progressFilter sits between the movie and the blend filter so we get a per-frame callback
    GPUImageFilter *progressFilter = [[GPUImageFilter alloc] init];
    [progressFilter addTarget:filter];
    [_movieFile addTarget:progressFilter];
    [element addTarget:filter];
    self.movieWriter.shouldPassthroughAudio = YES;
    if ([[asset tracksWithMediaType:AVMediaTypeAudio] count] > 0) {
        _movieFile.audioEncodingTarget = self.movieWriter;
    } else {
        // No audio: writing an audio track anyway would trigger the createDataFBO assertion
        _movieFile.audioEncodingTarget = nil;
    }
    [_movieFile enableSynchronizedEncodingUsingMovieWriter:self.movieWriter];
    // Feed the blended output into the writer and start
    [filter addTarget:self.movieWriter];
    [self.movieWriter startRecording];
    [_movieFile startProcessing];
    WeakSelf(self); // weak/strong dance macros from the project
    // Per-frame callback: runs for every rendered frame
    [progressFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
        // The watermark can move: shift it one point per frame
        CGRect frame = coverImageView1.frame;
        frame.origin.x += 1;
        frame.origin.y += 1;
        coverImageView1.frame = frame;
        // Hide coverImageView2 after the fifth second
        if (CMTimeGetSeconds(time) >= 5.0) {
            [coverImageView2 removeFromSuperview];
        }
        [element update]; // re-rasterize the UIView so the changes reach the filter chain
    }];
    [self.movieWriter setCompletionBlock:^{
        StrongSelf(self);
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.hud hideAnimated:YES];
            [filter removeTarget:self.movieWriter];
            [self.movieWriter finishRecording];
            // Save to the photo album
            [self savePhotosAlbum:movieURL];
        });
    }];
}
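A call might look like this; videoURL and the asset names are placeholders:

[self saveVideoPath:videoURL
       WithWaterImg:[UIImage imageNamed:@"avatar"]
     WithCoverImage:[UIImage imageNamed:@"demo"]
           WithText:@"avtest"];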
In the progressFilter setFrameProcessingCompletionBlock callback, the code adjusts each element's visibility and position, which gives you free control over when a watermark is shown, e.g., visible at the start and gone after five seconds. Since the callback runs for every rendered frame, animation is just a matter of updating frames there.
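For instance, a time-driven fade could be sketched in the same callback; the three-second window and the alpha math are illustrative, not from the project:

[progressFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
    // Fade the text label out over the first three seconds (illustrative values)
    CGFloat seconds = CMTimeGetSeconds(time);
    label.alpha = MAX(0.0, 1.0 - seconds / 3.0);
    [element update]; // re-rasterize the UIView so the change reaches the filter chain
}];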
Feel free to leave questions in the comments.