AVFoundation框架
1.AVAsset:用于獲取一個多媒體文件的信息,相當(dāng)于獲取一個視頻或音頻文件,是一個抽象類,不能直接使用。
2.AVURLAsset:AVAsset的子類,通過URL路徑創(chuàng)建一個包含多媒體信息的對象。
// Build an asset from any audiovisual URL (local movie file or remote resource).
NSURL *url = <#A URL that identifies an audiovisual asset such as a movie file#>;
AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
3.AVCaptureSession:用于捕捉視頻和音頻,負(fù)責(zé)協(xié)調(diào)視頻和音頻的輸入流和輸出流。
// Create the session and raise the preset to 720p when the hardware allows it.
AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
// BUG FIX: the original checked and set `_captureSession`, an ivar this
// snippet never defines — use the local `captureSession` created above.
if ([captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
4.AVCaptureDevice:表示輸入設(shè)備,如照相機(jī)或麥克風(fēng)。
AVCaptureDevice *device = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
/// Returns the capture device at the given position that supports the
/// 1280x720 session preset, or nil if no such device exists.
/// @param position The camera position to look for (front/back).
/// @return A matching AVCaptureDevice, or nil.
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] != position) continue;
        // BUG FIX: keep scanning instead of bailing out with nil — on hardware
        // with several cameras at the same position, a later device in the
        // list may support the preset even when the first one does not.
        if ([device supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
            return device;
        }
    }
    return nil;
}
5.AVCaptureDeviceInput:視頻或音頻的輸入流,把該對象添加到AVCaptureSession對象中管理。
NSError *error;
// Wrap the capture device in a session input; nil on failure, with `error` set.
AVCaptureDeviceInput *input =
    [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
if (!input) {
    // Handle the error appropriately.
}
// BUG FIX: the original created `input` but then added an undefined
// `captureDeviceInput` variable to the session.
if ([captureSession canAddInput:input]) {
    [captureSession addInput:input];
}
6.AVCaptureOutput:視頻或音頻的輸出流,通常使用它的子類:AVCaptureAudioDataOutput,AVCaptureVideoDataOutput,AVCaptureStillImageOutput,AVCaptureFileOutput等,把該對象添加到AVCaptureSession對象中管理。
// Movie-file output; attach it to the session only when the session accepts it.
AVCaptureMovieFileOutput *movieOutput = [[AVCaptureMovieFileOutput alloc] init];
if ([captureSession canAddOutput:movieOutput]) {
[captureSession addOutput:movieOutput];
}
7.AVCaptureVideoPreviewLayer:預(yù)覽圖層,實(shí)時查看攝像頭捕捉的畫面。
// Preview layer bound to the session: aspect-fill gravity, caller-supplied frame.
AVCaptureVideoPreviewLayer *captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
captureVideoPreviewLayer.frame = <#Set layer frame#>;
8.AVCaptureConnection:AVCaptureSession和輸入輸出流之間的連接,可以用來調(diào)節(jié)一些設(shè)置,如光學(xué)防抖。
// Fetch the video connection from the movie output.
AVCaptureConnection *captureConnection = [movieOutput connectionWithMediaType:AVMediaTypeVideo];
// Enable cinematic video stabilization.
captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
9.AVCaptureDeviceFormat:輸入設(shè)備的一些設(shè)置,可以用來修改一些設(shè)置,如ISO,慢動作,防抖等。
// Set a custom-exposure ISO while keeping the current exposure duration.
AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
NSError *error;
if ([captureDevice lockForConfiguration:&error]) {
CGFloat minISO = captureDevice.activeFormat.minISO;
CGFloat maxISO = captureDevice.activeFormat.maxISO;
// Target ISO at 70% of the active format's full range.
CGFloat currentISO = (maxISO - minISO) * 0.7 + minISO;
[captureDevice setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent ISO:currentISO completionHandler:nil];
[captureDevice unlockForConfiguration];
}else{
// Handle the error appropriately.
}
初始化相機(jī)
/// Coordinates data flow between the input and output devices.
@property (nonatomic, strong) AVCaptureSession *captureSession;
/// Video input stream obtained from an AVCaptureDevice (camera).
@property (nonatomic, strong) AVCaptureDeviceInput *captureDeviceInput;
/// Audio input stream obtained from an AVCaptureDevice (microphone).
@property (nonatomic, strong) AVCaptureDeviceInput *audioCaptureDeviceInput;
/// Movie-file output stream.
@property (nonatomic, strong) AVCaptureMovieFileOutput *captureMovieFileOutput;
/// Layer that previews what the camera captures.
@property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;
... 公用方法
/// 獲取攝像頭設(shè)備
/// Returns the capture device at the given position that supports the
/// 1280x720 session preset, or nil if no such device exists.
/// @param position The camera position to look for (front/back).
/// @return A matching AVCaptureDevice, or nil.
- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition)position {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
        if ([device position] != position) continue;
        // BUG FIX: keep scanning instead of returning nil on the first
        // position match — another device at the same position may support
        // the preset.
        if ([device supportsAVCaptureSessionPreset:AVCaptureSessionPreset1280x720]) {
            return device;
        }
    }
    return nil;
}
... 創(chuàng)建自定義相機(jī)
// 創(chuàng)建AVCaptureSession
// Create the AVCaptureSession (coordinates all inputs and outputs).
_captureSession = [[AVCaptureSession alloc] init];
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1280x720]) _captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
// Look up the back camera.
AVCaptureDevice *videoCaptureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
if (!videoCaptureDevice) {
// Handle the error appropriately.
}
// Build the video input from the camera.
NSError *error = nil;
_captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoCaptureDevice error:&error];
if (error) {
// Handle the error appropriately.
}
// Pick the first available microphone.
AVCaptureDevice *audioCaptureDevice = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
// Build the audio input from the microphone.
_audioCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (error) {
// Handle the error appropriately.
}
// Attach both inputs to the session only if BOTH are accepted.
if ([_captureSession canAddInput:_captureDeviceInput] && [_captureSession canAddInput:_audioCaptureDeviceInput]) {
[_captureSession addInput:_captureDeviceInput];
[_captureSession addInput:_audioCaptureDeviceInput];
}
// Create the movie-file output.
_captureMovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
// Attach the output to the session.
if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
[_captureSession addOutput:_captureMovieFileOutput];
// Fetch the video connection from the output.
AVCaptureConnection *captureConnection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
// Check whether the active format supports cinematic stabilization.
if ([videoCaptureDevice.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeCinematic]) {
// Enable stabilization when available.
captureConnection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
}
}
// Remember the default AVCaptureDeviceFormat and frame durations.
// After switching the capture frequency (slow motion), stabilization can no
// longer be re-enabled; empirically only this default format supports it, so
// it is saved here and restored when slow-motion capture is turned off.
_defaultFormat = videoCaptureDevice.activeFormat;
_defaultMinFrameDuration = videoCaptureDevice.activeVideoMinFrameDuration;
_defaultMaxFrameDuration = videoCaptureDevice.activeVideoMaxFrameDuration;
// Create the preview layer.
_captureVideoPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
_captureVideoPreviewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;// fill mode
_captureVideoPreviewLayer.frame = self.bounds;
// The preview layer is a CALayer, so it can be added as a sublayer of any
// view's layer. This code runs in the view's init, hence self.layer directly.
[self.layer addSublayer:_captureVideoPreviewLayer];
// Start capturing.
[self.captureSession startRunning];
配置操作界面
可以在相機(jī)的預(yù)覽圖層所在的view上面直接addSubview我們需要的視圖,我的做法是直接創(chuàng)建一個和當(dāng)前預(yù)覽圖層一樣大的UIView做控制面板,背景色為透明。然后整體蓋在相機(jī)預(yù)覽圖層上面,所有的手勢方法,按鈕點(diǎn)擊等都在我們的控制面板上作響應(yīng),具體代碼其實(shí)就是通過代理傳遞控制面板的操作讓相機(jī)界面去做對應(yīng)的處理,這里就不貼無用代碼了。
相機(jī)設(shè)置
1.切換到后攝像頭
#pragma mark - Switch to back camera
/// Swaps the session's video input over to the back camera.
- (void)cameraBackgroundDidClickChangeBack {
    AVCaptureDevice *toChangeDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    // BUG FIX: the original passed error:nil and ignored input-creation failure.
    NSError *error = nil;
    AVCaptureDeviceInput *toChangeDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:toChangeDevice error:&error];
    if (!toChangeDeviceInput) {
        // Could not build an input for the back camera; keep the current one.
        return;
    }
    [self.captureSession beginConfiguration];
    AVCaptureDeviceInput *previousInput = self.captureDeviceInput;
    [self.captureSession removeInput:previousInput];
    if ([self.captureSession canAddInput:toChangeDeviceInput]) {
        [self.captureSession addInput:toChangeDeviceInput];
        self.captureDeviceInput = toChangeDeviceInput;
    } else if (previousInput) {
        // BUG FIX: restore the old input so the session is never left without
        // a video source when the new input is rejected.
        [self.captureSession addInput:previousInput];
    }
    [self.captureSession commitConfiguration];
}
2.切換到前攝像頭
/// Swaps the session's video input over to the front camera.
- (void)cameraBackgroundDidClickChangeFront {
    AVCaptureDevice *toChangeDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionFront];
    // BUG FIX: the original passed error:nil and ignored input-creation failure.
    NSError *error = nil;
    AVCaptureDeviceInput *toChangeDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:toChangeDevice error:&error];
    if (!toChangeDeviceInput) {
        // Could not build an input for the front camera; keep the current one.
        return;
    }
    [self.captureSession beginConfiguration];
    AVCaptureDeviceInput *previousInput = self.captureDeviceInput;
    [self.captureSession removeInput:previousInput];
    if ([self.captureSession canAddInput:toChangeDeviceInput]) {
        [self.captureSession addInput:toChangeDeviceInput];
        self.captureDeviceInput = toChangeDeviceInput;
    } else if (previousInput) {
        // BUG FIX: restore the old input so the session is never left without
        // a video source when the new input is rejected.
        [self.captureSession addInput:previousInput];
    }
    [self.captureSession commitConfiguration];
}
3.打開閃光燈
/// Turns the torch (continuous flash for video) on.
- (void)cameraBackgroundDidClickOpenFlash {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOn]) {
            [captureDevice setTorchMode:AVCaptureTorchModeOn];
        }
        // BUG FIX: the original never released the configuration lock,
        // blocking every later lockForConfiguration: call on this device.
        [captureDevice unlockForConfiguration];
    } else {
        // Handle the error appropriately.
    }
}
4.關(guān)閉閃光燈
/// Turns the torch (continuous flash for video) off.
- (void)cameraBackgroundDidClickCloseFlash {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        if ([captureDevice isTorchModeSupported:AVCaptureTorchModeOff]) {
            [captureDevice setTorchMode:AVCaptureTorchModeOff];
        }
        // BUG FIX: the original never released the configuration lock.
        [captureDevice unlockForConfiguration];
    } else {
        // Handle the error appropriately.
    }
}
5.調(diào)節(jié)焦距
// Lens position range 0.0-1.0
/// Locks the lens at the given normalized position.
/// @param focus Normalized lens position in 0.0-1.0.
- (void)cameraBackgroundDidChangeFocus:(CGFloat)focus {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        // BUG FIX: check the mode that is actually applied — the original
        // tested ContinuousAutoFocus but then locked the lens position.
        if ([captureDevice isFocusModeSupported:AVCaptureFocusModeLocked]) {
            [captureDevice setFocusModeLockedWithLensPosition:focus completionHandler:nil];
        }
        // BUG FIX: release the configuration lock.
        [captureDevice unlockForConfiguration];
    } else {
        // Handle the error appropriately.
    }
}
6.數(shù)碼變焦
// Digital zoom, 1x-3x
/// Ramps the digital zoom factor toward `zoom`.
/// @param zoom Target zoom factor (1-3 per the UI contract).
- (void)cameraBackgroundDidChangeZoom:(CGFloat)zoom {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        [captureDevice rampToVideoZoomFactor:zoom withRate:50];
        // BUG FIX: release the configuration lock so later adjustments work.
        [captureDevice unlockForConfiguration];
    } else {
        // Handle the error appropriately.
    }
}
7.調(diào)節(jié)ISO狸驳,光感度
// Adjust ISO (light sensitivity), normalized 0.0-1.0
/// Maps a normalized 0.0-1.0 value onto the active format's ISO range and
/// applies it as a custom exposure, keeping the current exposure duration.
- (void)cameraBackgroundDidChangeISO:(CGFloat)iso {
    AVCaptureDevice *device = [self.captureDeviceInput device];
    NSError *lockError;
    if (![device lockForConfiguration:&lockError]) {
        // Handle the error appropriately.
        return;
    }
    AVCaptureDeviceFormat *format = device.activeFormat;
    CGFloat isoSpan = format.maxISO - format.minISO;
    CGFloat targetISO = format.minISO + isoSpan * iso;
    [device setExposureModeCustomWithDuration:AVCaptureExposureDurationCurrent
                                          ISO:targetISO
                            completionHandler:nil];
    [device unlockForConfiguration];
}
8.點(diǎn)擊屏幕自動對焦
// Tap location in the preview view's coordinate space
/// Focuses and exposes at the tapped point.
/// @param point Tap location in view coordinates.
- (void)cameraBackgroundDidTap:(CGPoint)point {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        CGPoint location = point;
        CGSize frameSize = self.captureVideoPreviewLayer.frame.size;
        // The front camera preview is mirrored, so mirror the x coordinate.
        if ([captureDevice position] == AVCaptureDevicePositionFront) location.x = frameSize.width - location.x;
        // Convert view coordinates to the device's point-of-interest space
        // (normalized 0..1, landscape-oriented).
        CGPoint pointOfInterest = CGPointMake(location.y / frameSize.height, 1.f - (location.x / frameSize.width));
        [self focusWithMode:AVCaptureFocusModeAutoFocus exposureMode:AVCaptureExposureModeAutoExpose atPoint:pointOfInterest];
        // BUG FIX: release the configuration lock.
        [captureDevice unlockForConfiguration];
        // NOTE(review): this registers a new KVO observer on every tap and is
        // never removed — observers stack up. Confirm intent and deduplicate.
        [[self.captureDeviceInput device] addObserver:self forKeyPath:@"ISO" options:NSKeyValueObservingOptionNew context:NULL];
    } else {
        // Handle the error appropriately.
    }
}
/// Applies the given focus/exposure modes at a normalized point of interest.
/// @param focusMode Focus mode to apply when the device supports it.
/// @param exposureMode Exposure mode to apply when the device supports it.
/// @param point Point of interest in device space (0..1, landscape).
- (void)focusWithMode:(AVCaptureFocusMode)focusMode exposureMode:(AVCaptureExposureMode)exposureMode atPoint:(CGPoint)point {
    AVCaptureDevice *captureDevice = [self.captureDeviceInput device];
    NSError *error;
    if ([captureDevice lockForConfiguration:&error]) {
        // BUG FIX: honor the parameters — the original checked the parameter
        // values but then hard-coded AutoFocus/AutoExpose when setting.
        if ([captureDevice isFocusModeSupported:focusMode]) [captureDevice setFocusMode:focusMode];
        if ([captureDevice isFocusPointOfInterestSupported]) [captureDevice setFocusPointOfInterest:point];
        if ([captureDevice isExposureModeSupported:exposureMode]) [captureDevice setExposureMode:exposureMode];
        if ([captureDevice isExposurePointOfInterestSupported]) [captureDevice setExposurePointOfInterest:point];
        // BUG FIX: release the configuration lock.
        [captureDevice unlockForConfiguration];
    } else {
        // Handle the error appropriately.
    }
}
9.獲取錄制時視頻的方向
因?yàn)橄鄼C(jī)的特殊性,不能夠用常規(guī)的控制器的方向來獲取當(dāng)前的方向,因?yàn)橛脩艨赡荜P(guān)閉屏幕旋轉(zhuǎn),這里用重力感應(yīng)來計算當(dāng)前手機(jī)的放置狀態(tài)。
/// Feeds gravity samples used to derive the physical device orientation.
@property (nonatomic, strong) CMMotionManager *motionManager;
/// Last orientation computed from gravity.
@property (nonatomic, assign) UIDeviceOrientation deviceOrientation;
...
_motionManager = [[CMMotionManager alloc] init];
// 15 Hz is plenty for orientation detection.
_motionManager.deviceMotionUpdateInterval = 1/15.0;
if (_motionManager.deviceMotionAvailable) {
    // BUG FIX: capture self weakly — the handler is retained by
    // _motionManager, which self owns, so a strong capture is a retain cycle.
    __weak typeof(self) weakSelf = self;
    [_motionManager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue] withHandler:^(CMDeviceMotion * _Nullable motion, NSError * _Nullable error) {
        [weakSelf performSelectorOnMainThread:@selector(handleDeviceMotion:) withObject:motion waitUntilDone:YES];
    }];
} else {
    NSLog(@"No device motion on device");
}
...
/// Gravity callback: derives the physical orientation from the gravity
/// vector, which keeps working even when the user has locked screen rotation.
- (void)handleDeviceMotion:(CMDeviceMotion *)deviceMotion {
    double x = deviceMotion.gravity.x;
    double y = deviceMotion.gravity.y;
    // The dominant gravity axis picks portrait vs. landscape; its sign picks
    // which of the two orientations on that axis.
    // FIX: dropped the `videoTransform` local — it was computed in every
    // branch but never read.
    if (fabs(y) >= fabs(x)) {
        _deviceOrientation = (y >= 0) ? UIDeviceOrientationPortraitUpsideDown
                                      : UIDeviceOrientationPortrait;
    } else {
        // x >= 0: landscape with the home button on the left; else on the right.
        _deviceOrientation = (x >= 0) ? UIDeviceOrientationLandscapeRight
                                      : UIDeviceOrientationLandscapeLeft;
    }
    // Tell the control panel so buttons can rotate to match the screen.
    [self.backgroundView setOrientation:_deviceOrientation];
}
11.慢動作拍攝
/// Enables slow motion: selects the widest device format whose frame-rate
/// range contains 240 fps and pins the frame duration to that rate.
- (void)cameraBackgroundDidClickOpenSlow {
    [self.captureSession stopRunning];
    CGFloat desiredFPS = 240.0;
    AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
    AVCaptureDeviceFormat *selectedFormat = nil;
    int32_t maxWidth = 0;
    // Scan every format/frame-rate pair; keep the widest format whose range
    // contains the target frame rate.
    // FIX: dropped the `frameRateRange` local — it was assigned but never used.
    for (AVCaptureDeviceFormat *format in [videoDevice formats]) {
        for (AVFrameRateRange *range in format.videoSupportedFrameRateRanges) {
            CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
            if (range.minFrameRate <= desiredFPS && desiredFPS <= range.maxFrameRate && dimensions.width >= maxWidth) {
                selectedFormat = format;
                maxWidth = dimensions.width;
            }
        }
    }
    if (selectedFormat) {
        // BUG FIX: surface the lock error instead of passing nil.
        NSError *error = nil;
        if ([videoDevice lockForConfiguration:&error]) {
            NSLog(@"selected format: %@", selectedFormat);
            videoDevice.activeFormat = selectedFormat;
            videoDevice.activeVideoMinFrameDuration = CMTimeMake(1, (int32_t)desiredFPS);
            videoDevice.activeVideoMaxFrameDuration = CMTimeMake(1, (int32_t)desiredFPS);
            [videoDevice unlockForConfiguration];
        }
    }
    [self.captureSession startRunning];
}
12.慢動作拍攝關(guān)
/// Disables slow motion by restoring the device format saved at setup time.
/// FIX: the original scanned for a 60 fps format but then ignored the result
/// and applied _defaultFormat anyway — the dead scan is removed. Restoring the
/// saved default format is what re-enables video stabilization.
- (void)cameraBackgroundDidClickCloseSlow {
    [self.captureSession stopRunning];
    AVCaptureDevice *videoDevice = self.captureDeviceInput.device;
    if (_defaultFormat) {
        if ([videoDevice lockForConfiguration:nil]) {
            NSLog(@"selected format: %@", _defaultFormat);
            videoDevice.activeFormat = _defaultFormat;
            videoDevice.activeVideoMinFrameDuration = _defaultMinFrameDuration;
            videoDevice.activeVideoMaxFrameDuration = _defaultMaxFrameDuration;
            [videoDevice unlockForConfiguration];
        }
    }
    [self.captureSession startRunning];
}
13.防抖開啟
/// Turns on cinematic video stabilization when the active format supports it.
- (void)cameraBackgroundDidClickOpenAntiShake {
    AVCaptureConnection *connection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    NSLog(@"change captureConnection: %@", connection);
    AVCaptureDevice *device = self.captureDeviceInput.device;
    NSLog(@"set format: %@", device.activeFormat);
    BOOL cinematicSupported = [device.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeCinematic];
    if (cinematicSupported) {
        connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeCinematic;
    }
}
14.防抖關(guān)閉
#pragma mark - Stabilization off
/// Switches video stabilization off when the active format supports it.
- (void)cameraBackgroundDidClickCloseAntiShake {
    AVCaptureConnection *connection = [_captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    NSLog(@"change captureConnection: %@", connection);
    AVCaptureDevice *device = self.captureDeviceInput.device;
    BOOL offSupported = [device.activeFormat isVideoStabilizationModeSupported:AVCaptureVideoStabilizationModeOff];
    if (offSupported) {
        connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeOff;
    }
}
15.錄制視頻
#pragma mark - Recording
/// Starts movie-file recording if it is not already running, matching the
/// recorded video's orientation to the current physical device orientation.
- (void)cameraBackgroundDidClickPlay {
    AVCaptureConnection *connection = [self.captureMovieFileOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([self.captureMovieFileOutput isRecording]) return;
    // Keep the recorded video upright regardless of the rotation lock.
    connection.videoOrientation = (AVCaptureVideoOrientation)_deviceOrientation;
    NSString *outputFilePath = [kCachePath stringByAppendingPathComponent:[self movieName]];
    NSURL *fileURL = [NSURL fileURLWithPath:outputFilePath];
    [self.captureMovieFileOutput startRecordingToOutputFileURL:fileURL recordingDelegate:self];
    _currentMoviePath = outputFilePath;
}
/// AVCaptureFileOutputRecordingDelegate — recording has started.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections {
NSLog(@"開始錄制");
}
/// AVCaptureFileOutputRecordingDelegate — recording finished (check `error` for failure).
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
NSLog(@"錄制完成");
}
16.暫停錄制
[self.captureMovieFileOutput stopRecording];
17.調(diào)節(jié)視頻的速度
慢動作拍攝的時候要調(diào)節(jié)攝像頭的捕捉頻率,快速的時候直接調(diào)節(jié)視頻速度就可以了。
慢動作下拍攝的視頻的播放時長還是實(shí)際拍攝的時間,這里根據(jù)設(shè)置的慢速倍率,把視頻的時長拉長。
/// Re-times a recorded clip for fast/slow playback, exports a .MOV copy, and
/// saves the result to the photo album.
/// @param video Clip description (kMoviePath: source path, kMovieSpeed: rate key).
/// @param completed Invoked on the main queue after the save attempt finishes.
- (void)setSpeedWithVideo:(NSDictionary *)video completed:(void(^)())completed {
    dispatch_async(dispatch_get_global_queue(0, 0), ^{
        NSLog(@"video set thread: %@", [NSThread currentThread]);
        // Load the source clip.
        AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:video[kMoviePath]] options:nil];
        // Composition with one video and one audio track.
        AVMutableComposition *mixComposition = [AVMutableComposition composition];
        AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        // Derive the capture orientation from the source track's transform.
        CGAffineTransform videoTransform = [videoAsset tracksWithMediaType:AVMediaTypeVideo].lastObject.preferredTransform;
        if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
            NSLog(@"垂直拍攝");
            videoTransform = CGAffineTransformMakeRotation(M_PI_2);
        } else if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
            NSLog(@"倒立拍攝");
            videoTransform = CGAffineTransformMakeRotation(-M_PI_2);
        } else if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
            NSLog(@"Home鍵右側(cè)水平拍攝");
            videoTransform = CGAffineTransformMakeRotation(0);
        } else if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
            NSLog(@"Home鍵左側(cè)水平拍攝");
            videoTransform = CGAffineTransformMakeRotation(M_PI);
        }
        // Propagate the orientation onto the composition track.
        compositionVideoTrack.preferredTransform = videoTransform;
        compositionVideoTrack.naturalTimeScale = 600;
        // Insert the full source video and audio ranges at time zero.
        [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale)) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject] atTime:kCMTimeZero error:nil];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale)) ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeAudio] firstObject] atTime:kCMTimeZero error:nil];
        // Map the requested speed onto a duration scale factor.
        CGFloat scale = 1.0;
        if ([video[kMovieSpeed] isEqualToString:kMovieSpeed_Fast]) {
            scale = 0.2f; // fast: x5
        } else if ([video[kMovieSpeed] isEqualToString:kMovieSpeed_Slow]) {
            scale = 4.0f; // slow: x4
        }
        // Stretch/compress both tracks by the scale factor.
        [compositionVideoTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale)) toDuration:CMTimeMake(videoAsset.duration.value * scale, videoAsset.duration.timescale)];
        [compositionAudioTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMake(videoAsset.duration.value, videoAsset.duration.timescale)) toDuration:CMTimeMake(videoAsset.duration.value * scale, videoAsset.duration.timescale)];
        // Configure the export session (720p preset, QuickTime .MOV container).
        AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720];
        NSString *exportPath = [kCachePath stringByAppendingPathComponent:[self movieName]];
        assetExport.outputFileType = AVFileTypeQuickTimeMovie;
        assetExport.outputURL = [NSURL fileURLWithPath:exportPath];
        assetExport.shouldOptimizeForNetworkUse = YES;
        [assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
            dispatch_async(dispatch_get_main_queue(), ^{
                [_processedVideoPaths addObject:exportPath];
                // Save the exported clip to the photo album.
                // BUG FIX: the album APIs need file URLs; the original built
                // them with +URLWithString:, which yields a scheme-less URL
                // from a plain filesystem path.
                ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
                if (![library videoAtPathIsCompatibleWithSavedPhotosAlbum:[NSURL fileURLWithPath:exportPath]]) {
                    NSLog(@"cache can't write");
                    if (completed) completed();
                    return;
                }
                [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:exportPath] completionBlock:^(NSURL *assetURL, NSError *saveError) {
                    if (saveError) {
                        NSLog(@"cache write error");
                    } else {
                        NSLog(@"cache write success");
                    }
                    if (completed) completed();
                }];
            });
        }];
    });
}
18.將多個視頻合并為一個視頻
/// Concatenates the clips at `paths`, in array order, into a single portrait
/// .MOV and hands the merged file path to `completed` on the main queue.
/// @param paths Filesystem paths of the clips to merge, in playback order.
/// @param completed Called on the main queue with the output path.
- (void)mergeVideosWithPaths:(NSArray *)paths completed:(void(^)(NSString *videoPath))completed {
    if (!paths.count) return;
    dispatch_async(dispatch_get_main_queue(), ^{
        AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];
        AVMutableCompositionTrack *audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        // Recorded clips are portrait; rotate the merged track to match.
        videoTrack.preferredTransform = CGAffineTransformRotate(CGAffineTransformIdentity, M_PI_2);
        CMTime totalDuration = kCMTimeZero;
        for (int i = 0; i < paths.count; i++) {
            AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:paths[i]]];
            AVAssetTrack *assetAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
            AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
            NSLog(@"%lld", asset.duration.value/asset.duration.timescale);
            NSError *erroraudio = nil;
            BOOL ba = [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetAudioTrack atTime:totalDuration error:&erroraudio];
            NSLog(@"erroraudio:%@--%d", erroraudio, ba);
            NSError *errorVideo = nil;
            BOOL bl = [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:assetVideoTrack atTime:totalDuration error:&errorVideo];
            NSLog(@"errorVideo:%@--%d", errorVideo, bl);
            // BUG FIX: advance the insertion cursor. The accumulation line had
            // been commented out along with the layer-instruction experiment,
            // so every clip was being inserted at time zero.
            totalDuration = CMTimeAdd(totalDuration, asset.duration);
        }
        NSString *outPath = [kVideoPath stringByAppendingPathComponent:[self movieName]];
        NSURL *mergeFileURL = [NSURL fileURLWithPath:outPath];
        AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
        exporter.outputURL = mergeFileURL;
        exporter.outputFileType = AVFileTypeQuickTimeMovie;
        exporter.shouldOptimizeForNetworkUse = YES;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
            dispatch_async(dispatch_get_main_queue(), ^{
                if (completed) completed(outPath);
            });
        }];
    });
}
如果對你有幫助請點(diǎn)贊加關(guān)注,每周都會更新。