前言
這是一個關(guān)于視音頻的系列文章蹲坷,代碼不追求質(zhì)量坷剧,只把我認(rèn)為最關(guān)鍵的部分提取出來浦夷,旨在方便學(xué)習(xí)辖试,所以不建議直接用在項目之中。
本篇文章主要參考自:ObjC 中國
視頻資源獲取
UIImagePickerController使用
// Present the system camera UI configured for movie capture.
// Only attempt this when a camera is actually available (e.g. not in the simulator).
if UIImagePickerController.isSourceTypeAvailable(.camera) {
    let picker = UIImagePickerController()
    picker.sourceType = .camera
    // Restrict the picker to video capture only.
    picker.mediaTypes = [kUTTypeMovie as String]
    picker.videoQuality = .typeHigh
    picker.delegate = self
    present(picker, animated: true, completion: nil)
}
AVFoundation視音頻采集
AVCaptureDevice -> AVCaptureDeviceInput -> AVCaptureSession
// Build the capture pipeline: camera + mic inputs feeding an AVCaptureSession,
// with a preview layer inserted behind the view's other sublayers.
AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
self.captureSession = captureSession;

// 攝像頭設(shè)備配置 — pick the back camera from the available video devices.
AVCaptureDevice *cameraDevice;
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
for (AVCaptureDevice *device in devices) {
    // Fixed typo: the property is `position`, not `positon` (did not compile).
    if (device.position == AVCaptureDevicePositionBack) {
        cameraDevice = device;
        break; // stop at the first back camera
    }
}
// Surface the error instead of passing `error:nil` — input is nil on failure.
NSError *cameraInputError = nil;
AVCaptureDeviceInput *cameraDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:cameraDevice error:&cameraInputError];
if (!cameraDeviceInput) {
    NSLog(@"camera input error: %@", cameraInputError);
}

// 麥克風(fēng)設(shè)備配置
// Fixed: the singleton accessor is `sharedInstance` (not `shareInstance`), and
// a capture session needs PlayAndRecord — the original `...CategoryPlayBack`
// constant does not exist (the playback-only constant is spelled `Playback`).
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
NSArray *inputs = [audioSession availableInputs];
AVAudioSessionPortDescription *builtInMic = nil;
for (AVAudioSessionPortDescription *port in inputs) {
    if ([port.portType isEqualToString:AVAudioSessionPortBuiltInMic]) {
        builtInMic = port;
        break;
    }
}
// Prefer the front-facing data source of the built-in mic.
for (AVAudioSessionDataSourceDescription *source in builtInMic.dataSources) {
    if ([source.orientation isEqualToString:AVAudioSessionOrientationFront]) {
        [builtInMic setPreferredDataSource:source error:nil];
        [audioSession setPreferredInput:builtInMic error:nil];
        break;
    }
}
NSError *micInputError = nil;
AVCaptureDeviceInput *micDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio] error:&micInputError];
if (!micDeviceInput) {
    NSLog(@"mic input error: %@", micInputError);
}

if ([captureSession canAddInput:cameraDeviceInput]) {
    [captureSession addInput:cameraDeviceInput];
}
if ([captureSession canAddInput:micDeviceInput]) {
    [captureSession addInput:micDeviceInput];
}

// Live preview, inserted at index 0 so existing UI stays on top.
AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
previewLayer.frame = self.view.bounds;
[self.view.layer insertSublayer:previewLayer atIndex:0];
self.captureSession.sessionPreset = AVCaptureSessionPresetHigh;
[self.captureSession startRunning];
FileOutput視音頻文件輸出
AVCaptureSession -> AVCaptureMovieFileOutput
// Attach a movie-file output and start recording to `url`.
AVCaptureMovieFileOutput *movieFileOutput = [AVCaptureMovieFileOutput new];
if ([self.captureSession canAddOutput:movieFileOutput]) {
    [self.captureSession addOutput:movieFileOutput];
    // Fixed: the original never assigned self.movieFileOutput, so the
    // startRecording call below was a message to nil and did nothing.
    self.movieFileOutput = movieFileOutput;
}
// 開始錄像 — completion is delivered via the recording delegate callback.
[self.movieFileOutput startRecordingToOutputFileURL:url recordingDelegate:self];
/// AVCaptureFileOutputRecordingDelegate callback: fired once the movie file
/// output has finished writing to `outputFileURL`. `error` is non-nil when the
/// recording failed or was interrupted.
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error {
// Called after recording ends.
}
Writer視音頻輸出
captureSession -> captureVideo(or Audio)DataOutput
assetWriter -> assetWriterInput
captureSession 和 writer是平級劈狐,不需要關(guān)聯(lián)
// Output
// --- Data outputs ---
// Deliver sample buffers on a dedicated serial queue: the API requires a
// serial queue, and using the main queue (as before) risks dropped frames
// whenever the UI is busy.
dispatch_queue_t sampleBufferQueue = dispatch_queue_create("com.capture.samplebuffer", DISPATCH_QUEUE_SERIAL);

AVCaptureVideoDataOutput *videoDataOutput = [AVCaptureVideoDataOutput new];
videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[videoDataOutput setSampleBufferDelegate:self queue:sampleBufferQueue];
videoDataOutput.videoSettings = nil; // nil = device-native (uncompressed) format
if ([self.captureSession canAddOutput:videoDataOutput]) { // guard was missing
    [self.captureSession addOutput:videoDataOutput];
}
self.videoConnection = [videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
self.videoSettings = [videoDataOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie];

AVCaptureAudioDataOutput *audioDataOutput = [AVCaptureAudioDataOutput new];
[audioDataOutput setSampleBufferDelegate:self queue:sampleBufferQueue];
if ([self.captureSession canAddOutput:audioDataOutput]) {
    [self.captureSession addOutput:audioDataOutput];
}
self.audioConnection = [audioDataOutput connectionWithMediaType:AVMediaTypeAudio];
self.audioSettings = [audioDataOutput recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeQuickTimeMovie];

// --- Writer ---
// Renamed local `write` -> `writer`, and stop swallowing the init error:
// the writer is nil on failure and everything after would silently no-op.
NSError *writerError = nil;
AVAssetWriter *writer = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie error:&writerError];
if (!writer) {
    NSLog(@"failed to create AVAssetWriter: %@", writerError);
}

AVAssetWriterInput *videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:self.videoSettings];
videoInput.expectsMediaDataInRealTime = YES; // live capture: never stall the pipeline
// Rotate 90° so portrait-captured frames play back upright.
videoInput.transform = CGAffineTransformMakeRotation(M_PI_2);
if ([writer canAddInput:videoInput]) {
    [writer addInput:videoInput];
    self.videoInput = videoInput;
}

AVAssetWriterInput *audioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:self.audioSettings];
audioInput.expectsMediaDataInRealTime = YES;
if ([writer canAddInput:audioInput]) {
    [writer addInput:audioInput];
    self.audioInput = audioInput;
}

self.writer = writer;
// Check the BOOL return; writer.error explains a failed start.
if (![self.writer startWriting]) {
    NSLog(@"startWriting failed: %@", self.writer.error);
}
視頻開始采集后,需要在 DataOutput 的代理方法中接收采集到的 buffer,并通過 writerInput 將其追加(append)寫入文件。
使用writer采集數(shù)據(jù)可定制性更高。