Here are my notes on the live-streaming push technology I have been studying recently.
The main flow of pushing a stream on iOS is as follows:
Capture
Capture on iOS generally uses AVFoundation. The classes involved are:
AVCaptureSession: the capture session object. One side connects to the inputs, the other to the outputs that deliver the captured raw audio/video data to the app; it also manages starting and stopping the capture.
AVCaptureDevice: the physical capture device, i.e. the microphone or the camera (front or back). When fetching a device you generally specify a mediaType: AVMediaTypeAudio (audio) or AVMediaTypeVideo (video).
AVCaptureDeviceInput: the capture input object, initialized from an AVCaptureDevice.
AVCaptureVideoDataOutput / AVCaptureAudioDataOutput: the output objects for the captured video and audio data, matching the inputs. Enough talk, let's look at the code!
// First create a capture class and define the properties it needs
@interface Capture()<AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate> {
    // Capture-related objects
    AVCaptureVideoDataOutput *videoOutput;
    AVCaptureAudioDataOutput *audioOutput;
    AVCaptureConnection *videoConnection;
    AVCaptureConnection *audioConnection;
    dispatch_queue_t acaptureQueue;     // serial queue for audio output callbacks
    dispatch_queue_t vcaptureQueue;     // serial queue for video output callbacks
    dispatch_semaphore_t semaphore;
    AVCaptureDeviceInput *_deviceInput; // current video input, kept so we can switch cameras
    int sampleCount;                    // count of video sample buffers received
}
@property (nonatomic, assign) AVCaptureDevicePosition devicePosition;
@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
@property (nonatomic, copy) AVCaptureSessionPreset preset; // NSString-typed, so copy rather than assign
@property (nonatomic, assign) BOOL isMirrored;
@end
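The two queues above are used later when setting the sample buffer delegates, but their creation isn't shown here; a minimal sketch (the queue labels are my own) would be:
// Serial queues so that sample buffers are delivered to the delegate in order
vcaptureQueue = dispatch_queue_create("com.capture.video", DISPATCH_QUEUE_SERIAL);
acaptureQueue = dispatch_queue_create("com.capture.audio", DISPATCH_QUEUE_SERIAL);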
- Initialization
// 1. Create the capture session, an AVCaptureSession
_session = [[AVCaptureSession alloc] init];
// Set the capture width/height via the session preset
_session.sessionPreset = _preset; // @see AVCaptureSessionPreset1280x720
// 2. Set up audio capture
AVCaptureDevice *micro = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInMicrophone mediaType:AVMediaTypeAudio position:AVCaptureDevicePositionUnspecified];
AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:micro error:nil];
if (![_session canAddInput:audioInput]) {
    NSLog(@"can not add audioInput");
    return NO;
}
// Add the audio input to the session
[_session addInput:audioInput];
// 3. Get the physical video device; the back camera is selected here
AVCaptureDevice *camera = [self videoDeviceWithPosition:_devicePosition];
// Create the video input object from the physical device
AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:camera error:nil];
if (![_session canAddInput:videoInput]) {
    NSLog(@"can not add video input");
    return NO;
}
_deviceInput = videoInput;
[_session addInput:videoInput];
/* If [session startRunning] has already been called, any change to the input/output
   configuration must be wrapped in [session beginConfiguration] and [session commitConfiguration]
   to take effect. If startRunning has not been called yet, these two calls can be omitted. */
[_session beginConfiguration];
// 4. Create the video data output
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// Configure the output pixel format, here NV12 (420YpCbCr8, bi-planar, full range)
NSDictionary *videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
videoOutput.videoSettings = videoSettings;
// Drop policy when capture outruns processing: YES (the default) discards frames that
// arrive while the callback queue is still busy; NO would buffer them instead
videoOutput.alwaysDiscardsLateVideoFrames = YES;
// vcaptureQueue is the serial queue the video callbacks are delivered on
// Set the video output delegate
[videoOutput setSampleBufferDelegate:self queue:vcaptureQueue];
// Add the video output to the session
[_session addOutput:videoOutput];
// 5. Create the audio data output
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
// acaptureQueue is the serial queue the audio callbacks are delivered on
// Set the audio output delegate
[audioOutput setSampleBufferDelegate:self queue:acaptureQueue];
// Add the audio output to the session
[_session addOutput:audioOutput];
// 6. Create the audio and video AVCaptureConnections
/* AVCaptureConnection represents the connection between AVCaptureInputPorts and an
   AVCaptureOutput (or an AVCaptureVideoPreviewLayer, which can render the captured content). */
videoConnection = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
audioConnection = [audioOutput connectionWithMediaType:AVMediaTypeAudio];
// Mirror the captured video if requested
videoConnection.videoMirrored = _isMirrored;
// Set the capture orientation; without this the captured video comes out rotated 90°
if ([videoConnection isVideoOrientationSupported]) {
    videoConnection.videoOrientation = _orientation;
}
// Commit the configuration
[_session commitConfiguration];
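The videoDeviceWithPosition: helper used above isn't shown here. A minimal sketch using AVCaptureDeviceDiscoverySession (iOS 10+), assuming the built-in wide-angle camera is what we want:
- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position {
    // Discover the built-in wide-angle camera at the requested position (front/back)
    AVCaptureDeviceDiscoverySession *discovery =
        [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                                                               mediaType:AVMediaTypeVideo
                                                                position:position];
    return discovery.devices.firstObject;
}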
- The above covers initialization; to start and stop capture you still need to call:
/// Start capturing
- (void)startRunning {
    [_session startRunning];
}
/// Stop capturing
- (void)stopRunning {
    [_session stopRunning];
}
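Two caveats: capture requires the NSCameraUsageDescription and NSMicrophoneUsageDescription keys in Info.plist plus runtime authorization, and startRunning is a blocking call that Apple recommends invoking off the main thread. A sketch of a safer start:
// Ask for camera permission first (microphone works the same way with AVMediaTypeAudio)
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) { return; }
    // startRunning blocks until the session is up, so keep it off the main thread
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        [self startRunning];
    });
}];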
- Implementing the delegate
// The sampleBuffer here is the captured data; use the connection to tell video from audio
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    if (connection == videoConnection) {
        NSLog(@"Got a video sampleBuffer, process it further (H.264 encoding) %i", ++sampleCount);
        if (self.delegate) {
            [self.delegate capture:self videoBuffer:sampleBuffer];
        }
    } else if (connection == audioConnection) {
        NSLog(@"Got an audio sampleBuffer, process it further (AAC encoding)");
        if (self.delegate) {
            [self.delegate capture:self audioBuffer:sampleBuffer];
        }
    }
}
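Inside the video branch, an encoder typically needs the raw pixel buffer and the presentation timestamp from the CMSampleBufferRef; a minimal sketch of that extraction (the encoder itself is out of scope here):
// Pull the NV12 pixel buffer and presentation timestamp out of the sample buffer
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
// e.g. hand pixelBuffer and pts to a VTCompressionSession for H.264 encoding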
- Display: AVCaptureVideoPreviewLayer can render the captured video. Define a custom UIView subclass named CapturePreviewView, set its layerClass to [AVCaptureVideoPreviewLayer class] (a sketch of the view follows below), and configure the basic layer parameters:
self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait;
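A minimal sketch of the CapturePreviewView described above; the previewLayer property is just a typed accessor for the view's backing layer:
@interface CapturePreviewView : UIView
@property (nonatomic, readonly) AVCaptureVideoPreviewLayer *previewLayer;
@end

@implementation CapturePreviewView
// Back the view with an AVCaptureVideoPreviewLayer instead of a plain CALayer
+ (Class)layerClass {
    return [AVCaptureVideoPreviewLayer class];
}
- (AVCaptureVideoPreviewLayer *)previewLayer {
    return (AVCaptureVideoPreviewLayer *)self.layer;
}
@end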
Then call this from viewDidLoad:
- (void)showPreview {
self.preview = [[CapturePreviewView alloc] initWithFrame:self.view.bounds];
_preview.previewLayer.session = self.capture.session;
_preview.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
[self.view insertSubview:_preview atIndex:0];
}
- We can also preview what is being captured by playing the CMSampleBufferRef data with AVSampleBufferDisplayLayer.
-(void)showSampleLayer {
    _displayLayer = [[AVSampleBufferDisplayLayer alloc] init];
    // Small overlay at 1/5 of the encoder's configured dimensions
    _displayLayer.frame = CGRectMake(0, 0, _videoEncoder.config->width / 5.0, _videoEncoder.config->height / 5.0);
    _displayLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    [self.preview.layer insertSublayer:_displayLayer above:_preview.previewLayer];
}
// Play the buffer inside the video capture callback
-(void)capture:(Capture *)capture videoBuffer:(CMSampleBufferRef _Nullable)buffer
{
    // Display the frame
    [_displayLayer enqueueSampleBuffer:buffer];
    // [_videoEncoder encode:buffer timeStamp:CACurrentMediaTime()*1000];
}
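One caveat: AVSampleBufferDisplayLayer can enter a failed state (for example after the app returns from the background), after which enqueued buffers are silently dropped. Checking the status and flushing before enqueueing is a common workaround:
// Recover the display layer if it has failed before enqueueing new frames
if (_displayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
    [_displayLayer flush];
}
[_displayLayer enqueueSampleBuffer:buffer];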
- Modifying capture parameters, such as sample rate, frame rate, and so on
/// Update the frame rate
- (void)updateFps:(int32_t)fps {
    AVCaptureDevice *vDevice = [self videoDeviceWithPosition:_devicePosition];
    // Get the maximum fps the current format supports
    float maxRate = [(AVFrameRateRange *)[vDevice.activeFormat.videoSupportedFrameRateRanges objectAtIndex:0] maxFrameRate];
    // Only apply the change if the requested fps does not exceed that maximum
    if (maxRate >= fps) {
        // The actual fps change; the device must be locked for configuration first
        if ([vDevice lockForConfiguration:NULL]) {
            // CMTimeMake(10, fps * 10) is 1/fps seconds per frame
            vDevice.activeVideoMinFrameDuration = CMTimeMake(10, (int)(fps * 10));
            vDevice.activeVideoMaxFrameDuration = vDevice.activeVideoMinFrameDuration;
            [vDevice unlockForConfiguration];
        }
    }
}
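Note that activeVideoMinFrameDuration and activeVideoMaxFrameDuration apply to the device's current activeFormat and reset to their defaults when the format changes, so after switching cameras or changing the session preset the frame rate should be re-applied, e.g. [self updateFps:30];.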
/// Switch camera (front or back)
- (void)changeCameraPosition {
    dispatch_async(vcaptureQueue, ^{
        if (self.devicePosition == AVCaptureDevicePositionFront) {
            self.devicePosition = AVCaptureDevicePositionBack;
        } else {
            self.devicePosition = AVCaptureDevicePositionFront;
        }
        AVCaptureDevice *camera = [self videoDeviceWithPosition:self.devicePosition];
        AVCaptureDeviceInput *videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:camera error:nil];
        if (!videoInput) {
            NSLog(@"can not init video input");
            return;
        }
        [self.session beginConfiguration];
        [self.session removeInput:self->_deviceInput];
        if (![self.session canAddInput:videoInput]) {
            NSLog(@"can not add video input");
            // Restore the old input so the session is not left without video
            [self.session addInput:self->_deviceInput];
            [self.session commitConfiguration];
            return;
        }
        self->_deviceInput = videoInput;
        [self.session addInput:videoInput];
        [self.session commitConfiguration];
    });
}
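One thing worth noting here: removing and adding inputs tears down and rebuilds the AVCaptureConnection, so the cached videoConnection goes stale and orientation/mirroring revert to their defaults. Something along these lines after commitConfiguration should restore them (a sketch, not part of the original code):
// Refresh the cached connection and re-apply its settings after the input swap
self->videoConnection = [self->videoOutput connectionWithMediaType:AVMediaTypeVideo];
if ([self->videoConnection isVideoOrientationSupported]) {
    self->videoConnection.videoOrientation = self.orientation;
}
self->videoConnection.videoMirrored = self.isMirrored;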
/// Set the video capture orientation
- (void)setVideoOrientation:(AVCaptureVideoOrientation)orientation
{
_orientation = orientation;
dispatch_async(vcaptureQueue, ^{
self->videoConnection.videoOrientation = orientation;
});
}
/// Set whether capture is mirrored
- (void)setVideoMirrored:(BOOL)isMirrored
{
_isMirrored = isMirrored;
dispatch_async(vcaptureQueue, ^{
self->videoConnection.videoMirrored = isMirrored;
});
}
/// Set the capture resolution
- (void)setVideoDimension:(AVCaptureSessionPreset)preset
{
_preset = preset;
dispatch_async(vcaptureQueue, ^{
[self.session beginConfiguration];
if ([self.session canSetSessionPreset:preset]) {
[self.session setSessionPreset:preset];
}
[self.session commitConfiguration];
});
}
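Putting it together, a hypothetical usage from the view controller side (assuming Capture exposes a plain init, the delegate property, and the session used by the preview, as in the snippets above):
self.capture = [[Capture alloc] init];
self.capture.delegate = self;   // receives the CMSampleBufferRef callbacks shown earlier
[self showPreview];             // attaches self.capture.session to the preview layer
[self.capture startRunning];    // begins delivering audio/video sample buffers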