前言
前段時間忙著找工作所以沒有進行更新,現在抽空更新一下吧,目前的進度由于離開了前公司,而現在的公司任務又比較多,所以更新的速度會慢一些吧.目前寫到了RTP分包了,打算春節期間把C語言和TCP傳輸再好好看一下吧.后面還會把ffmpeg+opengl補完的.本文學習自鏈接,大家可以去看一下.
蘋果提供了一個硬編碼的框架VideoToolBox,這個框架iOS8以后開發者可以去使用,這里是VideoToolBox提供的編碼類型:
支持類型
初始化VideoToolBox
這里要提的一點是碼率的設(shè)置,這里給一個公式吧,方便大家查看(原文鏈接),關(guān)于碼率的理解我可以給大家舉一個形象的例子.有錢的人可以過好一點的生活,沒錢的人可以過差一點的生活,但也不至于餓死,碼率大了的話就非常清晰,但同時文件也會比較大,碼率小了的話,圖像有時會糊,但也是勉強能看的,這里盡量給一個合適的碼率吧.
碼率公式,僅供參考
/*
1、-initVideoToolBox中調用VTCompressionSessionCreate創建編碼session,然后調用VTSessionSetProperty設置參數,最后調用VTCompressionSessionPrepareToEncodeFrames開始編碼;
2、開始視頻錄制,獲取到攝像頭的視頻幀,傳入-encode:,調用VTCompressionSessionEncodeFrame傳入需要編碼的視頻幀,如果返回失敗,調用VTCompressionSessionInvalidate銷毀session,然后釋放session;
3、每一幀視頻編碼完成后會調用預先設置的編碼函數didCompressH264,如果是關鍵幀需要用CMSampleBufferGetFormatDescription獲取CMFormatDescriptionRef,然后用
CMVideoFormatDescriptionGetH264ParameterSetAtIndex取得PPS和SPS;
最后把每一幀的所有NALU數據前四個字節變成0x00 00 00 01之后再寫入文件;
4、調用VTCompressionSessionCompleteFrames完成編碼,然后銷毀session:VTCompressionSessionInvalidate,釋放session。
*/
/**
初始化videoToolBox
*/
- (void)initVideoToolBox {
    // Create and configure the H.264 compression session.
    // Runs synchronously on the serial encode queue so the session is fully
    // configured before any frame is submitted.
    dispatch_sync(_encodeQueue, ^{
        _frameID = 0;
        // Fixed encode dimensions; dimensions that are too large can make
        // session creation or encoding fail on some devices.
        int width = 640, height = 480;

        // Create the compression session. didCompressH264 is invoked for
        // every encoded frame; `self` is passed through as the callback's
        // refCon (bridged back to the encoder object in the callback).
        OSStatus status = VTCompressionSessionCreate(NULL,              // allocator: default
                                                     width,
                                                     height,
                                                     kCMVideoCodecType_H264,
                                                     NULL,              // encoderSpecification: let VideoToolbox choose
                                                     NULL,              // sourceImageBufferAttributes
                                                     NULL,              // compressedDataAllocator: default
                                                     didCompressH264,   // output callback
                                                     (__bridge void *)(self),
                                                     &_encodeingSession);
        NSLog(@"H264狀態(tài):VTCompressionSessionCreate %d",(int)status);
        if (status != 0) {
            NSLog(@"H264會話創(chuàng)建失敗");
            return;
        }

        // Real-time encoding output (avoids latency).
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);

        // Keyframe (GOP) interval; too small a GOP can blur the image.
        int frameInterval = 10;
        CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef);
        CFRelease(frameIntervalRef);    // fix: CFNumberCreate follows the Create rule — was leaked

        // Expected frame rate (a hint, not a guarantee).
        int fps = 10;
        CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef);
        CFRelease(fpsRef);              // fix: was leaked

        // Average bit rate in bits per second.
        int bitRate = width * height * 3 * 4 * 8;
        CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef);
        CFRelease(bitRateRef);          // fix: was leaked

        // Hard cap on the data rate. kVTCompressionPropertyKey_DataRateLimits
        // expects a CFArray of alternating [byte count, seconds] pairs;
        // the original passed a bare CFNumber, which the property rejects.
        int bitRateLimit = width * height * 3 * 4;   // bytes per second
        NSArray *dataRateLimits = @[@(bitRateLimit), @1];
        NSLog(@"碼率%@", dataRateLimits);
        VTSessionSetProperty(_encodeingSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)dataRateLimits);

        // Session configured — allow encoding to begin.
        VTCompressionSessionPrepareToEncodeFrames(_encodeingSession);
    });
}
根據代理方法判斷是音頻數據還是視頻數據
// AVCapture delegate: routes each captured sample buffer to the matching
// encoder (VideoToolbox for video, AudioToolbox AAC for audio).
// fix: removed the per-frame NSLog calls — logging on every frame in the
// capture callback is a measurable performance drag at capture frame rate.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{
    if ([self.videoDataOutput isEqual:captureOutput]) {
        // Video frame: the camera delivers an unencoded CMSampleBuffer.
        // Encode synchronously on the serial encode queue so frames keep
        // their capture order. NOTE(review): dispatch_sync blocks the
        // capture callback queue while encoding; upstream frame dropping
        // handles the case where encoding falls behind.
        dispatch_sync(_encodeQueue, ^{
            _isStartHardEncoding = 1;
            [self videoEncode:sampleBuffer];
        });
    } else if ([self.audioDataOutput isEqual:captureOutput]) {
        // Audio frame: hardware-encode PCM -> AAC, then append the encoded
        // packets to the audio capture file.
        dispatch_sync(_encodeQueue, ^{
            [self.aacEncode encodeSampleBuffer:sampleBuffer completionBlock:^(NSData *encodedData, NSError *error) {
                // fix: guard against nil — NSFileHandle raises on nil data,
                // and the encoder reports failures with encodedData == nil.
                if (encodedData) {
                    [_audioFileHandle writeData:encodedData];
                }
            }];
        });
    }
}
編碼完成后的回調(diào)
/**
* h.264硬編碼完成后回調(diào) VTCompressionOutputCallback
* 將硬編碼成功的CMSampleBuffer轉換成H264碼流,通過網絡傳播
* 解析出參數集SPS和PPS,加上開始碼后組裝成NALU。提取出視頻數據,將長度碼轉換成開始碼,組裝成NALU。將NALU發送出去。
*/
//編碼完成后回調(diào)
// VTCompressionOutputCallback: invoked once per encoded frame.
// Converts the AVCC-framed output (4-byte big-endian length prefixes) into
// Annex-B NALUs (start-code framed) and hands them to the encoder object.
// On keyframes, SPS/PPS are extracted from the format description first.
void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer){
    // Encoder reported an error for this frame.
    if (status != 0) {
        return;
    }
    // Sample data not yet ready.
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"didCompressH264 data is not ready ");
        return;
    }
    VideoEncodeVC *encoder = (__bridge VideoEncodeVC *)outputCallbackRefCon;

    // A frame is a keyframe when kCMSampleAttachmentKey_NotSync is absent
    // from its sample attachments.
    // fix: guard the attachments array — it can be NULL/empty, and the
    // original dereferenced index 0 unconditionally.
    bool keyframe = false;
    CFArrayRef attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
    if (attachments != NULL && CFArrayGetCount(attachments) > 0) {
        CFDictionaryRef attachment = CFArrayGetValueAtIndex(attachments, 0);
        keyframe = !CFDictionaryContainsKey(attachment, kCMSampleAttachmentKey_NotSync);
    }

    if (keyframe) {
        // CMVideoFormatDescription: pixel layout, codec, and (for H.264)
        // the parameter sets.
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        // SPS is parameter set index 0.
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        OSStatus statusSPS = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (statusSPS == noErr) {
            // PPS is parameter set index 1.
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            OSStatus statusPPS = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (statusPPS == noErr) {
                NSData *sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
                NSData *pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
                if (encoder) {
                    [encoder gotSPS:sps withPPS:pps];
                }
            }
        }
    }

    // Encoded frame payload, stored as a CMBlockBuffer.
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    // fix: the original dereferenced dataBuffer without a NULL check.
    if (dataBuffer == NULL) {
        return;
    }
    size_t length, totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffSet = 0;
        // Each NALU is prefixed with a 4-byte big-endian length (AVCC
        // framing), not an Annex-B 00 00 00 01 start code.
        static const size_t AVCCHeaderLength = 4;
        // Walk every NALU in the block buffer.
        // fix: the original condition `bufferOffSet < totalLength -
        // AVCCHeaderLength` underflows (size_t) when totalLength < 4;
        // compare with addition instead.
        while (bufferOffSet + AVCCHeaderLength <= totalLength) {
            uint32_t NALUUnitLength = 0;
            // Read the NAL unit length prefix.
            memcpy(&NALUUnitLength, dataPointer + bufferOffSet, AVCCHeaderLength);
            // Convert from big-endian to host byte order.
            NALUUnitLength = CFSwapInt32BigToHost(NALUUnitLength);
            // fix: bounds-check the decoded length so a corrupt prefix
            // cannot make us read past the buffer.
            if (NALUUnitLength > totalLength - AVCCHeaderLength - bufferOffSet) {
                break;
            }
            NSData *data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffSet + AVCCHeaderLength) length:NALUUnitLength];
            [encoder gotEncodedData:data isKeyFrame:keyframe];
            // Advance to the next NAL unit.
            bufferOffSet += AVCCHeaderLength + NALUUnitLength;
        }
    }
}
h264視頻編碼
/**
視頻編碼
@param videoSampleBuffer 攝像頭採集到的未編碼視頻幀(CMSampleBuffer)
*/
// Submits one captured video frame to the VideoToolbox session.
// The encoded result is delivered asynchronously via the didCompressH264
// callback registered at session creation.
- (void)videoEncode:(CMSampleBufferRef)videoSampleBuffer{
    // fix: guard against a NULL session — creation may have failed, or the
    // session may already have been invalidated; encoding into a NULL
    // session crashes.
    if (_encodeingSession == NULL) {
        return;
    }
    // Extract the raw CVPixelBufferRef (pre-encode image data) from the
    // sample buffer.
    CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(videoSampleBuffer);
    // fix: non-video or malformed sample buffers yield NULL here.
    if (imageBuffer == NULL) {
        return;
    }
    // Per-frame presentation timestamp; without one the timeline stretches.
    CMTime presentationTimeStamp = CMTimeMake(_frameID++, 1000);
    VTEncodeInfoFlags flags;
    // Hardware-encode the frame; on success the session's output callback
    // fires with the compressed sample.
    OSStatus statusCode = VTCompressionSessionEncodeFrame(_encodeingSession, imageBuffer, presentationTimeStamp, kCMTimeInvalid, NULL, NULL, &flags);
    if (statusCode != noErr) {
        NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
        // Encoding failed: tear the session down so later calls no-op.
        VTCompressionSessionInvalidate(_encodeingSession);
        CFRelease(_encodeingSession);
        _encodeingSession = NULL;
        return;
    }
}
寫入沙盒
//傳入PPS和SPS,寫入到文件
// Writes the SPS and PPS parameter sets to the capture file, each prefixed
// with the Annex-B 00 00 00 01 start code.
- (void)gotSPS:(NSData *)sps withPPS:(NSData *)pps{
    // fix: guard on the file handle, consistent with gotEncodedData:isKeyFrame:.
    if (_fileHandle != NULL) {
        // Annex-B start code (4 bytes; sizeof includes the trailing NUL).
        const char bytes[] = "\x00\x00\x00\x01";
        size_t length = (sizeof bytes) - 1;
        NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
        [_fileHandle writeData:byteHeader];
        [_fileHandle writeData:sps];
        [_fileHandle writeData:byteHeader];
        [_fileHandle writeData:pps];
    }
}
// Appends one encoded NALU to the capture file, prefixed with the Annex-B
// 00 00 00 01 start code. isKeyFrame is currently unused but kept for the
// callback contract.
- (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame {
    if (_fileHandle != NULL) {
        // Annex-B start code (4 bytes; sizeof includes the trailing NUL).
        const char bytes[] = "\x00\x00\x00\x01";
        size_t length = (sizeof bytes) - 1;   // fix: local was misspelled `lenght`
        NSData *byteHeader = [NSData dataWithBytes:bytes length:length];
        [_fileHandle writeData:byteHeader];
        [_fileHandle writeData:data];
    }
}
結(jié)束編碼
/**
結(jié)束編碼
*/
// Flushes any pending frames, then invalidates and releases the session.
- (void)endVideoToolBox{
    // fix: guard against a NULL session — CFRelease(NULL) crashes, so the
    // original would crash if called twice or before initVideoToolBox.
    if (_encodeingSession == NULL) {
        return;
    }
    // Complete all frames submitted so far before tearing down.
    VTCompressionSessionCompleteFrames(_encodeingSession, kCMTimeInvalid);
    VTCompressionSessionInvalidate(_encodeingSession);
    CFRelease(_encodeingSession);
    _encodeingSession = NULL;
}