使用HTTP-FLV把iPhone攝像頭的畫面進(jìn)行直播,局域網(wǎng)內(nèi)的設(shè)備可以通過VLC進(jìn)行觀看,不通過服務(wù)器,實(shí)現(xiàn)局域網(wǎng)點(diǎn)對(duì)點(diǎn)直播。
實(shí)現(xiàn)步驟
1、采集iPhone攝像頭畫面
2、采集到的數(shù)據(jù)硬編碼成H264數(shù)據(jù)
3、把編碼的數(shù)據(jù)通過FFmpeg封裝成FLV tag
4、搭建HTTP服務(wù)器監(jiān)聽HTTP連接,連接成功之后發(fā)送數(shù)據(jù)
代碼實(shí)現(xiàn)
1、采集iPhone攝像頭畫面
// 1. Capture frames from the iPhone camera at 720p.
_captureSession = [[AVCaptureSession alloc] init];
_captureSession.sessionPreset = AVCaptureSessionPreset1280x720;
_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:_captureDevice error:&error];
// deviceInputWithDevice: returns nil on failure (e.g. camera permission denied);
// also guard with canAddInput: so addInput: cannot raise.
if (_captureDeviceInput && [_captureSession canAddInput:_captureDeviceInput]) {
    [_captureSession addInput:_captureDeviceInput];
}
_captureVideoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// Drop late frames instead of queueing them — required for low-latency live encoding.
[_captureVideoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
// NV12 (bi-planar 4:2:0, video range) matches the input format the hardware
// H.264 encoder expects. Modern dictionary literal instead of initWithObjectsAndKeys:.
NSDictionary *settings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)};
_captureVideoDataOutput.videoSettings = settings;
// Serial queue: sample buffers must be delivered to the delegate in capture order.
dispatch_queue_t queue = dispatch_queue_create("CaptureQueue", DISPATCH_QUEUE_SERIAL);
[_captureVideoDataOutput setSampleBufferDelegate:self queue:queue];
if ([_captureSession canAddOutput:_captureVideoDataOutput]) {
    [_captureSession addOutput:_captureVideoDataOutput];
}
_previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_captureSession];
_previewLayer.frame = CGRectMake(0, 100, self.view.bounds.size.width, self.view.bounds.size.height - 100);
[self.view.layer addSublayer:_previewLayer];
[_captureSession startRunning];
2、采集到的數(shù)據(jù)硬編碼成H264數(shù)據(jù)
// 2. Initialize the VideoToolbox hardware H.264 encoder (1280x720).
OSStatus status = VTCompressionSessionCreate(NULL, 1280, 720, kCMVideoCodecType_H264, NULL, NULL, NULL, didCompressH264, (__bridge void *)self, &_compressionSession);
if (status != noErr) {
    NSLog(@"Create compressionSession error");
    return;
}
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_High_AutoLevel);
// BUG FIX: frame reordering (B-frames) must be DISABLED for this pipeline.
// The muxing code sets pkt.dts = pkt.pts, which is only valid when no B-frames
// are produced; with reordering enabled the timestamps are wrong for players
// (a plausible cause of the ffplay black screen noted at the end of the article).
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
// Keyframe every 30 frames, 30 fps expected, ~800 kbit/s average.
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, (__bridge CFTypeRef)(@(30)));
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)(@(30)));
VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)(@(800 * 1024)));
status = VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
// BUG FIX: removed the VTCompressionSessionCompleteFrames() call that was here.
// CompleteFrames is a flush used during teardown, not setup — and its return
// value was overwriting the PrepareToEncodeFrames status checked below.
if (status != noErr) {
    NSLog(@"Prepare error");
}
// Encode each captured frame with the hardware encoder; results arrive
// asynchronously in the didCompressH264 callback.
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Robustness: a sample buffer without pixel data must not reach the encoder.
    if (imageBuffer == NULL) {
        return;
    }
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
    VTEncodeInfoFlags flags;
    OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, imageBuffer, pts, dur, NULL, NULL, &flags);
    if (status != noErr) {
        NSLog(@"Encode fail");
    }
    // Alternatively, FFmpeg software encoding could be used here: the captured
    // CVImageBufferRef is pixel data (same as CVPixelBufferRef), so the YUV
    // planes can be extracted and fed into FFmpeg for encoding.
}
3、把編碼的數(shù)據(jù)通過FFmpeg封裝成FLV tag
// 3. Mux the encoded H.264 into FLV with FFmpeg, writing into memory via custom I/O.
int ret = avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", NULL);
if (ret < 0) {
    NSLog(@"Could not allocate output format context!");
}
// Custom AVIOContext: write_buffer receives the muxed bytes so they can be sent
// over HTTP instead of being written to a file or a server URL. Release the
// context later with avio_context_free().
unsigned char *outBuffer = (unsigned char *)av_malloc(32768);
AVIOContext *avio_out = avio_alloc_context(outBuffer, 32768, 1, NULL, NULL, write_buffer, NULL);
if (avio_out == NULL) {
    NSLog(@"Could not allocate AVIOContext!");
}
ofmt_ctx->pb = avio_out;
// BUG FIX: OR the flag in; plain assignment wiped any flags FFmpeg had already set.
ofmt_ctx->flags |= AVFMT_FLAG_CUSTOM_IO;
AVCodec *codec = avcodec_find_encoder(AV_CODEC_ID_H264);
out_stream = avformat_new_stream(ofmt_ctx, codec);
codec_ctx = avcodec_alloc_context3(codec);
AVRational dst_fps = {30, 1};
codec_ctx->codec_tag = 0;
codec_ctx->codec_id = AV_CODEC_ID_H264;
codec_ctx->codec_type = AVMEDIA_TYPE_VIDEO;
codec_ctx->width = 1280;
codec_ctx->height = 720;
codec_ctx->gop_size = 12;
codec_ctx->pix_fmt = AV_PIX_FMT_NV12;
codec_ctx->framerate = dst_fps;
codec_ctx->time_base = av_inv_q(dst_fps);  // 1/30, matches the packet pts counter
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER) {
    // FLV carries SPS/PPS in stream-global extradata rather than inline.
    codec_ctx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
AVDictionary *codec_options = NULL;
av_dict_set(&codec_options, "profile", "high", 0);
av_dict_set(&codec_options, "preset", "superfast", 0);
av_dict_set(&codec_options, "tune", "zerolatency", 0);
ret = avcodec_open2(codec_ctx, codec, &codec_options);
if (ret < 0) {
    NSLog(@"Could not open video encoder!");
}
// BUG FIX: copy stream parameters AFTER avcodec_open2 so that
// avcodec_parameters_from_context duplicates the extradata buffer. The original
// called it before opening and then aliased codec_ctx->extradata directly into
// codecpar, so both contexts owned the same pointer — a double free on cleanup.
ret = avcodec_parameters_from_context(out_stream->codecpar, codec_ctx);
if (ret < 0) {
    NSLog(@"Could not initialize stream codec parameters!");
}
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
    NSLog(@"Could not write header!");
}
// Custom-I/O write callback handed to avio_alloc_context.
// During avformat_write_header this receives the FLV file header; during
// av_write_frame it receives FLV tag data. The bytes are forwarded over HTTP
// (this project uses GCDWebServer).
// buf_size is capped at the 32768-byte buffer passed to avio_alloc_context:
// a tag smaller than that arrives whole, a larger one arrives in pieces and
// must be reassembled by the receiver.
static int write_buffer(void * opaque, uint8_t * buf, int buf_size) {
    // BUG FIX: report the number of bytes consumed. Returning 0 makes newer
    // FFmpeg versions treat the write as failed/stalled.
    return buf_size;
}
// VTCompressionSession output callback. VideoToolbox emits AVCC-format H.264:
// each NALU is prefixed with a 4-byte big-endian length. AVCC data cannot be
// played back as-is from a raw stream, so each prefix is rewritten as the
// 0x00000001 Annex B start code before handing the frame to the FLV muxer.
static void didCompressH264(void *outputCallbackRefCon, void *sourceFrameRefCon, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer) {
    if (status != noErr) {
        NSLog(@"Compress H264 failed");
        return;
    }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        return;
    }
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    size_t length, totalLength;
    char *dataPointer;
    const char bytesHeader[] = "\x00\x00\x00\x01";
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet != noErr) {
        return;
    }
    // Keyframe when the NotSync attachment is absent from the first sample's
    // attachment dictionary.
    // BUG FIX: the key must be cast to (const void *); the original (const void)
    // cast does not compile.
    bool keyFrame = !CFDictionaryContainsKey((CFDictionaryRef)(CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0)), (const void *)kCMSampleAttachmentKey_NotSync);
    NSMutableData *pktData = [NSMutableData data];
    if (keyFrame) {
        // Prepend SPS and PPS (each with a start code) ahead of every keyframe.
        // BUG FIX: the original referenced an undeclared variable `sample`.
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);
        if (status == noErr) {
            size_t pparameterSetSize, pparameterSetCount;
            const uint8_t *pparameterSet;
            status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);
            if (status == noErr) {
                // BUG FIX: append directly into pktData. The original declared
                // `size length` (invalid type, and shadowing the outer `length`)
                // and leaked an intermediate malloc'd buffer.
                size_t headerLength = 4;
                [pktData appendBytes:bytesHeader length:headerLength];
                [pktData appendBytes:sparameterSet length:sparameterSetSize];
                [pktData appendBytes:bytesHeader length:headerLength];
                [pktData appendBytes:pparameterSet length:pparameterSetSize];
            }
        }
    }
    // Walk every NALU in the sample, replacing the AVCC length prefix with a
    // start code.
    size_t bufferOffset = 0;
    const int AVCCHeaderLength = 4;
    while (bufferOffset < totalLength - AVCCHeaderLength) {
        // BUG FIX: the original declared `NALUintLength` but then used the
        // undeclared `NALUnitLength`.
        uint32_t NALUnitLength = 0;
        memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
        NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);  // prefix is big-endian
        // BUG FIX: append directly; the original malloc'd a staging buffer per
        // NALU and never freed it.
        [pktData appendBytes:bytesHeader length:AVCCHeaderLength];
        [pktData appendBytes:dataPointer + bufferOffset + AVCCHeaderLength length:NALUnitLength];
        bufferOffset += AVCCHeaderLength + NALUnitLength;
    }
    AVPacket pkt = {0};
    av_init_packet(&pkt);
    pkt.data = (uint8_t *)[pktData bytes];
    pkt.size = (int)[pktData length];
    // pkt_pts is a frame counter starting at 0, in the encoder time base (1/30);
    // av_packet_rescale_ts converts it to the FLV stream time base below.
    pkt.pts = pkt_pts;
    pkt.dts = pkt.pts;  // valid only while B-frame reordering stays disabled
    pkt.flags = keyFrame ? AV_PKT_FLAG_KEY : 0;
    pkt.stream_index = 0;
    av_packet_rescale_ts(&pkt, codec_ctx->time_base, out_stream->time_base);
    av_write_frame(ofmt_ctx, &pkt);
    pkt_pts++;
}
通過VLC可以觀看直播,通過ffplay播放黑屏,原因還未確定。(推測(cè)可能與編碼器開啟了幀重排序(B幀)、而封裝時(shí)直接設(shè)置dts=pts導(dǎo)致時(shí)間戳不正確有關(guān),待驗(yàn)證。)