The previous post used AVFilter to convert the YUV data to RGB before displaying it. This post skips that conversion step and displays the YUV data directly with CoreImage.
Decoding code
Remove the AVFilter-related code from the previous post.
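The method below relies on a handful of instance variables set up elsewhere in the player. For reference, here is a minimal sketch of what those declarations might look like; the names match the code that follows, but these exact declarations are an assumption, not necessarily the original project's:

    // Hypothetical instance variables assumed by decodeVideo:
    @interface ViewController () {
        AVFormatContext *pFormatCtx;          // demuxer context, opened with avformat_open_input
        AVCodecContext *pCodecCtx;            // decoder context for the video stream
        int videoIndex;                       // index of the video stream in pFormatCtx
        float videoDuration;                  // total duration in seconds, used by the slider
        CVPixelBufferPoolRef pixelBufferPool; // created lazily on the first decoded frame
        CIContext *context;                   // created once and reused (see the note in the code)
        NSTimer *timer;                       // fires decodeVideo once per frame interval
    }
    @end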
- (void)decodeVideo {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ // decode on a global queue
        AVPacket *packet = av_packet_alloc();
        if (av_read_frame(self->pFormatCtx, packet) >= 0) {
            if (packet->stream_index == self->videoIndex) { // decode the video stream
                // Since FFmpeg 3.0, avcodec_send_packet and avcodec_receive_frame are used as a pair
                // to decode both audio and video; avcodec_decode_video2 and avcodec_decode_audio4 are deprecated.
                int ret = avcodec_send_packet(self->pCodecCtx, packet);
                if (ret < 0) {
                    NSLog(@"send packet error");
                    av_packet_free(&packet);
                    return;
                }
                AVFrame *frame = av_frame_alloc();
                ret = avcodec_receive_frame(self->pCodecCtx, frame);
                if (ret < 0) { // note: a full player would treat AVERROR(EAGAIN) as "send more packets", not as an error
                    NSLog(@"receive frame error");
                    av_frame_free(&frame);
                    av_packet_free(&packet); // the packet must be freed on this path too
                    return;
                }
                // frame->data holds the decoded YUV planes: data[0] is Y, data[1] is U, data[2] is V;
                // linesize[i] is the number of bytes per row of the corresponding plane.
                float time = packet->pts * av_q2d(self->pFormatCtx->streams[self->videoIndex]->time_base); // presentation time of this frame
                av_packet_free(&packet);
                CVReturn theError;
                if (!self->pixelBufferPool) { // create a pixel buffer pool so pixel buffers can be reused
                    NSMutableDictionary *attributes = [NSMutableDictionary dictionary];
                    // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange is bi-planar 4:2:0 (NV12): Y in plane 0, interleaved CbCr in plane 1
                    [attributes setObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
                    [attributes setObject:[NSNumber numberWithInt:frame->width] forKey:(NSString *)kCVPixelBufferWidthKey];
                    [attributes setObject:[NSNumber numberWithInt:frame->height] forKey:(NSString *)kCVPixelBufferHeightKey];
                    [attributes setObject:@(frame->linesize[0]) forKey:(NSString *)kCVPixelBufferBytesPerRowAlignmentKey];
                    [attributes setObject:[NSDictionary dictionary] forKey:(NSString *)kCVPixelBufferIOSurfacePropertiesKey]; // back the buffers with IOSurface
                    theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef)attributes, &self->pixelBufferPool);
                    if (theError != kCVReturnSuccess) {
                        NSLog(@"CVPixelBufferPoolCreate Failed");
                    }
                }
                CVPixelBufferRef pixelBuffer = nil;
                theError = CVPixelBufferPoolCreatePixelBuffer(NULL, self->pixelBufferPool, &pixelBuffer);
                if (theError != kCVReturnSuccess) {
                    NSLog(@"CVPixelBufferPoolCreatePixelBuffer Failed");
                    av_frame_free(&frame); // bail out instead of dereferencing a nil pixel buffer below
                    return;
                }
                theError = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
                if (theError != kCVReturnSuccess) {
                    NSLog(@"lock error");
                }
                /*
                 In the pixel buffer, Y lives in plane 0 and the chroma in plane 1, laid out as follows:
                 frame->data[0] ......... YYYYYYYYY
                 frame->data[1] ......... UUUUUUUU
                 frame->data[2] ......... VVVVVVVVV
                 PixelBuffer->Plane0 ....... YYYYYYYY
                 PixelBuffer->Plane1 ....... UVUVUVUVUV
                 So Y is copied to plane 0 as-is, and U and V are interleaved into plane 1.
                 Both copies go row by row, because the pixel buffer's bytes-per-row may differ from the
                 frame's linesize (a single memcpy of bytesPerRow * height can read past the end of the
                 frame's planes when the strides differ).
                 */
                size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
                size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
                // copy the Y data into plane 0
                uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
                for (int row = 0; row < frame->height; row++) {
                    memcpy(base + row * bytesPerRowY, frame->data[0] + row * frame->linesize[0], frame->width);
                }
                // interleave U and V into plane 1 (for 4:2:0 each chroma plane is width/2 x height/2)
                base = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
                for (int row = 0; row < frame->height / 2; row++) {
                    uint8_t *u = frame->data[1] + row * frame->linesize[1];
                    uint8_t *v = frame->data[2] + row * frame->linesize[2];
                    uint8_t *uv = base + row * bytesPerRowUV;
                    for (int col = 0; col < frame->width / 2; col++) {
                        uv[2 * col]     = u[col];
                        uv[2 * col + 1] = v[col];
                    }
                }
                CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
                av_frame_free(&frame);
                CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
                // Creating a CIContext is expensive, so self->context is an instance variable
                // initialized once with self->context = [CIContext contextWithOptions:nil];
                CGImageRef videoImage = [self->context createCGImage:coreImage
                                                            fromRect:CGRectMake(0, 0, self->pCodecCtx->width, self->pCodecCtx->height)];
                NSImage *image = [[NSImage alloc] initWithCGImage:videoImage size:NSSizeFromCGSize(CGSizeMake(self->pCodecCtx->width, self->pCodecCtx->height))];
                CVPixelBufferRelease(pixelBuffer);
                CGImageRelease(videoImage);
                dispatch_async(dispatch_get_main_queue(), ^{ // update the UI on the main queue
                    self.label.stringValue = [NSString stringWithFormat:@"%02d:%02d", (int)time / 60, (int)time % 60];
                    self.imageView.image = image;
                    self.slider.floatValue = time / (float)self->videoDuration;
                });
            } else {
                av_packet_free(&packet); // drop packets from other streams (e.g. audio) instead of leaking them
            }
        } else { // av_read_frame failed: end of stream, tear down
            av_packet_free(&packet);
            avcodec_free_context(&self->pCodecCtx);
            avformat_close_input(&self->pFormatCtx); // this also frees the format context, so avformat_free_context is not needed
            dispatch_async(dispatch_get_main_queue(), ^{
                [self->timer invalidate]; // invalidate the timer on the thread whose run loop it was scheduled on
            });
        }
    });
}
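Playback is driven by calling decodeVideo once per frame interval, and the CIContext is created a single time up front. Here is a minimal sketch of that setup, assuming the timer and context ivars above and the block-based NSTimer API (macOS 10.12+); deriving the interval from the stream's avg_frame_rate is an assumption here, and any fixed frame interval works the same way:

    // e.g. in viewDidLoad, after avformat_open_input / avcodec_open2 have succeeded:
    self->context = [CIContext contextWithOptions:nil]; // one shared CIContext for every frame

    AVRational fps = self->pFormatCtx->streams[self->videoIndex]->avg_frame_rate;
    NSTimeInterval interval = (fps.num > 0 && fps.den > 0) ? (double)fps.den / fps.num : 1.0 / 25.0;
    __weak typeof(self) weakSelf = self;
    self->timer = [NSTimer scheduledTimerWithTimeInterval:interval repeats:YES block:^(NSTimer *t) {
        [weakSelf decodeVideo]; // decode and display the next frame
    }];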