有時候想對攝像頭采集的視頻流進行區域裁剪,可以使用libyuv這個庫。原理就是先把NV12轉換為I420,對I420做裁剪,然後再把I420轉換為NV12,NV12再轉換為CVPixelBufferRef,CVPixelBufferRef再轉換為CMSampleBufferRef。
這里有幾個注意點:
1.iOS13使用了64字節對齊,也就是每行步長(bytes per row)是64的倍數;而之前的版本使用的是16字節對齊。
2.使用 libyuv::ConvertToI420 方法時,src_width需要填入步長而不是寬度,因為libyuv內部要根據步長來定位U、V數據;如果填入的是寬度,取值偏移就會出錯,導致轉換後的圖像失真。
3.因為我是直接NV12數(shù)據(jù)轉(zhuǎn)換的瓤介,所以填寫的類型是:libyuv::FOURCC_NV12。應(yīng)該根據(jù)當(dāng)前數(shù)據(jù)的類型選擇對應(yīng)的格式赘那。
4.NV12轉(zhuǎn)換為CVPixelBufferRef時刑桑,填入對應(yīng)的步長:nv12_plane1_stride。
關(guān)于步長解釋:http://www.reibang.com/p/eace8c08b169
一:對NV12裁剪代碼如下:
/// Crops the NV12 image carried by `sampleBufRef` to `screenshotsFrame` and
/// returns a freshly created NV12 CVPixelBufferRef (+1 retained — the caller
/// owns it and must release it, e.g. by passing it to
/// +pixelBufferToSampleBuffer: which consumes it).
///
/// Pipeline: NV12 --(libyuv::ConvertToI420, crop)--> I420
///                --(libyuv::I420ToNV12)--> NV12 --> CVPixelBufferRef.
///
/// @param sampleBufRef     Source sample buffer; its image buffer must be
///                         bi-planar NV12 (Y plane + interleaved UV plane).
/// @param screenshotsFrame Crop rectangle in pixels, relative to the source.
/// @return The cropped pixel buffer, or nil on any failure.
+ (CVPixelBufferRef)convertNV12ToI420Screenshots:(CMSampleBufferRef)sampleBufRef screenshotsFrame:(CGRect)screenshotsFrame {
    int screenshots_x = screenshotsFrame.origin.x;
    int screenshots_y = screenshotsFrame.origin.y;
    int screenshots_width = screenshotsFrame.size.width;
    int screenshots_hight = screenshotsFrame.size.height;
    // 4:2:0 chroma is subsampled 2x2, so the crop origin AND size must both be
    // even; an odd crop_x/crop_y makes libyuv read the interleaved UV pairs
    // misaligned and smears the colors.
    screenshots_x &= ~1;
    screenshots_y &= ~1;
    if (screenshots_width % 2 != 0) {
        screenshots_width++;
    }
    if (screenshots_hight % 2 != 0) {
        screenshots_hight++;
    }
    // CVPixelBufferRef is a typedef of CVImageBufferRef; the two are used interchangeably.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufRef);
    if (!pixelBuffer) {
        return nil;
    }
    // Source dimensions in pixels.
    size_t buffer_width = CVPixelBufferGetWidth(pixelBuffer);
    size_t buffer_height = CVPixelBufferGetHeight(pixelBuffer);
    // Validate the crop rect BEFORE taking the lock so every early return is
    // leak-free (the original returned nil while the base address was locked).
    if (screenshots_x < 0 || screenshots_y < 0 ||
        screenshots_x + screenshots_width > (int)buffer_width ||
        screenshots_y + screenshots_hight > (int)buffer_height) {
        return nil;
    }
    // Lock before touching the plane base addresses.
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *src_y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t plane1_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);  // Y bytes per row
    size_t plane2_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);  // UV bytes per row
    size_t plane1_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);       // Y rows
    size_t plane2_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);       // UV rows
    size_t plane1_size = plane1_stride * plane1_height;
    size_t plane2_size = plane2_stride * plane2_height;
    size_t frame_size = plane1_size + plane2_size;  // total NV12 byte count

    // 1. Crop the NV12 data into a packed I420 buffer.
    // Destination rows follow the platform pixel-buffer row alignment:
    // 64 bytes on iOS 13+, 16 bytes before.
    int stride_length = ([UIDevice currentDevice].systemVersion.floatValue >= 13.0) ? 64 : 16;
    int scale_plane1_stride = (screenshots_width + stride_length - 1) / stride_length * stride_length;
    int scale_plane1_height = screenshots_hight;
    int scale_plane1_size = scale_plane1_stride * scale_plane1_height;
    // I420: U and V planes each use stride/2 and height/2 -> plane1_size/4 bytes.
    int scale_frame_size = scale_plane1_size + scale_plane1_stride * (screenshots_hight / 2);
    uint8_t *scale_buffer = (uint8_t *)malloc(scale_frame_size);
    if (!scale_buffer) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return nil;
    }
    uint8_t *scale_buffer_u = scale_buffer + scale_plane1_size;
    uint8_t *scale_buffer_v = scale_buffer_u + scale_plane1_size / 4;
    // NOTE(review): ConvertToI420 with FOURCC_NV12 assumes the UV plane starts
    // at src + src_width * src_height, i.e. the two planes are contiguous.
    // Camera buffers on iOS normally satisfy this, but CoreVideo does not
    // guarantee it — confirm for your capture configuration.
    libyuv::ConvertToI420(/*src_frame*/ src_y_frame,
                          /*src_size*/ frame_size,
                          /*dst_y*/ scale_buffer,
                          /*dst_stride_y*/ scale_plane1_stride,
                          /*dst_u*/ scale_buffer_u,
                          /*dst_stride_u*/ scale_plane1_stride >> 1,
                          /*dst_v*/ scale_buffer_v,
                          /*dst_stride_v*/ scale_plane1_stride >> 1,
                          /*crop_x*/ screenshots_x,
                          /*crop_y*/ screenshots_y,
                          // src_width is the Y STRIDE, not the pixel width:
                          // libyuv derives the UV plane offset from it, so
                          // passing the width would shift the chroma reads.
                          /*src_width*/ (int)plane1_stride,
                          /*src_height*/ (int)buffer_height,
                          /*crop_width*/ screenshots_width,
                          /*crop_height*/ screenshots_hight,
                          /*rotation*/ libyuv::kRotate0,
                          /*format*/ libyuv::FOURCC_NV12);
    // The source buffer is no longer read past this point.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    // 2. Convert the cropped I420 back to NV12.
    int nv12_plane1_stride = scale_plane1_stride;
    int nv12_width = screenshots_width;
    int nv12_hight = screenshots_hight;
    uint8_t *nv12_dst_y = (uint8_t *)malloc(scale_frame_size);
    if (!nv12_dst_y) {
        free(scale_buffer);
        return nil;
    }
    uint8_t *nv12_dst_uv = nv12_dst_y + nv12_plane1_stride * nv12_hight;
    libyuv::I420ToNV12(/*src_y*/ scale_buffer,
                       /*src_stride_y*/ scale_plane1_stride,
                       /*src_u*/ scale_buffer_u,
                       /*src_stride_u*/ scale_plane1_stride >> 1,
                       /*src_v*/ scale_buffer_v,
                       /*src_stride_v*/ scale_plane1_stride >> 1,
                       /*dst_y*/ nv12_dst_y,
                       /*dst_stride_y*/ nv12_plane1_stride,
                       /*dst_uv*/ nv12_dst_uv,
                       /*dst_stride_uv*/ nv12_plane1_stride,
                       /*width*/ nv12_width,
                       /*height*/ nv12_hight);
    free(scale_buffer);

    // 3. Wrap the NV12 bytes in a new CVPixelBufferRef.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef dstPixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          nv12_width, nv12_hight,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes,
                                          &dstPixelBuffer);
    // Check creation BEFORE locking/copying — the original memcpy'd into a
    // potentially NULL buffer and only logged the error afterwards.
    if (result != kCVReturnSuccess || dstPixelBuffer == NULL) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(nv12_dst_y);
        return nil;
    }
    CVPixelBufferLockBaseAddress(dstPixelBuffer, 0);
    // Copy row by row: the new buffer picks its own bytesPerRow, which is not
    // guaranteed to equal nv12_plane1_stride (a whole-plane memcpy would skew
    // the image whenever the strides differ).
    uint8_t *yDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 0);
    size_t yDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 0);
    for (int row = 0; row < nv12_hight; row++) {
        memcpy(yDstPlane + row * yDstStride,
               nv12_dst_y + row * nv12_plane1_stride,
               nv12_width);
    }
    uint8_t *uvDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 1);
    size_t uvDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 1);
    for (int row = 0; row < nv12_hight / 2; row++) {
        // Interleaved UV: nv12_width bytes per UV row for nv12_width pixels.
        memcpy(uvDstPlane + row * uvDstStride,
               nv12_dst_uv + row * nv12_plane1_stride,
               nv12_width);
    }
    CVPixelBufferUnlockBaseAddress(dstPixelBuffer, 0);
    free(nv12_dst_y);
    return dstPixelBuffer;
}
二:CVPixelBufferRef轉(zhuǎn)換為CMSampleBufferRef:
// NV12數(shù)據(jù)轉(zhuǎn)換為數(shù)據(jù)流
/// Wraps an NV12 CVPixelBufferRef in a CMSampleBufferRef stamped with the
/// current wall-clock time (nanosecond timescale).
///
/// NOTE: this method CONSUMES `pixelBuffer` (it releases it before returning),
/// matching the +1 buffer returned by the crop methods in this file — do not
/// use the pixel buffer after calling this.
///
/// @param pixelBuffer The pixel buffer to wrap; ownership is transferred in.
/// @return A new sample buffer the caller owns, or NULL on failure.
+ (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer {
    // Initialize to NULL so a failed create cannot return garbage stack data.
    CMSampleBufferRef sampleBuffer = NULL;
    CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
    CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
    CMVideoFormatDescriptionRef videoInfo = NULL;
    // Check the format-description result — the original ignored it and could
    // pass a NULL videoInfo into CMSampleBufferCreateForImageBuffer.
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    if (status != noErr || videoInfo == NULL) {
        NSLog(@"Failed to create format description with error %d.", (int)status);
        CVPixelBufferRelease(pixelBuffer);
        return NULL;
    }
    status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    if (status != noErr) {
        NSLog(@"Failed to create sample buffer with error %d.", (int)status);
    }
    // Balance the +1 handed in by the caller; the sample buffer (if created)
    // holds its own reference to the image buffer.
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);
    return sampleBuffer;
}
三:對NV12裁剪代碼2:
其實這個方法更多的是介紹怎麼對I420數據進行裁剪。
我沒有單獨準備I420文件,這裡直接先把NV12轉換為I420,再進行裁剪。
/// Variant of the crop above that demonstrates cropping I420 data: the NV12
/// source is first converted to a packed I420 buffer (libyuv::NV12ToI420),
/// that buffer is cropped with libyuv::ConvertToI420 + FOURCC_I420, converted
/// back to NV12, and wrapped in a new CVPixelBufferRef.
///
/// Returns a +1 retained pixel buffer the caller must release, or nil on failure.
+ (CVPixelBufferRef)convertNV12ToI420ScreenshotsType1:(CMSampleBufferRef)sampleBufRef screenshotsFrame:(CGRect)screenshotsFrame {
    int screenshots_x = screenshotsFrame.origin.x;
    int screenshots_y = screenshotsFrame.origin.y;
    int screenshots_width = screenshotsFrame.size.width;
    int screenshots_hight = screenshotsFrame.size.height;
    // 4:2:0 chroma is subsampled 2x2: the crop origin and size must be even,
    // otherwise the U/V sampling is misaligned.
    screenshots_x &= ~1;
    screenshots_y &= ~1;
    if (screenshots_width % 2 != 0) {
        screenshots_width++;
    }
    if (screenshots_hight % 2 != 0) {
        screenshots_hight++;
    }
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufRef);
    if (!pixelBuffer) {
        return nil;
    }
    // Source dimensions in pixels.
    size_t buffer_width = CVPixelBufferGetWidth(pixelBuffer);
    size_t buffer_height = CVPixelBufferGetHeight(pixelBuffer);
    // Validate the crop rect BEFORE locking so the early return cannot leak
    // the lock (the original returned nil while still holding it).
    if (screenshots_x < 0 || screenshots_y < 0 ||
        screenshots_x + screenshots_width > (int)buffer_width ||
        screenshots_y + screenshots_hight > (int)buffer_height) {
        return nil;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    uint8_t *src_y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    uint8_t *src_uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t plane1_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);  // Y bytes per row
    size_t plane2_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);  // UV bytes per row
    size_t plane1_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    size_t plane2_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
    size_t plane1_size = plane1_stride * plane1_height;
    size_t plane2_size = plane2_stride * plane2_height;
    size_t frame_size = plane1_size + plane2_size;

    // 1. NV12 -> packed I420.
    // The buffer is laid out as [Y | U | V] with Y stride = plane1_stride and
    // U/V strides = plane1_stride / 2, so that ConvertToI420 below can treat
    // it as contiguous I420 whose "src_width" is plane1_stride. (The original
    // used plane2_stride / 2 here, which overflows the U/V slots whenever
    // plane2_stride > plane1_stride and disagrees with the packing offsets.)
    uint8_t *buffer_frame = (uint8_t *)malloc(frame_size);
    if (!buffer_frame) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return nil;
    }
    uint8_t *buffer_u = buffer_frame + plane1_size;
    uint8_t *buffer_v = buffer_u + plane1_size / 4;
    libyuv::NV12ToI420(/*src_y*/ src_y_frame,
                       /*src_stride_y*/ (int)plane1_stride,
                       /*src_uv*/ src_uv_frame,
                       /*src_stride_uv*/ (int)plane2_stride,
                       /*dst_y*/ buffer_frame,
                       /*dst_stride_y*/ (int)plane1_stride,
                       /*dst_u*/ buffer_u,
                       /*dst_stride_u*/ (int)(plane1_stride / 2),
                       /*dst_v*/ buffer_v,
                       /*dst_stride_v*/ (int)(plane1_stride / 2),
                       /*width*/ (int)buffer_width,
                       /*height*/ (int)buffer_height);
    // The source pixel buffer is no longer read past this point.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    // 2. Crop the packed I420. Destination rows follow the platform row
    // alignment: 64 bytes on iOS 13+, 16 bytes before.
    int stride_length = ([UIDevice currentDevice].systemVersion.floatValue >= 13.0) ? 64 : 16;
    int scale_plane1_stride = (screenshots_width + stride_length - 1) / stride_length * stride_length;
    int scale_plane1_size = scale_plane1_stride * screenshots_hight;
    int scale_frame_size = scale_plane1_size + scale_plane1_stride * (screenshots_hight / 2);
    uint8_t *scale_buffer = (uint8_t *)malloc(scale_frame_size);
    if (!scale_buffer) {
        free(buffer_frame);
        return nil;
    }
    uint8_t *scale_buffer_u = scale_buffer + scale_plane1_size;
    uint8_t *scale_buffer_v = scale_buffer_u + scale_plane1_size / 4;
    libyuv::ConvertToI420(/*src_frame*/ buffer_frame,
                          /*src_size*/ frame_size,
                          /*dst_y*/ scale_buffer,
                          /*dst_stride_y*/ scale_plane1_stride,
                          /*dst_u*/ scale_buffer_u,
                          /*dst_stride_u*/ scale_plane1_stride >> 1,
                          /*dst_v*/ scale_buffer_v,
                          /*dst_stride_v*/ scale_plane1_stride >> 1,
                          /*crop_x*/ screenshots_x,
                          /*crop_y*/ screenshots_y,
                          // Y STRIDE, not pixel width — libyuv locates the
                          // U/V planes of packed I420 from this value.
                          /*src_width*/ (int)plane1_stride,
                          /*src_height*/ (int)buffer_height,
                          /*crop_width*/ screenshots_width,
                          /*crop_height*/ screenshots_hight,
                          /*rotation*/ libyuv::kRotate0,
                          /*format*/ libyuv::FOURCC_I420);
    free(buffer_frame);

    // 3. Cropped I420 -> NV12.
    int nv12_plane1_stride = scale_plane1_stride;
    int nv12_width = screenshots_width;
    int nv12_hight = screenshots_hight;
    uint8_t *nv12_dst_y = (uint8_t *)malloc(scale_frame_size);
    if (!nv12_dst_y) {
        free(scale_buffer);
        return nil;
    }
    uint8_t *nv12_dst_uv = nv12_dst_y + nv12_plane1_stride * nv12_hight;
    libyuv::I420ToNV12(/*src_y*/ scale_buffer,
                       /*src_stride_y*/ scale_plane1_stride,
                       /*src_u*/ scale_buffer_u,
                       /*src_stride_u*/ scale_plane1_stride >> 1,
                       /*src_v*/ scale_buffer_v,
                       /*src_stride_v*/ scale_plane1_stride >> 1,
                       /*dst_y*/ nv12_dst_y,
                       /*dst_stride_y*/ nv12_plane1_stride,
                       /*dst_uv*/ nv12_dst_uv,
                       /*dst_stride_uv*/ nv12_plane1_stride,
                       /*width*/ nv12_width,
                       /*height*/ nv12_hight);
    free(scale_buffer);

    // 4. Wrap the NV12 bytes in a new CVPixelBufferRef.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef dstPixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          nv12_width, nv12_hight,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes,
                                          &dstPixelBuffer);
    // Check creation BEFORE locking/copying — the original memcpy'd into a
    // potentially NULL buffer and only logged the error afterwards.
    if (result != kCVReturnSuccess || dstPixelBuffer == NULL) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(nv12_dst_y);
        return nil;
    }
    CVPixelBufferLockBaseAddress(dstPixelBuffer, 0);
    // Copy row by row: the new buffer picks its own bytesPerRow, which is not
    // guaranteed to equal nv12_plane1_stride.
    uint8_t *yDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 0);
    size_t yDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 0);
    for (int row = 0; row < nv12_hight; row++) {
        memcpy(yDstPlane + row * yDstStride,
               nv12_dst_y + row * nv12_plane1_stride,
               nv12_width);
    }
    uint8_t *uvDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 1);
    size_t uvDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 1);
    for (int row = 0; row < nv12_hight / 2; row++) {
        // Interleaved UV: nv12_width bytes per UV row covers nv12_width pixels.
        memcpy(uvDstPlane + row * uvDstStride,
               nv12_dst_uv + row * nv12_plane1_stride,
               nv12_width);
    }
    CVPixelBufferUnlockBaseAddress(dstPixelBuffer, 0);
    free(nv12_dst_y);
    return dstPixelBuffer;
}