/// Copies the NV12 (YUV 4:2:0 bi-planar) pixel data out of a sample buffer into
/// one contiguous heap buffer: the full Y plane first, followed by the
/// interleaved UV plane (y_size + y_size/2 bytes total).
///
/// @param videoSample A sample buffer whose image buffer is NV12
///                    (kCVPixelFormatType_420YpCbCr8BiPlanar*). Must not be NULL.
/// @return A malloc'd buffer of pixelWidth * pixelHeight * 3 / 2 bytes that the
///         CALLER OWNS and must free(), or NULL on failure.
///
/// NOTE(review): "Smaple" in the selector is a typo for "Sample"; kept as-is so
/// existing callers keep compiling.
- (uint8_t *)convertVideoSmapleBufferToYuvData:(CMSampleBufferRef)videoSample {
    // The image buffer holds the NV12 plane pointers; it can be NULL if the
    // sample buffer carries no video frame.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(videoSample);
    if (pixelBuffer == NULL) {
        return NULL;
    }

    // We only read pixel data; the read-only flag lets CoreVideo skip any
    // write-back work on unlock. Lock/unlock flags must match.
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    // Image dimensions in pixels.
    size_t pixelWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t pixelHeight = CVPixelBufferGetHeight(pixelBuffer);
    // Byte counts of the tightly-packed output: Y is one byte per pixel,
    // interleaved UV is half that (4:2:0 subsampling).
    size_t y_size = pixelWidth * pixelHeight;
    size_t uv_size = y_size / 2;

    uint8_t *yuv_frame = malloc(y_size + uv_size);
    if (yuv_frame == NULL) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
        return NULL;
    }

    // Y plane (plane 0): copy row by row. CoreVideo may pad each row, so
    // bytes-per-row can exceed pixelWidth; a single flat memcpy would smear
    // the padding into the output and skew the image.
    uint8_t *y_frame = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    for (size_t row = 0; row < pixelHeight; row++) {
        memcpy(yuv_frame + row * pixelWidth, y_frame + row * y_stride, pixelWidth);
    }

    // UV plane (plane 1): pixelHeight/2 rows of interleaved Cb/Cr, each
    // pixelWidth bytes of payload, again honoring the plane's own stride.
    uint8_t *uv_frame = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    size_t uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    for (size_t row = 0; row < pixelHeight / 2; row++) {
        memcpy(yuv_frame + y_size + row * pixelWidth, uv_frame + row * uv_stride, pixelWidth);
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return yuv_frame;
}