在iOS 11中，蘋果新增了一個(gè)用於描述景深數(shù)據(jù)的類：AVDepthData。AVDepthData內(nèi)部提供了一系列屬性和方法，用來(lái)獲取景深的CVPixelBuffer、景深數(shù)據(jù)類型等；AVDepthData在iOS、macOS等平臺(tái)上都用來(lái)描述景深數(shù)據(jù)，且屬於AVFoundation框架。
1.景深攝像頭
// Create the AVCaptureSession that will own the depth-capable camera input.
captureSession = AVCaptureSession()
// Configure the AVCaptureDeviceInput.
// Front: .builtInTrueDepthCamera; rear: .builtInDualCamera / .builtInDualWideCamera.
// NOTE(review): Apple's TrueDepth samples request the device with mediaType
// `.video`; `.depthData` is a valid AVMediaType but confirm it returns a
// device on target hardware.
guard let videoDevice = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .depthData, position: .front),
      let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
      captureSession.canAddInput(videoInput) else {
    print("Failed to set up video input")
    return
}
// BUG FIX: the original only *checked* canAddInput but never attached the
// input, so the session would never deliver any frames.
captureSession.addInput(videoInput)
2. AVCaptureDepthDataOutput使用
// Create the depth data output and attach it to the capture session.
let depthDataOutput = AVCaptureDepthDataOutput()
// Deliver depth callbacks on the main queue — simple for a demo, but prefer a
// dedicated serial queue for any real per-frame processing.
depthDataOutput.setDelegate(self, callbackQueue: DispatchQueue.main)
if captureSession.canAddOutput(depthDataOutput) {
captureSession.addOutput(depthDataOutput)
}
// MARK: - AVCaptureDepthDataOutputDelegate
/// Called once for every depth frame the output produces.
func depthDataOutput(_ output: AVCaptureDepthDataOutput, didOutput depthData: AVDepthData, timestamp: CMTime, connection: AVCaptureConnection) {
// Depth data as a CVPixelBuffer (per-pixel depth/disparity values).
let depthDataMap = depthData.depthDataMap
}
注意點(diǎn)：AVCaptureDepthDataOutput和AVCaptureVideoDataOutput的幀率是不一致的——深度輸出的幀率通常低於視頻輸出，兩者的回調(diào)無(wú)法一一對(duì)應(yīng)，因此需要額外的同步處理（見下一節(jié)）。
3.怎么保持同步
// Pair the video and depth outputs so the delegate receives matched frames;
// AVCaptureDataOutputSynchronizer compensates for their differing frame rates.
dataOutputSynchronizer = AVCaptureDataOutputSynchronizer(dataOutputs: [videoOutput,depthDataOutput])
// NOTE(review): main-queue delivery is fine for a demo; use a dedicated
// serial queue for real processing workloads.
dataOutputSynchronizer.setDelegate(self, queue: DispatchQueue.main)
// MARK: - AVCaptureDataOutputSynchronizerDelegate
/// Receives a collection of time-matched data from the synchronized outputs,
/// extracts the depth frame, and renders it into `imageView`.
func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer, didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
    // Default to "dropped": if either output has no entry in this collection,
    // the pair is incomplete and the frame is skipped below.
    var depthDataWasDropped = true
    var sampleBufferWasDropped = true
    var depthData: AVDepthData?
    var videoData: CMSampleBuffer?

    if let depthEntry = synchronizedDataCollection.synchronizedData(for: self.depthDataOutput) as? AVCaptureSynchronizedDepthData {
        depthData = depthEntry.depthData
        depthDataWasDropped = depthEntry.depthDataWasDropped
    }
    if let videoEntry = synchronizedDataCollection.synchronizedData(for: self.videoOutput) as? AVCaptureSynchronizedSampleBufferData {
        videoData = videoEntry.sampleBuffer
        sampleBufferWasDropped = videoEntry.sampleBufferWasDropped
    }
    // Skip any frame where either stream dropped data.
    if depthDataWasDropped || sampleBufferWasDropped {
        return
    }
    // Require both a depth frame and a valid video image buffer.
    guard let depthData = depthData,
          let videoData = videoData,
          CMSampleBufferGetImageBuffer(videoData) != nil else { return }

    // Depth map as a CVPixelBuffer (disparity/depth Float16 or Float32).
    let depthDataCIImage = CIImage(cvPixelBuffer: depthData.depthDataMap)
    // FIX: removed the unused `originalCIImage` built from the video pixel
    // buffer — it was never read and only produced a compiler warning.

    // UIKit must be touched on the main thread.
    DispatchQueue.main.async {
        self.imageView.image = UIImage(ciImage: depthDataCIImage)
    }
}