This article draws on 落影's "iOS Development - OpenGL ES Practical Tutorial (Part 1)".
Demo for this article
In the previous article we learned how to render camera-captured video frames with OpenGL ES: the CMSampleBuffer produced by the camera is converted into a CVOpenGLESTexture, which is then rendered. As shown below:
(Figure: CMSampleBuffer to texture)
In this article we will learn to render YUV video frames with OpenGL ES. The principle is the same as rendering the camera feed; the crux is how the video frames are obtained, and I will use two different approaches to get them. Since there is no recording callback here, we can drive frame fetching with a timer instead; here that timer is a CADisplayLink:
// Create a display link that fires on each screen refresh.
displayLink = CADisplayLink(target: self, selector: #selector(displayLinkDidUpdate(_:)))
displayLink.add(to: RunLoop.current, forMode: RunLoop.Mode.default)
displayLink.preferredFramesPerSecond = 30
// Stay paused until the video output reports that media data is available.
displayLink.isPaused = true
1. Obtaining frames with AVPlayerItemVideoOutput:
let videoURL = URL(fileURLWithPath: Bundle.main.path(forResource: "test.mov", ofType: nil)!)
self.reader = DDAssetReader(videoURL) // the reader used by approach 2 below
let item = AVPlayerItem(url: videoURL)
player = AVPlayer(playerItem: item)
let asset: AVAsset = item.asset
asset.loadValuesAsynchronously(forKeys: ["tracks"]) {
    if asset.statusOfValue(forKey: "tracks", error: nil) == AVKeyValueStatus.loaded {
        let tracks = asset.tracks(withMediaType: AVMediaType.video)
        if tracks.count > 0 {
            // Choose the first video track.
            let videoTrack: AVAssetTrack = tracks.first!
            videoTrack.loadValuesAsynchronously(forKeys: ["preferredTransform"]) {
                if videoTrack.statusOfValue(forKey: "preferredTransform", error: nil) == AVKeyValueStatus.loaded {
                    let preferredTransform: CGAffineTransform = videoTrack.preferredTransform
                    // The rotation (in radians) needed to display the track upright.
                    let preferredRotation = -1 * atan2(preferredTransform.b, preferredTransform.a)
                    NSLog("preferredRotation ----> \(preferredRotation)")
                    DispatchQueue.main.async {
                        // Attach the video output, then start playback.
                        item.add(self.videoOutput)
                        self.player.replaceCurrentItem(with: item)
                        self.videoOutput.requestNotificationOfMediaDataChange(withAdvanceInterval: 0.03)
                        self.player.play()
                    }
                }
            }
        }
    }
}
player.actionAtItemEnd = AVPlayer.ActionAtItemEnd.none
// Loop playback: seek back to the start when the item finishes.
NotificationCenter.default.addObserver(forName: NSNotification.Name.AVPlayerItemDidPlayToEndTime, object: item, queue: OperationQueue.main) { noti in
    self.player.currentItem?.seek(to: CMTime.zero, completionHandler: { suc in
    })
}
mProcessQueue = DispatchQueue(label: "mProcessQueue")
// Request biplanar YUV (NV12) buffers rather than kCVPixelFormatType_32BGRA.
videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange])
videoOutput.setDelegate(self, queue: mProcessQueue)
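Because displayLink starts out paused, something has to resume it once frames actually become available. The call to requestNotificationOfMediaDataChange(withAdvanceInterval:) above triggers the AVPlayerItemOutputPullDelegate callback for exactly this purpose. A minimal sketch, assuming the surrounding class is the view controller that owns displayLink (ViewController is a placeholder name, not from the Demo):

extension ViewController: AVPlayerItemOutputPullDelegate {
    // Delivered on mProcessQueue, so hop to the main thread before
    // touching the display link.
    func outputMediaDataWillChange(_ sender: AVPlayerItemOutput) {
        DispatchQueue.main.async {
            self.displayLink.isPaused = false
        }
    }
}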
Then we fetch video frames in the CADisplayLink callback:
@objc func displayLinkDidUpdate(_ sender: CADisplayLink) {
    // Calculate the next vsync time, which is when the screen will be refreshed next.
    let nextVSync: CFTimeInterval = sender.timestamp + sender.duration
    let outputItemTime: CMTime = videoOutput.itemTime(forHostTime: nextVSync)
    if videoOutput.hasNewPixelBuffer(forItemTime: outputItemTime) {
        let pixelBuffer = videoOutput.copyPixelBuffer(forItemTime: outputItemTime, itemTimeForDisplay: nil)
        self.renderView.renderBuffer(pixelBuffer: pixelBuffer)
    }
}
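renderBuffer(pixelBuffer:) is where the NV12 buffer becomes OpenGL ES textures. The render view's internals are not shown in this article, but the plane-to-texture step looks roughly like the sketch below, assuming an ES2 context and an existing CVOpenGLESTextureCache created with CVOpenGLESTextureCacheCreate (yuvTextures and cache are illustrative names):

import OpenGLES
import CoreVideo

// Map the two planes of an NV12 CVPixelBuffer to GL_TEXTURE_2D textures.
func yuvTextures(from pixelBuffer: CVPixelBuffer,
                 cache: CVOpenGLESTextureCache) -> (luma: CVOpenGLESTexture, chroma: CVOpenGLESTexture)? {
    var lumaTexture: CVOpenGLESTexture?
    // Plane 0: Y (luminance), one byte per pixel.
    let lumaStatus = CVOpenGLESTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, cache, pixelBuffer, nil,
        GLenum(GL_TEXTURE_2D), GL_LUMINANCE,
        GLsizei(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0)),
        GLsizei(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)),
        GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &lumaTexture)

    var chromaTexture: CVOpenGLESTexture?
    // Plane 1: interleaved CbCr at half resolution.
    let chromaStatus = CVOpenGLESTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, cache, pixelBuffer, nil,
        GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA,
        GLsizei(CVPixelBufferGetWidthOfPlane(pixelBuffer, 1)),
        GLsizei(CVPixelBufferGetHeightOfPlane(pixelBuffer, 1)),
        GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chromaTexture)

    guard lumaStatus == kCVReturnSuccess, chromaStatus == kCVReturnSuccess,
          let luma = lumaTexture, let chroma = chromaTexture else { return nil }
    return (luma, chroma)
}

The fragment shader then samples both textures and converts YUV to RGB, just as when rendering the camera feed in the previous article.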
2. Obtaining frames with AVAssetReader + AVAssetReaderTrackOutput:
class DDAssetReader: NSObject {
    var readerVideoTrackOutput: AVAssetReaderTrackOutput!
    var assetReader: AVAssetReader!
    var videoUrl: URL!
    var lock: NSLock!

    init(_ url: URL) {
        super.init()
        videoUrl = url
        lock = NSLock()
        customInit()
    }

    func customInit() {
        let inputOptions = [AVURLAssetPreferPreciseDurationAndTimingKey : true]
        let inputAsset = AVURLAsset(url: videoUrl, options: inputOptions)
        inputAsset.loadValuesAsynchronously(forKeys: ["tracks"]) {
            DispatchQueue.global().async {
                var error: NSError?
                let tracksStatus = inputAsset.statusOfValue(forKey: "tracks", error: &error)
                if (tracksStatus != AVKeyValueStatus.loaded) {
                    NSLog("error = \(error!)")
                    return
                }
                self.processWithAsset(inputAsset)
            }
        }
    }

    func processWithAsset(_ asset: AVAsset) {
        lock.lock()
        NSLog("processWithAsset")
        assetReader = try? AVAssetReader(asset: asset)
        // Ask for biplanar YUV (NV12) sample buffers, matching approach 1.
        let outputSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
        readerVideoTrackOutput = AVAssetReaderTrackOutput(track: asset.tracks(withMediaType: AVMediaType.video).first!, outputSettings: outputSettings)
        readerVideoTrackOutput.alwaysCopiesSampleData = false
        assetReader.add(readerVideoTrackOutput)
        if (assetReader.startReading() == false) {
            NSLog("Error reading from file at URL: %@", asset)
        }
        lock.unlock()
    }

    func readBuffer() -> CMSampleBuffer? {
        lock.lock()
        var sampleBuffer: CMSampleBuffer?
        if readerVideoTrackOutput != nil {
            sampleBuffer = readerVideoTrackOutput.copyNextSampleBuffer()
        }
        // When the reader reaches the end of the file, tear it down and
        // rebuild it so the video loops.
        if assetReader != nil && assetReader.status == AVAssetReader.Status.completed {
            NSLog("customInit")
            readerVideoTrackOutput = nil
            assetReader = nil
            customInit()
        }
        lock.unlock()
        return sampleBuffer
    }
}
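To drive approach 2, the display link callback pulls from the reader instead of the video output. A minimal sketch, assuming the reader created earlier (the selector name is illustrative); CMSampleBufferGetImageBuffer unwraps the pixel buffer that renderBuffer(pixelBuffer:) expects:

@objc func displayLinkDidUpdateWithReader(_ sender: CADisplayLink) {
    // Pull the next decoded sample; readBuffer() restarts the reader at EOF,
    // so the video loops automatically.
    guard let sampleBuffer = reader.readBuffer(),
          let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
        return
    }
    renderView.renderBuffer(pixelBuffer: pixelBuffer)
}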