下面是我寫的播放器
支持VR、全景、視頻縮放、本地與網(wǎng)絡(luò)視頻播放,實時獲取視頻幀,獲取播放時間,獲取緩存時間,播放,暫停
要想完成一個VR播放器,需要完成兩個功能
1、寫一個可以實時獲取視頻幀的播放器
2、寫一個可以渲染每一幀圖片為全景圖片的view
SCN3DVideoAdatper 視頻播放器
用于解碼視頻的每一幀圖片
使用的是<AVFoundation/AVFoundation.h>框架
下面是一些相關(guān)的方法
//
// SCN3DVideoAdatper.h
// SCN3DPlayer
//
// Created by 俞濤濤 on 16/11/11.
// Copyright © 2016年 俞濤濤. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>  // UIImage is used in the delegate frame callback
#import <AVFoundation/AVFoundation.h>
/////////////////////////////////////////////////////////////////////////////////////////////////////////
@class SCN3DVideoAdatper;

@protocol SCN3DVideoAdatperDelegate <NSObject>
@optional

/**
 The player is ready to start playback.
 Called first among the SCN3DVideoAdatperDelegate callbacks.

 @param videoAdatper The SCN3DVideoAdatper instance.
 */
- (void)videoPlayerIsReadyToPlayVideo:(SCN3DVideoAdatper *)videoAdatper;

/**
 Playback reached the end of the video.

 @param videoAdatper The SCN3DVideoAdatper instance.
 */
- (void)videoPlayerDidReachEnd:(SCN3DVideoAdatper *)videoAdatper;

/**
 Periodic playback-time callback.
 Reports the current playback position while the video is playing.

 @param videoAdatper The SCN3DVideoAdatper instance.
 @param cmTime      Current playback time as a CMTime.
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper timeDidChange:(CMTime)cmTime;

/**
 Buffering progress callback.
 Reports how much of the video has been loaded so far.
 (NOTE(review): the name says "percentage" in the original text, but the
 implementation forwards loaded seconds, not a percentage — confirm.)

 @param videoAdatper The SCN3DVideoAdatper instance.
 @param duration    Loaded duration value (float).
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper loadedTimeRangeDidChange:(float)duration;

/**
 Playback-failure callback.

 @param videoAdatper The SCN3DVideoAdatper instance.
 @param error       The NSError describing the failure.
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper didFailWithError:(NSError *)error;

/**
 Per-frame callback.
 While the video is playing, delivers every decoded video frame as a UIImage
 (driven by a CADisplayLink pulling from an AVPlayerItemVideoOutput).

 @param videoAdatper The SCN3DVideoAdatper instance.
 @param videoImage  The current video frame as a UIImage.
 */
- (void)videoPlayer:(SCN3DVideoAdatper *)videoAdatper displaylinkCallbackImage:(UIImage *)videoImage;
@end
/////////////////////////////////////////////////////////////////////////////////////////////////////////
@interface SCN3DVideoAdatper : NSObject

/// Receiver of readiness, end-of-playback, time, buffering, error and
/// per-frame callbacks. Held weakly to avoid a retain cycle.
@property (nonatomic, weak) id<SCN3DVideoAdatperDelegate> delegate;
/// Underlying AVFoundation player.
@property (nonatomic, strong, readonly) AVPlayer *player;
/// The item currently loaded into the player.
@property (nonatomic, strong, readonly) AVPlayerItem *playerItem;
/// Video output used to pull pixel buffers for per-frame rendering.
@property (nonatomic, strong, readonly) AVPlayerItemVideoOutput *output;
/// YES while the player is actively playing.
@property (nonatomic, assign, getter=isPlaying, readonly) BOOL playing;
/// When YES, playback restarts from the beginning on reaching the end.
@property (nonatomic, assign, getter=isLooping) BOOL looping;
/// Mutes/unmutes the audio track.
@property (nonatomic, assign, getter=isMuted) BOOL muted;

#pragma mark - Content setup
/// Load a video from a local or remote URL.
- (void)setURL:(NSURL *)URL;
/// Load a prebuilt AVPlayerItem.
- (void)setPlayerItem:(AVPlayerItem *)playerItem;
/// Load an AVAsset.
- (void)setAsset:(AVAsset *)asset;

#pragma mark - Playback control
/// Start playback.
- (void)play;
/// Pause playback.
- (void)pause;
/// Reset the player back to its initial state.
- (void)reset;

/**
 Seek to the given position and continue playing from there.

 @param time       Target position, in seconds.
 @param completion Invoked when the seek finishes.
 */
- (void)seekToTime:(float)time completion:(void (^)(void))completion;

#pragma mark - Volume
/// Set the playback volume directly (0.0 – 1.0); cancels any running fade.
- (void)setVolume:(float)volume;
/// Gradually raise the volume to 1.0.
- (void)fadeInVolume;
/// Gradually lower the volume to 0.0.
- (void)fadeOutVolume;

#pragma mark - Frame extraction
/// Start the CADisplayLink that delivers each video frame via the delegate.
- (void)addDisplaylink;
/// Stop and tear down the CADisplayLink.
- (void)removeDisplaylink;
@end
下面講一下主要方法實現(xiàn)
添加移除播放器監(jiān)聽
/////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark - Player Observers
/////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Begins KVO on the player's `rate`, which tracks play/pause transitions.
/// The Initial option fires the observer once immediately on registration.
- (void)addPlayerObservers {
    NSString *rateKeyPath = NSStringFromSelector(@selector(rate));
    NSKeyValueObservingOptions observingOptions =
        NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew;
    [self.player addObserver:self
                  forKeyPath:rateKeyPath
                     options:observingOptions
                     context:VideoPlayer_PlayerRateChangedContext];
}
/// Ends KVO on the player's `rate`. KVO removal raises an NSException when
/// the observer was never registered, so removal is wrapped defensively and
/// any exception is only logged.
- (void)removePlayerObservers {
    NSString *rateKeyPath = NSStringFromSelector(@selector(rate));
    @try {
        [self.player removeObserver:self
                         forKeyPath:rateKeyPath
                            context:VideoPlayer_PlayerRateChangedContext];
    } @catch (NSException *exception) {
        NSLog(@"Exception removing observer: %@", exception);
    }
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark - PlayerItem Observers
/////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Registers KVO on the item's `status` (ready/failed) and
/// `loadedTimeRanges` (buffering progress), plus the end-of-playback
/// notification for this specific item.
- (void)addPlayerItemObservers:(AVPlayerItem *)playerItem {
    NSString *statusKeyPath = NSStringFromSelector(@selector(status));
    NSString *loadedRangesKeyPath = NSStringFromSelector(@selector(loadedTimeRanges));

    [playerItem addObserver:self
                 forKeyPath:statusKeyPath
                    options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionOld | NSKeyValueObservingOptionNew
                    context:VideoPlayer_PlayerItemStatusContext];

    [playerItem addObserver:self
                 forKeyPath:loadedRangesKeyPath
                    options:NSKeyValueObservingOptionInitial | NSKeyValueObservingOptionNew
                    context:VideoPlayer_PlayerItemLoadedTimeRangesContext];

    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(playerItemDidPlayToEndTime:)
                                                 name:AVPlayerItemDidPlayToEndTimeNotification
                                               object:playerItem];
}
/// Tears down everything addPlayerItemObservers: registered: cancels any
/// in-flight seeks, removes both KVO observations (each guarded, since KVO
/// removal throws if the observer is not registered), and unsubscribes from
/// the end-of-playback notification.
- (void)removePlayerItemObservers:(AVPlayerItem *)playerItem {
    [playerItem cancelPendingSeeks];

    // Same key paths / contexts, removed in the same order as registration.
    NSString *keyPaths[2] = {
        NSStringFromSelector(@selector(status)),
        NSStringFromSelector(@selector(loadedTimeRanges)),
    };
    void *contexts[2] = {
        VideoPlayer_PlayerItemStatusContext,
        VideoPlayer_PlayerItemLoadedTimeRangesContext,
    };
    for (int i = 0; i < 2; i++) {
        @try {
            [playerItem removeObserver:self forKeyPath:keyPaths[i] context:contexts[i]];
        } @catch (NSException *exception) {
            NSLog(@"Exception removing observer: %@", exception);
        }
    }

    [[NSNotificationCenter defaultCenter] removeObserver:self
                                                    name:AVPlayerItemDidPlayToEndTimeNotification
                                                  object:playerItem];
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
#pragma mark - Time Observer
/////////////////////////////////////////////////////////////////////////////////////////////////////////
/// Installs a periodic time observer on the player that forwards the current
/// playback time to the delegate on the main queue. No-op when an observer
/// token already exists or there is no player yet.
- (void)addTimeObserver {
    if (self.timeObserverToken != nil || self.player == nil) {
        return;
    }

    CMTime interval = CMTimeMakeWithSeconds(TimeObserverInterval, NSEC_PER_SEC);
    __weak typeof(self) weakSelf = self;
    self.timeObserverToken =
        [self.player addPeriodicTimeObserverForInterval:interval
                                                  queue:dispatch_get_main_queue()
                                             usingBlock:^(CMTime time) {
        // weak/strong dance: the block must not retain self past dealloc.
        __strong typeof(weakSelf) strongSelf = weakSelf;
        if (strongSelf == nil) {
            return;
        }
        if ([strongSelf.delegate respondsToSelector:@selector(videoPlayer:timeDidChange:)]) {
            [strongSelf.delegate videoPlayer:strongSelf timeDidChange:time];
        }
    }];
}
判斷視頻是否播放結(jié)束和獲取視頻緩存時間
// Returns YES when playback has reached the end of the current item.
// NOTE(review): this getter overrides the `_isAtEndTime` ivar with a computed
// fallback — the mixed ivar/computed logic is fragile and worth revisiting.
- (BOOL)isAtEndTime {
    if (self.player && self.player.currentItem) {
        // If the flag was set explicitly (e.g. by the did-play-to-end
        // notification handler — TODO confirm), trust it first.
        if (_isAtEndTime) {
            return _isAtEndTime;
        }
        float currentTime = 0.0f;
        if (CMTIME_IS_INVALID(self.player.currentTime) == NO) {
            currentTime = CMTimeGetSeconds(self.player.currentTime);
        }
        float videoDuration = 0.0f;
        if (CMTIME_IS_INVALID(self.player.currentItem.duration) == NO) {
            videoDuration = CMTimeGetSeconds(self.player.currentItem.duration);
        }
        // Consider playback "at end" when the position is within 10 ms of the
        // duration; both values must be known (> 0) for the comparison.
        if (currentTime > 0.0f && videoDuration > 0.0f) {
            if (fabs(currentTime - videoDuration) <= 0.01f) {
                return YES;
            }
        }
    }
    return NO;
}
//視頻緩存時間
/// Computes how many seconds of the current item are buffered, measured as
/// the end of the first loaded time range (start + duration). Returns 0 when
/// there is no player, no item, or nothing buffered yet.
- (float)calcLoadedDuration {
    AVPlayerItem *currentItem = self.player.currentItem;
    if (self.player == nil || currentItem == nil) {
        return 0.0f;
    }

    NSValue *firstRangeValue = currentItem.loadedTimeRanges.firstObject;
    if (firstRangeValue == nil) {
        return 0.0f;
    }

    CMTimeRange loadedRange = [firstRangeValue CMTimeRangeValue];
    float startSeconds = CMTimeGetSeconds(loadedRange.start);
    float durationSeconds = CMTimeGetSeconds(loadedRange.duration);
    return startSeconds + durationSeconds;
}
實時獲取視頻每一幀
/// Starts the CADisplayLink that drives per-frame extraction.
- (void)addDisplaylink {
    // Guard against double-add: the original overwrote self.displayLink
    // without invalidating the previous link, so the old CADisplayLink kept
    // retaining its target and kept firing — duplicate callbacks and a leak.
    if (self.displayLink) {
        return;
    }
    self.displayLink = [CADisplayLink displayLinkWithTarget:self
                                                   selector:@selector(displayLinkCallback:)];
    // CommonModes keeps frames flowing during scroll/gesture tracking.
    [self.displayLink addToRunLoop:[NSRunLoop mainRunLoop]
                           forMode:NSRunLoopCommonModes];
}
/// Stops per-frame extraction and releases the display link.
- (void)removeDisplaylink {
    if (self.displayLink) {
        // -invalidate removes the link from all run loops/modes and releases
        // its target; -removeFromRunLoop:forMode: only detaches the one mode
        // and would leave the link retaining self if registered elsewhere.
        [self.displayLink invalidate];
        self.displayLink = nil;
    }
}
/// CADisplayLink tick: pulls the newest pixel buffer from the video output,
/// converts it to a UIImage, and hands it to the delegate. The autorelease
/// pool bounds per-frame temporaries so memory stays flat during playback.
- (void)displayLinkCallback:(CADisplayLink *)sender {
    @autoreleasepool {
        CMTime itemTime = [self.output itemTimeForHostTime:CACurrentMediaTime()];
        if (![self.output hasNewPixelBufferForItemTime:itemTime]) {
            return;
        }

        CVPixelBufferRef pixelBuffer =
            [self.output copyPixelBufferForItemTime:itemTime itemTimeForDisplay:NULL];
        if (pixelBuffer == NULL) {
            return;
        }

        UIImage *frameImage = [self pixelBufferToImage:pixelBuffer];
        if ([self.delegate respondsToSelector:@selector(videoPlayer:displaylinkCallbackImage:)]) {
            [self.delegate videoPlayer:self displaylinkCallbackImage:frameImage];
        }
        // Balance the +1 retain from the copy... method (Create Rule).
        CFRelease(pixelBuffer);
    }
}
/// Converts a CVPixelBuffer video frame into a UIImage via Core Image.
/// Returns nil if CGImage creation fails.
- (UIImage *)pixelBufferToImage:(CVPixelBufferRef)bufferRef {
    // This runs once per display-link tick. CIContext creation is expensive
    // (Apple recommends reusing contexts), so the original's per-frame
    // [CIContext contextWithOptions:nil] is replaced with a cached instance.
    static CIContext *sharedContext = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        sharedContext = [CIContext contextWithOptions:nil];
    });

    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:bufferRef];
    CGFloat videoWidth = CVPixelBufferGetWidth(bufferRef);
    CGFloat videoHeight = CVPixelBufferGetHeight(bufferRef);
    CGImageRef cgImage =
        [sharedContext createCGImage:ciImage
                            fromRect:CGRectMake(0, 0, videoWidth, videoHeight)];
    if (cgImage == NULL) {
        return nil;
    }
    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);  // "create" API — caller owns and must release
    return image;
}
音量相關(guān)設(shè)置
/// Sets the player volume immediately, cancelling any fade that is running
/// so a scheduled fade step cannot overwrite the value just set.
- (void)setVolume:(float)volume {
    [self cancelFadeVolume];
    self.player.volume = volume;
}
/// Cancels any pending fade-in/fade-out steps that were scheduled with
/// -performSelector:withObject:afterDelay:.
- (void)cancelFadeVolume {
    [NSObject cancelPreviousPerformRequestsWithTarget:self
                                             selector:@selector(fadeInVolume)
                                               object:nil];
    [NSObject cancelPreviousPerformRequestsWithTarget:self
                                             selector:@selector(fadeOutVolume)
                                               object:nil];
}
/// Ramps the volume up to 1.0 in ten steps, rescheduling itself via
/// -performSelector:afterDelay: until the target is reached.
- (void)fadeInVolume {
    if (!self.player) {
        return;
    }
    [self cancelFadeVolume];

    float currentVolume = self.player.volume;
    if (currentVolume >= 1.0f - 0.01f) {
        // Close enough — snap to full volume and stop rescheduling.
        self.player.volume = 1.0f;
        return;
    }
    self.player.volume = currentVolume + 1.0f / 10.0f;
    [self performSelector:@selector(fadeInVolume)
               withObject:nil
               afterDelay:DefaultVolumeFadeDuration / 10.0f];
}
/// Ramps the volume down to 0.0 in ten steps, rescheduling itself via
/// -performSelector:afterDelay: until silence is reached.
- (void)fadeOutVolume {
    if (!self.player) {
        return;
    }
    [self cancelFadeVolume];

    float currentVolume = self.player.volume;
    if (currentVolume <= 0.01f) {
        // Close enough — snap to silent and stop rescheduling.
        self.player.volume = 0.0f;
        return;
    }
    self.player.volume = currentVolume - 1.0f / 10.0f;
    [self performSelector:@selector(fadeOutVolume)
               withObject:nil
               afterDelay:DefaultVolumeFadeDuration / 10.0f];
}
源代碼下載
如果喜歡的話,就點個贊,star一下;本文裡面有誤的地方,請大家指教。