最近在重構(gòu)自己寫的代碼,項(xiàng)目中需要將相冊中的視頻上傳到云服務(wù)器。做個(gè)總結(jié)。
使用UIImagePickerController獲取相冊的視頻,研究發(fā)現(xiàn),獲取后的視頻是經(jīng)過壓縮的,經(jīng)測試一個(gè)3M的視頻壓縮后會(huì)變成1.3M。如果你們服務(wù)器需要的正是經(jīng)過壓縮的,那么恭喜你,不用走那么多彎路了。我這邊恰好需要的是不經(jīng)過壓縮的視頻,然后網(wǎng)上找了很多方法,發(fā)現(xiàn)獲取相冊中的視頻是必須經(jīng)過壓縮的(如果你有獲取相冊原生視頻的方法請?jiān)谙路搅粞愿嬖V我,萬分感謝!)。既然不讓獲取原生的,那么只能退而求其次,設(shè)置 ipc.videoQuality = UIImagePickerControllerQualityTypeHigh;,即把視頻的導(dǎo)出質(zhì)量設(shè)置為高質(zhì)量(這個(gè)方法有個(gè)問題:如果視頻的分辨率小于手機(jī)的分辨率,視頻會(huì)變大,一般3M會(huì)變成10.3M,有多大自己體會(huì)~)。
// Configure a photo-library picker restricted to movie media.
// Fixed: the picker was previously alloc/init'ed twice in a row.
UIImagePickerController *ipc = [[UIImagePickerController alloc] init];
ipc.delegate = self;
ipc.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
// NOTE: videoQuality only affects camera capture, not library picks.
ipc.videoQuality = UIImagePickerControllerQualityTypeMedium;
ipc.mediaTypes = @[@"public.movie"]; // kUTTypeMovie
一般呢,我們會(huì)在didFinishPickingMediaWithInfo代理方法中獲取視頻路徑。
這個(gè)時(shí)候我們得到的是臨時(shí)路徑,不能直接作為視頻路徑上傳。我在模擬器下截取了這個(gè)臨時(shí)路徑,如圖:
我們需要把原視頻導(dǎo)出到自己的APP內(nèi),而后根據(jù)這個(gè)路徑進(jìn)行上傳即可。
導(dǎo)出的方法也有兩種:
一是根據(jù)路徑直接拷貝,二是進(jìn)行視頻導(dǎo)出,第二種方式可以對視頻進(jìn)行進(jìn)一步壓縮,可以根據(jù)自己的項(xiàng)目需求選擇。
代碼如下(ViewController頁面全部代碼):
#import "ViewController.h"
#import <AVFoundation/AVFoundation.h>

/// Picks a video from the photo library, exports it into the app's Documents
/// directory (640x480, MP4 when supported), then shows its first frame and
/// duration so the exported file can be uploaded from a stable path.
@interface ViewController () <UIImagePickerControllerDelegate, UINavigationControllerDelegate, UITextViewDelegate>

@property (weak, nonatomic) IBOutlet UIImageView *videoImageView;  // shows the first frame of the exported video
@property (weak, nonatomic) IBOutlet UILabel *videoMessageLabel;   // shows the video duration
@property (nonatomic, assign) BOOL isImagePicker;                  // YES while the picker is presented
@property (nonatomic, copy) NSString *filePath;                    // exported video path; copy, not strong: NSString has a mutable subclass
@property (nonatomic, copy) NSString *imagePath;                   // thumbnail path; removed before a re-export
@property (nonatomic, copy) NSString *pingUploadUrlString;         // upload URL — NOTE(review): not read in this file, confirm callers
@property (nonatomic, assign) NSInteger timeSecond;                // duration of the picked video, in seconds

@end

@implementation ViewController

#pragma mark - Lifecycle

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Log the sandbox root so the exported file is easy to find while debugging.
    NSLog(@"%@", NSHomeDirectory());
}

#pragma mark - Actions

/// Presents a photo-library picker restricted to movies.
- (IBAction)getVideo:(UIButton *)sender
{
    if ([UIImagePickerController isSourceTypeAvailable:UIImagePickerControllerSourceTypePhotoLibrary])
    {
        // Fixed: the picker was previously alloc/init'ed twice in a row.
        UIImagePickerController *ipc = [[UIImagePickerController alloc] init];
        ipc.delegate = self;
        ipc.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
        // videoQuality only affects camera capture, not library picks; kept for parity.
        ipc.videoQuality = UIImagePickerControllerQualityTypeMedium;
        ipc.mediaTypes = @[@"public.movie"]; // kUTTypeMovie
        [self presentViewController:ipc animated:YES completion:nil];
        _isImagePicker = YES;
    }
}

#pragma mark - UIImagePickerControllerDelegate

/// The picker hands back a TEMPORARY, already-compressed copy of the video;
/// it must be exported (or copied) into the sandbox before uploading.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    NSString *mediaType = info[UIImagePickerControllerMediaType];
    if ([mediaType isEqualToString:@"public.movie"])
    {
        // Fixed: this URL was previously also assigned to an NSString-typed
        // variable; UIImagePickerControllerMediaURL is an NSURL.
        NSURL *videoUrl = info[UIImagePickerControllerMediaURL];
        AVURLAsset *asset = [AVURLAsset assetWithURL:videoUrl];
        NSLog(@"相冊視頻路徑是:%@", videoUrl);

        // Method 1 (kept for reference): copy the temp file directly —
        //   build a dated name under Documents, then
        //   [[NSFileManager defaultManager] copyItemAtPath:videoUrl.path toPath:_filePath error:&error];
        //   and call -getSomeMessageWithFilePath: with the new path.

        // Method 2: export via AVAssetExportSession (allows further compression).
        __weak typeof(self) weakSelf = self; // don't retain self in the async completion
        [self startExportVideoWithVideoAsset:asset completion:^(NSString *outputPath) {
            // Use the path handed to the block rather than re-reading the ivar.
            [weakSelf getSomeMessageWithFilePath:outputPath];
        }];
    }
    _isImagePicker = NO;
    [picker dismissViewControllerAnimated:YES completion:nil];
}

#pragma mark - Video info

/// Reads the duration of the exported file, renders its first frame into the
/// image view, and shows the duration in the label.
- (void)getSomeMessageWithFilePath:(NSString *)filePath
{
    NSURL *fileUrl = [NSURL fileURLWithPath:filePath];
    AVURLAsset *asset = [AVURLAsset assetWithURL:fileUrl];
    // Round the duration up to whole seconds for display.
    NSString *duration = [NSString stringWithFormat:@"%0.0f", ceil(CMTimeGetSeconds(asset.duration))];
    _videoImageView.image = [self getImageWithAsset:asset];
    _timeSecond = duration.integerValue;
    _videoMessageLabel.text = [NSString stringWithFormat:@"時(shí)長是:%ld", (long)_timeSecond];
    NSLog(@"時(shí)長是:%@", duration);
}

/// Returns the frame at t = 0 of `asset`, or nil if generation fails.
- (UIImage *)getImageWithAsset:(AVAsset *)asset
{
    NSParameterAssert(asset);
    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    generator.appliesPreferredTrackTransform = YES; // honor rotation metadata
    generator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;
    NSError *generationError = nil;
    CGImageRef frameRef = [generator copyCGImageAtTime:CMTimeMake(0, 60) actualTime:NULL error:&generationError];
    if (!frameRef)
    {
        NSLog(@"thumbnailImageGenerationError %@", generationError);
        return nil;
    }
    UIImage *frame = [[UIImage alloc] initWithCGImage:frameRef];
    CGImageRelease(frameRef); // fixed leak: "copy" methods return +1 even under ARC
    return frame;
}

#pragma mark - Export

/// Exports `videoAsset` into Documents at 640x480 (MP4 when supported) and
/// invokes `completion` on the main queue with the output path on success.
- (void)startExportVideoWithVideoAsset:(AVURLAsset *)videoAsset completion:(void (^)(NSString *outputPath))completion
{
    // Determine the best preset this asset supports. The export below still
    // uses 640x480 on purpose (smaller uploads); raise it here if your server
    // can take larger files.
    NSArray *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:videoAsset];
    NSString *bestPreset = nil;
    if ([presets containsObject:AVAssetExportPreset3840x2160])
    {
        bestPreset = AVAssetExportPreset3840x2160;
    }
    else if ([presets containsObject:AVAssetExportPreset1920x1080])
    {
        bestPreset = AVAssetExportPreset1920x1080;
    }
    else if ([presets containsObject:AVAssetExportPreset1280x720])
    {
        bestPreset = AVAssetExportPreset1280x720;
    }
    else if ([presets containsObject:AVAssetExportPreset960x540])
    {
        bestPreset = AVAssetExportPreset960x540; // fixed: previously assigned the 1280x720 preset here
    }
    else
    {
        bestPreset = AVAssetExportPreset640x480;
    }
    NSLog(@"best compatible preset: %@", bestPreset);

    if (![presets containsObject:AVAssetExportPreset640x480])
    {
        return; // asset cannot be exported at the resolution we use
    }

    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:videoAsset presetName:AVAssetExportPreset640x480];

    // Date-stamped file name. NOTE(review): the colons from "HH:mm:ss" are
    // legal inside the sandbox but awkward for some tools — consider "HH-mm-ss".
    NSDateFormatter *formater = [[NSDateFormatter alloc] init];
    [formater setDateFormat:@"yy-MM-dd-HH:mm:ss"];
    NSString *outputPath = [NSHomeDirectory() stringByAppendingFormat:@"/Documents/%@", [[formater stringFromDate:[NSDate date]] stringByAppendingString:@".mov"]];
    NSLog(@"video outputPath = %@", outputPath);

    // Drop artifacts of a previous pick so repeated selection doesn't pile up files.
    _timeSecond = 0;
    [[NSFileManager defaultManager] removeItemAtPath:_filePath error:nil];
    [[NSFileManager defaultManager] removeItemAtPath:_imagePath error:nil];
    _filePath = outputPath;

    session.outputURL = [NSURL fileURLWithPath:outputPath];
    session.shouldOptimizeForNetworkUse = YES; // place the moov atom up front for streaming

    NSArray *supportedTypeArray = session.supportedFileTypes;
    if ([supportedTypeArray containsObject:AVFileTypeMPEG4])
    {
        session.outputFileType = AVFileTypeMPEG4;
    }
    else if (supportedTypeArray.count == 0)
    {
        NSLog(@"No supported file types 視頻類型暫不支持導(dǎo)出");
        return;
    }
    else
    {
        session.outputFileType = supportedTypeArray.firstObject;
    }

    NSString *documentsPath = [NSHomeDirectory() stringByAppendingFormat:@"/Documents"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:documentsPath])
    {
        [fileManager createDirectoryAtPath:documentsPath withIntermediateDirectories:YES attributes:nil error:nil];
    }
    if ([fileManager fileExistsAtPath:outputPath])
    {
        // AVAssetExportSession fails if the output file already exists.
        [fileManager removeItemAtPath:outputPath error:nil];
    }

    // The handler captures `session` strongly on purpose: it keeps the session
    // alive until the export finishes, and AVFoundation releases the block
    // afterwards, so there is no lasting retain cycle.
    [session exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (session.status) {
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"AVAssetExportSessionStatusUnknown"); break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"AVAssetExportSessionStatusWaiting"); break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"AVAssetExportSessionStatusExporting"); break;
            case AVAssetExportSessionStatusCompleted: {
                NSLog(@"AVAssetExportSessionStatusCompleted");
                // Completion handlers run on an arbitrary queue; hop to main for UI work.
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (completion) {
                        completion(outputPath);
                    }
                });
            } break;
            case AVAssetExportSessionStatusFailed:
                // Surface the underlying error instead of just the status name.
                NSLog(@"AVAssetExportSessionStatusFailed: %@", session.error); break;
            default: break;
        }
    }];
}

@end
獲取視頻第一幀圖片的原理是通過視頻路徑獲取視頻的AVURLAsset,通過對AVURLAsset進(jìn)行處理可以獲得視頻的一些信息,例如時(shí)長、第一幀圖片等。上面代碼里面有,不再贅述。
如果你有更好的獲取相冊視頻的方法,請?jiān)谙路搅粞愿嬷?
DEMO地址