前段時間在做音頻的相關(guān)技術(shù)，涉及到iOS錄音轉(zhuǎn)碼的一些知識，在這里記錄下。
一咆瘟、使用iOS自帶的AVAudioRecorder錄音時錄音的格式可以是.caf,wav榜聂,但有時候可能需要需要把這些格式轉(zhuǎn)成其他的格式搞疗,比如從caf轉(zhuǎn)為mp3,從wav轉(zhuǎn)mp3,m4a轉(zhuǎn)wav再轉(zhuǎn)mp3(因為有時候在對音頻做合并和剪切的時候生成的音頻格式是m4a的)這里就說一說音頻格式的集中轉(zhuǎn)碼操作。
caf音頻格式轉(zhuǎn)mp3
caf轉(zhuǎn)mp3音頻格式需要用到一個C語言的第三方庫lame，這個庫可以把caf和wav成功轉(zhuǎn)為mp3格式（其他格式的沒有測試）。只需設置一些參數(shù)就可以了，詳見代碼：
/// Converts a PCM audio file (caf/wav) at `originalPath` to MP3 at `outPath`
/// using the LAME encoder, then invokes `successBlock` with the output path.
/// NOTE(review): the callback fires even when conversion fails, mirroring the
/// original @finally behavior — callers should verify the output file exists.
/// (The misspelled selector `convenrt...` is kept: it is the public interface.)
+ (void)convenrtToMp3WithResult:(NSString *)originalPath outPath:(NSString *)outPath success:(BaseIdBlock)successBlock {
    // Start from a clean slate; a "file not found" error here is harmless.
    [[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];

    // fileSystemRepresentation handles non-ASCII paths, unlike the previous
    // cStringUsingEncoding:1 (NSASCIIStringEncoding).
    FILE *pcm = fopen([originalPath fileSystemRepresentation], "rb");              // source file
    FILE *mp3 = pcm ? fopen([outPath fileSystemRepresentation], "wb") : NULL;     // destination file
    if (!pcm || !mp3) {
        // BUG FIX: the original called fseek/fclose on NULL FILE pointers when
        // fopen failed, which crashes — @try/@catch does not catch C-level
        // faults, so the exception handler gave no protection at all.
        if (pcm) fclose(pcm);
        if (mp3) fclose(mp3);
        if (successBlock) successBlock(outPath); // keep the always-call-back contract
        return;
    }
    fseek(pcm, 4 * 1024, SEEK_CUR); // skip the (approximate, 4 KB) caf/wav header

    const int PCM_SIZE = 8192;
    const int MP3_SIZE = 8192;
    short int pcm_buffer[PCM_SIZE * 2];  // interleaved L/R samples
    unsigned char mp3_buffer[MP3_SIZE];

    lame_t lame = lame_init();
    lame_set_num_channels(lame, 2);      // 1 = mono; defaults to 2 (stereo)
    lame_set_in_samplerate(lame, 44100); // must match the recorder's sample rate
    lame_set_brate(lame, 8);
    lame_set_mode(lame, 3);              // NOTE(review): 3 = mono, which conflicts with 2 channels + interleaved encode — confirm
    lame_set_VBR(lame, vbr_default);
    lame_set_quality(lame, 2);           // 2 = high, 5 = medium, 7 = low
    lame_init_params(lame);

    int read, write;
    do {
        read = (int)fread(pcm_buffer, 2 * sizeof(short int), PCM_SIZE, pcm);
        if (read == 0)
            write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE); // drain the encoder at EOF
        else
            write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);
        fwrite(mp3_buffer, write, 1, mp3);
    } while (read != 0);

    lame_close(lame);
    fclose(mp3);
    fclose(pcm);

    if (successBlock) successBlock(outPath);
}
m4a格式轉(zhuǎn)wav格式
因為在對音頻做合并或者裁切的時候生成的音頻格式是m4a的，但是m4a轉(zhuǎn)成mp3會損壞音頻格式，所以我當時采用先把m4a轉(zhuǎn)為wav，再用wav轉(zhuǎn)成mp3。以下粘出代碼：
/// Re-encodes the m4a file at `originalPath` as 16-bit interleaved linear PCM
/// (44.1 kHz stereo) at `outPath` using AVAssetReader/AVAssetWriter. On
/// completion the source file is deleted and `block` is called with `outPath`.
/// NOTE(review): the writer uses AVFileTypeCoreAudioFormat (.caf container),
/// not a literal RIFF/WAV file — confirm downstream consumers accept this.
+ (void)convertM4aToWav:(NSString *)originalPath outPath:(NSString *)outPath success:(BaseIdBlock)block {
    if ([FileUitl isExist:outPath]) {
        [FileUitl removeFile:outPath];
    }
    NSURL *originalUrl = [NSURL fileURLWithPath:originalPath];
    NSURL *outPutUrl = [NSURL fileURLWithPath:outPath];

    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:originalUrl options:nil]; // source asset
    NSError *error = nil;
    AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:songAsset error:&error];
    if (!assetReader) { // check the return value, not the error pointer
        NSLog(@"error: %@", error);
        return;
    }
    AVAssetReaderOutput *assetReaderOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:songAsset.tracks audioSettings:nil];
    if (![assetReader canAddOutput:assetReaderOutput]) {
        NSLog(@"can't add reader output... die!");
        return;
    }
    [assetReader addOutput:assetReaderOutput];

    AVAssetWriter *assetWriter = [AVAssetWriter assetWriterWithURL:outPutUrl fileType:AVFileTypeCoreAudioFormat error:&error];
    if (!assetWriter) {
        NSLog(@"error: %@", error);
        return;
    }
    AudioChannelLayout channelLayout;
    memset(&channelLayout, 0, sizeof(AudioChannelLayout));
    channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo;
    // Output format: 44.1 kHz, stereo, 16-bit signed integer, interleaved, little-endian PCM.
    NSDictionary *outputSettings = @{
        AVFormatIDKey : @(kAudioFormatLinearPCM),
        AVSampleRateKey : @44100.0f,
        AVNumberOfChannelsKey : @2,
        AVChannelLayoutKey : [NSData dataWithBytes:&channelLayout length:sizeof(AudioChannelLayout)],
        AVLinearPCMBitDepthKey : @16,
        AVLinearPCMIsNonInterleaved : @NO,
        AVLinearPCMIsFloatKey : @NO,
        AVLinearPCMIsBigEndianKey : @NO,
    };
    AVAssetWriterInput *assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:outputSettings];
    if ([assetWriter canAddInput:assetWriterInput]) {
        [assetWriter addInput:assetWriterInput];
    } else {
        NSLog(@"can't add asset writer input... die!");
        return;
    }
    assetWriterInput.expectsMediaDataInRealTime = NO; // offline transcode: pull data as fast as possible

    [assetWriter startWriting];
    [assetReader startReading];

    AVAssetTrack *soundTrack = [songAsset.tracks objectAtIndex:0];
    CMTime startTime = CMTimeMake(0, soundTrack.naturalTimeScale);
    [assetWriter startSessionAtSourceTime:startTime];

    __block UInt64 convertedByteCount = 0;
    dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
    [assetWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
        while (assetWriterInput.readyForMoreMediaData) {
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            if (nextBuffer) {
                [assetWriterInput appendSampleBuffer:nextBuffer];
                convertedByteCount += CMSampleBufferGetTotalSampleSize(nextBuffer);
                // BUG FIX: copyNextSampleBuffer returns a +1 CF object; the
                // original never released it and leaked every sample buffer.
                CFRelease(nextBuffer);
            } else {
                // Reader is exhausted (no cancelReading needed — it has
                // already finished on its own).
                [assetWriterInput markAsFinished];
                // BUG FIX: the original invoked `block` immediately after
                // *scheduling* finishWriting, before the output file was fully
                // written to disk. All post-processing now runs inside the
                // completion handler.
                [assetWriter finishWritingWithCompletionHandler:^{
                    NSDictionary *outputFileAttributes = [[NSFileManager defaultManager] attributesOfItemAtPath:[outPutUrl path] error:nil];
                    NSLog(@"FlyElephant %lld", [outputFileAttributes fileSize]);
                    if ([FileUitl isExist:originalPath]) {
                        [FileUitl removeFile:originalPath]; // intermediate m4a no longer needed
                    }
                    block(outPath);
                }];
                break;
            }
        }
    }];
}
簡單來說就是用AVAssetReader和AVAssetWriter：AVAssetReader用于從AVAsset資源讀取媒體樣本，AVAssetWriter用于對媒體樣本進行編碼并寫入到新的文件中。然后wav轉(zhuǎn)mp3就按照第一步那樣做就可以了。
音頻合并
音頻合并和裁切用的是AVFoundation框架下一個多媒體的載體類：AVAsset。它提供了一系列的接口來處理多媒體，只需要我們寫很少的代碼就能對音頻、視頻做處理。下面貼出代碼：
/// Concatenates two audio files (`audio1` followed by `audio2`), exports the
/// result as an intermediate m4a, converts it to wav at `outPath`, and calls
/// `block` with the wav path. On export success both source files are deleted.
+ (void)jointAudioPath:(NSString *)audio1 withPath:(NSString *)audio2 outPath:(NSString *)outPath success:(BaseIdBlock)block
{
    // BUG FIX: the original loaded audio2 into audioAsset1 and audio1 into
    // audioAsset2, so the *second* file played first — contradicting the
    // "append the second segment after the first" comment below.
    AVURLAsset *audioAsset1 = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audio1]];
    AVURLAsset *audioAsset2 = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:audio2]];
    AVMutableComposition *composition = [AVMutableComposition composition];
    // One mutable composition track per source file (0 == kCMPersistentTrackID_Invalid: let AVFoundation pick IDs).
    AVMutableCompositionTrack *audioTrack1 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVMutableCompositionTrack *audioTrack2 = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source audio tracks.
    AVAssetTrack *audioAssetTrack1 = [[audioAsset1 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    AVAssetTrack *audioAssetTrack2 = [[audioAsset2 tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Insert the first segment at time zero. (Errors were silently discarded
    // with error:nil in the original; now at least logged.)
    NSError *insertError = nil;
    if (![audioTrack1 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset1.duration) ofTrack:audioAssetTrack1 atTime:kCMTimeZero error:&insertError]) {
        NSLog(@"insert track 1 failed: %@", insertError);
    }
    // Append the second segment right after the first: atTime = duration of segment #1.
    if (![audioTrack2 insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAsset2.duration) ofTrack:audioAssetTrack2 atTime:audioAsset1.duration error:&insertError]) {
        NSLog(@"insert track 2 failed: %@", insertError);
    }
    // `presetName` must correspond to `session.outputFileType` below.
    AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetAppleM4A];
    NSString *outPutFilePath = [self m4aRecordPath]; // intermediate m4a location
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPutFilePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outPutFilePath error:nil];
    }
    NSLog(@"---%@", [session supportedFileTypes]); // debug: file types this session supports
    session.outputURL = [NSURL fileURLWithPath:outPutFilePath];
    session.outputFileType = AVFileTypeAppleM4A; // matches the preset above
    session.shouldOptimizeForNetworkUse = YES;
    WeakSelf(self); // NOTE(review): `self` is a class object in a class method, so this weakening is not strictly needed
    [session exportAsynchronouslyWithCompletionHandler:^{
        if (session.status == AVAssetExportSessionStatusCompleted) {
            NSLog(@"合并成功----%@", outPutFilePath);
            if ([FileUitl isExist:audio1]) {
                [FileUitl removeFile:audio1];
                [FileUitl removeFile:audio2];
            }
            // Convert the merged m4a to wav, then hand the wav path to the caller.
            [weakself convertM4aToWav:outPutFilePath outPath:outPath success:^(id parameter) {
                block(parameter);
            }];
        } else if (session.status == AVAssetExportSessionStatusFailed) {
            // NOTE(review): failure path never calls `block` (unchanged from
            // the original) — callers waiting on the callback will hang.
            NSLog(@"合并失敗！");
        }
    }];
}
音頻裁剪
/// Trims the audio file at `path` to the range [source, end] (seconds),
/// exporting an intermediate m4a clip, then converts it to wav and calls
/// `block` with the wav path.
+ (void)cutAudioStartTime:(CGFloat)source endTime:(CGFloat)end withPath:(NSString *)path withBlock:(BaseIdBlock)block {
    NSString *m4aOutPath = [AudioFileManager m4aRecordName:kAuditionRecord];
    NSString *wavOutPath = [AudioFileManager wavRecordName:kAuditionRecord];
    AVURLAsset *audioAsset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:path]];
    // AVAssetExportPresetAppleM4A: audio-only output in .m4a format.
    AVAssetExportSession *exportSession = [AVAssetExportSession exportSessionWithAsset:audioAsset presetName:AVAssetExportPresetAppleM4A];
    exportSession.outputURL = [NSURL fileURLWithPath:m4aOutPath];
    exportSession.outputFileType = AVFileTypeAppleM4A;
    // BUG FIX: CMTimeMake(source, 1) silently truncated fractional seconds
    // (its value parameter is int64_t). CMTimeMakeWithSeconds keeps sub-second
    // precision; 600 is the conventional Apple media timescale.
    exportSession.timeRange = CMTimeRangeFromTimeToTime(CMTimeMakeWithSeconds(source, 600),
                                                        CMTimeMakeWithSeconds(end, 600));
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (AVAssetExportSessionStatusCompleted == exportSession.status) {
            // Second stage: m4a -> wav, then report the wav path to the caller.
            [self convertM4aToWav:m4aOutPath outPath:wavOutPath success:^(id parameter) {
                block(parameter);
            }];
        } else if (AVAssetExportSessionStatusFailed == exportSession.status) {
            NSLog(@"剪切失敗！"); // fixed mojibake in the original log string
        } else {
            NSLog(@"Export Session Status: %ld", (long)exportSession.status);
        }
    }];
}