Commit f016bd75 authored by lmj_521aiau@163.com

Audio unit

parent 5aa6add5
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1160"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "A95CDF6324E0E8B50066DAE6"
BuildableName = "&#x901f;&#x8bb0;&#x5927;&#x5e08;.app"
BlueprintName = "ShorthandMaster"
ReferencedContainer = "container:ShorthandMaster.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
<TestableReference
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "A95CDF7924E0E8B80066DAE6"
BuildableName = "ShorthandMasterTests.xctest"
BlueprintName = "ShorthandMasterTests"
ReferencedContainer = "container:ShorthandMaster.xcodeproj">
</BuildableReference>
</TestableReference>
<TestableReference
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "A95CDF8424E0E8B80066DAE6"
BuildableName = "ShorthandMasterUITests.xctest"
BlueprintName = "ShorthandMasterUITests"
ReferencedContainer = "container:ShorthandMaster.xcodeproj">
</BuildableReference>
</TestableReference>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "A95CDF6324E0E8B50066DAE6"
BuildableName = "&#x901f;&#x8bb0;&#x5927;&#x5e08;.app"
BlueprintName = "ShorthandMaster"
ReferencedContainer = "container:ShorthandMaster.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "A95CDF6324E0E8B50066DAE6"
BuildableName = "&#x901f;&#x8bb0;&#x5927;&#x5e08;.app"
BlueprintName = "ShorthandMaster"
ReferencedContainer = "container:ShorthandMaster.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
@@ -10,5 +10,23 @@
<integer>8</integer>
</dict>
</dict>
<key>SuppressBuildableAutocreation</key>
<dict>
<key>A95CDF6324E0E8B50066DAE6</key>
<dict>
<key>primary</key>
<true/>
</dict>
<key>A95CDF7924E0E8B80066DAE6</key>
<dict>
<key>primary</key>
<true/>
</dict>
<key>A95CDF8424E0E8B80066DAE6</key>
<dict>
<key>primary</key>
<true/>
</dict>
</dict>
</dict>
</plist>
//
// XBAACEncoder_system.h
// XBVoiceTool
//
// Created by xxb on 2018/11/29.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
///The encoded data is passed back to the caller in the callback
typedef void (^AACEncodeCompleteBlock)(NSData * encodedData, NSError* error);
@interface XBAACEncoder_system : NSObject
- (id)initWithInputAudioStreamDesc:(AudioStreamBasicDescription)inputAudioStreamDesc;
/**
Encode PCM data
*/
- (void)encodePCMData:(void *)pcmData len:(int)len completionBlock:(AACEncodeCompleteBlock)completionBlock;
/**
Encode CMSampleBufferRef data
*/
- (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(AACEncodeCompleteBlock)completionBlock;
@end
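/*
 Usage sketch (illustrative, not part of the original source): feeding captured PCM into the
 encoder. The 44.1 kHz / mono / 16-bit ASBD below is an assumed input format, and pcmBytes/pcmLen
 stand in for a real capture buffer; match the description to whatever your capture side produces.

     AudioStreamBasicDescription pcmDesc = {0};
     pcmDesc.mSampleRate       = 44100;
     pcmDesc.mFormatID         = kAudioFormatLinearPCM;
     pcmDesc.mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
     pcmDesc.mFramesPerPacket  = 1;
     pcmDesc.mChannelsPerFrame = 1;
     pcmDesc.mBitsPerChannel   = 16;
     pcmDesc.mBytesPerFrame    = 2;
     pcmDesc.mBytesPerPacket   = 2;
     XBAACEncoder_system *encoder = [[XBAACEncoder_system alloc] initWithInputAudioStreamDesc:pcmDesc];
     [encoder encodePCMData:pcmBytes len:pcmLen completionBlock:^(NSData *encodedData, NSError *error) {
         // encodedData holds one chunk of AAC; append it to a file or send it over the network
     }];
 */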
//
// ExtAudioFileMixer.h
// XBVoiceTool
//
// Created by xxb on 2018/7/25.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface ExtAudioFileMixer : NSObject
+ (OSStatus)mixAudio:(NSString *)audioPath1
andAudio:(NSString *)audioPath2
toFile:(NSString *)outputPath
preferedSampleRate:(float)sampleRate;
@end
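/*
 Usage sketch (illustrative): mixing a recorded vocal file with a backing track.
 vocalPath and backingTrackPath are placeholders for real file paths.

     NSString *output = [NSTemporaryDirectory() stringByAppendingPathComponent:@"mixed.m4a"];
     OSStatus status = [ExtAudioFileMixer mixAudio:vocalPath
                                          andAudio:backingTrackPath
                                            toFile:output
                                preferedSampleRate:44100];
     if (status != noErr) {
         NSLog(@"mix failed: %d", (int)status);
     }
 */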
//
// MP3Encoder.h
// XBVoiceTool
//
// Created by xxb on 2018/11/29.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#include "lame.h"
///The encoded data is passed back to the caller in the callback
typedef void (^MP3EncodeCompleteBlock)(unsigned char * encodedData, int len);
@interface MP3Encoder : NSObject
- (id)initWithSampleRate:(int)sampleRate channels:(int)channels bitRate:(int)bitRate;
- (void)encodePCMData:(void *)pcmData len:(int)len completeBlock:(MP3EncodeCompleteBlock)completeBlock;
@end
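/*
 Usage sketch (illustrative): encoding one block of 16-bit PCM. The 44100 / 2 / 128 parameters
 are assumed values (lame_set_brate expects kbps), and pcmBuffer/pcmLen stand in for real
 captured samples.

     MP3Encoder *encoder = [[MP3Encoder alloc] initWithSampleRate:44100 channels:2 bitRate:128];
     [encoder encodePCMData:pcmBuffer len:pcmLen completeBlock:^(unsigned char *encodedData, int len) {
         // encodedData holds len bytes of MP3; copy them out before the callback returns,
         // the buffer lives on the encoder's stack
     }];
 */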
//
// MP3Encoder.m
// XBVoiceTool
//
// Created by xxb on 2018/11/29.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "MP3Encoder.h"
@interface MP3Encoder ()
{
lame_t lameClient;
}
@end
@implementation MP3Encoder
- (id)initWithSampleRate:(int)sampleRate channels:(int)channels bitRate:(int)bitRate
{
if (self = [super init])
{
lameClient = lame_init();
lame_set_in_samplerate(lameClient, sampleRate);
// lame_set_out_samplerate(lameClient, sampleRate);
// lame_set_mode(lameClient, 1);
lame_set_num_channels(lameClient, channels);
lame_set_brate(lameClient, bitRate);
// lame_set_quality(lameClient, 2);
lame_init_params(lameClient);
}
return self;
}
- (void)dealloc
{
if (lameClient)
{
lame_close(lameClient);
}
}
- (void)encodePCMData:(void *)pcmData len:(int)len completeBlock:(MP3EncodeCompleteBlock)completeBlock
{
int mp3DataSize = len;
unsigned char mp3Buffer[mp3DataSize];
/**
 len / 2 here because the recorded data arrives as char * (one byte per element), while lame expects short * samples (two bytes each)
 lame_encode_buffer //use for mono 16-bit integer recordings
 lame_encode_buffer_interleaved //use for interleaved stereo recordings
 lame_encode_buffer_float //use for 32-bit float recordings
 */
int encodedBytes = lame_encode_buffer(lameClient, (short int *)pcmData, (short int *)pcmData, len / 2, mp3Buffer, mp3DataSize);
if (completeBlock)
{
completeBlock(mp3Buffer,encodedBytes);
}
}
@end
//
// XBAudioConverterPlayer.h
// XBVoiceTool
//
// Created by xxb on 2018/7/5.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface XBAudioConverterPlayer : NSObject
@property (nonatomic,assign) BOOL isPlaying;
- (instancetype)initWithFilePath:(NSString *)filePath;
- (void)play;
- (void)stop;
@end
//
// XBAudioConverterPlayer.m
// XBVoiceTool
//
// Created by xxb on 2018/7/5.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioConverterPlayer.h"
#import "XBAudioUnitPlayer.h"
#import "XBAudioTool.h"
@interface XBAudioConverterPlayer ()
{
AudioFileID audioFileID;
AudioStreamBasicDescription audioFileFormat;
AudioStreamPacketDescription *audioPacketFormat;
UInt64 packetNums;
SInt64 readedPacket; // number of packets already read
AudioBufferList *buffList;
Byte *convertBuffer;
AudioConverterRef audioConverter;
}
@property (nonatomic,strong) XBAudioUnitPlayer *player;
@end
@implementation XBAudioConverterPlayer
- (instancetype)initWithFilePath:(NSString *)filePath
{
if (self = [super init])
{
[XBAudioTool getAudioPropertyWithFilepath:filePath completeBlock:^(AudioFileID audioFileIDT, AudioStreamBasicDescription audioFileFormatT, UInt64 packetNumsT, UInt64 maxFramesPerPacketT ,UInt64 fileLengthFrames) {
audioConverter = NULL;
audioFileID = audioFileIDT;
audioFileFormat = audioFileFormatT;
packetNums = packetNumsT;
readedPacket = 0;
audioPacketFormat = malloc(sizeof(AudioStreamPacketDescription) * (CONST_BUFFER_SIZE / maxFramesPerPacketT + 1));
buffList = [XBAudioTool allocAudioBufferListWithMDataByteSize:CONST_BUFFER_SIZE mNumberChannels:1 mNumberBuffers:1];
convertBuffer = malloc(CONST_BUFFER_SIZE);
int mFramesPerPacket = 1;
int mBitsPerChannel = 32;
int mChannelsPerFrame = 1;
//output format
AudioStreamBasicDescription outputFormat = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:kAudioFormatLinearPCM mFormatFlags:(kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsNonInterleaved) mSampleRate:44100 mFramesPerPacket:mFramesPerPacket mChannelsPerFrame:mChannelsPerFrame mBitsPerChannel:mBitsPerChannel];
[XBAudioTool printAudioStreamBasicDescription:audioFileFormat];
[XBAudioTool printAudioStreamBasicDescription:outputFormat];
CheckError(AudioConverterNew(&audioFileFormat, &outputFormat, &audioConverter), "AudioConverterNew error");
self.player = [[XBAudioUnitPlayer alloc] initWithRate:outputFormat.mSampleRate bit:outputFormat.mBitsPerChannel channel:outputFormat.mChannelsPerFrame];
} errorBlock:^(NSError *error) {
NSLog(@"%@",error);
}];
}
return self;
}
- (void)dealloc
{
NSLog(@"XBPCMPlayer销毁");
[self stop];
free(convertBuffer);
}
- (void)play
{
if (self.player)
{
if (self.player.bl_input == nil)
{
typeof(self) __weak weakSelf = self;
self.player.bl_inputFull = ^(XBAudioUnitPlayer *player, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
typeof(weakSelf) __strong strongSelf = weakSelf; // take a strong reference only for the duration of the render callback to avoid a retain cycle
if (strongSelf == nil) { return; }
strongSelf->buffList->mBuffers[0].mDataByteSize = CONST_BUFFER_SIZE;
OSStatus status = AudioConverterFillComplexBuffer(strongSelf->audioConverter, lyInInputDataProc, (__bridge void * _Nullable)(strongSelf), &inNumberFrames, strongSelf->buffList, NULL);
if (status) {
NSLog(@"转换格式失败 %d", status);
}
// NSLog(@"out size: %d", strongSelf->buffList->mBuffers[0].mDataByteSize);
memcpy(ioData->mBuffers[0].mData, strongSelf->buffList->mBuffers[0].mData, strongSelf->buffList->mBuffers[0].mDataByteSize);
ioData->mBuffers[0].mDataByteSize = strongSelf->buffList->mBuffers[0].mDataByteSize;
if (strongSelf->buffList->mBuffers[0].mDataByteSize <= 0) {
dispatch_async(dispatch_get_main_queue(), ^{
// [weakSelf stop];
});
}
};
}
[self.player start];
self.isPlaying = YES;
}
}
- (void)stop
{
self.player.bl_input = nil;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kPreferredIOBufferDuration*0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[self.player stop];
self.isPlaying = NO;
});
}
OSStatus lyInInputDataProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
{
XBAudioConverterPlayer *player = (__bridge XBAudioConverterPlayer *)(inUserData);
UInt32 byteSize = CONST_BUFFER_SIZE;
OSStatus status = AudioFileReadPacketData(player->audioFileID, NO, &byteSize, player->audioPacketFormat, player->readedPacket, ioNumberDataPackets, player->convertBuffer);
if (outDataPacketDescription) { // the packet descriptions must be set here, otherwise the conversion fails
*outDataPacketDescription = player->audioPacketFormat;
}
if(status) {
NSLog(@"Failed to read the audio file");
}
if (!status && *ioNumberDataPackets > 0) {
ioData->mBuffers[0].mDataByteSize = byteSize;
ioData->mBuffers[0].mData = player->convertBuffer;
player->readedPacket += *ioNumberDataPackets;
return noErr;
}
else {
return NO_MORE_DATA;
}
}
- (double)getCurrentProgress
{
Float64 timeInterval = (readedPacket * 1.0) / packetNums;
return timeInterval;
}
@end
//
// XBAudioDataBuffer.h
// smanos
//
// Created by xxb on 2018/9/7.
// Copyright © 2018年 sven. All rights reserved.
// Buffer pool used to receive network data
#import <Foundation/Foundation.h>
@interface XBAudioDataBuffer : NSObject
/**
bufferSize : maximum capacity of the buffer pool
*/
- (instancetype)initWithBufferSize:(int)bufferSize;
/** Write data
data : the data to write into the buffer pool
len : the number of bytes to write
*/
- (int)writeData:(void *)data len:(int)len;
/** Read data
len : the number of bytes to read
data : the buffer that receives the data
*/
- (int)readLen:(int)len toData:(void *)data;
- (void)clearData;
/**
Get the number of bytes currently buffered and available to read
*/
- (int)availableLen;
@end
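/*
 Usage sketch (illustrative): a network callback writes into the ring buffer while a playback
 callback drains it in fixed-size chunks. The 1 MB capacity and 4096-byte read size are
 assumptions, and packet stands in for a received NSData.

     XBAudioDataBuffer *buffer = [[XBAudioDataBuffer alloc] initWithBufferSize:1024 * 1024];
     // on the receiving side:
     [buffer writeData:(void *)packet.bytes len:(int)packet.length];
     // on the playback side:
     char chunk[4096];
     if ([buffer availableLen] >= (int)sizeof(chunk)) {
         [buffer readLen:(int)sizeof(chunk) toData:chunk];
     }
 */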
//
// XBAudioDataBuffer.m
// smanos
//
// Created by xxb on 2018/9/7.
// Copyright © 2018年 sven. All rights reserved.
//
#import "XBAudioDataBuffer.h"
@interface XBAudioDataBuffer ()
{
NSLock *_lock;//lock
char *_dataBuffer;//buffer pool
int _availableLen;//length of data available to read
int _buf_size;//maximum capacity of the XBAudioDataBuffer
int _w_pos;//write offset
int _r_pos;//read offset
}
@end
@implementation XBAudioDataBuffer
- (instancetype)init
{
if (self = [super init])
{
_lock = [NSLock new];
_buf_size = 1024 * 1024;
_dataBuffer = (char *)malloc(_buf_size * sizeof(char));
}
return self;
}
/**
bufferSize : maximum capacity of the buffer pool
*/
- (instancetype)initWithBufferSize:(int)bufferSize
{
if (self = [super init])
{
_lock = [NSLock new];
_buf_size = bufferSize;
_dataBuffer = (char *)malloc(bufferSize * sizeof(char));
}
return self;
}
/**
Get the number of bytes currently buffered and available to read
*/
- (int)availableLen
{
return _availableLen;
}
/** Write data
data : the data to write into the buffer pool
len : the number of bytes to write
*/
- (int)writeData:(void *)data len:(int)len
{
if(len+_availableLen > _buf_size)
{
printf("Data len is more than buffer size!\n");
return 0;
}
[_lock lock];
if((_w_pos+len) <= _buf_size)
{
memcpy(_dataBuffer+_w_pos,data,len);
_w_pos += len;
}
else
{
int len1 = _buf_size - _w_pos;
memcpy(_dataBuffer+_w_pos,data,len1);
memcpy(_dataBuffer,data+len1,len-len1);
_w_pos = len-len1;
}
_availableLen += len;
[_lock unlock];
return len;
}
/** Read data
len : the number of bytes to read
data : the buffer that receives the data
*/
- (int)readLen:(int)len toData:(void *)data
{
if(_availableLen < len)
{
return 0;
}
[_lock lock];
if((_r_pos+len) <= _buf_size)
{
memcpy(data,_dataBuffer+_r_pos,len);
_r_pos += len;
}
else
{
int len1 = _buf_size - _r_pos;
memcpy(data,_dataBuffer+_r_pos,len1);
memcpy(data+len1,_dataBuffer,len-len1);
_r_pos = len-len1;
}
_availableLen -= len;
[_lock unlock];
return len;
}
- (void)dealloc
{
[self free];
}
- (void)free
{
free(_dataBuffer);
}
- (void)clearData
{
_r_pos = 0;
_w_pos = 0;
_availableLen = 0;
}
@end
//
// XBAudioFileDataReader.h
// XBVoiceTool
//
// Created by xxb on 2018/7/23.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
typedef void (^XBAudioFileDataReaderLoadFileToMemoryCompleteBlock)(XBAudioBuffer *xbBufferList);
@interface XBAudioFileDataReader : NSObject
///YES: the audio files have been loaded into memory
@property (nonatomic,assign,readonly) BOOL endLoadFileToMemory;
/**
Get the data after it has been loaded into memory.
Returns nil if loading has not finished yet.
*/
- (XBAudioBuffer *)getBufferList;
///length of the buffer array
- (NSInteger)getBufferListLength;
///Load the audio files into memory
- (void)loadFileToMemoryWithFilePathArr:(NSArray *)filePathArr;
///Load the audio files into memory
- (void)loadFileToMemoryWithFilePathArr:(NSArray *)filePathArr completeBlock:(XBAudioFileDataReaderLoadFileToMemoryCompleteBlock)completeBlock;
@end
//
// XBAudioFileDataReader.m
// XBVoiceTool
//
// Created by xxb on 2018/7/23.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioFileDataReader.h"
#import "XBAudioTool.h"
@interface XBAudioFileDataReader ()
{
XBAudioBuffer *_bufferList;
NSInteger _bufferListLength;
}
@end
@implementation XBAudioFileDataReader
- (XBAudioBuffer *)getBufferList
{
if (_endLoadFileToMemory)
{
return _bufferList;
}
return nil;
}
- (NSInteger)getBufferListLength
{
return _bufferListLength;
}
///Load the audio files into memory
- (void)loadFileToMemoryWithFilePathArr:(NSArray *)filePathArr
{
[self loadFileToMemoryWithFilePathArr:filePathArr completeBlock:nil];
}
- (void)loadFileToMemoryWithFilePathArr:(NSArray *)filePathArr completeBlock:(XBAudioFileDataReaderLoadFileToMemoryCompleteBlock)completeBlock
{
_bufferListLength = filePathArr.count;
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
XBAudioBuffer *bufferList = (XBAudioBuffer *)malloc(sizeof(XBAudioBuffer) * filePathArr.count);
// XBAudioBufferList bufferList = (XBAudioBufferList)malloc(sizeof(XBAudioBuffer) * filePathArr.count);
for (NSString *filePath in filePathArr)
{
NSInteger i = [filePathArr indexOfObject:filePath];
__block float rateRate = 1;
__block UInt32 channel = 1;
[XBAudioTool getAudioPropertyWithFilepath:filePath completeBlock:^(AudioFileID audioFileID, AudioStreamBasicDescription audioFileFormat, UInt64 packetNums, UInt64 maxFramesPerPacket, UInt64 fileLengthFrames) {
rateRate = kSmapleRate * 1.0 / audioFileFormat.mSampleRate;
if (audioFileFormat.mChannelsPerFrame == 2)
{
channel = 2;
}
} errorBlock:^(NSError *error) {
}];
//interleaved:NO means that stereo data keeps the two channels in separate arrays
AVAudioFormat *clientFormat = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatFloat32
sampleRate:kSmapleRate
channels:channel
interleaved:NO];
UInt32 size = sizeof(AudioStreamBasicDescription);
ExtAudioFileRef fp;
NSURL *url = [NSURL fileURLWithPath:filePath];
CheckError(ExtAudioFileOpenURL((__bridge CFURLRef _Nonnull)(url), &fp), "cant open the file");
//set the format that the audio is converted to as it is read from the file
CheckError(ExtAudioFileSetProperty(fp, kExtAudioFileProperty_ClientDataFormat,
size, clientFormat.streamDescription),
"cant set the file output format");
//Get the total frame count; multiply by rateRate to get the frame count at the client format's sample rate, since the output format is set to kSmapleRate
UInt64 numFrames = 0;
size = sizeof(numFrames);
CheckError(ExtAudioFileGetProperty(fp, kExtAudioFileProperty_FileLengthFrames,
&size, &numFrames),
"cant get the fileLengthFrames");
numFrames = numFrames * rateRate;
//fill in bufferList
bufferList[i].totalFrames = numFrames;
bufferList[i].asbd = *(clientFormat.streamDescription);
bufferList[i].channelCount = channel;
bufferList[i].startFrame = 0;
bufferList[i].leftData = (Float32 *)malloc(numFrames * sizeof(Float32));
if (channel == 2)
{
bufferList[i].rightData = (Float32 *)malloc(numFrames * sizeof(Float32));
}
//read the data into bufferList's leftData and rightData
AudioBufferList *bufList = (AudioBufferList *)malloc(sizeof(AudioBufferList) + (channel - 1) * sizeof(AudioBuffer));
AudioBuffer emptyBuffer = {0};
for (int j = 0; j < channel; j++) {
bufList->mBuffers[j] = emptyBuffer;
}
bufList->mNumberBuffers = channel;
bufList->mBuffers[0].mNumberChannels = 1;
bufList->mBuffers[0].mData = bufferList[i].leftData;
bufList->mBuffers[0].mDataByteSize = (UInt32)numFrames*sizeof(Float32);
if (2 == channel) {
bufList->mBuffers[1].mNumberChannels = 1;
bufList->mBuffers[1].mDataByteSize = (UInt32)numFrames*sizeof(Float32);
bufList->mBuffers[1].mData = bufferList[i].rightData;
}
UInt32 numberOfPacketsToRead = (UInt32) numFrames;
CheckError(ExtAudioFileRead(fp,
&numberOfPacketsToRead,
bufList),
"cant read the audio file");
free(bufList);
ExtAudioFileDispose(fp);
}
_bufferList = bufferList;
dispatch_async(dispatch_get_main_queue(), ^{
if (completeBlock)
{
completeBlock(bufferList);
}
_endLoadFileToMemory = YES;
});
});
}
@end
//
// XBAudioFormatConversion.h
// XBVoiceTool
//
// Created by xxb on 2018/6/27.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBAudioFormatConversion : NSObject
/*
PCM to MP3
Only stereo data converts correctly; mono input will play back too fast
*/
+ (NSString *)audio_PCMToMP3:(NSString *)pcmFilePath rate:(XBAudioRate)rate;
///PCM to WAV
+ (NSString *)audio_PCMToWAV:(NSString *)pcmFilePath rate:(XBAudioRate)rate channels:(int)channels;
@end
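/*
 Usage sketch (illustrative): converting a recorded PCM file. XBAudioRate_44k and pcmPath are
 placeholders for the real recording settings and path; both methods write their result into
 NSTemporaryDirectory() and return the output path.

     NSString *mp3Path = [XBAudioFormatConversion audio_PCMToMP3:pcmPath rate:XBAudioRate_44k];
     NSString *wavPath = [XBAudioFormatConversion audio_PCMToWAV:pcmPath rate:XBAudioRate_44k channels:1];
 */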
//
// XBAudioFormatConversion.m
// XBVoiceTool
//
// Created by xxb on 2018/6/27.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioFormatConversion.h"
#import "lame.h"
#import <UIKit/UIKit.h>
@implementation XBAudioFormatConversion
///PCM to MP3
+ (NSString *)audio_PCMToMP3:(NSString *)pcmFilePath rate:(XBAudioRate)rate
{
NSString *mp3FilePath = [NSString stringWithFormat:@"%@XB_PCMToMP3.mp3",NSTemporaryDirectory()];
NSString *_recordFilePath = pcmFilePath;
@try {
int read, write;
FILE *pcm = fopen([_recordFilePath cStringUsingEncoding:1], "rb"); //source: the PCM file to convert
fseek(pcm, 4*1024, SEEK_CUR); //skip file header
FILE *mp3 = fopen([mp3FilePath cStringUsingEncoding:1], "wb+"); //output: where the generated MP3 is written
const int PCM_SIZE = 8192;//8192
const int MP3_SIZE = 8192;//8192
short int pcm_buffer[PCM_SIZE*2];
unsigned char mp3_buffer[MP3_SIZE];
lame_t lame = lame_init();
// lame_set_in_samplerate(lame, 7500.0);
lame_set_in_samplerate(lame, rate);//input sample rate of the PCM data; if it does not match the recording's real rate, playback speed will be wrong
lame_set_VBR(lame, vbr_default);
lame_init_params(lame);
do {
read = (int)fread(pcm_buffer, 2*sizeof(short int), PCM_SIZE, pcm);
if (read == 0)
{
write = lame_encode_flush(lame, mp3_buffer, MP3_SIZE);
}
else
write = lame_encode_buffer_interleaved(lame, pcm_buffer, read, mp3_buffer, MP3_SIZE);
fwrite(mp3_buffer, write, 1, mp3);
} while (read != 0);
lame_close(lame);
fclose(mp3);
fclose(pcm);
}
@catch (NSException *exception) {
NSLog(@"%@",[exception description]);
}
@finally {
//do some
NSLog(@"MP3生成成功: %@",mp3FilePath);
// UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"mp3转化成功!" message:nil delegate:self cancelButtonTitle:@"确定" otherButtonTitles:nil, nil];
// [alert show];
}
return mp3FilePath;
}
///PCM to WAV
+ (NSString *)audio_PCMToWAV:(NSString *)pcmFilePath rate:(XBAudioRate)rate channels:(int)channels
{
NSString *wavPath = [NSString stringWithFormat:@"%@XB_PCMToWAV.wav",NSTemporaryDirectory()];
char *pcmPath_c = (char *)[pcmFilePath UTF8String];
char *wavPath_c = (char *)[wavPath UTF8String];
convertPcm2Wav(pcmPath_c, wavPath_c, channels, rate);
return wavPath;
}
// PCM to WAV
//The WAV header consists of the following structures:
typedef struct {
char fccID[4];
int32_t dwSize;
char fccType[4];
} HEADER;
typedef struct {
char fccID[4];
int32_t dwSize;
int16_t wFormatTag;
int16_t wChannels;
int32_t dwSamplesPerSec;
int32_t dwAvgBytesPerSec;
int16_t wBlockAlign;
int16_t uiBitsPerSample;
}FMT;
typedef struct {
char fccID[4];
int32_t dwSize;
}DATA;
/*
 int convertPcm2Wav(char *src_file, char *dst_file, int channels, int sample_rate)
 src_file    : path of the source PCM file
 dst_file    : path of the output WAV file
 channels    : number of channels (phone recordings are usually mono, so pass 1)
 sample_rate : sample rate of the PCM file (44100, 16000, 8000, ... whatever was used when recording)
 */
int convertPcm2Wav(char *src_file, char *dst_file, int channels, int sample_rate)
{
int bits = 16;
//variables used to build the .wav header
HEADER pcmHEADER;
FMT pcmFMT;
DATA pcmDATA;
unsigned short m_pcmData;
FILE *fp,*fpCpy;
if((fp=fopen(src_file, "rb")) == NULL) //open the source PCM file
{
printf("open pcm file %s error\n", src_file);
return -1;
}
if((fpCpy=fopen(dst_file, "wb+")) == NULL) //create a new file for the converted output
{
printf("create wav file error\n");
return -1;
}
//Build the RIFF HEADER of the wav file; dwSize is left for later because the data length is not known yet
strncpy(pcmHEADER.fccID,"RIFF",4);
strncpy(pcmHEADER.fccType,"WAVE",4);
fseek(fpCpy,sizeof(HEADER),1); //skip past the HEADER so the rest of the wav data can be written first
//HEADER done for now
if(ferror(fpCpy))
{
printf("error\n");
}
//Build the FMT chunk of the wav header
pcmFMT.dwSamplesPerSec=sample_rate;
pcmFMT.dwAvgBytesPerSec=pcmFMT.dwSamplesPerSec*sizeof(m_pcmData);
pcmFMT.uiBitsPerSample=bits;
strncpy(pcmFMT.fccID,"fmt ", 4);
pcmFMT.dwSize=16;
pcmFMT.wBlockAlign=2;
pcmFMT.wChannels=channels;
pcmFMT.wFormatTag=1;
//FMT chunk done
fwrite(&pcmFMT,sizeof(FMT),1,fpCpy); //write the FMT chunk into the .wav file
//Build the DATA chunk header; dwSize is still unknown, so it cannot be written yet
strncpy(pcmDATA.fccID,"data", 4);
pcmDATA.dwSize=0; //start pcmDATA.dwSize at 0 so it can be accumulated below
fseek(fpCpy,sizeof(DATA),1); //skip the DATA chunk header for now and come back to it later
fread(&m_pcmData,sizeof(int16_t),1,fp); //从.pcm中读入数据
while(!feof(fp)) //在.pcm文件结束前将他的数据转化并赋给.wav;
{
pcmDATA.dwSize+=2; //计算数据的长度;每读入一个数据,长度就加一;
fwrite(&m_pcmData,sizeof(int16_t),1,fpCpy); //将数据写入.wav文件;
fread(&m_pcmData,sizeof(int16_t),1,fp); //从.pcm中读入数据
}
fclose(fp); //关闭文件
pcmHEADER.dwSize = 0; //根据pcmDATA.dwsize得出pcmHEADER.dwsize的值
rewind(fpCpy); //将fpCpy变为.wav的头,以便于写入HEADER和DATA;
fwrite(&pcmHEADER,sizeof(HEADER),1,fpCpy); //写入HEADER
fseek(fpCpy,sizeof(FMT),1); //跳过FMT,因为FMT已经写入
fwrite(&pcmDATA,sizeof(DATA),1,fpCpy); //写入DATA;
fclose(fpCpy); //关闭文件
return 0;
}
@end
//
// XBAudioPCMDataReader.h
// XBVoiceTool
//
// Created by xxb on 2018/7/2.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface XBAudioPCMDataReader : NSObject
///Read data out of an existing data store
- (int)readDataFrom:(NSData *)dataStore len:(int)len forData:(Byte *)data;
@end
//
// XBAudioPCMDataReader.m
// XBVoiceTool
//
// Created by xxb on 2018/7/2.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioPCMDataReader.h"
@interface XBAudioPCMDataReader ()
@property (nonatomic,assign) UInt32 readerLength;
@end
@implementation XBAudioPCMDataReader
- (int)readDataFrom:(NSData *)dataStore len:(int)len forData:(Byte *)data
{
UInt32 currentReadLength = 0;
if (_readerLength >= dataStore.length)
{
_readerLength = 0;
return currentReadLength;
}
NSRange range;
if (_readerLength+ len <= dataStore.length)
{
currentReadLength = len;
range = NSMakeRange(_readerLength, currentReadLength);
_readerLength = _readerLength + len;
}
else
{
currentReadLength = (UInt32)(dataStore.length - _readerLength);
range = NSMakeRange(_readerLength, currentReadLength);
_readerLength = (UInt32) dataStore.length;
}
NSData *subData = [dataStore subdataWithRange:range];
Byte *tempByte = (Byte *)[subData bytes];
memcpy(data,tempByte,currentReadLength);
return currentReadLength;
}
@end
//
// XBAudioPlayer.h
// XBVoiceTool
//
// Created by xxb on 2018/7/10.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBAudioPlayer : NSObject
- (instancetype)initWithFilePath:(NSString *)filePath;
- (void)start;
- (void)stop;
- (float)getProgress;
@end
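/*
 Usage sketch (illustrative): playing a local audio file and polling progress.
 filePath is a placeholder for a real file in the bundle or sandbox.

     XBAudioPlayer *player = [[XBAudioPlayer alloc] initWithFilePath:filePath];
     [player start];
     float progress = [player getProgress]; // 0.0 ... 1.0
 */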
//
// XBAudioPlayer.m
// XBVoiceTool
//
// Created by xxb on 2018/7/10.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioPlayer.h"
#import "XBAudioUnitPlayer.h"
#import "XBAudioTool.h"
@interface XBAudioPlayer ()
{
ExtAudioFileRef _audioFile;
XBAudioUnitPlayer *_player;
AudioBufferList *_bufferList;
AudioStreamBasicDescription _outputFormat;
UInt64 _totalFrame;
UInt64 _readedFrame;
}
@end
@implementation XBAudioPlayer
- (instancetype)initWithFilePath:(NSString *)filePath
{
if (self = [super init])
{
NSURL *url = [NSURL fileURLWithPath:filePath];
CheckError(ExtAudioFileOpenURL((__bridge CFURLRef)url, &_audioFile),"failed to open file");
_bufferList = [XBAudioTool allocAudioBufferListWithMDataByteSize:CONST_BUFFER_SIZE mNumberChannels:1 mNumberBuffers:1];
_outputFormat = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:kAudioFormatLinearPCM mFormatFlags:kLinearPCMFormatFlagIsSignedInteger mSampleRate:XBAudioRate_44k mFramesPerPacket:1 mChannelsPerFrame:2 mBitsPerChannel:16];
uint size = sizeof(_outputFormat);
CheckError(ExtAudioFileSetProperty(_audioFile, kExtAudioFileProperty_ClientDataFormat, size, &_outputFormat), "setkExtAudioFileProperty_ClientDataFormat failure");
size = sizeof(_totalFrame);
CheckError(ExtAudioFileGetProperty(_audioFile,
kExtAudioFileProperty_FileLengthFrames,
&size,
&_totalFrame), "获取总帧数失败");
_readedFrame = 0;
}
return self;
}
- (void)start
{
if (_player == nil)
{
typeof(self) __weak weakSelf = self;
_player = [[XBAudioUnitPlayer alloc] initWithRate:_outputFormat.mSampleRate bit:_outputFormat.mBitsPerChannel channel:_outputFormat.mChannelsPerFrame];
_player.bl_inputFull = ^(XBAudioUnitPlayer *player, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
typeof(weakSelf) __strong strongSelf = weakSelf; // take a strong reference only inside the render callback to avoid a retain cycle
if (strongSelf == nil) { return; }
strongSelf->_bufferList->mBuffers[0].mDataByteSize = CONST_BUFFER_SIZE;
OSStatus status = ExtAudioFileRead(strongSelf->_audioFile, &inNumberFrames, strongSelf->_bufferList);
memcpy(ioData->mBuffers[0].mData, strongSelf->_bufferList->mBuffers[0].mData, strongSelf->_bufferList->mBuffers[0].mDataByteSize);
ioData->mBuffers[0].mDataByteSize = strongSelf->_bufferList->mBuffers[0].mDataByteSize;
if (ioData->mBuffers[0].mDataByteSize == 0)
{
[weakSelf stop];
}
strongSelf->_readedFrame += ioData->mBuffers[0].mDataByteSize / strongSelf->_outputFormat.mBytesPerFrame;
CheckError(status, "转换格式失败");
if (inNumberFrames == 0) NSLog(@"播放结束");
NSLog(@"%f",[strongSelf getProgress]);
};
}
[_player start];
}
- (void)stop
{
_player.bl_input = nil;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[_player stop];
_player = nil;
});
}
- (float)getProgress
{
return _readedFrame * 1.0 / _totalFrame;
}
@end
//
// XBAudioTool.h
// XBVoiceTool
//
// Created by xxb on 2018/7/5.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBAudioTool : NSObject
/** Synchronously get audio file information
filePath : file path
audioFileFormat : format description of the file
packetNums : total number of packets
maxFramesPerPacket : maximum number of frames in a single packet
fileLengthFrames : total number of frames
*/
+ (void)getAudioPropertyWithFilepath:(NSString *)filePath completeBlock:(void (^)(AudioFileID audioFileID,AudioStreamBasicDescription audioFileFormat,UInt64 packetNums,UInt64 maxFramesPerPacket,UInt64 fileLengthFrames))completeBlock errorBlock:(void (^)(NSError *error))errorBlock;
/** Asynchronously get audio file information
filePath : file path
audioFileFormat : format description of the file
packetNums : total number of packets
maxFramesPerPacket : maximum number of frames in a single packet
fileLengthFrames : total number of frames
*/
+ (void)getAudioPropertyAsyncWithFilepath:(NSString *)filePath completeBlock:(void (^)(AudioFileID audioFileID,AudioStreamBasicDescription audioFileFormat,UInt64 packetNums,UInt64 maxFramesPerPacket,UInt64 fileLengthFrames))completeBlock errorBlock:(void (^)(NSError *error))errorBlock;
/**
Print the format description
*/
+ (void)printAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd;
/**
Create an AudioBufferList
mDataByteSize : length of each AudioBuffer.mData byte array
mNumberChannels : number of channels per buffer
mNumberBuffers : number of elements in the mBuffers array
*/
+ (AudioBufferList *)allocAudioBufferListWithMDataByteSize:(UInt32)mDataByteSize mNumberChannels:(UInt32)mNumberChannels mNumberBuffers:(UInt32)mNumberBuffers;
/**
mSampleRate : sample rate
mFormatID : format ID
mFormatFlags : format flags (e.g. kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked)
mFramesPerPacket : frames per packet
mChannelsPerFrame : channels per frame
mBitsPerChannel : bits per channel (sample depth)
*/
+ (AudioStreamBasicDescription)allocAudioStreamBasicDescriptionWithMFormatID:(XBAudioFormatID)mFormatID mFormatFlags:(XBAudioFormatFlags)mFormatFlags mSampleRate:(XBAudioRate)mSampleRate mFramesPerPacket:(UInt32)mFramesPerPacket mChannelsPerFrame:(UInt32)mChannelsPerFrame mBitsPerChannel:(UInt32)mBitsPerChannel;
/**
componentType : kAudioUnitType_
componentSubType : kAudioUnitSubType_
componentFlags : 0
componentFlagsMask : 0
*/
+ (AudioComponentDescription)allocAudioComponentDescriptionWithComponentType:(OSType)componentType componentSubType:(OSType)componentSubType componentFlags:(UInt32)componentFlags componentFlagsMask:(UInt32)componentFlagsMask;
@end
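/*
 Usage sketch (illustrative): reading a file's format asynchronously and building a matching PCM
 output description with the helpers above. filePath is a placeholder; the 16-bit mono output
 values are assumptions.

     [XBAudioTool getAudioPropertyAsyncWithFilepath:filePath completeBlock:^(AudioFileID audioFileID, AudioStreamBasicDescription audioFileFormat, UInt64 packetNums, UInt64 maxFramesPerPacket, UInt64 fileLengthFrames) {
         [XBAudioTool printAudioStreamBasicDescription:audioFileFormat];
     } errorBlock:^(NSError *error) {
         NSLog(@"%@", error);
     }];
     AudioStreamBasicDescription pcm = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:kAudioFormatLinearPCM
                                                                                      mFormatFlags:(kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked)
                                                                                       mSampleRate:XBAudioRate_44k
                                                                                  mFramesPerPacket:1
                                                                                 mChannelsPerFrame:1
                                                                                   mBitsPerChannel:16];
 */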
/*
struct AudioBufferList
{
UInt32 mNumberBuffers;
AudioBuffer mBuffers[1]; // this is a variable length array of mNumberBuffers elements
#if defined(__cplusplus) && defined(CA_STRICT) && CA_STRICT
public:
AudioBufferList() {}
private:
// Copying and assigning a variable length struct is problematic; generate a compile error.
AudioBufferList(const AudioBufferList&);
AudioBufferList& operator=(const AudioBufferList&);
#endif
};
*/
/*
CF_ENUM(AudioFormatID)
{
kAudioFormatLinearPCM = 'lpcm',
kAudioFormatAC3 = 'ac-3',
kAudioFormat60958AC3 = 'cac3',
kAudioFormatAppleIMA4 = 'ima4',
kAudioFormatMPEG4AAC = 'aac ',
kAudioFormatMPEG4CELP = 'celp',
kAudioFormatMPEG4HVXC = 'hvxc',
kAudioFormatMPEG4TwinVQ = 'twvq',
kAudioFormatMACE3 = 'MAC3',
kAudioFormatMACE6 = 'MAC6',
kAudioFormatULaw = 'ulaw',
kAudioFormatALaw = 'alaw',
kAudioFormatQDesign = 'QDMC',
kAudioFormatQDesign2 = 'QDM2',
kAudioFormatQUALCOMM = 'Qclp',
kAudioFormatMPEGLayer1 = '.mp1',
kAudioFormatMPEGLayer2 = '.mp2',
kAudioFormatMPEGLayer3 = '.mp3',
kAudioFormatTimeCode = 'time',
kAudioFormatMIDIStream = 'midi',
kAudioFormatParameterValueStream = 'apvs',
kAudioFormatAppleLossless = 'alac',
kAudioFormatMPEG4AAC_HE = 'aach',
kAudioFormatMPEG4AAC_LD = 'aacl',
kAudioFormatMPEG4AAC_ELD = 'aace',
kAudioFormatMPEG4AAC_ELD_SBR = 'aacf',
kAudioFormatMPEG4AAC_ELD_V2 = 'aacg',
kAudioFormatMPEG4AAC_HE_V2 = 'aacp',
kAudioFormatMPEG4AAC_Spatial = 'aacs',
kAudioFormatAMR = 'samr',
kAudioFormatAMR_WB = 'sawb',
kAudioFormatAudible = 'AUDB',
kAudioFormatiLBC = 'ilbc',
kAudioFormatDVIIntelIMA = 0x6D730011,
kAudioFormatMicrosoftGSM = 0x6D730031,
kAudioFormatAES3 = 'aes3',
kAudioFormatEnhancedAC3 = 'ec-3',
kAudioFormatFLAC = 'flac',
kAudioFormatOpus = 'opus'
};
*/
/*
CF_ENUM(AudioFormatFlags)
{
kAudioFormatFlagIsFloat = (1U << 0), // 0x1
kAudioFormatFlagIsBigEndian = (1U << 1), // 0x2
kAudioFormatFlagIsSignedInteger = (1U << 2), // 0x4
kAudioFormatFlagIsPacked = (1U << 3), // 0x8
kAudioFormatFlagIsAlignedHigh = (1U << 4), // 0x10
kAudioFormatFlagIsNonInterleaved = (1U << 5), // 0x20
kAudioFormatFlagIsNonMixable = (1U << 6), // 0x40
kAudioFormatFlagsAreAllClear = 0x80000000,
kLinearPCMFormatFlagIsFloat = kAudioFormatFlagIsFloat,
kLinearPCMFormatFlagIsBigEndian = kAudioFormatFlagIsBigEndian,
kLinearPCMFormatFlagIsSignedInteger = kAudioFormatFlagIsSignedInteger,
kLinearPCMFormatFlagIsPacked = kAudioFormatFlagIsPacked,
kLinearPCMFormatFlagIsAlignedHigh = kAudioFormatFlagIsAlignedHigh,
kLinearPCMFormatFlagIsNonInterleaved = kAudioFormatFlagIsNonInterleaved,
kLinearPCMFormatFlagIsNonMixable = kAudioFormatFlagIsNonMixable,
kLinearPCMFormatFlagsSampleFractionShift = 7,
kLinearPCMFormatFlagsSampleFractionMask = (0x3F << kLinearPCMFormatFlagsSampleFractionShift),
kLinearPCMFormatFlagsAreAllClear = kAudioFormatFlagsAreAllClear,
kAppleLosslessFormatFlag_16BitSourceData = 1,
kAppleLosslessFormatFlag_20BitSourceData = 2,
kAppleLosslessFormatFlag_24BitSourceData = 3,
kAppleLosslessFormatFlag_32BitSourceData = 4
};
*/
/*
CF_ENUM(UInt32) {
kAudioUnitType_Output = 'auou',
kAudioUnitType_MusicDevice = 'aumu',
kAudioUnitType_MusicEffect = 'aumf',
kAudioUnitType_FormatConverter = 'aufc',
kAudioUnitType_Effect = 'aufx',
kAudioUnitType_Mixer = 'aumx',
kAudioUnitType_Panner = 'aupn',
kAudioUnitType_Generator = 'augn',
kAudioUnitType_OfflineEffect = 'auol',
kAudioUnitType_MIDIProcessor = 'aumi'
};
*/
//
// XBAudioTool.m
// XBVoiceTool
//
// Created by xxb on 2018/7/5.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioTool.h"
@implementation XBAudioTool
/** Synchronously get audio file information
filePath : file path
audioFileFormat : format description of the file
packetNums : total number of packets
maxFramesPerPacket : maximum number of frames in a single packet
fileLengthFrames : total number of frames
*/
+ (void)getAudioPropertyWithFilepath:(NSString *)filePath completeBlock:(void (^)(AudioFileID audioFileID,AudioStreamBasicDescription audioFileFormat,UInt64 packetNums,UInt64 maxFramesPerPacket,UInt64 fileLengthFrames))completeBlock errorBlock:(void (^)(NSError *error))errorBlock
{
AudioFileID audioFileID;
AudioStreamBasicDescription audioFileFormat = {};
UInt64 packetNums = 0;
UInt64 maxFramesPerPacket = 0;
NSError *error;
//open the file
NSURL *url = [NSURL fileURLWithPath:filePath];
OSStatus status = AudioFileOpenURL((__bridge CFURLRef)url, kAudioFileReadPermission, 0, &audioFileID);
if (status != noErr)
{
NSLog(@"打开文件失败 %@", url);
error = [NSError errorWithDomain:@"打开文件失败" code:1008601 userInfo:nil];
if (errorBlock)
{
errorBlock(error);
}
return;
}
//read the file's data format
uint32_t size = sizeof(AudioStreamBasicDescription);
status = AudioFileGetProperty(audioFileID, kAudioFilePropertyDataFormat, &size, &audioFileFormat);
if (status != noErr)
{
error = [NSError errorWithDomain:[NSString stringWithFormat:@"读取文件格式出错,error status %zd", status] code:1008602 userInfo:nil];
if (errorBlock)
{
errorBlock(error);
}
return;
}
//read the total number of packets in the file
size = sizeof(packetNums);
status = AudioFileGetProperty(audioFileID,
kAudioFilePropertyAudioDataPacketCount,
&size,
&packetNums);
if (status != noErr)
{
error = [NSError errorWithDomain:[NSString stringWithFormat:@"Failed to read the file's total packet count, error status %d", (int)status] code:1008603 userInfo:nil];
if (errorBlock)
{
errorBlock(error);
}
return;
}
// read the maximum number of frames in a single packet
maxFramesPerPacket = audioFileFormat.mFramesPerPacket;
if (maxFramesPerPacket == 0) {
size = sizeof(maxFramesPerPacket);
status = AudioFileGetProperty(audioFileID, kAudioFilePropertyMaximumPacketSize, &size, &maxFramesPerPacket);
if (status != noErr)
{
error = [NSError errorWithDomain:[NSString stringWithFormat:@"读取单个packet的最大数量出错,error status %zd", status] code:1008604 userInfo:nil];
if (errorBlock)
{
errorBlock(error);
}
return;
}
if(status ==noErr && maxFramesPerPacket == 0)
{
error = [NSError errorWithDomain:@"AudioFileGetProperty error or sizePerPacket = 0" code:1008605 userInfo:nil];
if (errorBlock)
{
errorBlock(error);
}
return;
}
}
// total number of frames
UInt64 numFrames = maxFramesPerPacket * packetNums;
AudioFileClose(audioFileID);
if (completeBlock)
{
completeBlock(audioFileID,audioFileFormat,packetNums,maxFramesPerPacket,numFrames);
}
}
/** Asynchronously get audio file information
filePath : file path
audioFileFormat : format description of the file
packetNums : total number of packets
maxFramesPerPacket : maximum number of frames in a single packet
fileLengthFrames : total number of frames
*/
+ (void)getAudioPropertyAsyncWithFilepath:(NSString *)filePath completeBlock:(void (^)(AudioFileID audioFileID,AudioStreamBasicDescription audioFileFormat,UInt64 packetNums,UInt64 maxFramesPerPacket,UInt64 fileLengthFrames))completeBlock errorBlock:(void (^)(NSError *error))errorBlock
{
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[XBAudioTool getAudioPropertyWithFilepath:filePath completeBlock:^(AudioFileID audioFileID, AudioStreamBasicDescription audioFileFormat, UInt64 packetNums, UInt64 maxFramesPerPacket, UInt64 fileLengthFrames) {
dispatch_async(dispatch_get_main_queue(), ^{
if (completeBlock)
{
completeBlock(audioFileID,audioFileFormat,packetNums,maxFramesPerPacket,fileLengthFrames);
}
});
} errorBlock:^(NSError *error) {
dispatch_async(dispatch_get_main_queue(), ^{
if (errorBlock)
{
errorBlock(error);
}
});
}];
});
}
+ (void)printAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd
{
char formatID[5];
UInt32 mFormatID = CFSwapInt32HostToBig(asbd.mFormatID);
bcopy (&mFormatID, formatID, 4);
formatID[4] = '\0';
printf("Sample Rate: %10.0f\n", asbd.mSampleRate);
printf("Format ID: %10s\n", formatID);
printf("Format Flags: %10X\n", (unsigned int)asbd.mFormatFlags);
printf("Bytes per Packet: %10d\n", (unsigned int)asbd.mBytesPerPacket);
printf("Frames per Packet: %10d\n", (unsigned int)asbd.mFramesPerPacket);
printf("Bytes per Frame: %10d\n", (unsigned int)asbd.mBytesPerFrame);
printf("Channels per Frame: %10d\n", (unsigned int)asbd.mChannelsPerFrame);
printf("Bits per Channel: %10d\n", (unsigned int)asbd.mBitsPerChannel);
printf("\n");
}
/**
Create an AudioBufferList
mDataByteSize : length of each AudioBuffer.mData byte array
mNumberChannels : number of channels per buffer
mNumberBuffers : number of elements in the mBuffers array
*/
+ (AudioBufferList *)allocAudioBufferListWithMDataByteSize:(UInt32)mDataByteSize mNumberChannels:(UInt32)mNumberChannels mNumberBuffers:(UInt32)mNumberBuffers
{
AudioBufferList *_bufferList;
// allocate enough room for mNumberBuffers buffers and honour the requested channel count
_bufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList) + (mNumberBuffers - 1) * sizeof(AudioBuffer));
_bufferList->mNumberBuffers = mNumberBuffers;
for (UInt32 i = 0; i < mNumberBuffers; i++)
{
_bufferList->mBuffers[i].mData = malloc(mDataByteSize);
_bufferList->mBuffers[i].mDataByteSize = mDataByteSize;
_bufferList->mBuffers[i].mNumberChannels = mNumberChannels;
}
return _bufferList;
}
/**
mSampleRate : sample rate
mFormatID : format ID
mFormatFlags : format flags (e.g. kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked)
mFramesPerPacket : frames per packet
mChannelsPerFrame : channels per frame
mBitsPerChannel : bits per channel (sample depth)
*/
+ (AudioStreamBasicDescription)allocAudioStreamBasicDescriptionWithMFormatID:(XBAudioFormatID)mFormatID mFormatFlags:(XBAudioFormatFlags)mFormatFlags mSampleRate:(XBAudioRate)mSampleRate mFramesPerPacket:(UInt32)mFramesPerPacket mChannelsPerFrame:(UInt32)mChannelsPerFrame mBitsPerChannel:(UInt32)mBitsPerChannel
{
AudioStreamBasicDescription _outputFormat;
memset(&_outputFormat, 0, sizeof(_outputFormat));
_outputFormat.mSampleRate = mSampleRate;
_outputFormat.mFormatID = mFormatID;
_outputFormat.mFormatFlags = mFormatFlags;
_outputFormat.mFramesPerPacket = mFramesPerPacket;
_outputFormat.mChannelsPerFrame = mChannelsPerFrame;
_outputFormat.mBitsPerChannel = mBitsPerChannel;
_outputFormat.mBytesPerFrame = mBitsPerChannel * mChannelsPerFrame / 8;
_outputFormat.mBytesPerPacket = mBitsPerChannel * mChannelsPerFrame / 8 * mFramesPerPacket;
return _outputFormat;
}
/**
componentType : kAudioUnitType_
componentSubType : kAudioUnitSubType_
componentFlags : 0
componentFlagsMask : 0
*/
+ (AudioComponentDescription)allocAudioComponentDescriptionWithComponentType:(OSType)componentType componentSubType:(OSType)componentSubType componentFlags:(UInt32)componentFlags componentFlagsMask:(UInt32)componentFlagsMask
{
AudioComponentDescription outputDesc;
outputDesc.componentType = componentType;
outputDesc.componentSubType = componentSubType;
outputDesc.componentManufacturer = kAudioUnitManufacturer_Apple;
outputDesc.componentFlags = componentFlags;
outputDesc.componentFlagsMask = componentFlagsMask;
return outputDesc;
}
@end
//
// XBAudioUnitMixer.h
// XBVoiceTool
//
// Created by xxb on 2018/7/18.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBAudioUnitMixer : NSObject
@property (nonatomic,assign,readonly) BOOL isPlaying;
- (instancetype)initWithFilePathArr:(NSArray *)filePathArr;
- (void)start;
- (void)pause;
- (void)enableInput:(BOOL)enable forBus:(int)busIndex;
- (void)setInputVolumeValue:(CGFloat)value forBus:(int)busIndex;
- (void)setOutputVolumeValue:(AudioUnitParameterValue)value;
@end
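/*
 Usage sketch (illustrative): mixing two local files and ducking the second one.
 voicePath and musicPath are placeholders, and the bus index is assumed here to follow
 the order of the file array.

     XBAudioUnitMixer *mixer = [[XBAudioUnitMixer alloc] initWithFilePathArr:@[voicePath, musicPath]];
     [mixer setInputVolumeValue:0.3 forBus:1]; // lower the background music
     [mixer start];
 */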
//
// XBAudioUnitMixerTest.h
// XBVoiceTool
//
// Created by xxb on 2018/7/2.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBAudioUnitMixerTest : NSObject
- (instancetype)initWithPCMFilePath:(NSString *)filePath rate:(XBAudioRate)rate channels:(XBAudioChannel)channels bit:(XBAudioBit)bit;
- (void)start;
- (void)stop;
@end
//
// XBAudioUnitMixerTest.m
// XBVoiceTool
//
// Created by xxb on 2018/7/2.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioUnitMixerTest.h"
#import "XBAudioUnitRecorder.h"
#import "XBAudioUnitPlayer.h"
#import "XBAudioPCMDataReader.h"
#import "XBDataWriter.h"
#define subPathPCM @"/Documents/xbMixData.caf"
#define stroePath [NSHomeDirectory() stringByAppendingString:subPathPCM]
//#define CONST_BUFFER_SIZE 2048*2*10
@interface XBAudioUnitMixerTest ()
{
Byte *recorderTempBuffer;
}
@property (nonatomic,strong) XBAudioUnitRecorder *recorder;
@property (nonatomic,strong) XBAudioUnitPlayer *player;
@property (nonatomic,strong) XBAudioPCMDataReader *dataReader;
@property (nonatomic,strong) XBDataWriter *dataWriter;
@property (nonatomic,strong) NSData *data;
@end
@implementation XBAudioUnitMixerTest
- (instancetype)initWithPCMFilePath:(NSString *)filePath rate:(XBAudioRate)rate channels:(XBAudioChannel)channels bit:(XBAudioBit)bit
{
if (self = [super init])
{
self.data = [NSData dataWithContentsOfFile:filePath];
self.player = [[XBAudioUnitPlayer alloc] initWithRate:rate bit:bit channel:channels];
self.recorder = [[XBAudioUnitRecorder alloc] initWithRate:rate bit:bit channel:channels];
self.dataReader = [XBAudioPCMDataReader new];
self.dataWriter = [XBDataWriter new];
[self initParams];
}
return self;
}
- (void)initParams
{
recorderTempBuffer = malloc(CONST_BUFFER_SIZE);
typeof(self) __weak weakSelf = self;
if (self.recorder.bl_output == nil)
{
self.recorder.bl_output = ^(AudioBufferList *bufferList) {
typeof(weakSelf) __strong strongSelf = weakSelf; // strong reference only inside the callback, to avoid a retain cycle
if (strongSelf == nil) { return; }
AudioBuffer buffer = bufferList->mBuffers[0];
int len = buffer.mDataByteSize;
memcpy(strongSelf->recorderTempBuffer, buffer.mData, len);
};
}
if (self.player.bl_input == nil)
{
self.player.bl_input = ^(AudioBufferList *bufferList) {
typeof(weakSelf) __strong strongSelf = weakSelf; // strong reference only inside the callback
if (strongSelf == nil) { return; }
AudioBuffer buffer = bufferList->mBuffers[0];
int len = buffer.mDataByteSize;
int readLen = [strongSelf.dataReader readDataFrom:strongSelf.data len:len forData:buffer.mData];
buffer.mDataByteSize = readLen;
for (int i = 0; i < readLen; i++)
{
((Byte *)buffer.mData)[i] = ((Byte *)buffer.mData)[i] + strongSelf->recorderTempBuffer[i];
}
//写文件
[strongSelf.dataWriter writeBytes:buffer.mData len:readLen toPath:stroePath];
if (readLen == 0)
{
[weakSelf stop];
}
};
}
}
- (void)delete
{
NSString *pcmPath = stroePath;
if ([[NSFileManager defaultManager] fileExistsAtPath:pcmPath])
{
[[NSFileManager defaultManager] removeItemAtPath:pcmPath error:nil];
}
}
- (void)start
{
[self delete];
[self.recorder start];
[self.player start];
}
- (void)stop
{
self.recorder.bl_output = nil;
self.player.bl_input = nil;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[self.recorder stop];
[self.player stop];
});
}
@end
//
// XBAudioUnitPlayer.h
// XBVoiceTool
//
// Created by xxb on 2018/6/29.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@class XBAudioUnitPlayer;
typedef void (^XBAudioUnitPlayerInputBlock)(AudioBufferList *bufferList);
typedef void (^XBAudioUnitPlayerInputBlockFull)(XBAudioUnitPlayer *player,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData);
@interface XBAudioUnitPlayer : NSObject
@property (nonatomic,copy) XBAudioUnitPlayerInputBlock bl_input;
@property (nonatomic,copy) XBAudioUnitPlayerInputBlockFull bl_inputFull;
- (instancetype)initWithRate:(XBAudioRate)rate bit:(XBAudioBit)bit channel:(XBAudioChannel)channel;
- (void)start;
- (void)stop;
- (void)destroy;
@end
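/*
 Usage sketch (illustrative): pulling PCM from some source inside the render callback.
 fillBytes() is a hypothetical function that copies up to the requested number of bytes and
 returns how many it wrote; the block must fill ioData's first buffer and set its mDataByteSize.

     XBAudioUnitPlayer *player = [[XBAudioUnitPlayer alloc] initWithRate:XBAudioRate_44k bit:XBAudioBit_16 channel:XBAudioChannel_1];
     player.bl_input = ^(AudioBufferList *bufferList) {
         UInt32 filled = fillBytes(bufferList->mBuffers[0].mData, bufferList->mBuffers[0].mDataByteSize);
         bufferList->mBuffers[0].mDataByteSize = filled;
     };
     [player start];
 */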
//
// XBAudioUnitPlayer.m
// XBVoiceTool
//
// Created by xxb on 2018/6/29.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioUnitPlayer.h"
#import "XBAudioTool.h"
@interface XBAudioUnitPlayer ()
{
AudioUnit audioUnit;
}
@property (nonatomic,assign) XBAudioBit bit;
@property (nonatomic,assign) XBAudioRate rate;
@property (nonatomic,assign) XBAudioChannel channel;
@end
@implementation XBAudioUnitPlayer
- (instancetype)initWithRate:(XBAudioRate)rate bit:(XBAudioBit)bit channel:(XBAudioChannel)channel
{
if (self = [super init])
{
self.rate = rate;
self.bit = bit;
self.channel = channel;
}
return self;
}
- (instancetype)init
{
if (self = [super init])
{
self.rate = XBAudioRate_44k;
self.bit = XBAudioBit_16;
self.channel = XBAudioChannel_1;
}
return self;
}
- (void)dealloc
{
NSLog(@"XBAudioUnitPlayer销毁");
[self destroy];
}
- (void)destroy
{
if (audioUnit)
{
OSStatus status;
status = AudioComponentInstanceDispose(audioUnit);
CheckError(status, "audioUnit释放失败");
}
}
- (void)initAudioUnitWithRate:(XBAudioRate)rate bit:(XBAudioBit)bit channel:(XBAudioChannel)channel
{
//configure the audio session
NSError *error = nil;
AVAudioSession* session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:&error];
[session setActive:YES error:nil];
//create the audio unit
AudioComponentDescription outputDesc = [XBAudioTool allocAudioComponentDescriptionWithComponentType:kAudioUnitType_Output
componentSubType:kAudioUnitSubType_VoiceProcessingIO
componentFlags:0
componentFlagsMask:0];
AudioComponent outputComponent = AudioComponentFindNext(NULL, &outputDesc);
AudioComponentInstanceNew(outputComponent, &audioUnit);
//set the output stream format
int mFramesPerPacket = 1;
AudioStreamBasicDescription streamDesc = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:kAudioFormatLinearPCM
mFormatFlags:(kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsNonInterleaved)
mSampleRate:rate
mFramesPerPacket:mFramesPerPacket
mChannelsPerFrame:channel
mBitsPerChannel:bit];
OSStatus status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
kOutputBus,
&streamDesc,
sizeof(streamDesc));
CheckError(status, "SetProperty StreamFormat failure");
//set the render callback
AURenderCallbackStruct outputCallBackStruct;
outputCallBackStruct.inputProc = outputCallBackFun;
outputCallBackStruct.inputProcRefCon = (__bridge void * _Nullable)(self);
status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Input,
kOutputBus,
&outputCallBackStruct,
sizeof(outputCallBackStruct));
CheckError(status, "SetProperty EnableIO failure");
}
- (void)start
{
if (audioUnit == nil)
{
[self initAudioUnitWithRate:self.rate bit:self.bit channel:self.channel];
}
AudioOutputUnitStart(audioUnit);
}
- (void)stop
{
if (audioUnit == nil)
{
return;
}
OSStatus status;
status = AudioOutputUnitStop(audioUnit);
CheckError(status, "audioUnit停止失败");
}
static OSStatus outputCallBackFun( void * inRefCon,
AudioUnitRenderActionFlags * ioActionFlags,
const AudioTimeStamp * inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList * __nullable ioData)
{
memset(ioData->mBuffers[0].mData, 0, ioData->mBuffers[0].mDataByteSize);
// memset(ioData->mBuffers[1].mData, 0, ioData->mBuffers[1].mDataByteSize);
XBAudioUnitPlayer *player = (__bridge XBAudioUnitPlayer *)(inRefCon);
typeof(player) __weak weakPlayer = player;
if (player.bl_input)
{
player.bl_input(ioData);
}
if (player.bl_inputFull)
{
player.bl_inputFull(weakPlayer, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, ioData);
}
return noErr;
}
@end
//
// XBAudioUnitRecorder.h
// XBVoiceTool
//
// Created by xxb on 2018/6/28.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@class XBAudioUnitRecorder;
typedef void (^XBAudioUnitRecorderOnputBlock)(AudioBufferList *bufferList);
typedef void (^XBAudioUnitRecorderOnputBlockFull)(XBAudioUnitRecorder *player,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData);
@interface XBAudioUnitRecorder : NSObject
@property (nonatomic,readonly,assign) BOOL isRecording;
@property (nonatomic,copy) XBAudioUnitRecorderOnputBlock bl_output;
@property (nonatomic,copy) XBAudioUnitRecorderOnputBlockFull bl_outputFull;
- (instancetype)initWithRate:(XBAudioRate)rate bit:(XBAudioBit)bit channel:(XBAudioChannel)channel;
- (void)start;
- (void)stop;
- (AudioStreamBasicDescription)getOutputFormat;
@end
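/*
 Usage sketch (illustrative): capturing microphone PCM and appending it to a file with
 XBDataWriter (declared later in this commit). pcmStorePath is a placeholder path.

     XBAudioUnitRecorder *recorder = [[XBAudioUnitRecorder alloc] initWithRate:XBAudioRate_44k bit:XBAudioBit_16 channel:XBAudioChannel_1];
     XBDataWriter *writer = [XBDataWriter new];
     recorder.bl_output = ^(AudioBufferList *bufferList) {
         [writer writeBytes:bufferList->mBuffers[0].mData
                        len:bufferList->mBuffers[0].mDataByteSize
                     toPath:pcmStorePath];
     };
     [recorder start];
 */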
//
// XBAudioUnitRecorder.m
// XBVoiceTool
//
// Created by xxb on 2018/6/28.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBAudioUnitRecorder.h"
#import "XBAudioTool.h"
#define subPathPCM @"/Documents/xbMedia"
#define stroePath [NSHomeDirectory() stringByAppendingString:subPathPCM]
@interface XBAudioUnitRecorder ()
{
AudioUnit audioUnit;
}
@property (nonatomic,assign) XBAudioBit bit;
@property (nonatomic,assign) XBAudioRate rate;
@property (nonatomic,assign) XBAudioChannel channel;
@property (nonatomic,assign) AudioStreamBasicDescription inputStreamDesc;
@end
@implementation XBAudioUnitRecorder
- (instancetype)initWithRate:(XBAudioRate)rate bit:(XBAudioBit)bit channel:(XBAudioChannel)channel
{
if (self = [super init])
{
self.bit = bit;
self.rate = rate;
self.channel = channel;
[self initInputAudioUnitWithRate:self.rate bit:self.bit channel:self.channel];
}
return self;
}
- (instancetype)init
{
if (self = [super init])
{
self.bit = XBAudioBit_16;
self.rate = XBAudioRate_44k;
self.channel = XBAudioChannel_1;
[self initInputAudioUnitWithRate:self.rate bit:self.bit channel:self.channel];
}
return self;
}
- (void)dealloc
{
CheckError(AudioComponentInstanceDispose(audioUnit),
"AudioComponentInstanceDispose failed");
NSLog(@"XBAudioUnitRecorder销毁");
}
- (void)initInputAudioUnitWithRate:(XBAudioRate)rate bit:(XBAudioBit)bit channel:(XBAudioChannel)channel
{
//configure the AVAudioSession
NSError *error = nil;
AVAudioSession* session = [AVAudioSession sharedInstance];
[session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:&error];
[session setActive:YES error:nil];
//create the audio unit
AudioComponentDescription inputDesc = [XBAudioTool allocAudioComponentDescriptionWithComponentType:kAudioUnitType_Output componentSubType:kAudioUnitSubType_RemoteIO componentFlags:0 componentFlagsMask:0];
AudioComponent inputComponent = AudioComponentFindNext(NULL, &inputDesc);
CheckError(AudioComponentInstanceNew(inputComponent, &audioUnit), "AudioComponentInstanceNew failure");
//set the output stream format (the format delivered on the input bus)
int mFramesPerPacket = 1;
AudioStreamBasicDescription inputStreamDesc = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:kAudioFormatLinearPCM mFormatFlags:(kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked) mSampleRate:rate mFramesPerPacket:mFramesPerPacket mChannelsPerFrame:channel mBitsPerChannel:bit];
self.inputStreamDesc = inputStreamDesc;
OSStatus status = AudioUnitSetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
kInputBus,
&inputStreamDesc,
sizeof(inputStreamDesc));
CheckError(status, "setProperty inputStreamFormat error");
// status = AudioUnitSetProperty(audioUnit,
// kAudioUnitProperty_StreamFormat,
// kAudioUnitScope_Input,
// kOutputBus,
// &inputStreamDesc,
// sizeof(inputStreamDesc));
// CheckError(status, "setProperty outputStreamFormat error");
//enable microphone input (1 = yes)
int inputEnable = 1;
status = AudioUnitSetProperty(audioUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
kInputBus,
&inputEnable,
sizeof(inputEnable));
CheckError(status, "setProperty EnableIO error");
//set the input callback
AURenderCallbackStruct inputCallBackStruce;
inputCallBackStruce.inputProc = inputCallBackFun;
inputCallBackStruce.inputProcRefCon = (__bridge void * _Nullable)(self);
status = AudioUnitSetProperty(audioUnit,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Output,
kInputBus,
&inputCallBackStruce,
sizeof(inputCallBackStruce));
CheckError(status, "setProperty InputCallback error");
AudioStreamBasicDescription outputDesc0;
UInt32 size = sizeof(outputDesc0);
CheckError(AudioUnitGetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
0,
&outputDesc0,
&size),"get property failure");
AudioStreamBasicDescription outputDesc1;
size = sizeof(outputDesc1);
CheckError(AudioUnitGetProperty(audioUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input,
0,
&outputDesc1,
&size),"get property failure");
}
- (void)start
{
[self delete];
AudioOutputUnitStart(audioUnit);
_isRecording = YES;
}
- (void)stop
{
CheckError(AudioOutputUnitStop(audioUnit),
"AudioOutputUnitStop failed");
_isRecording = NO;
}
- (AudioStreamBasicDescription)getOutputFormat
{
return self.inputStreamDesc;
// AudioStreamBasicDescription outputDesc0;
// UInt32 size = sizeof(outputDesc0);
// CheckError(AudioUnitGetProperty(audioUnit,
// kAudioUnitProperty_StreamFormat,
// kAudioUnitScope_Output,
// 0,
// &outputDesc0,
// &size),"get property failure");
// return outputDesc0;
}
static OSStatus inputCallBackFun( void * inRefCon,
AudioUnitRenderActionFlags * ioActionFlags,
const AudioTimeStamp * inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList * __nullable ioData)
{
XBAudioUnitRecorder *recorder = (__bridge XBAudioUnitRecorder *)(inRefCon);
typeof(recorder) __weak weakRecorder = recorder;
AudioBufferList bufferList;
bufferList.mNumberBuffers = 1;
bufferList.mBuffers[0].mData = NULL;
bufferList.mBuffers[0].mDataByteSize = 0;
AudioUnitRender(recorder->audioUnit,
ioActionFlags,
inTimeStamp,
kInputBus,
inNumberFrames,
&bufferList);
if (recorder.bl_output)
{
recorder.bl_output(&bufferList);
}
if (recorder.bl_outputFull)
{
recorder.bl_outputFull(weakRecorder, ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufferList);
}
return noErr;
}
- (void)delete
{
NSString *pcmPath = stroePath;
if ([[NSFileManager defaultManager] fileExistsAtPath:pcmPath])
{
[[NSFileManager defaultManager] removeItemAtPath:pcmPath error:nil];
}
}
@end
//
// XBDataWriter.h
// XBVoiceTool
//
// Created by xxb on 2018/7/5.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBDataWriter : NSObject
- (void)writeBytes:(void *)bytes len:(NSUInteger)len toPath:(NSString *)path;
- (void)writeData:(NSData *)data toPath:(NSString *)path;
@end
//
// XBDataWriter.m
// XBVoiceTool
//
// Created by xxb on 2018/7/5.
// Copyright © 2018年 xxb. All rights reserved.
//
#import "XBDataWriter.h"
@interface XBDataWriter ()
@property (nonatomic,strong) NSLock *lock;
@end
@implementation XBDataWriter
- (void)writeBytes:(void *)bytes len:(NSUInteger)len toPath:(NSString *)path
{
NSData *data = [NSData dataWithBytes:bytes length:len];
[self writeData:data toPath:path];
// NSString *savePath = path;
// if ([[NSFileManager defaultManager] fileExistsAtPath:savePath] == false)
// {
// [[NSFileManager defaultManager] createFileAtPath:savePath contents:nil attributes:nil];
// }
// static FILE *fp=NULL;
//
// if(fp==NULL || access( [path UTF8String], F_OK )==-1){
//
// fp = fopen([path UTF8String], "ab+" );
//
// if(fp==NULL){
//
// printf("can't open file!");
//
// fp=NULL;
//
// return;
//
// }
//
// }
//
// if(fp!=NULL){
//
// fwrite(bytes , 1 , len , fp );
//
// printf("write to file %zd bytes",bytes);
//
// }
}
- (void)writeData:(NSData *)data toPath:(NSString *)path
{
[self.lock lock];
NSString *savePath = path;
if ([[NSFileManager defaultManager] fileExistsAtPath:savePath] == false)
{
[[NSFileManager defaultManager] createFileAtPath:savePath contents:nil attributes:nil];
}
NSFileHandle * handle = [NSFileHandle fileHandleForWritingAtPath:savePath];
[handle seekToEndOfFile];
[handle writeData:data];
[self.lock unlock];
}
- (NSLock *)lock
{
if (_lock == nil)
{
_lock = [NSLock new];
}
return _lock;
}
@end
//
// XBExtAudioFileRef.h
// XBVoiceTool
//
// Created by xxb on 2018/7/24.
// Copyright © 2018年 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@interface XBExtAudioFileRef : NSObject
/**
storePath : path where the file will be stored
inputFormat : The format of the audio data to be written to the file.
*/
- (instancetype)initWithStorePath:(NSString *)storePath inputFormat:(AudioStreamBasicDescription *)inputFormat;
- (void)writeIoData:(AudioBufferList *)ioData inNumberFrames:(UInt32)inNumberFrames;
- (void)stopWrite;
@end
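XBExtAudioFileRef wraps ExtAudioFile to stream AudioBufferList data into a CAF file. The sketch below follows the -writeTest code in SHMp3RecordManager later in this commit; the XBAudioTool factory call and format flags are copied from there, so adjust them if your format differs.

#import "XBExtAudioFileRef.h"
#import "XBAudioTool.h"

static XBExtAudioFileRef *MakeDemoCAFWriter(NSString *cafPath)
{
    // Packed, signed, big-endian 16-bit PCM, as configured in SHMp3RecordManager.
    AudioStreamBasicDescription desc = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:XBAudioFormatID_PCM mFormatFlags:(XBAudioFormatFlags)(kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked) mSampleRate:XBAudioRate_44k mFramesPerPacket:1 mChannelsPerFrame:XBAudioChannel_2 mBitsPerChannel:XBAudioBit_16];
    return [[XBExtAudioFileRef alloc] initWithStorePath:cafPath inputFormat:&desc];
}
// Inside a recorder callback: [writer writeIoData:ioData inNumberFrames:inNumberFrames];
// When recording ends:        [writer stopWrite];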
//
// XBExtAudioFileRef.m
// XBVoiceTool
//
// Created by xxb on 2018/7/24.
// Copyright © 2018 xxb. All rights reserved.
//
#import "XBExtAudioFileRef.h"
#import "XBAudioTool.h"
@interface XBExtAudioFileRef ()
{
ExtAudioFileRef _mAudioFileRef;
}
@end
@implementation XBExtAudioFileRef
- (instancetype)initWithStorePath:(NSString *)storePath inputFormat:(AudioStreamBasicDescription *)inputFormat
{
if (self = [super init])
{
[self createOutFileWithStorePath:storePath inputFormat:inputFormat];
}
return self;
}
- (void)createOutFileWithStorePath:(NSString *)storePath inputFormat:(AudioStreamBasicDescription *)inputFormat
{
AudioStreamBasicDescription outputDesc = *inputFormat;
NSString *destinationFilePath = storePath;
CFURLRef destinationURL = CFURLCreateWithFileSystemPath(kCFAllocatorDefault, (CFStringRef)destinationFilePath, kCFURLPOSIXPathStyle, false);
CheckError(ExtAudioFileCreateWithURL(destinationURL,
kAudioFileCAFType,
&outputDesc,
NULL,
kAudioFileFlags_EraseFile,
&_mAudioFileRef),"Couldn't create a file for writing");
CFRelease(destinationURL);
// AudioStreamBasicDescription tempDesc;
// uint32_t size = sizeof(AudioStreamBasicDescription);
// CheckError(ExtAudioFileGetProperty(_mAudioFileRef,
// kExtAudioFileProperty_ClientDataFormat,
// &size,
// &tempDesc),
// "cant get the DataFormat");
// UInt32 codecManf = kAppleHardwareAudioCodecManufacturer;
// CheckError(ExtAudioFileSetProperty(_mAudioFileRef, kExtAudioFileProperty_CodecManufacturer, sizeof(UInt32), &codecManf)," set CodecManufacturer failure");
// CheckError(ExtAudioFileSetProperty(_mAudioFileRef, kExtAudioFileProperty_ClientDataFormat, sizeof(outputDesc), &outputDesc),"set ClientDataFormat failure");
}
- (void)writeIoData:(AudioBufferList *)ioData inNumberFrames:(UInt32)inNumberFrames
{
CheckError(ExtAudioFileWrite(_mAudioFileRef, inNumberFrames, ioData), "ExtAudioFileWrite failed");
}
- (void)dealloc
{
[self stopWrite];
}
- (void)stopWrite
{
if (_mAudioFileRef == NULL) return; // guard against a second ExtAudioFileDispose from -dealloc
CheckError(ExtAudioFileDispose(_mAudioFileRef),"ExtAudioFileDispose failed");
_mAudioFileRef = NULL;
}
@end
//
// XBPCMPlayer.h
// XBVoiceTool
//
// Created by xxb on 2018/7/2.
// Copyright © 2018 xxb. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "Header_audio.h"
@class XBPCMPlayer;
@protocol XBPCMPlayerDelegate <NSObject>
- (void)playToEnd:(XBPCMPlayer *)player;
@end
@interface XBPCMPlayer : NSObject
@property (nonatomic,copy) NSString *filePath;
@property (nonatomic,assign) BOOL isPlaying;
@property (nonatomic,weak) id<XBPCMPlayerDelegate>delegate;
- (instancetype)initWithPCMFilePath:(NSString *)filePath rate:(XBAudioRate)rate channels:(XBAudioChannel)channels bit:(XBAudioBit)bit;
- (void)play;
- (void)stop;
@end
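A short hedged sketch of driving the player through the interface above; the file path is hypothetical and the rate/channel/bit values simply mirror the ones the recorder uses elsewhere in this commit.

#import "XBPCMPlayer.h"

static XBPCMPlayer *PlayDemoPCM(NSString *pcmPath, id<XBPCMPlayerDelegate> delegate)
{
    // The raw PCM file must match the rate/channel/bit layout passed here.
    XBPCMPlayer *player = [[XBPCMPlayer alloc] initWithPCMFilePath:pcmPath rate:XBAudioRate_44k channels:XBAudioChannel_1 bit:XBAudioBit_16];
    player.delegate = delegate; // -playToEnd: fires when the data store is exhausted
    [player play];
    return player; // the caller keeps it alive while playback runs
}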
//
// XBPCMPlayer.m
// XBVoiceTool
//
// Created by xxb on 2018/7/2.
// Copyright © 2018 xxb. All rights reserved.
//
#import "XBPCMPlayer.h"
#import "XBAudioUnitPlayer.h"
#import "XBAudioPCMDataReader.h"
@interface XBPCMPlayer ()
@property (nonatomic,strong) NSData *dataStore;
@property (nonatomic,strong) XBAudioUnitPlayer *player;
@property (nonatomic,strong) XBAudioPCMDataReader *reader;
@end
@implementation XBPCMPlayer
- (instancetype)initWithPCMFilePath:(NSString *)filePath rate:(XBAudioRate)rate channels:(XBAudioChannel)channels bit:(XBAudioBit)bit
{
if (self = [super init])
{
self.filePath = filePath;
self.player = [[XBAudioUnitPlayer alloc] initWithRate:rate bit:bit channel:channels];
self.reader = [XBAudioPCMDataReader new];
}
return self;
}
- (void)dealloc
{
NSLog(@"XBPCMPlayer销毁");
[self.player stop];
self.player = nil;
}
- (void)play
{
if (self.player.bl_input == nil)
{
typeof(self) __weak weakSelf = self;
self.player.bl_input = ^(AudioBufferList *bufferList) {
// NSLog(@"xxxxxinputCallBackTime:%f",[[NSDate date] timeIntervalSince1970]);
AudioBuffer buffer = bufferList->mBuffers[0];
int len = buffer.mDataByteSize;
int readLen = [weakSelf.reader readDataFrom:weakSelf.dataStore len:len forData:buffer.mData];
// Write the size back through the pointer; updating the local copy alone would be lost.
bufferList->mBuffers[0].mDataByteSize = readLen;
if (readLen == 0)
{
[weakSelf stop];
if ([weakSelf.delegate respondsToSelector:@selector(playToEnd:)])
{
[weakSelf.delegate playToEnd:weakSelf];
}
}
};
}
[self.player start];
self.isPlaying = YES;
}
- (void)stop
{
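// Detach the input block first, then stop the unit after a short delay
// (a fraction of kPreferredIOBufferDuration), presumably so an in-flight
// render callback can drain before AudioOutputUnitStop runs.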
self.player.bl_input = nil;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(kPreferredIOBufferDuration*0.1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
[self.player stop];
self.isPlaying = NO;
});
}
#pragma mark - Overrides
- (void)setFilePath:(NSString *)filePath
{
_filePath = filePath;
self.dataStore = [NSData dataWithContentsOfFile:filePath];
}
@end
This diff is collapsed.
//
// SHMp3RecordManager.h
// ShorthandMaster
//
// Created by 明津李 on 2020/9/1.
// Copyright © 2020 明津李. All rights reserved.
//
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
@interface SHMp3RecordManager : NSObject
+ (instancetype)shared;
- (void)configMp3Path:(NSString *)mp3Path PCMPath:(NSString *)PCMPath;
- (void)start;
- (void)pause;
- (void)stop;
@end
NS_ASSUME_NONNULL_END
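SHMp3RecordManager ties the classes above together: a single XBAudioUnitRecorder feeds both the MP3 encoder and a CAF writer. Below is a hedged Objective-C sketch of the call sequence the Swift recording screen performs later in this commit; the file names mirror the ones built on the Swift side.

#import "SHMp3RecordManager.h"

static void StartDemoMp3Recording(void)
{
    NSString *docs = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
    NSString *mp3Path = [docs stringByAppendingPathComponent:@"record.mp3"];
    NSString *pcmPath = [docs stringByAppendingPathComponent:@"xbMixData.caf"];
    SHMp3RecordManager *manager = [SHMp3RecordManager shared];
    [manager configMp3Path:mp3Path PCMPath:pcmPath]; // wires up the recorder, MP3 encoder and CAF writer
    [manager start];   // begin capturing
    // ... call -pause to suspend and -start again to resume ...
    [manager stop];    // tears the recorder down
}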
//
// SHMp3RecordManager.m
// ShorthandMaster
//
// Created by 明津李 on 2020/9/1.
// Copyright © 2020 明津李. All rights reserved.
//
#import "SHMp3RecordManager.h"
//#import "XBPCMPlayer.h"
//#import "XBAudioFormatConversion.h"
#import "XBAudioUnitRecorder.h"
//#import "XBAudioUnitMixerTest.h"
#import "XBAudioTool.h"
//#import "XBAudioConverterPlayer.h"
//#import "XBAudioPlayer.h"
//#import "XBAudioUnitMixer.h"
//#import "XBAudioPCMDataReader.h"
//#import "XBAudioFileDataReader.h"
#import "XBExtAudioFileRef.h"
//#import "ExtAudioFileMixer.h"
#import "XBDataWriter.h"
//#import "XBAACEncoder_system.h"
#import "MP3Encoder.h"
@interface SHMp3RecordManager ()
//@property (nonatomic,strong) XBPCMPlayer *palyer;
@property (nonatomic,strong) XBAudioUnitRecorder *recorder;
//@property (nonatomic,strong) XBAudioUnitMixerTest *mixer;
//@property (nonatomic,strong) XBAudioConverterPlayer *audioPlayer;
//@property (nonatomic,strong) XBAudioPlayer *audioPlayerNew;
//@property (nonatomic,strong) XBAudioUnitMixer *musicMixer;
//@property (nonatomic,strong) XBAudioPCMDataReader *dataReader;
@property (nonatomic,strong) XBExtAudioFileRef *xbFile;
@property (nonatomic,strong) XBDataWriter *dataWriter;
//@property (nonatomic,strong) XBAACEncoder_system *aacEncoder;
@property (nonatomic,strong) MP3Encoder *mp3Encoder;
@property (nonatomic, copy) NSString * mp3StorePath;
@property (nonatomic, copy) NSString * PCMStorePath;
@end
static SHMp3RecordManager * manager;
@implementation SHMp3RecordManager
+ (instancetype)shared{
static dispatch_once_t o;
dispatch_once(&o, ^{
manager = [[SHMp3RecordManager alloc] init];
});
return manager;
}
+ (instancetype)alloc{
if (manager) {
// Throw an exception if the singleton already exists
NSException *exception = [NSException exceptionWithName:[NSString stringWithFormat:@"Attempted to create a second %@ singleton", [self class]] reason:@"Use +shared instead" userInfo:nil];
[exception raise];
}
return [super alloc]; // No singleton yet, create it normally
}
- (id)copyWithZone:(struct _NSZone *)zone{
return manager;
}
+ (id)allocWithZone:(struct _NSZone *)zone{
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
manager = [super allocWithZone:zone];
});
return manager;
}
- (instancetype)init{
if (self = [super init]) {
}
return self;
}
- (void)configMp3Path:(NSString *)mp3Path PCMPath:(NSString *)PCMPath{
self.mp3StorePath = mp3Path;
self.PCMStorePath = PCMPath;
[self mp3EncodeTest];
}
- (void)start{
[self.recorder start];
}
- (void)pause{
[self.recorder stop];
}
- (void)stop{
[self.recorder stop];
self.recorder = nil;
}
- (void)reset{
}
#pragma mark - MP3 encoding
- (void)mp3EncodeTest{
// NSLog(@"%@", mp3StroePath);
[self deleteFileAtPath:self.mp3StroePath];
self.recorder = [[XBAudioUnitRecorder alloc] initWithRate:XBAudioRate_44k bit:XBAudioBit_16 channel:XBAudioChannel_1];
_mp3Encoder = [[MP3Encoder alloc] initWithSampleRate:XBAudioRate_44k channels:1 bitRate:128];
self.dataWriter = [[XBDataWriter alloc] init];
typeof(self) __weak weakSelf = self;
self.recorder.bl_output = ^(AudioBufferList *bufferList) {
AudioBuffer buffer = bufferList->mBuffers[0];
[weakSelf.mp3Encoder encodePCMData:buffer.mData len:buffer.mDataByteSize completeBlock:^(unsigned char *encodedData, int len) {
[weakSelf.dataWriter writeBytes:encodedData len:len toPath:weakSelf.mp3StorePath];
}];
};
[self writeTest];
// [self.recorder start];
}
#pragma mark - File-writing test
- (void)writeTest{
[self deleteFileAtPath:self.PCMStorePath];
// self.recorder = [[XBAudioUnitRecorder alloc] initWithRate:XBAudioRate_44k bit:XBAudioBit_16 channel:XBAudioChannel_1];
AudioStreamBasicDescription desc = [XBAudioTool allocAudioStreamBasicDescriptionWithMFormatID:XBAudioFormatID_PCM mFormatFlags:(XBAudioFormatFlags)(kAudioFormatFlagIsBigEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked) mSampleRate:XBAudioRate_44k mFramesPerPacket:1 mChannelsPerFrame:XBAudioChannel_2 mBitsPerChannel:XBAudioBit_16];
self.xbFile = [[XBExtAudioFileRef alloc] initWithStorePath:self.PCMStorePath inputFormat:&desc];
typeof(self) __weak weakSelf = self;
self.recorder.bl_outputFull = ^(XBAudioUnitRecorder *player, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) {
[weakSelf.xbFile writeIoData:ioData inNumberFrames:inNumberFrames];
};
// [self.recorder start];
}
- (void)deleteFileAtPath:(NSString *)path{
if ([[NSFileManager defaultManager] fileExistsAtPath:path]){
[[NSFileManager defaultManager] removeItemAtPath:path error:nil];
}
}
@end
...@@ -7,6 +7,7 @@ ...@@ -7,6 +7,7 @@
// //
import UIKit import UIKit
import CloudKit
class SHRecordListViewController: SHBaseViewController { class SHRecordListViewController: SHBaseViewController {
......
...@@ -14,24 +14,29 @@ class SHRecordModel: NSObject, NSCoding{ ...@@ -14,24 +14,29 @@ class SHRecordModel: NSObject, NSCoding{
@objc var during: NSInteger = 0 @objc var during: NSInteger = 0
@objc var address: String = "" @objc var address: String = ""
@objc var pathFile: String = "" @objc var pathFile: String = ""
@objc var pcmPathFile: String = ""
@objc var txt: String = "" @objc var txt: String = ""
@objc var delete: Bool = false @objc var delete: Bool = false
@objc var deleteDate: Date = Date() @objc var deleteDate: Date = Date()
@objc var fileData: Data = Data()
override func setValue(_ value: Any?, forUndefinedKey key: String) { override func setValue(_ value: Any?, forUndefinedKey key: String) {
} }
//构造方法 //构造方法
required init(time:Date=Date(), during:NSInteger=0, address:String="", pathFile:String="", txt:String="", delete:Bool=false, deleteDate:Date=Date()) { required init(time:Date=Date(), during:NSInteger=0, address:String="", pathFile:String="", pcmPathFile:String="", txt:String="", delete:Bool=false, deleteDate:Date=Date(), fileData:Data=Data()) {
self.time = time self.time = time
self.during = during self.during = during
self.address = address self.address = address
self.pathFile = pathFile self.pathFile = pathFile
self.pcmPathFile = pcmPathFile
self.txt = txt self.txt = txt
self.delete = delete self.delete = delete
self.deleteDate = deleteDate self.deleteDate = deleteDate
self.fileData = fileData
} }
//从object解析回来 //从object解析回来
...@@ -40,9 +45,11 @@ class SHRecordModel: NSObject, NSCoding{ ...@@ -40,9 +45,11 @@ class SHRecordModel: NSObject, NSCoding{
self.during = (decoder.decodeObject(forKey: "during") as? NSInteger)! self.during = (decoder.decodeObject(forKey: "during") as? NSInteger)!
self.address = decoder.decodeObject(forKey: "address") as? String ?? "" self.address = decoder.decodeObject(forKey: "address") as? String ?? ""
self.pathFile = decoder.decodeObject(forKey: "pathFile") as? String ?? "" self.pathFile = decoder.decodeObject(forKey: "pathFile") as? String ?? ""
self.pcmPathFile = decoder.decodeObject(forKey: "pcmPathFile") as? String ?? ""
self.txt = decoder.decodeObject(forKey: "txt") as? String ?? "" self.txt = decoder.decodeObject(forKey: "txt") as? String ?? ""
self.delete = decoder.decodeObject(forKey: "delete") as? Bool ?? false self.delete = decoder.decodeObject(forKey: "delete") as? Bool ?? false
self.deleteDate = (decoder.decodeObject(forKey: "deleteDate") as? Date)! self.deleteDate = (decoder.decodeObject(forKey: "deleteDate") as? Date)!
self.fileData = (decoder.decodeObject(forKey: "fileData") as? Data)!
} }
//编码成object //编码成object
...@@ -51,8 +58,10 @@ class SHRecordModel: NSObject, NSCoding{ ...@@ -51,8 +58,10 @@ class SHRecordModel: NSObject, NSCoding{
coder.encode(during, forKey:"during") coder.encode(during, forKey:"during")
coder.encode(address, forKey:"address") coder.encode(address, forKey:"address")
coder.encode(pathFile, forKey:"pathFile") coder.encode(pathFile, forKey:"pathFile")
coder.encode(pcmPathFile, forKey:"pcmPathFile")
coder.encode(txt, forKey:"txt") coder.encode(txt, forKey:"txt")
coder.encode(delete, forKey:"delete") coder.encode(delete, forKey:"delete")
coder.encode(deleteDate, forKey:"deleteDate") coder.encode(deleteDate, forKey:"deleteDate")
coder.encode(fileData, forKey:"fileData")
} }
} }
...@@ -175,7 +175,9 @@ class SHRecordShowViewController: SHBaseViewController { ...@@ -175,7 +175,9 @@ class SHRecordShowViewController: SHBaseViewController {
} }
do { do {
let documentsFile = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!.appending(model!.pathFile) let documentsFile = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!.appending(model!.pathFile)
// documentsFile = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!.appending(model!.pcmPathFile)
// player = try AVAudioPlayer.init(data: model!.fileData)
player = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: documentsFile)) player = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: documentsFile))
print("歌曲长度:\(player!.duration)") print("歌曲长度:\(player!.duration)")
player!.play() player!.play()
......
...@@ -11,7 +11,7 @@ import AVFoundation ...@@ -11,7 +11,7 @@ import AVFoundation
import Speech import Speech
import PDFGenerator import PDFGenerator
class SHRecordViewController: SHBaseViewController { class SHRecordViewController: SHBaseViewController{
private var waveView: SHRecordWaveView! private var waveView: SHRecordWaveView!
...@@ -42,8 +42,9 @@ class SHRecordViewController: SHBaseViewController { ...@@ -42,8 +42,9 @@ class SHRecordViewController: SHBaseViewController {
// url : 录音文件的路径 // url : 录音文件的路径
var wav_file_path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/record.wav") var wav_file_path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/record.wav")
var txt_file_path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/text.txt") var mp3_file_path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/record.mp3")
var postfix_wav_file_path = "" var pcm_file_path = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/xbMixData.caf")
var session:AVAudioSession { var session:AVAudioSession {
let session:AVAudioSession = AVAudioSession.sharedInstance() let session:AVAudioSession = AVAudioSession.sharedInstance()
do { do {
...@@ -56,6 +57,7 @@ class SHRecordViewController: SHBaseViewController { ...@@ -56,6 +57,7 @@ class SHRecordViewController: SHBaseViewController {
var player: AVAudioPlayer? var player: AVAudioPlayer?
lazy var recorder: AVAudioRecorder? = self.getRecorder() lazy var recorder: AVAudioRecorder? = self.getRecorder()
var recorder_mp3: SHMp3RecordManager = SHMp3RecordManager.shared()
// 创建语音识别器,指定语音识别的语言环境 locale ,将来会转化为什么语言,这里是使用的当前区域,那肯定就是简体汉语啦 // 创建语音识别器,指定语音识别的语言环境 locale ,将来会转化为什么语言,这里是使用的当前区域,那肯定就是简体汉语啦
// private let speechRecognizer = SFSpeechRecognizer(locale: Locale.autoupdatingCurrent) // private let speechRecognizer = SFSpeechRecognizer(locale: Locale.autoupdatingCurrent)
...@@ -72,12 +74,12 @@ class SHRecordViewController: SHBaseViewController { ...@@ -72,12 +74,12 @@ class SHRecordViewController: SHBaseViewController {
override func viewDidLoad() { override func viewDidLoad() {
super.viewDidLoad() super.viewDidLoad()
setupUI() setupUI()
configPathFile() configPathFile()
configRecorder() configRecorder()
} }
override func setupUI() { override func setupUI() {
super.setupUI() super.setupUI()
...@@ -166,8 +168,6 @@ class SHRecordViewController: SHBaseViewController { ...@@ -166,8 +168,6 @@ class SHRecordViewController: SHBaseViewController {
return return
} }
// play()
stopRecord() stopRecord()
save = true save = true
...@@ -202,6 +202,7 @@ class SHRecordViewController: SHBaseViewController { ...@@ -202,6 +202,7 @@ class SHRecordViewController: SHBaseViewController {
if sender.isSelected == true { if sender.isSelected == true {
self.view.sendSubviewToBack(maskView) self.view.sendSubviewToBack(maskView)
recorder?.record() recorder?.record()
recorder_mp3.start()
start = true start = true
configSpeechTask() configSpeechTask()
...@@ -211,7 +212,8 @@ class SHRecordViewController: SHBaseViewController { ...@@ -211,7 +212,8 @@ class SHRecordViewController: SHBaseViewController {
}else{ }else{
recorder?.pause() recorder?.pause()
recorder_mp3.pause()
start = false start = false
recognitionTask?.cancel() recognitionTask?.cancel()
...@@ -237,7 +239,7 @@ class SHRecordViewController: SHBaseViewController { ...@@ -237,7 +239,7 @@ class SHRecordViewController: SHBaseViewController {
AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.min.rawValue)) AVEncoderAudioQualityKey: NSNumber(value: Int32(AVAudioQuality.min.rawValue))
] ]
do { do {
let recorder = try AVAudioRecorder(url: URL(fileURLWithPath: wav_file_path!) as URL, settings: configDic) let recorder = try AVAudioRecorder(url: URL(fileURLWithPath: wav_file_path!), settings: configDic)
recorder.isMeteringEnabled = true recorder.isMeteringEnabled = true
// 准备录音(系统会给我们分配一些资源) // 准备录音(系统会给我们分配一些资源)
recorder.prepareToRecord() recorder.prepareToRecord()
...@@ -247,45 +249,43 @@ class SHRecordViewController: SHBaseViewController { ...@@ -247,45 +249,43 @@ class SHRecordViewController: SHBaseViewController {
return nil return nil
} }
} }
private func configPathFile(){ private func configPathFile(){
let fileManager = FileManager.default let fileManager = FileManager.default
let fileArr = SHRecordViewController.getAllFilePath((NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!) let fileArr = SHRecordViewController.getAllFilePath((NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!)
let subFile = Date().format("yyyyMMdd") let subFile = Date().format("yyyyMMdd")
var wavFilePath = ""
var txtFilePath = ""
var exist = false var exist = false
let time = Date.init().timeStamp let time = Date.init().timeStamp
for (_, documentsFile) in fileArr!.enumerated() { for (_, documentsFile) in fileArr!.enumerated() {
// let documentsFile = fileArr![index] as String
if documentsFile == NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/\(subFile)") { if documentsFile == NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/\(subFile)") {
postfix_wav_file_path = "/"+subFile+time+"/record.wav"
let path = documentsFile.appending(time) let path = documentsFile.appending(time)
wavFilePath = path.appending("/record.wav") mp3_file_path = "/"+subFile+time+"/record.mp3"
txtFilePath = path.appending("/text.txt") pcm_file_path = "/"+subFile+time+"/xbMixData.caf"
try! fileManager.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil) try! fileManager.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil)
recorder_mp3.configMp3Path((NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!+mp3_file_path!, pcmPath: (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!+pcm_file_path!)
exist = true exist = true
break break
} }
} }
if exist == false { if exist == false {
let path = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/\(subFile)").appending(time))! let path = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first?.appending("/\(subFile)").appending(time))!
postfix_wav_file_path = "/"+subFile+time+"/record.wav"
wavFilePath = path.appending("/record.wav") mp3_file_path = "/"+subFile+time+"/record.mp3"
txtFilePath = path.appending("/text.txt") pcm_file_path = "/"+subFile+time+"/xbMixData.caf"
try! fileManager.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil) try! fileManager.createDirectory(atPath: path, withIntermediateDirectories: true, attributes: nil)
recorder_mp3.configMp3Path((NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!+mp3_file_path!, pcmPath: (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!+pcm_file_path!)
} }
wav_file_path = wavFilePath NSLog("mp3_file_path=======\(String(describing: mp3_file_path)) pcm_file_path=======\(String(describing: pcm_file_path))")
txt_file_path = txtFilePath
NSLog("wav_file_path==========\(String(describing: wav_file_path))")
} }
private func configRecorder(){ private func configRecorder(){
...@@ -380,6 +380,7 @@ class SHRecordViewController: SHBaseViewController { ...@@ -380,6 +380,7 @@ class SHRecordViewController: SHBaseViewController {
//结束录音 //结束录音
func stopRecord() { func stopRecord() {
recorder_mp3.stop()
if let recorder = self.recorder { if let recorder = self.recorder {
if recorder.isRecording { if recorder.isRecording {
print("正在录音,马上结束它,文件保存到了:\(wav_file_path!)") print("正在录音,马上结束它,文件保存到了:\(wav_file_path!)")
...@@ -405,7 +406,8 @@ class SHRecordViewController: SHBaseViewController { ...@@ -405,7 +406,8 @@ class SHRecordViewController: SHBaseViewController {
model.time = Date() model.time = Date()
model.address = currentAddress ?? "" model.address = currentAddress ?? ""
model.txt = recognitionTaskText.first ?? (self.currentTxt ?? "") model.txt = recognitionTaskText.first ?? (self.currentTxt ?? "")
model.pathFile = postfix_wav_file_path model.pathFile = mp3_file_path!
model.pcmPathFile = pcm_file_path!
model.during = seconds model.during = seconds
let dic = ["time":model.time, "address":model.address, "txt":model.txt, "pathFile":model.pathFile, "during":model.during, "delete":model.delete] as [String : Any] let dic = ["time":model.time, "address":model.address, "txt":model.txt, "pathFile":model.pathFile, "during":model.during, "delete":model.delete] as [String : Any]
...@@ -429,6 +431,39 @@ class SHRecordViewController: SHBaseViewController { ...@@ -429,6 +431,39 @@ class SHRecordViewController: SHBaseViewController {
CRUserDefaults.recordList = list CRUserDefaults.recordList = list
currentModel = model currentModel = model
// do{
//// let documentsFile = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!.appending(model.pathFile)
//// let url = URL.init(fileURLWithPath: documentsFile)
////
//// let data = try Data.init(contentsOf: url)
//
// let dic = ["time":model.time, "address":model.address, "txt":model.txt, "pathFile":model.pathFile, "during":model.during, "delete":model.delete, "fileData":recorder_ss_data] as [String : Any]
//
// let keyValueStore = NSUbiquitousKeyValueStore.default
// var list = keyValueStore.object(forKey: "list") as? [Dictionary<String, Any>]
// if list == nil {
// list = [dic]
// }else{
// var contains = false
// for (index, subDic) in list!.enumerated(){
// if (subDic["pathFile"] as! String) == model.pathFile {
// contains = true
// list![index] = dic
// break
// }
// }
// if contains == false {
// list!.append(dic)
// }
// }
// keyValueStore.set(list, forKey: "list")
// keyValueStore.synchronize()
//
// } catch {
//
// }
} }
func removeCurrentRecored(_ model:SHRecordModel){ func removeCurrentRecored(_ model:SHRecordModel){
...@@ -454,25 +489,6 @@ class SHRecordViewController: SHBaseViewController { ...@@ -454,25 +489,6 @@ class SHRecordViewController: SHBaseViewController {
self.recognitionRequest = nil self.recognitionRequest = nil
self.recognitionTask = nil self.recognitionTask = nil
} }
//播放
func play() {
//设置外放模式,不然录音会用听筒模式播放,就很小声
if session.category != AVAudioSession.Category.playback {
do{
try session.setCategory(AVAudioSession.Category.playback)
} catch{
print("外放模式设置失败")
}
}
do {
player = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: wav_file_path!))
print("歌曲长度:\(player!.duration)")
player!.play()
} catch let err {
print("播放失败:\(err.localizedDescription)")
}
}
deinit { deinit {
waveTimer?.invalidate() waveTimer?.invalidate()
...@@ -492,10 +508,6 @@ extension SHRecordViewController{ ...@@ -492,10 +508,6 @@ extension SHRecordViewController{
let second = (seconds)%60; let second = (seconds)%60;
let timeString = String(format: "%02lu:%02lu:%02lu", hours, minutes, second) let timeString = String(format: "%02lu:%02lu:%02lu", hours, minutes, second)
secondsLabel.text = timeString secondsLabel.text = timeString
if seconds%2 == 0{
saveContent()
}
} }
@objc private func updateMeters() { @objc private func updateMeters() {
......
//
// SHCloudManager.swift
// ShorthandMaster
//
// Created by 明津李 on 2020/8/31.
// Copyright © 2020 明津李. All rights reserved.
//
import UIKit
import CloudKit
class SHCloudManager: NSObject {
@objc static let shared = SHCloudManager()
private override init() {}
let recordName = "Recording"
lazy var container = CKContainer.default()
lazy var resultRecords:[CKRecord] = []
typealias handler = ((Bool) -> ())
var statusHandler: handler?
// func cheakAccountStatus(_ handler:@escaping handler){
//
// statusHandler = handler
//
// let container = CKContainer.default()
// container.accountStatus { (status, statusError) in
//
// if let error = statusError {
// self.statusHandler?(false)
// print("\(error.localizedDescription)")
// }else{
// switch status {
// case .available:
// self.statusHandler?(true)
// break;
// default:
// self.statusHandler?(false)
// break;
// }
// }
// }
// }
func getAllRecordDataSource(){
let database = container.publicCloudDatabase
let predicate = NSPredicate.init(value: true)
let query = CKQuery.init(recordType: recordName, predicate: predicate)
database.perform(query, inZoneWith: nil) { (records, queryError) in
if let error = queryError {
print("\(error.localizedDescription)")
}else{
self.resultRecords = records!
}
}
}
func addNewRecord(_ model: SHRecordModel){
let documentsFile = (NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first)!.appending(model.pathFile)
let url = URL.init(fileURLWithPath: documentsFile)
let asset = CKAsset.init(fileURL: url)
let database = container.publicCloudDatabase
let newRecording = CKRecord.init(recordType: recordName)
newRecording.setValue(model.address, forKey: "title")
newRecording.setValue(model.address, forKey: "address")
newRecording.setValue(model.txt, forKey: "content")
newRecording.setValue(model.time, forKey: "createTime")
newRecording.setValue(0, forKey: "delete")
newRecording.setValue(asset, forKey: "recordAsset")
database.save(newRecording) { (subscription, saveError) in
if let error = saveError {
print("\(error.localizedDescription)")
}else{
}
}
}
func modifyRecord(){
let database = container.publicCloudDatabase
let record = resultRecords.first
database.fetch(withRecordID: record!.recordID) { (record, fetchError) in
if let error = fetchError {
print("\(error.localizedDescription)")
}else {
record![""] = ""
database.save(record!) { (saveRecord, saveError) in
if let error = saveError {
print("\(error.localizedDescription)")
}else{
}
}
}
}
}
func deleteRecord(){
let database = container.publicCloudDatabase
let record = resultRecords.first
database.delete(withRecordID: record!.recordID) { (recordID, deleteError) in
if let error = deleteError {
print("\(error.localizedDescription)")
}else{
}
}
}
}
...@@ -3,8 +3,6 @@ ...@@ -3,8 +3,6 @@
// //
#import "MBProgressHUD+MJ.h" #import "MBProgressHUD+MJ.h"
//#import "LameTool.h"
#import "AESCipher.h" #import "AESCipher.h"
#import "UIView+CornerRadii.h" #import "UIView+CornerRadii.h"
//#import "MJRefresh.h" #import "SHMp3RecordManager.h"
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>aps-environment</key>
<string>development</string>
<key>com.apple.developer.icloud-container-identifiers</key>
<array>
<string>iCloud.com.ShorthandMaster.www</string>
</array>
<key>com.apple.developer.icloud-services</key>
<array>
<string>CloudKit</string>
<string>CloudDocuments</string>
</array>
<key>com.apple.developer.ubiquity-container-identifiers</key>
<array>
<string>iCloud.com.ShorthandMaster.www</string>
</array>
<key>com.apple.developer.ubiquity-kvstore-identifier</key>
<string>$(TeamIdentifierPrefix)$(CFBundleIdentifier)</string>
</dict>
</plist>