AudioQueue采集音频并保存

90 阅读7分钟

学习大神资料

  • 本文章只是自己练手,所有代码都来自大神的文章,特此感谢大神的分享

  • AudioFileHandler类 将采集的音频数据存储

#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>

/// Writes captured audio (from an Audio Queue or an Audio Converter) into a
/// .caf file under Documents/Voice, including the VBR "magic cookie" header
/// when the format requires one.
@interface AudioFileHandler : NSObject

// Shared singleton (alloc/copy are overridden in the implementation to return it).
+(AudioFileHandler *)shareInstance;

/*
 Append recorded audio packets to the currently open record file.
 inNumBytes   - byte count of the data in inBuffer
 ioNumPackets - number of packets contained in inBuffer
 inBuffer     - the raw captured audio data
 inPacketDesc - packet descriptions (needed for VBR data; may be NULL for CBR)
 */
- (void)writeFileWithInNumBytes:(UInt32)inNumBytes
                   ioNumPackets:(UInt32 )ioNumPackets
                       inBuffer:(const void *)inBuffer
                   inPacketDesc:(const AudioStreamPacketDescription*)inPacketDesc;

#pragma mark - Audio Queue
/**
 * Start / Stop record By Audio Queue.
 * Pass isNeedMagicCookie = YES for compressed (VBR) formats such as AAC.
 */
-(void)startVoiceRecordByAudioQueue:(AudioQueueRef)audioQueue
                  isNeedMagicCookie:(BOOL)isNeedMagicCookie
                          audioDesc:(AudioStreamBasicDescription)audioDesc;

-(void)stopVoiceRecordByAudioQueue:(AudioQueueRef)audioQueue
                   needMagicCookie:(BOOL)isNeedMagicCookie;


/**
 * Start / Stop record By Audio Converter.
 */
-(void)startVoiceRecordByAudioUnitByAudioConverter:(AudioConverterRef)audioConverter
                                   needMagicCookie:(BOOL)isNeedMagicCookie
                                         audioDesc:(AudioStreamBasicDescription)audioDesc;

-(void)stopVoiceRecordAudioConverter:(AudioConverterRef)audioConverter
                     needMagicCookie:(BOOL)isNeedMagicCookie;

@end
#import "AudioFileHandler.h"

@interface AudioFileHandler ()
{
    AudioFileID m_recordFile;               // file currently being written (invalid when no session is open)
    SInt64      m_recordCurrentPacket;      // current packet number in record file
}

// Full path of the file being recorded; set when a session starts.
@property (nonatomic, copy) NSString *recordFilePath;

@end

@implementation AudioFileHandler

#pragma mark - public
// Begin a file-recording session fed by an Audio Converter: create the
// destination .caf file and, for VBR formats, write the magic cookie up front.
-(void)startVoiceRecordByAudioUnitByAudioConverter:(AudioConverterRef)audioConverter needMagicCookie:(BOOL)isNeedMagicCookie audioDesc:(AudioStreamBasicDescription)audioDesc {
    NSString *filePath = [self createFilePath];
    self.recordFilePath = filePath;
    NSLog(@"采集的音频文件存储地址:%@",self.recordFilePath);

    m_recordFile = [self createAudioFileWithFilePath:filePath AudioDesc:audioDesc];

    // VBR data (e.g. AAC) stores its codec configuration in a "magic cookie";
    // write it into the file header before any packets arrive.
    if (isNeedMagicCookie) {
        [self copyEncoderCookieToFileByAudioConverter:audioConverter inFile:m_recordFile];
    }
}

// End a converter-fed recording session: rewrite the (possibly updated) magic
// cookie, close the file, and reset the packet cursor.
-(void)stopVoiceRecordAudioConverter:(AudioConverterRef)audioConverter needMagicCookie:(BOOL)isNeedMagicCookie {
    // Some encoders update the magic cookie while encoding, so write it once
    // more before the file is closed.
    if (isNeedMagicCookie) {
        [self copyEncoderCookieToFileByAudioConverter:audioConverter inFile:m_recordFile];
    }

    AudioFileClose(m_recordFile);
    m_recordCurrentPacket = 0;  // reset the write cursor for the next session
}

// Begin a file-recording session fed by an Audio Queue: create the destination
// .caf file and, for VBR formats, write the magic cookie up front.
-(void)startVoiceRecordByAudioQueue:(AudioQueueRef)audioQueue isNeedMagicCookie:(BOOL)isNeedMagicCookie audioDesc:(AudioStreamBasicDescription)audioDesc {
    NSString *path = [self createFilePath];
    self.recordFilePath = path;
    NSLog(@"%s - record file path:%@",__func__,self.recordFilePath);

    m_recordFile = [self createAudioFileWithFilePath:path AudioDesc:audioDesc];

    // VBR formats (e.g. AAC) carry codec configuration in a magic cookie;
    // write it into the header before the first packets arrive.
    if (isNeedMagicCookie) {
        [self copyEncoderCookieToFileByAudioQueue:audioQueue inFile:m_recordFile];
    }
}

// End a queue-fed recording session: rewrite the (possibly updated) magic
// cookie, close the file, and reset the packet cursor.
-(void)stopVoiceRecordByAudioQueue:(AudioQueueRef)audioQueue needMagicCookie:(BOOL)isNeedMagicCookie {
    // Encoders may finalize the cookie during encoding; confirm it before closing.
    if (isNeedMagicCookie) {
        [self copyEncoderCookieToFileByAudioQueue:audioQueue inFile:m_recordFile];
    }

    AudioFileClose(m_recordFile);
    m_recordCurrentPacket = 0;  // reset the write cursor for the next session
}

// Append one batch of captured packets to the open record file and advance the
// packet cursor by however many packets were actually written.
- (void)writeFileWithInNumBytes:(UInt32)inNumBytes ioNumPackets:(UInt32 )ioNumPackets inBuffer:(const void *)inBuffer inPacketDesc:(const AudioStreamPacketDescription*)inPacketDesc {
    if (!m_recordFile) {
        return;  // no open file — drop the data
    }

    // AudioFileWritePackets updates this in/out count with the number of
    // packets it actually wrote.
    UInt32 packetCount = ioNumPackets;
    OSStatus status = AudioFileWritePackets(m_recordFile,
                                            false,              // don't cache
                                            inNumBytes,
                                            inPacketDesc,       // NULL for CBR data
                                            m_recordCurrentPacket,
                                            &packetCount,
                                            inBuffer);
    if (status != noErr) {
        NSLog(@"%s - write file status = %d \n",__func__,(int)status);
        return;
    }

    m_recordCurrentPacket += packetCount;  // 用于记录起始位置
}

#pragma mark - private
// 音频数据存储地址
// Build a unique, timestamped .caf path under Documents/Voice, creating the
// directory if needed. Returns the full file path (the file itself is not created).
- (NSString *)createFilePath {
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    dateFormatter.dateFormat = @"yyyy_MM_dd__HH_mm_ss";
    NSString *date = [dateFormatter stringFromDate:[NSDate date]];

    NSArray *searchPaths    = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
                                                                  NSUserDomainMask,
                                                                  YES);
    NSString *documentPath  = [[searchPaths firstObject] stringByAppendingPathComponent:@"Voice"];

    // AudioFileCreateWithURL fails when the parent directory does not exist,
    // so create it first. Fix: surface the creation error instead of passing
    // error:nil and silently ignoring failures.
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if (![fileManager fileExistsAtPath:documentPath]) {
        NSError *dirError = nil;
        if (![fileManager createDirectoryAtPath:documentPath
                    withIntermediateDirectories:YES
                                     attributes:nil
                                          error:&dirError]) {
            NSLog(@"%s - create directory failed: %@",__func__,dirError);
        }
    }

    NSString *fullFileName  = [NSString stringWithFormat:@"%@.caf",date];
    NSString *filePath      = [documentPath stringByAppendingPathComponent:fullFileName];
    return filePath;
}

// Create (or overwrite) a CAF audio file at filePath with the given format.
// Returns the AudioFileID, or NULL on failure.
- (AudioFileID)createAudioFileWithFilePath:(NSString *)filePath AudioDesc:(AudioStreamBasicDescription)audioDesc {
    // Fix: CFURLCreateWithString expects a URL string (scheme + percent-escapes),
    // not a POSIX path — a path containing spaces or non-ASCII characters would
    // yield NULL or a broken URL. Build a proper file URL from the path instead.
    CFURLRef url = CFURLCreateWithFileSystemPath(kCFAllocatorDefault,
                                                 (__bridge CFStringRef)filePath,
                                                 kCFURLPOSIXPathStyle,
                                                 false);

    // Fix: initialize so a failed create returns NULL rather than stack garbage.
    AudioFileID audioFile = NULL;
    OSStatus status = AudioFileCreateWithURL(url,
                                             kAudioFileCAFType,
                                             &audioDesc,
                                             kAudioFileFlags_EraseFile,
                                             &audioFile);
    if (status != noErr) {
        NSLog(@"AudioFileCreateWithURL Failed, status:%d",(int)status);
    }

    if (url) {
        CFRelease(url);  // guard: CFRelease(NULL) crashes
    }

    return audioFile;
}

#pragma mark Magic Cookie
// Copy the encoder's magic cookie (codec configuration) from the audio queue
// into the destination file's header. No-op for formats without a cookie.
- (void)copyEncoderCookieToFileByAudioQueue:(AudioQueueRef)inQueue inFile:(AudioFileID)inFile {
    UInt32 cookieSize = 0;
    OSStatus result = AudioQueueGetPropertySize (
                                        inQueue,
                                        kAudioQueueProperty_MagicCookie,
                                        &cookieSize
                                        );
    if (result != noErr) {
        NSLog(@"%s - Magic cookie: get size failed.",__func__);
        return;
    }

    // Fix: guard against a zero-length cookie (e.g. linear PCM) — previously
    // this would malloc(0) and try to set an empty cookie. This also makes the
    // behavior consistent with copyEncoderCookieToFileByAudioConverter:inFile:.
    if (cookieSize == 0) {
        return;
    }

    char *magicCookie = (char *) malloc (cookieSize);
    result = AudioQueueGetProperty (
                                   inQueue,
                                   kAudioQueueProperty_MagicCookie,
                                   magicCookie,
                                   &cookieSize
                                   );
    if (result == noErr) {
        result = AudioFileSetProperty (
                                       inFile,
                                       kAudioFilePropertyMagicCookieData,
                                       cookieSize,
                                       magicCookie
                                       );
        if (result == noErr) {
            NSLog(@"%s - set Magic cookie successful.",__func__);
        }else {
            NSLog(@"%s - set Magic cookie failed.",__func__);
        }
    }else {
        NSLog(@"%s - get Magic cookie failed.",__func__);
    }
    free (magicCookie);
}

// Copy the encoder's magic cookie (codec configuration, e.g. AAC's setup data)
// from the converter into the destination file's header. Called both when the
// file is opened and again at close, since some encoders update the cookie
// while encoding. A missing cookie is not an error — many formats have none.
-(void)copyEncoderCookieToFileByAudioConverter:(AudioConverterRef)audioConverter inFile:(AudioFileID)inFile {
    // Grab the cookie from the converter and write it to the destination file.
    UInt32 cookieSize = 0;
    OSStatus error = AudioConverterGetPropertyInfo(audioConverter, kAudioConverterCompressionMagicCookie, &cookieSize, NULL);
    
    if (error == noErr && cookieSize != 0) {
        char *cookie = (char *)malloc(cookieSize * sizeof(char));
        error        = AudioConverterGetProperty(audioConverter, kAudioConverterCompressionMagicCookie, &cookieSize, cookie);
        
        if (error == noErr) {
            error = AudioFileSetProperty(inFile, kAudioFilePropertyMagicCookieData, cookieSize, cookie);
            if (error == noErr) {
                // Query back whether the file format actually accepted the cookie.
                UInt32 willEatTheCookie = false;
                error = AudioFileGetPropertyInfo(inFile, kAudioFilePropertyMagicCookieData, NULL, &willEatTheCookie);
                if (error == noErr) {
                    NSLog(@"%s - Writing magic cookie to destination file: %u   cookie:%d \n",__func__, (unsigned int)cookieSize, willEatTheCookie);
                }else {
                    NSLog(@"%s - Could not Writing magic cookie to destination file status:%d \n",__func__,(int)error);
                }
            } else {
                NSLog(@"%s - Even though some formats have cookies, some files don't take them and that's OK,set cookie status:%d \n",__func__,(int)error);
            }
        } else {
            NSLog(@"%s - Could not Get kAudioConverterCompressionMagicCookie from Audio Converter!\n status:%d ",__func__,(int)error);
        }
        
        free(cookie);
    }else {
        // If there is an error here, then the format doesn't have a cookie - this is perfectly fine as some formats do not.
        NSLog(@"%s - cookie status:%d, %d \n",__func__,(int)error, cookieSize);
    }
}

#pragma mark - 单例
// Singleton accessor; the alloc/copy overrides below funnel every creation
// path here so only one instance can ever exist.
+ (AudioFileHandler *)shareInstance
{
    static AudioFileHandler *singleton = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // Go through super to bypass the allocWithZone: override below.
        singleton = [[super allocWithZone:NULL] init];
    });
    return singleton;
}

+ (instancetype)allocWithZone:(struct _NSZone *)zone
{
    return [AudioFileHandler shareInstance];
}

-(instancetype)copyWithZone:(struct _NSZone *)zone
{
    return [AudioFileHandler shareInstance];
}

-(instancetype)mutableCopyWithZone:(struct _NSZone *)zone
{
    return [AudioFileHandler shareInstance];
}

@end
  • AudioQueueCapture 采集音频类
#import <Foundation/Foundation.h>
#import <AudioToolbox/AudioToolbox.h>

NS_ASSUME_NONNULL_BEGIN

/// Captures microphone audio with Audio Queue Services and optionally streams
/// the captured packets into a file via AudioFileHandler.
@interface AudioQueueCapture : NSObject

// YES while the audio queue is running (capturing).
@property (nonatomic, assign, readonly) BOOL isRunning;
// YES while captured packets are also being written to the record file.
@property (nonatomic, assign) BOOL isRecordVoice;

// Shared singleton (alloc/copy are overridden in the implementation to return it).
+(AudioQueueCapture *)shareInstance;

/**
 * Start / Stop Audio Queue
 */
- (void)startAudioCapture;
- (void)stopAudioCapture;


/**
 * Start / Pause / Stop record file
 */
- (void)startRecordFile;
- (void)pauseAudioCapture;
- (void)stopRecordFile;


/**
 * free related resources
 */
- (void)freeAudioCapture;

@end
#import "AudioQueueCapture.h"
#import <AVFoundation/AVFoundation.h>
#import "AudioFileHandler.h"

#define kAudioPCMFramesPerPacket 1   // linear PCM: one frame per packet
#define kAudioPCMBitsPerChannel  16  // 16-bit samples

static const int kNumberBuffers = 3; // triple-buffering for the input queue

// Bundles the stream format, the input queue, and its buffers for the recorder.
struct RecorderInfo {
    AudioStreamBasicDescription  mDataFormat;  // format actually used by the queue
    AudioQueueRef                mQueue;       // input (recording) audio queue
    AudioQueueBufferRef          mBuffers[kNumberBuffers];
};
typedef struct RecorderInfo *RecorderInfoType;

// Allocated once in +initialize and shared by the singleton instance.
static RecorderInfoType m_audioInfo;

@interface AudioQueueCapture ()

// Redeclared readwrite internally; exposed readonly in the public header.
@property (nonatomic, assign, readwrite) BOOL isRunning;

@end

@implementation AudioQueueCapture

#pragma mark - Callback
// Audio Queue input callback: invoked on an internal Audio Queue thread each
// time a capture buffer has been filled. Optionally writes the data to file,
// then re-enqueues the buffer so capture can continue.
static void CaptureAudioDataCallback(void *                                 inUserData,
                                     AudioQueueRef                          inAQ,
                                     AudioQueueBufferRef                    inBuffer,
                                     const AudioTimeStamp *                 inStartTime,
                                     UInt32                                 inNumPackets,
                                     const AudioStreamPacketDescription*    inPacketDesc) {
    
    // inUserData is the AudioQueueCapture instance passed to AudioQueueNewInput.
    AudioQueueCapture *instance = (__bridge AudioQueueCapture *)inUserData;
    
    /*  Test audio fps
    static Float64 lastTime = 0;
    Float64 currentTime = CMTimeGetSeconds(CMClockMakeHostTimeFromSystemUnits(inStartTime->mHostTime))*1000;
    NSLog(@"Test duration - %f",currentTime - lastTime);
    lastTime = currentTime;
    */
    
    /*  Test size
    if (inPacketDesc) {
        NSLog(@"Test data: %d,%d,%d,%d",inBuffer->mAudioDataByteSize,inNumPackets,inPacketDesc->mDataByteSize,inPacketDesc->mVariableFramesInPacket);
    }else {
        NSLog(@"Test data: %d,%d",inBuffer->mAudioDataByteSize,inNumPackets);
    }
    */
    
    if (instance.isRecordVoice) {
        // For CBR formats the queue may report 0 packets with no packet
        // descriptions; derive the packet count from the byte size instead.
        UInt32 bytesPerPacket = m_audioInfo->mDataFormat.mBytesPerPacket;
        if (inNumPackets == 0 && bytesPerPacket != 0) {
            inNumPackets = inBuffer->mAudioDataByteSize / bytesPerPacket;
        }
        
        [[AudioFileHandler shareInstance] writeFileWithInNumBytes:inBuffer->mAudioDataByteSize
                                                      ioNumPackets:inNumPackets
                                                          inBuffer:inBuffer->mAudioData
                                                      inPacketDesc:inPacketDesc];
    }
    
    // Hand the buffer back to the queue; once stopped, stop re-enqueueing.
    if (instance.isRunning) {
        AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
    }
}

// Build an AudioStreamBasicDescription for the requested format / rate / channels.
// For PCM every size field is fully determined; for AAC the sizes are left 0
// (VBR) and the queue/encoder fills in the rest.
// Note: for best results the sample rate should match the hardware rate
// (see AVAudioSession.sampleRate); here the caller-supplied rate is used as-is.
-(AudioStreamBasicDescription)getAudioFormatWithFormatID:(UInt32)formatID sampleRate:(Float64)sampleRate channelCount:(UInt32)channelCount {
    AudioStreamBasicDescription dataFormat = {0};

    // Fix: removed the deprecated AudioSessionGetProperty() queries for the
    // hardware sample rate / channel count — their results were fetched into
    // locals that were never used.
    dataFormat.mSampleRate       = sampleRate;
    dataFormat.mChannelsPerFrame = channelCount;
    dataFormat.mFormatID         = formatID;

    // Set detail audio format params
    if (formatID == kAudioFormatLinearPCM) {
        // Interleaved, packed, signed 16-bit integer samples.
        dataFormat.mFormatFlags     = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
        dataFormat.mBitsPerChannel  = kAudioPCMBitsPerChannel;
        dataFormat.mBytesPerPacket  = dataFormat.mBytesPerFrame = (dataFormat.mBitsPerChannel / 8) * dataFormat.mChannelsPerFrame;
        dataFormat.mFramesPerPacket = kAudioPCMFramesPerPacket;
    }else if (formatID == kAudioFormatMPEG4AAC) {
        dataFormat.mFormatFlags = kMPEG4Object_AAC_Main;
    }

    // Fix: "starup" typo and %d used with a UInt32 argument.
    NSLog(@"Audio Recorder: startup audio encoder:%f,%u",sampleRate,(unsigned int)channelCount);
    return dataFormat;
}

#pragma mark - Public
// Thin public wrappers: each forwards to the corresponding private worker,
// passing the shared recorder struct and a pointer to the _isRunning ivar.
- (void)startAudioCapture {
    [self startAudioCaptureWithAudioInfo:m_audioInfo
                               isRunning:&_isRunning];
}

- (void)pauseAudioCapture {
    [self pauseAudioCaptureWithAudioInfo:m_audioInfo
                               isRunning:&_isRunning];
}

- (void)stopAudioCapture {
    [self stopAudioQueueRecorderWithAudioInfo:m_audioInfo
                                    isRunning:&_isRunning];
}

- (void)freeAudioCapture {
    [self freeAudioQueueRecorderWithAudioInfo:m_audioInfo
                                    isRunning:&_isRunning];
}

// Open a new record file for the current queue format and start feeding the
// capture callback's data into it.
- (void)startRecordFile {
    // Uncompressed PCM stores everything it needs in the CAF header, so no
    // magic cookie; compressed formats (e.g. AAC) require one.
    BOOL isNeedMagicCookie = (m_audioInfo->mDataFormat.mFormatID != kAudioFormatLinearPCM);

    [[AudioFileHandler shareInstance] startVoiceRecordByAudioQueue:m_audioInfo->mQueue
                                                  isNeedMagicCookie:isNeedMagicCookie
                                                          audioDesc:m_audioInfo->mDataFormat];
    self.isRecordVoice = YES;
    NSLog(@"Audio Recorder: Start record file.");
}

// Stop feeding captured data into the file, finalize the cookie if needed,
// and close the file.
- (void)stopRecordFile {
    self.isRecordVoice = NO;  // stop the callback writing before closing

    // PCM needs no magic cookie; compressed formats do.
    BOOL isNeedMagicCookie = (m_audioInfo->mDataFormat.mFormatID != kAudioFormatLinearPCM);

    [[AudioFileHandler shareInstance] stopVoiceRecordByAudioQueue:m_audioInfo->mQueue
                                                   needMagicCookie:isNeedMagicCookie];
    NSLog(@"Audio Recorder: Stop record file.");
}

#pragma mark - Private
// One-time setup of the input Audio Queue: build the ASBD, create the queue,
// read back the format the system actually granted, compute a buffer size,
// then allocate and enqueue the capture buffers.
// NOTE(review): the isRunning parameter is never used in this method — confirm
// whether it was intended for future use.
- (void)configureAudioCaptureWithAudioInfo:(RecorderInfoType)audioInfo formatID:(UInt32)formatID sampleRate:(Float64)sampleRate channelCount:(UInt32)channelCount durationSec:(float)durationSec bufferSize:(UInt32)bufferSize isRunning:(BOOL *)isRunning {
    // Get Audio format ASBD
    audioInfo->mDataFormat = [self getAudioFormatWithFormatID:formatID
                                                   sampleRate:sampleRate
                                                 channelCount:channelCount];
    
    // Set sample time: the preferred I/O buffer duration drives callback cadence
    [[AVAudioSession sharedInstance] setPreferredIOBufferDuration:durationSec error:NULL];
    
    // New queue
    OSStatus status = AudioQueueNewInput(&audioInfo->mDataFormat,
                                         CaptureAudioDataCallback,
                                         (__bridge void *)(self),
                                         NULL,
                                         kCFRunLoopCommonModes,
                                         0,
                                         &audioInfo->mQueue);
    
    if (status != noErr) {
        NSLog(@"Audio Recorder: audio queue new input failed status:%d \n",(int)status);
    }
    
    // Read back the ASBD the queue actually uses (the system may adjust it)
    UInt32 size = sizeof(audioInfo->mDataFormat);
    status = AudioQueueGetProperty(audioInfo->mQueue,
                                   kAudioQueueProperty_StreamDescription,
                                   &audioInfo->mDataFormat,
                                   &size);
    if (status != noErr) {
        NSLog(@"Audio Recorder: get ASBD status:%d",(int)status);
    }
    
    // Set capture data size
    UInt32 maxBufferByteSize;
    if (audioInfo->mDataFormat.mFormatID == kAudioFormatLinearPCM) {
        int frames = (int)ceil(durationSec * audioInfo->mDataFormat.mSampleRate);
        // NOTE(review): mBytesPerFrame already accounts for all channels, so the
        // extra mChannelsPerFrame factor looks like an over-allocation — verify.
        maxBufferByteSize = frames*audioInfo->mDataFormat.mBytesPerFrame*audioInfo->mDataFormat.mChannelsPerFrame;
    }else {
        // AAC durationSec MIN: 23.219708 ms
        maxBufferByteSize = durationSec * audioInfo->mDataFormat.mSampleRate;
        
        if (maxBufferByteSize < 1024) {
            maxBufferByteSize = 1024;
        }
    }
    
    // Clamp the caller's requested size to the computed maximum (0 = use max)
    if (bufferSize > maxBufferByteSize || bufferSize == 0) {
        bufferSize = maxBufferByteSize;
    }
    
    // Allocate and Enqueue
    for (int i = 0; i != kNumberBuffers; i++) {
        status = AudioQueueAllocateBuffer(audioInfo->mQueue,
                                          bufferSize,
                                          &audioInfo->mBuffers[i]);
        if (status != noErr) {
            NSLog(@"Audio Recorder: Allocate buffer status:%d",(int)status);
        }
        
        status = AudioQueueEnqueueBuffer(audioInfo->mQueue,
                                         audioInfo->mBuffers[i],
                                         0,
                                         NULL);
        if (status != noErr) {
            NSLog(@"Audio Recorder: Enqueue buffer status:%d",(int)status);
        }
    }
}

// Start the audio queue if it is not already running; marks *isRunning on success.
- (BOOL)startAudioCaptureWithAudioInfo:(RecorderInfoType)audioInfo isRunning:(BOOL *)isRunning {
    if (*isRunning) {
        NSLog(@"Audio Recorder: Start recorder repeat");
        return NO;
    }

    OSStatus status = AudioQueueStart(audioInfo->mQueue, NULL);
    if (status == noErr) {
        NSLog(@"Audio Recorder: Audio Queue Start successful");
        *isRunning = YES;
        return YES;
    }

    NSLog(@"Audio Recorder: Audio Queue Start failed status:%d \n",(int)status);
    return NO;
}
// Pause the running audio queue; clears *isRunning on success.
- (BOOL)pauseAudioCaptureWithAudioInfo:(RecorderInfoType)audioInfo isRunning:(BOOL *)isRunning {
    if (!*isRunning) {
        NSLog(@"Audio Recorder: audio capture is not running !");
        return NO;
    }

    OSStatus status = AudioQueuePause(audioInfo->mQueue);
    if (status == noErr) {
        NSLog(@"Audio Recorder: Audio Queue pause successful");
        *isRunning = NO;
        return YES;
    }

    NSLog(@"Audio Recorder: Audio Queue pause failed status:%d \n",(int)status);
    return NO;
}

// Stop the audio queue synchronously. Returns YES on success.
-(BOOL)stopAudioQueueRecorderWithAudioInfo:(RecorderInfoType)audioInfo isRunning:(BOOL *)isRunning {
    if (*isRunning == NO) {
        NSLog(@"Audio Recorder: Stop recorder repeat \n");
        return NO;
    }
    
    if (!audioInfo->mQueue) {
        NSLog(@"Audio Recorder: stop Audio Queue failed, the queue is nil.");
        return NO;
    }
    
    // true = stop synchronously, draining queued buffers immediately.
    OSStatus stopRes = AudioQueueStop(audioInfo->mQueue, true);
    if (stopRes == noErr){
        // Fix: *isRunning was never cleared here, so a subsequent start was
        // rejected as a "repeat" and the callback kept re-enqueueing buffers.
        *isRunning = NO;
        NSLog(@"Audio Recorder: stop Audio Queue success.");
        return YES;
    }
    
    NSLog(@"Audio Recorder: stop Audio Queue failed.");
    return NO;
}

// Tear down the audio queue: stop it if still running, free its buffers, then
// dispose the queue. Returns YES only when the queue was successfully disposed.
-(BOOL)freeAudioQueueRecorderWithAudioInfo:(RecorderInfoType)audioInfo isRunning:(BOOL *)isRunning {
    if (*isRunning) {
        [self stopAudioQueueRecorderWithAudioInfo:audioInfo isRunning:isRunning];
    }
    
    if (!audioInfo->mQueue) {
        NSLog(@"Audio Recorder: free Audio Queue failed, the queue is nil.");
        return NO;
    }
    
    for (int i = 0; i < kNumberBuffers; i++) {
        AudioQueueFreeBuffer(audioInfo->mQueue, audioInfo->mBuffers[i]);
    }
    
    OSStatus status = AudioQueueDispose(audioInfo->mQueue, true);
    if (status != noErr) {
        // Fix: cast OSStatus to int for %d, matching every other log site.
        NSLog(@"Audio Recorder: Dispose failed: %d",(int)status);
        return NO;
    }
    
    audioInfo->mQueue = NULL;  // prevent double-dispose on a later call
    *isRunning = NO;
    NSLog(@"Audio Recorder: free AudioQueue successful.");
    return YES;
}


#pragma mark Other
// Compute a capture buffer size (in bytes) large enough to hold durationSec of
// audio in the given format. CBR formats are sized exactly; for VBR formats the
// queue's maximum output packet size is queried.
-(int)computeRecordBufferSizeFrom:(const AudioStreamBasicDescription *)format audioQueue:(AudioQueueRef)audioQueue durationSec:(float)durationSec {
    int packets = 0;
    int frames  = (int)ceil(durationSec * format->mSampleRate);
    int bytes   = 0;
    
    if (format->mBytesPerFrame > 0) {
        // Constant frame size (e.g. PCM): exact computation.
        bytes = frames * format->mBytesPerFrame;
    } else {
        // Fix: initialize — previously this was read uninitialized when the
        // property query below failed (it returns -50 for AAC).
        UInt32 maxPacketSize = 0;
        if (format->mBytesPerPacket > 0){   // CBR
            maxPacketSize = format->mBytesPerPacket;    // constant packet size
        }else { // VBR
            // AAC Format get kAudioQueueProperty_MaximumOutputPacketSize return -50. so the method is not effective.
            UInt32 propertySize = sizeof(maxPacketSize);
            OSStatus status     = AudioQueueGetProperty(audioQueue,
                                                        kAudioQueueProperty_MaximumOutputPacketSize,
                                                        &maxPacketSize,
                                                        &propertySize);
            if (status != noErr) {
                NSLog(@"%s: get max output packet size failed:%d",__func__,(int)status);
            }
        }
        
        if (format->mFramesPerPacket > 0)
            packets = frames / format->mFramesPerPacket;
        else
            packets = frames;    // worst-case scenario: 1 frame in a packet
        if (packets == 0)        // sanity check
            packets = 1;
        bytes = packets * (int)maxPacketSize;
    }
    
    return bytes;
}

// Debug helper: log every field of an AudioStreamBasicDescription, rendering
// the four-char format ID as a readable string.
- (void)printASBD:(AudioStreamBasicDescription)asbd {
    char formatIDString[5];
    // Swap to big-endian so the four characters come out in reading order.
    UInt32 formatID = CFSwapInt32HostToBig (asbd.mFormatID);
    // Fix: bcopy is deprecated POSIX; memcpy is the standard equivalent.
    memcpy (formatIDString, &formatID, 4);
    formatIDString[4] = '\0';
    
    NSLog (@"  Sample Rate:         %10.0f",  asbd.mSampleRate);
    NSLog (@"  Format ID:           %10s",    formatIDString);
    NSLog (@"  Format Flags:        %10X",    asbd.mFormatFlags);
    NSLog (@"  Bytes per Packet:    %10d",    asbd.mBytesPerPacket);
    NSLog (@"  Frames per Packet:   %10d",    asbd.mFramesPerPacket);
    NSLog (@"  Bytes per Frame:     %10d",    asbd.mBytesPerFrame);
    NSLog (@"  Channels per Frame:  %10d",    asbd.mChannelsPerFrame);
    NSLog (@"  Bits per Channel:    %10d",    asbd.mBitsPerChannel);
}

// The class is used as a singleton (see +allocWithZone:), so this normally
// never runs; keep it safe anyway.
- (void)dealloc {
    if (m_audioInfo) {
        // Fix: dispose the audio queue (and its buffers) before releasing the
        // backing struct — freeing the struct alone leaked the AudioQueue.
        [self freeAudioQueueRecorderWithAudioInfo:m_audioInfo isRunning:&_isRunning];
        free(m_audioInfo);
        m_audioInfo = NULL;  // avoid a dangling static pointer
    }
}

// Designated initializer: configures the shared audio queue (AAC, mono, 44.1 kHz,
// 50 ms callbacks). m_audioInfo was allocated in +initialize, which the runtime
// guarantees runs before the first message to this class.
- (instancetype)init
{
    if (self = [super init]) {
        [self configureAudioCaptureWithAudioInfo:m_audioInfo
                                        formatID:kAudioFormatMPEG4AAC // kAudioFormatLinearPCM
                                      sampleRate:44100
                                    channelCount:1
                                     durationSec:0.05
                                      bufferSize:1024
                                       isRunning:&self->_isRunning];
    }
    return self;
}


#pragma mark - Init
// Allocate the shared recorder struct before the class receives any message.
+ (void)initialize {
    // Fix: +initialize is also invoked once for every subclass; without the
    // guard the struct would be re-malloc'd (leaking the previous one).
    if (self == [AudioQueueCapture class]) {
        // Fix: calloc zero-fills the struct so mQueue starts out NULL instead
        // of garbage, making later `if (audioInfo->mQueue)` checks reliable
        // even if configuration fails.
        m_audioInfo = calloc(1, sizeof(struct RecorderInfo));
    }
}

#pragma mark - 单例
// Singleton accessor; the alloc/copy overrides below funnel every creation
// path here so only one instance can ever exist.
+ (AudioQueueCapture *)shareInstance
{
    static AudioQueueCapture *singleton = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        // Go through super to bypass the allocWithZone: override below.
        singleton = [[super allocWithZone:NULL] init];
    });
    return singleton;
}

+ (instancetype)allocWithZone:(struct _NSZone *)zone
{
    return [AudioQueueCapture shareInstance];
}

-(instancetype)copyWithZone:(struct _NSZone *)zone
{
    return [AudioQueueCapture shareInstance];
}

-(instancetype)mutableCopyWithZone:(struct _NSZone *)zone
{
    return [AudioQueueCapture shareInstance];
}

@end
[[AudioQueueCapture shareInstance] startAudioCapture]; // 开始采集数据
[[AudioQueueCapture shareInstance] startRecordFile]; // 开始存储音频数据
[[AudioQueueCapture shareInstance] stopRecordFile]; // 结束存储音频数据
[[AudioQueueCapture shareInstance] stopAudioCapture]; // 结束采集数据