Preface
This module is a small feature in our children's app. It accounted for roughly 40% of one iteration's requirements, with a 12.5-day development cycle that included the entire IM logic; the voice recording itself took one day.
Solution Analysis
Requirement: IM voice chat, built on Tencent's IM. The voice-recording part has its own UI design, roughly like this: a popup appears, a speech synthesizer first says "Now leave a message for xxx", recording starts immediately, a send button appears after 5 seconds, a voice message can be at most 30 s, and a waveform of the sound level is displayed while recording.
Options: 1. Use AVAudioRecorder: highly encapsulated and already proven in a previous project, but it was unclear whether it exposes the sound level (decibels). 2. Use AudioUnit: too fine-grained for this; you have to process raw audio buffers yourself.
Given the requirements, option 1 looks like the better fit.
Implementation
I'll just paste the code, with comments inline. The CallBack declarations and the other classes used for permissions are omitted; they're easy enough to follow.
#import <Foundation/Foundation.h>
#import "CallBackInterface.h" //一个通用的CallBack声明头
@interface AudioRecorder : NSObject
@property(nonatomic, copy) CallBackTwo callBack; ///< fired when recording finishes: (success flag, error)
@property(nonatomic, copy) CallBackOne canSendCallBack; ///< fired on every timer tick with the elapsed time
@property(nonatomic, assign) NSInteger volume; ///< current level, 0~100, for the waveform
@property(readonly, assign) BOOL isRecording;
- (BOOL)startRecord:(NSString *)storeFile;
- (void)cancelRecord;
- (void)endRecord;
- (void)resetEnvironment;
@end
#import "AudioRecorder.h"
#import <ReactiveCocoa.h>
#import <AVFoundation/AVFoundation.h>
#import "UALogger.h"
#import "PermissionUtils.h" //一些手机权限调用类,包括这次的micro权限请求
#import "HWWeakTimer.h" //这个就不说了搜一下
#import "NSFileManager+Message.h" //录音文件管理
@interface AudioRecorder () <AVAudioRecorderDelegate>
@property(nonatomic, strong) NSString *storeFile;
@property(nonatomic, strong) AVAudioRecorder *recorder;
@property(nonatomic, strong) NSTimer *timer;
@property(nonatomic, assign) NSTimeInterval time; ///< elapsed recording time
@end
@implementation AudioRecorder
- (void)dealloc {
[self endRecord];
}
- (BOOL)startRecord:(NSString *)storeFile {
self.storeFile = storeFile;
__block BOOL checkAuth = NO;
__block BOOL success = NO;
@weakify(self);
// request microphone permission
[PermissionUtils openMic:^(BOOL auth) {
@strongify(self);
if (auth) {
[self setActive];
success = [self.recorder prepareToRecord];
[self.recorder record];
[self timer]; // touching the lazy getter schedules the tick timer
}
checkAuth = YES;
}];
// note this way of waiting for the async callback: spin the run loop instead of blocking the thread
while (!checkAuth) {
[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:.05]];
}
return success;
}
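// I won't paste PermissionUtils, but presumably it wraps AVAudioSession's own
// permission request, roughly like this (a sketch; openMic: is just our wrapper):
//
// + (void)openMic:(void (^)(BOOL auth))completion {
//     [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
//         dispatch_async(dispatch_get_main_queue(), ^{ completion(granted); });
//     }];
// }
//
// Spinning the run loop above (rather than blocking on a semaphore) matters: with
// the callback delivered on the main queue, runUntilDate: keeps servicing it,
// whereas a blocked main thread would deadlock.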
- (void)setActive {
@try {
NSError *error;
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord error:&error];
if (error) {
UALog(@"[AudioRecorder][ERROR][StartRecord] %@", error);
}
error = nil;
[[AVAudioSession sharedInstance] setActive:YES error:&error];
if (error) {
UALog(@"[AudioRecorder][ERROR][CallBack] %@", error);
}
} @catch (NSException *e) {
// AVAudioSession calls can throw; swallow the exception so setup doesn't crash
}
}
- (void)cancelRecord {
self.callBack = NULL;
[self endRecord];
_recorder.delegate = nil; // release the delegate promptly on cancel; a lingering reference can make AVAudioSession deactivation fail
[self resetEnvironment];
}
- (void)endRecord {
self.canSendCallBack = NULL;
[self invalidateTimer];
if (_recorder) {
[self.recorder stop];
[self deactivateAudioSession];
_recorder = nil;
}
}
- (void)deactivateAudioSession {
NSError *error = nil;
if (![[AVAudioSession sharedInstance] setActive:NO error:&error]) {
UALog(@"%@: AVAudioSession setActive:NO failed: %@", NSStringFromClass(self.class), error ? [error localizedDescription] : @"nil");
}
}
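// A refinement worth considering here (my suggestion, not something this project
// shipped): deactivate with the option that tells audio we interrupted (music
// apps, the synthesizer) that it may resume:
//
//   [[AVAudioSession sharedInstance] setActive:NO
//                                  withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation
//                                        error:&error];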
- (void)resetEnvironment {
// remove any leftover file from a previous recording
NSFileManager *fm = [NSFileManager defaultManager];
if ([fm fileExistsAtPath:self.storeFile]) {
NSError *error;
[fm removeItemAtPath:self.storeFile error:&error];
if (error) {
UALog(@"[AudioRecorder][FM]ERROR %@", error);
}
}
self.time = 0.0f;
}
#pragma mark - AVAudioRecorderDelegate
- (void)audioRecorderDidFinishRecording:(AVAudioRecorder *)recorder successfully:(BOOL)flag {
if (self.callBack) {
self.callBack(@(flag), nil);
}
}
- (void)audioRecorderEncodeErrorDidOccur:(AVAudioRecorder *)recorder error:(NSError *__nullable)error {
if (self.callBack) {
self.callBack(@(NO), error);
}
}
// Interruption callbacks (including the deprecated variants) are stubbed out empty.
- (void)audioRecorderBeginInterruption:(AVAudioRecorder *)recorder {
}
- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder withOptions:(NSUInteger)flags {
}
- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder withFlags:(NSUInteger)flags {
}
- (void)audioRecorderEndInterruption:(AVAudioRecorder *)recorder {
}
#pragma mark - getter
- (AVAudioRecorder *)recorder {
if (!_recorder) {
//recording settings
NSMutableDictionary *recordSetting = [[NSMutableDictionary alloc] init];
//audio format: AAC (kAudioFormatMPEG4AAC); use kAudioFormatLinearPCM for raw PCM
[recordSetting setValue:[NSNumber numberWithInt:kAudioFormatMPEG4AAC] forKey:AVFormatIDKey];
//sample rate (Hz), e.g. 8000/44100/96000 (affects audio quality)
[recordSetting setValue:[NSNumber numberWithFloat:44100] forKey:AVSampleRateKey];
//number of channels: 1 or 2
[recordSetting setValue:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
//linear PCM bit depth: 8, 16, 24, or 32 (only honored when the format is linear PCM)
[recordSetting setValue:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
//encoder quality
[recordSetting setValue:[NSNumber numberWithInt:AVAudioQualityHigh] forKey:AVEncoderAudioQualityKey];
NSError *err;
[self resetEnvironment];
_recorder = [[AVAudioRecorder alloc] initWithURL:[NSURL fileURLWithPath:self.storeFile] settings:recordSetting error:&err];
_recorder.delegate = self;
_recorder.meteringEnabled = YES;
if (err) {
UALog(@"[AudioRecorder][Error] %@", err);
}
}
return _recorder;
}
- (NSString *)storeFile {
if (!_storeFile) {
_storeFile = [[NSFileManager defaultManager] temRecordAudioFile];
}
return _storeFile;
}
- (BOOL)isRecording {
return _recorder.isRecording; // read the ivar so this getter doesn't lazily create a recorder
}
- (NSTimer *)timer {
if (!_timer) {
@weakify(self);
_timer = [HWWeakTimer scheduledTimerWithTimeInterval:.05 block:^(id userInfo) {
@strongify(self);
self.time += .05;
[self updateVolume];
if (self.canSendCallBack) {
self.canSendCallBack(@(self.time));
}
if (self.time >= 30.0f) {
[self endRecord]; // hard stop at the 30 s cap from the spec
}
} userInfo:nil repeats:YES];
}
return _timer;
}
- (void)updateVolume {
[_recorder updateMeters]; // must refresh the meters before reading power
self.volume = (NSInteger) (pow(10, 0.05 * [_recorder peakPowerForChannel:0]) * 100); // dBFS -> 0~100
}
- (void)invalidateTimer {
if (_timer) {
[_timer invalidate];
_timer = nil;
}
}
@end
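For context, here is roughly how the recording popup might drive this class. It's a sketch: recorder, sendButton, waveformView, pushLevel: and sendVoiceMessage are hypothetical members of the presenting controller, and the block signatures are inferred from how callBack and canSendCallBack are invoked above.

self.recorder = [AudioRecorder new];
__weak typeof(self) weakSelf = self;
self.recorder.canSendCallBack = ^(NSNumber *time) {
    // fires every 0.05 s with the elapsed recording time
    weakSelf.sendButton.enabled = (time.doubleValue >= 5.0);    // spec: allow send after 5 s
    [weakSelf.waveformView pushLevel:weakSelf.recorder.volume]; // 0~100 level for the waveform
};
self.recorder.callBack = ^(NSNumber *success, NSError *error) {
    // completion, forwarded from the AVAudioRecorderDelegate
    if (success.boolValue) [weakSelf sendVoiceMessage];
};
[self.recorder startRecord:nil]; // nil falls back to the default temp file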
As mentioned above, the UI design calls for a waveform of the audio level. When I started writing this, I found that AVAudioRecorder exposes the peak and average power per channel:
- (float)peakPowerForChannel:(NSUInteger)channelNumber;
- (float)averagePowerForChannel:(NSUInteger)channelNumber;
But no matter when you read them, you get -160. Reading the docs shows that you have to set meteringEnabled = YES on the recorder:
@property(getter=isMeteringEnabled) BOOL meteringEnabled;
and refresh the meters before every read:
- (void)updateMeters;
Then convert the reading to a 0~100 value to drive the waveform. peakPowerForChannel: returns dBFS (from -160 for silence up to 0 at full scale), and pow(10, 0.05 * dB) is the standard dB-to-amplitude conversion 10^(dB/20), yielding 0~1, scaled here to 0~100:
self.volume = (NSInteger) (pow(10, 0.05 * [_recorder peakPowerForChannel:0]) * 100);
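For illustration, the whole metering read fits in one helper (WaveformLevel is a hypothetical name; the clamp guards against readings at or above full scale):

// Turn the recorder's current peak power into a 0~100 waveform value.
static NSInteger WaveformLevel(AVAudioRecorder *recorder) {
    [recorder updateMeters];                      // must refresh the meters first
    float db = [recorder peakPowerForChannel:0];  // dBFS: -160 (silence) ... 0 (full scale)
    float linear = powf(10.0f, 0.05f * db);       // 10^(dB/20): linear amplitude, 0...1
    return (NSInteger)(MIN(MAX(linear, 0.0f), 1.0f) * 100);
}

For example, a -20 dB peak maps to 10^(-1) * 100 = 10.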
Gotchas
1. AVAudioSession acquisition and release. A synthesized prompt ("Now leave a message for xxx") plays right before recording, and afterwards I kept hitting an "AVAudioSession I/O" error. At first I assumed it was caused by calls to
[[AVAudioSession sharedInstance] setActive:YES error:&error];
and
[[AVAudioSession sharedInstance] setActive:NO error:&error];
not being paired up somewhere in the app.
I audited the audio players, the speech synthesizers (Baidu's and iFLYTEK's), and the recording module for paired activation and deactivation; even deactivating explicitly made no difference. The real cause turned out to be timing: the synthesizer releases its session relatively slowly, while I start recording almost immediately, so one or two seconds into my recording the synthesizer's deactivation lands while AVAudioSession is genuinely recording. Giving the synthesizer 0.5 s to finish releasing and delaying recording by 0.5 s fixed it; since the synthesizer is third-party, this compromise was unavoidable.
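A minimal sketch of that workaround, assuming the presenting controller keeps the recorder in a hypothetical audioRecorder property (0.5 s is simply the value that worked for us):

// After the synthesized prompt ends, give the third-party synthesizer time
// to release its AVAudioSession before recording activates ours.
__weak typeof(self) weakSelf = self;
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.5 * NSEC_PER_SEC)),
               dispatch_get_main_queue(), ^{
    [weakSelf.audioRecorder startRecord:nil]; // nil falls back to the default temp file
});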
2. AVAudioRecorder crashes. I present the recording UI as a modal view controller; tapping the gray overlay around it cancels the recording. But a user can tap the overlay, then immediately tap record again, over and over, until the app crashes. One flaw in my handling: part of the cancel cleanup sits in the completion block of
- dismissViewControllerAnimated:completion:
which often never gets a chance to run, so don't put time-critical release work in a modally presented controller's dismiss completion. Even then, sufficiently violent start/cancel tapping could still crash, so the last line of defense is debouncing taps. Here's the category:
#import <Foundation/Foundation.h>
@interface NSObject (MultiClick)
@property (nonatomic, assign) NSTimeInterval acceptInterval; ///< debounce interval, in seconds
@property (nonatomic, assign) NSTimeInterval acceptTime; ///< timestamp of the last accepted event
/**
* Whether to accept this event.
*/
- (BOOL)canAcceptEvent;
@end
#import "NSObject+MultiClick.h"
#import <objc/runtime.h>
#import "UALogger.h"
static const char *NSObject_acceptTime = "NSObject_acceptTime";
static const char *NSObject_acceptInterval = "NSObject_acceptInterval";
@implementation NSObject (MultiClick)
- (NSTimeInterval)acceptTime {
return [objc_getAssociatedObject(self, NSObject_acceptTime) doubleValue];
}
- (void)setAcceptTime:(NSTimeInterval)acceptTime {
objc_setAssociatedObject(self, NSObject_acceptTime, @(acceptTime), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (NSTimeInterval)acceptInterval {
return [objc_getAssociatedObject(self, NSObject_acceptInterval) doubleValue];
}
- (void)setAcceptInterval:(NSTimeInterval)acceptInterval {
objc_setAssociatedObject(self, NSObject_acceptInterval, @(acceptInterval), OBJC_ASSOCIATION_RETAIN_NONATOMIC);
}
- (BOOL)canAcceptEvent {
if (self.acceptInterval == 0) {
return NO; // acceptInterval must be set first; otherwise every event is rejected
}
if ([[NSDate date] timeIntervalSince1970] - self.acceptTime > self.acceptInterval) {
self.acceptTime = [[NSDate date] timeIntervalSince1970];
return YES;
}
return NO;
}
@end
Usage
Set the debounce interval when the class is initialized:
self.acceptInterval = 1;
and add this as the first line of the event handler:
if (![self canAcceptEvent]) return;
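Put together in a hypothetical recording view controller (onRecordTapped: and the 1 s interval are illustrative):

- (void)viewDidLoad {
    [super viewDidLoad];
    self.acceptInterval = 1; // ignore events within 1 s of the last accepted one
}

- (void)onRecordTapped:(UIButton *)sender {
    if (![self canAcceptEvent]) return; // drop rapid repeated taps
    // ...start or cancel recording here...
}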
Finally, thank you! Thanks to the coders around me for answering my questions, and to my partners for being so capable. Being able to write a few lines of code in peace is quite a pleasure.