Audio and video editing apps often display a waveform view of the audio to make it easier to locate and trim clips. So how do we build a waveform view ourselves?
Frameworks used: AVFoundation and QuartzCore.
Key classes:
AVAssetReader — reads the raw sample data out of an audio file
AVAssetTrack — represents the audio track
AVAssetReaderTrackOutput — the track output, from which we obtain CMSampleBufferRef audio sample buffers
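The snippets below assume roughly these imports (the drawing in step 3 goes through Core Graphics, which UIKit pulls in; Core Media provides the CMSampleBuffer calls):

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>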
Implementation steps:
1. Reading the audio sample data
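The method below hands the samples back through a QXSampleDataCompletionBlock. Its definition isn't shown in the original; judging from how it is invoked, it is presumably a block that takes the assembled NSData:

// Assumed typedef, inferred from how the completion block is invoked below.
typedef void(^QXSampleDataCompletionBlock)(NSData *sampleData);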
+ (void)loadAudioSamplesFromAsset:(AVAsset *)asset completionBlock:(QXSampleDataCompletionBlock)completionBlock
{
    NSString *tracks = @"tracks";
    // Load the "tracks" property asynchronously so we don't block the calling thread.
    [asset loadValuesAsynchronouslyForKeys:@[tracks] completionHandler:^{
        AVKeyValueStatus status = [asset statusOfValueForKey:tracks error:nil];
        NSData *sampleData = nil;
        if (status == AVKeyValueStatusLoaded) {
            sampleData = [self readAudioSampleFromAsset:asset];
        }
        // The handler may run on an arbitrary queue; hop back to main before calling out.
        dispatch_async(dispatch_get_main_queue(), ^{
            completionBlock(sampleData);
        });
    }];
}
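Calling it might look like this. The resource URL is a placeholder, and QXSampleDataProvider is an illustrative name for whatever class hosts these two class methods:

NSURL *fileURL = [[NSBundle mainBundle] URLForResource:@"song" withExtension:@"m4a"]; // placeholder resource
AVAsset *asset = [AVAsset assetWithURL:fileURL];
[QXSampleDataProvider loadAudioSamplesFromAsset:asset completionBlock:^(NSData *sampleData) {
    // sampleData is nil if loading failed; otherwise hand it to the filter/view.
}];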
+ (NSData *)readAudioSampleFromAsset:(AVAsset *)asset
{
    NSError *error = nil;
    AVAssetReader *assetReader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
    if (!assetReader) {
        NSLog(@"Error creating asset reader: %@", error.localizedDescription);
        return nil;
    }
    AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Decode to 16-bit signed integer linear PCM so the samples are easy to scan.
    NSDictionary *outputSettings = @{
        AVFormatIDKey : @(kAudioFormatLinearPCM),
        AVLinearPCMIsBigEndianKey : @NO,   // little-endian: the native byte order on iOS devices
        AVLinearPCMIsFloatKey : @NO,       // integer samples rather than floating-point
        AVLinearPCMBitDepthKey : @(16)
    };
    AVAssetReaderTrackOutput *trackOutput = [[AVAssetReaderTrackOutput alloc] initWithTrack:track outputSettings:outputSettings];
    [assetReader addOutput:trackOutput];
    [assetReader startReading];

    NSMutableData *sampleData = [NSMutableData data];
    while (assetReader.status == AVAssetReaderStatusReading) {
        CMSampleBufferRef sampleBuffer = [trackOutput copyNextSampleBuffer];
        if (sampleBuffer) {
            CMBlockBufferRef blockBufferRef = CMSampleBufferGetDataBuffer(sampleBuffer);
            // length is a byte count, so copy into a byte buffer of exactly that size.
            size_t length = CMBlockBufferGetDataLength(blockBufferRef);
            UInt8 sampleBytes[length];
            CMBlockBufferCopyDataBytes(blockBufferRef, 0, length, sampleBytes);
            [sampleData appendBytes:sampleBytes length:length];
            CMSampleBufferInvalidate(sampleBuffer);
            CFRelease(sampleBuffer);
        }
    }
    if (assetReader.status == AVAssetReaderStatusCompleted) {
        return sampleData;
    } else {
        NSLog(@"readAudioSampleFromAsset error");
        return nil;
    }
}
2. Downsampling the sample data. At this point we have far more samples than we can draw: a 3-minute, 44.1 kHz mono track holds roughly 8 million samples, while the view is only a few hundred points wide. We therefore split the samples into one bin per point of view width and keep each bin's peak.
- (NSArray *)filteredSamplesForSize:(CGSize)size
{
    NSMutableArray *filteredSamples = [[NSMutableArray alloc] init];
    NSUInteger sampleCount = self.sampleData.length / sizeof(SInt16);
    // One bin of samples per point of view width; at least 1 so short clips
    // can't produce a zero-step (infinite) loop.
    NSUInteger binSize = MAX((NSUInteger)1, sampleCount / (NSUInteger)size.width);
    SInt16 *bytes = (SInt16 *)self.sampleData.bytes;
    SInt16 maxSample = 0;
    for (NSUInteger i = 0; i < sampleCount; i += binSize) {
        SInt16 sampleBin[binSize];
        // The final bin may be short; don't read past the end of the buffer.
        NSUInteger binCount = MIN(binSize, sampleCount - i);
        for (NSUInteger j = 0; j < binCount; j++) {
            // Samples were written little-endian; convert to host byte order.
            sampleBin[j] = CFSwapInt16LittleToHost(bytes[i + j]);
        }
        SInt16 value = [self maxValueInArray:sampleBin ofSize:binCount];
        [filteredSamples addObject:@(value)];
        if (value > maxSample) {
            maxSample = value;
        }
    }
    // Scale every peak so the loudest sample spans half the view height.
    CGFloat scaleFactor = (size.height / 2) / maxSample;
    for (NSUInteger i = 0; i < filteredSamples.count; i++) {
        filteredSamples[i] = @([filteredSamples[i] integerValue] * scaleFactor);
    }
    return filteredSamples;
}
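The maxValueInArray:ofSize: helper used above isn't shown in the original. A minimal sketch is a linear scan for the largest absolute amplitude:

// Minimal sketch of the helper referenced above.
- (SInt16)maxValueInArray:(SInt16 *)values ofSize:(NSUInteger)size
{
    int maxValue = 0;
    for (NSUInteger i = 0; i < size; i++) {
        int magnitude = abs(values[i]); // negative peaks count too
        if (magnitude > maxValue) {
            maxValue = magnitude;
        }
    }
    return (SInt16)MIN(maxValue, INT16_MAX); // clamp the -32768 edge case
}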
3. Rendering with QuartzCore
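The drawing code references two scaling constants that shrink the waveform slightly so it doesn't touch the view's edges. Their values aren't shown in the original; these are assumed:

// Assumed values: draw the waveform at 95% of the width and 85% of the height.
static const CGFloat QXWidthScaling  = 0.95f;
static const CGFloat QXHeightScaling = 0.85f;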
- (void)drawRect:(CGRect)rect
{
    CGContextRef context = UIGraphicsGetCurrentContext();

    // Scale the context down slightly, then translate to re-center the waveform.
    CGContextScaleCTM(context, QXWidthScaling, QXHeightScaling);
    CGFloat xOffset = self.bounds.size.width - (self.bounds.size.width * QXWidthScaling);
    CGFloat yOffset = self.bounds.size.height - (self.bounds.size.height * QXHeightScaling);
    CGContextTranslateCTM(context, xOffset / 2, yOffset / 2);

    // Downsample to one peak value per point of view width.
    NSArray *filteredSamples = [self.filter filteredSamplesForSize:self.bounds.size];

    // Build the upper half of the waveform along the vertical midline.
    CGFloat midY = CGRectGetMidY(rect);
    CGMutablePathRef halfPath = CGPathCreateMutable();
    CGPathMoveToPoint(halfPath, NULL, 0.0f, midY);
    for (NSUInteger i = 0; i < filteredSamples.count; i++) {
        float sample = [filteredSamples[i] floatValue];
        CGPathAddLineToPoint(halfPath, NULL, i, midY - sample);
    }
    CGPathAddLineToPoint(halfPath, NULL, filteredSamples.count, midY);

    // Mirror the half path vertically to form the full, symmetric waveform.
    CGMutablePathRef fullPath = CGPathCreateMutable();
    CGPathAddPath(fullPath, NULL, halfPath);
    CGAffineTransform transform = CGAffineTransformIdentity;
    transform = CGAffineTransformTranslate(transform, 0, CGRectGetHeight(rect));
    transform = CGAffineTransformScale(transform, 1.0, -1.0);
    CGPathAddPath(fullPath, &transform, halfPath);

    // Fill the combined path with the wave color.
    CGContextAddPath(context, fullPath);
    CGContextSetFillColorWithColor(context, self.waveColor.CGColor);
    CGContextDrawPath(context, kCGPathFill);

    // Core Graphics paths are not ARC-managed; release them explicitly.
    CGPathRelease(halfPath);
    CGPathRelease(fullPath);
}
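Wiring it all together might look like the sketch below. The view's waveColor and filter properties appear in drawRect: above; QXWaveformView, QXSampleDataFilter, and its initializer are illustrative names, not from the original:

QXWaveformView *waveView = [[QXWaveformView alloc] initWithFrame:self.view.bounds];
waveView.waveColor = [UIColor blueColor];
[self.view addSubview:waveView];
[QXSampleDataProvider loadAudioSamplesFromAsset:asset completionBlock:^(NSData *sampleData) {
    waveView.filter = [[QXSampleDataFilter alloc] initWithData:sampleData]; // assumed initializer
    [waveView setNeedsDisplay]; // triggers drawRect: with the new samples
}];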