之前我们使用 AVCaptureMovieFileOutput(参见《AVFoundation 实现一个系统相机》一文)实现了简单的视频录制,但是当我们通过 OpenGL 给视频添加滤镜的时候,就不能再使用这个类进行视频录制了。那我们用什么方法实现呢?
关键类:
AVCaptureVideoDataOutput:视频原始数据输出(CMSampleBufferRef)
AVCaptureAudioDataOutput:音频原始数据输出(CMSampleBufferRef)
AVAssetWriter:核心类,用于音视频数据写入
AVAssetWriterInput:音视频数据写入的单个轨道
AVAssetWriterInputPixelBufferAdaptor:提供了一个 CVPixelBufferPool,可用于分配像素缓冲区,以写入输出文件。使用所提供的像素缓冲池进行缓冲区分配,通常比追加使用单独池分配的像素缓冲区更高效。
接下来让我们去实现在添加滤镜的情况下如何进行视频录制
一.音视频捕获
创建AVCaptureSession
// Capture session that coordinates the camera/microphone inputs and the
// raw-data outputs configured below.
self.captureSession = [[AVCaptureSession alloc] init];
创建输入输出
/// Attaches the default video (camera) and audio (microphone) capture inputs
/// to the session.
/// @param error Populated by AVCaptureDeviceInput when a device cannot be opened.
/// @return NO if either device cannot be opened or added to the session.
- (BOOL)setupSessionInputs:(NSError * __autoreleasing _Nullable *)error
{
    // Default video capture device (the back camera on iOS).
    AVCaptureDevice *videoDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *videoInput =
        [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
    if (videoInput) {
        if ([self.captureSession canAddInput:videoInput]) {
            [self.captureSession addInput:videoInput];
            // Keep a reference so the active camera can be switched later.
            self.activeVideoInput = videoInput;
        } else {
            return NO;
        }
    } else {
        return NO;
    }

    // Setup default microphone
    AVCaptureDevice *audioDevice =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    AVCaptureDeviceInput *audioInput =
        [AVCaptureDeviceInput deviceInputWithDevice:audioDevice error:error];
    if (audioInput) {
        if ([self.captureSession canAddInput:audioInput]) {
            [self.captureSession addInput:audioInput];
        } else {
            return NO;
        }
    } else {
        return NO;
    }
    return YES;
}
/// Attaches raw video/audio data outputs to the session and builds the movie
/// writer from the outputs' recommended asset-writer settings.
/// @param error Unused here; kept for signature symmetry with -setupSessionInputs:.
/// @return NO if either output cannot be added to the session.
- (BOOL)setupSessionOutputs:(NSError * __autoreleasing _Nullable *)error
{
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // BGRA matches what Core Image / OpenGL ES consume for filtering.
    NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    self.videoDataOutput.videoSettings = outputSettings;
    // Keep late frames while recording so the written movie has no gaps
    // (at the cost of potentially higher memory pressure).
    self.videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.dispatchQueue];
    if ([self.captureSession canAddOutput:self.videoDataOutput]) {
        [self.captureSession addOutput:self.videoDataOutput];
    } else {
        return NO;
    }

    self.audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
    [self.audioDataOutput setSampleBufferDelegate:self
                                            queue:self.dispatchQueue];
    if ([self.captureSession canAddOutput:self.audioDataOutput]) {
        [self.captureSession addOutput:self.audioDataOutput];
    } else {
        return NO;
    }

    // NOTE: recommended…SettingsForAssetWriterWithOutputFileType: returns
    // meaningful values only after the outputs are attached to a session,
    // so this must stay below the addOutput: calls.
    NSString *fileType = AVFileTypeQuickTimeMovie;
    NSDictionary *videoSettings = [self.videoDataOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:fileType];
    NSDictionary *audioSettings = [self.audioDataOutput recommendedAudioSettingsForAssetWriterWithOutputFileType:fileType];
    self.movieWriter = [[QXMovieWriter alloc] initWithVideoSettings:videoSettings audioSettings:audioSettings dispatchQueue:self.dispatchQueue];
    self.movieWriter.delegate = self;
    return YES;
}
二.开始录制
/// Builds the AVAssetWriter graph on the writer queue and arms recording.
/// The writer session itself starts when the first video sample arrives
/// (see -processSampleBuffer:).
- (void)startWriting
{
    dispatch_async(self.dispatchQueue, ^{
        NSError *error = nil;
        NSString *fileType = AVFileTypeQuickTimeMovie;
        self.assetWriter = [AVAssetWriter assetWriterWithURL:[self outputURL]
                                                    fileType:fileType
                                                       error:&error];
        if (!self.assetWriter || error) {
            NSLog(@"error create AVAssetWriter: %@", error);
            return;
        }

        self.assetWriteVideoInput =
            [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo
                                           outputSettings:self.videoSettings];
        // Real-time capture: the input must not buffer excessively.
        self.assetWriteVideoInput.expectsMediaDataInRealTime = YES;
        // NOTE(review): orientation is hard-coded to portrait; consider
        // capturing the actual device orientation before recording starts.
        UIDeviceOrientation orientation = UIDeviceOrientationPortrait;
        self.assetWriteVideoInput.transform =
            QXTransformForDeviceOrientation(orientation);

        // Pool attributes for the adaptor. BGRA matches the data-output format.
        // Fix: the pixel-buffer attribute key is
        // kCVPixelBufferOpenGLESCompatibilityKey; the original used
        // kCVPixelFormatOpenGLESCompatibility, which is a pixel-format
        // *description* key and is ignored in this dictionary.
        NSDictionary *attributes = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
            (id)kCVPixelBufferWidthKey : self.videoSettings[AVVideoWidthKey],
            (id)kCVPixelBufferHeightKey : self.videoSettings[AVVideoHeightKey],
            (id)kCVPixelBufferOpenGLESCompatibilityKey : (id)kCFBooleanTrue
        };
        self.assetWriteInputPixelBufferAdaptor =
            [[AVAssetWriterInputPixelBufferAdaptor alloc]
                initWithAssetWriterInput:self.assetWriteVideoInput
             sourcePixelBufferAttributes:attributes];

        if ([self.assetWriter canAddInput:self.assetWriteVideoInput]) {
            [self.assetWriter addInput:self.assetWriteVideoInput];
        } else {
            NSLog(@"error add video input");
            return;
        }

        self.assetWriterAudioInput =
            [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio
                                           outputSettings:self.audioSettings];
        self.assetWriterAudioInput.expectsMediaDataInRealTime = YES;
        if ([self.assetWriter canAddInput:self.assetWriterAudioInput]) {
            [self.assetWriter addInput:self.assetWriterAudioInput];
        } else {
            // Fix: bail out instead of arming recording with a half-configured
            // writer (the original fell through and set isWriting anyway).
            NSLog(@"error add audio input");
            return;
        }

        self.isWriting = YES;   // armed; session starts on the first video frame
        self.firstSample = YES;
    });
}
三.从代理方法中获取原始数据CMSampleBufferRef进行转换CVPixelBufferRef
/// Consumes one captured CMSampleBuffer (video or audio). Video frames are run
/// through the active Core Image filter and appended via the pixel-buffer
/// adaptor; audio samples are appended directly once the session has started.
/// Expected to be called on self.dispatchQueue (the capture delegate queue).
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (!self.isWriting) {
        return;
    }
    CMFormatDescriptionRef formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer);
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(formatDesc);
    if (mediaType == kCMMediaType_Video) {
        CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        // Start the writer session from the first *video* frame so the movie's
        // timeline begins exactly at the first visible frame.
        if (self.firstSample) {
            if ([self.assetWriter startWriting]) {
                [self.assetWriter startSessionAtSourceTime:timestamp];
            } else {
                NSLog(@"Failed to start writing");
            }
            self.firstSample = NO;
        }

        // Allocate the destination frame from the adaptor's pool (the pool is
        // NULL until -startWriting has succeeded, which makes the create fail).
        CVPixelBufferRef outputRenderBuffer = NULL;
        CVPixelBufferPoolRef pixelBufferPool = self.assetWriteInputPixelBufferAdaptor.pixelBufferPool;
        // Fix: CVPixelBufferPoolCreatePixelBuffer returns CVReturn, not OSStatus.
        CVReturn err = CVPixelBufferPoolCreatePixelBuffer(NULL, pixelBufferPool, &outputRenderBuffer);
        if (err != kCVReturnSuccess) {
            // Fix: typo "unable ti create" in the original log message.
            NSLog(@"unable to create a pixel buffer from the pool (%d)", (int)err);
            return;
        }

        // Filter the captured frame with Core Image; fall back to the source
        // image when the filter produces no output.
        CVPixelBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        CIImage *sourceImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
        [self.activeFilter setValue:sourceImage forKey:kCIInputImageKey];
        CIImage *filteredImage = self.activeFilter.outputImage;
        if (!filteredImage) {
            filteredImage = sourceImage;
        }
        [self.ciContext render:filteredImage toCVPixelBuffer:outputRenderBuffer bounds:filteredImage.extent colorSpace:self.colorSpace];

        // Drop the frame if the input is backed up; appending while not ready
        // is an API violation.
        if (self.assetWriteVideoInput.readyForMoreMediaData) {
            if (![self.assetWriteInputPixelBufferAdaptor
                        appendPixelBuffer:outputRenderBuffer
                     withPresentationTime:timestamp]) {
                NSLog(@"Error appending pixel buffer.");
            }
        }
        CVPixelBufferRelease(outputRenderBuffer);
    } else if (!self.firstSample && mediaType == kCMMediaType_Audio) {
        // Audio is ignored until the first video frame has started the session,
        // otherwise the append would precede startSessionAtSourceTime:.
        if (self.assetWriterAudioInput.isReadyForMoreMediaData) {
            if (![self.assetWriterAudioInput appendSampleBuffer:sampleBuffer]) {
                NSLog(@"Error appending audio sample buffer.");
            }
        }
    }
}
四.结束录制
/// Stops recording and finalizes the movie file. The delegate is notified on
/// the main queue with the output URL on success.
- (void)stopWriting
{
    // Flip the flag on the calling thread so in-flight capture callbacks stop
    // appending as soon as possible.
    self.isWriting = NO;
    dispatch_async(self.dispatchQueue, ^{
        // Fix: -finishWritingWithCompletionHandler: raises an exception if the
        // writer never reached the writing state (e.g. stop was requested
        // before the first video frame ever arrived).
        if (self.assetWriter.status != AVAssetWriterStatusWriting) {
            NSLog(@"Cannot finish writing; writer status: %ld", (long)self.assetWriter.status);
            return;
        }
        [self.assetWriter finishWritingWithCompletionHandler:^{
            if (self.assetWriter.status == AVAssetWriterStatusCompleted) {
                // Deliver the result on the main queue for UI consumption.
                dispatch_async(dispatch_get_main_queue(), ^{
                    NSURL *fileURL = [self.assetWriter outputURL];
                    [self.delegate didWriteMovieURL:fileURL];
                });
            } else {
                NSLog(@"Failed to write movie: %@", self.assetWriter.error);
            }
        }];
    });
}