Capturing with AVCaptureSession and Writing the Output to a File with AVAssetWriter

  1. The AVCaptureSession setup is the same as before, except that the audio and video outputs both deliver their sample buffers on the same serial queue.
  2. Note that the video file must not be created in advance. Here I only create a Video directory and append a .mp4 file name to build the path; the .mp4 file itself is never created, otherwise AVAssetWriter will report an error.
The full code is below (a minimal usage sketch follows after the last class).
  • Authorization (permission helper)
#import "Authorization.h"
#import <AVFoundation/AVFoundation.h>

@implementation Authorization

+ (void)authorizationStatusForVideo
{
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    switch (status) {
        case AVAuthorizationStatusNotDetermined: // not determined yet, ask the user
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
                if (granted) {
                    NSLog(@"Camera access granted");
                } else {
                    NSLog(@"Camera access denied");
                }
            }];
            break;
        case AVAuthorizationStatusRestricted:
            NSLog(@"AVAuthorizationStatusRestricted");
            break;
        case AVAuthorizationStatusDenied:
            NSLog(@"AVAuthorizationStatusDenied");
            break;
        case AVAuthorizationStatusAuthorized:
            NSLog(@"AVAuthorizationStatusAuthorized");
            break;
        default:
            NSLog(@"Video Unknow");
            break;
    }
}

+ (void)authorizationStatusForAudio
{
    AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeAudio];
    switch (status) {
        case AVAuthorizationStatusNotDetermined:
            [AVCaptureDevice requestAccessForMediaType:AVMediaTypeAudio completionHandler:^(BOOL granted) {
                if (granted) {
                    NSLog(@"麦克风授权成功");
                } else {
                    NSLog(@"麦克风授权失败");
                }
            }];
            break;
        case AVAuthorizationStatusRestricted:
            NSLog(@"AVAuthorizationStatusRestricted");
            break;
        case AVAuthorizationStatusDenied:
            NSLog(@"AVAuthorizationStatusDenied");
            break;
        case AVAuthorizationStatusAuthorized:
            NSLog(@"AVAuthorizationStatusAuthorized");
            break;
        default:
            NSLog(@"Audio Unknow");
            break;
    }
}

@end
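Both helpers assume the usage-description keys are present in Info.plist (NSCameraUsageDescription and NSMicrophoneUsageDescription); without them the system terminates the app on first access. Below is a minimal, hypothetical call site; the view controller and the viewDidLoad placement are my assumption, not part of the original post:
#import "Authorization.h"

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Trigger the permission prompts early, before the capture session starts
    [Authorization authorizationStatusForVideo];
    [Authorization authorizationStatusForAudio];
}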
  • VideoFileManager (builds the output file path)
#import "VideoFileManager.h"

@implementation VideoFileManager

+ (NSString *)fileInDirectoryWithDocument:(NSString *)directory
{
    // Build .../Documents/<directory>
    NSString *documentDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSString *directoryPath = [documentDirectory stringByAppendingPathComponent:directory];
    BOOL isDirectory;
    BOOL isExistDirectory = [[NSFileManager defaultManager] fileExistsAtPath:directoryPath isDirectory:&isDirectory]; // check whether the directory already exists
    if (isExistDirectory) {
        NSLog(@"%@ directory already exists", directory);
    } else {
        NSLog(@"%@ directory does not exist", directory);
        BOOL isCreateDirectory = [[NSFileManager defaultManager] createDirectoryAtPath:directoryPath withIntermediateDirectories:YES attributes:nil error:nil];
        if (isCreateDirectory) {
            NSLog(@"%@ directory created", directory);
        } else {
            NSLog(@"%@ directory creation failed", directory);
        }
    }
    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
    [formatter setDateFormat:@"yyyyMMddHHmmss"];
    NSString *fileName = [NSString stringWithFormat:@"%@.mp4",[formatter stringFromDate:[NSDate date]]];
    NSString *videoPath = [directoryPath stringByAppendingPathComponent:fileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:videoPath] == YES) {
        NSError *error;
        if ([[NSFileManager defaultManager] removeItemAtPath:videoPath error:&error] == NO) {
            NSLog(@"视频存在删除失败 - %@",error.localizedDescription);
        } else {
            NSLog(@"视频存在删除成功");
        }
    }
    return videoPath;
}

@end
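For reference, a sketch of how the returned path looks and is used; the timestamped file name is only an example:
// The helper only guarantees the Video directory exists; the .mp4 file itself is never created,
// because AVAssetWriter requires a URL that does not point to an existing file.
NSString *videoPath = [VideoFileManager fileInDirectoryWithDocument:@"Video"];
// e.g. .../Documents/Video/20220101093000.mp4
NSURL *videoURL = [NSURL fileURLWithPath:videoPath];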
  • VideoCapture (capture and writing; VideoCapture.h followed by VideoCapture.m)
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>

NS_ASSUME_NONNULL_BEGIN

@interface VideoCapture : NSObject

-(instancetype)initWithPreview:(UIView *)preview; 

-(void)startCapture:(void(^)(void))block; // start capturing
-(void)stopCapture; // stop capturing

-(void)startRecord; // start recording
-(void)stopRecord; // stop recording


@end

NS_ASSUME_NONNULL_END
#import "VideoCapture.h"
#import "VideoFileManager.h"

@interface VideoCapture ()<AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate>

@property(nonatomic,strong)dispatch_queue_t sessionQueue; // serial queue used for capture callbacks and writing

@property(nonatomic,strong)AVCaptureSession *session;
@property(nonatomic,strong)AVCaptureDeviceInput *videoInput;
@property(nonatomic,strong)AVCaptureDeviceInput *audioInput;
@property(nonatomic,strong)AVCaptureVideoDataOutput *videoOutput;
@property(nonatomic,strong)AVCaptureAudioDataOutput *audioOutput;
@property(nonatomic,strong)AVCaptureVideoPreviewLayer *previewLayer;

@property(nonatomic,strong)AVAssetWriter *assetWriter;
@property(nonatomic,strong)AVAssetWriterInput *audioWriterInput;
@property(nonatomic,strong)AVAssetWriterInput *videoWriterInput;

@property(nonatomic,assign)BOOL isRecording; // whether a recording is in progress
@property(nonatomic,assign)BOOL canWrite; // whether the writer session has been started

@end

@implementation VideoCapture

#pragma mark - public
-(instancetype)initWithPreview:(UIView *)preview
{
    self = [super init];
    if (self) {
        _sessionQueue = dispatch_queue_create("com.video.session", DISPATCH_QUEUE_SERIAL);
        
        // 1. Create the capture session
        [self configCaptureSession];
        // 2. Configure the video input
        [self configVideoInput];
        // 3. Configure the audio input
        [self configAudioInput];
        // 4. Configure the data outputs
        [self configVideoDataOutput];
        [self configAudioDataOutput];
        // 5. Preview layer
        [self configPreviewLayer:preview];
        
    }
    return self;
}

// Start capturing
- (void)startCapture:(void (^)(void))block
{
    if (![self.session isRunning]) {
        __weak typeof (self) weakSelf = self;
        dispatch_async(_sessionQueue, ^{
            [weakSelf.session startRunning];
            dispatch_async(dispatch_get_main_queue(), ^{
                block();
            });
        });
    }
}

// Stop capturing
- (void)stopCapture
{
    if ([self.session isRunning]) {
        __weak typeof (self) weakSelf = self;
        dispatch_async(_sessionQueue, ^{
            [weakSelf.session stopRunning];
        });
    }
}

// Start recording
- (void)startRecord
{
    __weak typeof (self) weakSelf = self;
    dispatch_async(_sessionQueue, ^{
        // Build the output file path
        NSString *videoPath = [VideoFileManager fileInDirectoryWithDocument:@"Video"];
        NSURL *videoURL = [NSURL fileURLWithPath:videoPath];
        
        // Configure AVAssetWriter
        NSError *error;
        weakSelf.assetWriter = [AVAssetWriter assetWriterWithURL:videoURL fileType:AVFileTypeMPEG4 error:&error];
        if (error) { NSLog(@"AVAssetWriter error - %@",error.localizedDescription); }
        // AVAssetWriterInput
        NSDictionary *audioOutputSetting =  @{
            AVEncoderBitRatePerChannelKey : @(28000),
            AVFormatIDKey : @(kAudioFormatMPEG4AAC),
            AVNumberOfChannelsKey : @(1),
            AVSampleRateKey : @(22050)};
        weakSelf.audioWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSetting];
        weakSelf.audioWriterInput.expectsMediaDataInRealTime = YES;
        
        NSDictionary *videoOutputSetting = @{
            AVVideoCodecKey : AVVideoCodecTypeH264,
            AVVideoWidthKey : @(1280),
            AVVideoHeightKey : @(720),
            AVVideoCompressionPropertiesKey : @{
                AVVideoAverageBitRateKey : @(1280 * 720 * 3),
                AVVideoExpectedSourceFrameRateKey : @(15),
                AVVideoMaxKeyFrameIntervalKey : @(15),
                AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel
            }
        };
        weakSelf.videoWriterInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSetting];
        weakSelf.videoWriterInput.expectsMediaDataInRealTime = YES;
        // The captured frames need to be rotated 90 degrees to display upright
        weakSelf.videoWriterInput.transform = CGAffineTransformMakeRotation(M_PI / 2.0);
        if ([weakSelf.assetWriter canAddInput:weakSelf.audioWriterInput]) {
            [weakSelf.assetWriter addInput:weakSelf.audioWriterInput];
        }
        if ([weakSelf.assetWriter canAddInput:weakSelf.videoWriterInput]) {
            [weakSelf.assetWriter addInput:weakSelf.videoWriterInput];
        }
        
        weakSelf.isRecording = YES;
        weakSelf.canWrite = NO;
    });
}
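
// An alternative sketch, not used by the code above: instead of hard-coding the encoder
// settings, AVCaptureVideoDataOutput / AVCaptureAudioDataOutput can recommend writer
// settings that match the current session preset. This hypothetical helper only
// illustrates the idea, under the assumption that the outputs are already configured.
- (void)configWriterInputsWithRecommendedSettings
{
    NSDictionary *videoSettings = [self.videoOutput recommendedVideoSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4];
    NSDictionary *audioSettings = [self.audioOutput recommendedAudioSettingsForAssetWriterWithOutputFileType:AVFileTypeMPEG4];
    self.videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    self.audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
    self.videoWriterInput.expectsMediaDataInRealTime = YES;
    self.audioWriterInput.expectsMediaDataInRealTime = YES;
}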

// Stop recording
- (void)stopRecord
{
    __weak typeof (self) weakSelf = self;
    dispatch_async(_sessionQueue, ^{
        weakSelf.isRecording = NO;
        if (weakSelf.assetWriter.status == AVAssetWriterStatusWriting) {
            [weakSelf.videoWriterInput markAsFinished];
            [weakSelf.audioWriterInput markAsFinished];
            [weakSelf.assetWriter finishWritingWithCompletionHandler:^{
                
                [weakSelf stopCapture];
                dispatch_async(dispatch_get_main_queue(), ^{
                    NSLog(@"结束录制");
                });
            }];
        }
    });
}


#pragma mark - life cycle
- (void)dealloc
{
    NSLog(@"%s",__FUNCTION__);
}

#pragma mark - private
/*
 * 1. Create the capture session
 */
-(void)configCaptureSession
{
    self.session = [[AVCaptureSession alloc] init];
    if ([self.session canSetSessionPreset:AVCaptureSessionPreset1280x720]) { // set the resolution preset
        [self.session setSessionPreset:AVCaptureSessionPreset1280x720];
    }
}

/*
 * 2. Configure the video input
 */
-(void)configVideoInput
{
    NSError *error;
    // Video capture device
    // AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; // would return the default (back) camera
    AVCaptureDevice *captureDevice = [self getCameraPosition:AVCaptureDevicePositionBack];
    // Wrap the capture device in an AVCaptureDeviceInput;
    // an AVCaptureDevice cannot be added to the session directly.
    self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    // Add the input to the session
    if (self.videoInput && [self.session canAddInput:self.videoInput]) {
        [self.session addInput:self.videoInput];
    }
}

// Remove the video input
-(void)removeVideoDeviceInput
{
    if (self.videoInput) [self.session removeInput:self.videoInput];
    self.videoInput = nil;
}

// Get a camera device for the given position
-(AVCaptureDevice *)getCameraPosition:(AVCaptureDevicePosition)position
{
    /*
     AVCaptureDeviceTypeBuiltInWideAngleCamera  wide-angle (the default device, roughly a 28mm-equivalent lens)
     AVCaptureDeviceTypeBuiltInTelephotoCamera  telephoto (the default device's 2x or 3x lens; only discoverable via AVCaptureDeviceDiscoverySession)
     AVCaptureDeviceTypeBuiltInUltraWideCamera  ultra-wide (the default device's 0.5x lens; only discoverable via AVCaptureDeviceDiscoverySession)
     AVCaptureDeviceTypeBuiltInDualCamera       one wide-angle plus one telephoto (iPhone 7 Plus, iPhone X); switches lenses automatically; only via AVCaptureDeviceDiscoverySession
     AVCaptureDeviceTypeBuiltInDualWideCamera   one ultra-wide plus one wide-angle (iPhone 12, iPhone 13); switches lenses automatically; only via AVCaptureDeviceDiscoverySession
     AVCaptureDeviceTypeBuiltInTripleCamera     ultra-wide, wide-angle and telephoto (iPhone 11 Pro Max, iPhone 12 Pro Max, iPhone 13 Pro Max); switches lenses automatically; only via AVCaptureDeviceDiscoverySession
     AVCaptureDeviceTypeBuiltInTrueDepthCamera  infrared sensor plus camera (the front TrueDepth camera, e.g. iPhone 12 Pro Max, iPhone 13 Pro Max)
     */
    NSArray *deviceTypes;
    if (position == AVCaptureDevicePositionBack) {
        deviceTypes = @[
            AVCaptureDeviceTypeBuiltInDualCamera,
            AVCaptureDeviceTypeBuiltInDualWideCamera,
            AVCaptureDeviceTypeBuiltInTripleCamera
        ];
    } else {
        deviceTypes = @[AVCaptureDeviceTypeBuiltInWideAngleCamera];
    }
    AVCaptureDeviceDiscoverySession *deviceSession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:AVMediaTypeVideo position:position];
    if (deviceSession.devices.count) return deviceSession.devices.firstObject;
    
    if (position == AVCaptureDevicePositionBack) { // fall back for devices without a multi-camera module
        deviceTypes = @[AVCaptureDeviceTypeBuiltInWideAngleCamera];
        AVCaptureDeviceDiscoverySession *deviceSession = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:deviceTypes mediaType:AVMediaTypeVideo position:position];
        if (deviceSession.devices.count) return deviceSession.devices.firstObject;
    }
    return nil;
}
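
// A hypothetical extra method (not in the original post) showing how removeVideoDeviceInput
// and getCameraPosition: could be combined to switch cameras while the session is running.
- (void)switchToCameraPosition:(AVCaptureDevicePosition)position
{
    AVCaptureDevice *device = [self getCameraPosition:position];
    if (!device) { return; }
    [self.session beginConfiguration];
    [self removeVideoDeviceInput]; // drop the current camera input
    NSError *error;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input && [self.session canAddInput:input]) {
        [self.session addInput:input];
        self.videoInput = input;
    }
    [self.session commitConfiguration];
}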

/*
 * 3. Configure the audio input
 */
-(void)configAudioInput
{
    NSError *error;
    // Add the audio capture device; this can be skipped when only capturing still images
    AVCaptureDevice *captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    self.audioInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:&error];
    if (self.audioInput && [self.session canAddInput:self.audioInput]) {
        [self.session addInput:self.audioInput];
    }
}

// Remove the audio input
-(void)removeAudioDeviceInput
{
    if (self.audioInput) [self.session removeInput:self.audioInput];
    self.audioInput = nil;
}

/*
 * 4. Configure the outputs
 */
// Configure the video data output
-(void)configVideoDataOutput
{
    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [self.videoOutput setSampleBufferDelegate:self queue:_sessionQueue];

    self.videoOutput.alwaysDiscardsLateVideoFrames = YES; // YES: if a frame is still being processed when the next one arrives, drop the late frame
    /*
     * kCVPixelBufferPixelFormatTypeKey is the pixel output format, here kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
     */
    self.videoOutput.videoSettings = @{
        (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    if ([self.session canAddOutput:self.videoOutput]) {
        [self.session addOutput:self.videoOutput];
    }
}

// Remove the video data output
-(void)removeVideoOutput
{
    if (self.videoOutput) [self.session removeOutput:self.videoOutput];
    self.videoOutput = nil;
}

// Configure the audio data output
-(void)configAudioDataOutput
{
    self.audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    [self.audioOutput setSampleBufferDelegate:self queue:_sessionQueue];
    if ([self.session canAddOutput:self.audioOutput]) {
        [self.session addOutput:self.audioOutput];
    }
}

// Remove the audio data output
-(void)removeAudioOutput
{
    if (self.audioOutput) [self.session removeOutput:self.audioOutput];
}

/*
 * 5. Preview layer
 */
-(void)configPreviewLayer:(UIView *)preview
{
    UIView *mainPreView = [[UIView alloc] initWithFrame:CGRectMake(0.0, 50.0, [UIScreen mainScreen].bounds.size.width, 400.0)];
    mainPreView.backgroundColor = [UIColor grayColor];
    [preview addSubview:mainPreView];
    
    self.previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:self.session];
    self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    self.previewLayer.frame = mainPreView.bounds;
    [mainPreView.layer addSublayer:self.previewLayer];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate,AVCaptureAudioDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    [self appendSampleBuffer:sampleBuffer];
}

-(void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer{
    if (_isRecording == NO) { return; }
    // Get the media type of this sample buffer
    CMFormatDescriptionRef formatDes = CMSampleBufferGetFormatDescription(sampleBuffer);
    CMMediaType mediaType = CMFormatDescriptionGetMediaType(formatDes);
    if (mediaType == kCMMediaType_Video) {
        if (!self.canWrite) {
            // Start the writer session at the timestamp of the first video frame
            CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            if ([self.assetWriter startWriting]) {
                [self.assetWriter startSessionAtSourceTime:timestamp];
                self.canWrite = YES;
            } else {
                NSLog(@"startWriting failed - %@", self.assetWriter.error.localizedDescription);
            }
        }
        
        if (self.assetWriter && self.videoWriterInput.readyForMoreMediaData) {
            BOOL success = [self.videoWriterInput appendSampleBuffer:sampleBuffer];
            if (!success) {
                NSLog(@"Video append failed");
            }
        }
        
    } else if (mediaType == kCMMediaType_Audio) {
        // Skip audio until the writer session has been started by the first video frame
        if (self.canWrite && self.audioWriterInput.readyForMoreMediaData) {
            BOOL success = [self.audioWriterInput appendSampleBuffer:sampleBuffer];
            if (!success) {
                NSLog(@"Audio append failed");
            }
        }
    }
}

@end
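Finally, a minimal usage sketch that ties the pieces together. RecordViewController (assumed to be a UIViewController subclass), the button actions, and the capture property are hypothetical names used only for illustration; they are not part of the code above:
#import "Authorization.h"
#import "VideoCapture.h"

@interface RecordViewController ()
@property(nonatomic,strong)VideoCapture *capture;
@end

@implementation RecordViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    [Authorization authorizationStatusForVideo];
    [Authorization authorizationStatusForAudio];
    // VideoCapture adds the preview as a subview of self.view
    self.capture = [[VideoCapture alloc] initWithPreview:self.view];
    [self.capture startCapture:^{
        NSLog(@"Capture session running");
    }];
}

// Hypothetical button actions; the recording ends up under Documents/Video/<timestamp>.mp4
- (IBAction)startTapped:(id)sender { [self.capture startRecord]; }
- (IBAction)stopTapped:(id)sender  { [self.capture stopRecord]; }

@end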