//
// IDCaptureSessionAssetWriterCoordinator.h
// VideoCaptureDemo
//
// Created by lj on 2023/3/22.
// Copyright © 2023 lj. All rights reserved.
//
#import "IDCaptureSessionCoordinator.h"
NS_ASSUME_NONNULL_BEGIN
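// Coordinates an AVCaptureSession whose output is written by an
// AVAssetWriter-backed recorder (IDAssetWriterCoordinator), adding
// pause/resume support by offsetting sample-buffer timestamps.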
@interface IDCaptureSessionAssetWriterCoordinator : IDCaptureSessionCoordinator
- (instancetype)initWithUsesApplicationAudioSession:(BOOL)audioSession;
// Switch between the front and back cameras.
- (void)switchCamera;
// Pause recording.
- (void)pauseRecording;
// Resume recording.
- (void)resumeRecording;
@end
NS_ASSUME_NONNULL_END
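// Usage sketch (hedged: `path`, `startRecording`, and `stopRecording:` are
// assumed to come from the IDCaptureSessionCoordinator base class, which is
// not shown here):
//
// IDCaptureSessionAssetWriterCoordinator *coordinator =
//     [[IDCaptureSessionAssetWriterCoordinator alloc] initWithUsesApplicationAudioSession:NO];
// coordinator.path = outputFilePath;  // where the movie file should be written
// [coordinator startRecording];
// [coordinator pauseRecording];       // e.g. on an interruption
// [coordinator resumeRecording];      // the timeline is stitched automatically
// [coordinator stopRecording:^(NSError *error) { /* handle completion */ }];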
//
// IDCaptureSessionAssetWriterCoordinator.m
// VideoCaptureDemo
//
// Created by lj on 2023/3/22.
// Copyright © 2023 lj. All rights reserved.
//
#import "IDCaptureSessionAssetWriterCoordinator.h"
#import "IDAssetWriterCoordinator.h"
//#define kAvailable(version) ([UIDevice currentDevice].systemVersion.floatValue >= (version))
typedef NS_ENUM(NSInteger, RecordingStatus)
{
RecordingStatusIdle = 0,
RecordingStatusStartingRecording,
RecordingStatusRecording,
RecordingStatusStoppingRecording,
RecordingStatusPaused
};
typedef NS_ENUM(NSInteger, RecordingBufferType)
{
RecordingBufferTypeVideo = 0,
RecordingBufferTypeAudio
};
@interface IDCaptureSessionAssetWriterCoordinator ()<AVCaptureAudioDataOutputSampleBufferDelegate,AVCaptureVideoDataOutputSampleBufferDelegate,IDAssetWriterCoordinatorDelegate>
{
CMTime firstSampleBufferTime;
CMTime _lastVideoBufferTime; // Timestamp of the last video frame before a pause
CMTime _audioTimeOffset; // Accumulated audio timestamp offset (e.g. from phone-call interruptions)
CMTime _videoTimeOffset; // Accumulated video timestamp offset
CFAbsoluteTime timelogccc; // Debug: last time the camera frame rate was sampled
CFAbsoluteTime timelogsss; // Debug: last time the screen-capture frame rate was sampled
long ccount; // Debug: camera frame count at the last sample
long scount; // Debug: screen frame count at the last sample
}
@property (nonatomic, strong) dispatch_queue_t audioDataOutputQueue;
@property (nonatomic, strong) dispatch_queue_t videoDataOutputQueue;
@property (nonatomic, strong) AVCaptureAudioDataOutput *audioDataOutput;
@property (nonatomic, strong) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic, strong) AVCaptureConnection *audioConnection;
@property (nonatomic, strong) AVCaptureConnection *videoConnection;
@property (nonatomic, assign) BOOL firstCapture;
@property (nonatomic, strong) IDAssetWriterCoordinator *assetWriterCoordinator;
@property (nonatomic, assign) RecordingStatus recordingStatus;
@property (nonatomic, strong) NSURL *recordingURL;
@property (nonatomic, strong) NSDictionary *videoCompressionSettings;
@property (nonatomic, strong) NSDictionary *audioCompressionSettings;
@property (nonatomic, assign) BOOL isVideoDiscount; // Whether video recording has been interrupted (paused)
@property (nonatomic, assign) BOOL isAudioDiscount; // Whether audio recording has been interrupted (paused)
@end
@implementation IDCaptureSessionAssetWriterCoordinator
#pragma mark - life cycle
- (instancetype)initWithUsesApplicationAudioSession:(BOOL)audioSession {
self = [super init];
if(self){
self.videoDataOutputQueue = dispatch_queue_create( "com.example.capturesession.videodata", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(self.videoDataOutputQueue, dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_HIGH, 0 ) );
self.audioDataOutputQueue = dispatch_queue_create( "com.example.capturesession.audiodata", DISPATCH_QUEUE_SERIAL );
NSError *error = nil;
AVAudioSession *as = [AVAudioSession sharedInstance];
if (![as setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]) {
NSLog(@"%@", error.localizedDescription);
}
[as setActive:YES error:&error];
self.captureSession = [self setupCaptureSessionWithUsesApplicationAudioSession:audioSession];
[self addDataOutputsToCaptureSession:self.captureSession];
}
return self;
}
- (AVCaptureSession *)setupCaptureSessionWithUsesApplicationAudioSession:(BOOL)audioSession
{
AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
captureSession.usesApplicationAudioSession = audioSession;
captureSession.automaticallyConfiguresApplicationAudioSession = NO;
[captureSession setSessionPreset:AVCaptureSessionPreset640x480];
// Reset the preferred sample rate; without this, the sample rate changes after one recording.
[[AVAudioSession sharedInstance] setPreferredSampleRate:48000 error:nil];
// Add the camera input.
if ([captureSession canAddInput:self.videoInput]) {
[captureSession addInput:self.videoInput];
self.positionSetting = AVCaptureDevicePositionFront;
}
// Add the microphone input.
if ([captureSession canAddInput:self.audioMicInput]) {
[captureSession addInput:self.audioMicInput];
}
return captureSession;
}
- (void)addDataOutputsToCaptureSession:(AVCaptureSession *)captureSession
{
NSDictionary *captureSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
self.videoDataOutput.videoSettings = captureSettings;
self.videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[self.videoDataOutput setSampleBufferDelegate:self queue:_videoDataOutputQueue];
[self.audioDataOutput setSampleBufferDelegate:self queue:_audioDataOutputQueue];
[self addOutput:self.videoDataOutput toCaptureSession:self.captureSession];
[self addOutput:self.audioDataOutput toCaptureSession:self.captureSession];
// self.videoConnection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
self.firstCapture = YES;
}
- (BOOL)addOutput:(AVCaptureOutput *)output toCaptureSession:(AVCaptureSession *)captureSession
{
if([captureSession canAddOutput:output]){
[captureSession addOutput:output];
return YES;
} else {
NSLog(@"can't add output: %@", [output description]);
}
return NO;
}
#pragma mark - Public Method
- (void)switchCamera
{
if (_recordingStatus == RecordingStatusRecording) {
NSLog(@"__log__视频录制期间不允许切换摄像头");
return;
}
[super switchCamera];
// Switching cameras can leave the written video rotated 180°.
// self.videoConnection.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
}
- (void)startRecording
{
if (self.path.length == 0) {
return;
}
@synchronized(self)
{
if(_recordingStatus != RecordingStatusIdle) {
// @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Already recording" userInfo:nil];
return;
}
[self transitionToRecordingStatus:RecordingStatusStartingRecording error:nil];
}
_recordingURL = [NSURL fileURLWithPath:self.path];
self.assetWriterCoordinator = [[IDAssetWriterCoordinator alloc] initWithURL:_recordingURL];
[_assetWriterCoordinator addAudioTrackWithSettings:self.audioCompressionSettings];
[_assetWriterCoordinator addVideoTrackWithSettings:self.videoCompressionSettings];
dispatch_queue_t callbackQueue = dispatch_queue_create( "com.example.capturesession.writercallback", DISPATCH_QUEUE_SERIAL ); // guarantee ordering of callbacks with a serial queue
[_assetWriterCoordinator setDelegate:self callbackQueue:callbackQueue];
[_assetWriterCoordinator prepareToRecord]; // asynchronous, will call us back with recorderDidFinishPreparing: or recorder:didFailWithError: when done
}
- (void)stopRecording:(void (^)(NSError *error))complete
{
@synchronized(self)
{
if (_recordingStatus != RecordingStatusRecording){
return;
}
[self transitionToRecordingStatus:RecordingStatusStoppingRecording error:nil];
}
[self.assetWriterCoordinator finishRecording:complete]; // asynchronous; calls us back via writerCoordinatorDidFinishRecording: when done
}
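// Pausing only flags both streams as interrupted; the actual timeline
// stitching happens lazily in appendBuffer: once buffers resume.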
- (void)pauseRecording
{
@synchronized(self) {
if (self.recordingStatus == RecordingStatusRecording) {
[self transitionToRecordingStatus:RecordingStatusPaused error:nil];
if (self.isVideoDiscount) {
_lastVideoBufferTime.flags = 0;
}
self.isVideoDiscount = YES;
self.isAudioDiscount = YES;
}
}
}
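// Resuming only flips the state back to Recording; appendBuffer: computes
// the pause gap from the first post-resume buffers and offsets from there.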
- (void)resumeRecording
{
@synchronized(self) {
if (self.recordingStatus == RecordingStatusPaused) {
[self transitionToRecordingStatus:RecordingStatusRecording error:nil];
}
}
}
#pragma mark - private method
// call under @synchronized( self )
- (void)transitionToRecordingStatus:(RecordingStatus)newStatus error:(NSError *)error
{
RecordingStatus oldStatus = self.recordingStatus;
self.recordingStatus = newStatus;
if (newStatus != oldStatus){
if (error && (newStatus == RecordingStatusIdle)){
dispatch_async(self.delegateCallbackQueue, ^{
@autoreleasepool
{
// The block already captures self strongly via self.delegate, so a weak
// reference adds nothing; the block is transient, so there is no cycle.
[self.delegate coordinator:self didFinishRecordingToOutputFileURL:self.recordingURL error:error];
}
});
} else {
error = nil; // only the above delegate method takes an error
if (oldStatus == RecordingStatusStartingRecording && newStatus == RecordingStatusRecording){
dispatch_async( self.delegateCallbackQueue, ^{
@autoreleasepool
{
[self.delegate coordinatorDidBeginRecording:self];
}
});
} else if (oldStatus == RecordingStatusStoppingRecording && newStatus == RecordingStatusIdle) {
dispatch_async( self.delegateCallbackQueue, ^{
@autoreleasepool
{
[self.delegate coordinator:self didFinishRecordingToOutputFileURL:self.recordingURL error:error];
}
});
}
}
}
}
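// Append path for camera audio and video. After a pause/interruption, the gap
// between the last written timestamp and the first new one is accumulated into
// _videoTimeOffset/_audioTimeOffset, and every subsequent buffer is shifted
// back by that offset so the output timeline has no hole.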
- (void)appendBuffer:(CMSampleBufferRef)sampleBuffer sourceType:(RecordingSourceType)sourceType bufferType:(RecordingBufferType)bufferType
{
CFRetain(sampleBuffer);
@synchronized(self) {
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
if (bufferType == RecordingBufferTypeVideo) {
if (sourceType == RecordingSourceTypeCamera) {
if (self.isVideoDiscount) {
self.isVideoDiscount = NO;
if (!CMTIME_IS_VALID(_lastVideoBufferTime)) {
// Fall back to the writer's last video end time; assigning the CMTime
// directly preserves its validity flags for the check below.
_lastVideoBufferTime = _assetWriterCoordinator.videoEndTimes;
}
CMTime last = _lastVideoBufferTime;
if (last.flags & kCMTimeFlags_Valid) {
if (_videoTimeOffset.flags & kCMTimeFlags_Valid) {
pts = CMTimeSubtract(pts, _videoTimeOffset);
}
// The gap between the current timestamp and the last written one.
CMTime offset = CMTimeSubtract(pts, last);
// Accumulate it into the running video offset.
if (_videoTimeOffset.value == 0) {
_videoTimeOffset = offset;
}else {
_videoTimeOffset = CMTimeAdd(_videoTimeOffset, offset);
}
}
// if (!self.isScreenDicount) {
// _lastVideoBufferTime.flags = 0;
// }
}
if (_videoTimeOffset.value > 0) {
NSLog(@"😁😁😁_videoTimeOffset.value");
CFRelease(sampleBuffer);
//根据得到的timeOffset调整
sampleBuffer = [self adjustTime:sampleBuffer by:_videoTimeOffset];
}
}
if (sourceType == RecordingSourceTypeCamera) {
static long vvcount = 0;
vvcount ++;
if (CFAbsoluteTimeGetCurrent() - timelogccc > 1) {
// NSLog(@"每秒摄像头视频帧返回:%ld个", vvcount - ccount);
ccount = vvcount;
timelogccc = CFAbsoluteTimeGetCurrent();
// NSLog(@"视频帧返回时长:%f", CMTimeGetSeconds(CMTimeSubtract(pts, firstSampleBufferTime)));
}
} else {
static long sscount = 0;
sscount ++;
if (CFAbsoluteTimeGetCurrent() - timelogsss > 1) {
// NSLog(@"每秒屏幕录制帧返回:%ld个", sscount - scount);
scount = sscount;
timelogsss = CFAbsoluteTimeGetCurrent();
// NSLog(@"屏幕帧返回时长:%f", CMTimeGetSeconds(CMTimeSubtract(pts, firstSampleBufferTime)));
}
}
} else if (bufferType == RecordingBufferTypeAudio) {
if (self.isAudioDiscount) {
self.isAudioDiscount = NO;
CMTime last = _assetWriterCoordinator.audioEndTimes;
if (last.flags & kCMTimeFlags_Valid) {
if (_audioTimeOffset.flags & kCMTimeFlags_Valid) {
pts = CMTimeSubtract(pts, _audioTimeOffset);
}
// The gap between the current timestamp and the last written one.
CMTime offset = CMTimeSubtract(pts, last);
// Accumulate it into the running audio offset.
if (_audioTimeOffset.value == 0) {
_audioTimeOffset = offset;
}else {
_audioTimeOffset = CMTimeAdd(_audioTimeOffset, offset);
}
}
}
if (_audioTimeOffset.value > 0) {
CFRelease(sampleBuffer);
//根据得到的timeOffset调整
sampleBuffer = [self adjustTime:sampleBuffer by:_audioTimeOffset];
}
}
}
if (sampleBuffer) { // adjustTime:by: can return NULL; drop the frame in that case
if (bufferType == RecordingBufferTypeAudio) {
if (sourceType == RecordingSourceTypeCamera) {
[_assetWriterCoordinator appendAudioSampleBuffer:sampleBuffer];
}
} else if (bufferType == RecordingBufferTypeVideo) {
if (sourceType == RecordingSourceTypeCamera) {
[_assetWriterCoordinator appendVideoSampleBuffer:sampleBuffer sourceType:sourceType];
}
}
// Balances the CFRetain at the top (or ownership of the adjusted copy).
CFRelease(sampleBuffer);
}
}
// Returns a copy of the sample buffer with every timing entry shifted back by
// timeOffset. Returns NULL on failure, in which case the caller drops the frame.
- (CMSampleBufferRef)adjustTime:(CMSampleBufferRef)sampleBuffer by:(CMTime)timeOffset {
CMItemCount itemCount;
OSStatus status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, 0, NULL, &itemCount);
if (status != noErr) {
return NULL;
}
CMSampleTimingInfo *timingInfo = malloc(sizeof(CMSampleTimingInfo) * (size_t)itemCount);
if (!timingInfo) {
return NULL;
}
status = CMSampleBufferGetSampleTimingInfoArray(sampleBuffer, itemCount, timingInfo, &itemCount);
if (status != noErr) {
free(timingInfo);
return NULL;
}
for (CMItemCount i = 0; i < itemCount; i++) {
timingInfo[i].presentationTimeStamp = CMTimeSubtract(timingInfo[i].presentationTimeStamp, timeOffset);
timingInfo[i].decodeTimeStamp = CMTimeSubtract(timingInfo[i].decodeTimeStamp, timeOffset);
}
CMSampleBufferRef offsetSampleBuffer = NULL;
CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, sampleBuffer, itemCount, timingInfo, &offsetSampleBuffer);
free(timingInfo);
return offsetSampleBuffer;
}
#pragma mark - AVCaptureAudioDataOutputSampleBufferDelegate,AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
@synchronized(self) {
if ([self.delegate respondsToSelector:@selector(IDCaptureOutput:didOutputSampleBuffer:fromConnection:)]) {
[self.delegate IDCaptureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection];
}
}
if (self.recordingStatus == RecordingStatusPaused) {
return;
}
CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
if (formatDescription != NULL) {
if (output == self.videoDataOutput){
if (self.firstCapture) {
// Don't render the first sample buffer.
self.firstCapture = NO;
firstSampleBufferTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
} else {
// [self setupVideoPipelineWithInputFormatDescription:formatDescription];
@synchronized(self) {
if(_recordingStatus == RecordingStatusRecording){
[self appendBuffer:sampleBuffer sourceType:RecordingSourceTypeCamera bufferType:RecordingBufferTypeVideo];
}
}
}
} else if (output == self.audioDataOutput){
@synchronized(self) {
if ([self.delegate respondsToSelector:@selector(IDNeedDetectAudioSampleBuffer:)]) {
[self.delegate IDNeedDetectAudioSampleBuffer:sampleBuffer];
}
if(_recordingStatus == RecordingStatusRecording) {
[self appendBuffer:sampleBuffer sourceType:RecordingSourceTypeCamera bufferType:RecordingBufferTypeAudio];
}
}
}
}
}
#pragma mark - IDAssetWriterCoordinatorDelegate
- (void)writerCoordinatorDidFinishPreparing:(IDAssetWriterCoordinator *)coordinator {
@synchronized(self)
{
if(_recordingStatus != RecordingStatusStartingRecording){
@throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Expected to be in StartingRecording state" userInfo:nil];
}
[self transitionToRecordingStatus:RecordingStatusRecording error:nil];
}
}
- (void)writerCoordinator:(IDAssetWriterCoordinator *)coordinator didFailWithError:(NSError *)error {
@synchronized( self ) {
self.assetWriterCoordinator = nil;
[self transitionToRecordingStatus:RecordingStatusIdle error:error];
if ([self.delegate respondsToSelector:@selector(IDWriterCoordinator:didFailWithError:)]) {
// Pass the failing coordinator parameter; self.assetWriterCoordinator was just nilled out above.
[self.delegate IDWriterCoordinator:coordinator didFailWithError:error];
}
}
}
- (void)writerCoordinatorDidFinishRecording:(IDAssetWriterCoordinator *)coordinator {
@synchronized( self )
{
if ( _recordingStatus != RecordingStatusStoppingRecording ) {
@throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Expected to be in StoppingRecording state" userInfo:nil];
}
// No state transition, we are still in the process of stopping.
// We will be stopped once we save to the assets library.
}
self.assetWriterCoordinator = nil;
@synchronized( self ) {
[self transitionToRecordingStatus:RecordingStatusIdle error:nil];
}
}
#pragma mark - getter and setter
- (AVCaptureAudioDataOutput *)audioDataOutput
{
if (!_audioDataOutput) {
_audioDataOutput = [[AVCaptureAudioDataOutput alloc] init];
}
return _audioDataOutput;
}
- (AVCaptureVideoDataOutput *)videoDataOutput
{
if (!_videoDataOutput) {
_videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
}
return _videoDataOutput;
}
- (AVCaptureConnection *)videoConnection
{
// Camera switches can rotate the written video 180°, so re-fetch the connection each time instead of caching it.
// if (!_videoConnection) {
_videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
// }
return _videoConnection;
}
- (AVCaptureConnection *)audioConnection
{
if (!_audioConnection) {
_audioConnection = [self.audioDataOutput connectionWithMediaType:AVMediaTypeAudio];
}
return _audioConnection;
}
- (NSDictionary *)audioCompressionSettings
{
if (!_audioCompressionSettings) {
_audioCompressionSettings = @{
AVFormatIDKey : @(kAudioFormatMPEG4AAC),
AVNumberOfChannelsKey : @1,
AVSampleRateKey : @16000.0f,
AVEncoderBitRateKey : @16000,
AVEncoderBitRateStrategyKey : AVAudioBitRateStrategy_Constant,
};
}
return _audioCompressionSettings;
}
- (NSDictionary *)videoCompressionSettings
{
if (!_videoCompressionSettings) {
// float compressRatio = [OCFTGlobalConfig shareInstance].compressionRatio;
float compressRatio = 0.5f;
CGFloat videoWidth = 1280;
CGFloat videoHeight = 960;
NSString *videoCodecKey;
// if (kAvailable(11)) {
videoCodecKey = AVVideoCodecTypeH264;
// if (kAvailable(15)) {
// videoHeight = 720;
// }
// } else {
// videoCodecKey = AVVideoCodecH264;
// }
// Bits per pixel.
CGFloat bitsPerPixel = 3.f / 4 * compressRatio;
NSInteger numPixels = videoWidth * videoHeight;
NSInteger bitsPerSecond = numPixels * bitsPerPixel; // Bit rate.
// bitsPerSecond = 1;
// Bit-rate and frame-rate settings (829439/1399679).
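// Worked example with the defaults above: 1280 * 960 = 1,228,800 pixels,
// bitsPerPixel = 3/4 * 0.5 = 0.375, so bitsPerSecond = 460,800 bps.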
NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
AVVideoExpectedSourceFrameRateKey : @(25),
AVVideoMaxKeyFrameIntervalKey:@(60),
};
NSDictionary *videoCompressionSettings = @{ AVVideoCodecKey : videoCodecKey,
AVVideoWidthKey : @(videoWidth),
AVVideoHeightKey : @(videoHeight),
AVVideoCompressionPropertiesKey : compressionProperties,
AVVideoScalingModeKey: AVVideoScalingModeResizeAspect
};
_videoCompressionSettings = videoCompressionSettings;
}
return _videoCompressionSettings;
}
@end