//
// IDAssetWriterCoordinator.h
// VideoCaptureDemo
//
// Created by lj on 2023/3/22.
// Copyright © 2023 lj. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
NS_ASSUME_NONNULL_BEGIN
typedef NS_ENUM(NSInteger, RecordingSourceType) {
    RecordingSourceTypeCamera = 0,
    RecordingSourceTypeScreen
};
@class IDAssetWriterCoordinator;
@protocol IDAssetWriterCoordinatorDelegate <NSObject>
- (void)writerCoordinatorDidFinishPreparing:(IDAssetWriterCoordinator *)coordinator;
- (void)writerCoordinator:(IDAssetWriterCoordinator *)coordinator didFailWithError:(NSError *)error;
- (void)writerCoordinatorDidFinishRecording:(IDAssetWriterCoordinator *)coordinator;
@end
@interface IDAssetWriterCoordinator : NSObject
// PTS of the most recently appended sample on each track ("start"), and that
// sample's PTS plus its duration ("end"); updated on every successful append.
@property (nonatomic, readonly) CMTime audioEndTimes;
@property (nonatomic, readonly) CMTime videoEndTimes;
@property (nonatomic, readonly) CMTime audioStartTimes;
@property (nonatomic, readonly) CMTime videoStartTimes;
- (instancetype)initWithURL:(NSURL *)URL;
// Pass nil to use the default settings built in the implementation.
- (void)addAudioTrackWithSettings:(nullable NSDictionary *)audioSettings;
- (void)addVideoTrackWithSettings:(nullable NSDictionary *)videoSettings;
- (void)setDelegate:(nullable id<IDAssetWriterCoordinatorDelegate>)delegate callbackQueue:(nullable dispatch_queue_t)delegateCallbackQueue;
- (void)prepareToRecord;
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer sourceType:(RecordingSourceType)sourceType;
- (void)finishRecording:(nullable void (^)(NSError * _Nullable error))complete;
@end
NS_ASSUME_NONNULL_END
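// Typical call sequence (a minimal sketch; the URL, queue choice, and capture
// wiring are illustrative assumptions, not part of this class):
//
//   IDAssetWriterCoordinator *coordinator = [[IDAssetWriterCoordinator alloc] initWithURL:movieURL];
//   [coordinator addVideoTrackWithSettings:nil]; // nil -> built-in 640x480 H.264 fallback
//   [coordinator addAudioTrackWithSettings:nil]; // nil -> built-in 16 kHz mono AAC fallback
//   [coordinator setDelegate:self callbackQueue:dispatch_get_main_queue()];
//   [coordinator prepareToRecord]; // -writerCoordinatorDidFinishPreparing: fires when appending may begin
//
//   // From the capture callbacks, once preparing has finished:
//   [coordinator appendVideoSampleBuffer:videoSampleBuffer sourceType:RecordingSourceTypeCamera];
//   [coordinator appendAudioSampleBuffer:audioSampleBuffer];
//
//   [coordinator finishRecording:^(NSError * _Nullable error) {
//       // On success (error == nil) the movie at movieURL is complete.
//   }];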
//
// IDAssetWriterCoordinator.m
// VideoCaptureDemo
//
// Created by lj on 2023/3/22.
// Copyright © 2023 lj. All rights reserved.
//
#import "IDAssetWriterCoordinator.h"
#import <AVFoundation/AVFoundation.h>
typedef NS_ENUM(NSInteger, WriterStatus) {
    WriterStatusIdle = 0,
    WriterStatusPreparingToRecord,
    WriterStatusRecording,
    WriterStatusFinishingRecordingPart1, // waiting for in-flight buffers to be appended
    WriterStatusFinishingRecordingPart2, // calling finishWriting on the asset writer
    WriterStatusFinished,                // terminal state
    WriterStatusFailed                   // terminal state
};
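// State flow, as implemented in -transitionToStatus:error: and the public methods:
//
//   Idle -> PreparingToRecord -> Recording -> FinishingRecordingPart1
//        -> FinishingRecordingPart2 -> Finished
//
// PreparingToRecord, Recording, and the finishing states may instead jump to
// Failed (writer setup, append, or finishWriting errors).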
@interface IDAssetWriterCoordinator ()
@property (nonatomic, weak) id<IDAssetWriterCoordinatorDelegate> delegate;
@property (nonatomic) dispatch_queue_t delegateCallbackQueue;
@property (nonatomic, assign) WriterStatus status;
@property (nonatomic) dispatch_queue_t writingQueue;
@property (nonatomic) NSURL *URL;
@property (nonatomic, strong) AVAssetWriter *assetWriter;
@property (nonatomic, strong) AVAssetWriterInput *audioInput;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic) CGAffineTransform videoTrackTransform;
@property (nonatomic) NSDictionary *audioTrackSettings;
@property (nonatomic) NSDictionary *videoTrackSettings;
@property (nonatomic) AVAssetWriterInputPixelBufferAdaptor *adaptor;
@property (nonatomic) BOOL haveStartedSession;
@property (nonatomic) CMTime startSB;        // PTS of the first sample appended in this session
@property (nonatomic) CFAbsoluteTime vltime; // wall-clock time of a video append, refreshed at most every 0.5 s
@property (nonatomic) CFAbsoluteTime altime; // wall-clock time of an audio append, refreshed at most every 0.5 s
@end
@implementation IDAssetWriterCoordinator
#pragma mark - init
- (instancetype)initWithURL:(NSURL *)URL
{
    if (!URL) {
        return nil;
    }
    if (self = [super init]) {
        _writingQueue = dispatch_queue_create("com.example.assetwriter.writing", DISPATCH_QUEUE_SERIAL);
        _videoTrackTransform = CGAffineTransformMakeRotation(M_PI_2); // portrait orientation
        _URL = URL;
    }
    return self;
}
- (void)addAudioTrackWithSettings:(NSDictionary *)audioSettings
{
    @synchronized (self) {
        if (_status != WriterStatusIdle) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Cannot add tracks while not idle" userInfo:nil];
        }
        _audioTrackSettings = [audioSettings copy];
    }
}
- (void)addVideoTrackWithSettings:(NSDictionary *)videoSettings
{
    @synchronized (self) {
        if (_status != WriterStatusIdle) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Cannot add tracks while not idle" userInfo:nil];
        }
        _videoTrackSettings = [videoSettings copy];
    }
}
- (void)setDelegate:(id<IDAssetWriterCoordinatorDelegate>)delegate callbackQueue:(dispatch_queue_t)delegateCallbackQueue
{
    if (delegate && (delegateCallbackQueue == NULL)) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Caller must provide a delegateCallbackQueue" userInfo:nil];
    }
    @synchronized (self) {
        _delegate = delegate;
        if (delegateCallbackQueue != _delegateCallbackQueue) {
            _delegateCallbackQueue = delegateCallbackQueue;
        }
    }
}
- (void)prepareToRecord
{
    @synchronized (self) {
        if (_status != WriterStatusIdle) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Already prepared, cannot prepare again" userInfo:nil];
        }
        [self transitionToStatus:WriterStatusPreparingToRecord error:nil];
    }
    // Capture self strongly on purpose: the coordinator must stay alive until
    // the asset writer has been created and writing has started. (Mixing a
    // weak reference with direct ivar access here would capture self strongly
    // anyway, so the weak pattern bought nothing.)
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
        @autoreleasepool {
            NSError *error = nil;
            [[NSFileManager defaultManager] removeItemAtURL:self.URL error:NULL];
            self.assetWriter = [[AVAssetWriter alloc] initWithURL:self.URL fileType:AVFileTypeQuickTimeMovie error:&error];
            self.assetWriter.shouldOptimizeForNetworkUse = YES;
            if (!error) {
                [self setupAssetWriterVideoInputWithTransform:self.videoTrackTransform settings:self.videoTrackSettings error:&error];
            }
            if (!error) {
                [self setupAssetWriterAudioInputWithSettings:self.audioTrackSettings error:&error];
            }
            if (!error) {
                BOOL success = [self.assetWriter startWriting];
                if (!success) {
                    error = self.assetWriter.error;
                }
            }
            @synchronized (self) {
                if (error) {
                    [self transitionToStatus:WriterStatusFailed error:error];
                } else {
                    [self transitionToStatus:WriterStatusRecording error:nil];
                }
            }
        }
    });
}
- (void)finishRecording:(void (^)(NSError *error))complete
{
    @synchronized (self) {
        BOOL shouldFinishRecording = NO;
        switch (_status) {
            case WriterStatusIdle:
            case WriterStatusPreparingToRecord:
            case WriterStatusFinishingRecordingPart1:
            case WriterStatusFinishingRecordingPart2:
            case WriterStatusFinished:
                @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Not recording" userInfo:nil];
                break;
            case WriterStatusFailed:
                // From the client's perspective the movie recorder can asynchronously transition to an error state as the result of an append.
                // Because of this we are lenient when finishRecording is called and we are in an error state.
                NSLog(@"Recording has failed, nothing to do");
                break;
            case WriterStatusRecording:
                shouldFinishRecording = YES;
                break;
        }
        if (shouldFinishRecording) {
            [self transitionToStatus:WriterStatusFinishingRecordingPart1 error:nil];
        } else {
            return;
        }
    }
    // Capture self strongly on purpose: the file must be finalized even if the
    // caller releases the coordinator right after calling this method.
    dispatch_async(_writingQueue, ^{
        @autoreleasepool {
            @synchronized (self) {
                // We may have transitioned to an error state as we appended in-flight buffers. In that case there is nothing to do now.
                if (self.status != WriterStatusFinishingRecordingPart1) {
                    return;
                }
                // It is not safe to call -[AVAssetWriter finishWriting*] concurrently with -[AVAssetWriterInput appendSampleBuffer:].
                // We transition to WriterStatusFinishingRecordingPart2 while on _writingQueue, which guarantees that no more buffers will be appended.
                [self transitionToStatus:WriterStatusFinishingRecordingPart2 error:nil];
            }
            [self.assetWriter finishWritingWithCompletionHandler:^{
                @synchronized (self) {
                    NSError *error = self.assetWriter.error;
                    if (error) {
                        NSLog(@"Failed to finalize the movie file, error = %@", error);
                        [self transitionToStatus:WriterStatusFailed error:error];
                    } else {
                        NSLog(@"Movie file finalized successfully");
                        [self transitionToStatus:WriterStatusFinished error:nil];
                    }
                    self.assetWriter = nil;
                    if (complete) {
                        complete(error);
                    }
                }
            }];
        }
    });
}
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeAudio andSourceType:RecordingSourceTypeCamera];
}
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer sourceType:(RecordingSourceType)sourceType
{
    // History: an alternative path used to write frames through the pixel-buffer
    // adaptor (extracting the CVPixelBufferRef and appending it via _adaptor),
    // gated on a remote config flag, iOS 15+, and non-iPad devices, with optional
    // downscaling of screen frames above 720p. Appending the sample buffer
    // directly proved more reliable (it fixed a video write failure), so that is
    // the only path kept here; a sketch of such a pixel-buffer helper follows
    // this method.
    [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeVideo andSourceType:sourceType];
}
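// The pixel-buffer path summarized above relied on a helper that is not part of
// this file. Below is a minimal sketch of what it could look like, reusing the
// existing _adaptor; the method name and exact behavior are assumptions, not the
// original implementation. The pixel buffer must match the adaptor's declared
// source format (kCVPixelFormatType_32BGRA).
- (BOOL)appendPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime
{
    if (pixelBuffer == NULL) {
        return NO;
    }
    @synchronized (self) {
        if (_status != WriterStatusRecording) {
            return NO;
        }
    }
    // Start the session on the first sample, mirroring the sample-buffer path below.
    if (!_haveStartedSession) {
        [_assetWriter startSessionAtSourceTime:presentationTime];
        _startSB = presentationTime;
        _haveStartedSession = YES;
    }
    // Drop the frame rather than block a real-time capture pipeline.
    if (!_videoInput.readyForMoreMediaData) {
        return NO;
    }
    BOOL success = [_adaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
    if (!success) {
        @synchronized (self) {
            [self transitionToStatus:WriterStatusFailed error:_assetWriter.error];
        }
    }
    return success;
}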
#pragma mark - private method
- (void)transitionToStatus:(WriterStatus)newStatus error:(NSError *)error
{
    BOOL shouldNotifyDelegate = NO;
    if (newStatus != _status) {
        // terminal states
        if ((newStatus == WriterStatusFinished) || (newStatus == WriterStatusFailed)) {
            shouldNotifyDelegate = YES;
            // make sure there are no more sample buffers in flight before we tear down the asset writer and inputs
            __weak typeof(self) weakSelf = self;
            dispatch_async(_writingQueue, ^{
                weakSelf.assetWriter = nil;
                weakSelf.videoInput = nil;
                weakSelf.audioInput = nil;
                if (newStatus == WriterStatusFailed) {
                    [[NSFileManager defaultManager] removeItemAtURL:weakSelf.URL error:nil];
                }
            });
        } else if (newStatus == WriterStatusRecording) {
            shouldNotifyDelegate = YES;
        }
        _status = newStatus;
    }
    if (shouldNotifyDelegate && self.delegate) {
        dispatch_async(_delegateCallbackQueue, ^{
            @autoreleasepool {
                switch (newStatus) {
                    case WriterStatusRecording:
                        [self.delegate writerCoordinatorDidFinishPreparing:self];
                        break;
                    case WriterStatusFinished:
                        [self.delegate writerCoordinatorDidFinishRecording:self];
                        break;
                    case WriterStatusFailed:
                        [self.delegate writerCoordinator:self didFailWithError:error];
                        break;
                    default:
                        break;
                }
            }
        });
    }
}
- (BOOL)setupAssetWriterVideoInputWithTransform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings error:(NSError **)errorOut
{
    if (!videoSettings) {
        videoSettings = [self fallbackVideoSettings];
    }
    if ([_assetWriter canApplyOutputSettings:videoSettings forMediaType:AVMediaTypeVideo]) {
        _videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        _videoInput.expectsMediaDataInRealTime = YES;
        // Apply the requested orientation; the parameter was previously accepted but never used.
        _videoInput.transform = transform;
        // The adaptor backs the (currently unused) pixel-buffer write path.
        NSDictionary *sourcePixelBufferAttributes = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
        _adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];
        if ([_assetWriter canAddInput:_videoInput]) {
            [_assetWriter addInput:_videoInput];
        } else {
            if (errorOut) {
                *errorOut = [self cannotSetupInputError];
            }
            return NO;
        }
    } else {
        if (errorOut) {
            *errorOut = [self cannotSetupInputError];
        }
        return NO;
    }
    return YES;
}
- (BOOL)setupAssetWriterAudioInputWithSettings:(NSDictionary *)audioSettings error:(NSError **)errorOut
{
    if (!audioSettings) {
        audioSettings = @{ AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                           AVSampleRateKey : @(16000),
                           AVNumberOfChannelsKey : @(1) };
    }
    if ([_assetWriter canApplyOutputSettings:audioSettings forMediaType:AVMediaTypeAudio]) {
        _audioInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings];
        _audioInput.expectsMediaDataInRealTime = YES;
        if ([_assetWriter canAddInput:_audioInput]) {
            [_assetWriter addInput:_audioInput];
        } else {
            if (errorOut) {
                *errorOut = [self cannotSetupInputError];
            }
            return NO;
        }
    } else {
        if (errorOut) {
            *errorOut = [self cannotSetupInputError];
        }
        return NO;
    }
    return YES;
}
- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer ofMediaType:(NSString *)mediaType andSourceType:(RecordingSourceType)sourceType
{
    if (sampleBuffer == NULL) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"NULL sample buffer" userInfo:nil];
    }
    @synchronized (self) {
        if (_status < WriterStatusRecording) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Not ready to record yet" userInfo:nil];
        }
    }
    CFRetain(sampleBuffer);
    dispatch_async(_writingQueue, ^{
        @autoreleasepool {
            @synchronized (self) {
                if (self->_status > WriterStatusFinishingRecordingPart1) {
                    CFRelease(sampleBuffer);
                    return;
                }
            }
            if (!CMSampleBufferDataIsReady(sampleBuffer)) {
                CFRelease(sampleBuffer);
                return;
            }
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            if (!self->_haveStartedSession) {
                [self->_assetWriter startSessionAtSourceTime:pts];
                self->_haveStartedSession = YES;
                self->_startSB = pts;
            }
            BOOL isVideo = [mediaType isEqualToString:AVMediaTypeVideo];
            AVAssetWriterInput *input = isVideo ? self->_videoInput : self->_audioInput;
            // Drop samples that do not move this track's timeline forward.
            CMTime lastPTS = isVideo ? self->_videoStartTimes : self->_audioStartTimes;
            if (lastPTS.value && CMTimeCompare(pts, lastPTS) <= 0) {
                CFRelease(sampleBuffer);
                return;
            }
            // Don't spin the run loop indefinitely waiting for the input; give up after ~1 s.
            for (int i = 0; i < 10; i++) {
                if (input.readyForMoreMediaData) {
                    break;
                }
                [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
            }
            if (input.readyForMoreMediaData) {
                if (isVideo) {
                    float gapSeconds = CMTimeGetSeconds(CMTimeSubtract(pts, self->_videoStartTimes));
                    if (gapSeconds > 1.0) {
                        NSLog(@"More than 1 second since the previous video frame");
                    }
                }
                // Append to the file (the core call).
                BOOL success = [input appendSampleBuffer:sampleBuffer];
                if (!success) {
                    NSLog(@"Failed to append %@ buffer", mediaType);
                    @synchronized (self) {
                        [self transitionToStatus:WriterStatusFailed error:self->_assetWriter.error];
                    }
                } else {
                    // Remember where this track's timeline ends, e.g. to resume after a pause.
                    CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
                    CMTime endTime = dur.value > 0 ? CMTimeAdd(pts, dur) : pts;
                    if (isVideo) {
                        if (CFAbsoluteTimeGetCurrent() - self->_vltime > 0.5) {
                            self->_vltime = CFAbsoluteTimeGetCurrent();
                        }
                        self->_videoEndTimes = endTime;
                        self->_videoStartTimes = pts;
                    } else {
                        if (CFAbsoluteTimeGetCurrent() - self->_altime > 0.5) {
                            self->_altime = CFAbsoluteTimeGetCurrent();
                        }
                        self->_audioEndTimes = endTime;
                        self->_audioStartTimes = pts;
                    }
                }
            } else {
                NSLog(@"%@ input not ready for more media data, dropping buffer", mediaType);
            }
            CFRelease(sampleBuffer);
        }
    });
}
#pragma mark - defaults
- (NSDictionary *)fallbackVideoSettings
{
    NSLog(@"No video settings provided, using default settings");
    // Fixed 640x480 output; at this size the high-bitrate branch below always
    // applies. The lower figure would matter only for smaller dimensions.
    int numPixels = 640 * 480;
    float bitsPerPixel;
    // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate.
    if (numPixels < 640 * 480) {
        bitsPerPixel = 4.05; // This bitrate approximately matches the quality produced by AVCaptureSessionPresetMedium or Low.
    } else {
        bitsPerPixel = 10.1; // This bitrate approximately matches the quality produced by AVCaptureSessionPresetHigh.
    }
    int bitsPerSecond = (int)(numPixels * bitsPerPixel);
    NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
                                             AVVideoExpectedSourceFrameRateKey : @(30),
                                             AVVideoMaxKeyFrameIntervalKey : @(30) };
    return @{ AVVideoCodecKey : AVVideoCodecTypeH264, // AVVideoCodecH264 is deprecated since iOS 11
              AVVideoWidthKey : @(640),
              AVVideoHeightKey : @(480),
              AVVideoCompressionPropertiesKey : compressionProperties };
}
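// The fallback above hard-codes 640x480. Below is a minimal sketch of a variant
// that derives the settings from the source format description instead (the
// method name is hypothetical and it is not called anywhere in this file):
- (NSDictionary *)fallbackVideoSettingsForFormatDescription:(CMFormatDescriptionRef)videoFormatDescription
{
    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(videoFormatDescription);
    int numPixels = dimensions.width * dimensions.height;
    // Same heuristic as above: lower-than-SD sources get a lower bitrate.
    float bitsPerPixel = (numPixels < 640 * 480) ? 4.05f : 10.1f;
    int bitsPerSecond = (int)(numPixels * bitsPerPixel);
    NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
                                             AVVideoExpectedSourceFrameRateKey : @(30),
                                             AVVideoMaxKeyFrameIntervalKey : @(30) };
    return @{ AVVideoCodecKey : AVVideoCodecTypeH264,
              AVVideoWidthKey : @(dimensions.width),
              AVVideoHeightKey : @(dimensions.height),
              AVVideoCompressionPropertiesKey : compressionProperties };
}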
- (NSError *)cannotSetupInputError
{
    NSString *localizedDescription = NSLocalizedString(@"Recording cannot be started", nil);
    NSString *localizedFailureReason = NSLocalizedString(@"Cannot setup asset writer input.", nil);
    NSDictionary *errorDict = @{ NSLocalizedDescriptionKey : localizedDescription,
                                 NSLocalizedFailureReasonErrorKey : localizedFailureReason };
    return [NSError errorWithDomain:@"com.example" code:0 userInfo:errorDict];
}
@end