AVFoundation 主要是用于多媒体信息的采集。
- 用途:照片采集和音、视频采集
- 应用:相机,小视频,直播
1. 基本概念
-
捕捉会话:
AVCaptureSession, 需要配置采集的分辨率sessionPreset -
捕捉设备:
AVCaptureDevice(前、后摄像头,麦克风)
- 获取视频捕捉设备:
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
- 获取音频捕捉设备:
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]
-
捕捉设备的输入:
AVCaptureDeviceInput(音频输入, 视频输入),注意在添加输入设备的时候需要做一下判断:
AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:error];
if ([self.captureSession canAddInput:videoInput]) {
    // 将 videoInput 添加到 captureSession 中
    [self.captureSession addInput:videoInput];
}
-
捕捉设备输出:
AVCaptureOutput(抽象基类),在添加输出设备的时候需要做一下判断 - (BOOL)canAddOutput:(AVCaptureOutput *)output;
AVCaptureStillImageOutput:输出静态图片
AVCaptureMovieFileOutput:输出视频文件(格式为 MOV)
AVCaptureAudioDataOutput:输出音频数据
AVCaptureVideoDataOutput:输出视频数据
-
捕捉连接:
AVCaptureConnection(建立输入输出的连接)
- 音频连接:
[self.audioDataOutput connectionWithMediaType:AVMediaTypeAudio]
- 视频连接:
[self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo]
-
捕捉预览:
AVCaptureVideoPreviewLayer(预览图层)
2. 回调处理
2.1 音频数据 & 视频数据
实现 AVCaptureAudioDataOutputSampleBufferDelegate 和 AVCaptureVideoDataOutputSampleBufferDelegate 的代理方法:
// AVCaptureAudioDataOutputSampleBufferDelegate / AVCaptureVideoDataOutputSampleBufferDelegate.
// Forwards each captured sample buffer to our own delegate, tagged with its media type.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // The media type could be derived from captureOutput; here we compare the
    // connection against the audio/video connections created during setup.
    YYCaptureType type;
    if (connection == self.audioConnection) {
        type = YYCaptureTypeAudio;
    } else if (connection == self.videoConnection) {
        type = YYCaptureTypeVideo;
    } else {
        return; // Unknown connection — nothing to forward.
    }
    // respondsToSelector: on a nil delegate returns NO, so no extra nil check needed.
    if ([self.delegate respondsToSelector:@selector(didCaptureSampleBuffer:type:)]) {
        [self.delegate didCaptureSampleBuffer:sampleBuffer type:type];
    }
}
2.2 捕获静态图片(拍照)
// Capture a single still image (photo) from the image output's video connection.
// NOTE(review): AVCaptureStillImageOutput is deprecated since iOS 10 —
// consider migrating to AVCapturePhotoOutput.
AVCaptureConnection *connection = [self.imageOutput connectionWithMediaType:AVMediaTypeVideo];
[self.imageOutput captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef _Nullable imageDataSampleBuffer, NSError * _Nullable error) {
    if (imageDataSampleBuffer) {
        // Extract JPEG data from the sample buffer.
        NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
        // Build a UIImage from the JPEG data.
        UIImage *image = [[UIImage alloc] initWithData:imageData];
        // ... use `image` (save, display, hand to a delegate, etc.)
    } else {
        // The original sample silently ignored failures; surface the error instead.
        NSLog(@"Still image capture failed: %@", error);
    }
}];
2.3 视频文件录制
实现 AVCaptureFileOutputRecordingDelegate 的代理方法:
/// AVCaptureFileOutputRecordingDelegate — invoked when file recording has started.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didStartRecordingToOutputFileAtURL:(NSURL *)fileURL fromConnections:(NSArray *)connections
{
// Recording started...
}
/// AVCaptureFileOutputRecordingDelegate — invoked when file recording has finished.
/// NOTE(review): per AVFoundation docs, `error` can be non-nil even when the file
/// is usable — check AVErrorRecordingSuccessfullyFinishedKey in error.userInfo
/// before treating this as a failure.
-(void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error
{
// Recording finished...
}
3. 控制
3.1 摄像头
-
是否支持切换摄像头,如果摄像头个数大于 1 就支持
- (BOOL)canSwitchCameras { return [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1; } -
切换摄像头,获取当前不在使用的摄像头,如果是前置摄像头就返回后置,反之亦然,然后修改
session配置。// 返回当前未使用的摄像头 - (AVCaptureDevice *)inactiveCamera { //通过查找当前激活摄像头的反向摄像头获得,如果设备只有1个摄像头,则返回nil AVCaptureDevice *device = nil; if (self.cameraCount > 1) { if ([self activeCamera].position == AVCaptureDevicePositionBack) { device = [self cameraWithPosition:AVCaptureDevicePositionFront]; } else { device = [self cameraWithPosition:AVCaptureDevicePositionBack]; } } return device; } //切换摄像头 - (BOOL)switchCameras { //判断是否有多个摄像头 if (![self canSwitchCameras]) { return NO; } //获取当前设备的反向设备 NSError *error; AVCaptureDevice *videoDevice = [self inactiveCamera]; //将输入设备封装成AVCaptureDeviceInput AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; //判断videoInput 是否为nil if (videoInput) { //标注原配置变化开始 [self.captureSession beginConfiguration]; //将捕捉会话中,原本的捕捉输入设备移除 [self.captureSession removeInput:self.activeVideoInput]; //判断新的设备是否能加入 if ([self.captureSession canAddInput:videoInput]) { //能加入成功,则将videoInput 作为新的视频捕捉设备 [self.captureSession addInput:videoInput]; //将获得设备 改为 videoInput self.activeVideoInput = videoInput; } else { //如果新设备,无法加入。则将原本的视频捕捉设备重新加入到捕捉会话中 [self.captureSession addInput:self.activeVideoInput]; } //配置完成后 AVCaptureSession commitConfiguration 会分批的将所有变更整合在一起。 [self.captureSession commitConfiguration]; } else { //创建AVCaptureDeviceInput 出现错误,则通知委托来处理该错误 [self.delegate deviceConfigurationFailedWithError:error]; return NO; } return YES; }
3.2 聚焦
-
是否支持聚焦
- (BOOL)cameraSupportsTapToFocus { //询问激活中的摄像头是否支持兴趣点对焦 return [[self activeCamera] isFocusPointOfInterestSupported]; } -
聚焦
- (void)focusAtPoint:(CGPoint)point { AVCaptureDevice *device = [self activeCamera]; //是否支持兴趣点对焦 & 是否自动对焦模式 if (device.isFocusPointOfInterestSupported && [device isFocusModeSupported:AVCaptureFocusModeAutoFocus]) { NSError *error; //锁定设备准备配置,如果获得了锁 if ([device lockForConfiguration:&error]) { //将focusPointOfInterest属性设置CGPoint device.focusPointOfInterest = point; //focusMode 设置为AVCaptureFocusModeAutoFocus device.focusMode = AVCaptureFocusModeAutoFocus; //释放该锁定 [device unlockForConfiguration]; }else{ //错误时,则返回给错误处理代理 [self.delegate deviceConfigurationFailedWithError:error]; } } }3.3 曝光
-
是否支持曝光
- (BOOL)cameraSupportsTapToExpose { //询问设备是否支持对一个兴趣点进行曝光 return [[self activeCamera] isExposurePointOfInterestSupported]; } -
修改曝光设置
static const NSString *THCameraAdjustingExposureContext; - (void)exposeAtPoint:(CGPoint)point { AVCaptureDevice *device = [self activeCamera]; AVCaptureExposureMode exposureMode = AVCaptureExposureModeContinuousAutoExposure; //判断是否支持 AVCaptureExposureModeContinuousAutoExposure 模式 if (device.isExposurePointOfInterestSupported && [device isExposureModeSupported:exposureMode]) { [device isExposureModeSupported:exposureMode]; NSError *error; //锁定设备准备配置 if ([device lockForConfiguration:&error]) { //配置期望值 device.exposurePointOfInterest = point; device.exposureMode = exposureMode; //判断设备是否支持锁定曝光的模式。 if ([device isExposureModeSupported:AVCaptureExposureModeLocked]) { //支持,则使用kvo确定设备的adjustingExposure属性的状态。 [device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:&THCameraAdjustingExposureContext]; } //释放该锁定 [device unlockForConfiguration]; } else { [self.delegate deviceConfigurationFailedWithError:error]; } } } - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { //判断context(上下文)是否为THCameraAdjustingExposureContext if (context == &THCameraAdjustingExposureContext) { //获取device AVCaptureDevice *device = (AVCaptureDevice *)object; //判断设备是否不再调整曝光等级,确认设备的exposureMode是否可以设置为AVCaptureExposureModeLocked if(!device.isAdjustingExposure && [device isExposureModeSupported:AVCaptureExposureModeLocked]) { //移除作为adjustingExposure 的self,就不会得到后续变更的通知 [object removeObserver:self forKeyPath:@"adjustingExposure" context:&THCameraAdjustingExposureContext]; //异步方式调回主队列, dispatch_async(dispatch_get_main_queue(), ^{ NSError *error; if ([device lockForConfiguration:&error]) { //修改exposureMode device.exposureMode = AVCaptureExposureModeLocked; //释放该锁定 [device unlockForConfiguration]; } else { [self.delegate deviceConfigurationFailedWithError:error]; } }); } } }
3.4 闪光灯
-
判断是否有闪光灯
//判断是否有闪光灯 - (BOOL)cameraHasFlash { return [[self activeCamera]hasFlash]; } -
获取当前闪光灯模式
//闪光灯模式 - (AVCaptureFlashMode)flashMode { return [[self activeCamera] flashMode]; } -
设置闪光灯模式
- (void)setFlashMode:(AVCaptureFlashMode)flashMode { //获取会话 AVCaptureDevice *device = [self activeCamera]; //判断是否支持闪光灯模式 if ([device isFlashModeSupported:flashMode]) { //如果支持,则锁定设备 NSError *error; if ([device lockForConfiguration:&error]) { //修改闪光灯模式 device.flashMode = flashMode; //修改完成,解锁释放设备 [device unlockForConfiguration]; } else { [self.delegate deviceConfigurationFailedWithError:error]; } } }
3.5 手电筒
-
是否支持手电筒
- (BOOL)cameraHasTorch { return [[self activeCamera]hasTorch]; } -
获取当前手电筒模式
- (AVCaptureTorchMode)torchMode { return [[self activeCamera]torchMode]; } -
修改手电筒模式
- (void)setTorchMode:(AVCaptureTorchMode)torchMode { AVCaptureDevice *device = [self activeCamera]; if ([device isTorchModeSupported:torchMode]) { NSError *error; if ([device lockForConfiguration:&error]) { device.torchMode = torchMode; [device unlockForConfiguration]; } else { [self.delegate deviceConfigurationFailedWithError:error]; } } }
4. 其他
4.1 给录制的视频生成缩略图
// Generates a small thumbnail image for the video at the given URL.
// Work happens on videoQueue; the finished UIImage is delivered on the main queue.
- (void)createThumbnailImageForVideoWithURL:(NSURL *)videoURL {
    dispatch_async(self.videoQueue, ^{
        // Build an asset and an image generator for it.
        AVAsset *asset = [AVAsset assetWithURL:videoURL];
        AVAssetImageGenerator *imageGenerator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
        // Width 100pt; height 0 means "derive from the video's aspect ratio".
        imageGenerator.maximumSize = CGSizeMake(100.0f, 0.0f);
        // Honor the track's preferred transform so rotated videos produce
        // correctly-oriented thumbnails.
        imageGenerator.appliesPreferredTrackTransform = YES;
        // copyCGImageAtTime: follows the Create Rule — we own imageRef and must release it.
        NSError *error = nil;
        CGImageRef imageRef = [imageGenerator copyCGImageAtTime:kCMTimeZero
                                                     actualTime:NULL
                                                          error:&error];
        if (!imageRef) {
            // FIX: original passed error:nil and would have handed NULL to
            // imageWithCGImage: on failure — surface the error and bail out.
            NSLog(@"Thumbnail generation failed: %@", error);
            return;
        }
        UIImage *image = [UIImage imageWithCGImage:imageRef];
        // Balance the copy to avoid leaking the CGImage.
        CGImageRelease(imageRef);
        dispatch_async(dispatch_get_main_queue(), ^{
            // FIX: capture `image` into the main-queue block so it can actually
            // be used here (display / delegate callback).
            (void)image;
        });
    });
}
4.2 不同的坐标系 CGPoint 转换
// Converts a point from the preview layer's (screen) coordinate space
// into the capture device's coordinate space.
- (CGPoint)captureDevicePointFromScreenPoint:(CGPoint)point {
    // AVCaptureVideoPreviewLayer offers two coordinate conversions:
    //   captureDevicePointOfInterestForPoint: — layer/screen point -> device point
    //   pointForCaptureDevicePointOfInterest: — device point -> layer/screen point
    AVCaptureVideoPreviewLayer *previewLayer = (AVCaptureVideoPreviewLayer *)self.layer;
    return [previewLayer captureDevicePointOfInterestForPoint:point];
}