- Create AVCaptureSession

AVCaptureSession is the hub that coordinates inputs and outputs, so the first step is to create a session:

```objc
_captureSession = [[AVCaptureSession alloc] init];
```
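Optionally, the session can also be given a higher-resolution preset at this point, which tends to make small or distant QR codes easier to decode. This is not part of the original steps; a minimal sketch:

```objc
// Optional (assumption, not in the original steps): ask for a 1080p format when available.
if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset1920x1080]) {
    _captureSession.sessionPreset = AVCaptureSessionPreset1920x1080;
}
```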
- Create AVCaptureDevice

Create an AVCaptureDevice that represents the input device:

```objc
_captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
```
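Camera access also requires the user's permission. The check below is not part of the original walkthrough, but the calls are standard AVFoundation APIs:

```objc
// Verify camera authorization before building the capture pipeline.
AVAuthorizationStatus status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
if (status == AVAuthorizationStatusNotDetermined) {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
        // granted == NO means the user refused access; scanning will not work.
    }];
} else if (status == AVAuthorizationStatusDenied || status == AVAuthorizationStatusRestricted) {
    // No camera access available: show a hint and bail out.
}
```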
- Create AVCaptureDeviceInput and add it to the session

AVCaptureDeviceInput manages the device's ports; you can think of it as an abstraction over the device. A single device may provide both video and audio capture.

```objc
_deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice error:&error];
[_captureSession addInput:_deviceInput];
```
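In practice it is safer to check the error and ask the session whether it will accept the input before adding it; a defensive variant of the same two lines:

```objc
NSError *error = nil;
_deviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:_captureDevice error:&error];
if (!_deviceInput) {
    // e.g. running on the simulator, no camera available, or access denied.
    NSLog(@"Failed to create device input: %@", error);
    return;
}
if ([_captureSession canAddInput:_deviceInput]) {
    [_captureSession addInput:_deviceInput];
}
```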
- Create AVCaptureOutput and add it to the session

To get data out of the session, we need to create an AVCaptureMetadataOutput:

```objc
_captureMetadataOutPut = [[AVCaptureMetadataOutput alloc] init];
[_captureSession addOutput:_captureMetadataOutPut];
```
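Here too, canAddOutput: guards against adding an output the session cannot take. Note that the metadata types configured in the next step can only be set after the output has been attached to the session, because availableMetadataObjectTypes is empty until then. A small sketch:

```objc
_captureMetadataOutPut = [[AVCaptureMetadataOutput alloc] init];
if ([_captureSession canAddOutput:_captureMetadataOutPut]) {
    // Add the output before setting metadataObjectTypes, otherwise
    // AVMetadataObjectTypeQRCode is not yet an available type.
    [_captureSession addOutput:_captureMetadataOutPut];
}
```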
- Set the AVCaptureMetadataOutput's delegate, scan types, and scan area

The scan area is specified through AVCaptureMetadataOutput's rectOfInterest. The frames AVCapture outputs are in landscape orientation while the iPhone screen is portrait, so the rectangle has to be rotated 90° to be correct (the code below keeps the default full-frame rect):

```objc
[_captureMetadataOutPut setMetadataObjectsDelegate:self queue:dispatch_queue_create("ease_capture_queue", NULL)];
[_captureMetadataOutPut setMetadataObjectTypes:[NSArray arrayWithObject:AVMetadataObjectTypeQRCode]];
_captureMetadataOutPut.rectOfInterest = CGRectMake(0, 0, 1, 1);
```
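As a concrete illustration of the 90° rotation above, the snippet below maps a portrait, on-screen scan window into rectOfInterest's normalized landscape coordinates by swapping x/y and width/height. scanFrame is a hypothetical scan window, and the formula assumes the preview fills the whole screen; for a preview cropped by AVLayerVideoGravityResizeAspectFill, AVCaptureVideoPreviewLayer's metadataOutputRectOfInterestForRect: does the same conversion more precisely.

```objc
// rectOfInterest lives in the metadata output's landscape space, normalized to [0, 1],
// so a portrait screen rect is rotated: x/y and width/height swap, then normalize.
// scanFrame is a hypothetical on-screen scan window (assumes a full-screen preview).
CGRect scanFrame = CGRectMake(60, 180, 200, 200);
CGSize screen = [UIScreen mainScreen].bounds.size;
_captureMetadataOutPut.rectOfInterest = CGRectMake(scanFrame.origin.y / screen.height,
                                                   scanFrame.origin.x / screen.width,
                                                   scanFrame.size.height / screen.height,
                                                   scanFrame.size.width / screen.width);
```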
- Set up the AVCaptureVideoPreviewLayer and start scanning

AVCaptureVideoPreviewLayer displays the camera output stream being captured:

```objc
_previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_captureSession];
[_previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
[self.previewLayer.session startRunning];
```
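To actually see the camera feed, the preview layer still has to be attached to the view hierarchy; a minimal sketch, assuming this runs inside a view controller:

```objc
// Attach the preview layer so the camera feed is visible, below any overlay UI.
_previewLayer.frame = self.view.layer.bounds;
[self.view.layer insertSublayer:_previewLayer atIndex:0];
```

Since -startRunning is a blocking call, it is also common to dispatch it onto a background queue rather than calling it directly on the main thread.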
- Get the scan result from the AVCaptureMetadataOutputObjectsDelegate

The delegate callback delivers the decoded metadata objects:

```objc
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection {
    // Check that we got any metadata and that it contains a QR code.
    if (metadataObjects.count > 0) {
        __block AVMetadataMachineReadableCodeObject *result = nil;
        [metadataObjects enumerateObjectsUsingBlock:^(AVMetadataMachineReadableCodeObject *obj, NSUInteger idx, BOOL *stop) {
            if ([obj.type isEqualToString:AVMetadataObjectTypeQRCode]) {
                result = obj;
                *stop = YES;
            }
        }];
        if (!result) {
            result = [metadataObjects firstObject];
        }
        dispatch_async(dispatch_get_main_queue(), ^{
            [self analyseResult:result];
        });
    }
}
```
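The implementation of analyseResult: is not shown here; one plausible version (the method name comes from the call above, the body is an assumption) reads the decoded string and stops the session so the same code is not reported repeatedly:

```objc
// Hypothetical handler: the article does not show its implementation.
- (void)analyseResult:(AVMetadataMachineReadableCodeObject *)result {
    [_captureSession stopRunning];            // stop scanning once we have a hit
    NSString *content = result.stringValue;   // decoded QR code payload
    if (content.length > 0) {
        NSLog(@"Scanned QR code: %@", content);
        // Hand the string off to whatever consumes the scan result (URL, plain text, ...).
    }
}
```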