AVFoundation-人脸识别

1,158 阅读2分钟
原文链接: developerdoc.com

Core Image框架中定义了CIDetector和CIFaceFeature,它们提供了简单而强大的人脸检测功能。
但是这些方法没有针对实时性进行优化,在AVFoundation中通过特定的AVCaptureOutput类型的 AVCaptureMetadataOutput也可以实现人脸检测。

当使用人脸检测时,会输出一个具体子类类型AVMetadataFaceObject。AVMetadataFaceObject 定义了多个用于描述被检测到的人脸的属性,最重要的是人脸的边界(bounds);此外还给出了倾斜角(roll angle),表示人的头部向肩膀方向侧倾的角度,以及偏转角(yaw angle),表示人脸绕y轴旋转的角度。

配置会话与之前使用摄像头拍照类似,区别在于更换一个output,如:

// Configures the capture session's outputs for real-time face detection.
// Returns YES on success; on failure returns NO and, when `error` is
// non-NULL, populates it with a THCameraErrorDomain error.
- (BOOL)setupSessionOutputs:(NSError **)error {

    self.metadataOutput = [[AVCaptureMetadataOutput alloc] init];           // 2

    if ([self.captureSession canAddOutput:self.metadataOutput]) {
        [self.captureSession addOutput:self.metadataOutput];

        // Restrict the emitted metadata to face objects; AV Foundation
        // supports many metadata types, but we only consume faces.
        // NOTE: metadataObjectTypes must be set AFTER the output has been
        // added to the session, otherwise no types are available.
        NSArray *metadataObjectTypes = @[AVMetadataObjectTypeFace];         // 3
        self.metadataOutput.metadataObjectTypes = metadataObjectTypes;

        // Face detection is hardware accelerated. Callbacks are delivered
        // on the main queue because the delegate updates UI layers directly.
        dispatch_queue_t mainQueue = dispatch_get_main_queue();
        // Register as the AVCaptureMetadataOutputObjectsDelegate.
        [self.metadataOutput setMetadataObjectsDelegate:self                // 4
                                                  queue:mainQueue];

        return YES;

    } else {                                                                // 5
        // Fixed error message: this method adds a metadata output, and the
        // original text ("Failed to still image output.") was also ungrammatical.
        if (error) {
            NSDictionary *userInfo = @{NSLocalizedDescriptionKey:
                                           @"Failed to add metadata output."};
            *error = [NSError errorWithDomain:THCameraErrorDomain
                                         code:THCameraErrorFailedToAddOutput
                                     userInfo:userInfo];
        }
        return NO;
    }
}

AVCaptureMetadataOutputObjectsDelegate 需要实现:

// AVCaptureMetadataOutputObjectsDelegate callback, delivered on the main
// queue each time the session emits face metadata for a video frame.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection {

    // Log a quick summary of each detected AVMetadataFaceObject.
    for (AVMetadataFaceObject *faceObject in metadataObjects) {             // 2
        NSLog(@"Face detected with ID: %li", (long)faceObject.faceID);
        NSLog(@"Face bounds: %@", NSStringFromCGRect(faceObject.bounds));
    }

    // Hand the raw face metadata off to the visualization delegate.
    [self.faceDetectionDelegate didDetectFaces:metadataObjects];            // 3
}

THFaceDetectionDelegate长这样:

/// Adopted by the object responsible for visualizing detected faces.
@protocol THFaceDetectionDelegate <NSObject>

/// Called on the main queue with the faces detected in the latest frame.
/// @param faces The detected faces; empty once all faces leave the frame,
///              which lets the delegate tear down stale overlays.
- (void)didDetectFaces:(NSArray<AVMetadataFaceObject *> *)faces;

@end

获取到AVMetadataFaceObject 后,将人脸数据可视化:

// THFaceDetectionDelegate: maintains one overlay CALayer per tracked face,
// creating, repositioning, rotating, and removing layers as faces come and go.
- (void)didDetectFaces:(NSArray *)faces {
    // Convert the faces from device coordinates into the preview layer's
    // view coordinate space.
    NSArray *transformedFaces = [self transformedFacesFromFaces:faces];

    // Assume every currently tracked face is gone; each ID seen below is
    // struck from this list, leaving only the IDs that disappeared.
    NSMutableArray *lostFaces = [self.faceLayers.allKeys mutableCopy];      // 1

    for (AVMetadataFaceObject *face in transformedFaces) {
        NSNumber *faceID = @(face.faceID);                                  // 2
        [lostFaces removeObject:faceID];

        // Reuse the layer already tracking this face, or create one.
        CALayer *layer = self.faceLayers[faceID];                           // 3
        if (layer == nil) {
            layer = [self makeFaceLayer];                                   // 4
            [self.overlayLayer addSublayer:layer];
            self.faceLayers[faceID] = layer;
        }

        // Clear any rotation from the previous frame before positioning.
        layer.transform = CATransform3DIdentity;                            // 1
        layer.frame = face.bounds;

        // Roll: head tilted toward a shoulder.
        if (face.hasRollAngle) {
            CATransform3D rollTransform =
                [self transformForRollAngle:face.rollAngle];                // 2
            layer.transform = CATransform3DConcat(layer.transform, rollTransform);
        }

        // Yaw: head turned about the y-axis.
        if (face.hasYawAngle) {
            CATransform3D yawTransform =
                [self transformForYawAngle:face.yawAngle];                  // 4
            layer.transform = CATransform3DConcat(layer.transform, yawTransform);
        }
    }

    // Tear down overlays for faces that are no longer on screen.
    for (NSNumber *faceID in lostFaces) {                                   // 6
        CALayer *staleLayer = self.faceLayers[faceID];
        [staleLayer removeFromSuperlayer];
        [self.faceLayers removeObjectForKey:faceID];
    }
}

//将设备坐标空间转换为视图空间对象集合
// Maps each metadata object from device coordinates into the preview
// layer's coordinate space so it can be drawn on screen.
- (NSArray *)transformedFacesFromFaces:(NSArray *)faces {                   // 2
    NSMutableArray *transformedFaces = [NSMutableArray array];
    for (AVMetadataObject *face in faces) {
        AVMetadataObject *transformedFace =                                 // 3
            [self.previewLayer transformedMetadataObjectForMetadataObject:face];
        // transformedMetadataObjectForMetadataObject: returns nil when the
        // object cannot be converted (e.g. it did not originate from this
        // layer's session); inserting nil into an NSMutableArray throws
        // NSInvalidArgumentException, so skip those objects.
        if (transformedFace) {
            [transformedFaces addObject:transformedFace];
        }
    }
    return transformedFaces;
}

// Builds the bordered layer used to highlight a single detected face.
- (CALayer *)makeFaceLayer {
    CALayer *faceLayer = [CALayer layer];
    faceLayer.borderWidth = 5.0f;
    UIColor *borderColor =
        [UIColor colorWithRed:0.188 green:0.517 blue:0.877 alpha:1.000];
    faceLayer.borderColor = borderColor.CGColor;
    return faceLayer;
}

// Rotate around Z-axis
- (CATransform3D)transformForRollAngle:(CGFloat)rollAngleInDegrees {        // 3
    CGFloat rollAngleInRadians = THDegreesToRadians(rollAngleInDegrees);
    return CATransform3DMakeRotation(rollAngleInRadians, 0.0f, 0.0f, 1.0f);
}

// Rotate around Y-axis
- (CATransform3D)transformForYawAngle:(CGFloat)yawAngleInDegrees {          // 5
    CGFloat yawAngleInRadians = THDegreesToRadians(yawAngleInDegrees);

    CATransform3D yawTransform =
        CATransform3DMakeRotation(yawAngleInRadians, 0.0f, -1.0f, 0.0f);

    return CATransform3DConcat(yawTransform, [self orientationTransform]);
}