【iOS】一个简单的人脸跟踪Demo

436 阅读1分钟

1、属性说明：`sessionView` 是承载相机画面的容器 View；`self.detector` 是脸部特征识别器（CIDetector）。

// Builds the UI: a full-screen container for the camera preview, an image view
// that will track the detected face, and three translucent marker views for
// the left eye, right eye and mouth. Also creates the CIDetector used for
// face-feature detection.
- (void)viewDidLoad {
    [super viewDidLoad];

    // Container that the AVCaptureVideoPreviewLayer is later added to.
    self.sessionView = [[UIView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:self.sessionView];

    // Image overlaid on the detected face; sized lazily once a face is found.
    self.faceView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"a"]];
    self.faceView.frame = CGRectZero;
    [self.view addSubview:self.faceView];

    // Translucent markers for the individual facial features.
    self.leftEyeView = [self xyz_featureMarkerWithColor:[UIColor greenColor]];
    self.rightEyeView = [self xyz_featureMarkerWithColor:[UIColor yellowColor]];
    self.mouthView = [self xyz_featureMarkerWithColor:[UIColor redColor]];

    // High-accuracy face detector backed by a default Core Image context.
    self.context = [CIContext context];
    self.detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                       context:self.context
                                       options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
}

// Creates one translucent feature-marker view, adds it to the view hierarchy
// and returns it. Frame stays CGRectZero until the first detection sizes it.
- (UIView *)xyz_featureMarkerWithColor:(UIColor *)color {
    UIView *marker = [[UIView alloc] init];
    marker.alpha = 0.4;
    marker.backgroundColor = color;
    [self.view addSubview:marker];
    return marker;
}

2、点击屏幕任意地方打开相机

- (void)touchesBegan:(NSSet<UITouch *> *)touches withEvent:(UIEvent *)event {
    [super touchesBegan:touches withEvent:event];

    // Avoid stacking sessions/layers on repeated taps: stop the previous
    // session and remove its preview layer before building a new one.
    [self.session stopRunning];
    [self.layer removeFromSuperlayer];

    self.session = [[AVCaptureSession alloc] init];

    // Device — the default video capture device (nil on Simulator, so guard).
    AVCaptureDevice *defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (!defaultDevice) {
        NSLog(@"No video capture device available");
        return;
    }

    // Input — check the result; `deviceInputWithDevice:error:` returns nil on
    // failure, and adding an incompatible input to a session throws.
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:defaultDevice error:&error];
    if (!input) {
        NSLog(@"Failed to create camera input: %@", error);
        return;
    }
    if ([self.session canAddInput:input]) {
        [self.session addInput:input];
    }

    // Output — frames are delivered to the sample-buffer delegate on a
    // background queue (UI updates from that callback must hop to main).
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    [output setSampleBufferDelegate:(id)self
                              queue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)];
    if ([self.session canAddOutput:output]) {
        [self.session addOutput:output];
    }

    // Start capturing camera frames.
    [self.session startRunning];

    // Attach the live preview layer to the container view.
    self.layer = [AVCaptureVideoPreviewLayer layerWithSession:self.session];
    self.layer.frame = self.view.bounds;
    [self.sessionView.layer addSublayer:self.layer];
}

3、脸部特征跟踪

// AVCaptureAudioDataOutputSampleBufferDelegate
// AVCaptureVideoDataOutputSampleBufferDelegate — called on the background
// queue set in touchesBegan:, once per captured frame. (The original comment
// named the Audio delegate protocol; this is the Video one.)
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    // 1. Grab the current frame as a CIImage.
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return;
    }
    CIImage *image = [[CIImage alloc] initWithCVImageBuffer:imageBuffer];

    CGFloat imageW = image.extent.size.width;
    CGFloat imageH = image.extent.size.height;

    // 2. Run face detection on the frame. Guard the cast: featuresInImage:
    // returns CIFeature objects, only CIFaceFeature has eye/mouth positions.
    CIFeature *feature = [[self.detector featuresInImage:image] lastObject];
    if (![feature isKindOfClass:[CIFaceFeature class]]) {
        return;
    }
    CIFaceFeature *face = (CIFaceFeature *)feature;

    // 3. Update the overlays. All UIKit access must happen on the main thread,
    // so the one-time sizing of the markers AND the position updates are both
    // dispatched to main (the original touched view frames on this background
    // queue, which is undefined behavior in UIKit).
    dispatch_async(dispatch_get_main_queue(), ^{
        if (self.leftEyeView.frame.size.width == 0) {
            self.leftEyeView.frame = CGRectMake(0, 0, 20, 20);
        }
        if (self.rightEyeView.frame.size.width == 0) {
            self.rightEyeView.frame = CGRectMake(0, 0, 20, 20);
        }
        if (self.mouthView.frame.size.width == 0) {
            self.mouthView.frame = CGRectMake(0, 0, 20, 20);
        }

        // The captured image is in landscape while the device is held rotated
        // (home button on the right), so x/y axes are swapped when mapping
        // from image coordinates to view coordinates.
        self.faceView.frame = CGRectMake(face.bounds.origin.y / imageW * self.sessionView.frame.size.height,
                                         face.bounds.origin.x / imageH * self.sessionView.frame.size.width,
                                         face.bounds.size.width / imageH * self.sessionView.frame.size.width,
                                         face.bounds.size.height / imageW * self.sessionView.frame.size.height);

        self.leftEyeView.center = CGPointMake(face.leftEyePosition.y / imageW * self.sessionView.frame.size.height,
                                              face.leftEyePosition.x / imageH * self.sessionView.frame.size.width);

        self.rightEyeView.center = CGPointMake(face.rightEyePosition.y / imageW * self.sessionView.frame.size.height,
                                               face.rightEyePosition.x / imageH * self.sessionView.frame.size.width);

        self.mouthView.center = CGPointMake(face.mouthPosition.y / imageW * self.sessionView.frame.size.height,
                                            face.mouthPosition.x / imageH * self.sessionView.frame.size.width);
    });
}

# 大功告成

使用时记得把手机横过来（home 键在右边）。Demo 地址：github.com/MagicBlind/…