A demo of using face-api.js in Vue 3

Without further ado, here's the code.
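One quick setup note before the code: faceapi.nets.*.loadFromUri('/models') fetches the pre-trained weight files over HTTP, so they must be served at /models. In a typical Vite or Vue CLI project that means copying the weights from the face-api.js repository into public/models, roughly like this (the file names below are the standard ones shipped with face-api.js; adjust if your copies differ):

public/
  models/
    tiny_face_detector_model-weights_manifest.json
    tiny_face_detector_model-shard1
    face_landmark_68_model-weights_manifest.json
    face_landmark_68_model-shard1
    face_recognition_model-weights_manifest.json
    face_recognition_model-shard1
    face_recognition_model-shard2
    face_expression_model-weights_manifest.json
    face_expression_model-shard1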

<template>
  <div class="video-container card">
    <video ref="video" width="720" height="560" autoplay muted></video>
    <canvas ref="canvas"></canvas>
  </div>
</template>
  
<script lang="ts" setup>
import { onMounted, ref } from 'vue';
import * as faceapi from 'face-api.js';
const video = ref();
const canvas = ref();
onMounted(async () => {
  // Load the face-api.js models from the statically served /models directory
  await faceapi.nets.tinyFaceDetector.loadFromUri('/models');
  await faceapi.nets.faceLandmark68Net.loadFromUri('/models');
  await faceapi.nets.faceRecognitionNet.loadFromUri('/models');
  await faceapi.nets.faceExpressionNet.loadFromUri('/models');
  // Start the webcam, then size the overlay canvas to match the video element
  startVideo();
  const canvasElement = canvas.value;
  faceapi.matchDimensions(canvasElement, { width: video.value.width, height: video.value.height });

  const drawFace = async () => {
    // Detect all faces in the current video frame, with landmarks and expressions
    const detections = await faceapi
      .detectAllFaces(video.value, new faceapi.TinyFaceDetectorOptions())
      .withFaceLandmarks()
      .withFaceExpressions();
    // Scale the results to the displayed size, then redraw the overlay
    const resizedDetections = faceapi.resizeResults(detections, { width: video.value.width, height: video.value.height });
    canvasElement.getContext('2d').clearRect(0, 0, canvasElement.width, canvasElement.height);
    faceapi.draw.drawDetections(canvasElement, resizedDetections);
    faceapi.draw.drawFaceLandmarks(canvasElement, resizedDetections);
    faceapi.draw.drawFaceExpressions(canvasElement, resizedDetections);
  };

  // Re-run detection roughly every 100 ms
  setInterval(drawFace, 100);
});

function startVideo() {
  // Use the standard Promise-based MediaDevices API (navigator.getUserMedia is deprecated)
  navigator.mediaDevices
    .getUserMedia({ video: true })
    .then((stream) => (video.value.srcObject = stream))
    .catch((error) => console.error(error));
}
</script>
  
<style scoped>
canvas {
  position: absolute;
}

.video-container {
  display: flex;
  justify-content: center;
  align-items: center;
  width: 100%;
  height: 100%;
}
</style>
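As written, the demo never stops the detection loop or the camera stream. Below is a minimal cleanup sketch (my addition, not part of the original demo) that could go inside the same <script setup> block; it assumes the interval id returned in onMounted is kept in a variable named timer:

import { onUnmounted } from 'vue';

// Assumption: assign the id in onMounted, e.g. timer = window.setInterval(drawFace, 100)
let timer: number | undefined;

onUnmounted(() => {
  // Stop the detection loop
  if (timer !== undefined) clearInterval(timer);
  // Stop the webcam tracks so the camera indicator turns off
  const stream = video.value?.srcObject as MediaStream | null;
  stream?.getTracks().forEach((track) => track.stop());
});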