h5项目,video + canvas 实现身份证ocr拍照功能

781 阅读2分钟

实现背景

如果不想调用原生拍照功能的话,使用video标签+canvas也可以实现拍照功能,而且还可以在实现拍照功能的基础上实现添加蒙板等功能,那么要怎么实现呢?

实现原理

使用video标签获取用户媒体权限,实现实时视频功能。点击拍照,使用canvas将该video标签中的这一帧数据流捕获并保存起来,并将数据流数据转成图片类型的文件进行展示和保存。最后将保存的图片数据流传送给后端,整个拍照的实现流程就完事了。具体的添加蒙板和样式,在使用标签的时候添加就可以啦~

实现的具体tips

一、使用video标签的时候,ios系统出现黑屏状态

可以在video标签上添加以下属性来解决。

muted x5-playsinline playsinline webkit-playsinline

二、如何使video的视频画面按照自己的比例来展示并且画质不会变得模糊?

有的同学会使用object-fit: cover; 来实现将画面撑开整个屏幕,我开始的时候也是这么实现的,后来发现进行图片剪裁的时候会因为获取不到适配的坐标,从而剪出的图片总是有问题的。后来我发现使用这两个css样式可以解决上述问题,将video的画面帧从左上角开始,画面裁剪的时候坐标问题就解决了,想裁剪什么样的图片都可以准确地裁剪出来了。

object-fit: none; 
object-position: left top;

三、如何实现画面捕捉的数据流转成img临时图片用于展示?

使用canvas将video数据捕捉绘制到画布上后,就可以使用canvas的toDataURL方法将其转为base64数据,然后调用工具函数将base64转成file,再由file文件转成img的临时url来进行展示。

/**
 * Convert a base64 data URL (e.g. from canvas.toDataURL) into a File object.
 * @param {string} dataUrl - data URL of the form "data:<mime>;base64,<payload>"
 * @param {string} filename - base name for the file; the extension is derived from the MIME type
 * @returns {File}
 */
export function dataURLtoFile(dataUrl, filename) {
    const arr = dataUrl.split(',')
    // MIME type sits between ':' and ';' in the data-URL header
    const mime = arr[0].match(/:(.*?);/)[1]
    const suffix = mime.split('/')[1]
    // Decode base64 payload into raw bytes
    const bstr = atob(arr[1])
    let n = bstr.length
    const u8arr = new Uint8Array(n)

    while (n--) {
        u8arr[n] = bstr.charCodeAt(n);
    }
    // FIX: the original returned `$(unknown).${suffix}` — "$(...)" is NOT
    // template interpolation, so every file was literally named
    // "$(unknown).<ext>" and the filename argument was silently ignored.
    return new File([u8arr], `${filename}.${suffix}`, {type: mime})
}

// "data" is the base64 data-URL string produced by canvas.toDataURL(...)
let file = dataURLtoFile(data, 'attachmentFile')
// createObjectURL yields a temporary blob: URL that an <img> tag can display
let url = window.URL.createObjectURL(file)

完整代码

因为想兼容ios系统,这里使用了recordrtc这个插件。在使用之前先执行 npm install recordrtc 命令安装,然后再使用。

<template>
  <div>
    <div class="wrapper">
      <!-- Live camera preview; hidden (display:none) once hasTake flips to false after a capture -->
      <div class="video-wrapper" :style="{display: !hasTake ? 'none' : null}">
        <!-- muted + the playsinline attribute family avoids the iOS black-screen/fullscreen takeover;
             object-fit:none with object-position:left top keeps canvas crop coordinates aligned with the frame -->
        <video id="video" :width="identityWidth" :height="identityHeight" muted x5-playsinline playsinline webkit-playsinline style="object-fit: none; object-position: left top"></video>
      </div>
      <!-- drawing surface the captured frame is painted onto -->
      <div class="canvas-wrapper">
        <canvas class="canvas-content" id="canvas" :width="identityWidth" :height="identityHeight"></canvas>
      </div>
      <!-- ID-card alignment mask: rx_photo = portrait side, gh_photo = national-emblem side -->
      <div class="video-mask">
        <img v-if="ocrType === 'rx_photo'" src="../assets/img/identity/ocr_img_rxm@2x.png">
        <img v-else-if="ocrType === 'gh_photo'" src="../assets/img/identity/ocr_img_ghm@2x.png">
        <img v-else src="../assets/img/identity/ocr_img_rxm@2x.png">
      </div>
      <!-- shutter button -->
      <div class="btn-wrapper">
        <div class="btn-cycle">
          <div class="btn-content" id="takePhoto" @click="takePhoto">拍照</div>
        </div>
      </div>
    </div>
    <!-- spinner shown while the captured image is handed off to the parent -->
    <loading v-if="uploading" />
  </div>
</template>

<script>
import RecordRTC from "recordrtc";

export default {
  name: "index",

  props: {
    // Which guide mask to show over the camera: 'rx_photo' (portrait side)
    // or 'gh_photo' (national-emblem side) of the ID card.
    ocrType: {
      type: String,
      default: 'rx_photo'
    }
  },

  data() {
    return {
      video: {},            // the <video> element, cached by initCamera/success
      hasTake: true,        // flipped to false after a photo is captured (hides the preview)
      identityWidth: 375,   // rendered width of the video/canvas, in CSS px
      identityHeight: 578,  // rendered height of the video/canvas, in CSS px
      urlParams: {},        // NOTE(review): unused in this component — verify before removing
      uploading: false,     // true while the captured image is being handed off
      deviceIds: [],        // camera device ids (currently unused)
      mediaRecorder: {},    // NOTE(review): unused — the RecordRTC instance lives on this.recorder
      recorderFile: {},     // NOTE(review): unused
      recorder: null,       // RecordRTC wrapper (kept for iOS compatibility); was previously undeclared
      streamInfo: null,     // active MediaStream, needed by closeStream(); was previously undeclared
    }
  },
  mounted() {
    this.initCamera()
  },
  methods: {
    /**
     * Ask for camera permission and start the live preview.
     */
    initCamera() {
      const constraints = {
        audio: false, // photo capture only — no microphone
        video: {
          deviceId: "default",
          // facingMode: "user", // front camera
          facingMode: 'environment', // rear camera
        }
      }

      this.video = document.getElementById('video');
      // FIX: the original tested the legacy prefixed APIs
      // (navigator.getUserMedia & co.) but never actually called them —
      // a silent no-op path — and it dereferenced
      // navigator.mediaDevices.getUserMedia without first checking that
      // navigator.mediaDevices exists, which throws a TypeError on older
      // browsers. Only the standard API is supported; everything else alerts.
      if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
        navigator.mediaDevices.getUserMedia(constraints).then(this.success).catch(this.error);
      } else {
        alert('不支持访问用户媒体');
      }
    },

    /**
     * getUserMedia success callback: attach the stream to the <video>
     * element and start playback. The stream is also wrapped in RecordRTC
     * for iOS compatibility and kept on this.streamInfo for later cleanup.
     * @param {MediaStream} stream
     */
    success(stream) {
      this.recorder = RecordRTC(stream, {
        type: "video",
      });
      this.streamInfo = stream
      this.recorder.stream = stream;
      this.video = document.querySelector("video");
      // FIX: the original assigned to the bare global `video`, which only
      // worked through the implicit window.<element-id> binding. Use the
      // element held by the component instead.
      this.video.srcObject = this.recorder.stream;
      this.video.play()
    },

    /**
     * getUserMedia failure callback.
     * @param {Error} error
     */
    error(error) {
      // Keep the error detail instead of discarding it (the original logged
      // only a fixed message).
      console.log("访问用户媒体设备失败", error);
    },

    /**
     * Capture the current video frame onto the canvas, convert it to a PNG
     * data URL, emit it to the parent via 'uploadOCRCallback', then release
     * the camera.
     */
    takePhoto() {
      const canvas = document.getElementById("canvas");

      let context
      if (canvas.getContext) { // canvas is supported
        context = this.setupCanvas(canvas);
      }

      const video = this.video
      const { targetWidth, targetHeight } = this.photoSizeFormat({ video })
      // Resizing the canvas clears it and resets the context transform that
      // setupCanvas() applied, so the dpr scaling above is discarded here.
      canvas.width = targetWidth
      canvas.height = targetHeight
      // Copy the displayed region of the frame. The <video> uses
      // object-fit:none / object-position:left top, so the visible crop
      // starts at the frame's top-left corner.
      context.drawImage(video, 0, 0, video.width, video.height, 0, 0, targetWidth, targetHeight)
      const imageData = canvas.toDataURL("image/png")

      this.uploading = true
      this.video = {}

      // Arrow function keeps the component `this`; the original's `_this`
      // alias was redundant and has been removed.
      setTimeout(() => {
        this.hasTake = false
        this.$emit('uploadOCRCallback', imageData)
      }, 600)
      this.closeStream()
    },

    /**
     * Clamp the capture size so neither dimension exceeds the base limits,
     * preserving the video's aspect ratio.
     * @param {Object} opts
     * @param {HTMLVideoElement} opts.video - element whose width/height attributes drive the result
     * @param {number} [opts.baseWith=2000] - maximum width (sic: misspelled name kept for caller compatibility)
     * @param {number} [opts.baseHeight=2000] - maximum height
     * @returns {{targetHeight: number, targetWidth: number}}
     */
    photoSizeFormat({ video, baseWith = 2000, baseHeight = 2000}) {
      let targetWidth
      let targetHeight
      if (video.width > baseWith && video.height > baseHeight) {
        // Both sides too large: shrink uniformly by the tighter ratio.
        const rate = Math.min(baseWith / video.width, baseHeight / video.height)
        targetWidth = video.width * rate
        targetHeight = video.height * rate
      } else if (video.width > baseWith) {
        targetWidth = baseWith
        targetHeight = (baseWith / video.width) * video.height
      } else if (video.height > baseHeight) {
        targetHeight = baseHeight
        targetWidth = (baseHeight / video.height) * video.width
      } else {
        // Already within limits — keep as-is.
        targetWidth = video.width
        targetHeight = video.height
      }

      return {
        targetWidth,
        targetHeight
      }
    },

    /**
     * Size the canvas backing store for the device pixel ratio and return a
     * pre-scaled 2d context.
     * NOTE(review): takePhoto() overwrites canvas.width/height right after
     * calling this, which resets the transform applied here — the dpr
     * scaling is effectively discarded. Confirm before relying on it.
     * @param {HTMLCanvasElement} canvas
     * @returns {CanvasRenderingContext2D}
     */
    setupCanvas(canvas) {
      const dpr = window.devicePixelRatio || 1;
      const rect = canvas.getBoundingClientRect();
      canvas.width = rect.width * dpr;
      canvas.height = rect.height * dpr;
      const ctx = canvas.getContext('2d');
      ctx.scale(dpr, dpr);
      return ctx;
    },

    /**
     * Stop every track of the active camera stream, releasing the camera.
     */
    closeStream() {
      // FIX: guard against takePhoto firing before getUserMedia has
      // resolved — the original threw a TypeError when streamInfo was
      // still an empty placeholder.
      if (!this.streamInfo || typeof this.streamInfo.getTracks !== 'function') return;
      const tracks = this.streamInfo.getTracks();
      tracks.forEach(track => {
        track.stop();
      });
    }
  }
}
</script>

<style scoped lang="less">
/* full-screen dark stage that centers the preview stack */
.wrapper {
  position: relative;
  top: 0px;
  display: flex;
  justify-content: center;
  align-items: center;
  background-color: rgba(0, 0, 0, 0.92);
  height: 100vh;
  width: 100%;
}

/* live <video> preview layer */
.video-wrapper {
  position: absolute;
  top: 0px;
  z-index: 2;
}
/* capture canvas sits above the video */
.canvas-wrapper {
  position: absolute;
  top: 0px;
  z-index: 3;
}

/* ID-card alignment overlay */
.video-mask {
  position: absolute;
  top: 0px;
  left: 0px;
  right: 0px;
  z-index: 4;

  img {
    width: 100%;
    height: auto;
    text-align: center;
    /* NOTE: has no effect — the img is statically positioned */
    left: calc(0.5* 100vw - 176px);
  }
}

.canvas-content {
  display: flex;
  justify-content: center;
  align-items: center;
}

/* shutter-button placement for short screens (<= 700px tall) */
@media screen and (max-height: 700px) {
  .btn-wrapper {
    position: absolute;
    top: calc(0.5 * 100vh + 254px);
    z-index: 4;

    .btn-cycle {
      width: 70px;
      height: 70px;
      background: rgba(255, 255, 255, 0.86);
      border-radius: 50%;
      z-index: 5;
      display: flex;
      justify-content: center;
      align-items: center;
    }

    .btn-content {
      width: 60px;
      height: 60px;
      border-radius: 50%;
      background-color: #FFF;
      color: #EE1710;
      z-index: 6;
      display: flex;
      justify-content: center;
      align-items: center;
    }
  }
}

/* FIX: was `min-height: 720px`, which left viewports 701-719px tall with an
   unpositioned .btn-wrapper (the shutter button lost its placement).
   701px makes the two queries cover every height with no gap. */
@media screen and (min-height: 701px) {
  .btn-wrapper {
    position: absolute;
    top: calc(0.5 * 100vh + 240px);
    z-index: 4;

    .btn-cycle {
      width: 80px;
      height: 80px;
      background: rgba(255, 255, 255, 0.86);
      border-radius: 50%;
      z-index: 5;
      display: flex;
      justify-content: center;
      align-items: center;
    }

    .btn-content {
      width: 70px;
      height: 70px;
      border-radius: 50%;
      background-color: #FFF;
      color: #EE1710;
      z-index: 6;
      display: flex;
      justify-content: center;
      align-items: center;
    }
  }
}
</style>