我司有人脸抓拍需求,所以我决定在网页中使用getUserMedia。
因为我是使用uni-app来进行编写(为了以后多端情况)
与uni-app对video又进行了一层封装不同,使用原生video就简单很多。
就以我使用uni-app进行webRTC的情况记录一下,也许能够给其他人一个启发。
因为getUserMedia涉及到video,但是在uni-app中video是经过封装的组件,外面套了好几层,如图
所以我们需要找到原生video,使用querySelector找到video,canvas同理
最最关键的一点是,如果在iOS端动态创建dom有可能会出现异常失败的情况,就是你可以调用getUserMedia,也可能在dom树中看到video,但是页面就是不显示,这是在uni-app中的情况,原生没试。
猜测原因:如果启用了x5同层播放器,动态插入的video是会被浏览器劫持的
代码简略如下:
<template>
<!-- Fixed overlay holding the live camera preview and the capture canvas. -->
<view class="Track">
<div class="track-container">
<div class="track_content">
<!-- Native video element (reached through uni-app's wrapper in mounted()).
     webkit-playsinline/playsinline keep playback inline on iOS instead of
     going fullscreen; x5-video-player-type="h5-page" requests X5 same-layer
     rendering — per the note above, this is presumably why a statically
     declared video works where a dynamically inserted one gets hijacked. -->
<video id="video" class="uni-video" loop :autoplay="true" muted webkit-playsinline="true" playsinline="true" x5-video-player-type="h5-page" :controls="false"></video>
<!-- Canvas that capture() draws video frames onto for face snapshots. -->
<canvas id="canvas" class="uni-canvas"></canvas>
</div>
</div>
</view>
</template>
<script>
// Screen dimensions from uni-app's synchronous system-info API.
// NOTE(review): windowWidth/windowHeight are never referenced in the rest of
// this file — possibly leftovers from omitted code; confirm before removing
// (the getSystemInfoSync() call itself is kept as-is).
const {
windowWidth,
windowHeight
} = uni.getSystemInfoSync();
/**
 * track-face — opens the user's camera via getUserMedia, renders the stream
 * into the native <video> that uni-app wraps, and samples frames onto a
 * canvas so a face image can be captured and uploaded.
 *
 * Emits:
 *   error — when getUserMedia is unavailable, the camera fails, or access
 *           was denied by the user.
 */
const TrackFace = {
	name: 'track-face',
	data() {
		return {
			// Requested capture size (px); matches the CSS size of the video.
			videoWidth: 180,
			videoHeight: 180,
			videoPlyer: null, // native <video> element inside uni-app's wrapper
			context: null, // 2D drawing context of the native canvas
			canvasDraw: null // native <canvas> element inside uni-app's wrapper
		};
	},
	mounted() {
		// uni-app wraps <video>/<canvas> in several layers of DOM; reach
		// through them with querySelector to get the real elements.
		this.videoPlyer = document.querySelector('#video video');
		this.canvasDraw = document.querySelector('#canvas canvas');
		this.context = this.canvasDraw.getContext('2d', {
			// capture() calls getImageData on every shot; this hint lets the
			// browser keep the backing store CPU-readable.
			willReadFrequently: true
		});
	},
	beforeDestroy() {
		// Release the camera when the component goes away.
		if (window.stream) {
			window.stream.getTracks().forEach((track) => track.stop());
		}
	},
	methods: {
		// Start (or restart) the camera preview.
		initVideo() {
			if (window.stream) {
				// Stop the previous stream first so the device is not held twice.
				window.stream.getTracks().forEach((track) => track.stop());
			}
			const constraints = {
				audio: false,
				video: {
					width: this.videoWidth,
					height: this.videoHeight,
					facingMode: 'user' // front-facing camera
				}
			};
			// Guard mediaDevices itself: it is undefined in insecure contexts
			// and old webviews, and dereferencing it directly would throw
			// before the legacy fallbacks could ever be reached.
			const supported =
				(navigator.mediaDevices && navigator.mediaDevices.getUserMedia) ||
				navigator.getUserMedia ||
				navigator.webkitGetUserMedia ||
				navigator.mozGetUserMedia;
			if (supported) {
				this.getUserMedia(constraints, this.success, this.error);
			} else {
				// The parent component listens for this event.
				this.$emit('error', {
					message: 'getUserMedia error'
				});
			}
		},
		// Cross-browser wrapper around the various getUserMedia flavors.
		getUserMedia(constraints, success, error) {
			if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
				// Modern promise-based API.
				navigator.mediaDevices.getUserMedia(constraints).then(success).catch(error);
			} else if (navigator.webkitGetUserMedia) {
				navigator.webkitGetUserMedia(constraints, success, error); // WebKit
			} else if (navigator.mozGetUserMedia) {
				navigator.mozGetUserMedia(constraints, success, error); // Firefox
			} else if (navigator.getUserMedia) {
				navigator.getUserMedia(constraints, success, error); // legacy spec
			}
		},
		// getUserMedia success: wire the stream into the video element.
		success(stream) {
			// Kept on window so beforeDestroy/initVideo can stop it later.
			window.stream = stream;
			try {
				this.videoPlyer.srcObject = stream;
			} catch (err) {
				// Fallback for old browsers without srcObject support.
				const CompatibleURL = window.URL || window.webkitURL;
				this.videoPlyer.src = CompatibleURL.createObjectURL(stream);
			}
			// play() returns a Promise in modern browsers; autoplay policies
			// can reject it, so route that failure through the error handler
			// instead of leaving an unhandled rejection.
			const playing = this.videoPlyer.play();
			if (playing && typeof playing.catch === 'function') {
				playing.catch(this.error);
			}
		},
		// getUserMedia failure: forward to the parent.
		error(error) {
			if (error.name === 'NotAllowedError' || error.message === 'Permission denied') {
				// User denied / closed the camera — hook for extra handling.
			}
			this.$emit('error', error);
		},
		// Euclidean distance between two [r, g, b] triplets; used by capture()
		// to decide whether the sampled frame is a single flat color.
		// (Called by the original code but missing from it — added per the
		// referenced zhangxinxu pure-color article.)
		colorDistance(rgb1, rgb2) {
			return Math.sqrt(
				Math.pow(rgb1[0] - rgb2[0], 2) +
				Math.pow(rgb1[1] - rgb2[1], 2) +
				Math.pow(rgb1[2] - rgb2[2], 2)
			);
		},
		// Grab a 60x60 sample of the current video frame; skip pure-color
		// frames (camera covered / not started), otherwise upload the shot
		// as a PNG data URL.
		capture() {
			const SAMPLE = 60; // sampled square, in canvas pixels
			const PURE_THRESHOLD = 30; // mean color distance below this = flat color
			this.context.drawImage(this.videoPlyer, 0, 0, SAMPLE, SAMPLE);
			const imgData = this.context.getImageData(0, 0, SAMPLE, SAMPLE).data;
			// Data is RGBA; stepping by 16 samples every 4th pixel, keeping
			// [r, g, b] and dropping the (always 255) alpha channel.
			const points = [];
			for (let i = 0; i < imgData.length - 1; i += 16) {
				points.push([imgData[i], imgData[i + 1], imgData[i + 2]]);
			}
			// Pairwise color distances — approach from:
			// https://www.zhangxinxu.com/wordpress/2021/06/js-image-colorful-or-pure/
			const arrDistance = [];
			points.forEach((arrRGB) => {
				points.forEach((arrRGB2) => {
					if (arrRGB2 !== arrRGB) {
						arrDistance.push(this.colorDistance(arrRGB, arrRGB2));
					}
				});
			});
			// Initial value 0 keeps reduce() safe on an empty array (the
			// original threw a TypeError in that case); guard the division too.
			const sum = arrDistance.reduce((prev, curv) => prev + curv, 0);
			const mean = arrDistance.length > 0 ? sum / arrDistance.length : 0;
			if (Math.round(100 * mean) / 100 < PURE_THRESHOLD) {
				// Frame is (near-)pure color — reject this shot.
				return false;
			}
			const file = this.canvasDraw.toDataURL('image/png');
			// uploadFaceImg is implemented outside this excerpt.
			this.uploadFaceImg(file);
			if (this.context) {
				this.context.clearRect(0, 0, SAMPLE, SAMPLE);
			}
		}
	}
};

export default TrackFace;
</script>
<style lang="scss">
/* Fixed overlay sized to the 180px camera preview. */
.track-container {
position: fixed;
z-index: 30;
width: 180px;
}
.track_content {
width: 100%;
position: relative;
/* Camera preview; stacked above the canvas (z-index 100 vs 99). */
.uni-video {
width: 180px;
height: 180px;
z-index: 100;
}
/* Capture canvas pinned under the video's top-left corner.
   NOTE(review): CSS width/height do not resize a canvas's drawing buffer
   (which defaults to 300x150); capture() only uses the top-left 60x60 —
   confirm whether the uni-app wrapper sets the width/height attributes. */
.uni-canvas {
width: 60px;
height: 60px;
z-index: 99;
position: absolute;
top: 0;
left: 0;
}
}
</style>