功能
通过 getDisplayMedia 获取视频流, 对视频流添加特效, 然后再返回一个新的流, 用于发送到远端。
实现思路
- 通过 getDisplayMedia 获取视频流 mediaStream
- 将 Stream 播放在 video 上
- 通过 MediaRecorder 建立定时任务, 将 video 绘制到 canvas 上
- 在 canvas 上进行数据处理, 然后绘制到另一个 canvas2 上
- 数据处理流程: canvas -> blob -> buffer -> changeBuffer() -> blob -> dataUrl -> canvas2
- 通过 canvas2.captureStream() 获取新的媒体流
- 将源 MediaStream 中的音频添加到新的媒体流内
示例代码内的变量参考完整代码内的变量声明
获取屏幕共享mediaStream
// Capture the screen as a MediaStream and play it in the <video> element.
// Fix: request audio here too — the later step copies the source stream's
// audio tracks into the processed stream, which only works if audio was
// captured (kept consistent with the full code below, which uses audio: true).
async function start () {
  stream = await navigator.mediaDevices.getDisplayMedia({ video: true, audio: true })
  video.srcObject = stream
}
// Stop capturing: end every track of the source stream, then stop the recorder.
function stop() {
  for (const track of stream.getTracks()) {
    track.stop()
  }
  recorder.stop()
}
在 video 内播放 stream
video.srcObject = stream
定时任务
这里方法不固定, 可以使用 MediaRecorder 录制媒体流, 通过设置 start 参数来实现; 也可以使用 setInterval 或者 requestAnimationFrame. 这里使用 MediaRecorder 是为了保存下载原始媒体流。
// Use MediaRecorder as the "timer": with a timeslice passed to start(),
// ondataavailable fires once per slice, driving the per-frame pipeline.
recorder = new MediaRecorder(stream);
recorder.ondataavailable = async (e) => {
// runs every 50 ms (the timeslice passed to recorder.start below)
doSomething()
}
recorder.start(50);
绘制canvas
// Draw the current frame of the playback video onto the canvas.
function drawImage() {
ctx.drawImage(video2, 0, 0, 480, 270) // video2 is the playback element for the `MediaSource`
}
数据格式转换
// Snapshot the c1 canvas as a Blob (PNG by default).
function getBlobFromCanvas () {
  return new Promise((resolve) => c1.toBlob(resolve))
}
// Read a Blob into an ArrayBuffer.
// Fixes: the onload parameter is an event (renamed from the misleading
// `result`), and onerror is wired up so the promise cannot hang forever
// if the read fails.
function blobToBuffer (blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = function (event) {
      resolve(event.target.result);
    }
    reader.onerror = function () {
      reject(reader.error);
    }
    reader.readAsArrayBuffer(blob);
  })
}
// Wrap an ArrayBuffer (or any BlobPart) back into a Blob.
function bufferToBlob (buffer) {
  const blob = new Blob([buffer])
  return blob
}
// Convert a Blob to a data: URL.
// Fix: handle read errors so the promise always settles.
function blobToDataUrl (blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = function (e) {
      resolve(e.target.result);
    }
    reader.onerror = function () {
      reject(reader.error);
    }
    reader.readAsDataURL(blob);
  })
}
// Draw a data: URL image onto canvas2 (via ctx2).
// Fix: return a Promise so callers can actually await the asynchronous
// image decode + draw; reject on load failure instead of just logging.
function dataUrlToCanvas (url) {
  return new Promise((resolve, reject) => {
    const img = new Image();
    img.onload = function () {
      ctx2.drawImage(img, 0, 0);
      resolve();
    };
    img.onerror = () => {
      reject(new Error('failed to load image from data URL'))
    }
    img.src = url;
  })
}
从canvas
获取新的stream
// Capture a new MediaStream from the processed canvas.
// Fix: the original referenced an undefined `canvas`; the processed
// canvas is `c2` (canvas2) everywhere else in this article.
function getNewStream () {
  newStream = c2.captureStream()
}
将源MediaStream
中的音频添加到新的媒体流内
// Copy the audio tracks from the source stream into the new stream.
// forEach, not map: we iterate purely for the addTrack side effect.
stream.getAudioTracks().forEach(track => {
  newStream.addTrack(track)
})
完整代码
JavaScript
// --- DOM references, canvas contexts, and shared state ---
const c1 = document.getElementById('c1') // canvas receiving raw video frames
const c2 = document.getElementById('c2') // canvas receiving processed frames
const offsetWidth = 1080
const offsetHeight = 720
// Off-screen alternative (no visible canvases):
// const c1 = document.createElement('canvas')
// const c2 = document.createElement('canvas')
// c1.width = offsetWidth
// c1.height = offsetHeight
// c2.width = offsetWidth
// c2.height = offsetHeight
const video = document.getElementById('video')   // plays the source stream
const video2 = document.getElementById('video2') // plays the processed stream
const image = document.getElementById('img')
const btnStart = document.getElementById('btn-start')
const btnStop = document.getElementById('btn-stop')
btnStart.addEventListener('click', start)
btnStop.addEventListener('click', stop)
const ctx1 = c1.getContext('2d')
const ctx2 = c2.getContext('2d')
let stream    // source screen-share MediaStream
let recorder  // MediaRecorder used as the frame "timer"
let newStream // processed output MediaStream
let isRunning = false // guards against overlapping frame processing
// Start screen capture, build the processed stream via changeStream,
// merge the original audio into it, and play both streams.
// Fix: use forEach (not map) when iterating purely for side effects.
async function start() {
  stream = await navigator.mediaDevices.getDisplayMedia({ video: true, audio: true })
  video.srcObject = stream
  video.play()
  newStream = await changeStream(stream)
  // Carry the source audio tracks over to the processed stream.
  stream.getAudioTracks().forEach(track => {
    newStream.addTrack(track)
  })
  video2.srcObject = newStream
  video2.play()
}
// Set up per-frame processing of `stream` and return the processed stream
// captured from canvas c2. Pipeline per frame:
//   canvas -> blob -> buffer -> [callback(buffer)] -> blob -> dataUrl -> canvas2
// Fixes vs the original:
//  - removes the `new Promise(async ...)` anti-pattern (the function simply
//    returns the captured stream; `await changeStream(...)` still works);
//  - removes the accidental implicit global (`mediaStream` was assigned
//    undeclared inside resolve());
//  - frame errors are logged instead of calling reject() on an
//    already-resolved promise (which silently discarded them);
//  - `finally` guarantees the isRunning guard is always released.
function changeStream (stream, callback) {
  recorder = new MediaRecorder(stream);
  recorder.ondataavailable = async () => {
    if (isRunning) return // previous frame still in flight — skip this tick
    isRunning = true
    try {
      let blob = await draw()
      let buffer = await blobToBuffer(blob)
      if (callback && typeof callback === 'function') {
        buffer = callback(buffer)
      }
      const processedBlob = await bufferToBlob(buffer)
      const url = await blobToDataUrl(processedBlob)
      await dataUrlToCanvas(url)
    } catch (error) {
      console.error('frame processing failed', error)
    } finally {
      isRunning = false
    }
  }
  recorder.start(40); // fire ondataavailable every 40 ms
  return c2.captureStream()
}
// Stop recording and release every track on both streams.
// Fix: use forEach (not map) when iterating purely for the stop() side effect.
function stop() {
  recorder.stop()
  stream.getTracks().forEach(track => track.stop())
  console.log(newStream.getTracks())
  newStream.getTracks().forEach(track => track.stop())
}
// Draw the current source-video frame into c1, then snapshot c1 as a Blob.
function draw () {
  ctx1.drawImage(video, 0, 0, offsetWidth, offsetHeight)
  return new Promise((resolve) => c1.toBlob(resolve))
}
// Read a Blob into an ArrayBuffer.
// Fixes: the onload parameter is an event (renamed from the misleading
// `result`), and onerror is wired up so the promise cannot hang forever
// if the read fails.
function blobToBuffer (blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = function (event) {
      resolve(event.target.result);
    }
    reader.onerror = function () {
      reject(reader.error);
    }
    reader.readAsArrayBuffer(blob);
  })
}
// Wrap an ArrayBuffer (or any BlobPart) back into a Blob.
function bufferToBlob (buffer) {
  const blob = new Blob([buffer])
  return blob
}
// Convert a Blob to a data: URL.
// Fix: handle read errors so the promise always settles.
function blobToDataUrl (blob) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = function (e) {
      resolve(e.target.result);
    }
    reader.onerror = function () {
      reject(reader.error);
    }
    reader.readAsDataURL(blob);
  })
}
// Draw a data: URL image onto canvas c2, scaled to offsetWidth x offsetHeight.
// Fix: return a Promise so `await dataUrlToCanvas(url)` in changeStream
// actually waits for the asynchronous image decode + draw (previously the
// await was a no-op and frames could race); reject on load failure instead
// of swallowing it with console.log.
function dataUrlToCanvas (url) {
  return new Promise((resolve, reject) => {
    const img = new Image();
    img.onload = function () {
      ctx2.drawImage(img, 0, 0, offsetWidth, offsetHeight);
      resolve();
    };
    img.onerror = () => {
      reject(new Error('failed to load image from data URL'))
    }
    img.src = url;
  })
}
HTML
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
<style>
canvas, video {
width: 480px;
height: 270px;
}
</style>
</head>
<body>
<!-- Start / stop controls; click handlers are attached in main.js -->
<h5 id="btn-start">开始</h5>
<h5 id="btn-stop">停止</h5>
<div style="display: flex">
<div>
<h3>原始视频</h3>
<!-- Plays the raw getDisplayMedia stream -->
<video id="video"></video>
</div>
<div>
<h3>原始画布</h3>
<!-- c1: canvas receiving the raw video frames -->
<canvas id="c1" width="480" height="270"></canvas>
</div>
</div>
<h3>修改后</h3>
<div style="display: flex">
<!-- c2: processed frames; its captureStream() feeds video2 -->
<canvas id="c2" width="480" height="270"></canvas>
<video id="video2"></video>
</div>
<img id="img" src="" alt="" srcset="">
<script src="./main.js"></script>
</body>
</html>